From 88f56c6b2057d319268f94ae7398ea91ce11379c Mon Sep 17 00:00:00 2001 From: Kenny Daniel Date: Mon, 6 Oct 2025 16:05:01 -0600 Subject: [PATCH] Use WKB from hyparquet --- demo/bundle.min.js | 2 +- demo/bundle.min.js.map | 2 +- package.json | 12 ++-- src/index.js | 3 +- src/toGeoJson.js | 9 ++- src/wkb.js | 144 ----------------------------------------- test/wkb.test.js | 2 +- 7 files changed, 18 insertions(+), 156 deletions(-) delete mode 100644 src/wkb.js diff --git a/demo/bundle.min.js b/demo/bundle.min.js index 07751ec..bc28a64 100644 --- a/demo/bundle.min.js +++ b/demo/bundle.min.js @@ -1,2 +1,2 @@ -const e=["BOOLEAN","INT32","INT64","INT96","FLOAT","DOUBLE","BYTE_ARRAY","FIXED_LEN_BYTE_ARRAY"],t=["PLAIN","GROUP_VAR_INT","PLAIN_DICTIONARY","RLE","BIT_PACKED","DELTA_BINARY_PACKED","DELTA_LENGTH_BYTE_ARRAY","DELTA_BYTE_ARRAY","RLE_DICTIONARY","BYTE_STREAM_SPLIT"],n=["REQUIRED","OPTIONAL","REPEATED"],r=["UTF8","MAP","MAP_KEY_VALUE","LIST","ENUM","DECIMAL","DATE","TIME_MILLIS","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","UINT_32","UINT_64","INT_8","INT_16","INT_32","INT_64","JSON","BSON","INTERVAL"],o=["UNCOMPRESSED","SNAPPY","GZIP","LZO","BROTLI","LZ4","ZSTD","LZ4_RAW"],i=["DATA_PAGE","INDEX_PAGE","DICTIONARY_PAGE","DATA_PAGE_V2"],f={timestampFromMilliseconds:e=>new Date(Number(e)),timestampFromMicroseconds:e=>new Date(Number(e/1000n)),timestampFromNanoseconds:e=>new Date(Number(e/1000000n)),dateFromDays:e=>new Date(864e5*e)};function s(e,t,n,r){if(t&&n.endsWith("_DICTIONARY")){let n=e;e instanceof Uint8Array&&!(t instanceof Uint8Array)&&(n=new t.constructor(e.length));for(let r=0;rJSON.parse(t.decode(e)))}if("BSON"===f)throw new Error("parquet bson not supported");if("INTERVAL"===f)throw new Error("parquet interval not supported");if("UTF8"===f||"STRING"===s?.type||o&&"BYTE_ARRAY"===i){const t=new TextDecoder,n=new Array(e.length);for(let r=0;r=2**(n-1)&&(t-=2**n),t}function c(e){return 86400000000000n*((e>>64n)-2440588n)+(0xffffffffffffffffn&e)}function u(e){if(!e)return;const t=e[1]<<8|e[0],n=t>>15?-1:1,r=t>>10&31,o=1023&t;return 0===r?n*2**-14*(o/1024):31===r?o?NaN:n*(1/0):n*2**(r-15)*(1+o/1024)}function d(e,t,n){const r=e[t],o=[];let i=1;if(r.num_children)for(;o.lengtht.element.name===e);if(!o)throw new Error(`parquet schema element not found: ${t}`);r.push(o),n=o}return r}function p(e){let t=0;for(const{element:n}of e)"REPEATED"===n.repetition_type&&t++;return t}function h(e){let t=0;for(const{element:n}of e.slice(1))"REQUIRED"!==n.repetition_type&&t++;return t}const g=0,w=1,y=2,m=3,E=4,A=5,I=6,T=7,b=8,v=9,L=12;function N(e){let t=0;const n={};for(;e.offset>>1^-(1&t)}(e);case I:return O(e);case T:{const t=e.view.getFloat64(e.offset,!0);return e.offset+=8,t}case b:{const t=D(e),n=new Uint8Array(e.view.buffer,e.view.byteOffset+e.offset,t);return e.offset+=t,n}case v:{const[t,n]=function(e){const t=e.view.getUint8(e.offset++),n=t>>4,r=U(t);if(15===n){return[r,D(e)]}return[r,n]}(e),r=t===w||t===y,o=new Array(n);for(let i=0;i>1n^-(1n&t)}function U(e){return 15&e}function P(e,t){const n=e.view.getUint8(e.offset++);if((15&n)===g)return[0,0,t];const r=n>>4;let o;if(!r)throw new Error("non-delta field id not supported");return o=t+r,[U(n),o,o]}async function S(e,{parsers:t,initialFetchSize:n=524288}={}){if(!(e&&e.byteLength>=0))throw new Error("parquet expected AsyncBuffer");const r=Math.max(0,e.byteLength-n),o=await e.slice(r,e.byteLength),i=new DataView(o);if(827474256!==i.getUint32(o.byteLength-4,!0))throw new Error("parquet file invalid (footer != PAR1)");const 
f=i.getUint32(o.byteLength-8,!0);if(f>e.byteLength-8)throw new Error(`parquet metadata length ${f} exceeds available buffer ${e.byteLength-8}`);if(f+8>n){const n=e.byteLength-f-8,i=await e.slice(n,r),s=new ArrayBuffer(f+8),a=new Uint8Array(s);return a.set(new Uint8Array(i)),a.set(new Uint8Array(o),r-n),B(s,{parsers:t})}return B(o,{parsers:t})}function B(s,{parsers:a}={}){if(!(s instanceof ArrayBuffer))throw new Error("parquet expected ArrayBuffer");const l=new DataView(s);if(a={...f,...a},l.byteLength<8)throw new Error("parquet file is too short");if(827474256!==l.getUint32(l.byteLength-4,!0))throw new Error("parquet file invalid (footer != PAR1)");const c=l.byteLength-8,u=l.getUint32(c,!0);if(u>l.byteLength-8)throw new Error(`parquet metadata length ${u} exceeds available buffer ${l.byteLength-8}`);const d=N({view:l,offset:c-u}),_=new TextDecoder;function p(e){return e&&_.decode(e)}const h=d.field_1,g=d.field_2.map(t=>({type:e[t.field_1],type_length:t.field_2,repetition_type:n[t.field_3],name:p(t.field_4),num_children:t.field_5,converted_type:r[t.field_6],scale:t.field_7,precision:t.field_8,field_id:t.field_9,logical_type:M(t.field_10)})),w=g.filter(e=>e.type),y=d.field_3,m=d.field_4.map(n=>({columns:n.field_1.map((n,r)=>({file_path:p(n.field_1),file_offset:n.field_2,meta_data:n.field_3&&{type:e[n.field_3.field_1],encodings:n.field_3.field_2?.map(e=>t[e]),path_in_schema:n.field_3.field_3.map(p),codec:o[n.field_3.field_4],num_values:n.field_3.field_5,total_uncompressed_size:n.field_3.field_6,total_compressed_size:n.field_3.field_7,key_value_metadata:n.field_3.field_8,data_page_offset:n.field_3.field_9,index_page_offset:n.field_3.field_10,dictionary_page_offset:n.field_3.field_11,statistics:$(n.field_3.field_12,w[r],a),encoding_stats:n.field_3.field_13?.map(e=>({page_type:i[e.field_1],encoding:t[e.field_2],count:e.field_3})),bloom_filter_offset:n.field_3.field_14,bloom_filter_length:n.field_3.field_15,size_statistics:n.field_3.field_16&&{unencoded_byte_array_data_bytes:n.field_3.field_16.field_1,repetition_level_histogram:n.field_3.field_16.field_2,definition_level_histogram:n.field_3.field_16.field_3}},offset_index_offset:n.field_4,offset_index_length:n.field_5,column_index_offset:n.field_6,column_index_length:n.field_7,crypto_metadata:n.field_8,encrypted_column_metadata:n.field_9})),total_byte_size:n.field_2,num_rows:n.field_3,sorting_columns:n.field_4?.map(e=>({column_idx:e.field_1,descending:e.field_2,nulls_first:e.field_3})),file_offset:n.field_5,total_compressed_size:n.field_6,ordinal:n.field_7})),E=d.field_5?.map(e=>({key:p(e.field_1),value:p(e.field_2)}));return{version:h,schema:g,num_rows:y,row_groups:m,key_value_metadata:E,created_by:p(d.field_6),metadata_length:u}}function M(e){return e?.field_1?{type:"STRING"}:e?.field_2?{type:"MAP"}:e?.field_3?{type:"LIST"}:e?.field_4?{type:"ENUM"}:e?.field_5?{type:"DECIMAL",scale:e.field_5.field_1,precision:e.field_5.field_2}:e?.field_6?{type:"DATE"}:e?.field_7?{type:"TIME",isAdjustedToUTC:e.field_7.field_1,unit:F(e.field_7.field_2)}:e?.field_8?{type:"TIMESTAMP",isAdjustedToUTC:e.field_8.field_1,unit:F(e.field_8.field_2)}:e?.field_10?{type:"INTEGER",bitWidth:e.field_10.field_1,isSigned:e.field_10.field_2}:e?.field_11?{type:"NULL"}:e?.field_12?{type:"JSON"}:e?.field_13?{type:"BSON"}:e?.field_14?{type:"UUID"}:e?.field_15?{type:"FLOAT16"}:e}function F(e){if(e.field_1)return"MILLIS";if(e.field_2)return"MICROS";if(e.field_3)return"NANOS";throw new Error("parquet time unit required")}function $(e,t,n){return 
e&&{max:q(e.field_1,t,n),min:q(e.field_2,t,n),null_count:e.field_3,distinct_count:e.field_4,max_value:q(e.field_5,t,n),min_value:q(e.field_6,t,n),is_max_value_exact:e.field_7,is_min_value_exact:e.field_8}}function q(e,t,n){const{type:r,converted_type:o,logical_type:i}=t;if(void 0===e)return e;if("BOOLEAN"===r)return 1===e[0];if("BYTE_ARRAY"===r)return(new TextDecoder).decode(e);const f=new DataView(e.buffer,e.byteOffset,e.byteLength);return"FLOAT"===r&&4===f.byteLength?f.getFloat32(0,!0):"DOUBLE"===r&&8===f.byteLength?f.getFloat64(0,!0):"INT32"===r&&"DATE"===o?n.dateFromDays(f.getInt32(0,!0)):"INT64"===r&&"TIMESTAMP_MILLIS"===o?n.timestampFromMilliseconds(f.getBigInt64(0,!0)):"INT64"===r&&"TIMESTAMP_MICROS"===o?n.timestampFromMicroseconds(f.getBigInt64(0,!0)):"INT64"===r&&"TIMESTAMP"===i?.type&&"NANOS"===i?.unit?n.timestampFromNanoseconds(f.getBigInt64(0,!0)):"INT64"===r&&"TIMESTAMP"===i?.type&&"MICROS"===i?.unit?n.timestampFromMicroseconds(f.getBigInt64(0,!0)):"INT64"===r&&"TIMESTAMP"===i?.type?n.timestampFromMilliseconds(f.getBigInt64(0,!0)):"INT32"===r&&4===f.byteLength?f.getInt32(0,!0):"INT64"===r&&8===f.byteLength?f.getBigInt64(0,!0):"DECIMAL"===o?l(e)*10**-(t.scale||0):"FLOAT16"===i?.type?u(e):e}function Y(e,t){for(let n=0;n{if(!e.ok)throw new Error(`fetch head failed ${e.status}`);const t=e.headers.get("Content-Length");if(!t)throw new Error("missing content length");return parseInt(t)})}(e,n,o);const f=n||{};return{byteLength:t,async slice(t,n){if(i)return i.then(e=>e.slice(t,n));const r=new Headers(f.headers),s=void 0===n?"":n-1;r.set("Range",`bytes=${t}-${s}`);const a=await o(e,{...f,headers:r});if(!a.ok||!a.body)throw new Error(`fetch failed ${a.status}`);if(200===a.status)return i=a.arrayBuffer(),i.then(e=>e.slice(t,n));if(206===a.status)return a.arrayBuffer();throw new Error(`fetch received unexpected status code ${a.status}`)}}}function x({byteLength:e,slice:t},{minSize:n=524288}={}){if(e(await n).slice(e,t)}}const r=new Map;return{byteLength:e,slice(n,o){const i=function(e,t,n){if(e<0){if(void 0!==t)throw new Error(`invalid suffix range [${e}, ${t}]`);return void 0===n?`${e},`:`${n+e},${n}`}if(void 0!==t){if(e>t)throw new Error(`invalid empty range [${e}, ${t}]`);return`${e},${t}`}return void 0===n?`${e},`:`${e},${n}`}(n,o,e),f=r.get(i);if(f)return f;const s=t(n,o);return r.set(i,s),s}}}function G(e){if(!e)return[];if(1===e.length)return e[0];const t=[];for(const n of e)Y(t,n);return t}function k({dictionary_page_offset:e,data_page_offset:t,total_compressed_size:n}){const r=e||t;return{startByte:Number(r),endByte:Number(r+n)}}function z(e,t,n,r,o){const i=t?.length||n.length;if(!i)return r;const f=h(o),s=o.map(({element:e})=>e.repetition_type);let a=0;const l=[e];let c=e,u=0,d=0,_=0;if(n[0])for(;u1)return!1;const t=e.children[0];return!(t.children.length>1)&&"REPEATED"===t.element.repetition_type}(t)){let f=t.children[0],s=i;1===f.children.length&&(f=f.children[0],s++),V(e,f,s);const a=f.path.join("."),l=e.get(a);if(!l)throw new Error("parquet list column missing values");return o&&j(l,n),e.set(r,l),void e.delete(a)}if(function(e){if(!e)return!1;if("MAP"!==e.element.converted_type)return!1;if(e.children.length>1)return!1;const t=e.children[0];if(2!==t.children.length)return!1;if("REPEATED"!==t.element.repetition_type)return!1;const n=t.children.find(e=>"key"===e.element.name);if("REPEATED"===n?.element.repetition_type)return!1;const r=t.children.find(e=>"value"===e.element.name);return"REPEATED"!==r?.element.repetition_type}(t)){const 
f=t.children[0].element.name;V(e,t.children[0].children[0],i+1),V(e,t.children[0].children[1],i+1);const s=e.get(`${r}.${f}.key`),a=e.get(`${r}.${f}.value`);if(!s)throw new Error("parquet map column missing keys");if(!a)throw new Error("parquet map column missing values");if(s.length!==a.length)throw new Error("parquet map column key/value length mismatch");const l=Q(s,a,i);return o&&j(l,n),e.delete(`${r}.${f}.key`),e.delete(`${r}.${f}.value`),void e.set(r,l)}if(t.children.length){const i="REQUIRED"===t.element.repetition_type?n:n+1,f={};for(const n of t.children){V(e,n,i);const t=e.get(n.path.join("."));if(!t)throw new Error("parquet struct missing child data");f[n.element.name]=t}for(const n of t.children)e.delete(n.path.join("."));const s=J(f,i);o&&j(s,n),e.set(r,s)}}function j(e,t){for(let n=0;n>l&u;for(l+=i;l>=8;)l-=8n,e.offset++,l&&(t|=BigInt(e.view.getUint8(e.offset))<>>1;Z(e,o,t,n,i),i+=o}}e.offset=o+r}function Z(e,t,n,r,o){const i=n+7>>3;let f=0;for(let t=0;t>1<<3;const f=(1<8?(l-=8,a-=8,s>>>=8):a-l>l&f),i--,l+=n);return o}function te(e,t,n,r){const o=function(e,t){switch(e){case"INT32":case"FLOAT":return 4;case"INT64":case"DOUBLE":return 8;case"FIXED_LEN_BYTE_ARRAY":if(!t)throw new Error("parquet byteWidth missing type_length");return t;default:throw new Error(`parquet unsupported type: ${e}`)}}(n,r),i=new Uint8Array(t*o);for(let n=0;n1){const r=p(n);if(r){const n=new Array(t.num_values);return X(e,K(r),n),n}}return[]}(f,t,o),{definitionLevels:l,numNulls:c}=function(e,t,n){const r=h(n);if(!r)return{definitionLevels:[],numNulls:0};const o=new Array(t.num_values);X(e,K(r),o);let i=t.num_values;for(const e of o)e===r&&i--;0===i&&(o.length=0);return{definitionLevels:o,numNulls:i}}(f,t,o),u=t.num_values-c;if("PLAIN"===t.encoding)s=ne(f,n,u,r.type_length);else if("PLAIN_DICTIONARY"===t.encoding||"RLE_DICTIONARY"===t.encoding||"RLE"===t.encoding){const e="BOOLEAN"===n?1:i.getUint8(f.offset++);e?(s=new Array(u),"BOOLEAN"===n?(X(f,e,s),s=s.map(e=>!!e)):X(f,e,s,i.byteLength-f.offset)):s=new Uint8Array(u)}else if("BYTE_STREAM_SPLIT"===t.encoding)s=te(f,u,n,r.type_length);else if("DELTA_BINARY_PACKED"===t.encoding){s="INT32"===n?new Int32Array(u):new BigInt64Array(u),W(f,u,s)}else{if("DELTA_LENGTH_BYTE_ARRAY"!==t.encoding)throw new Error(`parquet unsupported encoding: ${t.encoding}`);s=new Array(u),H(f,u,s)}return{definitionLevels:l,repetitionLevels:a,dataPage:s}}function se(e,t,n,r){let o;const i=r?.[n];if("UNCOMPRESSED"===n)o=e;else if(i)o=i(e,t);else{if("SNAPPY"!==n)throw new Error(`parquet unsupported compression codec: ${n}`);o=new Uint8Array(t),function(e,t){const n=e.byteLength,r=t.byteLength;let o=0,i=0;for(;o=n)throw new Error("invalid snappy length header");for(;o=n)throw new Error("missing eof marker");if(3&r){let s=0;switch(3&r){case 1:f=4+(r>>>2&7),s=e[o]+(r>>>5<<8),o++;break;case 2:if(n<=o+1)throw new Error("snappy error end of input");f=(r>>>2)+1,s=e[o]+(e[o+1]<<8),o+=2;break;case 3:if(n<=o+3)throw new Error("snappy error end of input");f=(r>>>2)+1,s=e[o]+(e[o+1]<<8)+(e[o+2]<<16)+(e[o+3]<<24),o+=4}if(0===s||isNaN(s))throw new Error(`invalid offset ${s} pos ${o} inputLength ${n}`);if(s>i)throw new Error("cannot copy from before start of buffer");ie(t,i-s,t,i,f),i+=f}else{let f=(r>>>2)+1;if(f>60){if(o+3>=n)throw new Error("snappy error literal pos + 3 >= inputLength");const t=f-60;f=e[o]+(e[o+1]<<8)+(e[o+2]<<16)+(e[o+3]<<24),f=1+(f&oe[t]),o+=t}if(o+f>n)throw new Error("snappy error literal exceeds input length");ie(e,o,t,i,f),o+=f,i+=f}}if(i!==r)throw new Error("premature end of 
input")}(e,o)}if(o?.length!==t)throw new Error(`parquet decompressed page length ${o?.length} does not match header ${t}`);return o}function ae(e,t,n){const r={view:new DataView(e.buffer,e.byteOffset,e.byteLength),offset:0},{type:o,element:i,schemaPath:f,codec:s,compressors:a}=n,l=t.data_page_header_v2;if(!l)throw new Error("parquet data page header v2 is undefined");const c=function(e,t,n){const r=p(n);if(!r)return[];const o=new Array(t.num_values);return X(e,K(r),o,t.repetition_levels_byte_length),o}(r,l,f);r.offset=l.repetition_levels_byte_length;const u=function(e,t,n){const r=h(n);if(r){const n=new Array(t.num_values);return X(e,K(r),n,t.definition_levels_byte_length),n}}(r,l,f),d=t.uncompressed_page_size-l.definition_levels_byte_length-l.repetition_levels_byte_length;let _=e.subarray(r.offset);!1!==l.is_compressed&&(_=se(_,d,s,a));const g=new DataView(_.buffer,_.byteOffset,_.byteLength),w={view:g,offset:0};let y;const m=l.num_values-l.num_nulls;if("PLAIN"===l.encoding)y=ne(w,o,m,i.type_length);else if("RLE"===l.encoding)y=new Array(m),X(w,1,y),y=y.map(e=>!!e);else if("PLAIN_DICTIONARY"===l.encoding||"RLE_DICTIONARY"===l.encoding){const e=g.getUint8(w.offset++);y=new Array(m),X(w,e,y,d-1)}else if("DELTA_BINARY_PACKED"===l.encoding){y="INT32"===o?new Int32Array(m):new BigInt64Array(m),W(w,m,y)}else if("DELTA_LENGTH_BYTE_ARRAY"===l.encoding)y=new Array(m),H(w,m,y);else if("DELTA_BYTE_ARRAY"===l.encoding)y=new Array(m),function(e,t,n){const r=new Int32Array(t);W(e,t,r);const o=new Int32Array(t);W(e,t,o);for(let i=0;i{c&&i({columnName:f,columnData:c,rowStart:t+u-c.length,rowEnd:t+u})});for(;u=e.view.byteLength-1);){const t=ue(e);if("DICTIONARY_PAGE"===t.type)l=ce(e,t,o,l,void 0,0),l=a(l,o);else{const r=c?.length||0,i=ce(e,t,o,l,c,n-u);c===i?u+=i.length-r:(d?.(),s.push(i),u+=i.length,c=i)}}return d?.(),u>r&&c&&(s[s.length-1]=c.slice(0,r-(u-c.length))),s}function ce(e,t,n,r,o,i){const{type:f,element:a,schemaPath:l,codec:c,compressors:u}=n,d=new Uint8Array(e.view.buffer,e.view.byteOffset+e.offset,t.compressed_page_size);if(e.offset+=t.compressed_page_size,"DATA_PAGE"===t.type){const e=t.data_page_header;if(!e)throw new Error("parquet data page header is undefined");if(i>e.num_values&&function(e){if(2!==e.length)return!1;const[,t]=e;return"REPEATED"!==t.element.repetition_type&&!t.children.length}(l))return new Array(e.num_values);const f=se(d,Number(t.uncompressed_page_size),c,u),{definitionLevels:a,repetitionLevels:_,dataPage:p}=fe(f,e,n);let h=s(p,r,e.encoding,n);if(_.length||a?.length){return z(Array.isArray(o)?o:[],a,_,h,l)}for(let e=2;e[e]));return h}if("DATA_PAGE_V2"===t.type){const e=t.data_page_header_v2;if(!e)throw new Error("parquet data page header v2 is undefined");if(i>e.num_rows)return new Array(e.num_values);const{definitionLevels:f,repetitionLevels:a,dataPage:c}=ae(d,t,n),u=s(c,r,e.encoding,n);return z(Array.isArray(o)?o:[],f,a,u,l)}if("DICTIONARY_PAGE"===t.type){const e=t.dictionary_page_header;if(!e)throw new Error("parquet dictionary page header is undefined");const n=se(d,Number(t.uncompressed_page_size),c,u);return ne({view:new DataView(n.buffer,n.byteOffset,n.byteLength),offset:0},f,e.num_values,a.type_length)}throw new Error(`parquet unsupported page type: ${t.type}`)}function ue(e){const 
n=N(e);return{type:i[n.field_1],uncompressed_page_size:n.field_2,compressed_page_size:n.field_3,crc:n.field_4,data_page_header:n.field_5&&{num_values:n.field_5.field_1,encoding:t[n.field_5.field_2],definition_level_encoding:t[n.field_5.field_3],repetition_level_encoding:t[n.field_5.field_4],statistics:n.field_5.field_5&&{max:n.field_5.field_5.field_1,min:n.field_5.field_5.field_2,null_count:n.field_5.field_5.field_3,distinct_count:n.field_5.field_5.field_4,max_value:n.field_5.field_5.field_5,min_value:n.field_5.field_5.field_6}},index_page_header:n.field_6,dictionary_page_header:n.field_7&&{num_values:n.field_7.field_1,encoding:t[n.field_7.field_2],is_sorted:n.field_7.field_3},data_page_header_v2:n.field_8&&{num_values:n.field_8.field_1,num_nulls:n.field_8.field_2,num_rows:n.field_8.field_3,encoding:t[n.field_8.field_4],definition_levels_byte_length:n.field_8.field_5,repetition_levels_byte_length:n.field_8.field_6,is_compressed:void 0===n.field_8.field_7||n.field_8.field_7,statistics:n.field_8.field_8}}}async function de({asyncColumns:e},t,n,r,o){const i=new Array(n),f=await Promise.all(e.map(({data:e})=>e.then(G))),s=e.map(e=>e.pathInSchema[0]).filter(e=>!r||r.includes(e)),a=r??s,l=a.map(t=>e.findIndex(e=>e.pathInSchema[0]===t));for(let r=t;r=0&&(t[e]=f[l[e]][r]);i[r]=t}return i}async function _e(e){e.metadata??=await S(e.file);const t=function(e){if(!e.metadata)throw new Error("parquet requires metadata");const t=function({metadata:e,rowStart:t=0,rowEnd:n=1/0,columns:r}){if(!e)throw new Error("parquetPlan requires metadata");const o=[],i=[];let f=0;for(const s of e.row_groups){const e=Number(s.num_rows),a=f+e;if(e>0&&a>=t&&fe.slice(t,n));return{byteLength:e.byteLength,slice(r,o=e.byteLength){const i=t.findIndex(({startByte:e,endByte:t})=>e<=r&&o<=t);if(i<0)throw new Error(`no prefetch for range [${r}, ${o}]`);if(t[i].startByte!==r||t[i].endByte!==o){const e=r-t[i].startByte,f=o-t[i].startByte;return n[i]instanceof Promise?n[i].then(t=>t.slice(e,f)):n[i].slice(e,f)}return n[i]}}}(e.file,t),t.groups.map(n=>function(e,{metadata:t,columns:n},r){const{file:o,compressors:i,utf8:s}=e,a=[],l={...f,...e.parsers};for(const{file_path:f,meta_data:c}of r.rowGroup.columns){if(f)throw new Error("parquet file_path not supported");if(!c)throw new Error("parquet column metadata is undefined");const u=c.path_in_schema[0];if(n&&!n.includes(u))continue;const{startByte:d,endByte:p}=k(c),h=p-d;if(h>1<<30){console.warn(`parquet skipping huge column "${c.path_in_schema}" ${h} bytes`);continue}const g=Promise.resolve(o.slice(d,p));a.push({pathInSchema:c.path_in_schema,data:g.then(n=>{const o=_(t.schema,c.path_in_schema),f={view:new DataView(n),offset:0},a={columnName:c.path_in_schema.join("."),type:c.type,element:o[o.length-1].element,schemaPath:o,codec:c.codec,parsers:l,compressors:i,utf8:s};return le(f,r,a,e.onPage)})})}return{groupStart:r.groupStart,groupRows:r.groupRows,asyncColumns:a}}(e,t,n))}(e),{rowStart:n=0,rowEnd:r,columns:o,onChunk:i,onComplete:s,rowFormat:a}=e;if(!s&&!i){for(const{asyncColumns:e}of t)for(const{data:t}of e)await t;return}const l=function({schema:e}){return _(e,[])[0]}(e.metadata),c=t.map(e=>function(e,t){const{asyncColumns:n}=e,r=[];for(const e of t.children)if(e.children.length){const t=n.filter(t=>t.pathInSchema[0]===e.element.name);if(!t.length)continue;const o=new Map,i=Promise.all(t.map(e=>e.data.then(t=>{o.set(e.pathInSchema.join("."),G(t))}))).then(()=>{V(o,e);const t=o.get(e.path.join("."));if(!t)throw new Error("parquet column data not 
assembled");return[t]});r.push({pathInSchema:e.path,data:i})}else{const t=n.find(t=>t.pathInSchema[0]===e.element.name);t&&r.push(t)}return{...e,asyncColumns:r}}(e,l));if(i)for(const e of c)for(const t of e.asyncColumns)t.data.then(n=>{let r=e.groupStart;for(const e of n)i({columnName:t.pathInSchema[0],columnData:e,rowStart:r,rowEnd:r+e.length}),r+=e.length});if(s){const e=[];for(const t of c){const i=Math.max(n-t.groupStart,0),f=Math.min((r??1/0)-t.groupStart,t.groupRows);Y(e,(await de(t,i,f,o,a)).slice(i,f))}s(e)}else for(const{asyncColumns:e}of c)for(const{data:t}of e)await t}function pe(e){const t=new DataView(e.buffer,e.byteOffset,e.byteLength);let n=0;const r=e[n];n+=1;const o=1===r,i=t.getUint32(n,o);if(n+=4,1===i){const e=t.getFloat64(n,o);n+=8;const r=t.getFloat64(n,o);return n+=8,{type:"Point",coordinates:[e,r]}}if(2===i){const e=t.getUint32(n,o);n+=4;const r=[];for(let i=0;i"geo"===e.key);if(!r)throw new Error('Invalid GeoParquet file: missing "geo" metadata');const o=JSON.parse(r.value||"{}"),i=await(f={file:e,metadata:n,utf8:!1,compressors:t},new Promise((e,t)=>{_e({rowFormat:"object",...f,onComplete:e}).catch(t)}));var f;const s=[],a=o.primary_column||"geometry";for(const e of i){const t=e[a];if(!t)continue;const n=pe(t),r={};for(const t of Object.keys(e)){const n=e[t];t!==a&&null!==n&&(r[t]=n)}const o={type:"Feature",geometry:n,properties:r};s.push(o)}return{type:"FeatureCollection",features:s}}!async function(){const{Map:e}=await google.maps.importLibrary("maps"),t=new e(document.getElementById("map"),{center:{lat:39,lng:-98},zoom:4});try{const e=x(await C({url:"https://hyparam.github.io/geoparquet/demo/polys.parquet",byteLength:29838}));console.log("GeoParquet file:",e);const n=await he({file:e});console.log("GeoJSON:",n),t.data.addGeoJson(n)}catch(e){console.error("Error loading or parsing GeoParquet file:",e)}}(); +const e=["BOOLEAN","INT32","INT64","INT96","FLOAT","DOUBLE","BYTE_ARRAY","FIXED_LEN_BYTE_ARRAY"],t=["PLAIN","GROUP_VAR_INT","PLAIN_DICTIONARY","RLE","BIT_PACKED","DELTA_BINARY_PACKED","DELTA_LENGTH_BYTE_ARRAY","DELTA_BYTE_ARRAY","RLE_DICTIONARY","BYTE_STREAM_SPLIT"],n=["REQUIRED","OPTIONAL","REPEATED"],r=["UTF8","MAP","MAP_KEY_VALUE","LIST","ENUM","DECIMAL","DATE","TIME_MILLIS","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","UINT_32","UINT_64","INT_8","INT_16","INT_32","INT_64","JSON","BSON","INTERVAL"],i=["UNCOMPRESSED","SNAPPY","GZIP","LZO","BROTLI","LZ4","ZSTD","LZ4_RAW"],o=["DATA_PAGE","INDEX_PAGE","DICTIONARY_PAGE","DATA_PAGE_V2"],f=["SPHERICAL","VINCENTY","THOMAS","ANDOYER","KARNEY"];function s(e){const t=a(e);if(1===t.type)return{type:"Point",coordinates:l(e,t)};if(2===t.type)return{type:"LineString",coordinates:c(e,t)};if(3===t.type)return{type:"Polygon",coordinates:u(e,t)};if(4===t.type){const n=[];for(let r=0;r1&&i<=7&&(f=t.getUint32(e.offset,n),e.offset+=4);let s=2;return o&&s++,3===o&&s++,{littleEndian:n,type:i,dim:s,count:f}}function l(e,t){const n=[];for(let r=0;rnew Date(Number(e)),timestampFromMicroseconds:e=>new Date(Number(e/1000n)),timestampFromNanoseconds:e=>new Date(Number(e/1000000n)),dateFromDays:e=>new Date(864e5*e),stringFromBytes:e=>e&&d.decode(e),geometryFromBytes:e=>e&&s({view:new DataView(e.buffer,e.byteOffset,e.byteLength),offset:0}),geographyFromBytes:e=>e&&s({view:new DataView(e.buffer,e.byteOffset,e.byteLength),offset:0})};function p(e,t,n,r){if(t&&n.endsWith("_DICTIONARY")){let n=e;e instanceof Uint8Array&&!(t instanceof Uint8Array)&&(n=new t.constructor(e.length));for(let r=0;r{return 
r.timestampFromNanoseconds(86400000000000n*(((t=e)>>64n)-2440588n)+(0xffffffffffffffffn&t));var t});if("DATE"===f)return Array.from(e).map(e=>r.dateFromDays(e));if("TIMESTAMP_MILLIS"===f)return Array.from(e).map(e=>r.timestampFromMilliseconds(e));if("TIMESTAMP_MICROS"===f)return Array.from(e).map(e=>r.timestampFromMicroseconds(e));if("JSON"===f)return e.map(e=>JSON.parse(d.decode(e)));if("BSON"===f)throw new Error("parquet bson not supported");if("INTERVAL"===f)throw new Error("parquet interval not supported");if("GEOMETRY"===s?.type)return e.map(e=>r.geometryFromBytes(e));if("GEOGRAPHY"===s?.type)return e.map(e=>r.geographyFromBytes(e));if("UTF8"===f||"STRING"===s?.type||i&&"BYTE_ARRAY"===o)return e.map(e=>r.stringFromBytes(e));if("UINT_64"===f||"INTEGER"===s?.type&&64===s.bitWidth&&!s.isSigned){if(e instanceof BigInt64Array)return new BigUint64Array(e.buffer,e.byteOffset,e.length);const t=new BigUint64Array(e.length);for(let n=0;n=2n**BigInt(n-1)&&(t-=2n**BigInt(n)),Number(t)}function m(e){if(!e)return;const t=e[1]<<8|e[0],n=t>>15?-1:1,r=t>>10&31,i=1023&t;return 0===r?n*2**-14*(i/1024):31===r?i?NaN:n*(1/0):n*2**(r-15)*(1+i/1024)}function g(e,t,n){const r=e[t],i=[];let o=1;if(r.num_children)for(;i.lengtht.element.name===e);if(!i)throw new Error(`parquet schema element not found: ${t}`);r.push(i),n=i}return r}function E(e){let t=0;for(const{element:n}of e)"REPEATED"===n.repetition_type&&t++;return t}function A(e){let t=0;for(const{element:n}of e.slice(1))"REQUIRED"!==n.repetition_type&&t++;return t}function I(e){if(2!==e.length)return!1;const[,t]=e;return"REPEATED"!==t.element.repetition_type&&!t.children.length}const b=0,v=1,T=2,L=3,N=4,R=5,O=6,D=7,B=8,P=9,S=12;function M(e){let t=0;const n={};for(;e.offset>4;15===r&&(r=F(e));const i=n===v||n===T,o=new Array(r);for(let t=0;t>>1^-(1&t)}function $(e){const t=function(e){let t=0n,n=0n;for(;;){const r=e.view.getUint8(e.offset++);if(t|=BigInt(127&r)<>1n^-(1n&t)}function q(e,t){const n=e.view.getUint8(e.offset++),r=15&n;if(r===b)return[0,0,t];const i=n>>4,o=i?t+i:Y(e);return[r,o,o]}const C=new TextDecoder;function x(e){return e&&C.decode(e)}async function G(e,{parsers:t,initialFetchSize:n=524288}={}){if(!(e&&e.byteLength>=0))throw new Error("parquet expected AsyncBuffer");const r=Math.max(0,e.byteLength-n),i=await e.slice(r,e.byteLength),o=new DataView(i);if(827474256!==o.getUint32(i.byteLength-4,!0))throw new Error("parquet file invalid (footer != PAR1)");const f=o.getUint32(i.byteLength-8,!0);if(f>e.byteLength-8)throw new Error(`parquet metadata length ${f} exceeds available buffer ${e.byteLength-8}`);if(f+8>n){const n=e.byteLength-f-8,o=await e.slice(n,r),s=new ArrayBuffer(f+8),a=new Uint8Array(s);return a.set(new Uint8Array(o)),a.set(new Uint8Array(i),r-n),k(s,{parsers:t})}return k(i,{parsers:t})}function k(f,{parsers:s}={}){if(!(f instanceof ArrayBuffer))throw new Error("parquet expected ArrayBuffer");const a=new DataView(f);if(s={..._,...s},a.byteLength<8)throw new Error("parquet file is too short");if(827474256!==a.getUint32(a.byteLength-4,!0))throw new Error("parquet file invalid (footer != PAR1)");const l=a.byteLength-8,c=a.getUint32(l,!0);if(c>a.byteLength-8)throw new Error(`parquet metadata length ${c} exceeds available buffer ${a.byteLength-8}`);const 
u=M({view:a,offset:l-c}),d=u.field_1,p=u.field_2.map(t=>({type:e[t.field_1],type_length:t.field_2,repetition_type:n[t.field_3],name:x(t.field_4),num_children:t.field_5,converted_type:r[t.field_6],scale:t.field_7,precision:t.field_8,field_id:t.field_9,logical_type:z(t.field_10)})),h=p.filter(e=>e.type),y=u.field_3,m=u.field_4.map(n=>({columns:n.field_1.map((n,r)=>({file_path:x(n.field_1),file_offset:n.field_2,meta_data:n.field_3&&{type:e[n.field_3.field_1],encodings:n.field_3.field_2?.map(e=>t[e]),path_in_schema:n.field_3.field_3.map(x),codec:i[n.field_3.field_4],num_values:n.field_3.field_5,total_uncompressed_size:n.field_3.field_6,total_compressed_size:n.field_3.field_7,key_value_metadata:n.field_3.field_8,data_page_offset:n.field_3.field_9,index_page_offset:n.field_3.field_10,dictionary_page_offset:n.field_3.field_11,statistics:j(n.field_3.field_12,h[r],s),encoding_stats:n.field_3.field_13?.map(e=>({page_type:o[e.field_1],encoding:t[e.field_2],count:e.field_3})),bloom_filter_offset:n.field_3.field_14,bloom_filter_length:n.field_3.field_15,size_statistics:n.field_3.field_16&&{unencoded_byte_array_data_bytes:n.field_3.field_16.field_1,repetition_level_histogram:n.field_3.field_16.field_2,definition_level_histogram:n.field_3.field_16.field_3},geospatial_statistics:n.field_3.field_17&&{bbox:n.field_3.field_17.field_1&&{xmin:n.field_3.field_17.field_1.field_1,xmax:n.field_3.field_17.field_1.field_2,ymin:n.field_3.field_17.field_1.field_3,ymax:n.field_3.field_17.field_1.field_4,zmin:n.field_3.field_17.field_1.field_5,zmax:n.field_3.field_17.field_1.field_6,mmin:n.field_3.field_17.field_1.field_7,mmax:n.field_3.field_17.field_1.field_8},geospatial_types:n.field_3.field_17.field_2}},offset_index_offset:n.field_4,offset_index_length:n.field_5,column_index_offset:n.field_6,column_index_length:n.field_7,crypto_metadata:n.field_8,encrypted_column_metadata:n.field_9})),total_byte_size:n.field_2,num_rows:n.field_3,sorting_columns:n.field_4?.map(e=>({column_idx:e.field_1,descending:e.field_2,nulls_first:e.field_3})),file_offset:n.field_5,total_compressed_size:n.field_6,ordinal:n.field_7})),g=u.field_5?.map(e=>({key:x(e.field_1),value:x(e.field_2)}));return{version:d,schema:p,num_rows:y,row_groups:m,key_value_metadata:g,created_by:x(u.field_6),metadata_length:c}}function z(e){return e?.field_1?{type:"STRING"}:e?.field_2?{type:"MAP"}:e?.field_3?{type:"LIST"}:e?.field_4?{type:"ENUM"}:e?.field_5?{type:"DECIMAL",scale:e.field_5.field_1,precision:e.field_5.field_2}:e?.field_6?{type:"DATE"}:e?.field_7?{type:"TIME",isAdjustedToUTC:e.field_7.field_1,unit:V(e.field_7.field_2)}:e?.field_8?{type:"TIMESTAMP",isAdjustedToUTC:e.field_8.field_1,unit:V(e.field_8.field_2)}:e?.field_10?{type:"INTEGER",bitWidth:e.field_10.field_1,isSigned:e.field_10.field_2}:e?.field_11?{type:"NULL"}:e?.field_12?{type:"JSON"}:e?.field_13?{type:"BSON"}:e?.field_14?{type:"UUID"}:e?.field_15?{type:"FLOAT16"}:e?.field_16?{type:"VARIANT"}:e?.field_17?{type:"GEOMETRY",crs:x(e.field_17.field_1)}:e?.field_18?{type:"GEOGRAPHY",crs:x(e.field_18.field_1),algorithm:f[e.field_18.field_2]}:e}function V(e){if(e.field_1)return"MILLIS";if(e.field_2)return"MICROS";if(e.field_3)return"NANOS";throw new Error("parquet time unit required")}function j(e,t,n){return e&&{max:H(e.field_1,t,n),min:H(e.field_2,t,n),null_count:e.field_3,distinct_count:e.field_4,max_value:H(e.field_5,t,n),min_value:H(e.field_6,t,n),is_max_value_exact:e.field_7,is_min_value_exact:e.field_8}}function H(e,t,n){const{type:r,converted_type:i,logical_type:o}=t;if(void 0===e)return 
e;if("BOOLEAN"===r)return 1===e[0];if("BYTE_ARRAY"===r)return n.stringFromBytes(e);const f=new DataView(e.buffer,e.byteOffset,e.byteLength);return"FLOAT"===r&&4===f.byteLength?f.getFloat32(0,!0):"DOUBLE"===r&&8===f.byteLength?f.getFloat64(0,!0):"INT32"===r&&"DATE"===i?n.dateFromDays(f.getInt32(0,!0)):"INT64"===r&&"TIMESTAMP_MILLIS"===i?n.timestampFromMilliseconds(f.getBigInt64(0,!0)):"INT64"===r&&"TIMESTAMP_MICROS"===i?n.timestampFromMicroseconds(f.getBigInt64(0,!0)):"INT64"===r&&"TIMESTAMP"===o?.type&&"NANOS"===o?.unit?n.timestampFromNanoseconds(f.getBigInt64(0,!0)):"INT64"===r&&"TIMESTAMP"===o?.type&&"MICROS"===o?.unit?n.timestampFromMicroseconds(f.getBigInt64(0,!0)):"INT64"===r&&"TIMESTAMP"===o?.type?n.timestampFromMilliseconds(f.getBigInt64(0,!0)):"INT32"===r&&4===f.byteLength?f.getInt32(0,!0):"INT64"===r&&8===f.byteLength?f.getBigInt64(0,!0):"DECIMAL"===i?y(e)*10**-(t.scale||0):"FLOAT16"===o?.type?m(e):e}function Q(e,t){for(let n=0;n{if(!e.ok)throw new Error(`fetch head failed ${e.status}`);const t=e.headers.get("Content-Length");if(!t)throw new Error("missing content length");return parseInt(t)})}(e,n,i);const f=n||{};return{byteLength:t,async slice(t,n){if(o)return o.then(e=>e.slice(t,n));const r=new Headers(f.headers),s=void 0===n?"":n-1;r.set("Range",`bytes=${t}-${s}`);const a=await i(e,{...f,headers:r});if(!a.ok||!a.body)throw new Error(`fetch failed ${a.status}`);if(200===a.status)return o=a.arrayBuffer(),o.then(e=>e.slice(t,n));if(206===a.status)return a.arrayBuffer();throw new Error(`fetch received unexpected status code ${a.status}`)}}}function K({byteLength:e,slice:t},{minSize:n=524288}={}){if(e(await n).slice(e,t)}}const r=new Map;return{byteLength:e,slice(n,i){const o=function(e,t,n){if(e<0){if(void 0!==t)throw new Error(`invalid suffix range [${e}, ${t}]`);return void 0===n?`${e},`:`${n+e},${n}`}if(void 0!==t){if(e>t)throw new Error(`invalid empty range [${e}, ${t}]`);return`${e},${t}`}return void 0===n?`${e},`:`${e},${n}`}(n,i,e),f=r.get(o);if(f)return f;const s=t(n,i);return r.set(o,s),s}}}function W(e){if(!e)return[];if(1===e.length)return e[0];const t=[];for(const n of e)Q(t,n);return t}function X({dictionary_page_offset:e,data_page_offset:t,total_compressed_size:n}){const r=e||t;return{startByte:Number(r),endByte:Number(r+n)}}function Z(e,t,n,r,i){const o=t?.length||n.length;if(!o)return r;const f=A(i),s=i.map(({element:e})=>e.repetition_type);let a=0;const l=[e];let c=e,u=0,d=0,_=0;if(n[0])for(;u1)return!1;const t=e.children[0];return!(t.children.length>1)&&"REPEATED"===t.element.repetition_type}(t)){let f=t.children[0],s=o;1===f.children.length&&(f=f.children[0],s++),ee(e,f,s);const a=f.path.join("."),l=e.get(a);if(!l)throw new Error("parquet list column missing values");return i&&te(l,n),e.set(r,l),void e.delete(a)}if(function(e){if(!e)return!1;if("MAP"!==e.element.converted_type)return!1;if(e.children.length>1)return!1;const t=e.children[0];if(2!==t.children.length)return!1;if("REPEATED"!==t.element.repetition_type)return!1;const n=t.children.find(e=>"key"===e.element.name);if("REPEATED"===n?.element.repetition_type)return!1;const r=t.children.find(e=>"value"===e.element.name);return"REPEATED"!==r?.element.repetition_type}(t)){const f=t.children[0].element.name;ee(e,t.children[0].children[0],o+1),ee(e,t.children[0].children[1],o+1);const s=e.get(`${r}.${f}.key`),a=e.get(`${r}.${f}.value`);if(!s)throw new Error("parquet map column missing keys");if(!a)throw new Error("parquet map column missing values");if(s.length!==a.length)throw new Error("parquet map column 
key/value length mismatch");const l=ne(s,a,o);return i&&te(l,n),e.delete(`${r}.${f}.key`),e.delete(`${r}.${f}.value`),void e.set(r,l)}if(t.children.length){const o="REQUIRED"===t.element.repetition_type?n:n+1,f={};for(const n of t.children){ee(e,n,o);const t=e.get(n.path.join("."));if(!t)throw new Error("parquet struct missing child data");f[n.element.name]=t}for(const n of t.children)e.delete(n.path.join("."));const s=re(f,o);i&&te(s,n),e.set(r,s)}}function te(e,t){for(let n=0;n>l&u;for(l+=o;l>=8;)l-=8n,e.offset++,l&&(t|=BigInt(e.view.getUint8(e.offset))<>>1;ae(e,i,t,n,o),o+=i}}e.offset=i+r}function ae(e,t,n,r,i){const o=n+7>>3;let f=0;for(let t=0;t>1<<3;const f=(1<8?(l-=8,a-=8,s>>>=8):a-l>l&f),o--,l+=n);return i}function ce(e,t,n,r){const i=function(e,t){switch(e){case"INT32":case"FLOAT":return 4;case"INT64":case"DOUBLE":return 8;case"FIXED_LEN_BYTE_ARRAY":if(!t)throw new Error("parquet byteWidth missing type_length");return t;default:throw new Error(`parquet unsupported type: ${e}`)}}(n,r),o=new Uint8Array(t*i);for(let n=0;n1){const r=E(n);if(r){const n=new Array(t.num_values);return se(e,fe(r),n),n}}return[]}(f,t,i),{definitionLevels:l,numNulls:c}=function(e,t,n){const r=A(n);if(!r)return{definitionLevels:[],numNulls:0};const i=new Array(t.num_values);se(e,fe(r),i);let o=t.num_values;for(const e of i)e===r&&o--;0===o&&(i.length=0);return{definitionLevels:i,numNulls:o}}(f,t,i),u=t.num_values-c;if("PLAIN"===t.encoding)s=ue(f,n,u,r.type_length);else if("PLAIN_DICTIONARY"===t.encoding||"RLE_DICTIONARY"===t.encoding||"RLE"===t.encoding){const e="BOOLEAN"===n?1:o.getUint8(f.offset++);e?(s=new Array(u),"BOOLEAN"===n?(se(f,e,s),s=s.map(e=>!!e)):se(f,e,s,o.byteLength-f.offset)):s=new Uint8Array(u)}else if("BYTE_STREAM_SPLIT"===t.encoding)s=ce(f,u,n,r.type_length);else if("DELTA_BINARY_PACKED"===t.encoding){s="INT32"===n?new Int32Array(u):new BigInt64Array(u),ie(f,u,s)}else{if("DELTA_LENGTH_BYTE_ARRAY"!==t.encoding)throw new Error(`parquet unsupported encoding: ${t.encoding}`);s=new Array(u),oe(f,u,s)}return{definitionLevels:l,repetitionLevels:a,dataPage:s}}function ye(e,t,n,r){let i;const o=r?.[n];if("UNCOMPRESSED"===n)i=e;else if(o)i=o(e,t);else{if("SNAPPY"!==n)throw new Error(`parquet unsupported compression codec: ${n}`);i=new Uint8Array(t),function(e,t){const n=e.byteLength,r=t.byteLength;let i=0,o=0;for(;i=n)throw new Error("invalid snappy length header");for(;i=n)throw new Error("missing eof marker");if(3&r){let s=0;switch(3&r){case 1:f=4+(r>>>2&7),s=e[i]+(r>>>5<<8),i++;break;case 2:if(n<=i+1)throw new Error("snappy error end of input");f=(r>>>2)+1,s=e[i]+(e[i+1]<<8),i+=2;break;case 3:if(n<=i+3)throw new Error("snappy error end of input");f=(r>>>2)+1,s=e[i]+(e[i+1]<<8)+(e[i+2]<<16)+(e[i+3]<<24),i+=4}if(0===s||isNaN(s))throw new Error(`invalid offset ${s} pos ${i} inputLength ${n}`);if(s>o)throw new Error("cannot copy from before start of buffer");pe(t,o-s,t,o,f),o+=f}else{let f=(r>>>2)+1;if(f>60){if(i+3>=n)throw new Error("snappy error literal pos + 3 >= inputLength");const t=f-60;f=e[i]+(e[i+1]<<8)+(e[i+2]<<16)+(e[i+3]<<24),f=1+(f&_e[t]),i+=t}if(i+f>n)throw new Error("snappy error literal exceeds input length");pe(e,i,t,o,f),i+=f,o+=f}}if(o!==r)throw new Error("premature end of input")}(e,i)}if(i?.length!==t)throw new Error(`parquet decompressed page length ${i?.length} does not match header ${t}`);return i}function me(e,t,n){const r={view:new DataView(e.buffer,e.byteOffset,e.byteLength),offset:0},{type:i,element:o,schemaPath:f,codec:s,compressors:a}=n,l=t.data_page_header_v2;if(!l)throw 
new Error("parquet data page header v2 is undefined");const c=function(e,t,n){const r=E(n);if(!r)return[];const i=new Array(t.num_values);return se(e,fe(r),i,t.repetition_levels_byte_length),i}(r,l,f);r.offset=l.repetition_levels_byte_length;const u=function(e,t,n){const r=A(n);if(r){const n=new Array(t.num_values);return se(e,fe(r),n,t.definition_levels_byte_length),n}}(r,l,f),d=t.uncompressed_page_size-l.definition_levels_byte_length-l.repetition_levels_byte_length;let _=e.subarray(r.offset);!1!==l.is_compressed&&(_=ye(_,d,s,a));const p=new DataView(_.buffer,_.byteOffset,_.byteLength),h={view:p,offset:0};let y;const m=l.num_values-l.num_nulls;if("PLAIN"===l.encoding)y=ue(h,i,m,o.type_length);else if("RLE"===l.encoding)y=new Array(m),se(h,1,y),y=y.map(e=>!!e);else if("PLAIN_DICTIONARY"===l.encoding||"RLE_DICTIONARY"===l.encoding){const e=p.getUint8(h.offset++);y=new Array(m),se(h,e,y,d-1)}else if("DELTA_BINARY_PACKED"===l.encoding){y="INT32"===i?new Int32Array(m):new BigInt64Array(m),ie(h,m,y)}else if("DELTA_LENGTH_BYTE_ARRAY"===l.encoding)y=new Array(m),oe(h,m,y);else if("DELTA_BYTE_ARRAY"===l.encoding)y=new Array(m),function(e,t,n){const r=new Int32Array(t);ie(e,t,r);const i=new Int32Array(t);ie(e,t,i);for(let o=0;o{u&&o({columnName:f,columnData:u,rowStart:t+d-u.length,rowEnd:t+d})});for(;(a?d=e.view.byteLength-1);){const t=Ee(e);if("DICTIONARY_PAGE"===t.type)c=we(e,t,i,c,void 0,0),c=h(c,i);else{const r=u?.length||0,o=we(e,t,i,c,u,n-d);u===o?d+=o.length-r:(_?.(),l.push(o),d+=o.length,u=o)}}return _?.(),d>r&&u&&(l[l.length-1]=u.slice(0,r-(d-u.length))),l}function we(e,t,n,r,i,o){const{type:f,element:s,schemaPath:a,codec:l,compressors:c}=n,u=new Uint8Array(e.view.buffer,e.view.byteOffset+e.offset,t.compressed_page_size);if(e.offset+=t.compressed_page_size,"DATA_PAGE"===t.type){const e=t.data_page_header;if(!e)throw new Error("parquet data page header is undefined");if(o>e.num_values&&I(a))return new Array(e.num_values);const f=ye(u,Number(t.uncompressed_page_size),l,c),{definitionLevels:s,repetitionLevels:d,dataPage:_}=he(f,e,n);let h=p(_,r,e.encoding,n);if(d.length||s?.length){return Z(Array.isArray(i)?i:[],s,d,h,a)}for(let e=2;e[e]));return h}if("DATA_PAGE_V2"===t.type){const e=t.data_page_header_v2;if(!e)throw new Error("parquet data page header v2 is undefined");if(o>e.num_rows)return new Array(e.num_values);const{definitionLevels:f,repetitionLevels:s,dataPage:l}=me(u,t,n),c=p(l,r,e.encoding,n);return Z(Array.isArray(i)?i:[],f,s,c,a)}if("DICTIONARY_PAGE"===t.type){const e=t.dictionary_page_header;if(!e)throw new Error("parquet dictionary page header is undefined");const n=ye(u,Number(t.uncompressed_page_size),l,c);return ue({view:new DataView(n.buffer,n.byteOffset,n.byteLength),offset:0},f,e.num_values,s.type_length)}throw new Error(`parquet unsupported page type: ${t.type}`)}function Ee(e){const 
n=M(e);return{type:o[n.field_1],uncompressed_page_size:n.field_2,compressed_page_size:n.field_3,crc:n.field_4,data_page_header:n.field_5&&{num_values:n.field_5.field_1,encoding:t[n.field_5.field_2],definition_level_encoding:t[n.field_5.field_3],repetition_level_encoding:t[n.field_5.field_4],statistics:n.field_5.field_5&&{max:n.field_5.field_5.field_1,min:n.field_5.field_5.field_2,null_count:n.field_5.field_5.field_3,distinct_count:n.field_5.field_5.field_4,max_value:n.field_5.field_5.field_5,min_value:n.field_5.field_5.field_6}},index_page_header:n.field_6,dictionary_page_header:n.field_7&&{num_values:n.field_7.field_1,encoding:t[n.field_7.field_2],is_sorted:n.field_7.field_3},data_page_header_v2:n.field_8&&{num_values:n.field_8.field_1,num_nulls:n.field_8.field_2,num_rows:n.field_8.field_3,encoding:t[n.field_8.field_4],definition_levels_byte_length:n.field_8.field_5,repetition_levels_byte_length:n.field_8.field_6,is_compressed:void 0===n.field_8.field_7||n.field_8.field_7,statistics:n.field_8.field_8}}}async function Ae({asyncColumns:e},t,n,r,i){const o=await Promise.all(e.map(({data:e})=>e.then(W))),f=e.map(e=>e.pathInSchema[0]).filter(e=>!r||r.includes(e)),s=r??f,a=s.map(t=>e.findIndex(e=>e.pathInSchema[0]===t)),l=n-t;if("object"===i){const n=new Array(l);for(let r=0;r=0&&(i[e]=o[a[e]][r]);c[n]=i}return c}async function Ie(e){e.metadata??=await G(e.file);const t=function(e){if(!e.metadata)throw new Error("parquet requires metadata");const t=function({metadata:e,rowStart:t=0,rowEnd:n=1/0,columns:r}){if(!e)throw new Error("parquetPlan requires metadata");const i=[],o=[];let f=0;for(const s of e.row_groups){const e=Number(s.num_rows),a=f+e;if(e>0&&a>=t&&fe.slice(t,n));return{byteLength:e.byteLength,slice(r,i=e.byteLength){const o=t.findIndex(({startByte:e,endByte:t})=>e<=r&&i<=t);if(o<0)throw new Error(`no prefetch for range [${r}, ${i}]`);if(t[o].startByte!==r||t[o].endByte!==i){const e=r-t[o].startByte,f=i-t[o].startByte;return n[o]instanceof Promise?n[o].then(t=>t.slice(e,f)):n[o].slice(e,f)}return n[o]}}}(e.file,t),t.groups.map(n=>function(e,{metadata:t,columns:n},r){const{file:i,compressors:o,utf8:f}=e,s=[],a={..._,...e.parsers};for(const{file_path:l,meta_data:c}of r.rowGroup.columns){if(l)throw new Error("parquet file_path not supported");if(!c)throw new Error("parquet column metadata is undefined");const u=c.path_in_schema[0];if(n&&!n.includes(u))continue;const{startByte:d,endByte:_}=X(c),p=_-d;if(p>1<<30){console.warn(`parquet skipping huge column "${c.path_in_schema}" ${p} bytes`);continue}const h=Promise.resolve(i.slice(d,_));s.push({pathInSchema:c.path_in_schema,data:h.then(n=>{const i=w(t.schema,c.path_in_schema),s={view:new DataView(n),offset:0},l={columnName:c.path_in_schema.join("."),type:c.type,element:i[i.length-1].element,schemaPath:i,codec:c.codec,parsers:a,compressors:o,utf8:f};return ge(s,r,l,e.onPage)})})}return{groupStart:r.groupStart,groupRows:r.groupRows,asyncColumns:s}}(e,t,n))}(e),{rowStart:n=0,rowEnd:r,columns:i,onChunk:o,onComplete:f,rowFormat:s}=e;if(!f&&!o){for(const{asyncColumns:e}of t)for(const{data:t}of e)await t;return}const a=function({schema:e}){return w(e,[])[0]}(e.metadata),l=t.map(e=>function(e,t){const{asyncColumns:n}=e,r=[];for(const e of t.children)if(e.children.length){const t=n.filter(t=>t.pathInSchema[0]===e.element.name);if(!t.length)continue;const i=new Map,o=Promise.all(t.map(e=>e.data.then(t=>{i.set(e.pathInSchema.join("."),W(t))}))).then(()=>{ee(i,e);const t=i.get(e.path.join("."));if(!t)throw new Error("parquet column data not 
assembled");return[t]});r.push({pathInSchema:e.path,data:o})}else{const t=n.find(t=>t.pathInSchema[0]===e.element.name);t&&r.push(t)}return{...e,asyncColumns:r}}(e,a));if(o)for(const e of l)for(const t of e.asyncColumns)t.data.then(n=>{let r=e.groupStart;for(const e of n)o({columnName:t.pathInSchema[0],columnData:e,rowStart:r,rowEnd:r+e.length}),r+=e.length});if(f){const e=[];for(const t of l){const o=Math.max(n-t.groupStart,0),f=Math.min((r??1/0)-t.groupStart,t.groupRows);Q(e,"object"===s?await Ae(t,o,f,i,"object"):await Ae(t,o,f,i,"array"))}f(e)}else for(const{asyncColumns:e}of l)for(const{data:t}of e)await t}async function be({file:e,compressors:t}){const n=await G(e),r=n.key_value_metadata?.find(e=>"geo"===e.key);if(!r)throw new Error('Invalid GeoParquet file: missing "geo" metadata');const i=JSON.parse(r.value||"{}"),o=await(f={file:e,metadata:n,utf8:!1,compressors:t},new Promise((e,t)=>{Ie({...f,rowFormat:"object",onComplete:e}).catch(t)}));var f;const s=[],a=i.primary_column||"geometry";for(const e of o){const t=e[a];if(!t)continue;const n=ve(t),r={};for(const t of Object.keys(e)){const n=e[t];t!==a&&null!==n&&(r[t]=n)}const i={type:"Feature",geometry:n,properties:r};s.push(i)}return{type:"FeatureCollection",features:s}}function ve(e){return s({view:new DataView(e.buffer,e.byteOffset,e.byteLength),offset:0})}!async function(){const{Map:e}=await google.maps.importLibrary("maps"),t=new e(document.getElementById("map"),{center:{lat:39,lng:-98},zoom:4});try{const e=K(await J({url:"https://hyparam.github.io/geoparquet/demo/polys.parquet",byteLength:29838}));console.log("GeoParquet file:",e);const n=await be({file:e});console.log("GeoJSON:",n),t.data.addGeoJson(n)}catch(e){console.error("Error loading or parsing GeoParquet file:",e)}}(); //# sourceMappingURL=bundle.min.js.map diff --git a/demo/bundle.min.js.map b/demo/bundle.min.js.map index 69f8747..5f91478 100644 --- a/demo/bundle.min.js.map +++ b/demo/bundle.min.js.map @@ -1 +1 @@ -{"version":3,"file":"bundle.min.js","sources":["../node_modules/hyparquet/src/constants.js","../node_modules/hyparquet/src/convert.js","../node_modules/hyparquet/src/schema.js","../node_modules/hyparquet/src/thrift.js","../node_modules/hyparquet/src/metadata.js","../node_modules/hyparquet/src/utils.js","../node_modules/hyparquet/src/plan.js","../node_modules/hyparquet/src/assemble.js","../node_modules/hyparquet/src/delta.js","../node_modules/hyparquet/src/encoding.js","../node_modules/hyparquet/src/plain.js","../node_modules/hyparquet/src/snappy.js","../node_modules/hyparquet/src/datapage.js","../node_modules/hyparquet/src/column.js","../node_modules/hyparquet/src/rowgroup.js","../node_modules/hyparquet/src/read.js","../src/wkb.js","../src/toGeoJson.js","demo.js"],"sourcesContent":["/** @type {import('../src/types.d.ts').ParquetType[]} */\nexport const ParquetType = [\n 'BOOLEAN',\n 'INT32',\n 'INT64',\n 'INT96', // deprecated\n 'FLOAT',\n 'DOUBLE',\n 'BYTE_ARRAY',\n 'FIXED_LEN_BYTE_ARRAY',\n]\n\n/** @type {import('../src/types.d.ts').Encoding[]} */\nexport const Encoding = [\n 'PLAIN',\n 'GROUP_VAR_INT', // deprecated\n 'PLAIN_DICTIONARY',\n 'RLE',\n 'BIT_PACKED', // deprecated\n 'DELTA_BINARY_PACKED',\n 'DELTA_LENGTH_BYTE_ARRAY',\n 'DELTA_BYTE_ARRAY',\n 'RLE_DICTIONARY',\n 'BYTE_STREAM_SPLIT',\n]\n\n/** @type {import('../src/types.d.ts').FieldRepetitionType[]} */\nexport const FieldRepetitionType = [\n 'REQUIRED',\n 'OPTIONAL',\n 'REPEATED',\n]\n\n/** @type {import('../src/types.d.ts').ConvertedType[]} */\nexport const ConvertedType = [\n 'UTF8',\n 
'MAP',\n 'MAP_KEY_VALUE',\n 'LIST',\n 'ENUM',\n 'DECIMAL',\n 'DATE',\n 'TIME_MILLIS',\n 'TIME_MICROS',\n 'TIMESTAMP_MILLIS',\n 'TIMESTAMP_MICROS',\n 'UINT_8',\n 'UINT_16',\n 'UINT_32',\n 'UINT_64',\n 'INT_8',\n 'INT_16',\n 'INT_32',\n 'INT_64',\n 'JSON',\n 'BSON',\n 'INTERVAL',\n]\n\n/** @type {import('../src/types.d.ts').CompressionCodec[]} */\nexport const CompressionCodec = [\n 'UNCOMPRESSED',\n 'SNAPPY',\n 'GZIP',\n 'LZO',\n 'BROTLI',\n 'LZ4',\n 'ZSTD',\n 'LZ4_RAW',\n]\n\n/** @type {import('../src/types.d.ts').PageType[]} */\nexport const PageType = [\n 'DATA_PAGE',\n 'INDEX_PAGE',\n 'DICTIONARY_PAGE',\n 'DATA_PAGE_V2',\n]\n\n/** @type {import('../src/types.d.ts').BoundaryOrder[]} */\nexport const BoundaryOrder = [\n 'UNORDERED',\n 'ASCENDING',\n 'DESCENDING',\n]\n","/**\n * @import {ColumnDecoder, DecodedArray, Encoding, ParquetParsers, SchemaElement} from '../src/types.d.ts'\n */\n\n/**\n * Default type parsers when no custom ones are given\n * @type ParquetParsers\n */\nexport const DEFAULT_PARSERS = {\n timestampFromMilliseconds(millis) {\n return new Date(Number(millis))\n },\n timestampFromMicroseconds(micros) {\n return new Date(Number(micros / 1000n))\n },\n timestampFromNanoseconds(nanos) {\n return new Date(Number(nanos / 1000000n))\n },\n dateFromDays(days) {\n const dayInMillis = 86400000\n return new Date(days * dayInMillis)\n },\n}\n\n/**\n * Convert known types from primitive to rich, and dereference dictionary.\n *\n * @param {DecodedArray} data series of primitive types\n * @param {DecodedArray | undefined} dictionary\n * @param {Encoding} encoding\n * @param {ColumnDecoder} columnDecoder\n * @returns {DecodedArray} series of rich types\n */\nexport function convertWithDictionary(data, dictionary, encoding, columnDecoder) {\n if (dictionary && encoding.endsWith('_DICTIONARY')) {\n let output = data\n if (data instanceof Uint8Array && !(dictionary instanceof Uint8Array)) {\n // @ts-expect-error upgrade data to match dictionary type with fancy constructor\n output = new dictionary.constructor(data.length)\n }\n for (let i = 0; i < data.length; i++) {\n output[i] = dictionary[data[i]]\n }\n return output\n } else {\n return convert(data, columnDecoder)\n }\n}\n\n/**\n * Convert known types from primitive to rich.\n *\n * @param {DecodedArray} data series of primitive types\n * @param {Pick} columnDecoder\n * @returns {DecodedArray} series of rich types\n */\nexport function convert(data, columnDecoder) {\n const { element, parsers, utf8 = true } = columnDecoder\n const { type, converted_type: ctype, logical_type: ltype } = element\n if (ctype === 'DECIMAL') {\n const scale = element.scale || 0\n const factor = 10 ** -scale\n const arr = new Array(data.length)\n for (let i = 0; i < arr.length; i++) {\n if (data[0] instanceof Uint8Array) {\n arr[i] = parseDecimal(data[i]) * factor\n } else {\n arr[i] = Number(data[i]) * factor\n }\n }\n return arr\n }\n if (!ctype && type === 'INT96') {\n const arr = new Array(data.length)\n for (let i = 0; i < arr.length; i++) {\n arr[i] = parsers.timestampFromNanoseconds(parseInt96Nanos(data[i]))\n }\n return arr\n }\n if (ctype === 'DATE') {\n const arr = new Array(data.length)\n for (let i = 0; i < arr.length; i++) {\n arr[i] = parsers.dateFromDays(data[i])\n }\n return arr\n }\n if (ctype === 'TIMESTAMP_MILLIS') {\n const arr = new Array(data.length)\n for (let i = 0; i < arr.length; i++) {\n arr[i] = parsers.timestampFromMilliseconds(data[i])\n }\n return arr\n }\n if (ctype === 'TIMESTAMP_MICROS') {\n const arr = new 
Array(data.length)\n for (let i = 0; i < arr.length; i++) {\n arr[i] = parsers.timestampFromMicroseconds(data[i])\n }\n return arr\n }\n if (ctype === 'JSON') {\n const decoder = new TextDecoder()\n return data.map(v => JSON.parse(decoder.decode(v)))\n }\n if (ctype === 'BSON') {\n throw new Error('parquet bson not supported')\n }\n if (ctype === 'INTERVAL') {\n throw new Error('parquet interval not supported')\n }\n if (ctype === 'UTF8' || ltype?.type === 'STRING' || utf8 && type === 'BYTE_ARRAY') {\n const decoder = new TextDecoder()\n const arr = new Array(data.length)\n for (let i = 0; i < arr.length; i++) {\n arr[i] = data[i] && decoder.decode(data[i])\n }\n return arr\n }\n if (ctype === 'UINT_64' || ltype?.type === 'INTEGER' && ltype.bitWidth === 64 && !ltype.isSigned) {\n if (data instanceof BigInt64Array) {\n return new BigUint64Array(data.buffer, data.byteOffset, data.length)\n }\n const arr = new BigUint64Array(data.length)\n for (let i = 0; i < arr.length; i++) arr[i] = BigInt(data[i])\n return arr\n }\n if (ctype === 'UINT_32' || ltype?.type === 'INTEGER' && ltype.bitWidth === 32 && !ltype.isSigned) {\n if (data instanceof Int32Array) {\n return new Uint32Array(data.buffer, data.byteOffset, data.length)\n }\n const arr = new Uint32Array(data.length)\n for (let i = 0; i < arr.length; i++) arr[i] = data[i]\n return arr\n }\n if (ltype?.type === 'FLOAT16') {\n return Array.from(data).map(parseFloat16)\n }\n if (ltype?.type === 'TIMESTAMP') {\n const { unit } = ltype\n /** @type {ParquetParsers[keyof ParquetParsers]} */\n let parser = parsers.timestampFromMilliseconds\n if (unit === 'MICROS') parser = parsers.timestampFromMicroseconds\n if (unit === 'NANOS') parser = parsers.timestampFromNanoseconds\n const arr = new Array(data.length)\n for (let i = 0; i < arr.length; i++) {\n arr[i] = parser(data[i])\n }\n return arr\n }\n return data\n}\n\n/**\n * @param {Uint8Array} bytes\n * @returns {number}\n */\nexport function parseDecimal(bytes) {\n let value = 0\n for (const byte of bytes) {\n value = value * 256 + byte\n }\n\n // handle signed\n const bits = bytes.length * 8\n if (value >= 2 ** (bits - 1)) {\n value -= 2 ** bits\n }\n\n return value\n}\n\n/**\n * Converts INT96 date format (hi 32bit days, lo 64bit nanos) to nanos since epoch\n * @param {bigint} value\n * @returns {bigint}\n */\nfunction parseInt96Nanos(value) {\n const days = (value >> 64n) - 2440588n\n const nano = value & 0xffffffffffffffffn\n return days * 86400000000000n + nano\n}\n\n/**\n * @param {Uint8Array | undefined} bytes\n * @returns {number | undefined}\n */\nexport function parseFloat16(bytes) {\n if (!bytes) return undefined\n const int16 = bytes[1] << 8 | bytes[0]\n const sign = int16 >> 15 ? -1 : 1\n const exp = int16 >> 10 & 0x1f\n const frac = int16 & 0x3ff\n if (exp === 0) return sign * 2 ** -14 * (frac / 1024) // subnormals\n if (exp === 0x1f) return frac ? 
NaN : sign * Infinity\n return sign * 2 ** (exp - 15) * (1 + frac / 1024)\n}\n","/**\n * Build a tree from the schema elements.\n *\n * @import {SchemaElement, SchemaTree} from '../src/types.d.ts'\n * @param {SchemaElement[]} schema\n * @param {number} rootIndex index of the root element\n * @param {string[]} path path to the element\n * @returns {SchemaTree} tree of schema elements\n */\nfunction schemaTree(schema, rootIndex, path) {\n const element = schema[rootIndex]\n const children = []\n let count = 1\n\n // Read the specified number of children\n if (element.num_children) {\n while (children.length < element.num_children) {\n const childElement = schema[rootIndex + count]\n const child = schemaTree(schema, rootIndex + count, [...path, childElement.name])\n count += child.count\n children.push(child)\n }\n }\n\n return { count, element, children, path }\n}\n\n/**\n * Get schema elements from the root to the given element name.\n *\n * @param {SchemaElement[]} schema\n * @param {string[]} name path to the element\n * @returns {SchemaTree[]} list of schema elements\n */\nexport function getSchemaPath(schema, name) {\n let tree = schemaTree(schema, 0, [])\n const path = [tree]\n for (const part of name) {\n const child = tree.children.find(child => child.element.name === part)\n if (!child) throw new Error(`parquet schema element not found: ${name}`)\n path.push(child)\n tree = child\n }\n return path\n}\n\n/**\n * Get the max repetition level for a given schema path.\n *\n * @param {SchemaTree[]} schemaPath\n * @returns {number} max repetition level\n */\nexport function getMaxRepetitionLevel(schemaPath) {\n let maxLevel = 0\n for (const { element } of schemaPath) {\n if (element.repetition_type === 'REPEATED') {\n maxLevel++\n }\n }\n return maxLevel\n}\n\n/**\n * Get the max definition level for a given schema path.\n *\n * @param {SchemaTree[]} schemaPath\n * @returns {number} max definition level\n */\nexport function getMaxDefinitionLevel(schemaPath) {\n let maxLevel = 0\n for (const { element } of schemaPath.slice(1)) {\n if (element.repetition_type !== 'REQUIRED') {\n maxLevel++\n }\n }\n return maxLevel\n}\n\n/**\n * Check if a column is list-like.\n *\n * @param {SchemaTree} schema\n * @returns {boolean} true if list-like\n */\nexport function isListLike(schema) {\n if (!schema) return false\n if (schema.element.converted_type !== 'LIST') return false\n if (schema.children.length > 1) return false\n\n const firstChild = schema.children[0]\n if (firstChild.children.length > 1) return false\n if (firstChild.element.repetition_type !== 'REPEATED') return false\n\n return true\n}\n\n/**\n * Check if a column is map-like.\n *\n * @param {SchemaTree} schema\n * @returns {boolean} true if map-like\n */\nexport function isMapLike(schema) {\n if (!schema) return false\n if (schema.element.converted_type !== 'MAP') return false\n if (schema.children.length > 1) return false\n\n const firstChild = schema.children[0]\n if (firstChild.children.length !== 2) return false\n if (firstChild.element.repetition_type !== 'REPEATED') return false\n\n const keyChild = firstChild.children.find(child => child.element.name === 'key')\n if (keyChild?.element.repetition_type === 'REPEATED') return false\n\n const valueChild = firstChild.children.find(child => child.element.name === 'value')\n if (valueChild?.element.repetition_type === 'REPEATED') return false\n\n return true\n}\n\n/**\n * Returns true if a column is non-nested.\n *\n * @param {SchemaTree[]} schemaPath\n * @returns {boolean}\n 
*/\nexport function isFlatColumn(schemaPath) {\n if (schemaPath.length !== 2) return false\n const [, column] = schemaPath\n if (column.element.repetition_type === 'REPEATED') return false\n if (column.children.length) return false\n return true\n}\n","// TCompactProtocol types\nexport const CompactType = {\n STOP: 0,\n TRUE: 1,\n FALSE: 2,\n BYTE: 3,\n I16: 4,\n I32: 5,\n I64: 6,\n DOUBLE: 7,\n BINARY: 8,\n LIST: 9,\n SET: 10,\n MAP: 11,\n STRUCT: 12,\n UUID: 13,\n}\n\n/**\n * Parse TCompactProtocol\n *\n * @param {DataReader} reader\n * @returns {{ [key: `field_${number}`]: any }}\n */\nexport function deserializeTCompactProtocol(reader) {\n let lastFid = 0\n /** @type {ThriftObject} */\n const value = {}\n\n while (reader.offset < reader.view.byteLength) {\n // Parse each field based on its type and add to the result object\n const [type, fid, newLastFid] = readFieldBegin(reader, lastFid)\n lastFid = newLastFid\n\n if (type === CompactType.STOP) {\n break\n }\n\n // Handle the field based on its type\n value[`field_${fid}`] = readElement(reader, type)\n }\n\n return value\n}\n\n/**\n * Read a single element based on its type\n *\n * @import {DataReader, ThriftObject, ThriftType} from '../src/types.d.ts'\n * @param {DataReader} reader\n * @param {number} type\n * @returns {ThriftType}\n */\nfunction readElement(reader, type) {\n switch (type) {\n case CompactType.TRUE:\n return true\n case CompactType.FALSE:\n return false\n case CompactType.BYTE:\n // read byte directly\n return reader.view.getInt8(reader.offset++)\n case CompactType.I16:\n case CompactType.I32:\n return readZigZag(reader)\n case CompactType.I64:\n return readZigZagBigInt(reader)\n case CompactType.DOUBLE: {\n const value = reader.view.getFloat64(reader.offset, true)\n reader.offset += 8\n return value\n }\n case CompactType.BINARY: {\n const stringLength = readVarInt(reader)\n const strBytes = new Uint8Array(reader.view.buffer, reader.view.byteOffset + reader.offset, stringLength)\n reader.offset += stringLength\n return strBytes\n }\n case CompactType.LIST: {\n const [elemType, listSize] = readCollectionBegin(reader)\n const boolType = elemType === CompactType.TRUE || elemType === CompactType.FALSE\n const values = new Array(listSize)\n for (let i = 0; i < listSize; i++) {\n values[i] = boolType ? 
readElement(reader, CompactType.BYTE) === 1 : readElement(reader, elemType)\n }\n return values\n }\n case CompactType.STRUCT: {\n /** @type {ThriftObject} */\n const structValues = {}\n let structLastFid = 0\n while (true) {\n let structFieldType, structFid\n [structFieldType, structFid, structLastFid] = readFieldBegin(reader, structLastFid)\n if (structFieldType === CompactType.STOP) {\n break\n }\n structValues[`field_${structFid}`] = readElement(reader, structFieldType)\n }\n return structValues\n }\n // TODO: MAP, SET, UUID\n default:\n throw new Error(`thrift unhandled type: ${type}`)\n }\n}\n\n/**\n * Var int, also known as Unsigned LEB128.\n * Var ints take 1 to 5 bytes (int32) or 1 to 10 bytes (int64).\n * Reads groups of 7 low bits until high bit is 0.\n *\n * @param {DataReader} reader\n * @returns {number}\n */\nexport function readVarInt(reader) {\n let result = 0\n let shift = 0\n while (true) {\n const byte = reader.view.getUint8(reader.offset++)\n result |= (byte & 0x7f) << shift\n if (!(byte & 0x80)) {\n return result\n }\n shift += 7\n }\n}\n\n/**\n * Read a varint as a bigint.\n *\n * @param {DataReader} reader\n * @returns {bigint}\n */\nfunction readVarBigInt(reader) {\n let result = 0n\n let shift = 0n\n while (true) {\n const byte = reader.view.getUint8(reader.offset++)\n result |= BigInt(byte & 0x7f) << shift\n if (!(byte & 0x80)) {\n return result\n }\n shift += 7n\n }\n}\n\n/**\n * Values of type int32 and int64 are transformed to a zigzag int.\n * A zigzag int folds positive and negative numbers into the positive number space.\n *\n * @param {DataReader} reader\n * @returns {number}\n */\nexport function readZigZag(reader) {\n const zigzag = readVarInt(reader)\n // convert zigzag to int\n return zigzag >>> 1 ^ -(zigzag & 1)\n}\n\n/**\n * A zigzag int folds positive and negative numbers into the positive number space.\n * This version returns a BigInt.\n *\n * @param {DataReader} reader\n * @returns {bigint}\n */\nexport function readZigZagBigInt(reader) {\n const zigzag = readVarBigInt(reader)\n // convert zigzag to int\n return zigzag >> 1n ^ -(zigzag & 1n)\n}\n\n/**\n * Get thrift type from half a byte\n *\n * @param {number} byte\n * @returns {number}\n */\nfunction getCompactType(byte) {\n return byte & 0x0f\n}\n\n/**\n * Read field type and field id\n *\n * @param {DataReader} reader\n * @param {number} lastFid\n * @returns {[number, number, number]} [type, fid, newLastFid]\n */\nfunction readFieldBegin(reader, lastFid) {\n const type = reader.view.getUint8(reader.offset++)\n if ((type & 0x0f) === CompactType.STOP) {\n // STOP also ends a struct\n return [0, 0, lastFid]\n }\n const delta = type >> 4\n let fid // field id\n if (delta) {\n // add delta to last field id\n fid = lastFid + delta\n } else {\n throw new Error('non-delta field id not supported')\n }\n return [getCompactType(type), fid, fid]\n}\n\n/**\n * Read collection type and size\n *\n * @param {DataReader} reader\n * @returns {[number, number]} [type, size]\n */\nfunction readCollectionBegin(reader) {\n const sizeType = reader.view.getUint8(reader.offset++)\n const size = sizeType >> 4\n const type = getCompactType(sizeType)\n if (size === 15) {\n const newSize = readVarInt(reader)\n return [type, newSize]\n }\n return [type, size]\n}\n","import { CompressionCodec, ConvertedType, Encoding, FieldRepetitionType, PageType, ParquetType } from './constants.js'\nimport { DEFAULT_PARSERS, parseDecimal, parseFloat16 } from './convert.js'\nimport { getSchemaPath } from './schema.js'\nimport { 
deserializeTCompactProtocol } from './thrift.js'\n\nexport const defaultInitialFetchSize = 1 << 19 // 512 KiB\n\n/**\n * Read parquet metadata from an async buffer.\n *\n * An AsyncBuffer is like an ArrayBuffer, but the slices are loaded\n * asynchronously, possibly over the network.\n *\n * You must provide the byteLength of the buffer, typically from a HEAD request.\n *\n * In theory, you could use suffix-range requests to fetch the end of the file\n * and save a round trip. In practice this doesn't work: Chrome treats a suffix\n * Range as a not-safe-listed header and forces a CORS preflight.\n * So the byteLength is required.\n *\n * To make this efficient, we initially request the last 512 KiB of the file,\n * which is likely to contain the metadata. If the metadata length exceeds the\n * initial fetch, we request the rest of the metadata from the AsyncBuffer.\n *\n * This ensures that we either make one initial request for the metadata,\n * or a second request for up to the metadata size.\n *\n * @param {AsyncBuffer} asyncBuffer parquet file contents\n * @param {MetadataOptions & { initialFetchSize?: number }} options parsing options; initialFetchSize is in bytes (default 512 KiB)\n * @returns {Promise<FileMetaData>} parquet metadata object\n */\nexport async function parquetMetadataAsync(asyncBuffer, { parsers, initialFetchSize = defaultInitialFetchSize } = {}) {\n if (!asyncBuffer || !(asyncBuffer.byteLength >= 0)) throw new Error('parquet expected AsyncBuffer')\n\n // fetch last bytes (footer) of the file\n const footerOffset = Math.max(0, asyncBuffer.byteLength - initialFetchSize)\n const footerBuffer = await asyncBuffer.slice(footerOffset, asyncBuffer.byteLength)\n\n // Check for parquet magic number \"PAR1\"\n const footerView = new DataView(footerBuffer)\n if (footerView.getUint32(footerBuffer.byteLength - 4, true) !== 0x31524150) {\n throw new Error('parquet file invalid (footer != PAR1)')\n }\n\n // Parquet files store metadata at the end of the file\n // Metadata length is 4 bytes before the last PAR1\n const metadataLength = footerView.getUint32(footerBuffer.byteLength - 8, true)\n if (metadataLength > asyncBuffer.byteLength - 8) {\n throw new Error(`parquet metadata length ${metadataLength} exceeds available buffer ${asyncBuffer.byteLength - 8}`)\n }\n\n // check if metadata size fits inside the initial fetch\n if (metadataLength + 8 > initialFetchSize) {\n // fetch the rest of the metadata\n const metadataOffset = asyncBuffer.byteLength - metadataLength - 8\n const metadataBuffer = await asyncBuffer.slice(metadataOffset, footerOffset)\n // combine initial fetch with the new slice\n const combinedBuffer = new ArrayBuffer(metadataLength + 8)\n const combinedView = new Uint8Array(combinedBuffer)\n combinedView.set(new Uint8Array(metadataBuffer))\n combinedView.set(new Uint8Array(footerBuffer), footerOffset - metadataOffset)\n return parquetMetadata(combinedBuffer, { parsers })\n } else {\n // parse metadata from the footer\n return parquetMetadata(footerBuffer, { parsers })\n }\n}\n
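\n// File layout (per the Parquet spec, illustrative):\n// [ data pages ... | file metadata (thrift compact) | 4-byte little-endian metadata_length | \"PAR1\" ]\n// 0x31524150 is the magic \"PAR1\" read as a little-endian uint32.\n\n/**\n * Read parquet metadata from a buffer synchronously.\n *\n * @param {ArrayBuffer} arrayBuffer parquet file footer\n * @param {MetadataOptions} options metadata parsing options\n * @returns {FileMetaData} parquet metadata object\n */\nexport function parquetMetadata(arrayBuffer, { parsers } = {}) {\n if (!(arrayBuffer instanceof ArrayBuffer)) throw new Error('parquet expected ArrayBuffer')\n const view = new DataView(arrayBuffer)\n\n // Use default parsers if not given\n parsers = { 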
...DEFAULT_PARSERS, ...parsers }\n\n // Validate footer magic number \"PAR1\"\n if (view.byteLength < 8) {\n throw new Error('parquet file is too short')\n }\n if (view.getUint32(view.byteLength - 4, true) !== 0x31524150) {\n throw new Error('parquet file invalid (footer != PAR1)')\n }\n\n // Parquet files store metadata at the end of the file\n // Metadata length is 4 bytes before the last PAR1\n const metadataLengthOffset = view.byteLength - 8\n const metadataLength = view.getUint32(metadataLengthOffset, true)\n if (metadataLength > view.byteLength - 8) {\n // {metadata}, metadata_length, PAR1\n throw new Error(`parquet metadata length ${metadataLength} exceeds available buffer ${view.byteLength - 8}`)\n }\n\n const metadataOffset = metadataLengthOffset - metadataLength\n const reader = { view, offset: metadataOffset }\n const metadata = deserializeTCompactProtocol(reader)\n const decoder = new TextDecoder()\n function decode(/** @type {Uint8Array} */ value) {\n return value && decoder.decode(value)\n }\n\n // Parse metadata from thrift data\n const version = metadata.field_1\n /** @type {SchemaElement[]} */\n const schema = metadata.field_2.map((/** @type {any} */ field) => ({\n type: ParquetType[field.field_1],\n type_length: field.field_2,\n repetition_type: FieldRepetitionType[field.field_3],\n name: decode(field.field_4),\n num_children: field.field_5,\n converted_type: ConvertedType[field.field_6],\n scale: field.field_7,\n precision: field.field_8,\n field_id: field.field_9,\n logical_type: logicalType(field.field_10),\n }))\n // schema element per column index\n const columnSchema = schema.filter(e => e.type)\n const num_rows = metadata.field_3\n const row_groups = metadata.field_4.map((/** @type {any} */ rowGroup) => ({\n columns: rowGroup.field_1.map((/** @type {any} */ column, /** @type {number} */ columnIndex) => ({\n file_path: decode(column.field_1),\n file_offset: column.field_2,\n meta_data: column.field_3 && {\n type: ParquetType[column.field_3.field_1],\n encodings: column.field_3.field_2?.map((/** @type {number} */ e) => Encoding[e]),\n path_in_schema: column.field_3.field_3.map(decode),\n codec: CompressionCodec[column.field_3.field_4],\n num_values: column.field_3.field_5,\n total_uncompressed_size: column.field_3.field_6,\n total_compressed_size: column.field_3.field_7,\n key_value_metadata: column.field_3.field_8,\n data_page_offset: column.field_3.field_9,\n index_page_offset: column.field_3.field_10,\n dictionary_page_offset: column.field_3.field_11,\n statistics: convertStats(column.field_3.field_12, columnSchema[columnIndex], parsers),\n encoding_stats: column.field_3.field_13?.map((/** @type {any} */ encodingStat) => ({\n page_type: PageType[encodingStat.field_1],\n encoding: Encoding[encodingStat.field_2],\n count: encodingStat.field_3,\n })),\n bloom_filter_offset: column.field_3.field_14,\n bloom_filter_length: column.field_3.field_15,\n size_statistics: column.field_3.field_16 && {\n unencoded_byte_array_data_bytes: column.field_3.field_16.field_1,\n repetition_level_histogram: column.field_3.field_16.field_2,\n definition_level_histogram: column.field_3.field_16.field_3,\n },\n },\n offset_index_offset: column.field_4,\n offset_index_length: column.field_5,\n column_index_offset: column.field_6,\n column_index_length: column.field_7,\n crypto_metadata: column.field_8,\n encrypted_column_metadata: column.field_9,\n })),\n total_byte_size: rowGroup.field_2,\n num_rows: rowGroup.field_3,\n sorting_columns: rowGroup.field_4?.map((/** @type {any} */ 
sortingColumn) => ({\n column_idx: sortingColumn.field_1,\n descending: sortingColumn.field_2,\n nulls_first: sortingColumn.field_3,\n })),\n file_offset: rowGroup.field_5,\n total_compressed_size: rowGroup.field_6,\n ordinal: rowGroup.field_7,\n }))\n const key_value_metadata = metadata.field_5?.map((/** @type {any} */ keyValue) => ({\n key: decode(keyValue.field_1),\n value: decode(keyValue.field_2),\n }))\n const created_by = decode(metadata.field_6)\n\n return {\n version,\n schema,\n num_rows,\n row_groups,\n key_value_metadata,\n created_by,\n metadata_length: metadataLength,\n }\n}\n\n/**\n * Return a tree of schema elements from parquet metadata.\n *\n * @param {{schema: SchemaElement[]}} metadata parquet metadata object\n * @returns {SchemaTree} tree of schema elements\n */\nexport function parquetSchema({ schema }) {\n return getSchemaPath(schema, [])[0]\n}\n\n/**\n * @param {any} logicalType\n * @returns {LogicalType | undefined}\n */\nfunction logicalType(logicalType) {\n if (logicalType?.field_1) return { type: 'STRING' }\n if (logicalType?.field_2) return { type: 'MAP' }\n if (logicalType?.field_3) return { type: 'LIST' }\n if (logicalType?.field_4) return { type: 'ENUM' }\n if (logicalType?.field_5) return {\n type: 'DECIMAL',\n scale: logicalType.field_5.field_1,\n precision: logicalType.field_5.field_2,\n }\n if (logicalType?.field_6) return { type: 'DATE' }\n if (logicalType?.field_7) return {\n type: 'TIME',\n isAdjustedToUTC: logicalType.field_7.field_1,\n unit: timeUnit(logicalType.field_7.field_2),\n }\n if (logicalType?.field_8) return {\n type: 'TIMESTAMP',\n isAdjustedToUTC: logicalType.field_8.field_1,\n unit: timeUnit(logicalType.field_8.field_2),\n }\n if (logicalType?.field_10) return {\n type: 'INTEGER',\n bitWidth: logicalType.field_10.field_1,\n isSigned: logicalType.field_10.field_2,\n }\n if (logicalType?.field_11) return { type: 'NULL' }\n if (logicalType?.field_12) return { type: 'JSON' }\n if (logicalType?.field_13) return { type: 'BSON' }\n if (logicalType?.field_14) return { type: 'UUID' }\n if (logicalType?.field_15) return { type: 'FLOAT16' }\n return logicalType\n}\n\n/**\n * @param {any} unit\n * @returns {TimeUnit}\n */\nfunction timeUnit(unit) {\n if (unit.field_1) return 'MILLIS'\n if (unit.field_2) return 'MICROS'\n if (unit.field_3) return 'NANOS'\n throw new Error('parquet time unit required')\n}\n\n/**\n * Convert column statistics based on column type.\n *\n * @import {AsyncBuffer, FileMetaData, LogicalType, MetadataOptions, MinMaxType, ParquetParsers, SchemaElement, SchemaTree, Statistics, TimeUnit} from '../src/types.d.ts'\n * @param {any} stats\n * @param {SchemaElement} schema\n * @param {ParquetParsers} parsers\n * @returns {Statistics}\n */\nfunction convertStats(stats, schema, parsers) {\n return stats && {\n max: convertMetadata(stats.field_1, schema, parsers),\n min: convertMetadata(stats.field_2, schema, parsers),\n null_count: stats.field_3,\n distinct_count: stats.field_4,\n max_value: convertMetadata(stats.field_5, schema, parsers),\n min_value: convertMetadata(stats.field_6, schema, parsers),\n is_max_value_exact: stats.field_7,\n is_min_value_exact: stats.field_8,\n }\n}\n\n/**\n * @param {Uint8Array | undefined} value\n * @param {SchemaElement} schema\n * @param {ParquetParsers} parsers\n * @returns {MinMaxType | undefined}\n */\nexport function convertMetadata(value, schema, parsers) {\n const { type, converted_type, logical_type } = schema\n if (value === undefined) return value\n if (type === 'BOOLEAN') return value[0] 
=== 1\n if (type === 'BYTE_ARRAY') return new TextDecoder().decode(value)\n const view = new DataView(value.buffer, value.byteOffset, value.byteLength)\n if (type === 'FLOAT' && view.byteLength === 4) return view.getFloat32(0, true)\n if (type === 'DOUBLE' && view.byteLength === 8) return view.getFloat64(0, true)\n if (type === 'INT32' && converted_type === 'DATE') return parsers.dateFromDays(view.getInt32(0, true))\n if (type === 'INT64' && converted_type === 'TIMESTAMP_MILLIS') return parsers.timestampFromMilliseconds(view.getBigInt64(0, true))\n if (type === 'INT64' && converted_type === 'TIMESTAMP_MICROS') return parsers.timestampFromMicroseconds(view.getBigInt64(0, true))\n if (type === 'INT64' && logical_type?.type === 'TIMESTAMP' && logical_type?.unit === 'NANOS') return parsers.timestampFromNanoseconds(view.getBigInt64(0, true))\n if (type === 'INT64' && logical_type?.type === 'TIMESTAMP' && logical_type?.unit === 'MICROS') return parsers.timestampFromMicroseconds(view.getBigInt64(0, true))\n if (type === 'INT64' && logical_type?.type === 'TIMESTAMP') return parsers.timestampFromMilliseconds(view.getBigInt64(0, true))\n if (type === 'INT32' && view.byteLength === 4) return view.getInt32(0, true)\n if (type === 'INT64' && view.byteLength === 8) return view.getBigInt64(0, true)\n if (converted_type === 'DECIMAL') return parseDecimal(value) * 10 ** -(schema.scale || 0)\n if (logical_type?.type === 'FLOAT16') return parseFloat16(value)\n if (type === 'FIXED_LEN_BYTE_ARRAY') return value\n // assert(false)\n return value\n}\n","import { defaultInitialFetchSize } from './metadata.js'\n\n/**\n * Replace bigint, date, etc with legal JSON types.\n *\n * @param {any} obj object to convert\n * @returns {unknown} converted object\n */\nexport function toJson(obj) {\n if (obj === undefined) return null\n if (typeof obj === 'bigint') return Number(obj)\n if (Array.isArray(obj)) return obj.map(toJson)\n if (obj instanceof Uint8Array) return Array.from(obj)\n if (obj instanceof Date) return obj.toISOString()\n if (obj instanceof Object) {\n /** @type {Record<string, unknown>} */\n const newObj = {}\n for (const key of Object.keys(obj)) {\n if (obj[key] === undefined) continue\n newObj[key] = toJson(obj[key])\n }\n return newObj\n }\n return obj\n}\n\n/**\n * Concatenate two arrays fast.\n *\n * @param {any[]} aaa first array\n * @param {DecodedArray} bbb second array\n */\nexport function concat(aaa, bbb) {\n const chunk = 10000\n for (let i = 0; i < bbb.length; i += chunk) {\n aaa.push(...bbb.slice(i, i + chunk))\n }\n}\n
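\n// Note (illustrative): concat pushes in chunks because spreading one very\n// large array into push() can exceed the engine's maximum argument count;\n// the 10000 chunk size is arbitrary but safe.\n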
\n/**\n * Deep equality comparison\n *\n * @param {any} a First object to compare\n * @param {any} b Second object to compare\n * @returns {boolean} true if objects are equal\n */\nexport function equals(a, b) {\n if (a === b) return true\n if (a instanceof Uint8Array && b instanceof Uint8Array) return equals(Array.from(a), Array.from(b))\n if (!a || !b || typeof a !== typeof b) return false\n return Array.isArray(a) && Array.isArray(b)\n ? a.length === b.length && a.every((v, i) => equals(v, b[i]))\n : typeof a === 'object' && Object.keys(a).length === Object.keys(b).length && Object.keys(a).every(k => equals(a[k], b[k]))\n}\n\n/**\n * Get the byte length of a URL using a HEAD request.\n * If requestInit is provided, it will be passed to fetch.\n *\n * @param {string} url\n * @param {RequestInit} [requestInit] fetch options\n * @param {typeof globalThis.fetch} [customFetch] fetch function to use\n * @returns {Promise<number>}\n */\nexport async function byteLengthFromUrl(url, requestInit, customFetch) {\n const fetch = customFetch ?? globalThis.fetch\n return await fetch(url, { ...requestInit, method: 'HEAD' })\n .then(res => {\n if (!res.ok) throw new Error(`fetch head failed ${res.status}`)\n const length = res.headers.get('Content-Length')\n if (!length) throw new Error('missing content length')\n return parseInt(length)\n })\n}\n\n/**\n * Construct an AsyncBuffer for a URL.\n * If byteLength is not provided, will make a HEAD request to get the file size.\n * If fetch is provided, it will be used instead of the global fetch.\n * If requestInit is provided, it will be passed to fetch.\n *\n * @param {object} options\n * @param {string} options.url\n * @param {number} [options.byteLength]\n * @param {typeof globalThis.fetch} [options.fetch] fetch function to use\n * @param {RequestInit} [options.requestInit]\n * @returns {Promise<AsyncBuffer>}\n */\nexport async function asyncBufferFromUrl({ url, byteLength, requestInit, fetch: customFetch }) {\n if (!url) throw new Error('missing url')\n const fetch = customFetch ?? globalThis.fetch\n // byte length from HEAD request\n byteLength ||= await byteLengthFromUrl(url, requestInit, fetch)\n\n /**\n * A promise for the whole buffer, if range requests are not supported.\n * @type {Promise<ArrayBuffer>|undefined}\n */\n let buffer = undefined\n const init = requestInit || {}\n\n return {\n byteLength,\n async slice(start, end) {\n if (buffer) {\n return buffer.then(buffer => buffer.slice(start, end))\n }\n\n const headers = new Headers(init.headers)\n const endStr = end === undefined ? '' : end - 1\n headers.set('Range', `bytes=${start}-${endStr}`)\n\n const res = await fetch(url, { ...init, headers })\n if (!res.ok || !res.body) throw new Error(`fetch failed ${res.status}`)\n\n if (res.status === 200) {\n // Endpoint does not support range requests and returned the whole object\n buffer = res.arrayBuffer()\n return buffer.then(buffer => buffer.slice(start, end))\n } else if (res.status === 206) {\n // The endpoint supports range requests and sent us the requested range\n return res.arrayBuffer()\n } else {\n throw new Error(`fetch received unexpected status code ${res.status}`)\n }\n },\n }\n}\n
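\n// Example (illustrative): asyncBufferFromUrl({ url }) sends\n// 'Range: bytes=0-99' for slice(0, 100). A 206 response returns just that\n// range; a 200 response means the server ignored Range, so the whole body is\n// cached and sliced locally. slice(start) with no end sends 'bytes=start-'.\n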
\n/**\n * Returns a cached layer on top of an AsyncBuffer, for caching slices of a file\n * that are read multiple times, possibly over a network.\n *\n * @param {AsyncBuffer} file file-like object to cache\n * @param {{ minSize?: number }} [options]\n * @returns {AsyncBuffer} cached file-like object\n */\nexport function cachedAsyncBuffer({ byteLength, slice }, { minSize = defaultInitialFetchSize } = {}) {\n if (byteLength < minSize) {\n // Cache whole file if it's small\n const buffer = slice(0, byteLength)\n return {\n byteLength,\n async slice(start, end) {\n return (await buffer).slice(start, end)\n },\n }\n }\n const cache = new Map()\n return {\n byteLength,\n /**\n * @param {number} start\n * @param {number} [end]\n * @returns {Awaitable<ArrayBuffer>}\n */\n slice(start, end) {\n const key = cacheKey(start, end, byteLength)\n const cached = cache.get(key)\n if (cached) return cached\n // cache miss, read from file\n const promise = slice(start, end)\n cache.set(key, promise)\n return promise\n },\n }\n}\n\n\n/**\n * Returns canonical cache key for a byte range 'start,end'.\n * Normalize int-range and suffix-range requests to the same key.\n *\n * @import {AsyncBuffer, Awaitable, DecodedArray} from '../src/types.d.ts'\n * @param {number} start start byte of range\n * @param {number} [end] end byte of range, or undefined for suffix range\n * @param {number} [size] size of file, or undefined for suffix range\n * @returns {string}\n */\nfunction cacheKey(start, end, size) {\n if (start < 0) {\n if (end !== undefined) throw new Error(`invalid suffix range [${start}, ${end}]`)\n if (size === undefined) return `${start},`\n return `${size + start},${size}`\n } else if (end !== undefined) {\n if (start > end) throw new Error(`invalid empty range [${start}, ${end}]`)\n return `${start},${end}`\n } else if (size === undefined) {\n return `${start},`\n } else {\n return `${start},${size}`\n }\n}\n\n/**\n * Flatten a list of lists into a single list.\n *\n * @param {DecodedArray[]} [chunks]\n * @returns {DecodedArray}\n */\nexport function flatten(chunks) {\n if (!chunks) return []\n if (chunks.length === 1) return chunks[0]\n /** @type {any[]} */\n const output = []\n for (const chunk of chunks) {\n concat(output, chunk)\n }\n return output\n}\n
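\n// cacheKey examples (from the rules in cacheKey above, illustrative):\n// cacheKey(0, 100, 1000) -> '0,100'\n// cacheKey(-100, undefined, 1000) -> '900,1000' (suffix range normalized)\n// cacheKey(200, undefined, 1000) -> '200,1000'\n","import { concat } from './utils.js'\n\n// Combine column chunks into a single byte range if less than 32mb\nconst columnChunkAggregation = 1 << 25 // 32mb\n\n/**\n * @import {AsyncBuffer, ByteRange, ColumnMetaData, GroupPlan, ParquetReadOptions, QueryPlan} from '../src/types.js'\n */\n/**\n * Plan which byte ranges to read to satisfy a read request.\n * Metadata must be non-null.\n *\n * @param {ParquetReadOptions} options\n * @returns {QueryPlan}\n */\nexport function parquetPlan({ metadata, rowStart = 0, rowEnd = Infinity, columns }) {\n if (!metadata) throw new Error('parquetPlan requires metadata')\n /** @type {GroupPlan[]} */\n const groups = []\n /** @type {ByteRange[]} */\n const fetches = []\n\n // find which row groups to read\n let groupStart = 0 // first row index of the current group\n for (const rowGroup of metadata.row_groups) {\n const groupRows = Number(rowGroup.num_rows)\n const groupEnd = groupStart + groupRows\n // if row group overlaps with row range, add it to the plan\n if (groupRows > 0 && groupEnd >= rowStart && groupStart < rowEnd) {\n /** @type {ByteRange[]} */\n const ranges = []\n // loop through each column chunk\n for (const { file_path, meta_data } of rowGroup.columns) {\n if (file_path) throw new Error('parquet file_path not supported')\n if (!meta_data) throw new Error('parquet column 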
metadata is undefined')\n // add included columns to the plan\n if (!columns || columns.includes(meta_data.path_in_schema[0])) {\n ranges.push(getColumnRange(meta_data))\n }\n }\n const selectStart = Math.max(rowStart - groupStart, 0)\n const selectEnd = Math.min(rowEnd - groupStart, groupRows)\n groups.push({ ranges, rowGroup, groupStart, groupRows, selectStart, selectEnd })\n\n // map group plan to ranges\n const groupSize = ranges[ranges.length - 1]?.endByte - ranges[0]?.startByte\n if (!columns && groupSize < columnChunkAggregation) {\n // full row group\n fetches.push({\n startByte: ranges[0].startByte,\n endByte: ranges[ranges.length - 1].endByte,\n })\n } else if (ranges.length) {\n concat(fetches, ranges)\n } else if (columns?.length) {\n throw new Error(`parquet columns not found: ${columns.join(', ')}`)\n }\n }\n\n groupStart = groupEnd\n }\n if (!isFinite(rowEnd)) rowEnd = groupStart\n\n return { metadata, rowStart, rowEnd, columns, fetches, groups }\n}\n\n/**\n * @param {ColumnMetaData} columnMetadata\n * @returns {ByteRange}\n */\nexport function getColumnRange({ dictionary_page_offset, data_page_offset, total_compressed_size }) {\n const columnOffset = dictionary_page_offset || data_page_offset\n return {\n startByte: Number(columnOffset),\n endByte: Number(columnOffset + total_compressed_size),\n }\n}\n\n/**\n * Prefetch byte ranges from an AsyncBuffer.\n *\n * @param {AsyncBuffer} file\n * @param {QueryPlan} plan\n * @returns {AsyncBuffer}\n */\nexport function prefetchAsyncBuffer(file, { fetches }) {\n // fetch byte ranges from the file\n const promises = fetches.map(({ startByte, endByte }) => file.slice(startByte, endByte))\n return {\n byteLength: file.byteLength,\n slice(start, end = file.byteLength) {\n // find matching slice\n const index = fetches.findIndex(({ startByte, endByte }) => startByte <= start && end <= endByte)\n if (index < 0) throw new Error(`no prefetch for range [${start}, ${end}]`)\n if (fetches[index].startByte !== start || fetches[index].endByte !== end) {\n // slice a subrange of the prefetch\n const startOffset = start - fetches[index].startByte\n const endOffset = end - fetches[index].startByte\n if (promises[index] instanceof Promise) {\n return promises[index].then(buffer => buffer.slice(startOffset, endOffset))\n } else {\n return promises[index].slice(startOffset, endOffset)\n }\n } else {\n return promises[index]\n }\n },\n }\n}\n","import { getMaxDefinitionLevel, isListLike, isMapLike } from './schema.js'\n\n/**\n * Reconstructs a complex nested structure from flat arrays of values and\n * definition and repetition levels, according to Dremel encoding.\n *\n * @param {any[]} output\n * @param {number[] | undefined} definitionLevels\n * @param {number[]} repetitionLevels\n * @param {DecodedArray} values\n * @param {SchemaTree[]} schemaPath\n * @returns {DecodedArray}\n */\nexport function assembleLists(output, definitionLevels, repetitionLevels, values, schemaPath) {\n const n = definitionLevels?.length || repetitionLevels.length\n if (!n) return values\n const maxDefinitionLevel = getMaxDefinitionLevel(schemaPath)\n const repetitionPath = schemaPath.map(({ element }) => element.repetition_type)\n let valueIndex = 0\n\n // Track state of nested structures\n const containerStack = [output]\n let currentContainer = output\n let currentDepth = 0 // schema depth\n let currentDefLevel = 0 // list depth\n let currentRepLevel = 0\n\n if (repetitionLevels[0]) {\n // continue previous row\n while (currentDepth < repetitionPath.length - 2 && 
currentRepLevel < repetitionLevels[0]) {\n currentDepth++\n if (repetitionPath[currentDepth] !== 'REQUIRED') {\n // go into last list\n currentContainer = currentContainer.at(-1)\n containerStack.push(currentContainer)\n currentDefLevel++\n }\n if (repetitionPath[currentDepth] === 'REPEATED') currentRepLevel++\n }\n }\n\n for (let i = 0; i < n; i++) {\n // assert(currentDefLevel === containerStack.length - 1)\n const def = definitionLevels?.length ? definitionLevels[i] : maxDefinitionLevel\n const rep = repetitionLevels[i]\n\n // Pop up to start of rep level\n while (currentDepth && (rep < currentRepLevel || repetitionPath[currentDepth] !== 'REPEATED')) {\n if (repetitionPath[currentDepth] !== 'REQUIRED') {\n containerStack.pop()\n currentDefLevel--\n }\n if (repetitionPath[currentDepth] === 'REPEATED') currentRepLevel--\n currentDepth--\n }\n // @ts-expect-error won't be empty\n currentContainer = containerStack.at(-1)\n\n // Go deeper to end of definition level\n while (\n (currentDepth < repetitionPath.length - 2 || repetitionPath[currentDepth + 1] === 'REPEATED') &&\n (currentDefLevel < def || repetitionPath[currentDepth + 1] === 'REQUIRED')\n ) {\n currentDepth++\n if (repetitionPath[currentDepth] !== 'REQUIRED') {\n /** @type {any[]} */\n const newList = []\n currentContainer.push(newList)\n currentContainer = newList\n containerStack.push(newList)\n currentDefLevel++\n }\n if (repetitionPath[currentDepth] === 'REPEATED') currentRepLevel++\n }\n\n // Add value or null based on definition level\n if (def === maxDefinitionLevel) {\n // assert(currentDepth === maxDefinitionLevel || currentDepth === repetitionPath.length - 2)\n currentContainer.push(values[valueIndex++])\n } else if (currentDepth === repetitionPath.length - 2) {\n currentContainer.push(null)\n } else {\n currentContainer.push([])\n }\n }\n\n // Handle edge cases for empty inputs or single-level data\n if (!output.length) {\n // build an empty nested list structure down to max definition level\n for (let i = 0; i < maxDefinitionLevel; i++) {\n /** @type {any[]} */\n const newList = []\n currentContainer.push(newList)\n currentContainer = newList\n }\n }\n\n return output\n}\n
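\n// Worked example (illustrative): for an optional LIST of optional int32 with\n// rows [[1,2],[3]], the flat values are [1,2,3] with repetition levels\n// [0,1,0] (0 starts a new row) and definition levels [3,3,3] (all values\n// present); assembleLists rebuilds [[1,2],[3]] from these levels.\n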
\n/**\n * Assemble a nested structure from subcolumn data.\n * https://github.com/apache/parquet-format/blob/apache-parquet-format-2.10.0/LogicalTypes.md#nested-types\n *\n * @param {Map<string, DecodedArray>} subcolumnData\n * @param {SchemaTree} schema top-level schema element\n * @param {number} [depth] depth of nested structure\n */\nexport function assembleNested(subcolumnData, schema, depth = 0) {\n const path = schema.path.join('.')\n const optional = schema.element.repetition_type === 'OPTIONAL'\n const nextDepth = optional ? depth + 1 : depth\n\n if (isListLike(schema)) {\n let sublist = schema.children[0]\n let subDepth = nextDepth\n if (sublist.children.length === 1) {\n sublist = sublist.children[0]\n subDepth++\n }\n assembleNested(subcolumnData, sublist, subDepth)\n\n const subcolumn = sublist.path.join('.')\n const values = subcolumnData.get(subcolumn)\n if (!values) throw new Error('parquet list column missing values')\n if (optional) flattenAtDepth(values, depth)\n subcolumnData.set(path, values)\n subcolumnData.delete(subcolumn)\n return\n }\n\n if (isMapLike(schema)) {\n const mapName = schema.children[0].element.name\n\n // Assemble keys and values\n assembleNested(subcolumnData, schema.children[0].children[0], nextDepth + 1)\n assembleNested(subcolumnData, schema.children[0].children[1], nextDepth + 1)\n\n const keys = subcolumnData.get(`${path}.${mapName}.key`)\n const values = subcolumnData.get(`${path}.${mapName}.value`)\n\n if (!keys) throw new Error('parquet map column missing keys')\n if (!values) throw new Error('parquet map column missing values')\n if (keys.length !== values.length) {\n throw new Error('parquet map column key/value length mismatch')\n }\n\n const out = assembleMaps(keys, values, nextDepth)\n if (optional) flattenAtDepth(out, depth)\n\n subcolumnData.delete(`${path}.${mapName}.key`)\n subcolumnData.delete(`${path}.${mapName}.value`)\n subcolumnData.set(path, out)\n return\n }\n\n // Struct-like column\n if (schema.children.length) {\n // construct a meta struct and then invert\n const invertDepth = schema.element.repetition_type === 'REQUIRED' ? depth : depth + 1\n /** @type {Record<string, DecodedArray>} */\n const struct = {}\n for (const child of schema.children) {\n assembleNested(subcolumnData, child, invertDepth)\n const childData = subcolumnData.get(child.path.join('.'))\n if (!childData) throw new Error('parquet struct missing child data')\n struct[child.element.name] = childData\n }\n // remove children\n for (const child of schema.children) {\n subcolumnData.delete(child.path.join('.'))\n }\n // invert struct by depth\n const inverted = invertStruct(struct, invertDepth)\n if (optional) flattenAtDepth(inverted, depth)\n subcolumnData.set(path, inverted)\n }\n}\n
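\n// Worked example (illustrative): for map rows [{ a: 1, b: 2 }], the key and\n// value subcolumns arrive as keys = [['a','b']], values = [[1,2]];\n// assembleMaps(keys, values, 0) below zips them back into [{ a: 1, b: 2 }],\n// storing null for any undefined value.\n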
\n/**\n * @import {DecodedArray, SchemaTree} from '../src/types.d.ts'\n * @param {DecodedArray} arr\n * @param {number} depth\n */\nfunction flattenAtDepth(arr, depth) {\n for (let i = 0; i < arr.length; i++) {\n if (depth) {\n flattenAtDepth(arr[i], depth - 1)\n } else {\n arr[i] = arr[i][0]\n }\n }\n}\n\n/**\n * @param {DecodedArray} keys\n * @param {DecodedArray} values\n * @param {number} depth\n * @returns {any[]}\n */\nfunction assembleMaps(keys, values, depth) {\n const out = []\n for (let i = 0; i < keys.length; i++) {\n if (depth) {\n out.push(assembleMaps(keys[i], values[i], depth - 1)) // go deeper\n } else {\n if (keys[i]) {\n /** @type {Record<string, any>} */\n const obj = {}\n for (let j = 0; j < keys[i].length; j++) {\n const value = values[i][j]\n obj[keys[i][j]] = value === undefined ? null : value\n }\n out.push(obj)\n } else {\n out.push(undefined)\n }\n }\n }\n return out\n}\n\n/**\n * Invert a struct-like object by depth.\n *\n * @param {Record<string, any[]>} struct\n * @param {number} depth\n * @returns {any[]}\n */\nfunction invertStruct(struct, depth) {\n const keys = Object.keys(struct)\n const length = struct[keys[0]]?.length\n const out = []\n for (let i = 0; i < length; i++) {\n /** @type {Record<string, any>} */\n const obj = {}\n for (const key of keys) {\n if (struct[key].length !== length) throw new Error('parquet struct parsing error')\n obj[key] = struct[key][i]\n }\n if (depth) {\n out.push(invertStruct(obj, depth - 1)) // deeper\n } else {\n out.push(obj)\n }\n }\n return out\n}\n
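","import { readVarInt, readZigZagBigInt } from './thrift.js'\n\n// DELTA_BINARY_PACKED layout (per the Parquet spec, illustrative): a header of\n// <block size in values> <miniblocks per block> <total value count>\n// <first value (zigzag)>, then per block: <min delta (zigzag)>\n// <one bitwidth per miniblock> <bit-packed deltas>. Each value is\n// reconstructed as previous + minDelta + packed delta, as decoded below.\n\n/**\n * @import {DataReader} from '../src/types.d.ts'\n * @param {DataReader} reader\n * @param {number} count number of values to read\n * @param {Int32Array | BigInt64Array} output\n */\nexport function deltaBinaryUnpack(reader, count, output) {\n const int32 = output instanceof Int32Array\n const blockSize = readVarInt(reader)\n const miniblockPerBlock = readVarInt(reader)\n readVarInt(reader) // assert(=== count)\n let value = readZigZagBigInt(reader) // first value\n let outputIndex = 0\n output[outputIndex++] = int32 ? Number(value) : value\n\n const valuesPerMiniblock = blockSize / miniblockPerBlock\n\n while (outputIndex < count) {\n // new block\n const minDelta = readZigZagBigInt(reader)\n const bitWidths = new Uint8Array(miniblockPerBlock)\n for (let i = 0; i < miniblockPerBlock; i++) {\n bitWidths[i] = reader.view.getUint8(reader.offset++)\n }\n\n for (let i = 0; i < miniblockPerBlock && outputIndex < count; i++) {\n // new miniblock\n const bitWidth = BigInt(bitWidths[i])\n if (bitWidth) {\n let bitpackPos = 0n\n let miniblockCount = valuesPerMiniblock\n const mask = (1n << bitWidth) - 1n\n while (miniblockCount && outputIndex < count) {\n let bits = BigInt(reader.view.getUint8(reader.offset)) >> bitpackPos & mask // TODO: don't re-read value every time\n bitpackPos += bitWidth\n while (bitpackPos >= 8) {\n bitpackPos -= 8n\n reader.offset++\n if (bitpackPos) {\n bits |= BigInt(reader.view.getUint8(reader.offset)) << bitWidth - bitpackPos & mask\n }\n }\n const delta = minDelta + bits\n value += delta\n output[outputIndex++] = int32 ? Number(value) : value\n miniblockCount--\n }\n if (miniblockCount) {\n // consume leftover miniblock\n reader.offset += Math.ceil((miniblockCount * Number(bitWidth) + Number(bitpackPos)) / 8)\n }\n } else {\n for (let j = 0; j < valuesPerMiniblock && outputIndex < count; j++) {\n value += minDelta\n output[outputIndex++] = int32 ? 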
Number(value) : value\n }\n }\n }\n }\n}\n\n/**\n * @param {DataReader} reader\n * @param {number} count\n * @param {Uint8Array[]} output\n */\nexport function deltaLengthByteArray(reader, count, output) {\n const lengths = new Int32Array(count)\n deltaBinaryUnpack(reader, count, lengths)\n for (let i = 0; i < count; i++) {\n output[i] = new Uint8Array(reader.view.buffer, reader.view.byteOffset + reader.offset, lengths[i])\n reader.offset += lengths[i]\n }\n}\n\n/**\n * @param {DataReader} reader\n * @param {number} count\n * @param {Uint8Array[]} output\n */\nexport function deltaByteArray(reader, count, output) {\n const prefixData = new Int32Array(count)\n deltaBinaryUnpack(reader, count, prefixData)\n const suffixData = new Int32Array(count)\n deltaBinaryUnpack(reader, count, suffixData)\n\n for (let i = 0; i < count; i++) {\n const suffix = new Uint8Array(reader.view.buffer, reader.view.byteOffset + reader.offset, suffixData[i])\n if (prefixData[i]) {\n // copy from previous value\n output[i] = new Uint8Array(prefixData[i] + suffixData[i])\n output[i].set(output[i - 1].subarray(0, prefixData[i]))\n output[i].set(suffix, prefixData[i])\n } else {\n output[i] = suffix\n }\n reader.offset += suffixData[i]\n }\n}\n","import { readVarInt } from './thrift.js'\n\n/**\n * Minimum bits needed to store value.\n *\n * @param {number} value\n * @returns {number}\n */\nexport function bitWidth(value) {\n return 32 - Math.clz32(value)\n}\n\n/**\n * Read values from a run-length encoded/bit-packed hybrid encoding.\n *\n * If length is zero, then read int32 length at the start.\n *\n * @param {DataReader} reader\n * @param {number} width - bitwidth\n * @param {DecodedArray} output\n * @param {number} [length] - length of the encoded data\n */\nexport function readRleBitPackedHybrid(reader, width, output, length) {\n if (length === undefined) {\n length = reader.view.getUint32(reader.offset, true)\n reader.offset += 4\n }\n const startOffset = reader.offset\n let seen = 0\n while (seen < output.length) {\n const header = readVarInt(reader)\n if (header & 1) {\n // bit-packed\n seen = readBitPacked(reader, header, width, output, seen)\n } else {\n // rle\n const count = header >>> 1\n readRle(reader, count, width, output, seen)\n seen += count\n }\n }\n reader.offset = startOffset + length // duckdb writes an empty block\n}\n\n/**\n * Run-length encoding: read value with bitWidth and repeat it count times.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @param {number} bitWidth\n * @param {DecodedArray} output\n * @param {number} seen\n */\nfunction readRle(reader, count, bitWidth, output, seen) {\n const width = bitWidth + 7 >> 3\n let value = 0\n for (let i = 0; i < width; i++) {\n value |= reader.view.getUint8(reader.offset++) << (i << 3)\n }\n // assert(value < 1 << bitWidth)\n\n // repeat value count times\n for (let i = 0; i < count; i++) {\n output[seen + i] = value\n }\n}\n\n/**\n * Read a bit-packed run of the rle/bitpack hybrid.\n * Supports width > 8 (crossing bytes).\n *\n * @param {DataReader} reader\n * @param {number} header - bit-pack header\n * @param {number} bitWidth\n * @param {DecodedArray} output\n * @param {number} seen\n * @returns {number} total output values so far\n */\nfunction readBitPacked(reader, header, bitWidth, output, seen) {\n let count = header >> 1 << 3 // values to read\n const mask = (1 << bitWidth) - 1\n\n let data = 0\n if (reader.offset < reader.view.byteLength) {\n data = reader.view.getUint8(reader.offset++)\n } else if (mask) {\n // 
sometimes out-of-bounds reads are masked out\n throw new Error(`parquet bitpack offset ${reader.offset} out of range`)\n }\n let left = 8\n let right = 0\n\n // read values\n while (count) {\n // if we have crossed a byte boundary, shift the data\n if (right > 8) {\n right -= 8\n left -= 8\n data >>>= 8\n } else if (left - right < bitWidth) {\n // if we don't have bitWidth number of bits to read, read next byte\n data |= reader.view.getUint8(reader.offset) << left\n reader.offset++\n left += 8\n } else {\n if (seen < output.length) {\n // emit value\n output[seen++] = data >> right & mask\n }\n count--\n right += bitWidth\n }\n }\n\n return seen\n}\n\n/**\n * @param {DataReader} reader\n * @param {number} count\n * @param {ParquetType} type\n * @param {number | undefined} typeLength\n * @returns {DecodedArray}\n */\nexport function byteStreamSplit(reader, count, type, typeLength) {\n const width = byteWidth(type, typeLength)\n const bytes = new Uint8Array(count * width)\n for (let b = 0; b < width; b++) {\n for (let i = 0; i < count; i++) {\n bytes[i * width + b] = reader.view.getUint8(reader.offset++)\n }\n }\n // interpret bytes as typed array\n if (type === 'FLOAT') return new Float32Array(bytes.buffer)\n else if (type === 'DOUBLE') return new Float64Array(bytes.buffer)\n else if (type === 'INT32') return new Int32Array(bytes.buffer)\n else if (type === 'INT64') return new BigInt64Array(bytes.buffer)\n else if (type === 'FIXED_LEN_BYTE_ARRAY') {\n // split into arrays of typeLength\n const split = new Array(count)\n for (let i = 0; i < count; i++) {\n split[i] = bytes.subarray(i * width, (i + 1) * width)\n }\n return split\n }\n throw new Error(`parquet byte_stream_split unsupported type: ${type}`)\n}\n\n/**\n * @import {DataReader, DecodedArray, ParquetType} from '../src/types.d.ts'\n * @param {ParquetType} type\n * @param {number | undefined} typeLength\n * @returns {number}\n */\nfunction byteWidth(type, typeLength) {\n switch (type) {\n case 'INT32':\n case 'FLOAT':\n return 4\n case 'INT64':\n case 'DOUBLE':\n return 8\n case 'FIXED_LEN_BYTE_ARRAY':\n if (!typeLength) throw new Error('parquet byteWidth missing type_length')\n return typeLength\n default:\n throw new Error(`parquet unsupported type: ${type}`)\n }\n}\n","/**\n * Read `count` values of the given type from the reader.view.\n *\n * @param {DataReader} reader - buffer to read data from\n * @param {ParquetType} type - parquet type of the data\n * @param {number} count - number of values to read\n * @param {number | undefined} fixedLength - length of each fixed length byte array\n * @returns {DecodedArray} array of values\n */\nexport function readPlain(reader, type, count, fixedLength) {\n if (count === 0) return []\n if (type === 'BOOLEAN') {\n return readPlainBoolean(reader, count)\n } else if (type === 'INT32') {\n return readPlainInt32(reader, count)\n } else if (type === 'INT64') {\n return readPlainInt64(reader, count)\n } else if (type === 'INT96') {\n return readPlainInt96(reader, count)\n } else if (type === 'FLOAT') {\n return readPlainFloat(reader, count)\n } else if (type === 'DOUBLE') {\n return readPlainDouble(reader, count)\n } else if (type === 'BYTE_ARRAY') {\n return readPlainByteArray(reader, count)\n } else if (type === 'FIXED_LEN_BYTE_ARRAY') {\n if (!fixedLength) throw new Error('parquet missing fixed length')\n return readPlainByteArrayFixed(reader, count, fixedLength)\n } else {\n throw new Error(`parquet unhandled type: ${type}`)\n }\n}\n\n/**\n * Read `count` boolean values.\n *\n * @param 
{DataReader} reader\n * @param {number} count\n * @returns {boolean[]}\n */\nfunction readPlainBoolean(reader, count) {\n const values = new Array(count)\n for (let i = 0; i < count; i++) {\n const byteOffset = reader.offset + (i / 8 | 0)\n const bitOffset = i % 8\n const byte = reader.view.getUint8(byteOffset)\n values[i] = (byte & 1 << bitOffset) !== 0\n }\n reader.offset += Math.ceil(count / 8)\n return values\n}\n\n/**\n * Read `count` int32 values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {Int32Array}\n */\nfunction readPlainInt32(reader, count) {\n const values = (reader.view.byteOffset + reader.offset) % 4\n ? new Int32Array(align(reader.view.buffer, reader.view.byteOffset + reader.offset, count * 4))\n : new Int32Array(reader.view.buffer, reader.view.byteOffset + reader.offset, count)\n reader.offset += count * 4\n return values\n}\n\n/**\n * Read `count` int64 values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {BigInt64Array}\n */\nfunction readPlainInt64(reader, count) {\n const values = (reader.view.byteOffset + reader.offset) % 8\n ? new BigInt64Array(align(reader.view.buffer, reader.view.byteOffset + reader.offset, count * 8))\n : new BigInt64Array(reader.view.buffer, reader.view.byteOffset + reader.offset, count)\n reader.offset += count * 8\n return values\n}\n\n/**\n * Read `count` int96 values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {bigint[]}\n */\nfunction readPlainInt96(reader, count) {\n const values = new Array(count)\n for (let i = 0; i < count; i++) {\n const low = reader.view.getBigInt64(reader.offset + i * 12, true)\n const high = reader.view.getInt32(reader.offset + i * 12 + 8, true)\n values[i] = BigInt(high) << 64n | low\n }\n reader.offset += count * 12\n return values\n}\n\n/**\n * Read `count` float values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {Float32Array}\n */\nfunction readPlainFloat(reader, count) {\n const values = (reader.view.byteOffset + reader.offset) % 4\n ? new Float32Array(align(reader.view.buffer, reader.view.byteOffset + reader.offset, count * 4))\n : new Float32Array(reader.view.buffer, reader.view.byteOffset + reader.offset, count)\n reader.offset += count * 4\n return values\n}\n\n/**\n * Read `count` double values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {Float64Array}\n */\nfunction readPlainDouble(reader, count) {\n const values = (reader.view.byteOffset + reader.offset) % 8\n ? 
new Float64Array(align(reader.view.buffer, reader.view.byteOffset + reader.offset, count * 8))\n : new Float64Array(reader.view.buffer, reader.view.byteOffset + reader.offset, count)\n reader.offset += count * 8\n return values\n}\n\n/**\n * Read `count` byte array values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {Uint8Array[]}\n */\nfunction readPlainByteArray(reader, count) {\n const values = new Array(count)\n for (let i = 0; i < count; i++) {\n const length = reader.view.getUint32(reader.offset, true)\n reader.offset += 4\n values[i] = new Uint8Array(reader.view.buffer, reader.view.byteOffset + reader.offset, length)\n reader.offset += length\n }\n return values\n}\n\n/**\n * Read a fixed length byte array.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @param {number} fixedLength\n * @returns {Uint8Array[]}\n */\nfunction readPlainByteArrayFixed(reader, count, fixedLength) {\n // assert(reader.view.byteLength - reader.offset >= count * fixedLength)\n const values = new Array(count)\n for (let i = 0; i < count; i++) {\n values[i] = new Uint8Array(reader.view.buffer, reader.view.byteOffset + reader.offset, fixedLength)\n reader.offset += fixedLength\n }\n return values\n}\n\n/**\n * Create a new buffer with the offset and size.\n *\n * @import {DataReader, DecodedArray, ParquetType} from '../src/types.d.ts'\n * @param {ArrayBufferLike} buffer\n * @param {number} offset\n * @param {number} size\n * @returns {ArrayBuffer}\n */\nfunction align(buffer, offset, size) {\n const aligned = new ArrayBuffer(size)\n new Uint8Array(aligned).set(new Uint8Array(buffer, offset, size))\n return aligned\n}\n","/**\n * The MIT License (MIT)\n * Copyright (c) 2016 Zhipeng Jia\n * https://github.com/zhipeng-jia/snappyjs\n */\n\nconst WORD_MASK = [0, 0xff, 0xffff, 0xffffff, 0xffffffff]\n\n/**\n * Copy bytes from one array to another\n *\n * @param {Uint8Array} fromArray source array\n * @param {number} fromPos source position\n * @param {Uint8Array} toArray destination array\n * @param {number} toPos destination position\n * @param {number} length number of bytes to copy\n */\nfunction copyBytes(fromArray, fromPos, toArray, toPos, length) {\n for (let i = 0; i < length; i++) {\n toArray[toPos + i] = fromArray[fromPos + i]\n }\n}\n\n/**\n * Decompress snappy data.\n * Accepts an output buffer to avoid allocating a new buffer for each call.\n *\n * @param {Uint8Array} input compressed data\n * @param {Uint8Array} output output buffer\n */\nexport function snappyUncompress(input, output) {\n const inputLength = input.byteLength\n const outputLength = output.byteLength\n let pos = 0\n let outPos = 0\n\n // skip preamble (contains uncompressed length as varint)\n while (pos < inputLength) {\n const c = input[pos]\n pos++\n if (c < 128) {\n break\n }\n }\n if (outputLength && pos >= inputLength) {\n throw new Error('invalid snappy length header')\n }\n\n while (pos < inputLength) {\n const c = input[pos]\n let len = 0\n pos++\n\n if (pos >= inputLength) {\n throw new Error('missing eof marker')\n }\n\n // There are two types of elements, literals and copies (back references)\n if ((c & 0x3) === 0) {\n // Literals are uncompressed data stored directly in the byte stream\n let len = (c >>> 2) + 1\n // Longer literal length is encoded in multiple bytes\n if (len > 60) {\n if (pos + 3 >= inputLength) {\n throw new Error('snappy error literal pos + 3 >= inputLength')\n }\n const lengthSize = len - 60 // length bytes - 1\n len = input[pos]\n + (input[pos + 1] << 8)\n + 
(input[pos + 2] << 16)\n + (input[pos + 3] << 24)\n len = (len & WORD_MASK[lengthSize]) + 1\n pos += lengthSize\n }\n if (pos + len > inputLength) {\n throw new Error('snappy error literal exceeds input length')\n }\n copyBytes(input, pos, output, outPos, len)\n pos += len\n outPos += len\n } else {\n // Copy elements\n let offset = 0 // offset back from current position to read\n switch (c & 0x3) {\n case 1:\n // Copy with 1-byte offset\n len = (c >>> 2 & 0x7) + 4\n offset = input[pos] + (c >>> 5 << 8)\n pos++\n break\n case 2:\n // Copy with 2-byte offset\n if (inputLength <= pos + 1) {\n throw new Error('snappy error end of input')\n }\n len = (c >>> 2) + 1\n offset = input[pos] + (input[pos + 1] << 8)\n pos += 2\n break\n case 3:\n // Copy with 4-byte offset\n if (inputLength <= pos + 3) {\n throw new Error('snappy error end of input')\n }\n len = (c >>> 2) + 1\n offset = input[pos]\n + (input[pos + 1] << 8)\n + (input[pos + 2] << 16)\n + (input[pos + 3] << 24)\n pos += 4\n break\n default:\n break\n }\n if (offset === 0 || isNaN(offset)) {\n throw new Error(`invalid offset ${offset} pos ${pos} inputLength ${inputLength}`)\n }\n if (offset > outPos) {\n throw new Error('cannot copy from before start of buffer')\n }\n copyBytes(output, outPos - offset, output, outPos, len)\n outPos += len\n }\n }\n\n if (outPos !== outputLength) throw new Error('premature end of input')\n}\n","import { deltaBinaryUnpack, deltaByteArray, deltaLengthByteArray } from './delta.js'\nimport { bitWidth, byteStreamSplit, readRleBitPackedHybrid } from './encoding.js'\nimport { readPlain } from './plain.js'\nimport { getMaxDefinitionLevel, getMaxRepetitionLevel } from './schema.js'\nimport { snappyUncompress } from './snappy.js'\n\n/**\n * Read a data page from uncompressed reader.\n *\n * @param {Uint8Array} bytes raw page data (should already be decompressed)\n * @param {DataPageHeader} daph data page header\n * @param {ColumnDecoder} columnDecoder\n * @returns {DataPage} definition levels, repetition levels, and array of values\n */\nexport function readDataPage(bytes, daph, { type, element, schemaPath }) {\n const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength)\n const reader = { view, offset: 0 }\n /** @type {DecodedArray} */\n let dataPage\n\n // repetition and definition levels\n const repetitionLevels = readRepetitionLevels(reader, daph, schemaPath)\n // assert(!repetitionLevels.length || repetitionLevels.length === daph.num_values)\n const { definitionLevels, numNulls } = readDefinitionLevels(reader, daph, schemaPath)\n // assert(!definitionLevels.length || definitionLevels.length === daph.num_values)\n\n // read values based on encoding\n const nValues = daph.num_values - numNulls\n if (daph.encoding === 'PLAIN') {\n dataPage = readPlain(reader, type, nValues, element.type_length)\n } else if (\n daph.encoding === 'PLAIN_DICTIONARY' ||\n daph.encoding === 'RLE_DICTIONARY' ||\n daph.encoding === 'RLE'\n ) {\n const bitWidth = type === 'BOOLEAN' ? 
1 : view.getUint8(reader.offset++)\n if (bitWidth) {\n dataPage = new Array(nValues)\n if (type === 'BOOLEAN') {\n readRleBitPackedHybrid(reader, bitWidth, dataPage)\n dataPage = dataPage.map(x => !!x) // convert to boolean\n } else {\n // assert(daph.encoding.endsWith('_DICTIONARY'))\n readRleBitPackedHybrid(reader, bitWidth, dataPage, view.byteLength - reader.offset)\n }\n } else {\n dataPage = new Uint8Array(nValues) // nValue zeroes\n }\n } else if (daph.encoding === 'BYTE_STREAM_SPLIT') {\n dataPage = byteStreamSplit(reader, nValues, type, element.type_length)\n } else if (daph.encoding === 'DELTA_BINARY_PACKED') {\n const int32 = type === 'INT32'\n dataPage = int32 ? new Int32Array(nValues) : new BigInt64Array(nValues)\n deltaBinaryUnpack(reader, nValues, dataPage)\n } else if (daph.encoding === 'DELTA_LENGTH_BYTE_ARRAY') {\n dataPage = new Array(nValues)\n deltaLengthByteArray(reader, nValues, dataPage)\n } else {\n throw new Error(`parquet unsupported encoding: ${daph.encoding}`)\n }\n\n return { definitionLevels, repetitionLevels, dataPage }\n}\n\n/**\n * @import {ColumnDecoder, CompressionCodec, Compressors, DataPage, DataPageHeader, DataPageHeaderV2, DataReader, DecodedArray, PageHeader, SchemaTree} from '../src/types.d.ts'\n * @param {DataReader} reader data view for the page\n * @param {DataPageHeader} daph data page header\n * @param {SchemaTree[]} schemaPath\n * @returns {any[]} repetition levels and number of bytes read\n */\nfunction readRepetitionLevels(reader, daph, schemaPath) {\n if (schemaPath.length > 1) {\n const maxRepetitionLevel = getMaxRepetitionLevel(schemaPath)\n if (maxRepetitionLevel) {\n const values = new Array(daph.num_values)\n readRleBitPackedHybrid(reader, bitWidth(maxRepetitionLevel), values)\n return values\n }\n }\n return []\n}\n\n/**\n * @param {DataReader} reader data view for the page\n * @param {DataPageHeader} daph data page header\n * @param {SchemaTree[]} schemaPath\n * @returns {{ definitionLevels: number[], numNulls: number }} definition levels\n */\nfunction readDefinitionLevels(reader, daph, schemaPath) {\n const maxDefinitionLevel = getMaxDefinitionLevel(schemaPath)\n if (!maxDefinitionLevel) return { definitionLevels: [], numNulls: 0 }\n\n const definitionLevels = new Array(daph.num_values)\n readRleBitPackedHybrid(reader, bitWidth(maxDefinitionLevel), definitionLevels)\n\n // count nulls\n let numNulls = daph.num_values\n for (const def of definitionLevels) {\n if (def === maxDefinitionLevel) numNulls--\n }\n if (numNulls === 0) definitionLevels.length = 0\n\n return { definitionLevels, numNulls }\n}\n\n/**\n * @param {Uint8Array} compressedBytes\n * @param {number} uncompressed_page_size\n * @param {CompressionCodec} codec\n * @param {Compressors | undefined} compressors\n * @returns {Uint8Array}\n */\nexport function decompressPage(compressedBytes, uncompressed_page_size, codec, compressors) {\n /** @type {Uint8Array} */\n let page\n const customDecompressor = compressors?.[codec]\n if (codec === 'UNCOMPRESSED') {\n page = compressedBytes\n } else if (customDecompressor) {\n page = customDecompressor(compressedBytes, uncompressed_page_size)\n } else if (codec === 'SNAPPY') {\n page = new Uint8Array(uncompressed_page_size)\n snappyUncompress(compressedBytes, page)\n } else {\n throw new Error(`parquet unsupported compression codec: ${codec}`)\n }\n if (page?.length !== uncompressed_page_size) {\n throw new Error(`parquet decompressed page length ${page?.length} does not match header ${uncompressed_page_size}`)\n }\n return 
page\n}\n\n\n/**\n * Read a data page from the given Uint8Array.\n *\n * @param {Uint8Array} compressedBytes raw page data\n * @param {PageHeader} ph page header\n * @param {ColumnDecoder} columnDecoder\n * @returns {DataPage} definition levels, repetition levels, and array of values\n */\nexport function readDataPageV2(compressedBytes, ph, columnDecoder) {\n const view = new DataView(compressedBytes.buffer, compressedBytes.byteOffset, compressedBytes.byteLength)\n const reader = { view, offset: 0 }\n const { type, element, schemaPath, codec, compressors } = columnDecoder\n const daph2 = ph.data_page_header_v2\n if (!daph2) throw new Error('parquet data page header v2 is undefined')\n\n // repetition levels\n const repetitionLevels = readRepetitionLevelsV2(reader, daph2, schemaPath)\n reader.offset = daph2.repetition_levels_byte_length // position reader after repetition levels\n\n // definition levels\n const definitionLevels = readDefinitionLevelsV2(reader, daph2, schemaPath)\n // assert(reader.offset === daph2.repetition_levels_byte_length + daph2.definition_levels_byte_length)\n\n const uncompressedPageSize = ph.uncompressed_page_size - daph2.definition_levels_byte_length - daph2.repetition_levels_byte_length\n\n let page = compressedBytes.subarray(reader.offset)\n if (daph2.is_compressed !== false) {\n page = decompressPage(page, uncompressedPageSize, codec, compressors)\n }\n const pageView = new DataView(page.buffer, page.byteOffset, page.byteLength)\n const pageReader = { view: pageView, offset: 0 }\n\n // read values based on encoding\n /** @type {DecodedArray} */\n let dataPage\n const nValues = daph2.num_values - daph2.num_nulls\n if (daph2.encoding === 'PLAIN') {\n dataPage = readPlain(pageReader, type, nValues, element.type_length)\n } else if (daph2.encoding === 'RLE') {\n // assert(type === 'BOOLEAN')\n dataPage = new Array(nValues)\n readRleBitPackedHybrid(pageReader, 1, dataPage)\n dataPage = dataPage.map(x => !!x)\n } else if (\n daph2.encoding === 'PLAIN_DICTIONARY' ||\n daph2.encoding === 'RLE_DICTIONARY'\n ) {\n const bitWidth = pageView.getUint8(pageReader.offset++)\n dataPage = new Array(nValues)\n readRleBitPackedHybrid(pageReader, bitWidth, dataPage, uncompressedPageSize - 1)\n } else if (daph2.encoding === 'DELTA_BINARY_PACKED') {\n const int32 = type === 'INT32'\n dataPage = int32 ? 
new Int32Array(nValues) : new BigInt64Array(nValues)\n deltaBinaryUnpack(pageReader, nValues, dataPage)\n } else if (daph2.encoding === 'DELTA_LENGTH_BYTE_ARRAY') {\n dataPage = new Array(nValues)\n deltaLengthByteArray(pageReader, nValues, dataPage)\n } else if (daph2.encoding === 'DELTA_BYTE_ARRAY') {\n dataPage = new Array(nValues)\n deltaByteArray(pageReader, nValues, dataPage)\n } else if (daph2.encoding === 'BYTE_STREAM_SPLIT') {\n dataPage = byteStreamSplit(pageReader, nValues, type, element.type_length)\n } else {\n throw new Error(`parquet unsupported encoding: ${daph2.encoding}`)\n }\n\n return { definitionLevels, repetitionLevels, dataPage }\n}\n\n/**\n * @param {DataReader} reader\n * @param {DataPageHeaderV2} daph2 data page header v2\n * @param {SchemaTree[]} schemaPath\n * @returns {any[]} repetition levels\n */\nfunction readRepetitionLevelsV2(reader, daph2, schemaPath) {\n const maxRepetitionLevel = getMaxRepetitionLevel(schemaPath)\n if (!maxRepetitionLevel) return []\n\n const values = new Array(daph2.num_values)\n readRleBitPackedHybrid(reader, bitWidth(maxRepetitionLevel), values, daph2.repetition_levels_byte_length)\n return values\n}\n\n/**\n * @param {DataReader} reader\n * @param {DataPageHeaderV2} daph2 data page header v2\n * @param {SchemaTree[]} schemaPath\n * @returns {number[] | undefined} definition levels\n */\nfunction readDefinitionLevelsV2(reader, daph2, schemaPath) {\n const maxDefinitionLevel = getMaxDefinitionLevel(schemaPath)\n if (maxDefinitionLevel) {\n // in V2 the byte length is known from the header\n const values = new Array(daph2.num_values)\n readRleBitPackedHybrid(reader, bitWidth(maxDefinitionLevel), values, daph2.definition_levels_byte_length)\n return values\n }\n}\n","import { assembleLists } from './assemble.js'\nimport { Encoding, PageType } from './constants.js'\nimport { convert, convertWithDictionary } from './convert.js'\nimport { decompressPage, readDataPage, readDataPageV2 } from './datapage.js'\nimport { readPlain } from './plain.js'\nimport { isFlatColumn } from './schema.js'\nimport { deserializeTCompactProtocol } from './thrift.js'\n\n/**\n * Parse column data from a buffer.\n *\n * @param {DataReader} reader\n * @param {RowGroupSelect} rowGroupSelect row group selection\n * @param {ColumnDecoder} columnDecoder column decoder params\n * @param {(chunk: ColumnData) => void} [onPage] callback for each page\n * @returns {DecodedArray[]}\n */\nexport function readColumn(reader, { groupStart, selectStart, selectEnd }, columnDecoder, onPage) {\n const { columnName } = columnDecoder\n /** @type {DecodedArray[]} */\n const chunks = []\n /** @type {DecodedArray | undefined} */\n let dictionary = undefined\n /** @type {DecodedArray | undefined} */\n let lastChunk = undefined\n let rowCount = 0\n\n const emitLastChunk = onPage && (() => {\n lastChunk && onPage({\n columnName,\n columnData: lastChunk,\n rowStart: groupStart + rowCount - lastChunk.length,\n rowEnd: groupStart + rowCount,\n })\n })\n\n while (rowCount < selectEnd) {\n if (reader.offset >= reader.view.byteLength - 1) break // end of reader\n\n // read page header\n const header = parquetHeader(reader)\n if (header.type === 'DICTIONARY_PAGE') {\n // assert(!dictionary)\n dictionary = readPage(reader, header, columnDecoder, dictionary, undefined, 0)\n dictionary = convert(dictionary, columnDecoder)\n } else {\n const lastChunkLength = lastChunk?.length || 0\n const values = readPage(reader, header, columnDecoder, dictionary, lastChunk, selectStart - rowCount)\n if (lastChunk === values) {\n // continued 
from previous page\n rowCount += values.length - lastChunkLength\n } else {\n emitLastChunk?.()\n chunks.push(values)\n rowCount += values.length\n lastChunk = values\n }\n }\n }\n emitLastChunk?.()\n // assert(rowCount >= selectEnd)\n if (rowCount > selectEnd && lastChunk) {\n // truncate last chunk to row limit\n chunks[chunks.length - 1] = lastChunk.slice(0, selectEnd - (rowCount - lastChunk.length))\n }\n return chunks\n}\n\n/**\n * Read a page (data or dictionary) from a buffer.\n *\n * @param {DataReader} reader\n * @param {PageHeader} header\n * @param {ColumnDecoder} columnDecoder\n * @param {DecodedArray | undefined} dictionary\n * @param {DecodedArray | undefined} previousChunk\n * @param {number} pageStart skip this many rows in the page\n * @returns {DecodedArray}\n */\nexport function readPage(reader, header, columnDecoder, dictionary, previousChunk, pageStart) {\n const { type, element, schemaPath, codec, compressors } = columnDecoder\n // read compressed_page_size bytes\n const compressedBytes = new Uint8Array(\n reader.view.buffer, reader.view.byteOffset + reader.offset, header.compressed_page_size\n )\n reader.offset += header.compressed_page_size\n\n // parse page data by type\n if (header.type === 'DATA_PAGE') {\n const daph = header.data_page_header\n if (!daph) throw new Error('parquet data page header is undefined')\n\n // skip unnecessary non-nested pages\n if (pageStart > daph.num_values && isFlatColumn(schemaPath)) {\n return new Array(daph.num_values) // TODO: don't allocate array\n }\n\n const page = decompressPage(compressedBytes, Number(header.uncompressed_page_size), codec, compressors)\n const { definitionLevels, repetitionLevels, dataPage } = readDataPage(page, daph, columnDecoder)\n // assert(!daph.statistics?.null_count || daph.statistics.null_count === BigInt(daph.num_values - dataPage.length))\n\n // convert types, dereference dictionary, and assemble lists\n let values = convertWithDictionary(dataPage, dictionary, daph.encoding, columnDecoder)\n if (repetitionLevels.length || definitionLevels?.length) {\n const output = Array.isArray(previousChunk) ? previousChunk : []\n return assembleLists(output, definitionLevels, repetitionLevels, values, schemaPath)\n } else {\n // wrap nested flat data by depth\n for (let i = 2; i < schemaPath.length; i++) {\n if (schemaPath[i].element.repetition_type !== 'REQUIRED') {\n values = Array.from(values, e => [e])\n }\n }\n return values\n }\n } else if (header.type === 'DATA_PAGE_V2') {\n const daph2 = header.data_page_header_v2\n if (!daph2) throw new Error('parquet data page header v2 is undefined')\n\n // skip unnecessary pages\n if (pageStart > daph2.num_rows) {\n return new Array(daph2.num_values) // TODO: don't allocate array\n }\n\n const { definitionLevels, repetitionLevels, dataPage } =\n readDataPageV2(compressedBytes, header, columnDecoder)\n\n // convert types, dereference dictionary, and assemble lists\n const values = convertWithDictionary(dataPage, dictionary, daph2.encoding, columnDecoder)\n const output = Array.isArray(previousChunk) ? 
previousChunk : []\n return assembleLists(output, definitionLevels, repetitionLevels, values, schemaPath)\n } else if (header.type === 'DICTIONARY_PAGE') {\n const diph = header.dictionary_page_header\n if (!diph) throw new Error('parquet dictionary page header is undefined')\n\n const page = decompressPage(\n compressedBytes, Number(header.uncompressed_page_size), codec, compressors\n )\n\n const reader = { view: new DataView(page.buffer, page.byteOffset, page.byteLength), offset: 0 }\n return readPlain(reader, type, diph.num_values, element.type_length)\n } else {\n throw new Error(`parquet unsupported page type: ${header.type}`)\n }\n}\n\n/**\n * Read parquet header from a buffer.\n *\n * @import {ColumnData, ColumnDecoder, DataReader, DecodedArray, PageHeader, RowGroupSelect} from '../src/types.d.ts'\n * @param {DataReader} reader\n * @returns {PageHeader}\n */\nfunction parquetHeader(reader) {\n const header = deserializeTCompactProtocol(reader)\n\n // Parse parquet header from thrift data\n const type = PageType[header.field_1]\n const uncompressed_page_size = header.field_2\n const compressed_page_size = header.field_3\n const crc = header.field_4\n const data_page_header = header.field_5 && {\n num_values: header.field_5.field_1,\n encoding: Encoding[header.field_5.field_2],\n definition_level_encoding: Encoding[header.field_5.field_3],\n repetition_level_encoding: Encoding[header.field_5.field_4],\n statistics: header.field_5.field_5 && {\n max: header.field_5.field_5.field_1,\n min: header.field_5.field_5.field_2,\n null_count: header.field_5.field_5.field_3,\n distinct_count: header.field_5.field_5.field_4,\n max_value: header.field_5.field_5.field_5,\n min_value: header.field_5.field_5.field_6,\n },\n }\n const index_page_header = header.field_6\n const dictionary_page_header = header.field_7 && {\n num_values: header.field_7.field_1,\n encoding: Encoding[header.field_7.field_2],\n is_sorted: header.field_7.field_3,\n }\n const data_page_header_v2 = header.field_8 && {\n num_values: header.field_8.field_1,\n num_nulls: header.field_8.field_2,\n num_rows: header.field_8.field_3,\n encoding: Encoding[header.field_8.field_4],\n definition_levels_byte_length: header.field_8.field_5,\n repetition_levels_byte_length: header.field_8.field_6,\n is_compressed: header.field_8.field_7 === undefined ? 
true : header.field_8.field_7, // default true\n statistics: header.field_8.field_8,\n }\n\n return {\n type,\n uncompressed_page_size,\n compressed_page_size,\n crc,\n data_page_header,\n index_page_header,\n dictionary_page_header,\n data_page_header_v2,\n }\n}\n","import { assembleNested } from './assemble.js'\nimport { readColumn } from './column.js'\nimport { DEFAULT_PARSERS } from './convert.js'\nimport { getColumnRange } from './plan.js'\nimport { getSchemaPath } from './schema.js'\nimport { flatten } from './utils.js'\n\n/**\n * @import {AsyncColumn, AsyncRowGroup, DecodedArray, GroupPlan, ParquetParsers, ParquetReadOptions, QueryPlan, RowGroup, SchemaTree} from './types.js'\n */\n/**\n * Read a row group from a file-like object.\n *\n * @param {ParquetReadOptions} options\n * @param {QueryPlan} plan\n * @param {GroupPlan} groupPlan\n * @returns {AsyncRowGroup} resolves to column data\n */\nexport function readRowGroup(options, { metadata, columns }, groupPlan) {\n const { file, compressors, utf8 } = options\n\n /** @type {AsyncColumn[]} */\n const asyncColumns = []\n /** @type {ParquetParsers} */\n const parsers = { ...DEFAULT_PARSERS, ...options.parsers }\n\n // read column data\n for (const { file_path, meta_data } of groupPlan.rowGroup.columns) {\n if (file_path) throw new Error('parquet file_path not supported')\n if (!meta_data) throw new Error('parquet column metadata is undefined')\n\n // skip columns that are not requested\n const columnName = meta_data.path_in_schema[0]\n if (columns && !columns.includes(columnName)) continue\n\n const { startByte, endByte } = getColumnRange(meta_data)\n const columnBytes = endByte - startByte\n\n // skip columns larger than 1gb\n // TODO: stream process the data, returning only the requested rows\n if (columnBytes > 1 << 30) {\n console.warn(`parquet skipping huge column \"${meta_data.path_in_schema}\" ${columnBytes} bytes`)\n // TODO: set column to new Error('parquet column too large')\n continue\n }\n\n // wrap awaitable to ensure it's a promise\n /** @type {Promise<ArrayBuffer>} */\n const buffer = Promise.resolve(file.slice(startByte, endByte))\n\n // read column data async\n asyncColumns.push({\n pathInSchema: meta_data.path_in_schema,\n data: buffer.then(arrayBuffer => {\n const schemaPath = getSchemaPath(metadata.schema, meta_data.path_in_schema)\n const reader = { view: new DataView(arrayBuffer), offset: 0 }\n const subcolumn = meta_data.path_in_schema.join('.')\n const columnDecoder = {\n columnName: subcolumn,\n type: meta_data.type,\n element: schemaPath[schemaPath.length - 1].element,\n schemaPath,\n codec: meta_data.codec,\n parsers,\n compressors,\n utf8,\n }\n return readColumn(reader, groupPlan, columnDecoder, options.onPage)\n }),\n })\n }\n\n return { groupStart: groupPlan.groupStart, groupRows: groupPlan.groupRows, asyncColumns }\n}\n\n/**\n * @param {AsyncRowGroup} asyncGroup\n * @param {number} selectStart\n * @param {number} selectEnd\n * @param {string[] | undefined} columns\n * @param {'object' | 'array'} [rowFormat]\n * @returns {Promise<Record<string, any>[]>} resolves to row data\n */\nexport async function asyncGroupToRows({ asyncColumns }, selectStart, selectEnd, columns, rowFormat) {\n const groupData = new Array(selectEnd)\n\n // columnData[i] for asyncColumns[i]\n // TODO: do it without flatten\n const columnDatas = await Promise.all(asyncColumns.map(({ data }) => data.then(flatten)))\n\n // careful mapping of column order for rowFormat: array\n const includedColumnNames = asyncColumns\n .map(child => child.pathInSchema[0])\n .filter(name 
=> !columns || columns.includes(name))\n const columnOrder = columns ?? includedColumnNames\n const columnIndexes = columnOrder.map(name => asyncColumns.findIndex(column => column.pathInSchema[0] === name))\n\n // transpose columns into rows\n for (let row = selectStart; row < selectEnd; row++) {\n if (rowFormat === 'object') {\n // return each row as an object\n /** @type {Record<string, any>} */\n const rowData = {}\n for (let i = 0; i < asyncColumns.length; i++) {\n rowData[asyncColumns[i].pathInSchema[0]] = columnDatas[i][row]\n }\n groupData[row] = rowData\n } else {\n // return each row as an array\n const rowData = new Array(asyncColumns.length)\n for (let i = 0; i < columnOrder.length; i++) {\n if (columnIndexes[i] >= 0) {\n rowData[i] = columnDatas[columnIndexes[i]][row]\n }\n }\n groupData[row] = rowData\n }\n }\n return groupData\n}\n\n/**\n * Assemble physical columns into top-level columns asynchronously.\n *\n * @param {AsyncRowGroup} asyncRowGroup\n * @param {SchemaTree} schemaTree\n * @returns {AsyncRowGroup}\n */\nexport function assembleAsync(asyncRowGroup, schemaTree) {\n const { asyncColumns } = asyncRowGroup\n /** @type {AsyncColumn[]} */\n const assembled = []\n for (const child of schemaTree.children) {\n if (child.children.length) {\n const childColumns = asyncColumns.filter(column => column.pathInSchema[0] === child.element.name)\n if (!childColumns.length) continue\n\n // wait for all child columns to be read\n /** @type {Map<string, DecodedArray>} */\n const flatData = new Map()\n const data = Promise.all(childColumns.map(column => {\n return column.data.then(columnData => {\n flatData.set(column.pathInSchema.join('.'), flatten(columnData))\n })\n })).then(() => {\n // assemble the column\n assembleNested(flatData, child)\n const flatColumn = flatData.get(child.path.join('.'))\n if (!flatColumn) throw new Error('parquet column data not assembled')\n return [flatColumn]\n })\n\n assembled.push({ pathInSchema: child.path, data })\n } else {\n // leaf node, return the column\n const asyncColumn = asyncColumns.find(column => column.pathInSchema[0] === child.element.name)\n if (asyncColumn) {\n assembled.push(asyncColumn)\n }\n }\n }\n return { ...asyncRowGroup, asyncColumns: assembled }\n}\n","import { parquetMetadataAsync, parquetSchema } from './metadata.js'\nimport { parquetPlan, prefetchAsyncBuffer } from './plan.js'\nimport { assembleAsync, asyncGroupToRows, readRowGroup } from './rowgroup.js'\nimport { concat, flatten } from './utils.js'\n\n/**\n * @import {AsyncRowGroup, DecodedArray, ParquetReadOptions} from '../src/types.js'\n */\n/**\n * Read parquet data rows from a file-like object.\n * Reads the minimal number of row groups and columns to satisfy the request.\n *\n * Returns a void promise when complete.\n * Errors are thrown on the returned promise.\n * Data is returned in callbacks onComplete, onChunk, onPage, NOT the return promise.\n * See parquetReadObjects for a more convenient API.\n *\n * @param {ParquetReadOptions} options read options\n * @returns {Promise<void>} resolves when all requested rows and columns are parsed, all errors are thrown here\n */\nexport async function parquetRead(options) {\n // load metadata if not provided\n options.metadata ??= await parquetMetadataAsync(options.file)\n\n // read row groups\n const asyncGroups = parquetReadAsync(options)\n\n const { rowStart = 0, rowEnd, columns, onChunk, onComplete, rowFormat } = options\n\n // skip assembly if no onComplete or onChunk, but wait for reading to finish\n if (!onComplete && !onChunk) {\n for (const { 
asyncColumns } of asyncGroups) {\n for (const { data } of asyncColumns) await data\n }\n return\n }\n\n // assemble struct columns\n const schemaTree = parquetSchema(options.metadata)\n const assembled = asyncGroups.map(arg => assembleAsync(arg, schemaTree))\n\n // onChunk emit all chunks (don't await)\n if (onChunk) {\n for (const asyncGroup of assembled) {\n for (const asyncColumn of asyncGroup.asyncColumns) {\n asyncColumn.data.then(columnDatas => {\n let rowStart = asyncGroup.groupStart\n for (const columnData of columnDatas) {\n onChunk({\n columnName: asyncColumn.pathInSchema[0],\n columnData,\n rowStart,\n rowEnd: rowStart + columnData.length,\n })\n rowStart += columnData.length\n }\n })\n }\n }\n }\n\n // onComplete transpose column chunks to rows\n if (onComplete) {\n /** @type {any[][]} */\n const rows = []\n for (const asyncGroup of assembled) {\n // filter to rows in range\n const selectStart = Math.max(rowStart - asyncGroup.groupStart, 0)\n const selectEnd = Math.min((rowEnd ?? Infinity) - asyncGroup.groupStart, asyncGroup.groupRows)\n // transpose column chunks to rows in output\n const groupData = await asyncGroupToRows(asyncGroup, selectStart, selectEnd, columns, rowFormat)\n concat(rows, groupData.slice(selectStart, selectEnd))\n }\n onComplete(rows)\n } else {\n // wait for all async groups to finish (complete takes care of this)\n for (const { asyncColumns } of assembled) {\n for (const { data } of asyncColumns) await data\n }\n }\n}\n\n/**\n * @param {ParquetReadOptions} options read options\n * @returns {AsyncRowGroup[]}\n */\nexport function parquetReadAsync(options) {\n if (!options.metadata) throw new Error('parquet requires metadata')\n // TODO: validate options (start, end, columns, etc)\n\n // prefetch byte ranges\n const plan = parquetPlan(options)\n options.file = prefetchAsyncBuffer(options.file, plan)\n\n // read row groups\n return plan.groups.map(groupPlan => readRowGroup(options, plan, groupPlan))\n}\n\n/**\n * Reads a single column from a parquet file.\n *\n * @param {ParquetReadOptions} options\n * @returns {Promise<DecodedArray>}\n */\nexport async function parquetReadColumn(options) {\n if (options.columns?.length !== 1) {\n throw new Error('parquetReadColumn expected columns: [columnName]')\n }\n options.metadata ??= await parquetMetadataAsync(options.file)\n const asyncGroups = parquetReadAsync(options)\n\n // assemble struct columns\n const schemaTree = parquetSchema(options.metadata)\n const assembled = asyncGroups.map(arg => assembleAsync(arg, schemaTree))\n\n /** @type {DecodedArray[]} */\n const columnData = []\n for (const rg of assembled) {\n columnData.push(flatten(await rg.asyncColumns[0].data))\n }\n return flatten(columnData)\n}\n\n/**\n * This is a helper function to read parquet row data as a promise.\n * It is a wrapper around the more configurable parquetRead function.\n *\n * @param {Omit<ParquetReadOptions, 'onComplete'>} options\n * @returns {Promise<Record<string, any>[]>} resolves when all requested rows and columns are parsed\n*/\nexport function parquetReadObjects(options) {\n return new Promise((onComplete, reject) => {\n parquetRead({\n rowFormat: 'object',\n ...options,\n onComplete,\n }).catch(reject)\n })\n}\n","\nconst geometryTypePoint = 1\nconst geometryTypeLineString = 2\nconst geometryTypePolygon = 3\nconst geometryTypeMultiPoint = 4\nconst geometryTypeMultiLineString = 5\nconst geometryTypeMultiPolygon = 6\nconst geometryTypeGeometryCollection = 7\nconst geometryTypeCircularString = 8\nconst geometryTypeCompoundCurve = 9\nconst geometryTypeCurvePolygon = 10\nconst 
geometryTypeMultiCurve = 11\nconst geometryTypeMultiSurface = 12\nconst geometryTypeCurve = 13\nconst geometryTypeSurface = 14\nconst geometryTypePolyhedralSurface = 15\nconst geometryTypeTIN = 16\nconst geometryTypeTriangle = 17\nconst geometryTypeCircle = 18\nconst geometryTypeGeodesicString = 19\nconst geometryTypeEllipticalCurve = 20\nconst geometryTypeNurbsCurve = 21\nconst geometryTypeClothoid = 22\nconst geometryTypeSpiralCurve = 23\nconst geometryTypeCompoundSurface = 24\n\n/**\n * WKB (Well Known Binary) decoder for geometry objects.\n *\n * @import { Geometry } from '../src/geojson.js'\n * @param {Uint8Array} wkb\n * @returns {Geometry} GeoJSON geometry object\n */\nexport function decodeWKB(wkb) {\n const dv = new DataView(wkb.buffer, wkb.byteOffset, wkb.byteLength)\n let offset = 0\n\n // Byte order: 0 = big-endian, 1 = little-endian\n const byteOrder = wkb[offset]; offset += 1\n const isLittleEndian = byteOrder === 1\n\n // Read geometry type\n const geometryType = dv.getUint32(offset, isLittleEndian)\n offset += 4\n\n // WKB geometry types (OGC):\n if (geometryType === geometryTypePoint) {\n // Point\n const x = dv.getFloat64(offset, isLittleEndian); offset += 8\n const y = dv.getFloat64(offset, isLittleEndian); offset += 8\n return { type: 'Point', coordinates: [x, y] }\n } else if (geometryType === geometryTypeLineString) {\n // LineString\n const numPoints = dv.getUint32(offset, isLittleEndian); offset += 4\n const coords = []\n for (let i = 0; i < numPoints; i++) {\n const x = dv.getFloat64(offset, isLittleEndian); offset += 8\n const y = dv.getFloat64(offset, isLittleEndian); offset += 8\n coords.push([x, y])\n }\n return { type: 'LineString', coordinates: coords }\n } else if (geometryType === geometryTypePolygon) {\n // Polygon\n const numRings = dv.getUint32(offset, isLittleEndian); offset += 4\n const coords = []\n for (let r = 0; r < numRings; r++) {\n const numPoints = dv.getUint32(offset, isLittleEndian); offset += 4\n const ring = []\n for (let p = 0; p < numPoints; p++) {\n const x = dv.getFloat64(offset, isLittleEndian); offset += 8\n const y = dv.getFloat64(offset, isLittleEndian); offset += 8\n ring.push([x, y])\n }\n coords.push(ring)\n }\n return { type: 'Polygon', coordinates: coords }\n } else if (geometryType === geometryTypeMultiPolygon) {\n // MultiPolygon\n const numPolygons = dv.getUint32(offset, isLittleEndian); offset += 4\n const polygons = []\n for (let i = 0; i < numPolygons; i++) {\n // Each polygon has its own byte order & geometry type\n const polyIsLittleEndian = wkb[offset] === 1; offset += 1\n const polyType = dv.getUint32(offset, polyIsLittleEndian); offset += 4\n if (polyType !== geometryTypePolygon) {\n throw new Error(`Expected Polygon in MultiPolygon, got ${polyType}`)\n }\n const numRings = dv.getUint32(offset, polyIsLittleEndian); offset += 4\n\n const pgCoords = []\n for (let r = 0; r < numRings; r++) {\n const numPoints = dv.getUint32(offset, polyIsLittleEndian); offset += 4\n const ring = []\n for (let p = 0; p < numPoints; p++) {\n const x = dv.getFloat64(offset, polyIsLittleEndian); offset += 8\n const y = dv.getFloat64(offset, polyIsLittleEndian); offset += 8\n ring.push([x, y])\n }\n pgCoords.push(ring)\n }\n polygons.push(pgCoords)\n }\n return { type: 'MultiPolygon', coordinates: polygons }\n } else if (geometryType === geometryTypeMultiPoint) {\n // MultiPoint\n const numPoints = dv.getUint32(offset, isLittleEndian); offset += 4\n const points = []\n for (let i = 0; i < numPoints; i++) {\n // Each point has its own byte 
order & geometry type\n const pointIsLittleEndian = wkb[offset] === 1; offset += 1\n const pointType = dv.getUint32(offset, pointIsLittleEndian); offset += 4\n if (pointType !== geometryTypePoint) {\n throw new Error(`Expected Point in MultiPoint, got ${pointType}`)\n }\n const x = dv.getFloat64(offset, pointIsLittleEndian); offset += 8\n const y = dv.getFloat64(offset, pointIsLittleEndian); offset += 8\n points.push([x, y])\n }\n return { type: 'MultiPoint', coordinates: points }\n } else if (geometryType === geometryTypeMultiLineString) {\n // MultiLineString\n const numLineStrings = dv.getUint32(offset, isLittleEndian); offset += 4\n const lineStrings = []\n for (let i = 0; i < numLineStrings; i++) {\n // Each line has its own byte order & geometry type\n const lineIsLittleEndian = wkb[offset] === 1; offset += 1\n const lineType = dv.getUint32(offset, lineIsLittleEndian); offset += 4\n if (lineType !== geometryTypeLineString) {\n throw new Error(`Expected LineString in MultiLineString, got ${lineType}`)\n }\n const numPoints = dv.getUint32(offset, lineIsLittleEndian); offset += 4\n const coords = []\n for (let p = 0; p < numPoints; p++) {\n const x = dv.getFloat64(offset, lineIsLittleEndian); offset += 8\n const y = dv.getFloat64(offset, lineIsLittleEndian); offset += 8\n coords.push([x, y])\n }\n lineStrings.push(coords)\n }\n return { type: 'MultiLineString', coordinates: lineStrings }\n } else {\n throw new Error(`Unsupported geometry type: ${geometryType}`)\n }\n}\n","import { parquetMetadataAsync, parquetReadObjects } from 'hyparquet'\nimport { decodeWKB } from './wkb.js'\n\n/**\n * Convert a GeoParquet file to GeoJSON.\n * Input is an AsyncBuffer representing a GeoParquet file.\n * An AsyncBuffer is a buffer-like object that can be read asynchronously.\n *\n * @import { AsyncBuffer, Compressors } from 'hyparquet'\n * @import { Feature, GeoJSON } from '../src/geojson.js'\n * @param {Object} options\n * @param {AsyncBuffer} options.file\n * @param {Compressors} [options.compressors]\n * @returns {Promise<GeoJSON>}\n */\nexport async function toGeoJson({ file, compressors }) {\n const metadata = await parquetMetadataAsync(file)\n const geoMetadata = metadata.key_value_metadata?.find(kv => kv.key === 'geo')\n if (!geoMetadata) {\n throw new Error('Invalid GeoParquet file: missing \"geo\" metadata')\n }\n\n // Geoparquet metadata\n const geoSchema = JSON.parse(geoMetadata.value || '{}')\n\n // Read all parquet data\n const data = await parquetReadObjects({ file, metadata, utf8: false, compressors })\n\n /** @type {Feature[]} */\n const features = []\n const primaryColumn = geoSchema.primary_column || 'geometry'\n for (const row of data) {\n const wkb = row[primaryColumn]\n if (!wkb) {\n // No geometry\n continue\n }\n\n const geometry = decodeWKB(wkb)\n\n // Extract properties (all fields except geometry)\n /** @type {Record<string, any>} */\n const properties = {}\n for (const key of Object.keys(row)) {\n const value = row[key]\n if (key !== primaryColumn && value !== null) {\n properties[key] = value\n }\n }\n\n /** @type {Feature} */\n const feature = {\n type: 'Feature',\n geometry,\n properties,\n }\n\n features.push(feature)\n }\n\n return {\n type: 'FeatureCollection',\n features,\n }\n}\n","import { asyncBufferFromUrl, cachedAsyncBuffer } from 'hyparquet'\nimport { toGeoJson } from '../src/index.js'\n\nasync function initMap() {\n // @ts-expect-error MapsLibrary\n const { Map } = await google.maps.importLibrary('maps')\n const div = /** @type {HTMLElement} */document.getElementById('map')\n // Create a 
new map\n const map = new Map(div, {\n center: { lat: 39, lng: -98 },\n zoom: 4,\n })\n\n // URL or path to your GeoParquet file\n const parquetUrl = 'https://hyparam.github.io/geoparquet/demo/polys.parquet'\n\n try {\n // Read the GeoParquet file and convert to GeoJSON\n const file = cachedAsyncBuffer(\n await asyncBufferFromUrl({ url: parquetUrl, byteLength: 29838 })\n )\n console.log('GeoParquet file:', file)\n const geojson = await toGeoJson({ file })\n\n console.log('GeoJSON:', geojson)\n\n // Add the GeoJSON data to the map\n map.data.addGeoJson(geojson)\n } catch (error) {\n console.error('Error loading or parsing GeoParquet file:', error)\n }\n}\ninitMap()\n"],"names":["ParquetType","Encoding","FieldRepetitionType","ConvertedType","CompressionCodec","PageType","DEFAULT_PARSERS","timestampFromMilliseconds","millis","Date","Number","timestampFromMicroseconds","micros","timestampFromNanoseconds","nanos","dateFromDays","days","convertWithDictionary","data","dictionary","encoding","columnDecoder","endsWith","output","Uint8Array","constructor","length","i","convert","element","parsers","utf8","type","converted_type","ctype","logical_type","ltype","factor","scale","arr","Array","parseDecimal","parseInt96Nanos","decoder","TextDecoder","map","v","JSON","parse","decode","Error","bitWidth","isSigned","BigInt64Array","BigUint64Array","buffer","byteOffset","BigInt","Int32Array","Uint32Array","from","parseFloat16","unit","parser","bytes","value","byte","bits","int16","sign","exp","frac","NaN","Infinity","schemaTree","schema","rootIndex","path","children","count","num_children","childElement","child","name","push","getSchemaPath","tree","part","find","getMaxRepetitionLevel","schemaPath","maxLevel","repetition_type","getMaxDefinitionLevel","slice","CompactType","deserializeTCompactProtocol","reader","lastFid","offset","view","byteLength","fid","newLastFid","readFieldBegin","readElement","getInt8","zigzag","readVarInt","readZigZag","readZigZagBigInt","getFloat64","stringLength","strBytes","elemType","listSize","sizeType","getUint8","size","getCompactType","readCollectionBegin","boolType","values","structValues","structLastFid","structFieldType","structFid","result","shift","readVarBigInt","delta","async","parquetMetadataAsync","asyncBuffer","initialFetchSize","defaultInitialFetchSize","footerOffset","Math","max","footerBuffer","footerView","DataView","getUint32","metadataLength","metadataOffset","metadataBuffer","combinedBuffer","ArrayBuffer","combinedView","set","parquetMetadata","arrayBuffer","metadataLengthOffset","metadata","version","field_1","field_2","field","type_length","field_3","field_4","field_5","field_6","field_7","precision","field_8","field_id","field_9","logicalType","field_10","columnSchema","filter","e","num_rows","row_groups","rowGroup","columns","column","columnIndex","file_path","file_offset","meta_data","encodings","path_in_schema","codec","num_values","total_uncompressed_size","total_compressed_size","key_value_metadata","data_page_offset","index_page_offset","dictionary_page_offset","field_11","statistics","convertStats","field_12","encoding_stats","field_13","encodingStat","page_type","bloom_filter_offset","field_14","bloom_filter_length","field_15","size_statistics","field_16","unencoded_byte_array_data_bytes","repetition_level_histogram","definition_level_histogram","offset_index_offset","offset_index_length","column_index_offset","column_index_length","crypto_metadata","encrypted_column_metadata","total_byte_size","sorting_columns","sortingColumn","column_idx","descendin
g","nulls_first","ordinal","keyValue","key","created_by","metadata_length","isAdjustedToUTC","timeUnit","stats","convertMetadata","min","null_count","distinct_count","max_value","min_value","is_max_value_exact","is_min_value_exact","undefined","getFloat32","getInt32","getBigInt64","concat","aaa","bbb","asyncBufferFromUrl","url","requestInit","fetch","customFetch","globalThis","method","then","res","ok","status","headers","get","parseInt","byteLengthFromUrl","init","start","end","Headers","endStr","body","cachedAsyncBuffer","minSize","cache","Map","cacheKey","cached","promise","flatten","chunks","chunk","getColumnRange","columnOffset","startByte","endByte","assembleLists","definitionLevels","repetitionLevels","n","maxDefinitionLevel","repetitionPath","valueIndex","containerStack","currentContainer","currentDepth","currentDefLevel","currentRepLevel","at","def","rep","pop","newList","assembleNested","subcolumnData","depth","join","optional","nextDepth","firstChild","isListLike","sublist","subDepth","subcolumn","flattenAtDepth","delete","keyChild","valueChild","isMapLike","mapName","keys","out","assembleMaps","invertDepth","struct","childData","inverted","invertStruct","obj","j","Object","deltaBinaryUnpack","int32","blockSize","miniblockPerBlock","outputIndex","valuesPerMiniblock","minDelta","bitWidths","bitpackPos","miniblockCount","mask","ceil","deltaLengthByteArray","lengths","clz32","readRleBitPackedHybrid","width","startOffset","seen","header","readBitPacked","readRle","left","right","byteStreamSplit","typeLength","byteWidth","b","Float32Array","Float64Array","split","subarray","readPlain","fixedLength","bitOffset","readPlainBoolean","align","readPlainInt32","readPlainInt64","low","high","readPlainInt96","readPlainFloat","readPlainDouble","readPlainByteArray","readPlainByteArrayFixed","aligned","WORD_MASK","copyBytes","fromArray","fromPos","toArray","toPos","readDataPage","daph","dataPage","maxRepetitionLevel","readRepetitionLevels","numNulls","readDefinitionLevels","nValues","x","decompressPage","compressedBytes","uncompressed_page_size","compressors","page","customDecompressor","input","inputLength","outputLength","pos","outPos","c","len","isNaN","lengthSize","snappyUncompress","readDataPageV2","ph","daph2","data_page_header_v2","repetition_levels_byte_length","readRepetitionLevelsV2","definition_levels_byte_length","readDefinitionLevelsV2","uncompressedPageSize","is_compressed","pageView","pageReader","num_nulls","prefixData","suffixData","suffix","deltaByteArray","readColumn","groupStart","selectStart","selectEnd","onPage","columnName","lastChunk","rowCount","emitLastChunk","columnData","rowStart","rowEnd","parquetHeader","readPage","lastChunkLength","previousChunk","pageStart","compressed_page_size","data_page_header","isFlatColumn","isArray","diph","dictionary_page_header","crc","definition_level_encoding","repetition_level_encoding","index_page_header","is_sorted","asyncGroupToRows","asyncColumns","rowFormat","groupData","columnDatas","Promise","all","includedColumnNames","pathInSchema","includes","columnOrder","columnIndexes","findIndex","row","rowData","parquetRead","options","file","asyncGroups","plan","groups","fetches","groupRows","groupEnd","ranges","groupSize","isFinite","parquetPlan","promises","index","endOffset","prefetchAsyncBuffer","groupPlan","columnBytes","console","warn","resolve","readRowGroup","parquetReadAsync","onChunk","onComplete","parquetSchema","assembled","arg","asyncRowGroup","childColumns","flatData","flatColumn","asyncColumn","assembleAsync","asyncGroup","
rows","decodeWKB","wkb","dv","byteOrder","isLittleEndian","geometryType","y","coordinates","numPoints","coords","numRings","r","ring","p","numPolygons","polygons","polyIsLittleEndian","polyType","pgCoords","points","pointIsLittleEndian","pointType","numLineStrings","lineStrings","lineIsLittleEndian","lineType","toGeoJson","geoMetadata","kv","geoSchema","reject","catch","features","primaryColumn","primary_column","geometry","properties","feature","google","maps","importLibrary","document","getElementById","center","lat","lng","zoom","log","geojson","addGeoJson","error","initMap"],"mappings":"AACO,MAAMA,EAAc,CACzB,UACA,QACA,QACA,QACA,QACA,SACA,aACA,wBAIWC,EAAW,CACtB,QACA,gBACA,mBACA,MACA,aACA,sBACA,0BACA,mBACA,iBACA,qBAIWC,EAAsB,CACjC,WACA,WACA,YAIWC,EAAgB,CAC3B,OACA,MACA,gBACA,OACA,OACA,UACA,OACA,cACA,cACA,mBACA,mBACA,SACA,UACA,UACA,UACA,QACA,SACA,SACA,SACA,OACA,OACA,YAIWC,EAAmB,CAC9B,eACA,SACA,OACA,MACA,SACA,MACA,OACA,WAIWC,EAAW,CACtB,YACA,aACA,kBACA,gBCpEWC,EAAkB,CAC7BC,0BAA0BC,GACjB,IAAIC,KAAKC,OAAOF,IAEzBG,0BAA0BC,GACjB,IAAIH,KAAKC,OAAOE,EAAS,QAElCC,yBAAyBC,GAChB,IAAIL,KAAKC,OAAOI,EAAQ,WAEjCC,aAAaC,GAEJ,IAAIP,KADS,MACJO,IAab,SAASC,EAAsBC,EAAMC,EAAYC,EAAUC,GAChE,GAAIF,GAAcC,EAASE,SAAS,eAAgB,CAClD,IAAIC,EAASL,EACTA,aAAgBM,cAAgBL,aAAsBK,cAExDD,EAAS,IAAIJ,EAAWM,YAAYP,EAAKQ,SAE3C,IAAK,IAAIC,EAAI,EAAGA,EAAIT,EAAKQ,OAAQC,IAC/BJ,EAAOI,GAAKR,EAAWD,EAAKS,IAE9B,OAAOJ,CACT,CACE,OAAOK,EAAQV,EAAMG,EAEzB,CASO,SAASO,EAAQV,EAAMG,GAC5B,MAAMQ,QAAEA,EAAOC,QAAEA,EAAOC,KAAEA,GAAO,GAASV,GACpCW,KAAEA,EAAMC,eAAgBC,EAAOC,aAAcC,GAAUP,EAC7D,GAAc,YAAVK,EAAqB,CACvB,MACMG,EAAS,MADDR,EAAQS,OAAS,GAEzBC,EAAM,IAAIC,MAAMtB,EAAKQ,QAC3B,IAAK,IAAIC,EAAI,EAAGA,EAAIY,EAAIb,OAAQC,IAC1BT,EAAK,aAAcM,WACrBe,EAAIZ,GAAKc,EAAavB,EAAKS,IAAMU,EAEjCE,EAAIZ,GAAKjB,OAAOQ,EAAKS,IAAMU,EAG/B,OAAOE,CACT,CACA,IAAKL,GAAkB,UAATF,EAAkB,CAC9B,MAAMO,EAAM,IAAIC,MAAMtB,EAAKQ,QAC3B,IAAK,IAAIC,EAAI,EAAGA,EAAIY,EAAIb,OAAQC,IAC9BY,EAAIZ,GAAKG,EAAQjB,yBAAyB6B,EAAgBxB,EAAKS,KAEjE,OAAOY,CACT,CACA,GAAc,SAAVL,EAAkB,CACpB,MAAMK,EAAM,IAAIC,MAAMtB,EAAKQ,QAC3B,IAAK,IAAIC,EAAI,EAAGA,EAAIY,EAAIb,OAAQC,IAC9BY,EAAIZ,GAAKG,EAAQf,aAAaG,EAAKS,IAErC,OAAOY,CACT,CACA,GAAc,qBAAVL,EAA8B,CAChC,MAAMK,EAAM,IAAIC,MAAMtB,EAAKQ,QAC3B,IAAK,IAAIC,EAAI,EAAGA,EAAIY,EAAIb,OAAQC,IAC9BY,EAAIZ,GAAKG,EAAQvB,0BAA0BW,EAAKS,IAElD,OAAOY,CACT,CACA,GAAc,qBAAVL,EAA8B,CAChC,MAAMK,EAAM,IAAIC,MAAMtB,EAAKQ,QAC3B,IAAK,IAAIC,EAAI,EAAGA,EAAIY,EAAIb,OAAQC,IAC9BY,EAAIZ,GAAKG,EAAQnB,0BAA0BO,EAAKS,IAElD,OAAOY,CACT,CACA,GAAc,SAAVL,EAAkB,CACpB,MAAMS,EAAU,IAAIC,YACpB,OAAO1B,EAAK2B,IAAIC,GAAKC,KAAKC,MAAML,EAAQM,OAAOH,IACjD,CACA,GAAc,SAAVZ,EACF,MAAM,IAAIgB,MAAM,8BAElB,GAAc,aAAVhB,EACF,MAAM,IAAIgB,MAAM,kCAElB,GAAc,SAAVhB,GAAoC,WAAhBE,GAAOJ,MAAqBD,GAAiB,eAATC,EAAuB,CACjF,MAAMW,EAAU,IAAIC,YACdL,EAAM,IAAIC,MAAMtB,EAAKQ,QAC3B,IAAK,IAAIC,EAAI,EAAGA,EAAIY,EAAIb,OAAQC,IAC9BY,EAAIZ,GAAKT,EAAKS,IAAMgB,EAAQM,OAAO/B,EAAKS,IAE1C,OAAOY,CACT,CACA,GAAc,YAAVL,GAAuC,YAAhBE,GAAOJ,MAAyC,KAAnBI,EAAMe,WAAoBf,EAAMgB,SAAU,CAChG,GAAIlC,aAAgBmC,cAClB,OAAO,IAAIC,eAAepC,EAAKqC,OAAQrC,EAAKsC,WAAYtC,EAAKQ,QAE/D,MAAMa,EAAM,IAAIe,eAAepC,EAAKQ,QACpC,IAAK,IAAIC,EAAI,EAAGA,EAAIY,EAAIb,OAAQC,IAAKY,EAAIZ,GAAK8B,OAAOvC,EAAKS,IAC1D,OAAOY,CACT,CACA,GAAc,YAAVL,GAAuC,YAAhBE,GAAOJ,MAAyC,KAAnBI,EAAMe,WAAoBf,EAAMgB,SAAU,CAChG,GAAIlC,aAAgBwC,WAClB,OAAO,IAAIC,YAAYzC,EAAKqC,OAAQrC,EAAKsC,WAAYtC,EAAKQ,QAE5D,MAAMa,EAAM,IAAIoB,YAAYzC,EAAKQ,QACjC,IAAK,IAAIC,EAAI,EAAGA,EAAIY,EAAIb,OAAQC,IAAKY,EAAIZ,GAAKT,EAAKS,GACnD,OAAOY,CACT,CACA,GAAoB,YAAhBH,GAAOJ,KACT,OAAOQ,MAAMoB,KAAK1C,GAAM2B,IAAIgB,GAE9B,GAAoB,cAAhBzB,GAAOJ,KAAsB,CAC/B,MAAM8B,KAAEA,GAAS1B,EAEjB,IAAI2B,EAASjC
,EAAQvB,0BACR,WAATuD,IAAmBC,EAASjC,EAAQnB,2BAC3B,UAATmD,IAAkBC,EAASjC,EAAQjB,0BACvC,MAAM0B,EAAM,IAAIC,MAAMtB,EAAKQ,QAC3B,IAAK,IAAIC,EAAI,EAAGA,EAAIY,EAAIb,OAAQC,IAC9BY,EAAIZ,GAAKoC,EAAO7C,EAAKS,IAEvB,OAAOY,CACT,CACA,OAAOrB,CACT,CAMO,SAASuB,EAAauB,GAC3B,IAAIC,EAAQ,EACZ,IAAK,MAAMC,KAAQF,EACjBC,EAAgB,IAARA,EAAcC,EAIxB,MAAMC,EAAsB,EAAfH,EAAMtC,OAKnB,OAJIuC,GAAS,IAAME,EAAO,KACxBF,GAAS,GAAKE,GAGTF,CACT,CAOA,SAASvB,EAAgBuB,GAGvB,OAAc,kBAFAA,GAAS,KAAO,WACT,oBAARA,EAEf,CAMO,SAASJ,EAAaG,GAC3B,IAAKA,EAAO,OACZ,MAAMI,EAAQJ,EAAM,IAAM,EAAIA,EAAM,GAC9BK,EAAOD,GAAS,MAAU,EAC1BE,EAAMF,GAAS,GAAK,GACpBG,EAAe,KAARH,EACb,OAAY,IAARE,EAAkBD,EAAO,IAAK,IAAOE,EAAO,MACpC,KAARD,EAAqBC,EAAOC,IAAMH,GAAOI,KACtCJ,EAAO,IAAMC,EAAM,KAAO,EAAIC,EAAO,KAC9C,CC1LA,SAASG,EAAWC,EAAQC,EAAWC,GACrC,MAAMhD,EAAU8C,EAAOC,GACjBE,EAAW,GACjB,IAAIC,EAAQ,EAGZ,GAAIlD,EAAQmD,aACV,KAAOF,EAASpD,OAASG,EAAQmD,cAAc,CAC7C,MAAMC,EAAeN,EAAOC,EAAYG,GAClCG,EAAQR,EAAWC,EAAQC,EAAYG,EAAO,IAAIF,EAAMI,EAAaE,OAC3EJ,GAASG,EAAMH,MACfD,EAASM,KAAKF,EAChB,CAGF,MAAO,CAAEH,QAAOlD,UAASiD,WAAUD,OACrC,CASO,SAASQ,EAAcV,EAAQQ,GACpC,IAAIG,EAAOZ,EAAWC,EAAQ,EAAG,IACjC,MAAME,EAAO,CAACS,GACd,IAAK,MAAMC,KAAQJ,EAAM,CACvB,MAAMD,EAAQI,EAAKR,SAASU,KAAKN,GAASA,EAAMrD,QAAQsD,OAASI,GACjE,IAAKL,EAAO,MAAM,IAAIhC,MAAM,qCAAqCiC,KACjEN,EAAKO,KAAKF,GACVI,EAAOJ,CACT,CACA,OAAOL,CACT,CAQO,SAASY,EAAsBC,GACpC,IAAIC,EAAW,EACf,IAAK,MAAM9D,QAAEA,KAAa6D,EACQ,aAA5B7D,EAAQ+D,iBACVD,IAGJ,OAAOA,CACT,CAQO,SAASE,EAAsBH,GACpC,IAAIC,EAAW,EACf,IAAK,MAAM9D,QAAEA,KAAa6D,EAAWI,MAAM,GACT,aAA5BjE,EAAQ+D,iBACVD,IAGJ,OAAOA,CACT,CC3EO,MAAMI,EACL,EADKA,EAEL,EAFKA,EAGJ,EAHIA,EAIL,EAJKA,EAKN,EALMA,EAMN,EANMA,EAON,EAPMA,EAQH,EARGA,EASH,EATGA,EAUL,EAVKA,EAaH,GAUH,SAASC,EAA4BC,GAC1C,IAAIC,EAAU,EAEd,MAAMjC,EAAQ,CAAA,EAEd,KAAOgC,EAAOE,OAASF,EAAOG,KAAKC,YAAY,CAE7C,MAAOrE,EAAMsE,EAAKC,GAAcC,EAAeP,EAAQC,GAGvD,GAFAA,EAAUK,EAENvE,IAAS+D,EACX,MAIF9B,EAAM,SAASqC,KAASG,EAAYR,EAAQjE,EAC9C,CAEA,OAAOiC,CACT,CAUA,SAASwC,EAAYR,EAAQjE,GAC3B,OAAQA,GACR,KAAK+D,EACH,OAAO,EACT,KAAKA,EACH,OAAO,EACT,KAAKA,EAEH,OAAOE,EAAOG,KAAKM,QAAQT,EAAOE,UACpC,KAAKJ,EACL,KAAKA,EACH,OA0FG,SAAoBE,GACzB,MAAMU,EAASC,EAAWX,GAE1B,OAAOU,IAAW,IAAe,EAATA,EAC1B,CA9FWE,CAAWZ,GACpB,KAAKF,EACH,OAAOe,EAAiBb,GAC1B,KAAKF,EAAoB,CACvB,MAAM9B,EAAQgC,EAAOG,KAAKW,WAAWd,EAAOE,QAAQ,GAEpD,OADAF,EAAOE,QAAU,EACVlC,CACT,CACA,KAAK8B,EAAoB,CACvB,MAAMiB,EAAeJ,EAAWX,GAC1BgB,EAAW,IAAIzF,WAAWyE,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAQa,GAE5F,OADAf,EAAOE,QAAUa,EACVC,CACT,CACA,KAAKlB,EAAkB,CACrB,MAAOmB,EAAUC,GAsIrB,SAA6BlB,GAC3B,MAAMmB,EAAWnB,EAAOG,KAAKiB,SAASpB,EAAOE,UACvCmB,EAAOF,GAAY,EACnBpF,EAAOuF,EAAeH,GAC5B,GAAa,KAATE,EAAa,CAEf,MAAO,CAACtF,EADQ4E,EAAWX,GAE7B,CACA,MAAO,CAACjE,EAAMsF,EAChB,CA/IiCE,CAAoBvB,GAC3CwB,EAAWP,IAAanB,GAAoBmB,IAAanB,EACzD2B,EAAS,IAAIlF,MAAM2E,GACzB,IAAK,IAAIxF,EAAI,EAAGA,EAAIwF,EAAUxF,IAC5B+F,EAAO/F,GAAK8F,EAAqD,IAA1ChB,EAAYR,EAAQF,GAA0BU,EAAYR,EAAQiB,GAE3F,OAAOQ,CACT,CACA,KAAK3B,EAAoB,CAEvB,MAAM4B,EAAe,CAAA,EACrB,IAAIC,EAAgB,EACpB,OAAa,CACX,IAAIC,EAAiBC,EAErB,IADCD,EAAiBC,EAAWF,GAAiBpB,EAAeP,EAAQ2B,GACjEC,IAAoB9B,EACtB,MAEF4B,EAAa,SAASG,KAAerB,EAAYR,EAAQ4B,EAC3D,CACA,OAAOF,CACT,CAEA,QACE,MAAM,IAAIzE,MAAM,0BAA0BlB,KAE9C,CAUO,SAAS4E,EAAWX,GACzB,IAAI8B,EAAS,EACTC,EAAQ,EACZ,OAAa,CACX,MAAM9D,EAAO+B,EAAOG,KAAKiB,SAASpB,EAAOE,UAEzC,GADA4B,IAAkB,IAAP7D,IAAgB8D,IACd,IAAP9D,GACJ,OAAO6D,EAETC,GAAS,CACX,CACF,CAyCO,SAASlB,EAAiBb,GAC/B,MAAMU,EAlCR,SAAuBV,GACrB,IAAI8B,EAAS,GACTC,EAAQ,GACZ,OAAa,CACX,MAAM9D,EAAO+B,EAAOG,KAAKiB,SAASpB,EAAOE,UAEzC,GADA4B,GAAUtE,OAAc,IAAPS,IAAgB8D,IACpB,IAAP9D,GACJ,OAAO6D,EAETC,GAAS,EACX,CACF,CAuBiBC,CAAchC,GAE7B,OAAOU,GAAU,KAAgB,GAATA,EAC1B
,CAQA,SAASY,EAAerD,GACtB,OAAc,GAAPA,CACT,CASA,SAASsC,EAAeP,EAAQC,GAC9B,MAAMlE,EAAOiE,EAAOG,KAAKiB,SAASpB,EAAOE,UACzC,IAAY,GAAPnE,KAAiB+D,EAEpB,MAAO,CAAC,EAAG,EAAGG,GAEhB,MAAMgC,EAAQlG,GAAQ,EACtB,IAAIsE,EACJ,IAAI4B,EAIF,MAAM,IAAIhF,MAAM,oCAElB,OAJEoD,EAAMJ,EAAUgC,EAIX,CAACX,EAAevF,GAAOsE,EAAKA,EACrC,CC9KO6B,eAAeC,EAAqBC,GAAavG,QAAEA,EAAOwG,iBAAEA,EAAmBC,QAA4B,IAChH,KAAKF,GAAiBA,EAAYhC,YAAc,GAAI,MAAM,IAAInD,MAAM,gCAGpE,MAAMsF,EAAeC,KAAKC,IAAI,EAAGL,EAAYhC,WAAaiC,GACpDK,QAAqBN,EAAYvC,MAAM0C,EAAcH,EAAYhC,YAGjEuC,EAAa,IAAIC,SAASF,GAChC,GAAgE,YAA5DC,EAAWE,UAAUH,EAAatC,WAAa,GAAG,GACpD,MAAM,IAAInD,MAAM,yCAKlB,MAAM6F,EAAiBH,EAAWE,UAAUH,EAAatC,WAAa,GAAG,GACzE,GAAI0C,EAAiBV,EAAYhC,WAAa,EAC5C,MAAM,IAAInD,MAAM,2BAA2B6F,8BAA2CV,EAAYhC,WAAa,KAIjH,GAAI0C,EAAiB,EAAIT,EAAkB,CAEzC,MAAMU,EAAiBX,EAAYhC,WAAa0C,EAAiB,EAC3DE,QAAuBZ,EAAYvC,MAAMkD,EAAgBR,GAEzDU,EAAiB,IAAIC,YAAYJ,EAAiB,GAClDK,EAAe,IAAI5H,WAAW0H,GAGpC,OAFAE,EAAaC,IAAI,IAAI7H,WAAWyH,IAChCG,EAAaC,IAAI,IAAI7H,WAAWmH,GAAeH,EAAeQ,GACvDM,EAAgBJ,EAAgB,CAAEpH,WAC3C,CAEE,OAAOwH,EAAgBX,EAAc,CAAE7G,WAE3C,CASO,SAASwH,EAAgBC,GAAazH,QAAEA,GAAY,CAAA,GACzD,KAAMyH,aAAuBJ,aAAc,MAAM,IAAIjG,MAAM,gCAC3D,MAAMkD,EAAO,IAAIyC,SAASU,GAM1B,GAHAzH,EAAU,IAAKxB,KAAoBwB,GAG/BsE,EAAKC,WAAa,EACpB,MAAM,IAAInD,MAAM,6BAElB,GAAkD,YAA9CkD,EAAK0C,UAAU1C,EAAKC,WAAa,GAAG,GACtC,MAAM,IAAInD,MAAM,yCAKlB,MAAMsG,EAAuBpD,EAAKC,WAAa,EACzC0C,EAAiB3C,EAAK0C,UAAUU,GAAsB,GAC5D,GAAIT,EAAiB3C,EAAKC,WAAa,EAErC,MAAM,IAAInD,MAAM,2BAA2B6F,8BAA2C3C,EAAKC,WAAa,KAG1G,MAEMoD,EAAWzD,EADF,CAAEI,OAAMD,OADAqD,EAAuBT,IAGxCpG,EAAU,IAAIC,YACpB,SAASK,EAAiCgB,GACxC,OAAOA,GAAStB,EAAQM,OAAOgB,EACjC,CAGA,MAAMyF,EAAUD,EAASE,QAEnBhF,EAAS8E,EAASG,QAAQ/G,IAAwBgH,IAAK,CAC3D7H,KAAMhC,EAAY6J,EAAMF,SACxBG,YAAaD,EAAMD,QACnBhE,gBAAiB1F,EAAoB2J,EAAME,SAC3C5E,KAAMlC,EAAO4G,EAAMG,SACnBhF,aAAc6E,EAAMI,QACpBhI,eAAgB9B,EAAc0J,EAAMK,SACpC5H,MAAOuH,EAAMM,QACbC,UAAWP,EAAMQ,QACjBC,SAAUT,EAAMU,QAChBpI,aAAcqI,EAAYX,EAAMY,aAG5BC,EAAe/F,EAAOgG,OAAOC,GAAKA,EAAE5I,MACpC6I,EAAWpB,EAASM,QACpBe,EAAarB,EAASO,QAAQnH,IAAwBkI,IAAQ,CAClEC,QAASD,EAASpB,QAAQ9G,IAAI,CAAoBoI,EAA8BC,KAAW,CACzFC,UAAWlI,EAAOgI,EAAOtB,SACzByB,YAAaH,EAAOrB,QACpByB,UAAWJ,EAAOlB,SAAW,CAC3B/H,KAAMhC,EAAYiL,EAAOlB,QAAQJ,SACjC2B,UAAWL,EAAOlB,QAAQH,SAAS/G,IAA2B+H,GAAM3K,EAAS2K,IAC7EW,eAAgBN,EAAOlB,QAAQA,QAAQlH,IAAII,GAC3CuI,MAAOpL,EAAiB6K,EAAOlB,QAAQC,SACvCyB,WAAYR,EAAOlB,QAAQE,QAC3ByB,wBAAyBT,EAAOlB,QAAQG,QACxCyB,sBAAuBV,EAAOlB,QAAQI,QACtCyB,mBAAoBX,EAAOlB,QAAQM,QACnCwB,iBAAkBZ,EAAOlB,QAAQQ,QACjCuB,kBAAmBb,EAAOlB,QAAQU,SAClCsB,uBAAwBd,EAAOlB,QAAQiC,SACvCC,WAAYC,EAAajB,EAAOlB,QAAQoC,SAAUzB,EAAaQ,GAAcpJ,GAC7EsK,eAAgBnB,EAAOlB,QAAQsC,UAAUxJ,IAAwByJ,IAAY,CAC3EC,UAAWlM,EAASiM,EAAa3C,SACjCvI,SAAUnB,EAASqM,EAAa1C,SAChC7E,MAAOuH,EAAavC,WAEtByC,oBAAqBvB,EAAOlB,QAAQ0C,SACpCC,oBAAqBzB,EAAOlB,QAAQ4C,SACpCC,gBAAiB3B,EAAOlB,QAAQ8C,UAAY,CAC1CC,gCAAiC7B,EAAOlB,QAAQ8C,SAASlD,QACzDoD,2BAA4B9B,EAAOlB,QAAQ8C,SAASjD,QACpDoD,2BAA4B/B,EAAOlB,QAAQ8C,SAAS9C,UAGxDkD,oBAAqBhC,EAAOjB,QAC5BkD,oBAAqBjC,EAAOhB,QAC5BkD,oBAAqBlC,EAAOf,QAC5BkD,oBAAqBnC,EAAOd,QAC5BkD,gBAAiBpC,EAAOZ,QACxBiD,0BAA2BrC,EAAOV,WAEpCgD,gBAAiBxC,EAASnB,QAC1BiB,SAAUE,EAAShB,QACnByD,gBAAiBzC,EAASf,SAASnH,IAAwB4K,IAAa,CACtEC,WAAYD,EAAc9D,QAC1BgE,WAAYF,EAAc7D,QAC1BgE,YAAaH,EAAc1D,WAE7BqB,YAAaL,EAASd,QACtB0B,sBAAuBZ,EAASb,QAChC2D,QAAS9C,EAASZ,WAEdyB,EAAqBnC,EAASQ,SAASpH,IAAwBiL,IAAQ,CAC3EC,IAAK9K,EAAO6K,EAASnE,SACrB1F,MAAOhB,EAAO6K,EAASlE,YAIzB,MAAO,CACLF,UACA/E,SACAkG,WACAC,aACAc,qBACAoC,WARiB/K,EAAOwG,EAASS,SASjC+D,gBAAiBlF,EAErB,CAgBA,SAASyB,EAAYA,GACnB,OAAIA,GAAab,QAAgB,CAAE3H,KAAM,UACrCwI,GAAaZ,QAAgB,CAAE5H,KAAM,OACrCwI,GAAaT,QAAgB,CAAE/H,KAAM,QA
CrCwI,GAAaR,QAAgB,CAAEhI,KAAM,QACrCwI,GAAaP,QAAgB,CAC/BjI,KAAM,UACNM,MAAOkI,EAAYP,QAAQN,QAC3BS,UAAWI,EAAYP,QAAQL,SAE7BY,GAAaN,QAAgB,CAAElI,KAAM,QACrCwI,GAAaL,QAAgB,CAC/BnI,KAAM,OACNkM,gBAAiB1D,EAAYL,QAAQR,QACrC7F,KAAMqK,EAAS3D,EAAYL,QAAQP,UAEjCY,GAAaH,QAAgB,CAC/BrI,KAAM,YACNkM,gBAAiB1D,EAAYH,QAAQV,QACrC7F,KAAMqK,EAAS3D,EAAYH,QAAQT,UAEjCY,GAAaC,SAAiB,CAChCzI,KAAM,UACNmB,SAAUqH,EAAYC,SAASd,QAC/BvG,SAAUoH,EAAYC,SAASb,SAE7BY,GAAawB,SAAiB,CAAEhK,KAAM,QACtCwI,GAAa2B,SAAiB,CAAEnK,KAAM,QACtCwI,GAAa6B,SAAiB,CAAErK,KAAM,QACtCwI,GAAaiC,SAAiB,CAAEzK,KAAM,QACtCwI,GAAamC,SAAiB,CAAE3K,KAAM,WACnCwI,CACT,CAMA,SAAS2D,EAASrK,GAChB,GAAIA,EAAK6F,QAAS,MAAO,SACzB,GAAI7F,EAAK8F,QAAS,MAAO,SACzB,GAAI9F,EAAKiG,QAAS,MAAO,QACzB,MAAM,IAAI7G,MAAM,6BAClB,CAWA,SAASgJ,EAAakC,EAAOzJ,EAAQ7C,GACnC,OAAOsM,GAAS,CACd1F,IAAK2F,EAAgBD,EAAMzE,QAAShF,EAAQ7C,GAC5CwM,IAAKD,EAAgBD,EAAMxE,QAASjF,EAAQ7C,GAC5CyM,WAAYH,EAAMrE,QAClByE,eAAgBJ,EAAMpE,QACtByE,UAAWJ,EAAgBD,EAAMnE,QAAStF,EAAQ7C,GAClD4M,UAAWL,EAAgBD,EAAMlE,QAASvF,EAAQ7C,GAClD6M,mBAAoBP,EAAMjE,QAC1ByE,mBAAoBR,EAAM/D,QAE9B,CAQO,SAASgE,EAAgBpK,EAAOU,EAAQ7C,GAC7C,MAAME,KAAEA,EAAIC,eAAEA,EAAcE,aAAEA,GAAiBwC,EAC/C,QAAckK,IAAV5K,EAAqB,OAAOA,EAChC,GAAa,YAATjC,EAAoB,OAAoB,IAAbiC,EAAM,GACrC,GAAa,eAATjC,EAAuB,OAAO,IAAIY,aAAcK,OAAOgB,GAC3D,MAAMmC,EAAO,IAAIyC,SAAS5E,EAAMV,OAAQU,EAAMT,WAAYS,EAAMoC,YAChE,MAAa,UAATrE,GAAwC,IAApBoE,EAAKC,WAAyBD,EAAK0I,WAAW,GAAG,GAC5D,WAAT9M,GAAyC,IAApBoE,EAAKC,WAAyBD,EAAKW,WAAW,GAAG,GAC7D,UAAT/E,GAAuC,SAAnBC,EAAkCH,EAAQf,aAAaqF,EAAK2I,SAAS,GAAG,IACnF,UAAT/M,GAAuC,qBAAnBC,EAA8CH,EAAQvB,0BAA0B6F,EAAK4I,YAAY,GAAG,IAC/G,UAAThN,GAAuC,qBAAnBC,EAA8CH,EAAQnB,0BAA0ByF,EAAK4I,YAAY,GAAG,IAC/G,UAAThN,GAA2C,cAAvBG,GAAcH,MAA+C,UAAvBG,GAAc2B,KAAyBhC,EAAQjB,yBAAyBuF,EAAK4I,YAAY,GAAG,IAC7I,UAAThN,GAA2C,cAAvBG,GAAcH,MAA+C,WAAvBG,GAAc2B,KAA0BhC,EAAQnB,0BAA0ByF,EAAK4I,YAAY,GAAG,IAC/I,UAAThN,GAA2C,cAAvBG,GAAcH,KAA6BF,EAAQvB,0BAA0B6F,EAAK4I,YAAY,GAAG,IAC5G,UAAThN,GAAwC,IAApBoE,EAAKC,WAAyBD,EAAK2I,SAAS,GAAG,GAC1D,UAAT/M,GAAwC,IAApBoE,EAAKC,WAAyBD,EAAK4I,YAAY,GAAG,GACnD,YAAnB/M,EAAqCQ,EAAawB,GAAS,MAAQU,EAAOrC,OAAS,GAC5D,YAAvBH,GAAcH,KAA2B6B,EAAaI,GACdA,CAG9C,CC1QO,SAASgL,EAAOC,EAAKC,GAE1B,IAAK,IAAIxN,EAAI,EAAGA,EAAIwN,EAAIzN,OAAQC,GADlB,IAEZuN,EAAI9J,QAAQ+J,EAAIrJ,MAAMnE,EAAGA,EAFb,KAIhB,CAmDOwG,eAAeiH,GAAmBC,IAAEA,EAAGhJ,WAAEA,EAAUiJ,YAAEA,EAAaC,MAAOC,IAE9E,MAAMD,EAAQC,GAAeC,WAAWF,MAQxC,IAAIhM,EANJ8C,UA5BK8B,eAAiCkH,EAAKC,EAAaE,GACxD,MAAMD,EAAQC,GAAeC,WAAWF,MACxC,aAAaA,EAAMF,EAAK,IAAKC,EAAaI,OAAQ,SAC/CC,KAAKC,IACJ,IAAKA,EAAIC,GAAI,MAAM,IAAI3M,MAAM,qBAAqB0M,EAAIE,UACtD,MAAMpO,EAASkO,EAAIG,QAAQC,IAAI,kBAC/B,IAAKtO,EAAQ,MAAM,IAAIwB,MAAM,0BAC7B,OAAO+M,SAASvO,IAEtB,CAmBuBwO,CAAkBb,EAAKC,EAAaC,GAOzD,MAAMY,EAAOb,GAAe,CAAA,EAE5B,MAAO,CACLjJ,aACA,WAAMP,CAAMsK,EAAOC,GACjB,GAAI9M,EACF,OAAOA,EAAOoM,KAAKpM,GAAUA,EAAOuC,MAAMsK,EAAOC,IAGnD,MAAMN,EAAU,IAAIO,QAAQH,EAAKJ,SAC3BQ,OAAiB1B,IAARwB,EAAoB,GAAKA,EAAM,EAC9CN,EAAQ1G,IAAI,QAAS,SAAS+G,KAASG,KAEvC,MAAMX,QAAYL,EAAMF,EAAK,IAAKc,EAAMJ,YACxC,IAAKH,EAAIC,KAAOD,EAAIY,KAAM,MAAM,IAAItN,MAAM,gBAAgB0M,EAAIE,UAE9D,GAAmB,MAAfF,EAAIE,OAGN,OADAvM,EAASqM,EAAIrG,cACNhG,EAAOoM,KAAKpM,GAAUA,EAAOuC,MAAMsK,EAAOC,IAC5C,GAAmB,MAAfT,EAAIE,OAEb,OAAOF,EAAIrG,cAEX,MAAM,IAAIrG,MAAM,yCAAyC0M,EAAIE,SAEjE,EAEJ,CAUO,SAASW,GAAkBpK,WAAEA,EAAUP,MAAEA,IAAS4K,QAAEA,EAAUnI,QAA4B,IAC/F,GAAIlC,EAAaqK,EAAS,CAExB,MAAMnN,EAASuC,EAAM,EAAGO,GACxB,MAAO,CACLA,aACA8B,MAAW,MAACiI,EAAOC,WACH9M,GAAQuC,MAAMsK,EAAOC,GAGzC,CACA,MAAMM,EAAQ,IAAIC,IAClB,MAAO,CACLvK,aAMA,KAAAP,CAAMsK,EAAOC,GACX,MAAMtC,EAsBZ,SAAkBqC,EAAOC,EAAK/I,GAC5B,GAAI8I,EAAQ,EAAG,CACb,QAAYvB,IAARwB,EAAmB,MAAM,IAAInN,MAAM,yBAAyBkN,MAAUC
,MAC1E,YAAaxB,IAATvH,EAA2B,GAAG8I,KAC3B,GAAG9I,EAAO8I,KAAS9I,GAC5B,CAAO,QAAYuH,IAARwB,EAAmB,CAC5B,GAAID,EAAQC,EAAK,MAAM,IAAInN,MAAM,wBAAwBkN,MAAUC,MACnE,MAAO,GAAGD,KAASC,GACrB,CAAO,YAAaxB,IAATvH,EACF,GAAG8I,KAEH,GAAGA,KAAS9I,GAEvB,CAnCkBuJ,CAAST,EAAOC,EAAKhK,GAC3ByK,EAASH,EAAMX,IAAIjC,GACzB,GAAI+C,EAAQ,OAAOA,EAEnB,MAAMC,EAAUjL,EAAMsK,EAAOC,GAE7B,OADAM,EAAMtH,IAAI0E,EAAKgD,GACRA,CACT,EAEJ,CAkCO,SAASC,EAAQC,GACtB,IAAKA,EAAQ,MAAO,GACpB,GAAsB,IAAlBA,EAAOvP,OAAc,OAAOuP,EAAO,GAEvC,MAAM1P,EAAS,GACf,IAAK,MAAM2P,KAASD,EAClBhC,EAAO1N,EAAQ2P,GAEjB,OAAO3P,CACT,CC3IO,SAAS4P,GAAepF,uBAAEA,EAAsBF,iBAAEA,EAAgBF,sBAAEA,IACzE,MAAMyF,EAAerF,GAA0BF,EAC/C,MAAO,CACLwF,UAAW3Q,OAAO0Q,GAClBE,QAAS5Q,OAAO0Q,EAAezF,GAEnC,CC/DO,SAAS4F,EAAchQ,EAAQiQ,EAAkBC,EAAkB/J,EAAQhC,GAChF,MAAMgM,EAAIF,GAAkB9P,QAAU+P,EAAiB/P,OACvD,IAAKgQ,EAAG,OAAOhK,EACf,MAAMiK,EAAqB9L,EAAsBH,GAC3CkM,EAAiBlM,EAAW7C,IAAI,EAAGhB,aAAcA,EAAQ+D,iBAC/D,IAAIiM,EAAa,EAGjB,MAAMC,EAAiB,CAACvQ,GACxB,IAAIwQ,EAAmBxQ,EACnByQ,EAAe,EACfC,EAAkB,EAClBC,EAAkB,EAEtB,GAAIT,EAAiB,GAEnB,KAAOO,EAAeJ,EAAelQ,OAAS,GAAKwQ,EAAkBT,EAAiB,IACpFO,IACqC,aAAjCJ,EAAeI,KAEjBD,EAAmBA,EAAiBI,IAAG,GACvCL,EAAe1M,KAAK2M,GACpBE,KAEmC,aAAjCL,EAAeI,IAA8BE,IAIrD,IAAK,IAAIvQ,EAAI,EAAGA,EAAI+P,EAAG/P,IAAK,CAE1B,MAAMyQ,EAAMZ,GAAkB9P,OAAS8P,EAAiB7P,GAAKgQ,EACvDU,EAAMZ,EAAiB9P,GAG7B,KAAOqQ,IAAiBK,EAAMH,GAAoD,aAAjCN,EAAeI,KACzB,aAAjCJ,EAAeI,KACjBF,EAAeQ,MACfL,KAEmC,aAAjCL,EAAeI,IAA8BE,IACjDF,IAMF,IAHAD,EAAmBD,EAAeK,IAAG,IAIlCH,EAAeJ,EAAelQ,OAAS,GAA0C,aAArCkQ,EAAeI,EAAe,MAC1EC,EAAkBG,GAA4C,aAArCR,EAAeI,EAAe,KACxD,CAEA,GADAA,IACqC,aAAjCJ,EAAeI,GAA8B,CAE/C,MAAMO,EAAU,GAChBR,EAAiB3M,KAAKmN,GACtBR,EAAmBQ,EACnBT,EAAe1M,KAAKmN,GACpBN,GACF,CACqC,aAAjCL,EAAeI,IAA8BE,GACnD,CAGIE,IAAQT,EAEVI,EAAiB3M,KAAKsC,EAAOmK,MACpBG,IAAiBJ,EAAelQ,OAAS,EAClDqQ,EAAiB3M,KAAK,MAEtB2M,EAAiB3M,KAAK,GAE1B,CAGA,IAAK7D,EAAOG,OAEV,IAAK,IAAIC,EAAI,EAAGA,EAAIgQ,EAAoBhQ,IAAK,CAE3C,MAAM4Q,EAAU,GAChBR,EAAiB3M,KAAKmN,GACtBR,EAAmBQ,CACrB,CAGF,OAAOhR,CACT,CAUO,SAASiR,EAAeC,EAAe9N,EAAQ+N,EAAQ,GAC5D,MAAM7N,EAAOF,EAAOE,KAAK8N,KAAK,KACxBC,EAA8C,aAAnCjO,EAAO9C,QAAQ+D,gBAC1BiN,EAAYD,EAAWF,EAAQ,EAAIA,EAEzC,GL7BK,SAAoB/N,GACzB,IAAKA,EAAQ,OAAO,EACpB,GAAsC,SAAlCA,EAAO9C,QAAQI,eAA2B,OAAO,EACrD,GAAI0C,EAAOG,SAASpD,OAAS,EAAG,OAAO,EAEvC,MAAMoR,EAAanO,EAAOG,SAAS,GACnC,QAAIgO,EAAWhO,SAASpD,OAAS,IACU,aAAvCoR,EAAWjR,QAAQ+D,eAGzB,CKmBMmN,CAAWpO,GAAS,CACtB,IAAIqO,EAAUrO,EAAOG,SAAS,GAC1BmO,EAAWJ,EACiB,IAA5BG,EAAQlO,SAASpD,SACnBsR,EAAUA,EAAQlO,SAAS,GAC3BmO,KAEFT,EAAeC,EAAeO,EAASC,GAEvC,MAAMC,EAAYF,EAAQnO,KAAK8N,KAAK,KAC9BjL,EAAS+K,EAAczC,IAAIkD,GACjC,IAAKxL,EAAQ,MAAM,IAAIxE,MAAM,sCAI7B,OAHI0P,GAAUO,EAAezL,EAAQgL,GACrCD,EAAcpJ,IAAIxE,EAAM6C,QACxB+K,EAAcW,OAAOF,EAEvB,CAEA,GL7BK,SAAmBvO,GACxB,IAAKA,EAAQ,OAAO,EACpB,GAAsC,QAAlCA,EAAO9C,QAAQI,eAA0B,OAAO,EACpD,GAAI0C,EAAOG,SAASpD,OAAS,EAAG,OAAO,EAEvC,MAAMoR,EAAanO,EAAOG,SAAS,GACnC,GAAmC,IAA/BgO,EAAWhO,SAASpD,OAAc,OAAO,EAC7C,GAA2C,aAAvCoR,EAAWjR,QAAQ+D,gBAAgC,OAAO,EAE9D,MAAMyN,EAAWP,EAAWhO,SAASU,KAAKN,GAAgC,QAAvBA,EAAMrD,QAAQsD,MACjE,GAA0C,aAAtCkO,GAAUxR,QAAQ+D,gBAAgC,OAAO,EAE7D,MAAM0N,EAAaR,EAAWhO,SAASU,KAAKN,GAAgC,UAAvBA,EAAMrD,QAAQsD,MACnE,MAA4C,aAAxCmO,GAAYzR,QAAQ+D,eAG1B,CKaM2N,CAAU5O,GAAS,CACrB,MAAM6O,EAAU7O,EAAOG,SAAS,GAAGjD,QAAQsD,KAG3CqN,EAAeC,EAAe9N,EAAOG,SAAS,GAAGA,SAAS,GAAI+N,EAAY,GAC1EL,EAAeC,EAAe9N,EAAOG,SAAS,GAAGA,SAAS,GAAI+N,EAAY,GAE1E,MAAMY,EAAOhB,EAAczC,IAAI,GAAGnL,KAAQ2O,SACpC9L,EAAS+K,EAAczC,IAAI,GAAGnL,KAAQ2O,WAE5C,IAAKC,EAAM,MAAM,IAAIvQ,MAAM,mCAC3B,IAAKwE,EAAQ,MAAM,IAAIxE,MAAM,qCAC7B,GAAIuQ,EAAK/R,SAAWgG,EAAOhG,OACzB,MAAM,IAAIwB,MAAM,gDAGlB,MAAMwQ,EAAMC,EAAaF,EAAM/L,EAAQmL,GAMvC,OALID,GAAUO,EAAeO,EA
AKhB,GAElCD,EAAcW,OAAO,GAAGvO,KAAQ2O,SAChCf,EAAcW,OAAO,GAAGvO,KAAQ2O,gBAChCf,EAAcpJ,IAAIxE,EAAM6O,EAE1B,CAGA,GAAI/O,EAAOG,SAASpD,OAAQ,CAE1B,MAAMkS,EAAiD,aAAnCjP,EAAO9C,QAAQ+D,gBAAiC8M,EAAQA,EAAQ,EAE9EmB,EAAS,CAAA,EACf,IAAK,MAAM3O,KAASP,EAAOG,SAAU,CACnC0N,EAAeC,EAAevN,EAAO0O,GACrC,MAAME,EAAYrB,EAAczC,IAAI9K,EAAML,KAAK8N,KAAK,MACpD,IAAKmB,EAAW,MAAM,IAAI5Q,MAAM,qCAChC2Q,EAAO3O,EAAMrD,QAAQsD,MAAQ2O,CAC/B,CAEA,IAAK,MAAM5O,KAASP,EAAOG,SACzB2N,EAAcW,OAAOlO,EAAML,KAAK8N,KAAK,MAGvC,MAAMoB,EAAWC,EAAaH,EAAQD,GAClChB,GAAUO,EAAeY,EAAUrB,GACvCD,EAAcpJ,IAAIxE,EAAMkP,EAC1B,CACF,CAOA,SAASZ,EAAe5Q,EAAKmQ,GAC3B,IAAK,IAAI/Q,EAAI,EAAGA,EAAIY,EAAIb,OAAQC,IAC1B+Q,EACFS,EAAe5Q,EAAIZ,GAAI+Q,EAAQ,GAE/BnQ,EAAIZ,GAAKY,EAAIZ,GAAG,EAGtB,CAQA,SAASgS,EAAaF,EAAM/L,EAAQgL,GAClC,MAAMgB,EAAM,GACZ,IAAK,IAAI/R,EAAI,EAAGA,EAAI8R,EAAK/R,OAAQC,IAC/B,GAAI+Q,EACFgB,EAAItO,KAAKuO,EAAaF,EAAK9R,GAAI+F,EAAO/F,GAAI+Q,EAAQ,SAElD,GAAIe,EAAK9R,GAAI,CAEX,MAAMsS,EAAM,CAAA,EACZ,IAAK,IAAIC,EAAI,EAAGA,EAAIT,EAAK9R,GAAGD,OAAQwS,IAAK,CACvC,MAAMjQ,EAAQyD,EAAO/F,GAAGuS,GACxBD,EAAIR,EAAK9R,GAAGuS,SAAgBrF,IAAV5K,EAAsB,KAAOA,CACjD,CACAyP,EAAItO,KAAK6O,EACX,MACEP,EAAItO,UAAKyJ,GAIf,OAAO6E,CACT,CASA,SAASM,EAAaH,EAAQnB,GAC5B,MAAMe,EAAOU,OAAOV,KAAKI,GACnBnS,EAASmS,EAAOJ,EAAK,KAAK/R,OAC1BgS,EAAM,GACZ,IAAK,IAAI/R,EAAI,EAAGA,EAAID,EAAQC,IAAK,CAE/B,MAAMsS,EAAM,CAAA,EACZ,IAAK,MAAMlG,KAAO0F,EAAM,CACtB,GAAII,EAAO9F,GAAKrM,SAAWA,EAAQ,MAAM,IAAIwB,MAAM,gCACnD+Q,EAAIlG,GAAO8F,EAAO9F,GAAKpM,EACzB,CACI+Q,EACFgB,EAAItO,KAAK4O,EAAaC,EAAKvB,EAAQ,IAEnCgB,EAAItO,KAAK6O,EAEb,CACA,OAAOP,CACT,CC/OO,SAASU,EAAkBnO,EAAQlB,EAAOxD,GAC/C,MAAM8S,EAAQ9S,aAAkBmC,WAC1B4Q,EAAY1N,EAAWX,GACvBsO,EAAoB3N,EAAWX,GACrCW,EAAWX,GACX,IAAIhC,EAAQ6C,EAAiBb,GACzBuO,EAAc,EAClBjT,EAAOiT,KAAiBH,EAAQ3T,OAAOuD,GAASA,EAEhD,MAAMwQ,EAAqBH,EAAYC,EAEvC,KAAOC,EAAczP,GAAO,CAE1B,MAAM2P,EAAW5N,EAAiBb,GAC5B0O,EAAY,IAAInT,WAAW+S,GACjC,IAAK,IAAI5S,EAAI,EAAGA,EAAI4S,EAAmB5S,IACrCgT,EAAUhT,GAAKsE,EAAOG,KAAKiB,SAASpB,EAAOE,UAG7C,IAAK,IAAIxE,EAAI,EAAGA,EAAI4S,GAAqBC,EAAczP,EAAOpD,IAAK,CAEjE,MAAMwB,EAAWM,OAAOkR,EAAUhT,IAClC,GAAIwB,EAAU,CACZ,IAAIyR,EAAa,GACbC,EAAiBJ,EACrB,MAAMK,GAAQ,IAAM3R,GAAY,GAChC,KAAO0R,GAAkBL,EAAczP,GAAO,CAC5C,IAAIZ,EAAOV,OAAOwC,EAAOG,KAAKiB,SAASpB,EAAOE,UAAYyO,EAAaE,EAEvE,IADAF,GAAczR,EACPyR,GAAc,GACnBA,GAAc,GACd3O,EAAOE,SACHyO,IACFzQ,GAAQV,OAAOwC,EAAOG,KAAKiB,SAASpB,EAAOE,UAAYhD,EAAWyR,EAAaE,GAInF7Q,GADcyQ,EAAWvQ,EAEzB5C,EAAOiT,KAAiBH,EAAQ3T,OAAOuD,GAASA,EAChD4Q,GACF,CACIA,IAEF5O,EAAOE,QAAUsC,KAAKsM,MAAMF,EAAiBnU,OAAOyC,GAAYzC,OAAOkU,IAAe,GAE1F,MACE,IAAK,IAAIV,EAAI,EAAGA,EAAIO,GAAsBD,EAAczP,EAAOmP,IAC7DjQ,GAASyQ,EACTnT,EAAOiT,KAAiBH,EAAQ3T,OAAOuD,GAASA,CAGtD,CACF,CACF,CAOO,SAAS+Q,EAAqB/O,EAAQlB,EAAOxD,GAClD,MAAM0T,EAAU,IAAIvR,WAAWqB,GAC/BqP,EAAkBnO,EAAQlB,EAAOkQ,GACjC,IAAK,IAAItT,EAAI,EAAGA,EAAIoD,EAAOpD,IACzBJ,EAAOI,GAAK,IAAIH,WAAWyE,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAQ8O,EAAQtT,IAC/FsE,EAAOE,QAAU8O,EAAQtT,EAE7B,CCnEO,SAASwB,EAASc,GACvB,OAAO,GAAKwE,KAAKyM,MAAMjR,EACzB,CAYO,SAASkR,EAAuBlP,EAAQmP,EAAO7T,EAAQG,QAC7CmN,IAAXnN,IACFA,EAASuE,EAAOG,KAAK0C,UAAU7C,EAAOE,QAAQ,GAC9CF,EAAOE,QAAU,GAEnB,MAAMkP,EAAcpP,EAAOE,OAC3B,IAAImP,EAAO,EACX,KAAOA,EAAO/T,EAAOG,QAAQ,CAC3B,MAAM6T,EAAS3O,EAAWX,GAC1B,GAAa,EAATsP,EAEFD,EAAOE,GAAcvP,EAAQsP,EAAQH,EAAO7T,EAAQ+T,OAC/C,CAEL,MAAMvQ,EAAQwQ,IAAW,EACzBE,EAAQxP,EAAQlB,EAAOqQ,EAAO7T,EAAQ+T,GACtCA,GAAQvQ,CACV,CACF,CACAkB,EAAOE,OAASkP,EAAc3T,CAChC,CAWA,SAAS+T,EAAQxP,EAAQlB,EAAO5B,EAAU5B,EAAQ+T,GAChD,MAAMF,EAAQjS,EAAW,GAAK,EAC9B,IAAIc,EAAQ,EACZ,IAAK,IAAItC,EAAI,EAAGA,EAAIyT,EAAOzT,IACzBsC,GAASgC,EAAOG,KAAKiB,SAASpB,EAAOE,YAAcxE,GAAK,GAK1D,IAAK,IAAIA,EAAI,EAAGA,EAA
IoD,EAAOpD,IACzBJ,EAAO+T,EAAO3T,GAAKsC,CAEvB,CAaA,SAASuR,GAAcvP,EAAQsP,EAAQpS,EAAU5B,EAAQ+T,GACvD,IAAIvQ,EAAQwQ,GAAU,GAAK,EAC3B,MAAMT,GAAQ,GAAK3R,GAAY,EAE/B,IAAIjC,EAAO,EACX,GAAI+E,EAAOE,OAASF,EAAOG,KAAKC,WAC9BnF,EAAO+E,EAAOG,KAAKiB,SAASpB,EAAOE,eAC9B,GAAI2O,EAET,MAAM,IAAI5R,MAAM,0BAA0B+C,EAAOE,uBAEnD,IAAIuP,EAAO,EACPC,EAAQ,EAGZ,KAAO5Q,GAED4Q,EAAQ,GACVA,GAAS,EACTD,GAAQ,EACRxU,KAAU,GACDwU,EAAOC,EAAQxS,GAExBjC,GAAQ+E,EAAOG,KAAKiB,SAASpB,EAAOE,SAAWuP,EAC/CzP,EAAOE,SACPuP,GAAQ,IAEJJ,EAAO/T,EAAOG,SAEhBH,EAAO+T,KAAUpU,GAAQyU,EAAQb,GAEnC/P,IACA4Q,GAASxS,GAIb,OAAOmS,CACT,CASO,SAASM,GAAgB3P,EAAQlB,EAAO/C,EAAM6T,GACnD,MAAMT,EA6BR,SAAmBpT,EAAM6T,GACvB,OAAQ7T,GACR,IAAK,QACL,IAAK,QACH,OAAO,EACT,IAAK,QACL,IAAK,SACH,OAAO,EACT,IAAK,uBACH,IAAK6T,EAAY,MAAM,IAAI3S,MAAM,yCACjC,OAAO2S,EACT,QACE,MAAM,IAAI3S,MAAM,6BAA6BlB,KAEjD,CA3CgB8T,CAAU9T,EAAM6T,GACxB7R,EAAQ,IAAIxC,WAAWuD,EAAQqQ,GACrC,IAAK,IAAIW,EAAI,EAAGA,EAAIX,EAAOW,IACzB,IAAK,IAAIpU,EAAI,EAAGA,EAAIoD,EAAOpD,IACzBqC,EAAMrC,EAAIyT,EAAQW,GAAK9P,EAAOG,KAAKiB,SAASpB,EAAOE,UAIvD,GAAa,UAATnE,EAAkB,OAAO,IAAIgU,aAAahS,EAAMT,QAC/C,GAAa,WAATvB,EAAmB,OAAO,IAAIiU,aAAajS,EAAMT,QACrD,GAAa,UAATvB,EAAkB,OAAO,IAAI0B,WAAWM,EAAMT,QAClD,GAAa,UAATvB,EAAkB,OAAO,IAAIqB,cAAcW,EAAMT,QACrD,GAAa,yBAATvB,EAAiC,CAExC,MAAMkU,EAAQ,IAAI1T,MAAMuC,GACxB,IAAK,IAAIpD,EAAI,EAAGA,EAAIoD,EAAOpD,IACzBuU,EAAMvU,GAAKqC,EAAMmS,SAASxU,EAAIyT,GAAQzT,EAAI,GAAKyT,GAEjD,OAAOc,CACT,CACA,MAAM,IAAIhT,MAAM,+CAA+ClB,IACjE,CCzIO,SAASoU,GAAUnQ,EAAQjE,EAAM+C,EAAOsR,GAC7C,GAAc,IAAVtR,EAAa,MAAO,GACxB,GAAa,YAAT/C,EACF,OA4BJ,SAA0BiE,EAAQlB,GAChC,MAAM2C,EAAS,IAAIlF,MAAMuC,GACzB,IAAK,IAAIpD,EAAI,EAAGA,EAAIoD,EAAOpD,IAAK,CAC9B,MAAM6B,EAAayC,EAAOE,QAAUxE,EAAI,EAAI,GACtC2U,EAAY3U,EAAI,EAChBuC,EAAO+B,EAAOG,KAAKiB,SAAS7D,GAClCkE,EAAO/F,MAAMuC,EAAO,GAAKoS,EAC3B,CAEA,OADArQ,EAAOE,QAAUsC,KAAKsM,KAAKhQ,EAAQ,GAC5B2C,CACT,CAtCW6O,CAAiBtQ,EAAQlB,GAC3B,GAAa,UAAT/C,EACT,OA6CJ,SAAwBiE,EAAQlB,GAC9B,MAAM2C,GAAUzB,EAAOG,KAAK5C,WAAayC,EAAOE,QAAU,EACtD,IAAIzC,WAAW8S,GAAMvQ,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAgB,EAARpB,IACjF,IAAIrB,WAAWuC,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAQpB,GAE/E,OADAkB,EAAOE,QAAkB,EAARpB,EACV2C,CACT,CAnDW+O,CAAexQ,EAAQlB,GACzB,GAAa,UAAT/C,EACT,OA0DJ,SAAwBiE,EAAQlB,GAC9B,MAAM2C,GAAUzB,EAAOG,KAAK5C,WAAayC,EAAOE,QAAU,EACtD,IAAI9C,cAAcmT,GAAMvQ,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAgB,EAARpB,IACpF,IAAI1B,cAAc4C,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAQpB,GAElF,OADAkB,EAAOE,QAAkB,EAARpB,EACV2C,CACT,CAhEWgP,CAAezQ,EAAQlB,GACzB,GAAa,UAAT/C,EACT,OAuEJ,SAAwBiE,EAAQlB,GAC9B,MAAM2C,EAAS,IAAIlF,MAAMuC,GACzB,IAAK,IAAIpD,EAAI,EAAGA,EAAIoD,EAAOpD,IAAK,CAC9B,MAAMgV,EAAM1Q,EAAOG,KAAK4I,YAAY/I,EAAOE,OAAa,GAAJxE,GAAQ,GACtDiV,EAAO3Q,EAAOG,KAAK2I,SAAS9I,EAAOE,OAAa,GAAJxE,EAAS,GAAG,GAC9D+F,EAAO/F,GAAK8B,OAAOmT,IAAS,IAAMD,CACpC,CAEA,OADA1Q,EAAOE,QAAkB,GAARpB,EACV2C,CACT,CAhFWmP,CAAe5Q,EAAQlB,GACzB,GAAa,UAAT/C,EACT,OAuFJ,SAAwBiE,EAAQlB,GAC9B,MAAM2C,GAAUzB,EAAOG,KAAK5C,WAAayC,EAAOE,QAAU,EACtD,IAAI6P,aAAaQ,GAAMvQ,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAgB,EAARpB,IACnF,IAAIiR,aAAa/P,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAQpB,GAEjF,OADAkB,EAAOE,QAAkB,EAARpB,EACV2C,CACT,CA7FWoP,CAAe7Q,EAAQlB,GACzB,GAAa,WAAT/C,EACT,OAoGJ,SAAyBiE,EAAQlB,GAC/B,MAAM2C,GAAUzB,EAAOG,KAAK5C,WAAayC,EAAOE,QAAU,EACtD,IAAI8P,aAAaO,GAAMvQ,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAgB,EAARpB,IACnF,IAAIkR,aAAahQ,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAQpB,GAEjF,OADAkB,EAAOE,QAAkB,EAARpB,EACV2C,CACT,CA1GWqP,CAAgB9Q,EAAQlB,GAC1B,GAAa,eAAT/C,EACT,OAiHJ,SAA4BiE,EAAQlB,GAClC,MAAM2C,EAAS,IAAIlF,MAAMuC,
GACzB,IAAK,IAAIpD,EAAI,EAAGA,EAAIoD,EAAOpD,IAAK,CAC9B,MAAMD,EAASuE,EAAOG,KAAK0C,UAAU7C,EAAOE,QAAQ,GACpDF,EAAOE,QAAU,EACjBuB,EAAO/F,GAAK,IAAIH,WAAWyE,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAQzE,GACvFuE,EAAOE,QAAUzE,CACnB,CACA,OAAOgG,CACT,CA1HWsP,CAAmB/Q,EAAQlB,GAC7B,GAAa,yBAAT/C,EAAiC,CAC1C,IAAKqU,EAAa,MAAM,IAAInT,MAAM,gCAClC,OAiIJ,SAAiC+C,EAAQlB,EAAOsR,GAE9C,MAAM3O,EAAS,IAAIlF,MAAMuC,GACzB,IAAK,IAAIpD,EAAI,EAAGA,EAAIoD,EAAOpD,IACzB+F,EAAO/F,GAAK,IAAIH,WAAWyE,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAQkQ,GACvFpQ,EAAOE,QAAUkQ,EAEnB,OAAO3O,CACT,CAzIWuP,CAAwBhR,EAAQlB,EAAOsR,EAChD,CACE,MAAM,IAAInT,MAAM,2BAA2BlB,IAE/C,CAgJA,SAASwU,GAAMjT,EAAQ4C,EAAQmB,GAC7B,MAAM4P,EAAU,IAAI/N,YAAY7B,GAEhC,OADA,IAAI9F,WAAW0V,GAAS7N,IAAI,IAAI7H,WAAW+B,EAAQ4C,EAAQmB,IACpD4P,CACT,CC7KA,MAAMC,GAAY,CAAC,EAAG,IAAM,MAAQ,SAAU,YAW9C,SAASC,GAAUC,EAAWC,EAASC,EAASC,EAAO9V,GACrD,IAAK,IAAIC,EAAI,EAAGA,EAAID,EAAQC,IAC1B4V,EAAQC,EAAQ7V,GAAK0V,EAAUC,EAAU3V,EAE7C,CCPO,SAAS8V,GAAazT,EAAO0T,GAAM1V,KAAEA,EAAIH,QAAEA,EAAO6D,WAAEA,IACzD,MAAMU,EAAO,IAAIyC,SAAS7E,EAAMT,OAAQS,EAAMR,WAAYQ,EAAMqC,YAC1DJ,EAAS,CAAEG,OAAMD,OAAQ,GAE/B,IAAIwR,EAGJ,MAAMlG,EAkDR,SAA8BxL,EAAQyR,EAAMhS,GAC1C,GAAIA,EAAWhE,OAAS,EAAG,CACzB,MAAMkW,EAAqBnS,EAAsBC,GACjD,GAAIkS,EAAoB,CACtB,MAAMlQ,EAAS,IAAIlF,MAAMkV,EAAKjM,YAE9B,OADA0J,EAAuBlP,EAAQ9C,EAASyU,GAAqBlQ,GACtDA,CACT,CACF,CACA,MAAO,EACT,CA5D2BmQ,CAAqB5R,EAAQyR,EAAMhS,IAEtD8L,iBAAEA,EAAgBsG,SAAEA,GAkE5B,SAA8B7R,EAAQyR,EAAMhS,GAC1C,MAAMiM,EAAqB9L,EAAsBH,GACjD,IAAKiM,EAAoB,MAAO,CAAEH,iBAAkB,GAAIsG,SAAU,GAElE,MAAMtG,EAAmB,IAAIhP,MAAMkV,EAAKjM,YACxC0J,EAAuBlP,EAAQ9C,EAASwO,GAAqBH,GAG7D,IAAIsG,EAAWJ,EAAKjM,WACpB,IAAK,MAAM2G,KAAOZ,EACZY,IAAQT,GAAoBmG,IAEjB,IAAbA,IAAgBtG,EAAiB9P,OAAS,GAE9C,MAAO,CAAE8P,mBAAkBsG,WAC7B,CAjFyCC,CAAqB9R,EAAQyR,EAAMhS,GAIpEsS,EAAUN,EAAKjM,WAAaqM,EAClC,GAAsB,UAAlBJ,EAAKtW,SACPuW,EAAWvB,GAAUnQ,EAAQjE,EAAMgW,EAASnW,EAAQiI,kBAC/C,GACa,qBAAlB4N,EAAKtW,UACa,mBAAlBsW,EAAKtW,UACa,QAAlBsW,EAAKtW,SACL,CACA,MAAM+B,EAAoB,YAATnB,EAAqB,EAAIoE,EAAKiB,SAASpB,EAAOE,UAC3DhD,GACFwU,EAAW,IAAInV,MAAMwV,GACR,YAAThW,GACFmT,EAAuBlP,EAAQ9C,EAAUwU,GACzCA,EAAWA,EAAS9U,IAAIoV,KAAOA,IAG/B9C,EAAuBlP,EAAQ9C,EAAUwU,EAAUvR,EAAKC,WAAaJ,EAAOE,SAG9EwR,EAAW,IAAInW,WAAWwW,EAE9B,MAAO,GAAsB,sBAAlBN,EAAKtW,SACduW,EAAW/B,GAAgB3P,EAAQ+R,EAAShW,EAAMH,EAAQiI,kBACrD,GAAsB,wBAAlB4N,EAAKtW,SAAoC,CAElDuW,EADuB,UAAT3V,EACK,IAAI0B,WAAWsU,GAAW,IAAI3U,cAAc2U,GAC/D5D,EAAkBnO,EAAQ+R,EAASL,EACrC,KAAO,IAAsB,4BAAlBD,EAAKtW,SAId,MAAM,IAAI8B,MAAM,iCAAiCwU,EAAKtW,YAHtDuW,EAAW,IAAInV,MAAMwV,GACrBhD,EAAqB/O,EAAQ+R,EAASL,EAGxC,CAEA,MAAO,CAAEnG,mBAAkBC,mBAAkBkG,WAC/C,CAmDO,SAASO,GAAeC,EAAiBC,EAAwB5M,EAAO6M,GAE7E,IAAIC,EACJ,MAAMC,EAAqBF,IAAc7M,GACzC,GAAc,iBAAVA,EACF8M,EAAOH,OACF,GAAII,EACTD,EAAOC,EAAmBJ,EAAiBC,OACtC,IAAc,WAAV5M,EAIT,MAAM,IAAItI,MAAM,0CAA0CsI,KAH1D8M,EAAO,IAAI9W,WAAW4W,GD5FnB,SAA0BI,EAAOjX,GACtC,MAAMkX,EAAcD,EAAMnS,WACpBqS,EAAenX,EAAO8E,WAC5B,IAAIsS,EAAM,EACNC,EAAS,EAGb,KAAOD,EAAMF,GAAa,CACxB,MAAMI,EAAIL,EAAMG,GAEhB,GADAA,IACIE,EAAI,IACN,KAEJ,CACA,GAAIH,GAAgBC,GAAOF,EACzB,MAAM,IAAIvV,MAAM,gCAGlB,KAAOyV,EAAMF,GAAa,CACxB,MAAMI,EAAIL,EAAMG,GAChB,IAAIG,EAAM,EAGV,GAFAH,IAEIA,GAAOF,EACT,MAAM,IAAIvV,MAAM,sBAIlB,GAAS,EAAJ2V,EAsBE,CAEL,IAAI1S,EAAS,EACb,OAAY,EAAJ0S,GACR,KAAK,EAEHC,EAAwB,GAAjBD,IAAM,EAAI,GACjB1S,EAASqS,EAAMG,IAAQE,IAAM,GAAK,GAClCF,IACA,MACF,KAAK,EAEH,GAAIF,GAAeE,EAAM,EACvB,MAAM,IAAIzV,MAAM,6BAElB4V,GAAOD,IAAM,GAAK,EAClB1S,EAASqS,EAAMG,IAAQH,EAAMG,EAAM,IAAM,GACzCA,GAAO,EACP,MACF,KAAK,EAEH,GAAIF,GAAeE,EAAM,EACvB,MAAM,IAAIzV,MAAM,6BAElB4V,GAAOD,IAAM,GAAK,EAClB1S,EAASqS,EAAMG,IACVH,EAAMG,EAAM,IAAM,IAClBH,EAAMG,EAAM,IAAM,KAClB
H,EAAMG,EAAM,IAAM,IACvBA,GAAO,EAKT,GAAe,IAAXxS,GAAgB4S,MAAM5S,GACxB,MAAM,IAAIjD,MAAM,kBAAkBiD,SAAcwS,iBAAmBF,KAErE,GAAItS,EAASyS,EACX,MAAM,IAAI1V,MAAM,2CAElBkU,GAAU7V,EAAQqX,EAASzS,EAAQ5E,EAAQqX,EAAQE,GACnDF,GAAUE,CACZ,KAhEqB,CAEnB,IAAIA,GAAOD,IAAM,GAAK,EAEtB,GAAIC,EAAM,GAAI,CACZ,GAAIH,EAAM,GAAKF,EACb,MAAM,IAAIvV,MAAM,+CAElB,MAAM8V,EAAaF,EAAM,GACzBA,EAAMN,EAAMG,IACPH,EAAMG,EAAM,IAAM,IAClBH,EAAMG,EAAM,IAAM,KAClBH,EAAMG,EAAM,IAAM,IACvBG,EAAsC,GAA/BA,EAAM3B,GAAU6B,IACvBL,GAAOK,CACT,CACA,GAAIL,EAAMG,EAAML,EACd,MAAM,IAAIvV,MAAM,6CAElBkU,GAAUoB,EAAOG,EAAKpX,EAAQqX,EAAQE,GACtCH,GAAOG,EACPF,GAAUE,CACZ,CA2CF,CAEA,GAAIF,IAAWF,EAAc,MAAM,IAAIxV,MAAM,yBAC/C,CCHI+V,CAAiBd,EAAiBG,EAGpC,CACA,GAAIA,GAAM5W,SAAW0W,EACnB,MAAM,IAAIlV,MAAM,oCAAoCoV,GAAM5W,gCAAgC0W,KAE5F,OAAOE,CACT,CAWO,SAASY,GAAef,EAAiBgB,EAAI9X,GAClD,MACM4E,EAAS,CAAEG,KADJ,IAAIyC,SAASsP,EAAgB5U,OAAQ4U,EAAgB3U,WAAY2U,EAAgB9R,YACvEF,OAAQ,IACzBnE,KAAEA,EAAIH,QAAEA,EAAO6D,WAAEA,EAAU8F,MAAEA,EAAK6M,YAAEA,GAAgBhX,EACpD+X,EAAQD,EAAGE,oBACjB,IAAKD,EAAO,MAAM,IAAIlW,MAAM,4CAG5B,MAAMuO,EA2DR,SAAgCxL,EAAQmT,EAAO1T,GAC7C,MAAMkS,EAAqBnS,EAAsBC,GACjD,IAAKkS,EAAoB,MAAO,GAEhC,MAAMlQ,EAAS,IAAIlF,MAAM4W,EAAM3N,YAE/B,OADA0J,EAAuBlP,EAAQ9C,EAASyU,GAAqBlQ,EAAQ0R,EAAME,+BACpE5R,CACT,CAlE2B6R,CAAuBtT,EAAQmT,EAAO1T,GAC/DO,EAAOE,OAASiT,EAAME,8BAGtB,MAAM9H,EAsER,SAAgCvL,EAAQmT,EAAO1T,GAC7C,MAAMiM,EAAqB9L,EAAsBH,GACjD,GAAIiM,EAAoB,CAEtB,MAAMjK,EAAS,IAAIlF,MAAM4W,EAAM3N,YAE/B,OADA0J,EAAuBlP,EAAQ9C,EAASwO,GAAqBjK,EAAQ0R,EAAMI,+BACpE9R,CACT,CACF,CA9E2B+R,CAAuBxT,EAAQmT,EAAO1T,GAGzDgU,EAAuBP,EAAGf,uBAAyBgB,EAAMI,8BAAgCJ,EAAME,8BAErG,IAAIhB,EAAOH,EAAgBhC,SAASlQ,EAAOE,SACf,IAAxBiT,EAAMO,gBACRrB,EAAOJ,GAAeI,EAAMoB,EAAsBlO,EAAO6M,IAE3D,MAAMuB,EAAW,IAAI/Q,SAASyP,EAAK/U,OAAQ+U,EAAK9U,WAAY8U,EAAKjS,YAC3DwT,EAAa,CAAEzT,KAAMwT,EAAUzT,OAAQ,GAI7C,IAAIwR,EACJ,MAAMK,EAAUoB,EAAM3N,WAAa2N,EAAMU,UACzC,GAAuB,UAAnBV,EAAMhY,SACRuW,EAAWvB,GAAUyD,EAAY7X,EAAMgW,EAASnW,EAAQiI,kBACnD,GAAuB,QAAnBsP,EAAMhY,SAEfuW,EAAW,IAAInV,MAAMwV,GACrB7C,EAAuB0E,EAAY,EAAGlC,GACtCA,EAAWA,EAAS9U,IAAIoV,KAAOA,QAC1B,GACc,qBAAnBmB,EAAMhY,UACa,mBAAnBgY,EAAMhY,SACN,CACA,MAAM+B,EAAWyW,EAASvS,SAASwS,EAAW1T,UAC9CwR,EAAW,IAAInV,MAAMwV,GACrB7C,EAAuB0E,EAAY1W,EAAUwU,EAAU+B,EAAuB,EAChF,MAAO,GAAuB,wBAAnBN,EAAMhY,SAAoC,CAEnDuW,EADuB,UAAT3V,EACK,IAAI0B,WAAWsU,GAAW,IAAI3U,cAAc2U,GAC/D5D,EAAkByF,EAAY7B,EAASL,EACzC,MAAO,GAAuB,4BAAnByB,EAAMhY,SACfuW,EAAW,IAAInV,MAAMwV,GACrBhD,EAAqB6E,EAAY7B,EAASL,QACrC,GAAuB,qBAAnByB,EAAMhY,SACfuW,EAAW,IAAInV,MAAMwV,GJ9GlB,SAAwB/R,EAAQlB,EAAOxD,GAC5C,MAAMwY,EAAa,IAAIrW,WAAWqB,GAClCqP,EAAkBnO,EAAQlB,EAAOgV,GACjC,MAAMC,EAAa,IAAItW,WAAWqB,GAClCqP,EAAkBnO,EAAQlB,EAAOiV,GAEjC,IAAK,IAAIrY,EAAI,EAAGA,EAAIoD,EAAOpD,IAAK,CAC9B,MAAMsY,EAAS,IAAIzY,WAAWyE,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAQ6T,EAAWrY,IACjGoY,EAAWpY,IAEbJ,EAAOI,GAAK,IAAIH,WAAWuY,EAAWpY,GAAKqY,EAAWrY,IACtDJ,EAAOI,GAAG0H,IAAI9H,EAAOI,EAAI,GAAGwU,SAAS,EAAG4D,EAAWpY,KACnDJ,EAAOI,GAAG0H,IAAI4Q,EAAQF,EAAWpY,KAEjCJ,EAAOI,GAAKsY,EAEdhU,EAAOE,QAAU6T,EAAWrY,EAC9B,CACF,CI6FIuY,CAAeL,EAAY7B,EAASL,OAC/B,IAAuB,sBAAnByB,EAAMhY,SAGf,MAAM,IAAI8B,MAAM,iCAAiCkW,EAAMhY,YAFvDuW,EAAW/B,GAAgB3P,EAAQ+R,EAAShW,EAAMH,EAAQiI,YAG5D,CAEA,MAAO,CAAE0H,mBAAkBC,mBAAkBkG,WAC/C,CCxLO,SAASwC,GAAWlU,GAAQmU,WAAEA,EAAUC,YAAEA,EAAWC,UAAEA,GAAajZ,EAAekZ,GACxF,MAAMC,WAAEA,GAAenZ,EAEjB4P,EAAS,GAEf,IAAI9P,EAEAsZ,EACAC,EAAW,EAEf,MAAMC,EAAgBJ,SACpBE,GAAaF,EAAO,CAClBC,aACAI,WAAYH,EACZI,SAAUT,EAAaM,EAAWD,EAAU/Y,OAC5CoZ,OAAQV,EAAaM,GAExB,GAED,KAAOA,EAAWJ,KACZrU,EAAOE,QAAUF,EAAOG,KAAKC,WAAa,IADnB,CAI3B,MAAMkP,EAASwF,GAAc9U,GAC7B,GAAoB,oBAAhBsP,EAAOvT,KAETb,EAAa6Z,GAAS/U,EAAQsP,EAAQlU,
EAAeF,OAAY0N,EAAW,GAC5E1N,EAAaS,EAAQT,EAAYE,OAC5B,CACL,MAAM4Z,EAAkBR,GAAW/Y,QAAU,EACvCgG,EAASsT,GAAS/U,EAAQsP,EAAQlU,EAAeF,EAAYsZ,EAAWJ,EAAcK,GACxFD,IAAc/S,EAEhBgT,GAAYhT,EAAOhG,OAASuZ,GAE5BN,MACA1J,EAAO7L,KAAKsC,GACZgT,GAAYhT,EAAOhG,OACnB+Y,EAAY/S,EAEhB,CACF,CAOA,OANAiT,MAEID,EAAWJ,GAAaG,IAE1BxJ,EAAOA,EAAOvP,OAAS,GAAK+Y,EAAU3U,MAAM,EAAGwU,GAAaI,EAAWD,EAAU/Y,UAE5EuP,CACT,CAaO,SAAS+J,GAAS/U,EAAQsP,EAAQlU,EAAeF,EAAY+Z,EAAeC,GACjF,MAAMnZ,KAAEA,EAAIH,QAAEA,EAAO6D,WAAEA,EAAU8F,MAAEA,EAAK6M,YAAEA,GAAgBhX,EAEpD8W,EAAkB,IAAI3W,WAC1ByE,EAAOG,KAAK7C,OAAQ0C,EAAOG,KAAK5C,WAAayC,EAAOE,OAAQoP,EAAO6F,sBAKrE,GAHAnV,EAAOE,QAAUoP,EAAO6F,qBAGJ,cAAhB7F,EAAOvT,KAAsB,CAC/B,MAAM0V,EAAOnC,EAAO8F,iBACpB,IAAK3D,EAAM,MAAM,IAAIxU,MAAM,yCAG3B,GAAIiY,EAAYzD,EAAKjM,YXiClB,SAAsB/F,GAC3B,GAA0B,IAAtBA,EAAWhE,OAAc,OAAO,EACpC,MAAM,CAAGuJ,GAAUvF,EACnB,MAAuC,aAAnCuF,EAAOpJ,QAAQ+D,kBACfqF,EAAOnG,SAASpD,MAEtB,CWvCuC4Z,CAAa5V,GAC9C,OAAO,IAAIlD,MAAMkV,EAAKjM,YAGxB,MAAM6M,EAAOJ,GAAeC,EAAiBzX,OAAO6U,EAAO6C,wBAAyB5M,EAAO6M,IACrF7G,iBAAEA,EAAgBC,iBAAEA,EAAgBkG,SAAEA,GAAaF,GAAaa,EAAMZ,EAAMrW,GAIlF,IAAIqG,EAASzG,EAAsB0W,EAAUxW,EAAYuW,EAAKtW,SAAUC,GACxE,GAAIoQ,EAAiB/P,QAAU8P,GAAkB9P,OAAQ,CAEvD,OAAO6P,EADQ/O,MAAM+Y,QAAQL,GAAiBA,EAAgB,GACjC1J,EAAkBC,EAAkB/J,EAAQhC,EAC3E,CAEE,IAAK,IAAI/D,EAAI,EAAGA,EAAI+D,EAAWhE,OAAQC,IACS,aAA1C+D,EAAW/D,GAAGE,QAAQ+D,kBACxB8B,EAASlF,MAAMoB,KAAK8D,EAAQkD,GAAK,CAACA,KAGtC,OAAOlD,CAEX,CAAO,GAAoB,iBAAhB6N,EAAOvT,KAAyB,CACzC,MAAMoX,EAAQ7D,EAAO8D,oBACrB,IAAKD,EAAO,MAAM,IAAIlW,MAAM,4CAG5B,GAAIiY,EAAY/B,EAAMvO,SACpB,OAAO,IAAIrI,MAAM4W,EAAM3N,YAGzB,MAAM+F,iBAAEA,EAAgBC,iBAAEA,EAAgBkG,SAAEA,GAC1CuB,GAAef,EAAiB5C,EAAQlU,GAGpCqG,EAASzG,EAAsB0W,EAAUxW,EAAYiY,EAAMhY,SAAUC,GAE3E,OAAOkQ,EADQ/O,MAAM+Y,QAAQL,GAAiBA,EAAgB,GACjC1J,EAAkBC,EAAkB/J,EAAQhC,EAC3E,CAAO,GAAoB,oBAAhB6P,EAAOvT,KAA4B,CAC5C,MAAMwZ,EAAOjG,EAAOkG,uBACpB,IAAKD,EAAM,MAAM,IAAItY,MAAM,+CAE3B,MAAMoV,EAAOJ,GACXC,EAAiBzX,OAAO6U,EAAO6C,wBAAyB5M,EAAO6M,GAIjE,OAAOjC,GADQ,CAAEhQ,KAAM,IAAIyC,SAASyP,EAAK/U,OAAQ+U,EAAK9U,WAAY8U,EAAKjS,YAAaF,OAAQ,GACnEnE,EAAMwZ,EAAK/P,WAAY5J,EAAQiI,YAC1D,CACE,MAAM,IAAI5G,MAAM,kCAAkCqS,EAAOvT,OAE7D,CASA,SAAS+Y,GAAc9U,GACrB,MAAMsP,EAASvP,EAA4BC,GAsC3C,MAAO,CACLjE,KApCW3B,EAASkV,EAAO5L,SAqC3ByO,uBApC6B7C,EAAO3L,QAqCpCwR,qBApC2B7F,EAAOxL,QAqClC2R,IApCUnG,EAAOvL,QAqCjBqR,iBApCuB9F,EAAOtL,SAAW,CACzCwB,WAAY8J,EAAOtL,QAAQN,QAC3BvI,SAAUnB,EAASsV,EAAOtL,QAAQL,SAClC+R,0BAA2B1b,EAASsV,EAAOtL,QAAQF,SACnD6R,0BAA2B3b,EAASsV,EAAOtL,QAAQD,SACnDiC,WAAYsJ,EAAOtL,QAAQA,SAAW,CACpCvB,IAAK6M,EAAOtL,QAAQA,QAAQN,QAC5B2E,IAAKiH,EAAOtL,QAAQA,QAAQL,QAC5B2E,WAAYgH,EAAOtL,QAAQA,QAAQF,QACnCyE,eAAgB+G,EAAOtL,QAAQA,QAAQD,QACvCyE,UAAW8G,EAAOtL,QAAQA,QAAQA,QAClCyE,UAAW6G,EAAOtL,QAAQA,QAAQC,UA0BpC2R,kBAvBwBtG,EAAOrL,QAwB/BuR,uBAvB6BlG,EAAOpL,SAAW,CAC/CsB,WAAY8J,EAAOpL,QAAQR,QAC3BvI,SAAUnB,EAASsV,EAAOpL,QAAQP,SAClCkS,UAAWvG,EAAOpL,QAAQJ,SAqB1BsP,oBAnB0B9D,EAAOlL,SAAW,CAC5CoB,WAAY8J,EAAOlL,QAAQV,QAC3BmQ,UAAWvE,EAAOlL,QAAQT,QAC1BiB,SAAU0K,EAAOlL,QAAQN,QACzB3I,SAAUnB,EAASsV,EAAOlL,QAAQL,SAClCwP,8BAA+BjE,EAAOlL,QAAQJ,QAC9CqP,8BAA+B/D,EAAOlL,QAAQH,QAC9CyP,mBAA0C9K,IAA3B0G,EAAOlL,QAAQF,SAA+BoL,EAAOlL,QAAQF,QAC5E8B,WAAYsJ,EAAOlL,QAAQA,SAa/B,CCvHOlC,eAAe4T,IAAiBC,aAAEA,GAAgB3B,EAAaC,EAAWtP,EAASiR,GACxF,MAAMC,EAAY,IAAI1Z,MAAM8X,GAItB6B,QAAoBC,QAAQC,IAAIL,EAAanZ,IAAI,EAAG3B,UAAWA,EAAKyO,KAAKqB,KAGzEsL,EAAsBN,EACzBnZ,IAAIqC,GAASA,EAAMqX,aAAa,IAChC5R,OAAOxF,IAAS6F,GAAWA,EAAQwR,SAASrX,IACzCsX,EAAczR,GAAWsR,EACzBI,EAAgBD,EAAY5Z,IAAIsC,GAAQ6W,EAAaW,UAAU1R,GAAUA,EAAOsR,aAAa,KAAOpX,IAG1G,IAAK,IAAIyX,EAAMvC,EAAauC,EAAMtC,EAAWsC,IAC3C,GAAkB,WAAdX,EAAwB,CAG1B,MAAMY,EAAU,CAAA,EAChB,IAAK,IAAIlb,E
AAI,EAAGA,EAAIqa,EAAata,OAAQC,IACvCkb,EAAQb,EAAara,GAAG4a,aAAa,IAAMJ,EAAYxa,GAAGib,GAE5DV,EAAUU,GAAOC,CACnB,KAAO,CAEL,MAAMA,EAAU,IAAIra,MAAMwZ,EAAata,QACvC,IAAK,IAAIC,EAAI,EAAGA,EAAI8a,EAAY/a,OAAQC,IAClC+a,EAAc/a,IAAM,IACtBkb,EAAQlb,GAAKwa,EAAYO,EAAc/a,IAAIib,IAG/CV,EAAUU,GAAOC,CACnB,CAEF,OAAOX,CACT,CCnGO/T,eAAe2U,GAAYC,GAEhCA,EAAQtT,iBAAmBrB,EAAqB2U,EAAQC,MAGxD,MAAMC,EA6DD,SAA0BF,GAC/B,IAAKA,EAAQtT,SAAU,MAAM,IAAIvG,MAAM,6BAIvC,MAAMga,ET5ED,UAAqBzT,SAAEA,EAAQoR,SAAEA,EAAW,EAACC,OAAEA,EAASrW,IAAQuG,QAAEA,IACvE,IAAKvB,EAAU,MAAM,IAAIvG,MAAM,iCAE/B,MAAMia,EAAS,GAETC,EAAU,GAGhB,IAAIhD,EAAa,EACjB,IAAK,MAAMrP,KAAYtB,EAASqB,WAAY,CAC1C,MAAMuS,EAAY3c,OAAOqK,EAASF,UAC5ByS,EAAWlD,EAAaiD,EAE9B,GAAIA,EAAY,GAAKC,GAAYzC,GAAYT,EAAaU,EAAQ,CAEhE,MAAMyC,EAAS,GAEf,IAAK,MAAMpS,UAAEA,EAASE,UAAEA,KAAeN,EAASC,QAAS,CACvD,GAAIG,EAAW,MAAM,IAAIjI,MAAM,mCAC/B,IAAKmI,EAAW,MAAM,IAAInI,MAAM,wCAE3B8H,IAAWA,EAAQwR,SAASnR,EAAUE,eAAe,KACxDgS,EAAOnY,KAAK+L,EAAe9F,GAE/B,CACA,MAAMgP,EAAc5R,KAAKC,IAAImS,EAAWT,EAAY,GAC9CE,EAAY7R,KAAK6F,IAAIwM,EAASV,EAAYiD,GAChDF,EAAO/X,KAAK,CAAEmY,SAAQxS,WAAUqP,aAAYiD,YAAWhD,cAAaC,cAGpE,MAAMkD,EAAYD,EAAOA,EAAO7b,OAAS,IAAI4P,QAAUiM,EAAO,IAAIlM,UAClE,IAAKrG,GAAWwS,EA3CS,SA6CvBJ,EAAQhY,KAAK,CACXiM,UAAWkM,EAAO,GAAGlM,UACrBC,QAASiM,EAAOA,EAAO7b,OAAS,GAAG4P,eAEhC,GAAIiM,EAAO7b,OAChBuN,EAAOmO,EAASG,QACX,GAAIvS,GAAStJ,OAClB,MAAM,IAAIwB,MAAM,8BAA8B8H,EAAQ2H,KAAK,QAE/D,CAEAyH,EAAakD,CACf,CAGA,OAFKG,SAAS3C,KAASA,EAASV,GAEzB,CAAE3Q,WAAUoR,WAAUC,SAAQ9P,UAASoS,UAASD,SACzD,CS2BeO,CAAYX,GAIzB,OAHAA,EAAQC,KTPH,SAA6BA,GAAMI,QAAEA,IAE1C,MAAMO,EAAWP,EAAQva,IAAI,EAAGwO,YAAWC,aAAc0L,EAAKlX,MAAMuL,EAAWC,IAC/E,MAAO,CACLjL,WAAY2W,EAAK3W,WACjB,KAAAP,CAAMsK,EAAOC,EAAM2M,EAAK3W,YAEtB,MAAMuX,EAAQR,EAAQT,UAAU,EAAGtL,YAAWC,aAAcD,GAAajB,GAASC,GAAOiB,GACzF,GAAIsM,EAAQ,EAAG,MAAM,IAAI1a,MAAM,0BAA0BkN,MAAUC,MACnE,GAAI+M,EAAQQ,GAAOvM,YAAcjB,GAASgN,EAAQQ,GAAOtM,UAAYjB,EAAK,CAExE,MAAMgF,EAAcjF,EAAQgN,EAAQQ,GAAOvM,UACrCwM,EAAYxN,EAAM+M,EAAQQ,GAAOvM,UACvC,OAAIsM,EAASC,aAAkBxB,QACtBuB,EAASC,GAAOjO,KAAKpM,GAAUA,EAAOuC,MAAMuP,EAAawI,IAEzDF,EAASC,GAAO9X,MAAMuP,EAAawI,EAE9C,CACE,OAAOF,EAASC,EAEpB,EAEJ,CShBiBE,CAAoBf,EAAQC,KAAME,GAG1CA,EAAKC,OAAOta,IAAIkb,GD7ElB,SAAsBhB,GAAStT,SAAEA,EAAQuB,QAAEA,GAAW+S,GAC3D,MAAMf,KAAEA,EAAI3E,YAAEA,EAAWtW,KAAEA,GAASgb,EAG9Bf,EAAe,GAEfla,EAAU,IAAKxB,KAAoByc,EAAQjb,SAGjD,IAAK,MAAMqJ,UAAEA,EAASE,UAAEA,KAAe0S,EAAUhT,SAASC,QAAS,CACjE,GAAIG,EAAW,MAAM,IAAIjI,MAAM,mCAC/B,IAAKmI,EAAW,MAAM,IAAInI,MAAM,wCAGhC,MAAMsX,EAAanP,EAAUE,eAAe,GAC5C,GAAIP,IAAYA,EAAQwR,SAAShC,GAAa,SAE9C,MAAMnJ,UAAEA,EAASC,QAAEA,GAAYH,EAAe9F,GACxC2S,EAAc1M,EAAUD,EAI9B,GAAI2M,EAAc,GAAK,GAAI,CACzBC,QAAQC,KAAK,iCAAiC7S,EAAUE,mBAAmByS,WAE3E,QACF,CAIA,MAAMza,EAAS6Y,QAAQ+B,QAAQnB,EAAKlX,MAAMuL,EAAWC,IAGrD0K,EAAa5W,KAAK,CAChBmX,aAAclR,EAAUE,eACxBrK,KAAMqC,EAAOoM,KAAKpG,IAChB,MAAM7D,EAAaL,EAAcoE,EAAS9E,OAAQ0G,EAAUE,gBACtDtF,EAAS,CAAEG,KAAM,IAAIyC,SAASU,GAAcpD,OAAQ,GAEpD9E,EAAgB,CACpBmZ,WAFgBnP,EAAUE,eAAeoH,KAAK,KAG9C3Q,KAAMqJ,EAAUrJ,KAChBH,QAAS6D,EAAWA,EAAWhE,OAAS,GAAGG,QAC3C6D,aACA8F,MAAOH,EAAUG,MACjB1J,UACAuW,cACAtW,QAEF,OAAOoY,GAAWlU,EAAQ8X,EAAW1c,EAAe0b,EAAQxC,WAGlE,CAEA,MAAO,CAAEH,WAAY2D,EAAU3D,WAAYiD,UAAWU,EAAUV,UAAWrB,eAC7E,CCsBsCoC,CAAarB,EAASG,EAAMa,GAClE,CAvEsBM,CAAiBtB,IAE/BlC,SAAEA,EAAW,EAACC,OAAEA,EAAM9P,QAAEA,EAAOsT,QAAEA,EAAOC,WAAEA,EAAUtC,UAAEA,GAAcc,EAG1E,IAAKwB,IAAeD,EAAS,CAC3B,IAAK,MAAMtC,aAAEA,KAAkBiB,EAC7B,IAAK,MAAM/b,KAAEA,KAAU8a,QAAoB9a,EAE7C,MACF,CAGA,MAAMwD,EX8JD,UAAuBC,OAAEA,IAC9B,OAAOU,EAAcV,EAAQ,IAAI,EACnC,CWhKqB6Z,CAAczB,EAAQtT,UACnCgV,EAAYxB,EAAYpa,IAAI6b,GDyF7B,SAAuBC,EAAeja,GAC3C,MAAMsX,aAAEA,GAAiB2C,EAEnBF,EAAY,GAClB,IAAK,MA
AMvZ,KAASR,EAAWI,SAC7B,GAAII,EAAMJ,SAASpD,OAAQ,CACzB,MAAMkd,EAAe5C,EAAarR,OAAOM,GAAUA,EAAOsR,aAAa,KAAOrX,EAAMrD,QAAQsD,MAC5F,IAAKyZ,EAAald,OAAQ,SAI1B,MAAMmd,EAAW,IAAIjO,IACf1P,EAAOkb,QAAQC,IAAIuC,EAAa/b,IAAIoI,GACjCA,EAAO/J,KAAKyO,KAAKiL,IACtBiE,EAASxV,IAAI4B,EAAOsR,aAAa5J,KAAK,KAAM3B,EAAQ4J,QAEpDjL,KAAK,KAEP6C,EAAeqM,EAAU3Z,GACzB,MAAM4Z,EAAaD,EAAS7O,IAAI9K,EAAML,KAAK8N,KAAK,MAChD,IAAKmM,EAAY,MAAM,IAAI5b,MAAM,qCACjC,MAAO,CAAC4b,KAGVL,EAAUrZ,KAAK,CAAEmX,aAAcrX,EAAML,KAAM3D,QAC7C,KAAO,CAEL,MAAM6d,EAAc/C,EAAaxW,KAAKyF,GAAUA,EAAOsR,aAAa,KAAOrX,EAAMrD,QAAQsD,MACrF4Z,GACFN,EAAUrZ,KAAK2Z,EAEnB,CAEF,MAAO,IAAKJ,EAAe3C,aAAcyC,EAC3C,CC3H2CO,CAAcN,EAAKha,IAG5D,GAAI4Z,EACF,IAAK,MAAMW,KAAcR,EACvB,IAAK,MAAMM,KAAeE,EAAWjD,aACnC+C,EAAY7d,KAAKyO,KAAKwM,IACpB,IAAItB,EAAWoE,EAAW7E,WAC1B,IAAK,MAAMQ,KAAcuB,EACvBmC,EAAQ,CACN9D,WAAYuE,EAAYxC,aAAa,GACrC3B,aACAC,WACAC,OAAQD,EAAWD,EAAWlZ,SAEhCmZ,GAAYD,EAAWlZ,SAQjC,GAAI6c,EAAY,CAEd,MAAMW,EAAO,GACb,IAAK,MAAMD,KAAcR,EAAW,CAElC,MAAMpE,EAAc5R,KAAKC,IAAImS,EAAWoE,EAAW7E,WAAY,GACzDE,EAAY7R,KAAK6F,KAAKwM,GAAUrW,KAAYwa,EAAW7E,WAAY6E,EAAW5B,WAGpFpO,EAAOiQ,SADiBnD,GAAiBkD,EAAY5E,EAAaC,EAAWtP,EAASiR,IAC/DnW,MAAMuU,EAAaC,GAC5C,CACAiE,EAAWW,EACb,MAEE,IAAK,MAAMlD,aAAEA,KAAkByC,EAC7B,IAAK,MAAMvd,KAAEA,KAAU8a,QAAoB9a,CAGjD,CC/CO,SAASie,GAAUC,GACxB,MAAMC,EAAK,IAAIxW,SAASuW,EAAI7b,OAAQ6b,EAAI5b,WAAY4b,EAAI/Y,YACxD,IAAIF,EAAS,EAGb,MAAMmZ,EAAYF,EAAIjZ,GAASA,GAAU,EACzC,MAAMoZ,EAA+B,IAAdD,EAGjBE,EAAeH,EAAGvW,UAAU3C,EAAQoZ,GAI1C,GAHApZ,GAAU,EA1Cc,IA6CpBqZ,EAAoC,CAEtC,MAAMvH,EAAIoH,EAAGtY,WAAWZ,EAAQoZ,GAAiBpZ,GAAU,EAC3D,MAAMsZ,EAAIJ,EAAGtY,WAAWZ,EAAQoZ,GAChC,OADiDpZ,GAAU,EACpD,CAAEnE,KAAM,QAAS0d,YAAa,CAACzH,EAAGwH,GAC3C,CAAO,GAjDsB,IAiDlBD,EAAyC,CAElD,MAAMG,EAAYN,EAAGvW,UAAU3C,EAAQoZ,GAAiBpZ,GAAU,EAClE,MAAMyZ,EAAS,GACf,IAAK,IAAIje,EAAI,EAAGA,EAAIge,EAAWhe,IAAK,CAClC,MAAMsW,EAAIoH,EAAGtY,WAAWZ,EAAQoZ,GAAiBpZ,GAAU,EAC3D,MAAMsZ,EAAIJ,EAAGtY,WAAWZ,EAAQoZ,GAAiBpZ,GAAU,EAC3DyZ,EAAOxa,KAAK,CAAC6S,EAAGwH,GAClB,CACA,MAAO,CAAEzd,KAAM,aAAc0d,YAAaE,EAC5C,CAAO,GA1DmB,IA0DfJ,EAAsC,CAE/C,MAAMK,EAAWR,EAAGvW,UAAU3C,EAAQoZ,GAAiBpZ,GAAU,EACjE,MAAMyZ,EAAS,GACf,IAAK,IAAIE,EAAI,EAAGA,EAAID,EAAUC,IAAK,CACjC,MAAMH,EAAYN,EAAGvW,UAAU3C,EAAQoZ,GAAiBpZ,GAAU,EAClE,MAAM4Z,EAAO,GACb,IAAK,IAAIC,EAAI,EAAGA,EAAIL,EAAWK,IAAK,CAClC,MAAM/H,EAAIoH,EAAGtY,WAAWZ,EAAQoZ,GAAiBpZ,GAAU,EAC3D,MAAMsZ,EAAIJ,EAAGtY,WAAWZ,EAAQoZ,GAAiBpZ,GAAU,EAC3D4Z,EAAK3a,KAAK,CAAC6S,EAAGwH,GAChB,CACAG,EAAOxa,KAAK2a,EACd,CACA,MAAO,CAAE/d,KAAM,UAAW0d,YAAaE,EACzC,CAAO,GAtEwB,IAsEpBJ,EAA2C,CAEpD,MAAMS,EAAcZ,EAAGvW,UAAU3C,EAAQoZ,GAAiBpZ,GAAU,EACpE,MAAM+Z,EAAW,GACjB,IAAK,IAAIve,EAAI,EAAGA,EAAIse,EAAate,IAAK,CAEpC,MAAMwe,EAAqC,IAAhBf,EAAIjZ,GAAeA,GAAU,EACxD,MAAMia,EAAWf,EAAGvW,UAAU3C,EAAQga,GACtC,GAD2Dha,GAAU,EAhF/C,IAiFlBia,EACF,MAAM,IAAIld,MAAM,yCAAyCkd,KAE3D,MAAMP,EAAWR,EAAGvW,UAAU3C,EAAQga,GAAqBha,GAAU,EAErE,MAAMka,EAAW,GACjB,IAAK,IAAIP,EAAI,EAAGA,EAAID,EAAUC,IAAK,CACjC,MAAMH,EAAYN,EAAGvW,UAAU3C,EAAQga,GAAqBha,GAAU,EACtE,MAAM4Z,EAAO,GACb,IAAK,IAAIC,EAAI,EAAGA,EAAIL,EAAWK,IAAK,CAClC,MAAM/H,EAAIoH,EAAGtY,WAAWZ,EAAQga,GAAqBha,GAAU,EAC/D,MAAMsZ,EAAIJ,EAAGtY,WAAWZ,EAAQga,GAAqBha,GAAU,EAC/D4Z,EAAK3a,KAAK,CAAC6S,EAAGwH,GAChB,CACAY,EAASjb,KAAK2a,EAChB,CACAG,EAAS9a,KAAKib,EAChB,CACA,MAAO,CAAEre,KAAM,eAAgB0d,YAAaQ,EAC9C,CAAO,GAnGsB,IAmGlBV,EAAyC,CAElD,MAAMG,EAAYN,EAAGvW,UAAU3C,EAAQoZ,GAAiBpZ,GAAU,EAClE,MAAMma,EAAS,GACf,IAAK,IAAI3e,EAAI,EAAGA,EAAIge,EAAWhe,IAAK,CAElC,MAAM4e,EAAsC,IAAhBnB,EAAIjZ,GAAeA,GAAU,EACzD,MAAMqa,EAAYnB,EAAGvW,UAAU3C,EAAQoa,GACvC,GAD6Dpa,GAAU,EA7GnD,IA8GhBqa,EACF,MAAM,IAAItd,MAAM,qCAAqCsd,KAEvD,MAAMvI,EAAIoH,EAAGtY,WAAWZ,EAAQoa,GAAsBpa,GAAU,EAChE
,MAAMsZ,EAAIJ,EAAGtY,WAAWZ,EAAQoa,GAAsBpa,GAAU,EAChEma,EAAOlb,KAAK,CAAC6S,EAAGwH,GAClB,CACA,MAAO,CAAEzd,KAAM,aAAc0d,YAAaY,EAC5C,CAAO,GAlH2B,IAkHvBd,EAA8C,CAEvD,MAAMiB,EAAiBpB,EAAGvW,UAAU3C,EAAQoZ,GAAiBpZ,GAAU,EACvE,MAAMua,EAAc,GACpB,IAAK,IAAI/e,EAAI,EAAGA,EAAI8e,EAAgB9e,IAAK,CAEvC,MAAMgf,EAAqC,IAAhBvB,EAAIjZ,GAAeA,GAAU,EACxD,MAAMya,EAAWvB,EAAGvW,UAAU3C,EAAQwa,GACtC,GAD2Dxa,GAAU,EA5H5C,IA6HrBya,EACF,MAAM,IAAI1d,MAAM,+CAA+C0d,KAEjE,MAAMjB,EAAYN,EAAGvW,UAAU3C,EAAQoZ,GAAiBpZ,GAAU,EAClE,MAAMyZ,EAAS,GACf,IAAK,IAAII,EAAI,EAAGA,EAAIL,EAAWK,IAAK,CAClC,MAAM/H,EAAIoH,EAAGtY,WAAWZ,EAAQwa,GAAqBxa,GAAU,EAC/D,MAAMsZ,EAAIJ,EAAGtY,WAAWZ,EAAQwa,GAAqBxa,GAAU,EAC/DyZ,EAAOxa,KAAK,CAAC6S,EAAGwH,GAClB,CACAiB,EAAYtb,KAAKwa,EACnB,CACA,MAAO,CAAE5d,KAAM,kBAAmB0d,YAAagB,EACjD,CACE,MAAM,IAAIxd,MAAM,8BAA8Bsc,IAElD,CChIOrX,eAAe0Y,IAAU7D,KAAEA,EAAI3E,YAAEA,IACtC,MAAM5O,QAAiBrB,EAAqB4U,GACtC8D,EAAcrX,EAASmC,oBAAoBpG,KAAKub,GAAiB,QAAXA,EAAGhT,KAC/D,IAAK+S,EACH,MAAM,IAAI5d,MAAM,mDAIlB,MAAM8d,EAAYje,KAAKC,MAAM8d,EAAY7c,OAAS,MAG5C/C,QFwG2B6b,EExGK,CAAEC,OAAMvT,WAAU1H,MAAM,EAAOsW,eFyG9D,IAAI+D,QAAQ,CAACmC,EAAY0C,KAC9BnE,GAAY,CACVb,UAAW,YACRc,EACHwB,eACC2C,MAAMD,MANN,IAA4BlE,EErGjC,MAAMoE,EAAW,GACXC,EAAgBJ,EAAUK,gBAAkB,WAClD,IAAK,MAAMzE,KAAO1b,EAAM,CACtB,MAAMke,EAAMxC,EAAIwE,GAChB,IAAKhC,EAEH,SAGF,MAAMkC,EAAWnC,GAAUC,GAIrBmC,EAAa,CAAA,EACnB,IAAK,MAAMxT,KAAOoG,OAAOV,KAAKmJ,GAAM,CAClC,MAAM3Y,EAAQ2Y,EAAI7O,GACdA,IAAQqT,GAA2B,OAAVnd,IAC3Bsd,EAAWxT,GAAO9J,EAEtB,CAGA,MAAMud,EAAU,CACdxf,KAAM,UACNsf,WACAC,cAGFJ,EAAS/b,KAAKoc,EAChB,CAEA,MAAO,CACLxf,KAAM,oBACNmf,WAEJ,EC7DAhZ,iBAEE,MAAMyI,IAAEA,SAAc6Q,OAAOC,KAAKC,cAAc,QAG1C9e,EAAM,IAAI+N,EAFsBgR,SAASC,eAAe,OAErC,CACvBC,OAAQ,CAAEC,IAAK,GAAIC,KAAK,IACxBC,KAAM,IAMR,IAEE,MAAMjF,EAAOvM,QACLrB,EAAmB,CAAEC,IALZ,0DAK6BhJ,WAAY,SAE1D4X,QAAQiE,IAAI,mBAAoBlF,GAChC,MAAMmF,QAAgBtB,GAAU,CAAE7D,SAElCiB,QAAQiE,IAAI,WAAYC,GAGxBtf,EAAI3B,KAAKkhB,WAAWD,EACtB,CAAE,MAAOE,GACPpE,QAAQoE,MAAM,4CAA6CA,EAC7D,CACF,CACAC","x_google_ignoreList":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]} \ No newline at end of file +{"version":3,"file":"bundle.min.js","sources":["../node_modules/hyparquet/src/constants.js","../node_modules/hyparquet/src/wkb.js","../node_modules/hyparquet/src/convert.js","../node_modules/hyparquet/src/schema.js","../node_modules/hyparquet/src/thrift.js","../node_modules/hyparquet/src/metadata.js","../node_modules/hyparquet/src/utils.js","../node_modules/hyparquet/src/plan.js","../node_modules/hyparquet/src/assemble.js","../node_modules/hyparquet/src/delta.js","../node_modules/hyparquet/src/encoding.js","../node_modules/hyparquet/src/plain.js","../node_modules/hyparquet/src/snappy.js","../node_modules/hyparquet/src/datapage.js","../node_modules/hyparquet/src/column.js","../node_modules/hyparquet/src/rowgroup.js","../node_modules/hyparquet/src/read.js","../src/toGeoJson.js","demo.js"],"sourcesContent":["/** @type {import('../src/types.d.ts').ParquetType[]} */\nexport const ParquetType = [\n 'BOOLEAN',\n 'INT32',\n 'INT64',\n 'INT96', // deprecated\n 'FLOAT',\n 'DOUBLE',\n 'BYTE_ARRAY',\n 'FIXED_LEN_BYTE_ARRAY',\n]\n\n/** @type {import('../src/types.d.ts').Encoding[]} */\nexport const Encoding = [\n 'PLAIN',\n 'GROUP_VAR_INT', // deprecated\n 'PLAIN_DICTIONARY',\n 'RLE',\n 'BIT_PACKED', // deprecated\n 'DELTA_BINARY_PACKED',\n 'DELTA_LENGTH_BYTE_ARRAY',\n 'DELTA_BYTE_ARRAY',\n 'RLE_DICTIONARY',\n 'BYTE_STREAM_SPLIT',\n]\n\n/** @type {import('../src/types.d.ts').FieldRepetitionType[]} */\nexport const FieldRepetitionType = [\n 'REQUIRED',\n 'OPTIONAL',\n 'REPEATED',\n]\n\n/** @type 
{import('../src/types.d.ts').ConvertedType[]} */\nexport const ConvertedType = [\n 'UTF8',\n 'MAP',\n 'MAP_KEY_VALUE',\n 'LIST',\n 'ENUM',\n 'DECIMAL',\n 'DATE',\n 'TIME_MILLIS',\n 'TIME_MICROS',\n 'TIMESTAMP_MILLIS',\n 'TIMESTAMP_MICROS',\n 'UINT_8',\n 'UINT_16',\n 'UINT_32',\n 'UINT_64',\n 'INT_8',\n 'INT_16',\n 'INT_32',\n 'INT_64',\n 'JSON',\n 'BSON',\n 'INTERVAL',\n]\n\n/** @type {import('../src/types.d.ts').CompressionCodec[]} */\nexport const CompressionCodec = [\n 'UNCOMPRESSED',\n 'SNAPPY',\n 'GZIP',\n 'LZO',\n 'BROTLI',\n 'LZ4',\n 'ZSTD',\n 'LZ4_RAW',\n]\n\n/** @type {import('../src/types.d.ts').PageType[]} */\nexport const PageType = [\n 'DATA_PAGE',\n 'INDEX_PAGE',\n 'DICTIONARY_PAGE',\n 'DATA_PAGE_V2',\n]\n\n/** @type {import('../src/types.d.ts').BoundaryOrder[]} */\nexport const BoundaryOrder = [\n 'UNORDERED',\n 'ASCENDING',\n 'DESCENDING',\n]\n\n/** @type {import('../src/types.d.ts').EdgeInterpolationAlgorithm[]} */\nexport const EdgeInterpolationAlgorithm = [\n 'SPHERICAL',\n 'VINCENTY',\n 'THOMAS',\n 'ANDOYER',\n 'KARNEY',\n]\n","/**\n * WKB (Well-Known Binary) decoder for geometry objects.\n *\n * @import {DataReader, Geometry} from '../src/types.js'\n * @param {DataReader} reader\n * @returns {Geometry} geometry object\n */\nexport function wkbToGeojson(reader) {\n const flags = getFlags(reader)\n\n if (flags.type === 1) { // Point\n return { type: 'Point', coordinates: readPosition(reader, flags) }\n } else if (flags.type === 2) { // LineString\n return { type: 'LineString', coordinates: readLine(reader, flags) }\n } else if (flags.type === 3) { // Polygon\n return { type: 'Polygon', coordinates: readPolygon(reader, flags) }\n } else if (flags.type === 4) { // MultiPoint\n const points = []\n for (let i = 0; i < flags.count; i++) {\n points.push(readPosition(reader, getFlags(reader)))\n }\n return { type: 'MultiPoint', coordinates: points }\n } else if (flags.type === 5) { // MultiLineString\n const lines = []\n for (let i = 0; i < flags.count; i++) {\n lines.push(readLine(reader, getFlags(reader)))\n }\n return { type: 'MultiLineString', coordinates: lines }\n } else if (flags.type === 6) { // MultiPolygon\n const polygons = []\n for (let i = 0; i < flags.count; i++) {\n polygons.push(readPolygon(reader, getFlags(reader)))\n }\n return { type: 'MultiPolygon', coordinates: polygons }\n } else if (flags.type === 7) { // GeometryCollection\n const geometries = []\n for (let i = 0; i < flags.count; i++) {\n geometries.push(wkbToGeojson(reader))\n }\n return { type: 'GeometryCollection', geometries }\n } else {\n throw new Error(`Unsupported geometry type: ${flags.type}`)\n }\n}\n\n/**\n * @typedef {object} WkbFlags\n * @property {boolean} littleEndian\n * @property {number} type\n * @property {number} dim\n * @property {number} count\n */\n\n/**\n * Extract ISO WKB flags and base geometry type.\n *\n * @param {DataReader} reader\n * @returns {WkbFlags}\n */\nfunction getFlags(reader) {\n const { view } = reader\n const littleEndian = view.getUint8(reader.offset++) === 1\n const rawType = view.getUint32(reader.offset, littleEndian)\n reader.offset += 4\n\n const type = rawType % 1000\n const flags = Math.floor(rawType / 1000)\n\n let count = 0\n if (type > 1 && type <= 7) {\n count = view.getUint32(reader.offset, littleEndian)\n reader.offset += 4\n }\n\n // XY, XYZ, XYM, XYZM\n let dim = 2\n if (flags) dim++\n if (flags === 3) dim++\n\n return { littleEndian, type, dim, count }\n}\n\n/**\n * @param {DataReader} reader\n * @param {WkbFlags} flags\n * @returns {number[]}\n 
*/\nfunction readPosition(reader, flags) {\n const points = []\n for (let i = 0; i < flags.dim; i++) {\n const coord = reader.view.getFloat64(reader.offset, flags.littleEndian)\n reader.offset += 8\n points.push(coord)\n }\n return points\n}\n\n/**\n * @param {DataReader} reader\n * @param {WkbFlags} flags\n * @returns {number[][]}\n */\nfunction readLine(reader, flags) {\n const points = []\n for (let i = 0; i < flags.count; i++) {\n points.push(readPosition(reader, flags))\n }\n return points\n}\n\n/**\n * @param {DataReader} reader\n * @param {WkbFlags} flags\n * @returns {number[][][]}\n */\nfunction readPolygon(reader, flags) {\n const { view } = reader\n const rings = []\n for (let r = 0; r < flags.count; r++) {\n const count = view.getUint32(reader.offset, flags.littleEndian)\n reader.offset += 4\n rings.push(readLine(reader, { ...flags, count }))\n }\n return rings\n}\n","import { wkbToGeojson } from './wkb.js'\n\n/**\n * @import {ColumnDecoder, DecodedArray, Encoding, ParquetParsers} from '../src/types.js'\n */\n\nconst decoder = new TextDecoder()\n\n/**\n * Default type parsers when no custom ones are given\n * @type ParquetParsers\n */\nexport const DEFAULT_PARSERS = {\n timestampFromMilliseconds(millis) {\n return new Date(Number(millis))\n },\n timestampFromMicroseconds(micros) {\n return new Date(Number(micros / 1000n))\n },\n timestampFromNanoseconds(nanos) {\n return new Date(Number(nanos / 1000000n))\n },\n dateFromDays(days) {\n return new Date(days * 86400000)\n },\n stringFromBytes(bytes) {\n return bytes && decoder.decode(bytes)\n },\n geometryFromBytes(bytes) {\n return bytes && wkbToGeojson({ view: new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength), offset: 0 })\n },\n geographyFromBytes(bytes) {\n return bytes && wkbToGeojson({ view: new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength), offset: 0 })\n },\n}\n\n/**\n * Convert known types from primitive to rich, and dereference dictionary.\n *\n * @param {DecodedArray} data series of primitive types\n * @param {DecodedArray | undefined} dictionary\n * @param {Encoding} encoding\n * @param {ColumnDecoder} columnDecoder\n * @returns {DecodedArray} series of rich types\n */\nexport function convertWithDictionary(data, dictionary, encoding, columnDecoder) {\n if (dictionary && encoding.endsWith('_DICTIONARY')) {\n let output = data\n if (data instanceof Uint8Array && !(dictionary instanceof Uint8Array)) {\n // @ts-expect-error upgrade data to match dictionary type with fancy constructor\n output = new dictionary.constructor(data.length)\n }\n for (let i = 0; i < data.length; i++) {\n output[i] = dictionary[data[i]]\n }\n return output\n } else {\n return convert(data, columnDecoder)\n }\n}\n\n/**\n * Convert known types from primitive to rich.\n *\n * @param {DecodedArray} data series of primitive types\n * @param {Pick} columnDecoder\n * @returns {DecodedArray} series of rich types\n */\nexport function convert(data, columnDecoder) {\n const { element, parsers, utf8 = true } = columnDecoder\n const { type, converted_type: ctype, logical_type: ltype } = element\n if (ctype === 'DECIMAL') {\n const scale = element.scale || 0\n const factor = 10 ** -scale\n const arr = new Array(data.length)\n for (let i = 0; i < arr.length; i++) {\n if (data[i] instanceof Uint8Array) {\n arr[i] = parseDecimal(data[i]) * factor\n } else {\n arr[i] = Number(data[i]) * factor\n }\n }\n return arr\n }\n if (!ctype && type === 'INT96') {\n return Array.from(data).map(v => 
parsers.timestampFromNanoseconds(parseInt96Nanos(v)))\n }\n if (ctype === 'DATE') {\n return Array.from(data).map(v => parsers.dateFromDays(v))\n }\n if (ctype === 'TIMESTAMP_MILLIS') {\n return Array.from(data).map(v => parsers.timestampFromMilliseconds(v))\n }\n if (ctype === 'TIMESTAMP_MICROS') {\n return Array.from(data).map(v => parsers.timestampFromMicroseconds(v))\n }\n if (ctype === 'JSON') {\n return data.map(v => JSON.parse(decoder.decode(v)))\n }\n if (ctype === 'BSON') {\n throw new Error('parquet bson not supported')\n }\n if (ctype === 'INTERVAL') {\n throw new Error('parquet interval not supported')\n }\n if (ltype?.type === 'GEOMETRY') {\n return data.map(v => parsers.geometryFromBytes(v))\n }\n if (ltype?.type === 'GEOGRAPHY') {\n return data.map(v => parsers.geographyFromBytes(v))\n }\n if (ctype === 'UTF8' || ltype?.type === 'STRING' || utf8 && type === 'BYTE_ARRAY') {\n return data.map(v => parsers.stringFromBytes(v))\n }\n if (ctype === 'UINT_64' || ltype?.type === 'INTEGER' && ltype.bitWidth === 64 && !ltype.isSigned) {\n if (data instanceof BigInt64Array) {\n return new BigUint64Array(data.buffer, data.byteOffset, data.length)\n }\n const arr = new BigUint64Array(data.length)\n for (let i = 0; i < arr.length; i++) arr[i] = BigInt(data[i])\n return arr\n }\n if (ctype === 'UINT_32' || ltype?.type === 'INTEGER' && ltype.bitWidth === 32 && !ltype.isSigned) {\n if (data instanceof Int32Array) {\n return new Uint32Array(data.buffer, data.byteOffset, data.length)\n }\n const arr = new Uint32Array(data.length)\n for (let i = 0; i < arr.length; i++) arr[i] = data[i]\n return arr\n }\n if (ltype?.type === 'FLOAT16') {\n return Array.from(data).map(parseFloat16)\n }\n if (ltype?.type === 'TIMESTAMP') {\n const { unit } = ltype\n /** @type {ParquetParsers[keyof ParquetParsers]} */\n let parser = parsers.timestampFromMilliseconds\n if (unit === 'MICROS') parser = parsers.timestampFromMicroseconds\n if (unit === 'NANOS') parser = parsers.timestampFromNanoseconds\n const arr = new Array(data.length)\n for (let i = 0; i < arr.length; i++) {\n arr[i] = parser(data[i])\n }\n return arr\n }\n return data\n}\n\n/**\n * @param {Uint8Array} bytes\n * @returns {number}\n */\nexport function parseDecimal(bytes) {\n if (!bytes.length) return 0\n\n let value = 0n\n for (const byte of bytes) {\n value = value * 256n + BigInt(byte)\n }\n\n // handle signed\n const bits = bytes.length * 8\n if (value >= 2n ** BigInt(bits - 1)) {\n value -= 2n ** BigInt(bits)\n }\n\n return Number(value)\n}\n\n/**\n * Converts INT96 date format (hi 32bit days, lo 64bit nanos) to nanos since epoch\n * @param {bigint} value\n * @returns {bigint}\n */\nfunction parseInt96Nanos(value) {\n const days = (value >> 64n) - 2440588n\n const nano = value & 0xffffffffffffffffn\n return days * 86400000000000n + nano\n}\n\n/**\n * @param {Uint8Array | undefined} bytes\n * @returns {number | undefined}\n */\nexport function parseFloat16(bytes) {\n if (!bytes) return undefined\n const int16 = bytes[1] << 8 | bytes[0]\n const sign = int16 >> 15 ? -1 : 1\n const exp = int16 >> 10 & 0x1f\n const frac = int16 & 0x3ff\n if (exp === 0) return sign * 2 ** -14 * (frac / 1024) // subnormals\n if (exp === 0x1f) return frac ? 
NaN : sign * Infinity\n return sign * 2 ** (exp - 15) * (1 + frac / 1024)\n}\n","/**\n * Build a tree from the schema elements.\n *\n * @import {SchemaElement, SchemaTree} from '../src/types.d.ts'\n * @param {SchemaElement[]} schema\n * @param {number} rootIndex index of the root element\n * @param {string[]} path path to the element\n * @returns {SchemaTree} tree of schema elements\n */\nfunction schemaTree(schema, rootIndex, path) {\n const element = schema[rootIndex]\n const children = []\n let count = 1\n\n // Read the specified number of children\n if (element.num_children) {\n while (children.length < element.num_children) {\n const childElement = schema[rootIndex + count]\n const child = schemaTree(schema, rootIndex + count, [...path, childElement.name])\n count += child.count\n children.push(child)\n }\n }\n\n return { count, element, children, path }\n}\n\n/**\n * Get schema elements from the root to the given element name.\n *\n * @param {SchemaElement[]} schema\n * @param {string[]} name path to the element\n * @returns {SchemaTree[]} list of schema elements\n */\nexport function getSchemaPath(schema, name) {\n let tree = schemaTree(schema, 0, [])\n const path = [tree]\n for (const part of name) {\n const child = tree.children.find(child => child.element.name === part)\n if (!child) throw new Error(`parquet schema element not found: ${name}`)\n path.push(child)\n tree = child\n }\n return path\n}\n\n/**\n * Get the max repetition level for a given schema path.\n *\n * @param {SchemaTree[]} schemaPath\n * @returns {number} max repetition level\n */\nexport function getMaxRepetitionLevel(schemaPath) {\n let maxLevel = 0\n for (const { element } of schemaPath) {\n if (element.repetition_type === 'REPEATED') {\n maxLevel++\n }\n }\n return maxLevel\n}\n\n/**\n * Get the max definition level for a given schema path.\n *\n * @param {SchemaTree[]} schemaPath\n * @returns {number} max definition level\n */\nexport function getMaxDefinitionLevel(schemaPath) {\n let maxLevel = 0\n for (const { element } of schemaPath.slice(1)) {\n if (element.repetition_type !== 'REQUIRED') {\n maxLevel++\n }\n }\n return maxLevel\n}\n\n/**\n * Check if a column is list-like.\n *\n * @param {SchemaTree} schema\n * @returns {boolean} true if list-like\n */\nexport function isListLike(schema) {\n if (!schema) return false\n if (schema.element.converted_type !== 'LIST') return false\n if (schema.children.length > 1) return false\n\n const firstChild = schema.children[0]\n if (firstChild.children.length > 1) return false\n if (firstChild.element.repetition_type !== 'REPEATED') return false\n\n return true\n}\n\n/**\n * Check if a column is map-like.\n *\n * @param {SchemaTree} schema\n * @returns {boolean} true if map-like\n */\nexport function isMapLike(schema) {\n if (!schema) return false\n if (schema.element.converted_type !== 'MAP') return false\n if (schema.children.length > 1) return false\n\n const firstChild = schema.children[0]\n if (firstChild.children.length !== 2) return false\n if (firstChild.element.repetition_type !== 'REPEATED') return false\n\n const keyChild = firstChild.children.find(child => child.element.name === 'key')\n if (keyChild?.element.repetition_type === 'REPEATED') return false\n\n const valueChild = firstChild.children.find(child => child.element.name === 'value')\n if (valueChild?.element.repetition_type === 'REPEATED') return false\n\n return true\n}\n\n/**\n * Returns true if a column is non-nested.\n *\n * @param {SchemaTree[]} schemaPath\n * @returns {boolean}\n 
*/\nexport function isFlatColumn(schemaPath) {\n if (schemaPath.length !== 2) return false\n const [, column] = schemaPath\n if (column.element.repetition_type === 'REPEATED') return false\n if (column.children.length) return false\n return true\n}\n","// TCompactProtocol types\nexport const CompactType = {\n STOP: 0,\n TRUE: 1,\n FALSE: 2,\n BYTE: 3,\n I16: 4,\n I32: 5,\n I64: 6,\n DOUBLE: 7,\n BINARY: 8,\n LIST: 9,\n SET: 10,\n MAP: 11,\n STRUCT: 12,\n UUID: 13,\n}\n\n/**\n * Parse TCompactProtocol\n *\n * @param {DataReader} reader\n * @returns {{ [key: `field_${number}`]: any }}\n */\nexport function deserializeTCompactProtocol(reader) {\n let lastFid = 0\n /** @type {ThriftObject} */\n const value = {}\n\n while (reader.offset < reader.view.byteLength) {\n // Parse each field based on its type and add to the result object\n const [type, fid, newLastFid] = readFieldBegin(reader, lastFid)\n lastFid = newLastFid\n\n if (type === CompactType.STOP) {\n break\n }\n\n // Handle the field based on its type\n value[`field_${fid}`] = readElement(reader, type)\n }\n\n return value\n}\n\n/**\n * Read a single element based on its type\n *\n * @import {DataReader, ThriftObject, ThriftType} from '../src/types.d.ts'\n * @param {DataReader} reader\n * @param {number} type\n * @returns {ThriftType}\n */\nfunction readElement(reader, type) {\n switch (type) {\n case CompactType.TRUE:\n return true\n case CompactType.FALSE:\n return false\n case CompactType.BYTE:\n // read byte directly\n return reader.view.getInt8(reader.offset++)\n case CompactType.I16:\n case CompactType.I32:\n return readZigZag(reader)\n case CompactType.I64:\n return readZigZagBigInt(reader)\n case CompactType.DOUBLE: {\n const value = reader.view.getFloat64(reader.offset, true)\n reader.offset += 8\n return value\n }\n case CompactType.BINARY: {\n const stringLength = readVarInt(reader)\n const strBytes = new Uint8Array(reader.view.buffer, reader.view.byteOffset + reader.offset, stringLength)\n reader.offset += stringLength\n return strBytes\n }\n case CompactType.LIST: {\n const byte = reader.view.getUint8(reader.offset++)\n const elemType = byte & 0x0f\n let listSize = byte >> 4\n if (listSize === 15) {\n listSize = readVarInt(reader)\n }\n const boolType = elemType === CompactType.TRUE || elemType === CompactType.FALSE\n const values = new Array(listSize)\n for (let i = 0; i < listSize; i++) {\n values[i] = boolType ? 
readElement(reader, CompactType.BYTE) === 1 : readElement(reader, elemType)\n }\n return values\n }\n case CompactType.STRUCT: {\n /** @type {ThriftObject} */\n const structValues = {}\n let lastFid = 0\n while (true) {\n const [fieldType, fid, newLastFid] = readFieldBegin(reader, lastFid)\n lastFid = newLastFid\n if (fieldType === CompactType.STOP) {\n break\n }\n structValues[`field_${fid}`] = readElement(reader, fieldType)\n }\n return structValues\n }\n // TODO: MAP, SET, UUID\n default:\n throw new Error(`thrift unhandled type: ${type}`)\n }\n}\n\n/**\n * Var int aka Unsigned LEB128.\n * Reads groups of 7 low bits until high bit is 0.\n *\n * @param {DataReader} reader\n * @returns {number}\n */\nexport function readVarInt(reader) {\n let result = 0\n let shift = 0\n while (true) {\n const byte = reader.view.getUint8(reader.offset++)\n result |= (byte & 0x7f) << shift\n if (!(byte & 0x80)) {\n return result\n }\n shift += 7\n }\n}\n\n/**\n * Read a varint as a bigint.\n *\n * @param {DataReader} reader\n * @returns {bigint}\n */\nfunction readVarBigInt(reader) {\n let result = 0n\n let shift = 0n\n while (true) {\n const byte = reader.view.getUint8(reader.offset++)\n result |= BigInt(byte & 0x7f) << shift\n if (!(byte & 0x80)) {\n return result\n }\n shift += 7n\n }\n}\n\n/**\n * Values of type int32 and int64 are transformed to a zigzag int.\n * A zigzag int folds positive and negative numbers into the positive number space.\n *\n * @param {DataReader} reader\n * @returns {number}\n */\nexport function readZigZag(reader) {\n const zigzag = readVarInt(reader)\n // convert zigzag to int\n return zigzag >>> 1 ^ -(zigzag & 1)\n}\n\n/**\n * A zigzag int folds positive and negative numbers into the positive number space.\n * This version returns a BigInt.\n *\n * @param {DataReader} reader\n * @returns {bigint}\n */\nexport function readZigZagBigInt(reader) {\n const zigzag = readVarBigInt(reader)\n // convert zigzag to int\n return zigzag >> 1n ^ -(zigzag & 1n)\n}\n\n/**\n * Read field type and field id\n *\n * @param {DataReader} reader\n * @param {number} lastFid\n * @returns {[number, number, number]} [type, fid, newLastFid]\n */\nfunction readFieldBegin(reader, lastFid) {\n const byte = reader.view.getUint8(reader.offset++)\n const type = byte & 0x0f\n if (type === CompactType.STOP) {\n // STOP also ends a struct\n return [0, 0, lastFid]\n }\n const delta = byte >> 4\n const fid = delta ? lastFid + delta : readZigZag(reader)\n return [type, fid, fid]\n}\n","import { CompressionCodec, ConvertedType, EdgeInterpolationAlgorithm, Encoding, FieldRepetitionType, PageType, ParquetType } from './constants.js'\nimport { DEFAULT_PARSERS, parseDecimal, parseFloat16 } from './convert.js'\nimport { getSchemaPath } from './schema.js'\nimport { deserializeTCompactProtocol } from './thrift.js'\n\nexport const defaultInitialFetchSize = 1 << 19 // 512kb\n\nconst decoder = new TextDecoder()\nfunction decode(/** @type {Uint8Array} */ value) {\n return value && decoder.decode(value)\n}\n\n/**\n * Read parquet metadata from an async buffer.\n *\n * An AsyncBuffer is like an ArrayBuffer, but the slices are loaded\n * asynchronously, possibly over the network.\n *\n * You must provide the byteLength of the buffer, typically from a HEAD request.\n *\n * In theory, you could use suffix-range requests to fetch the end of the file,\n * and save a round trip. But in practice, this doesn't work because chrome\n * deems suffix-range requests as a not-safe-listed header, and will require\n * a pre-flight. 
So the byteLength is required.\n *\n * To make this efficient, we initially request the last 512kb of the file,\n * which is likely to contain the metadata. If the metadata length exceeds the\n * initial fetch, 512kb, we request the rest of the metadata from the AsyncBuffer.\n *\n * This ensures that we either make one 512kb initial request for the metadata,\n * or a second request for up to the metadata size.\n *\n * @param {AsyncBuffer} asyncBuffer parquet file contents\n * @param {MetadataOptions & { initialFetchSize?: number }} options initial fetch size in bytes (default 512kb)\n * @returns {Promise} parquet metadata object\n */\nexport async function parquetMetadataAsync(asyncBuffer, { parsers, initialFetchSize = defaultInitialFetchSize } = {}) {\n if (!asyncBuffer || !(asyncBuffer.byteLength >= 0)) throw new Error('parquet expected AsyncBuffer')\n\n // fetch last bytes (footer) of the file\n const footerOffset = Math.max(0, asyncBuffer.byteLength - initialFetchSize)\n const footerBuffer = await asyncBuffer.slice(footerOffset, asyncBuffer.byteLength)\n\n // Check for parquet magic number \"PAR1\"\n const footerView = new DataView(footerBuffer)\n if (footerView.getUint32(footerBuffer.byteLength - 4, true) !== 0x31524150) {\n throw new Error('parquet file invalid (footer != PAR1)')\n }\n\n // Parquet files store metadata at the end of the file\n // Metadata length is 4 bytes before the last PAR1\n const metadataLength = footerView.getUint32(footerBuffer.byteLength - 8, true)\n if (metadataLength > asyncBuffer.byteLength - 8) {\n throw new Error(`parquet metadata length ${metadataLength} exceeds available buffer ${asyncBuffer.byteLength - 8}`)\n }\n\n // check if metadata size fits inside the initial fetch\n if (metadataLength + 8 > initialFetchSize) {\n // fetch the rest of the metadata\n const metadataOffset = asyncBuffer.byteLength - metadataLength - 8\n const metadataBuffer = await asyncBuffer.slice(metadataOffset, footerOffset)\n // combine initial fetch with the new slice\n const combinedBuffer = new ArrayBuffer(metadataLength + 8)\n const combinedView = new Uint8Array(combinedBuffer)\n combinedView.set(new Uint8Array(metadataBuffer))\n combinedView.set(new Uint8Array(footerBuffer), footerOffset - metadataOffset)\n return parquetMetadata(combinedBuffer, { parsers })\n } else {\n // parse metadata from the footer\n return parquetMetadata(footerBuffer, { parsers })\n }\n}\n\n/**\n * Read parquet metadata from a buffer synchronously.\n *\n * @param {ArrayBuffer} arrayBuffer parquet file footer\n * @param {MetadataOptions} options metadata parsing options\n * @returns {FileMetaData} parquet metadata object\n */\nexport function parquetMetadata(arrayBuffer, { parsers } = {}) {\n if (!(arrayBuffer instanceof ArrayBuffer)) throw new Error('parquet expected ArrayBuffer')\n const view = new DataView(arrayBuffer)\n\n // Use default parsers if not given\n parsers = { ...DEFAULT_PARSERS, ...parsers }\n\n // Validate footer magic number \"PAR1\"\n if (view.byteLength < 8) {\n throw new Error('parquet file is too short')\n }\n if (view.getUint32(view.byteLength - 4, true) !== 0x31524150) {\n throw new Error('parquet file invalid (footer != PAR1)')\n }\n\n // Parquet files store metadata at the end of the file\n // Metadata length is 4 bytes before the last PAR1\n const metadataLengthOffset = view.byteLength - 8\n const metadataLength = view.getUint32(metadataLengthOffset, true)\n if (metadataLength > view.byteLength - 8) {\n // {metadata}, metadata_length, PAR1\n throw new Error(`parquet 
metadata length ${metadataLength} exceeds available buffer ${view.byteLength - 8}`)\n }\n\n const metadataOffset = metadataLengthOffset - metadataLength\n const reader = { view, offset: metadataOffset }\n const metadata = deserializeTCompactProtocol(reader)\n\n // Parse metadata from thrift data\n const version = metadata.field_1\n /** @type {SchemaElement[]} */\n const schema = metadata.field_2.map((/** @type {any} */ field) => ({\n type: ParquetType[field.field_1],\n type_length: field.field_2,\n repetition_type: FieldRepetitionType[field.field_3],\n name: decode(field.field_4),\n num_children: field.field_5,\n converted_type: ConvertedType[field.field_6],\n scale: field.field_7,\n precision: field.field_8,\n field_id: field.field_9,\n logical_type: logicalType(field.field_10),\n }))\n // schema element per column index\n const columnSchema = schema.filter(e => e.type)\n const num_rows = metadata.field_3\n const row_groups = metadata.field_4.map((/** @type {any} */ rowGroup) => ({\n columns: rowGroup.field_1.map((/** @type {any} */ column, /** @type {number} */ columnIndex) => ({\n file_path: decode(column.field_1),\n file_offset: column.field_2,\n meta_data: column.field_3 && {\n type: ParquetType[column.field_3.field_1],\n encodings: column.field_3.field_2?.map((/** @type {number} */ e) => Encoding[e]),\n path_in_schema: column.field_3.field_3.map(decode),\n codec: CompressionCodec[column.field_3.field_4],\n num_values: column.field_3.field_5,\n total_uncompressed_size: column.field_3.field_6,\n total_compressed_size: column.field_3.field_7,\n key_value_metadata: column.field_3.field_8,\n data_page_offset: column.field_3.field_9,\n index_page_offset: column.field_3.field_10,\n dictionary_page_offset: column.field_3.field_11,\n statistics: convertStats(column.field_3.field_12, columnSchema[columnIndex], parsers),\n encoding_stats: column.field_3.field_13?.map((/** @type {any} */ encodingStat) => ({\n page_type: PageType[encodingStat.field_1],\n encoding: Encoding[encodingStat.field_2],\n count: encodingStat.field_3,\n })),\n bloom_filter_offset: column.field_3.field_14,\n bloom_filter_length: column.field_3.field_15,\n size_statistics: column.field_3.field_16 && {\n unencoded_byte_array_data_bytes: column.field_3.field_16.field_1,\n repetition_level_histogram: column.field_3.field_16.field_2,\n definition_level_histogram: column.field_3.field_16.field_3,\n },\n geospatial_statistics: column.field_3.field_17 && {\n bbox: column.field_3.field_17.field_1 && {\n xmin: column.field_3.field_17.field_1.field_1,\n xmax: column.field_3.field_17.field_1.field_2,\n ymin: column.field_3.field_17.field_1.field_3,\n ymax: column.field_3.field_17.field_1.field_4,\n zmin: column.field_3.field_17.field_1.field_5,\n zmax: column.field_3.field_17.field_1.field_6,\n mmin: column.field_3.field_17.field_1.field_7,\n mmax: column.field_3.field_17.field_1.field_8,\n },\n geospatial_types: column.field_3.field_17.field_2,\n },\n },\n offset_index_offset: column.field_4,\n offset_index_length: column.field_5,\n column_index_offset: column.field_6,\n column_index_length: column.field_7,\n crypto_metadata: column.field_8,\n encrypted_column_metadata: column.field_9,\n })),\n total_byte_size: rowGroup.field_2,\n num_rows: rowGroup.field_3,\n sorting_columns: rowGroup.field_4?.map((/** @type {any} */ sortingColumn) => ({\n column_idx: sortingColumn.field_1,\n descending: sortingColumn.field_2,\n nulls_first: sortingColumn.field_3,\n })),\n file_offset: rowGroup.field_5,\n total_compressed_size: rowGroup.field_6,\n 
ordinal: rowGroup.field_7,\n }))\n const key_value_metadata = metadata.field_5?.map((/** @type {any} */ keyValue) => ({\n key: decode(keyValue.field_1),\n value: decode(keyValue.field_2),\n }))\n const created_by = decode(metadata.field_6)\n\n return {\n version,\n schema,\n num_rows,\n row_groups,\n key_value_metadata,\n created_by,\n metadata_length: metadataLength,\n }\n}\n\n/**\n * Return a tree of schema elements from parquet metadata.\n *\n * @param {{schema: SchemaElement[]}} metadata parquet metadata object\n * @returns {SchemaTree} tree of schema elements\n */\nexport function parquetSchema({ schema }) {\n return getSchemaPath(schema, [])[0]\n}\n\n/**\n * @param {any} logicalType\n * @returns {LogicalType | undefined}\n */\nfunction logicalType(logicalType) {\n if (logicalType?.field_1) return { type: 'STRING' }\n if (logicalType?.field_2) return { type: 'MAP' }\n if (logicalType?.field_3) return { type: 'LIST' }\n if (logicalType?.field_4) return { type: 'ENUM' }\n if (logicalType?.field_5) return {\n type: 'DECIMAL',\n scale: logicalType.field_5.field_1,\n precision: logicalType.field_5.field_2,\n }\n if (logicalType?.field_6) return { type: 'DATE' }\n if (logicalType?.field_7) return {\n type: 'TIME',\n isAdjustedToUTC: logicalType.field_7.field_1,\n unit: timeUnit(logicalType.field_7.field_2),\n }\n if (logicalType?.field_8) return {\n type: 'TIMESTAMP',\n isAdjustedToUTC: logicalType.field_8.field_1,\n unit: timeUnit(logicalType.field_8.field_2),\n }\n if (logicalType?.field_10) return {\n type: 'INTEGER',\n bitWidth: logicalType.field_10.field_1,\n isSigned: logicalType.field_10.field_2,\n }\n if (logicalType?.field_11) return { type: 'NULL' }\n if (logicalType?.field_12) return { type: 'JSON' }\n if (logicalType?.field_13) return { type: 'BSON' }\n if (logicalType?.field_14) return { type: 'UUID' }\n if (logicalType?.field_15) return { type: 'FLOAT16' }\n if (logicalType?.field_16) return { type: 'VARIANT' }\n if (logicalType?.field_17) return {\n type: 'GEOMETRY',\n crs: decode(logicalType.field_17.field_1),\n }\n if (logicalType?.field_18) return {\n type: 'GEOGRAPHY',\n crs: decode(logicalType.field_18.field_1),\n algorithm: EdgeInterpolationAlgorithm[logicalType.field_18.field_2],\n }\n return logicalType\n}\n\n/**\n * @param {any} unit\n * @returns {TimeUnit}\n */\nfunction timeUnit(unit) {\n if (unit.field_1) return 'MILLIS'\n if (unit.field_2) return 'MICROS'\n if (unit.field_3) return 'NANOS'\n throw new Error('parquet time unit required')\n}\n\n/**\n * Convert column statistics based on column type.\n *\n * @import {AsyncBuffer, FileMetaData, LogicalType, MetadataOptions, MinMaxType, ParquetParsers, SchemaElement, SchemaTree, Statistics, TimeUnit} from '../src/types.d.ts'\n * @param {any} stats\n * @param {SchemaElement} schema\n * @param {ParquetParsers} parsers\n * @returns {Statistics}\n */\nfunction convertStats(stats, schema, parsers) {\n return stats && {\n max: convertMetadata(stats.field_1, schema, parsers),\n min: convertMetadata(stats.field_2, schema, parsers),\n null_count: stats.field_3,\n distinct_count: stats.field_4,\n max_value: convertMetadata(stats.field_5, schema, parsers),\n min_value: convertMetadata(stats.field_6, schema, parsers),\n is_max_value_exact: stats.field_7,\n is_min_value_exact: stats.field_8,\n }\n}\n\n/**\n * @param {Uint8Array | undefined} value\n * @param {SchemaElement} schema\n * @param {ParquetParsers} parsers\n * @returns {MinMaxType | undefined}\n */\nexport function convertMetadata(value, schema, parsers) {\n const { type, 
converted_type, logical_type } = schema\n if (value === undefined) return value\n if (type === 'BOOLEAN') return value[0] === 1\n if (type === 'BYTE_ARRAY') return parsers.stringFromBytes(value)\n const view = new DataView(value.buffer, value.byteOffset, value.byteLength)\n if (type === 'FLOAT' && view.byteLength === 4) return view.getFloat32(0, true)\n if (type === 'DOUBLE' && view.byteLength === 8) return view.getFloat64(0, true)\n if (type === 'INT32' && converted_type === 'DATE') return parsers.dateFromDays(view.getInt32(0, true))\n if (type === 'INT64' && converted_type === 'TIMESTAMP_MILLIS') return parsers.timestampFromMilliseconds(view.getBigInt64(0, true))\n if (type === 'INT64' && converted_type === 'TIMESTAMP_MICROS') return parsers.timestampFromMicroseconds(view.getBigInt64(0, true))\n if (type === 'INT64' && logical_type?.type === 'TIMESTAMP' && logical_type?.unit === 'NANOS') return parsers.timestampFromNanoseconds(view.getBigInt64(0, true))\n if (type === 'INT64' && logical_type?.type === 'TIMESTAMP' && logical_type?.unit === 'MICROS') return parsers.timestampFromMicroseconds(view.getBigInt64(0, true))\n if (type === 'INT64' && logical_type?.type === 'TIMESTAMP') return parsers.timestampFromMilliseconds(view.getBigInt64(0, true))\n if (type === 'INT32' && view.byteLength === 4) return view.getInt32(0, true)\n if (type === 'INT64' && view.byteLength === 8) return view.getBigInt64(0, true)\n if (converted_type === 'DECIMAL') return parseDecimal(value) * 10 ** -(schema.scale || 0)\n if (logical_type?.type === 'FLOAT16') return parseFloat16(value)\n if (type === 'FIXED_LEN_BYTE_ARRAY') return value\n // assert(false)\n return value\n}\n","import { defaultInitialFetchSize } from './metadata.js'\n\n/**\n * Replace bigint, date, etc with legal JSON types.\n *\n * @param {any} obj object to convert\n * @returns {unknown} converted object\n */\nexport function toJson(obj) {\n if (obj === undefined) return null\n if (typeof obj === 'bigint') return Number(obj)\n if (Array.isArray(obj)) return obj.map(toJson)\n if (obj instanceof Uint8Array) return Array.from(obj)\n if (obj instanceof Date) return obj.toISOString()\n if (obj instanceof Object) {\n /** @type {Record} */\n const newObj = {}\n for (const key of Object.keys(obj)) {\n if (obj[key] === undefined) continue\n newObj[key] = toJson(obj[key])\n }\n return newObj\n }\n return obj\n}\n\n/**\n * Concatenate two arrays fast.\n *\n * @param {any[]} aaa first array\n * @param {DecodedArray} bbb second array\n */\nexport function concat(aaa, bbb) {\n const chunk = 10000\n for (let i = 0; i < bbb.length; i += chunk) {\n aaa.push(...bbb.slice(i, i + chunk))\n }\n}\n\n/**\n * Deep equality comparison\n *\n * @param {any} a First object to compare\n * @param {any} b Second object to compare\n * @returns {boolean} true if objects are equal\n */\nexport function equals(a, b) {\n if (a === b) return true\n if (a instanceof Uint8Array && b instanceof Uint8Array) return equals(Array.from(a), Array.from(b))\n if (!a || !b || typeof a !== typeof b) return false\n return Array.isArray(a) && Array.isArray(b)\n ? 
a.length === b.length && a.every((v, i) => equals(v, b[i]))\n : typeof a === 'object' && Object.keys(a).length === Object.keys(b).length && Object.keys(a).every(k => equals(a[k], b[k]))\n}\n\n/**\n * Get the byte length of a URL using a HEAD request.\n * If requestInit is provided, it will be passed to fetch.\n *\n * @param {string} url\n * @param {RequestInit} [requestInit] fetch options\n * @param {typeof globalThis.fetch} [customFetch] fetch function to use\n * @returns {Promise}\n */\nexport async function byteLengthFromUrl(url, requestInit, customFetch) {\n const fetch = customFetch ?? globalThis.fetch\n return await fetch(url, { ...requestInit, method: 'HEAD' })\n .then(res => {\n if (!res.ok) throw new Error(`fetch head failed ${res.status}`)\n const length = res.headers.get('Content-Length')\n if (!length) throw new Error('missing content length')\n return parseInt(length)\n })\n}\n\n/**\n * Construct an AsyncBuffer for a URL.\n * If byteLength is not provided, will make a HEAD request to get the file size.\n * If fetch is provided, it will be used instead of the global fetch.\n * If requestInit is provided, it will be passed to fetch.\n *\n * @param {object} options\n * @param {string} options.url\n * @param {number} [options.byteLength]\n * @param {typeof globalThis.fetch} [options.fetch] fetch function to use\n * @param {RequestInit} [options.requestInit]\n * @returns {Promise}\n */\nexport async function asyncBufferFromUrl({ url, byteLength, requestInit, fetch: customFetch }) {\n if (!url) throw new Error('missing url')\n const fetch = customFetch ?? globalThis.fetch\n // byte length from HEAD request\n byteLength ||= await byteLengthFromUrl(url, requestInit, fetch)\n\n /**\n * A promise for the whole buffer, if range requests are not supported.\n * @type {Promise|undefined}\n */\n let buffer = undefined\n const init = requestInit || {}\n\n return {\n byteLength,\n async slice(start, end) {\n if (buffer) {\n return buffer.then(buffer => buffer.slice(start, end))\n }\n\n const headers = new Headers(init.headers)\n const endStr = end === undefined ? '' : end - 1\n headers.set('Range', `bytes=${start}-${endStr}`)\n\n const res = await fetch(url, { ...init, headers })\n if (!res.ok || !res.body) throw new Error(`fetch failed ${res.status}`)\n\n if (res.status === 200) {\n // Endpoint does not support range requests and returned the whole object\n buffer = res.arrayBuffer()\n return buffer.then(buffer => buffer.slice(start, end))\n } else if (res.status === 206) {\n // The endpoint supports range requests and sent us the requested range\n return res.arrayBuffer()\n } else {\n throw new Error(`fetch received unexpected status code ${res.status}`)\n }\n },\n }\n}\n\n/**\n * Returns a cached layer on top of an AsyncBuffer. 
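// Example (sketch, not part of the bundled source): open a remote parquet
// file as an AsyncBuffer backed by HTTP range requests. The URL is a placeholder.
const file = await asyncBufferFromUrl({ url: 'https://example.com/data.parquet' })
// read the last 8 bytes (metadata length + PAR1 magic) via one range request
const tail = await file.slice(file.byteLength - 8, file.byteLength)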
For caching slices of a file\n * that are read multiple times, possibly over a network.\n *\n * @param {AsyncBuffer} file file-like object to cache\n * @param {{ minSize?: number }} [options]\n * @returns {AsyncBuffer} cached file-like object\n */\nexport function cachedAsyncBuffer({ byteLength, slice }, { minSize = defaultInitialFetchSize } = {}) {\n if (byteLength < minSize) {\n // Cache whole file if it's small\n const buffer = slice(0, byteLength)\n return {\n byteLength,\n async slice(start, end) {\n return (await buffer).slice(start, end)\n },\n }\n }\n const cache = new Map()\n return {\n byteLength,\n /**\n * @param {number} start\n * @param {number} [end]\n * @returns {Awaitable}\n */\n slice(start, end) {\n const key = cacheKey(start, end, byteLength)\n const cached = cache.get(key)\n if (cached) return cached\n // cache miss, read from file\n const promise = slice(start, end)\n cache.set(key, promise)\n return promise\n },\n }\n}\n\n\n/**\n * Returns canonical cache key for a byte range 'start,end'.\n * Normalize int-range and suffix-range requests to the same key.\n *\n * @import {AsyncBuffer, Awaitable, DecodedArray} from '../src/types.d.ts'\n * @param {number} start start byte of range\n * @param {number} [end] end byte of range, or undefined for suffix range\n * @param {number} [size] size of file, or undefined for suffix range\n * @returns {string}\n */\nfunction cacheKey(start, end, size) {\n if (start < 0) {\n if (end !== undefined) throw new Error(`invalid suffix range [${start}, ${end}]`)\n if (size === undefined) return `${start},`\n return `${size + start},${size}`\n } else if (end !== undefined) {\n if (start > end) throw new Error(`invalid empty range [${start}, ${end}]`)\n return `${start},${end}`\n } else if (size === undefined) {\n return `${start},`\n } else {\n return `${start},${size}`\n }\n}\n\n/**\n * Flatten a list of lists into a single list.\n *\n * @param {DecodedArray[]} [chunks]\n * @returns {DecodedArray}\n */\nexport function flatten(chunks) {\n if (!chunks) return []\n if (chunks.length === 1) return chunks[0]\n /** @type {any[]} */\n const output = []\n for (const chunk of chunks) {\n concat(output, chunk)\n }\n return output\n}\n","import { concat } from './utils.js'\n\n// Combine column chunks into a single byte range if less than 32mb\nconst columnChunkAggregation = 1 << 25 // 32mb\n\n/**\n * @import {AsyncBuffer, ByteRange, ColumnMetaData, GroupPlan, ParquetReadOptions, QueryPlan} from '../src/types.js'\n */\n/**\n * Plan which byte ranges to read to satisfy a read request.\n * Metadata must be non-null.\n *\n * @param {ParquetReadOptions} options\n * @returns {QueryPlan}\n */\nexport function parquetPlan({ metadata, rowStart = 0, rowEnd = Infinity, columns }) {\n if (!metadata) throw new Error('parquetPlan requires metadata')\n /** @type {GroupPlan[]} */\n const groups = []\n /** @type {ByteRange[]} */\n const fetches = []\n\n // find which row groups to read\n let groupStart = 0 // first row index of the current group\n for (const rowGroup of metadata.row_groups) {\n const groupRows = Number(rowGroup.num_rows)\n const groupEnd = groupStart + groupRows\n // if row group overlaps with row range, add it to the plan\n if (groupRows > 0 && groupEnd >= rowStart && groupStart < rowEnd) {\n /** @type {ByteRange[]} */\n const ranges = []\n // loop through each column chunk\n for (const { file_path, meta_data } of rowGroup.columns) {\n if (file_path) throw new Error('parquet file_path not supported')\n if (!meta_data) throw new Error('parquet column 
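// Example (sketch, not part of the bundled source): wrap an AsyncBuffer so
// repeated reads of the same byte range (such as the footer during metadata
// parsing) are fetched only once. `file` is assumed from asyncBufferFromUrl above.
const cached = cachedAsyncBuffer(file)
const a = await cached.slice(0, 4) // cache miss, fetched from the file
const b = await cached.slice(0, 4) // same canonical key, served from the cache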
metadata is undefined')\n // add included columns to the plan\n if (!columns || columns.includes(meta_data.path_in_schema[0])) {\n ranges.push(getColumnRange(meta_data))\n }\n }\n const selectStart = Math.max(rowStart - groupStart, 0)\n const selectEnd = Math.min(rowEnd - groupStart, groupRows)\n groups.push({ ranges, rowGroup, groupStart, groupRows, selectStart, selectEnd })\n\n // map group plan to ranges\n const groupSize = ranges[ranges.length - 1]?.endByte - ranges[0]?.startByte\n if (!columns && groupSize < columnChunkAggregation) {\n // full row group\n fetches.push({\n startByte: ranges[0].startByte,\n endByte: ranges[ranges.length - 1].endByte,\n })\n } else if (ranges.length) {\n concat(fetches, ranges)\n } else if (columns?.length) {\n throw new Error(`parquet columns not found: ${columns.join(', ')}`)\n }\n }\n\n groupStart = groupEnd\n }\n if (!isFinite(rowEnd)) rowEnd = groupStart\n\n return { metadata, rowStart, rowEnd, columns, fetches, groups }\n}\n\n/**\n * @param {ColumnMetaData} columnMetadata\n * @returns {ByteRange}\n */\nexport function getColumnRange({ dictionary_page_offset, data_page_offset, total_compressed_size }) {\n const columnOffset = dictionary_page_offset || data_page_offset\n return {\n startByte: Number(columnOffset),\n endByte: Number(columnOffset + total_compressed_size),\n }\n}\n\n/**\n * Prefetch byte ranges from an AsyncBuffer.\n *\n * @param {AsyncBuffer} file\n * @param {QueryPlan} plan\n * @returns {AsyncBuffer}\n */\nexport function prefetchAsyncBuffer(file, { fetches }) {\n // fetch byte ranges from the file\n const promises = fetches.map(({ startByte, endByte }) => file.slice(startByte, endByte))\n return {\n byteLength: file.byteLength,\n slice(start, end = file.byteLength) {\n // find matching slice\n const index = fetches.findIndex(({ startByte, endByte }) => startByte <= start && end <= endByte)\n if (index < 0) throw new Error(`no prefetch for range [${start}, ${end}]`)\n if (fetches[index].startByte !== start || fetches[index].endByte !== end) {\n // slice a subrange of the prefetch\n const startOffset = start - fetches[index].startByte\n const endOffset = end - fetches[index].startByte\n if (promises[index] instanceof Promise) {\n return promises[index].then(buffer => buffer.slice(startOffset, endOffset))\n } else {\n return promises[index].slice(startOffset, endOffset)\n }\n } else {\n return promises[index]\n }\n },\n }\n}\n","import { getMaxDefinitionLevel, isListLike, isMapLike } from './schema.js'\n\n/**\n * Reconstructs a complex nested structure from flat arrays of values and\n * definition and repetition levels, according to Dremel encoding.\n *\n * @param {any[]} output\n * @param {number[] | undefined} definitionLevels\n * @param {number[]} repetitionLevels\n * @param {DecodedArray} values\n * @param {SchemaTree[]} schemaPath\n * @returns {DecodedArray}\n */\nexport function assembleLists(output, definitionLevels, repetitionLevels, values, schemaPath) {\n const n = definitionLevels?.length || repetitionLevels.length\n if (!n) return values\n const maxDefinitionLevel = getMaxDefinitionLevel(schemaPath)\n const repetitionPath = schemaPath.map(({ element }) => element.repetition_type)\n let valueIndex = 0\n\n // Track state of nested structures\n const containerStack = [output]\n let currentContainer = output\n let currentDepth = 0 // schema depth\n let currentDefLevel = 0 // list depth\n let currentRepLevel = 0\n\n if (repetitionLevels[0]) {\n // continue previous row\n while (currentDepth < repetitionPath.length - 2 && 
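// Example (sketch, not part of the bundled source): the byte range of one
// column chunk, derived from its ColumnMetaData. Offsets are bigint in the
// thrift footer; getColumnRange converts them to numbers. `meta_data` is a
// hypothetical entry from metadata.row_groups[0].columns[0].meta_data.
const { startByte, endByte } = getColumnRange(meta_data)
const fetchSize = endByte - startByte // bytes to request for this chunk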
currentRepLevel < repetitionLevels[0]) {\n currentDepth++\n if (repetitionPath[currentDepth] !== 'REQUIRED') {\n // go into last list\n currentContainer = currentContainer.at(-1)\n containerStack.push(currentContainer)\n currentDefLevel++\n }\n if (repetitionPath[currentDepth] === 'REPEATED') currentRepLevel++\n }\n }\n\n for (let i = 0; i < n; i++) {\n // assert(currentDefLevel === containerStack.length - 1)\n const def = definitionLevels?.length ? definitionLevels[i] : maxDefinitionLevel\n const rep = repetitionLevels[i]\n\n // Pop up to start of rep level\n while (currentDepth && (rep < currentRepLevel || repetitionPath[currentDepth] !== 'REPEATED')) {\n if (repetitionPath[currentDepth] !== 'REQUIRED') {\n containerStack.pop()\n currentDefLevel--\n }\n if (repetitionPath[currentDepth] === 'REPEATED') currentRepLevel--\n currentDepth--\n }\n // @ts-expect-error won't be empty\n currentContainer = containerStack.at(-1)\n\n // Go deeper to end of definition level\n while (\n (currentDepth < repetitionPath.length - 2 || repetitionPath[currentDepth + 1] === 'REPEATED') &&\n (currentDefLevel < def || repetitionPath[currentDepth + 1] === 'REQUIRED')\n ) {\n currentDepth++\n if (repetitionPath[currentDepth] !== 'REQUIRED') {\n /** @type {any[]} */\n const newList = []\n currentContainer.push(newList)\n currentContainer = newList\n containerStack.push(newList)\n currentDefLevel++\n }\n if (repetitionPath[currentDepth] === 'REPEATED') currentRepLevel++\n }\n\n // Add value or null based on definition level\n if (def === maxDefinitionLevel) {\n // assert(currentDepth === maxDefinitionLevel || currentDepth === repetitionPath.length - 2)\n currentContainer.push(values[valueIndex++])\n } else if (currentDepth === repetitionPath.length - 2) {\n currentContainer.push(null)\n } else {\n currentContainer.push([])\n }\n }\n\n // Handle edge cases for empty inputs or single-level data\n if (!output.length) {\n // return max definition level of nested lists\n for (let i = 0; i < maxDefinitionLevel; i++) {\n /** @type {any[]} */\n const newList = []\n currentContainer.push(newList)\n currentContainer = newList\n }\n }\n\n return output\n}\n\n/**\n * Assemble a nested structure from subcolumn data.\n * https://github.com/apache/parquet-format/blob/apache-parquet-format-2.10.0/LogicalTypes.md#nested-types\n *\n * @param {Map} subcolumnData\n * @param {SchemaTree} schema top-level schema element\n * @param {number} [depth] depth of nested structure\n */\nexport function assembleNested(subcolumnData, schema, depth = 0) {\n const path = schema.path.join('.')\n const optional = schema.element.repetition_type === 'OPTIONAL'\n const nextDepth = optional ? 
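// Worked example (annotation, not part of the bundled source): for a standard
// optional 3-level LIST column holding rows [['a','b'], ['c']], with max
// definition level 3 and max repetition level 1, the flat page is
//   values:           a  b  c
//   repetitionLevels: 0  1  0   // 0 starts a new row, 1 appends to the list
//   definitionLevels: 3  3  3   // every value is defined down to the leaf
// so assembleLists([], [3, 3, 3], [0, 1, 0], ['a', 'b', 'c'], schemaPath)
// rebuilds [['a','b'], ['c']] for a matching schemaPath.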
depth + 1 : depth\n\n if (isListLike(schema)) {\n let sublist = schema.children[0]\n let subDepth = nextDepth\n if (sublist.children.length === 1) {\n sublist = sublist.children[0]\n subDepth++\n }\n assembleNested(subcolumnData, sublist, subDepth)\n\n const subcolumn = sublist.path.join('.')\n const values = subcolumnData.get(subcolumn)\n if (!values) throw new Error('parquet list column missing values')\n if (optional) flattenAtDepth(values, depth)\n subcolumnData.set(path, values)\n subcolumnData.delete(subcolumn)\n return\n }\n\n if (isMapLike(schema)) {\n const mapName = schema.children[0].element.name\n\n // Assemble keys and values\n assembleNested(subcolumnData, schema.children[0].children[0], nextDepth + 1)\n assembleNested(subcolumnData, schema.children[0].children[1], nextDepth + 1)\n\n const keys = subcolumnData.get(`${path}.${mapName}.key`)\n const values = subcolumnData.get(`${path}.${mapName}.value`)\n\n if (!keys) throw new Error('parquet map column missing keys')\n if (!values) throw new Error('parquet map column missing values')\n if (keys.length !== values.length) {\n throw new Error('parquet map column key/value length mismatch')\n }\n\n const out = assembleMaps(keys, values, nextDepth)\n if (optional) flattenAtDepth(out, depth)\n\n subcolumnData.delete(`${path}.${mapName}.key`)\n subcolumnData.delete(`${path}.${mapName}.value`)\n subcolumnData.set(path, out)\n return\n }\n\n // Struct-like column\n if (schema.children.length) {\n // construct a meta struct and then invert\n const invertDepth = schema.element.repetition_type === 'REQUIRED' ? depth : depth + 1\n /** @type {Record} */\n const struct = {}\n for (const child of schema.children) {\n assembleNested(subcolumnData, child, invertDepth)\n const childData = subcolumnData.get(child.path.join('.'))\n if (!childData) throw new Error('parquet struct missing child data')\n struct[child.element.name] = childData\n }\n // remove children\n for (const child of schema.children) {\n subcolumnData.delete(child.path.join('.'))\n }\n // invert struct by depth\n const inverted = invertStruct(struct, invertDepth)\n if (optional) flattenAtDepth(inverted, depth)\n subcolumnData.set(path, inverted)\n }\n}\n\n/**\n * @import {DecodedArray, SchemaTree} from '../src/types.d.ts'\n * @param {DecodedArray} arr\n * @param {number} depth\n */\nfunction flattenAtDepth(arr, depth) {\n for (let i = 0; i < arr.length; i++) {\n if (depth) {\n flattenAtDepth(arr[i], depth - 1)\n } else {\n arr[i] = arr[i][0]\n }\n }\n}\n\n/**\n * @param {DecodedArray} keys\n * @param {DecodedArray} values\n * @param {number} depth\n * @returns {any[]}\n */\nfunction assembleMaps(keys, values, depth) {\n const out = []\n for (let i = 0; i < keys.length; i++) {\n if (depth) {\n out.push(assembleMaps(keys[i], values[i], depth - 1)) // go deeper\n } else {\n if (keys[i]) {\n /** @type {Record} */\n const obj = {}\n for (let j = 0; j < keys[i].length; j++) {\n const value = values[i][j]\n obj[keys[i][j]] = value === undefined ? 
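// Worked example (annotation, not part of the bundled source): a MAP column
// arrives as parallel key/value subcolumns. At depth 0, one row holding keys
// ['a','b'] and values [1, 2] assembles into an object per row:
//   assembleMaps([['a', 'b']], [[1, 2]], 0) → [{ a: 1, b: 2 }]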
null : value\n }\n out.push(obj)\n } else {\n out.push(undefined)\n }\n }\n }\n return out\n}\n\n/**\n * Invert a struct-like object by depth.\n *\n * @param {Record} struct\n * @param {number} depth\n * @returns {any[]}\n */\nfunction invertStruct(struct, depth) {\n const keys = Object.keys(struct)\n const length = struct[keys[0]]?.length\n const out = []\n for (let i = 0; i < length; i++) {\n /** @type {Record} */\n const obj = {}\n for (const key of keys) {\n if (struct[key].length !== length) throw new Error('parquet struct parsing error')\n obj[key] = struct[key][i]\n }\n if (depth) {\n out.push(invertStruct(obj, depth - 1)) // deeper\n } else {\n out.push(obj)\n }\n }\n return out\n}\n","import { readVarInt, readZigZagBigInt } from './thrift.js'\n\n/**\n * @import {DataReader} from '../src/types.d.ts'\n * @param {DataReader} reader\n * @param {number} count number of values to read\n * @param {Int32Array | BigInt64Array} output\n */\nexport function deltaBinaryUnpack(reader, count, output) {\n const int32 = output instanceof Int32Array\n const blockSize = readVarInt(reader)\n const miniblockPerBlock = readVarInt(reader)\n readVarInt(reader) // assert(=== count)\n let value = readZigZagBigInt(reader) // first value\n let outputIndex = 0\n output[outputIndex++] = int32 ? Number(value) : value\n\n const valuesPerMiniblock = blockSize / miniblockPerBlock\n\n while (outputIndex < count) {\n // new block\n const minDelta = readZigZagBigInt(reader)\n const bitWidths = new Uint8Array(miniblockPerBlock)\n for (let i = 0; i < miniblockPerBlock; i++) {\n bitWidths[i] = reader.view.getUint8(reader.offset++)\n }\n\n for (let i = 0; i < miniblockPerBlock && outputIndex < count; i++) {\n // new miniblock\n const bitWidth = BigInt(bitWidths[i])\n if (bitWidth) {\n let bitpackPos = 0n\n let miniblockCount = valuesPerMiniblock\n const mask = (1n << bitWidth) - 1n\n while (miniblockCount && outputIndex < count) {\n let bits = BigInt(reader.view.getUint8(reader.offset)) >> bitpackPos & mask // TODO: don't re-read value every time\n bitpackPos += bitWidth\n while (bitpackPos >= 8) {\n bitpackPos -= 8n\n reader.offset++\n if (bitpackPos) {\n bits |= BigInt(reader.view.getUint8(reader.offset)) << bitWidth - bitpackPos & mask\n }\n }\n const delta = minDelta + bits\n value += delta\n output[outputIndex++] = int32 ? Number(value) : value\n miniblockCount--\n }\n if (miniblockCount) {\n // consume leftover miniblock\n reader.offset += Math.ceil((miniblockCount * Number(bitWidth) + Number(bitpackPos)) / 8)\n }\n } else {\n for (let j = 0; j < valuesPerMiniblock && outputIndex < count; j++) {\n value += minDelta\n output[outputIndex++] = int32 ? 
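// Worked example (annotation, not part of the bundled source): the
// DELTA_BINARY_PACKED header read below is
//   <block size (varint)> <miniblocks per block (varint)>
//   <total value count (varint)> <first value (zigzag varint)>
// then per block: <min delta (zigzag)> <one bit-width byte per miniblock>
// followed by the bit-packed deltas; each decoded delta adds minDelta plus
// the unpacked bits to the running value.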
Number(value) : value\n }\n }\n }\n }\n}\n\n/**\n * @param {DataReader} reader\n * @param {number} count\n * @param {Uint8Array[]} output\n */\nexport function deltaLengthByteArray(reader, count, output) {\n const lengths = new Int32Array(count)\n deltaBinaryUnpack(reader, count, lengths)\n for (let i = 0; i < count; i++) {\n output[i] = new Uint8Array(reader.view.buffer, reader.view.byteOffset + reader.offset, lengths[i])\n reader.offset += lengths[i]\n }\n}\n\n/**\n * @param {DataReader} reader\n * @param {number} count\n * @param {Uint8Array[]} output\n */\nexport function deltaByteArray(reader, count, output) {\n const prefixData = new Int32Array(count)\n deltaBinaryUnpack(reader, count, prefixData)\n const suffixData = new Int32Array(count)\n deltaBinaryUnpack(reader, count, suffixData)\n\n for (let i = 0; i < count; i++) {\n const suffix = new Uint8Array(reader.view.buffer, reader.view.byteOffset + reader.offset, suffixData[i])\n if (prefixData[i]) {\n // copy from previous value\n output[i] = new Uint8Array(prefixData[i] + suffixData[i])\n output[i].set(output[i - 1].subarray(0, prefixData[i]))\n output[i].set(suffix, prefixData[i])\n } else {\n output[i] = suffix\n }\n reader.offset += suffixData[i]\n }\n}\n","import { readVarInt } from './thrift.js'\n\n/**\n * Minimum bits needed to store value.\n *\n * @param {number} value\n * @returns {number}\n */\nexport function bitWidth(value) {\n return 32 - Math.clz32(value)\n}\n\n/**\n * Read values from a run-length encoded/bit-packed hybrid encoding.\n *\n * If length is zero, then read int32 length at the start.\n *\n * @param {DataReader} reader\n * @param {number} width - bitwidth\n * @param {DecodedArray} output\n * @param {number} [length] - length of the encoded data\n */\nexport function readRleBitPackedHybrid(reader, width, output, length) {\n if (length === undefined) {\n length = reader.view.getUint32(reader.offset, true)\n reader.offset += 4\n }\n const startOffset = reader.offset\n let seen = 0\n while (seen < output.length) {\n const header = readVarInt(reader)\n if (header & 1) {\n // bit-packed\n seen = readBitPacked(reader, header, width, output, seen)\n } else {\n // rle\n const count = header >>> 1\n readRle(reader, count, width, output, seen)\n seen += count\n }\n }\n reader.offset = startOffset + length // duckdb writes an empty block\n}\n\n/**\n * Run-length encoding: read value with bitWidth and repeat it count times.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @param {number} bitWidth\n * @param {DecodedArray} output\n * @param {number} seen\n */\nfunction readRle(reader, count, bitWidth, output, seen) {\n const width = bitWidth + 7 >> 3\n let value = 0\n for (let i = 0; i < width; i++) {\n value |= reader.view.getUint8(reader.offset++) << (i << 3)\n }\n // assert(value < 1 << bitWidth)\n\n // repeat value count times\n for (let i = 0; i < count; i++) {\n output[seen + i] = value\n }\n}\n\n/**\n * Read a bit-packed run of the rle/bitpack hybrid.\n * Supports width > 8 (crossing bytes).\n *\n * @param {DataReader} reader\n * @param {number} header - bit-pack header\n * @param {number} bitWidth\n * @param {DecodedArray} output\n * @param {number} seen\n * @returns {number} total output values so far\n */\nfunction readBitPacked(reader, header, bitWidth, output, seen) {\n let count = header >> 1 << 3 // values to read\n const mask = (1 << bitWidth) - 1\n\n let data = 0\n if (reader.offset < reader.view.byteLength) {\n data = reader.view.getUint8(reader.offset++)\n } else if (mask) {\n // 
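// Example (sketch, not part of the bundled source): bitWidth is the minimum
// number of bits needed to store a value, used to size RLE/bit-packed runs
// of definition and repetition levels.
bitWidth(0) // → 0
bitWidth(1) // → 1
bitWidth(7) // → 3, i.e. 32 - Math.clz32(7)
bitWidth(255) // → 8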
sometimes out-of-bounds reads are masked out\n throw new Error(`parquet bitpack offset ${reader.offset} out of range`)\n }\n let left = 8\n let right = 0\n\n // read values\n while (count) {\n // if we have crossed a byte boundary, shift the data\n if (right > 8) {\n right -= 8\n left -= 8\n data >>>= 8\n } else if (left - right < bitWidth) {\n // if we don't have bitWidth number of bits to read, read next byte\n data |= reader.view.getUint8(reader.offset) << left\n reader.offset++\n left += 8\n } else {\n if (seen < output.length) {\n // emit value\n output[seen++] = data >> right & mask\n }\n count--\n right += bitWidth\n }\n }\n\n return seen\n}\n\n/**\n * @param {DataReader} reader\n * @param {number} count\n * @param {ParquetType} type\n * @param {number | undefined} typeLength\n * @returns {DecodedArray}\n */\nexport function byteStreamSplit(reader, count, type, typeLength) {\n const width = byteWidth(type, typeLength)\n const bytes = new Uint8Array(count * width)\n for (let b = 0; b < width; b++) {\n for (let i = 0; i < count; i++) {\n bytes[i * width + b] = reader.view.getUint8(reader.offset++)\n }\n }\n // interpret bytes as typed array\n if (type === 'FLOAT') return new Float32Array(bytes.buffer)\n else if (type === 'DOUBLE') return new Float64Array(bytes.buffer)\n else if (type === 'INT32') return new Int32Array(bytes.buffer)\n else if (type === 'INT64') return new BigInt64Array(bytes.buffer)\n else if (type === 'FIXED_LEN_BYTE_ARRAY') {\n // split into arrays of typeLength\n const split = new Array(count)\n for (let i = 0; i < count; i++) {\n split[i] = bytes.subarray(i * width, (i + 1) * width)\n }\n return split\n }\n throw new Error(`parquet byte_stream_split unsupported type: ${type}`)\n}\n\n/**\n * @import {DataReader, DecodedArray, ParquetType} from '../src/types.d.ts'\n * @param {ParquetType} type\n * @param {number | undefined} typeLength\n * @returns {number}\n */\nfunction byteWidth(type, typeLength) {\n switch (type) {\n case 'INT32':\n case 'FLOAT':\n return 4\n case 'INT64':\n case 'DOUBLE':\n return 8\n case 'FIXED_LEN_BYTE_ARRAY':\n if (!typeLength) throw new Error('parquet byteWidth missing type_length')\n return typeLength\n default:\n throw new Error(`parquet unsupported type: ${type}`)\n }\n}\n","/**\n * Read `count` values of the given type from the reader.view.\n *\n * @param {DataReader} reader - buffer to read data from\n * @param {ParquetType} type - parquet type of the data\n * @param {number} count - number of values to read\n * @param {number | undefined} fixedLength - length of each fixed length byte array\n * @returns {DecodedArray} array of values\n */\nexport function readPlain(reader, type, count, fixedLength) {\n if (count === 0) return []\n if (type === 'BOOLEAN') {\n return readPlainBoolean(reader, count)\n } else if (type === 'INT32') {\n return readPlainInt32(reader, count)\n } else if (type === 'INT64') {\n return readPlainInt64(reader, count)\n } else if (type === 'INT96') {\n return readPlainInt96(reader, count)\n } else if (type === 'FLOAT') {\n return readPlainFloat(reader, count)\n } else if (type === 'DOUBLE') {\n return readPlainDouble(reader, count)\n } else if (type === 'BYTE_ARRAY') {\n return readPlainByteArray(reader, count)\n } else if (type === 'FIXED_LEN_BYTE_ARRAY') {\n if (!fixedLength) throw new Error('parquet missing fixed length')\n return readPlainByteArrayFixed(reader, count, fixedLength)\n } else {\n throw new Error(`parquet unhandled type: ${type}`)\n }\n}\n\n/**\n * Read `count` boolean values.\n *\n * @param 
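// Worked example (annotation, not part of the bundled source):
// BYTE_STREAM_SPLIT stores the k-th byte of every value contiguously, so two
// FLOAT values v0, v1 are laid out on the wire as
//   b0(v0) b0(v1) | b1(v0) b1(v1) | b2(v0) b2(v1) | b3(v0) b3(v1)
// byteStreamSplit() re-interleaves the streams into per-value bytes and
// reinterprets the buffer as a Float32Array.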
{DataReader} reader\n * @param {number} count\n * @returns {boolean[]}\n */\nfunction readPlainBoolean(reader, count) {\n const values = new Array(count)\n for (let i = 0; i < count; i++) {\n const byteOffset = reader.offset + (i / 8 | 0)\n const bitOffset = i % 8\n const byte = reader.view.getUint8(byteOffset)\n values[i] = (byte & 1 << bitOffset) !== 0\n }\n reader.offset += Math.ceil(count / 8)\n return values\n}\n\n/**\n * Read `count` int32 values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {Int32Array}\n */\nfunction readPlainInt32(reader, count) {\n const values = (reader.view.byteOffset + reader.offset) % 4\n ? new Int32Array(align(reader.view.buffer, reader.view.byteOffset + reader.offset, count * 4))\n : new Int32Array(reader.view.buffer, reader.view.byteOffset + reader.offset, count)\n reader.offset += count * 4\n return values\n}\n\n/**\n * Read `count` int64 values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {BigInt64Array}\n */\nfunction readPlainInt64(reader, count) {\n const values = (reader.view.byteOffset + reader.offset) % 8\n ? new BigInt64Array(align(reader.view.buffer, reader.view.byteOffset + reader.offset, count * 8))\n : new BigInt64Array(reader.view.buffer, reader.view.byteOffset + reader.offset, count)\n reader.offset += count * 8\n return values\n}\n\n/**\n * Read `count` int96 values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {bigint[]}\n */\nfunction readPlainInt96(reader, count) {\n const values = new Array(count)\n for (let i = 0; i < count; i++) {\n const low = reader.view.getBigInt64(reader.offset + i * 12, true)\n const high = reader.view.getInt32(reader.offset + i * 12 + 8, true)\n values[i] = BigInt(high) << 64n | low\n }\n reader.offset += count * 12\n return values\n}\n\n/**\n * Read `count` float values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {Float32Array}\n */\nfunction readPlainFloat(reader, count) {\n const values = (reader.view.byteOffset + reader.offset) % 4\n ? new Float32Array(align(reader.view.buffer, reader.view.byteOffset + reader.offset, count * 4))\n : new Float32Array(reader.view.buffer, reader.view.byteOffset + reader.offset, count)\n reader.offset += count * 4\n return values\n}\n\n/**\n * Read `count` double values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {Float64Array}\n */\nfunction readPlainDouble(reader, count) {\n const values = (reader.view.byteOffset + reader.offset) % 8\n ? 
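// Worked example (annotation, not part of the bundled source): PLAIN booleans
// are bit-packed LSB-first, so a single byte 0b00000101 decodes as
//   readPlainBoolean(reader, 3) → [true, false, true]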
new Float64Array(align(reader.view.buffer, reader.view.byteOffset + reader.offset, count * 8))\n : new Float64Array(reader.view.buffer, reader.view.byteOffset + reader.offset, count)\n reader.offset += count * 8\n return values\n}\n\n/**\n * Read `count` byte array values.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @returns {Uint8Array[]}\n */\nfunction readPlainByteArray(reader, count) {\n const values = new Array(count)\n for (let i = 0; i < count; i++) {\n const length = reader.view.getUint32(reader.offset, true)\n reader.offset += 4\n values[i] = new Uint8Array(reader.view.buffer, reader.view.byteOffset + reader.offset, length)\n reader.offset += length\n }\n return values\n}\n\n/**\n * Read a fixed length byte array.\n *\n * @param {DataReader} reader\n * @param {number} count\n * @param {number} fixedLength\n * @returns {Uint8Array[]}\n */\nfunction readPlainByteArrayFixed(reader, count, fixedLength) {\n // assert(reader.view.byteLength - reader.offset >= count * fixedLength)\n const values = new Array(count)\n for (let i = 0; i < count; i++) {\n values[i] = new Uint8Array(reader.view.buffer, reader.view.byteOffset + reader.offset, fixedLength)\n reader.offset += fixedLength\n }\n return values\n}\n\n/**\n * Create a new buffer with the offset and size.\n *\n * @import {DataReader, DecodedArray, ParquetType} from '../src/types.d.ts'\n * @param {ArrayBufferLike} buffer\n * @param {number} offset\n * @param {number} size\n * @returns {ArrayBuffer}\n */\nfunction align(buffer, offset, size) {\n const aligned = new ArrayBuffer(size)\n new Uint8Array(aligned).set(new Uint8Array(buffer, offset, size))\n return aligned\n}\n","/**\n * The MIT License (MIT)\n * Copyright (c) 2016 Zhipeng Jia\n * https://github.com/zhipeng-jia/snappyjs\n */\n\nconst WORD_MASK = [0, 0xff, 0xffff, 0xffffff, 0xffffffff]\n\n/**\n * Copy bytes from one array to another\n *\n * @param {Uint8Array} fromArray source array\n * @param {number} fromPos source position\n * @param {Uint8Array} toArray destination array\n * @param {number} toPos destination position\n * @param {number} length number of bytes to copy\n */\nfunction copyBytes(fromArray, fromPos, toArray, toPos, length) {\n for (let i = 0; i < length; i++) {\n toArray[toPos + i] = fromArray[fromPos + i]\n }\n}\n\n/**\n * Decompress snappy data.\n * Accepts an output buffer to avoid allocating a new buffer for each call.\n *\n * @param {Uint8Array} input compressed data\n * @param {Uint8Array} output output buffer\n */\nexport function snappyUncompress(input, output) {\n const inputLength = input.byteLength\n const outputLength = output.byteLength\n let pos = 0\n let outPos = 0\n\n // skip preamble (contains uncompressed length as varint)\n while (pos < inputLength) {\n const c = input[pos]\n pos++\n if (c < 128) {\n break\n }\n }\n if (outputLength && pos >= inputLength) {\n throw new Error('invalid snappy length header')\n }\n\n while (pos < inputLength) {\n const c = input[pos]\n let len = 0\n pos++\n\n if (pos >= inputLength) {\n throw new Error('missing eof marker')\n }\n\n // There are two types of elements, literals and copies (back references)\n if ((c & 0x3) === 0) {\n // Literals are uncompressed data stored directly in the byte stream\n let len = (c >>> 2) + 1\n // Longer literal length is encoded in multiple bytes\n if (len > 60) {\n if (pos + 3 >= inputLength) {\n throw new Error('snappy error literal pos + 3 >= inputLength')\n }\n const lengthSize = len - 60 // length bytes - 1\n len = input[pos]\n + (input[pos + 1] << 8)\n + 
(input[pos + 2] << 16)\n + (input[pos + 3] << 24)\n len = (len & WORD_MASK[lengthSize]) + 1\n pos += lengthSize\n }\n if (pos + len > inputLength) {\n throw new Error('snappy error literal exceeds input length')\n }\n copyBytes(input, pos, output, outPos, len)\n pos += len\n outPos += len\n } else {\n // Copy elements\n let offset = 0 // offset back from current position to read\n switch (c & 0x3) {\n case 1:\n // Copy with 1-byte offset\n len = (c >>> 2 & 0x7) + 4\n offset = input[pos] + (c >>> 5 << 8)\n pos++\n break\n case 2:\n // Copy with 2-byte offset\n if (inputLength <= pos + 1) {\n throw new Error('snappy error end of input')\n }\n len = (c >>> 2) + 1\n offset = input[pos] + (input[pos + 1] << 8)\n pos += 2\n break\n case 3:\n // Copy with 4-byte offset\n if (inputLength <= pos + 3) {\n throw new Error('snappy error end of input')\n }\n len = (c >>> 2) + 1\n offset = input[pos]\n + (input[pos + 1] << 8)\n + (input[pos + 2] << 16)\n + (input[pos + 3] << 24)\n pos += 4\n break\n default:\n break\n }\n if (offset === 0 || isNaN(offset)) {\n throw new Error(`invalid offset ${offset} pos ${pos} inputLength ${inputLength}`)\n }\n if (offset > outPos) {\n throw new Error('cannot copy from before start of buffer')\n }\n copyBytes(output, outPos - offset, output, outPos, len)\n outPos += len\n }\n }\n\n if (outPos !== outputLength) throw new Error('premature end of input')\n}\n","import { deltaBinaryUnpack, deltaByteArray, deltaLengthByteArray } from './delta.js'\nimport { bitWidth, byteStreamSplit, readRleBitPackedHybrid } from './encoding.js'\nimport { readPlain } from './plain.js'\nimport { getMaxDefinitionLevel, getMaxRepetitionLevel } from './schema.js'\nimport { snappyUncompress } from './snappy.js'\n\n/**\n * Read a data page from uncompressed reader.\n *\n * @param {Uint8Array} bytes raw page data (should already be decompressed)\n * @param {DataPageHeader} daph data page header\n * @param {ColumnDecoder} columnDecoder\n * @returns {DataPage} definition levels, repetition levels, and array of values\n */\nexport function readDataPage(bytes, daph, { type, element, schemaPath }) {\n const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength)\n const reader = { view, offset: 0 }\n /** @type {DecodedArray} */\n let dataPage\n\n // repetition and definition levels\n const repetitionLevels = readRepetitionLevels(reader, daph, schemaPath)\n // assert(!repetitionLevels.length || repetitionLevels.length === daph.num_values)\n const { definitionLevels, numNulls } = readDefinitionLevels(reader, daph, schemaPath)\n // assert(!definitionLevels.length || definitionLevels.length === daph.num_values)\n\n // read values based on encoding\n const nValues = daph.num_values - numNulls\n if (daph.encoding === 'PLAIN') {\n dataPage = readPlain(reader, type, nValues, element.type_length)\n } else if (\n daph.encoding === 'PLAIN_DICTIONARY' ||\n daph.encoding === 'RLE_DICTIONARY' ||\n daph.encoding === 'RLE'\n ) {\n const bitWidth = type === 'BOOLEAN' ? 
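// Example (sketch, not part of the bundled source): decompress a minimal
// snappy frame. The stream is
//   [varint uncompressed length = 2][literal tag (2-1)<<2 = 0x04]['h','i']
const input = new Uint8Array([0x02, 0x04, 0x68, 0x69])
const output = new Uint8Array(2)
snappyUncompress(input, output)
new TextDecoder().decode(output) // → 'hi'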
1 : view.getUint8(reader.offset++)\n if (bitWidth) {\n dataPage = new Array(nValues)\n if (type === 'BOOLEAN') {\n readRleBitPackedHybrid(reader, bitWidth, dataPage)\n dataPage = dataPage.map(x => !!x) // convert to boolean\n } else {\n // assert(daph.encoding.endsWith('_DICTIONARY'))\n readRleBitPackedHybrid(reader, bitWidth, dataPage, view.byteLength - reader.offset)\n }\n } else {\n dataPage = new Uint8Array(nValues) // nValue zeroes\n }\n } else if (daph.encoding === 'BYTE_STREAM_SPLIT') {\n dataPage = byteStreamSplit(reader, nValues, type, element.type_length)\n } else if (daph.encoding === 'DELTA_BINARY_PACKED') {\n const int32 = type === 'INT32'\n dataPage = int32 ? new Int32Array(nValues) : new BigInt64Array(nValues)\n deltaBinaryUnpack(reader, nValues, dataPage)\n } else if (daph.encoding === 'DELTA_LENGTH_BYTE_ARRAY') {\n dataPage = new Array(nValues)\n deltaLengthByteArray(reader, nValues, dataPage)\n } else {\n throw new Error(`parquet unsupported encoding: ${daph.encoding}`)\n }\n\n return { definitionLevels, repetitionLevels, dataPage }\n}\n\n/**\n * @import {ColumnDecoder, CompressionCodec, Compressors, DataPage, DataPageHeader, DataPageHeaderV2, DataReader, DecodedArray, PageHeader, SchemaTree} from '../src/types.d.ts'\n * @param {DataReader} reader data view for the page\n * @param {DataPageHeader} daph data page header\n * @param {SchemaTree[]} schemaPath\n * @returns {any[]} repetition levels and number of bytes read\n */\nfunction readRepetitionLevels(reader, daph, schemaPath) {\n if (schemaPath.length > 1) {\n const maxRepetitionLevel = getMaxRepetitionLevel(schemaPath)\n if (maxRepetitionLevel) {\n const values = new Array(daph.num_values)\n readRleBitPackedHybrid(reader, bitWidth(maxRepetitionLevel), values)\n return values\n }\n }\n return []\n}\n\n/**\n * @param {DataReader} reader data view for the page\n * @param {DataPageHeader} daph data page header\n * @param {SchemaTree[]} schemaPath\n * @returns {{ definitionLevels: number[], numNulls: number }} definition levels\n */\nfunction readDefinitionLevels(reader, daph, schemaPath) {\n const maxDefinitionLevel = getMaxDefinitionLevel(schemaPath)\n if (!maxDefinitionLevel) return { definitionLevels: [], numNulls: 0 }\n\n const definitionLevels = new Array(daph.num_values)\n readRleBitPackedHybrid(reader, bitWidth(maxDefinitionLevel), definitionLevels)\n\n // count nulls\n let numNulls = daph.num_values\n for (const def of definitionLevels) {\n if (def === maxDefinitionLevel) numNulls--\n }\n if (numNulls === 0) definitionLevels.length = 0\n\n return { definitionLevels, numNulls }\n}\n\n/**\n * @param {Uint8Array} compressedBytes\n * @param {number} uncompressed_page_size\n * @param {CompressionCodec} codec\n * @param {Compressors | undefined} compressors\n * @returns {Uint8Array}\n */\nexport function decompressPage(compressedBytes, uncompressed_page_size, codec, compressors) {\n /** @type {Uint8Array} */\n let page\n const customDecompressor = compressors?.[codec]\n if (codec === 'UNCOMPRESSED') {\n page = compressedBytes\n } else if (customDecompressor) {\n page = customDecompressor(compressedBytes, uncompressed_page_size)\n } else if (codec === 'SNAPPY') {\n page = new Uint8Array(uncompressed_page_size)\n snappyUncompress(compressedBytes, page)\n } else {\n throw new Error(`parquet unsupported compression codec: ${codec}`)\n }\n if (page?.length !== uncompressed_page_size) {\n throw new Error(`parquet decompressed page length ${page?.length} does not match header ${uncompressed_page_size}`)\n }\n return 
page\n}\n\n\n/**\n * Read a data page from the given Uint8Array.\n *\n * @param {Uint8Array} compressedBytes raw page data\n * @param {PageHeader} ph page header\n * @param {ColumnDecoder} columnDecoder\n * @returns {DataPage} definition levels, repetition levels, and array of values\n */\nexport function readDataPageV2(compressedBytes, ph, columnDecoder) {\n const view = new DataView(compressedBytes.buffer, compressedBytes.byteOffset, compressedBytes.byteLength)\n const reader = { view, offset: 0 }\n const { type, element, schemaPath, codec, compressors } = columnDecoder\n const daph2 = ph.data_page_header_v2\n if (!daph2) throw new Error('parquet data page header v2 is undefined')\n\n // repetition levels\n const repetitionLevels = readRepetitionLevelsV2(reader, daph2, schemaPath)\n reader.offset = daph2.repetition_levels_byte_length // readVarInt() => len for boolean v2?\n\n // definition levels\n const definitionLevels = readDefinitionLevelsV2(reader, daph2, schemaPath)\n // assert(reader.offset === daph2.repetition_levels_byte_length + daph2.definition_levels_byte_length)\n\n const uncompressedPageSize = ph.uncompressed_page_size - daph2.definition_levels_byte_length - daph2.repetition_levels_byte_length\n\n let page = compressedBytes.subarray(reader.offset)\n if (daph2.is_compressed !== false) {\n page = decompressPage(page, uncompressedPageSize, codec, compressors)\n }\n const pageView = new DataView(page.buffer, page.byteOffset, page.byteLength)\n const pageReader = { view: pageView, offset: 0 }\n\n // read values based on encoding\n /** @type {DecodedArray} */\n let dataPage\n const nValues = daph2.num_values - daph2.num_nulls\n if (daph2.encoding === 'PLAIN') {\n dataPage = readPlain(pageReader, type, nValues, element.type_length)\n } else if (daph2.encoding === 'RLE') {\n // assert(type === 'BOOLEAN')\n dataPage = new Array(nValues)\n readRleBitPackedHybrid(pageReader, 1, dataPage)\n dataPage = dataPage.map(x => !!x)\n } else if (\n daph2.encoding === 'PLAIN_DICTIONARY' ||\n daph2.encoding === 'RLE_DICTIONARY'\n ) {\n const bitWidth = pageView.getUint8(pageReader.offset++)\n dataPage = new Array(nValues)\n readRleBitPackedHybrid(pageReader, bitWidth, dataPage, uncompressedPageSize - 1)\n } else if (daph2.encoding === 'DELTA_BINARY_PACKED') {\n const int32 = type === 'INT32'\n dataPage = int32 ? 
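// Example (sketch, not part of the bundled source): SNAPPY is built in; other
// codecs can be supplied through the `compressors` option, keyed by codec
// name and returning a Uint8Array of the stated uncompressed size. Here gzip
// comes from Node's zlib, purely for illustration:
import { gunzipSync } from 'node:zlib'
const compressors = {
  GZIP: (bytes, uncompressedSize) => new Uint8Array(gunzipSync(bytes)),
}
// decompressPage(compressedBytes, size, 'GZIP', compressors) will then use it.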
new Int32Array(nValues) : new BigInt64Array(nValues)\n deltaBinaryUnpack(pageReader, nValues, dataPage)\n } else if (daph2.encoding === 'DELTA_LENGTH_BYTE_ARRAY') {\n dataPage = new Array(nValues)\n deltaLengthByteArray(pageReader, nValues, dataPage)\n } else if (daph2.encoding === 'DELTA_BYTE_ARRAY') {\n dataPage = new Array(nValues)\n deltaByteArray(pageReader, nValues, dataPage)\n } else if (daph2.encoding === 'BYTE_STREAM_SPLIT') {\n dataPage = byteStreamSplit(pageReader, nValues, type, element.type_length) // read from the decompressed page, not the raw buffer\n } else {\n throw new Error(`parquet unsupported encoding: ${daph2.encoding}`)\n }\n\n return { definitionLevels, repetitionLevels, dataPage }\n}\n\n/**\n * @param {DataReader} reader\n * @param {DataPageHeaderV2} daph2 data page header v2\n * @param {SchemaTree[]} schemaPath\n * @returns {any[]} repetition levels\n */\nfunction readRepetitionLevelsV2(reader, daph2, schemaPath) {\n const maxRepetitionLevel = getMaxRepetitionLevel(schemaPath)\n if (!maxRepetitionLevel) return []\n\n const values = new Array(daph2.num_values)\n readRleBitPackedHybrid(reader, bitWidth(maxRepetitionLevel), values, daph2.repetition_levels_byte_length)\n return values\n}\n\n/**\n * @param {DataReader} reader\n * @param {DataPageHeaderV2} daph2 data page header v2\n * @param {SchemaTree[]} schemaPath\n * @returns {number[] | undefined} definition levels\n */\nfunction readDefinitionLevelsV2(reader, daph2, schemaPath) {\n const maxDefinitionLevel = getMaxDefinitionLevel(schemaPath)\n if (maxDefinitionLevel) {\n // V2 we know the length\n const values = new Array(daph2.num_values)\n readRleBitPackedHybrid(reader, bitWidth(maxDefinitionLevel), values, daph2.definition_levels_byte_length)\n return values\n }\n}\n","import { assembleLists } from './assemble.js'\nimport { Encoding, PageType } from './constants.js'\nimport { convert, convertWithDictionary } from './convert.js'\nimport { decompressPage, readDataPage, readDataPageV2 } from './datapage.js'\nimport { readPlain } from './plain.js'\nimport { isFlatColumn } from './schema.js'\nimport { deserializeTCompactProtocol } from './thrift.js'\n\n/**\n * Parse column data from a buffer.\n *\n * @param {DataReader} reader\n * @param {RowGroupSelect} rowGroupSelect row group selection\n * @param {ColumnDecoder} columnDecoder column decoder params\n * @param {(chunk: ColumnData) => void} [onPage] callback for each page\n * @returns {DecodedArray[]}\n */\nexport function readColumn(reader, { groupStart, selectStart, selectEnd }, columnDecoder, onPage) {\n const { columnName, schemaPath } = columnDecoder\n const isFlat = isFlatColumn(schemaPath)\n /** @type {DecodedArray[]} */\n const chunks = []\n /** @type {DecodedArray | undefined} */\n let dictionary = undefined\n /** @type {DecodedArray | undefined} */\n let lastChunk = undefined\n let rowCount = 0\n\n const emitLastChunk = onPage && (() => {\n lastChunk && onPage({\n columnName,\n columnData: lastChunk,\n rowStart: groupStart + rowCount - lastChunk.length,\n rowEnd: groupStart + rowCount,\n })\n })\n\n while (isFlat ? 
rowCount < selectEnd : reader.offset < reader.view.byteLength - 1) {\n if (reader.offset >= reader.view.byteLength - 1) break // end of reader\n\n // read page header\n const header = parquetHeader(reader)\n if (header.type === 'DICTIONARY_PAGE') {\n // assert(!dictionary)\n dictionary = readPage(reader, header, columnDecoder, dictionary, undefined, 0)\n dictionary = convert(dictionary, columnDecoder)\n } else {\n const lastChunkLength = lastChunk?.length || 0\n const values = readPage(reader, header, columnDecoder, dictionary, lastChunk, selectStart - rowCount)\n if (lastChunk === values) {\n // continued from previous page\n rowCount += values.length - lastChunkLength\n } else {\n emitLastChunk?.()\n chunks.push(values)\n rowCount += values.length\n lastChunk = values\n }\n }\n }\n emitLastChunk?.()\n // assert(rowCount >= selectEnd)\n if (rowCount > selectEnd && lastChunk) {\n // truncate last chunk to row limit\n chunks[chunks.length - 1] = lastChunk.slice(0, selectEnd - (rowCount - lastChunk.length))\n }\n return chunks\n}\n\n/**\n * Read a page (data or dictionary) from a buffer.\n *\n * @param {DataReader} reader\n * @param {PageHeader} header\n * @param {ColumnDecoder} columnDecoder\n * @param {DecodedArray | undefined} dictionary\n * @param {DecodedArray | undefined} previousChunk\n * @param {number} pageStart skip this many rows in the page\n * @returns {DecodedArray}\n */\nexport function readPage(reader, header, columnDecoder, dictionary, previousChunk, pageStart) {\n const { type, element, schemaPath, codec, compressors } = columnDecoder\n // read compressed_page_size bytes\n const compressedBytes = new Uint8Array(\n reader.view.buffer, reader.view.byteOffset + reader.offset, header.compressed_page_size\n )\n reader.offset += header.compressed_page_size\n\n // parse page data by type\n if (header.type === 'DATA_PAGE') {\n const daph = header.data_page_header\n if (!daph) throw new Error('parquet data page header is undefined')\n\n // skip unnecessary non-nested pages\n if (pageStart > daph.num_values && isFlatColumn(schemaPath)) {\n return new Array(daph.num_values) // TODO: don't allocate array\n }\n\n const page = decompressPage(compressedBytes, Number(header.uncompressed_page_size), codec, compressors)\n const { definitionLevels, repetitionLevels, dataPage } = readDataPage(page, daph, columnDecoder)\n // assert(!daph.statistics?.null_count || daph.statistics.null_count === BigInt(daph.num_values - dataPage.length))\n\n // convert types, dereference dictionary, and assemble lists\n let values = convertWithDictionary(dataPage, dictionary, daph.encoding, columnDecoder)\n if (repetitionLevels.length || definitionLevels?.length) {\n const output = Array.isArray(previousChunk) ? 
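// Example (sketch, not part of the bundled source): readColumn reports each
// decoded page through onPage with absolute row coordinates. A minimal
// collector, normally passed in via ParquetReadOptions:
const pages = []
const onPage = chunk => pages.push(chunk) // { columnName, columnData, rowStart, rowEnd }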
previousChunk : []\n return assembleLists(output, definitionLevels, repetitionLevels, values, schemaPath)\n } else {\n // wrap nested flat data by depth\n for (let i = 2; i < schemaPath.length; i++) {\n if (schemaPath[i].element.repetition_type !== 'REQUIRED') {\n values = Array.from(values, e => [e])\n }\n }\n return values\n }\n } else if (header.type === 'DATA_PAGE_V2') {\n const daph2 = header.data_page_header_v2\n if (!daph2) throw new Error('parquet data page header v2 is undefined')\n\n // skip unnecessary pages\n if (pageStart > daph2.num_rows) {\n return new Array(daph2.num_values) // TODO: don't allocate array\n }\n\n const { definitionLevels, repetitionLevels, dataPage } =\n readDataPageV2(compressedBytes, header, columnDecoder)\n\n // convert types, dereference dictionary, and assemble lists\n const values = convertWithDictionary(dataPage, dictionary, daph2.encoding, columnDecoder)\n const output = Array.isArray(previousChunk) ? previousChunk : []\n return assembleLists(output, definitionLevels, repetitionLevels, values, schemaPath)\n } else if (header.type === 'DICTIONARY_PAGE') {\n const diph = header.dictionary_page_header\n if (!diph) throw new Error('parquet dictionary page header is undefined')\n\n const page = decompressPage(\n compressedBytes, Number(header.uncompressed_page_size), codec, compressors\n )\n\n const reader = { view: new DataView(page.buffer, page.byteOffset, page.byteLength), offset: 0 }\n return readPlain(reader, type, diph.num_values, element.type_length)\n } else {\n throw new Error(`parquet unsupported page type: ${header.type}`)\n }\n}\n\n/**\n * Read parquet header from a buffer.\n *\n * @import {ColumnData, ColumnDecoder, DataReader, DecodedArray, PageHeader, RowGroupSelect} from '../src/types.d.ts'\n * @param {DataReader} reader\n * @returns {PageHeader}\n */\nfunction parquetHeader(reader) {\n const header = deserializeTCompactProtocol(reader)\n\n // Parse parquet header from thrift data\n const type = PageType[header.field_1]\n const uncompressed_page_size = header.field_2\n const compressed_page_size = header.field_3\n const crc = header.field_4\n const data_page_header = header.field_5 && {\n num_values: header.field_5.field_1,\n encoding: Encoding[header.field_5.field_2],\n definition_level_encoding: Encoding[header.field_5.field_3],\n repetition_level_encoding: Encoding[header.field_5.field_4],\n statistics: header.field_5.field_5 && {\n max: header.field_5.field_5.field_1,\n min: header.field_5.field_5.field_2,\n null_count: header.field_5.field_5.field_3,\n distinct_count: header.field_5.field_5.field_4,\n max_value: header.field_5.field_5.field_5,\n min_value: header.field_5.field_5.field_6,\n },\n }\n const index_page_header = header.field_6\n const dictionary_page_header = header.field_7 && {\n num_values: header.field_7.field_1,\n encoding: Encoding[header.field_7.field_2],\n is_sorted: header.field_7.field_3,\n }\n const data_page_header_v2 = header.field_8 && {\n num_values: header.field_8.field_1,\n num_nulls: header.field_8.field_2,\n num_rows: header.field_8.field_3,\n encoding: Encoding[header.field_8.field_4],\n definition_levels_byte_length: header.field_8.field_5,\n repetition_levels_byte_length: header.field_8.field_6,\n is_compressed: header.field_8.field_7 === undefined ? 
true : header.field_8.field_7, // default true\n statistics: header.field_8.field_8,\n }\n\n return {\n type,\n uncompressed_page_size,\n compressed_page_size,\n crc,\n data_page_header,\n index_page_header,\n dictionary_page_header,\n data_page_header_v2,\n }\n}\n","import { assembleNested } from './assemble.js'\nimport { readColumn } from './column.js'\nimport { DEFAULT_PARSERS } from './convert.js'\nimport { getColumnRange } from './plan.js'\nimport { getSchemaPath } from './schema.js'\nimport { flatten } from './utils.js'\n\n/**\n * @import {AsyncColumn, AsyncRowGroup, DecodedArray, GroupPlan, ParquetParsers, ParquetReadOptions, QueryPlan, RowGroup, SchemaTree} from './types.js'\n */\n/**\n * Read a row group from a file-like object.\n *\n * @param {ParquetReadOptions} options\n * @param {QueryPlan} plan\n * @param {GroupPlan} groupPlan\n * @returns {AsyncRowGroup} resolves to column data\n */\nexport function readRowGroup(options, { metadata, columns }, groupPlan) {\n const { file, compressors, utf8 } = options\n\n /** @type {AsyncColumn[]} */\n const asyncColumns = []\n /** @type {ParquetParsers} */\n const parsers = { ...DEFAULT_PARSERS, ...options.parsers }\n\n // read column data\n for (const { file_path, meta_data } of groupPlan.rowGroup.columns) {\n if (file_path) throw new Error('parquet file_path not supported')\n if (!meta_data) throw new Error('parquet column metadata is undefined')\n\n // skip columns that are not requested\n const columnName = meta_data.path_in_schema[0]\n if (columns && !columns.includes(columnName)) continue\n\n const { startByte, endByte } = getColumnRange(meta_data)\n const columnBytes = endByte - startByte\n\n // skip columns larger than 1gb\n // TODO: stream process the data, returning only the requested rows\n if (columnBytes > 1 << 30) {\n console.warn(`parquet skipping huge column \"${meta_data.path_in_schema}\" ${columnBytes} bytes`)\n // TODO: set column to new Error('parquet column too large')\n continue\n }\n\n // wrap awaitable to ensure it's a promise\n /** @type {Promise} */\n const buffer = Promise.resolve(file.slice(startByte, endByte))\n\n // read column data async\n asyncColumns.push({\n pathInSchema: meta_data.path_in_schema,\n data: buffer.then(arrayBuffer => {\n const schemaPath = getSchemaPath(metadata.schema, meta_data.path_in_schema)\n const reader = { view: new DataView(arrayBuffer), offset: 0 }\n const subcolumn = meta_data.path_in_schema.join('.')\n const columnDecoder = {\n columnName: subcolumn,\n type: meta_data.type,\n element: schemaPath[schemaPath.length - 1].element,\n schemaPath,\n codec: meta_data.codec,\n parsers,\n compressors,\n utf8,\n }\n return readColumn(reader, groupPlan, columnDecoder, options.onPage)\n }),\n })\n }\n\n return { groupStart: groupPlan.groupStart, groupRows: groupPlan.groupRows, asyncColumns }\n}\n\n/**\n * @overload\n * @param {AsyncRowGroup} asyncGroup\n * @param {number} selectStart\n * @param {number} selectEnd\n * @param {string[] | undefined} columns\n * @param {'object'} rowFormat\n * @returns {Promise[]>} resolves to row data\n */\n/**\n * @overload\n * @param {AsyncRowGroup} asyncGroup\n * @param {number} selectStart\n * @param {number} selectEnd\n * @param {string[] | undefined} columns\n * @param {'array'} [rowFormat]\n * @returns {Promise} resolves to row data\n */\n/**\n * @param {AsyncRowGroup} asyncGroup\n * @param {number} selectStart\n * @param {number} selectEnd\n * @param {string[] | undefined} columns\n * @param {'object' | 'array'} [rowFormat]\n * @returns {Promise[] | 
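// Worked example (annotation, not part of the bundled source): the same
// two-row selection transposed both ways:
//   asyncGroupToRows(group, 0, 2, ['id', 'name'], 'object')
//     → [{ id: 1, name: 'a' }, { id: 2, name: 'b' }]
//   asyncGroupToRows(group, 0, 2, ['id', 'name'], 'array')
//     → [[1, 'a'], [2, 'b']]
// With rowFormat 'array', rows follow the requested column order.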
any[][]>} resolves to row data\n */\nexport async function asyncGroupToRows({ asyncColumns }, selectStart, selectEnd, columns, rowFormat) {\n // columnData[i] for asyncColumns[i]\n // TODO: do it without flatten\n const columnDatas = await Promise.all(asyncColumns.map(({ data }) => data.then(flatten)))\n\n // careful mapping of column order for rowFormat: array\n const includedColumnNames = asyncColumns\n .map(child => child.pathInSchema[0])\n .filter(name => !columns || columns.includes(name))\n const columnOrder = columns ?? includedColumnNames\n const columnIndexes = columnOrder.map(name => asyncColumns.findIndex(column => column.pathInSchema[0] === name))\n\n // transpose columns into rows\n const selectCount = selectEnd - selectStart\n if (rowFormat === 'object') {\n /** @type {Record[]} */\n const groupData = new Array(selectCount)\n for (let selectRow = 0; selectRow < selectCount; selectRow++) {\n const row = selectStart + selectRow\n // return each row as an object\n /** @type {Record} */\n const rowData = {}\n for (let i = 0; i < asyncColumns.length; i++) {\n rowData[asyncColumns[i].pathInSchema[0]] = columnDatas[i][row]\n }\n groupData[selectRow] = rowData\n }\n return groupData\n }\n\n /** @type {any[][]} */\n const groupData = new Array(selectCount)\n for (let selectRow = 0; selectRow < selectCount; selectRow++) {\n const row = selectStart + selectRow\n // return each row as an array\n const rowData = new Array(asyncColumns.length)\n for (let i = 0; i < columnOrder.length; i++) {\n if (columnIndexes[i] >= 0) {\n rowData[i] = columnDatas[columnIndexes[i]][row]\n }\n }\n groupData[selectRow] = rowData\n }\n return groupData\n}\n\n/**\n * Assemble physical columns into top-level columns asynchronously.\n *\n * @param {AsyncRowGroup} asyncRowGroup\n * @param {SchemaTree} schemaTree\n * @returns {AsyncRowGroup}\n */\nexport function assembleAsync(asyncRowGroup, schemaTree) {\n const { asyncColumns } = asyncRowGroup\n /** @type {AsyncColumn[]} */\n const assembled = []\n for (const child of schemaTree.children) {\n if (child.children.length) {\n const childColumns = asyncColumns.filter(column => column.pathInSchema[0] === child.element.name)\n if (!childColumns.length) continue\n\n // wait for all child columns to be read\n /** @type {Map} */\n const flatData = new Map()\n const data = Promise.all(childColumns.map(column => {\n return column.data.then(columnData => {\n flatData.set(column.pathInSchema.join('.'), flatten(columnData))\n })\n })).then(() => {\n // assemble the column\n assembleNested(flatData, child)\n const flatColumn = flatData.get(child.path.join('.'))\n if (!flatColumn) throw new Error('parquet column data not assembled')\n return [flatColumn]\n })\n\n assembled.push({ pathInSchema: child.path, data })\n } else {\n // leaf node, return the column\n const asyncColumn = asyncColumns.find(column => column.pathInSchema[0] === child.element.name)\n if (asyncColumn) {\n assembled.push(asyncColumn)\n }\n }\n }\n return { ...asyncRowGroup, asyncColumns: assembled }\n}\n","import { parquetMetadataAsync, parquetSchema } from './metadata.js'\nimport { parquetPlan, prefetchAsyncBuffer } from './plan.js'\nimport { assembleAsync, asyncGroupToRows, readRowGroup } from './rowgroup.js'\nimport { concat, flatten } from './utils.js'\n\n/**\n * @import {AsyncRowGroup, DecodedArray, ParquetReadOptions, BaseParquetReadOptions} from '../src/types.js'\n */\n/**\n * Read parquet data rows from a file-like object.\n * Reads the minimal number of row groups and columns to satisfy the 
request.\n *\n * Returns a void promise when complete.\n * Errors are thrown on the returned promise.\n * Data is returned in callbacks onComplete, onChunk, onPage, NOT the return promise.\n * See parquetReadObjects for a more convenient API.\n *\n * @param {ParquetReadOptions} options read options\n * @returns {Promise} resolves when all requested rows and columns are parsed, all errors are thrown here\n */\nexport async function parquetRead(options) {\n // load metadata if not provided\n options.metadata ??= await parquetMetadataAsync(options.file)\n\n // read row groups\n const asyncGroups = parquetReadAsync(options)\n\n const { rowStart = 0, rowEnd, columns, onChunk, onComplete, rowFormat } = options\n\n // skip assembly if no onComplete or onChunk, but wait for reading to finish\n if (!onComplete && !onChunk) {\n for (const { asyncColumns } of asyncGroups) {\n for (const { data } of asyncColumns) await data\n }\n return\n }\n\n // assemble struct columns\n const schemaTree = parquetSchema(options.metadata)\n const assembled = asyncGroups.map(arg => assembleAsync(arg, schemaTree))\n\n // onChunk emit all chunks (don't await)\n if (onChunk) {\n for (const asyncGroup of assembled) {\n for (const asyncColumn of asyncGroup.asyncColumns) {\n asyncColumn.data.then(columnDatas => {\n let rowStart = asyncGroup.groupStart\n for (const columnData of columnDatas) {\n onChunk({\n columnName: asyncColumn.pathInSchema[0],\n columnData,\n rowStart,\n rowEnd: rowStart + columnData.length,\n })\n rowStart += columnData.length\n }\n })\n }\n }\n }\n\n // onComplete transpose column chunks to rows\n if (onComplete) {\n // loosen the types to avoid duplicate code\n /** @type {any[]} */\n const rows = []\n for (const asyncGroup of assembled) {\n // filter to rows in range\n const selectStart = Math.max(rowStart - asyncGroup.groupStart, 0)\n const selectEnd = Math.min((rowEnd ?? 
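// Example (sketch, not part of the bundled source): read a row range of two
// columns; rows are delivered through onComplete, not the returned promise.
// `file` is any AsyncBuffer, e.g. from asyncBufferFromUrl.
await parquetRead({
  file,
  columns: ['id', 'name'],
  rowStart: 10,
  rowEnd: 20,
  onComplete: rows => console.log(rows),
})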
Infinity) - asyncGroup.groupStart, asyncGroup.groupRows)\n // transpose column chunks to rows in output\n const groupData = rowFormat === 'object' ?\n await asyncGroupToRows(asyncGroup, selectStart, selectEnd, columns, 'object') :\n await asyncGroupToRows(asyncGroup, selectStart, selectEnd, columns, 'array')\n concat(rows, groupData)\n }\n onComplete(rows)\n } else {\n // wait for all async groups to finish (complete takes care of this)\n for (const { asyncColumns } of assembled) {\n for (const { data } of asyncColumns) await data\n }\n }\n}\n\n/**\n * @param {ParquetReadOptions} options read options\n * @returns {AsyncRowGroup[]}\n */\nexport function parquetReadAsync(options) {\n if (!options.metadata) throw new Error('parquet requires metadata')\n // TODO: validate options (start, end, columns, etc)\n\n // prefetch byte ranges\n const plan = parquetPlan(options)\n options.file = prefetchAsyncBuffer(options.file, plan)\n\n // read row groups\n return plan.groups.map(groupPlan => readRowGroup(options, plan, groupPlan))\n}\n\n/**\n * Reads a single column from a parquet file.\n *\n * @param {BaseParquetReadOptions} options\n * @returns {Promise}\n */\nexport async function parquetReadColumn(options) {\n if (options.columns?.length !== 1) {\n throw new Error('parquetReadColumn expected columns: [columnName]')\n }\n options.metadata ??= await parquetMetadataAsync(options.file)\n const asyncGroups = parquetReadAsync(options)\n\n // assemble struct columns\n const schemaTree = parquetSchema(options.metadata)\n const assembled = asyncGroups.map(arg => assembleAsync(arg, schemaTree))\n\n /** @type {DecodedArray[]} */\n const columnData = []\n for (const rg of assembled) {\n columnData.push(flatten(await rg.asyncColumns[0].data))\n }\n return flatten(columnData)\n}\n\n/**\n * This is a helper function to read parquet row data as a promise.\n * It is a wrapper around the more configurable parquetRead function.\n *\n * @param {Omit} options\n * @returns {Promise[]>} resolves when all requested rows and columns are parsed\n */\nexport function parquetReadObjects(options) {\n return new Promise((onComplete, reject) => {\n parquetRead({\n ...options,\n rowFormat: 'object', // force object output\n onComplete,\n }).catch(reject)\n })\n}\n","import { parquetMetadataAsync, parquetReadObjects } from 'hyparquet'\nimport { wkbToGeojson } from 'hyparquet/src/wkb.js'\n\n/**\n * Convert a GeoParquet file to GeoJSON.\n * Input is an AsyncBuffer representing a GeoParquet file.\n * An AsyncBuffer is a buffer-like object that can be read asynchronously.\n *\n * @import { AsyncBuffer, Compressors } from 'hyparquet'\n * @import { Feature, GeoJSON } from '../src/geojson.js'\n * @param {Object} options\n * @param {AsyncBuffer} options.file\n * @param {Compressors} [options.compressors]\n * @returns {Promise}\n */\nexport async function toGeoJson({ file, compressors }) {\n const metadata = await parquetMetadataAsync(file)\n const geoMetadata = metadata.key_value_metadata?.find(kv => kv.key === 'geo')\n if (!geoMetadata) {\n throw new Error('Invalid GeoParquet file: missing \"geo\" metadata')\n }\n\n // Geoparquet metadata\n const geoSchema = JSON.parse(geoMetadata.value || '{}')\n\n // Read all parquet data\n const data = await parquetReadObjects({ file, metadata, utf8: false, compressors })\n\n /** @type {Feature[]} */\n const features = []\n const primaryColumn = geoSchema.primary_column || 'geometry'\n for (const row of data) {\n const wkb = row[primaryColumn]\n if (!wkb) {\n // No geometry\n continue\n }\n\n const 
geometry = decodeWKB(wkb)\n\n // Extract properties (all fields except geometry)\n /** @type {Record} */\n const properties = {}\n for (const key of Object.keys(row)) {\n const value = row[key]\n if (key !== primaryColumn && value !== null) {\n properties[key] = value\n }\n }\n\n /** @type {Feature} */\n const feature = {\n type: 'Feature',\n geometry,\n properties,\n }\n\n features.push(feature)\n }\n\n return {\n type: 'FeatureCollection',\n features,\n }\n}\n\n/**\n * @param {Uint8Array} buffer\n */\nexport function decodeWKB(buffer) {\n return wkbToGeojson({ view: new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength), offset: 0 })\n}\n","import { asyncBufferFromUrl, cachedAsyncBuffer } from 'hyparquet'\nimport { toGeoJson } from '../src/index.js'\n\nasync function initMap() {\n // @ts-expect-error MapsLibrary\n const { Map } = await google.maps.importLibrary('maps')\n const div = /** @type {HTMLElement} */document.getElementById('map')\n // Create a new map\n const map = new Map(div, {\n center: { lat: 39, lng: -98 },\n zoom: 4,\n })\n\n // URL or path to your GeoParquet file\n const parquetUrl = 'https://hyparam.github.io/geoparquet/demo/polys.parquet'\n\n try {\n // Read the GeoParquet file and convert to GeoJSON\n const file = cachedAsyncBuffer(\n await asyncBufferFromUrl({ url: parquetUrl, byteLength: 29838 })\n )\n console.log('GeoParquet file:', file)\n const geojson = await toGeoJson({ file })\n\n console.log('GeoJSON:', geojson)\n\n // Add the GeoJSON data to the map\n map.data.addGeoJson(geojson)\n } catch (error) {\n console.error('Error loading or parsing GeoParquet file:', error)\n }\n}\ninitMap()\n"],"names":["ParquetType","Encoding","FieldRepetitionType","ConvertedType","CompressionCodec","PageType","EdgeInterpolationAlgorithm","wkbToGeojson","reader","flags","getFlags","type","coordinates","readPosition","readLine","readPolygon","points","i","count","push","lines","polygons","geometries","Error","view","littleEndian","getUint8","offset","rawType","getUint32","Math","floor","dim","coord","getFloat64","rings","r","decoder","TextDecoder","DEFAULT_PARSERS","timestampFromMilliseconds","millis","Date","Number","timestampFromMicroseconds","micros","timestampFromNanoseconds","nanos","dateFromDays","days","stringFromBytes","bytes","decode","geometryFromBytes","DataView","buffer","byteOffset","byteLength","geographyFromBytes","convertWithDictionary","data","dictionary","encoding","columnDecoder","endsWith","output","Uint8Array","constructor","length","convert","element","parsers","utf8","converted_type","ctype","logical_type","ltype","factor","scale","arr","Array","parseDecimal","from","map","v","value","JSON","parse","bitWidth","isSigned","BigInt64Array","BigUint64Array","BigInt","Int32Array","Uint32Array","parseFloat16","unit","parser","byte","bits","int16","sign","exp","frac","NaN","Infinity","schemaTree","schema","rootIndex","path","children","num_children","childElement","child","name","getSchemaPath","tree","part","find","getMaxRepetitionLevel","schemaPath","maxLevel","repetition_type","getMaxDefinitionLevel","slice","isFlatColumn","column","CompactType","deserializeTCompactProtocol","lastFid","fid","newLastFid","readFieldBegin","readElement","getInt8","readZigZag","readZigZagBigInt","stringLength","readVarInt","strBytes","elemType","listSize","boolType","values","structValues","fieldType","result","shift","zigzag","readVarBigInt","delta","async","parquetMetadataAsync","asyncBuffer","initialFetchSize","defaultInitialFetchSize","footerOffset","max","footerBuffer","
footerView","metadataLength","metadataOffset","metadataBuffer","combinedBuffer","ArrayBuffer","combinedView","set","parquetMetadata","arrayBuffer","metadataLengthOffset","metadata","version","field_1","field_2","field","type_length","field_3","field_4","field_5","field_6","field_7","precision","field_8","field_id","field_9","logicalType","field_10","columnSchema","filter","e","num_rows","row_groups","rowGroup","columns","columnIndex","file_path","file_offset","meta_data","encodings","path_in_schema","codec","num_values","total_uncompressed_size","total_compressed_size","key_value_metadata","data_page_offset","index_page_offset","dictionary_page_offset","field_11","statistics","convertStats","field_12","encoding_stats","field_13","encodingStat","page_type","bloom_filter_offset","field_14","bloom_filter_length","field_15","size_statistics","field_16","unencoded_byte_array_data_bytes","repetition_level_histogram","definition_level_histogram","geospatial_statistics","field_17","bbox","xmin","xmax","ymin","ymax","zmin","zmax","mmin","mmax","geospatial_types","offset_index_offset","offset_index_length","column_index_offset","column_index_length","crypto_metadata","encrypted_column_metadata","total_byte_size","sorting_columns","sortingColumn","column_idx","descending","nulls_first","ordinal","keyValue","key","created_by","metadata_length","isAdjustedToUTC","timeUnit","crs","field_18","algorithm","stats","convertMetadata","min","null_count","distinct_count","max_value","min_value","is_max_value_exact","is_min_value_exact","undefined","getFloat32","getInt32","getBigInt64","concat","aaa","bbb","asyncBufferFromUrl","url","requestInit","fetch","customFetch","globalThis","method","then","res","ok","status","headers","get","parseInt","byteLengthFromUrl","init","start","end","Headers","endStr","body","cachedAsyncBuffer","minSize","cache","Map","size","cacheKey","cached","promise","flatten","chunks","chunk","getColumnRange","columnOffset","startByte","endByte","assembleLists","definitionLevels","repetitionLevels","n","maxDefinitionLevel","repetitionPath","valueIndex","containerStack","currentContainer","currentDepth","currentDefLevel","currentRepLevel","at","def","rep","pop","newList","assembleNested","subcolumnData","depth","join","optional","nextDepth","firstChild","isListLike","sublist","subDepth","subcolumn","flattenAtDepth","delete","keyChild","valueChild","isMapLike","mapName","keys","out","assembleMaps","invertDepth","struct","childData","inverted","invertStruct","obj","j","Object","deltaBinaryUnpack","int32","blockSize","miniblockPerBlock","outputIndex","valuesPerMiniblock","minDelta","bitWidths","bitpackPos","miniblockCount","mask","ceil","deltaLengthByteArray","lengths","clz32","readRleBitPackedHybrid","width","startOffset","seen","header","readBitPacked","readRle","left","right","byteStreamSplit","typeLength","byteWidth","b","Float32Array","Float64Array","split","subarray","readPlain","fixedLength","bitOffset","readPlainBoolean","align","readPlainInt32","readPlainInt64","low","high","readPlainInt96","readPlainFloat","readPlainDouble","readPlainByteArray","readPlainByteArrayFixed","aligned","WORD_MASK","copyBytes","fromArray","fromPos","toArray","toPos","readDataPage","daph","dataPage","maxRepetitionLevel","readRepetitionLevels","numNulls","readDefinitionLevels","nValues","x","decompressPage","compressedBytes","uncompressed_page_size","compressors","page","customDecompressor","input","inputLength","outputLength","pos","outPos","c","len","isNaN","lengthSize","snappyUncompress","readDataPageV2","ph
","daph2","data_page_header_v2","repetition_levels_byte_length","readRepetitionLevelsV2","definition_levels_byte_length","readDefinitionLevelsV2","uncompressedPageSize","is_compressed","pageView","pageReader","num_nulls","prefixData","suffixData","suffix","deltaByteArray","readColumn","groupStart","selectStart","selectEnd","onPage","columnName","isFlat","lastChunk","rowCount","emitLastChunk","columnData","rowStart","rowEnd","parquetHeader","readPage","lastChunkLength","previousChunk","pageStart","compressed_page_size","data_page_header","isArray","diph","dictionary_page_header","crc","definition_level_encoding","repetition_level_encoding","index_page_header","is_sorted","asyncGroupToRows","asyncColumns","rowFormat","columnDatas","Promise","all","includedColumnNames","pathInSchema","includes","columnOrder","columnIndexes","findIndex","selectCount","groupData","selectRow","row","rowData","parquetRead","options","file","asyncGroups","plan","groups","fetches","groupRows","groupEnd","ranges","groupSize","isFinite","parquetPlan","promises","index","endOffset","prefetchAsyncBuffer","groupPlan","columnBytes","console","warn","resolve","readRowGroup","parquetReadAsync","onChunk","onComplete","parquetSchema","assembled","arg","asyncRowGroup","childColumns","flatData","flatColumn","asyncColumn","assembleAsync","asyncGroup","rows","toGeoJson","geoMetadata","kv","geoSchema","reject","catch","features","primaryColumn","primary_column","wkb","geometry","decodeWKB","properties","feature","google","maps","importLibrary","document","getElementById","center","lat","lng","zoom","log","geojson","addGeoJson","error","initMap"],"mappings":"AACO,MAAMA,EAAc,CACzB,UACA,QACA,QACA,QACA,QACA,SACA,aACA,wBAIWC,EAAW,CACtB,QACA,gBACA,mBACA,MACA,aACA,sBACA,0BACA,mBACA,iBACA,qBAIWC,EAAsB,CACjC,WACA,WACA,YAIWC,EAAgB,CAC3B,OACA,MACA,gBACA,OACA,OACA,UACA,OACA,cACA,cACA,mBACA,mBACA,SACA,UACA,UACA,UACA,QACA,SACA,SACA,SACA,OACA,OACA,YAIWC,EAAmB,CAC9B,eACA,SACA,OACA,MACA,SACA,MACA,OACA,WAIWC,EAAW,CACtB,YACA,aACA,kBACA,gBAWWC,EAA6B,CACxC,YACA,WACA,SACA,UACA,UCrFK,SAASC,EAAaC,GAC3B,MAAMC,EAAQC,EAASF,GAEvB,GAAmB,IAAfC,EAAME,KACR,MAAO,CAAEA,KAAM,QAASC,YAAaC,EAAaL,EAAQC,IACrD,GAAmB,IAAfA,EAAME,KACf,MAAO,CAAEA,KAAM,aAAcC,YAAaE,EAASN,EAAQC,IACtD,GAAmB,IAAfA,EAAME,KACf,MAAO,CAAEA,KAAM,UAAWC,YAAaG,EAAYP,EAAQC,IACtD,GAAmB,IAAfA,EAAME,KAAY,CAC3B,MAAMK,EAAS,GACf,IAAK,IAAIC,EAAI,EAAGA,EAAIR,EAAMS,MAAOD,IAC/BD,EAAOG,KAAKN,EAAaL,EAAQE,EAASF,KAE5C,MAAO,CAAEG,KAAM,aAAcC,YAAaI,EAC5C,CAAO,GAAmB,IAAfP,EAAME,KAAY,CAC3B,MAAMS,EAAQ,GACd,IAAK,IAAIH,EAAI,EAAGA,EAAIR,EAAMS,MAAOD,IAC/BG,EAAMD,KAAKL,EAASN,EAAQE,EAASF,KAEvC,MAAO,CAAEG,KAAM,kBAAmBC,YAAaQ,EACjD,CAAO,GAAmB,IAAfX,EAAME,KAAY,CAC3B,MAAMU,EAAW,GACjB,IAAK,IAAIJ,EAAI,EAAGA,EAAIR,EAAMS,MAAOD,IAC/BI,EAASF,KAAKJ,EAAYP,EAAQE,EAASF,KAE7C,MAAO,CAAEG,KAAM,eAAgBC,YAAaS,EAC9C,CAAO,GAAmB,IAAfZ,EAAME,KAAY,CAC3B,MAAMW,EAAa,GACnB,IAAK,IAAIL,EAAI,EAAGA,EAAIR,EAAMS,MAAOD,IAC/BK,EAAWH,KAAKZ,EAAaC,IAE/B,MAAO,CAAEG,KAAM,qBAAsBW,aACvC,CACE,MAAM,IAAIC,MAAM,8BAA8Bd,EAAME,OAExD,CAgBA,SAASD,EAASF,GAChB,MAAMgB,KAAEA,GAAShB,EACXiB,EAAkD,IAAnCD,EAAKE,SAASlB,EAAOmB,UACpCC,EAAUJ,EAAKK,UAAUrB,EAAOmB,OAAQF,GAC9CjB,EAAOmB,QAAU,EAEjB,MAAMhB,EAAOiB,EAAU,IACjBnB,EAAQqB,KAAKC,MAAMH,EAAU,KAEnC,IAAIV,EAAQ,EACRP,EAAO,GAAKA,GAAQ,IACtBO,EAAQM,EAAKK,UAAUrB,EAAOmB,OAAQF,GACtCjB,EAAOmB,QAAU,GAInB,IAAIK,EAAM,EAIV,OAHIvB,GAAOuB,IACG,IAAVvB,GAAauB,IAEV,CAAEP,eAAcd,OAAMqB,MAAKd,QACpC,CAOA,SAASL,EAAaL,EAAQC,GAC5B,MAAMO,EAAS,GACf,IAAK,IAAIC,EAAI,EAAGA,EAAIR,EAAMuB,IAAKf,IAAK,CAClC,MAAMgB,EAAQzB,EAAOgB,KAAKU,WAAW1B,EAAOmB,OAAQlB,EAAMgB,cAC1DjB,EAAOmB,
QAAU,EACjBX,EAAOG,KAAKc,EACd,CACA,OAAOjB,CACT,CAOA,SAASF,EAASN,EAAQC,GACxB,MAAMO,EAAS,GACf,IAAK,IAAIC,EAAI,EAAGA,EAAIR,EAAMS,MAAOD,IAC/BD,EAAOG,KAAKN,EAAaL,EAAQC,IAEnC,OAAOO,CACT,CAOA,SAASD,EAAYP,EAAQC,GAC3B,MAAMe,KAAEA,GAAShB,EACX2B,EAAQ,GACd,IAAK,IAAIC,EAAI,EAAGA,EAAI3B,EAAMS,MAAOkB,IAAK,CACpC,MAAMlB,EAAQM,EAAKK,UAAUrB,EAAOmB,OAAQlB,EAAMgB,cAClDjB,EAAOmB,QAAU,EACjBQ,EAAMhB,KAAKL,EAASN,EAAQ,IAAKC,EAAOS,UAC1C,CACA,OAAOiB,CACT,CCtHA,MAAME,EAAU,IAAIC,YAMPC,EAAkB,CAC7BC,0BAA0BC,GACjB,IAAIC,KAAKC,OAAOF,IAEzBG,0BAA0BC,GACjB,IAAIH,KAAKC,OAAOE,EAAS,QAElCC,yBAAyBC,GAChB,IAAIL,KAAKC,OAAOI,EAAQ,WAEjCC,aAAaC,GACJ,IAAIP,KAAY,MAAPO,GAElBC,gBAAgBC,GACPA,GAASd,EAAQe,OAAOD,GAEjCE,kBAAkBF,GACTA,GAAS5C,EAAa,CAAEiB,KAAM,IAAI8B,SAASH,EAAMI,OAAQJ,EAAMK,WAAYL,EAAMM,YAAa9B,OAAQ,IAE/G+B,mBAAmBP,GACVA,GAAS5C,EAAa,CAAEiB,KAAM,IAAI8B,SAASH,EAAMI,OAAQJ,EAAMK,WAAYL,EAAMM,YAAa9B,OAAQ,KAa1G,SAASgC,EAAsBC,EAAMC,EAAYC,EAAUC,GAChE,GAAIF,GAAcC,EAASE,SAAS,eAAgB,CAClD,IAAIC,EAASL,EACTA,aAAgBM,cAAgBL,aAAsBK,cAExDD,EAAS,IAAIJ,EAAWM,YAAYP,EAAKQ,SAE3C,IAAK,IAAInD,EAAI,EAAGA,EAAI2C,EAAKQ,OAAQnD,IAC/BgD,EAAOhD,GAAK4C,EAAWD,EAAK3C,IAE9B,OAAOgD,CACT,CACE,OAAOI,EAAQT,EAAMG,EAEzB,CASO,SAASM,EAAQT,EAAMG,GAC5B,MAAMO,QAAEA,EAAOC,QAAEA,EAAOC,KAAEA,GAAO,GAAST,GACpCpD,KAAEA,EAAM8D,eAAgBC,EAAOC,aAAcC,GAAUN,EAC7D,GAAc,YAAVI,EAAqB,CACvB,MACMG,EAAS,MADDP,EAAQQ,OAAS,GAEzBC,EAAM,IAAIC,MAAMpB,EAAKQ,QAC3B,IAAK,IAAInD,EAAI,EAAGA,EAAI8D,EAAIX,OAAQnD,IAC1B2C,EAAK3C,aAAciD,WACrBa,EAAI9D,GAAKgE,EAAarB,EAAK3C,IAAM4D,EAEjCE,EAAI9D,GAAK0B,OAAOiB,EAAK3C,IAAM4D,EAG/B,OAAOE,CACT,CACA,IAAKL,GAAkB,UAAT/D,EACZ,OAAOqE,MAAME,KAAKtB,GAAMuB,IAAIC,IAAKb,SAAQzB,yBA4F7B,mBAHSuC,EAzF6DD,IA0F7D,KAAO,WACT,oBAARC,IAFf,IAAyBA,IAvFvB,GAAc,SAAVX,EACF,OAAOM,MAAME,KAAKtB,GAAMuB,IAAIC,GAAKb,EAAQvB,aAAaoC,IAExD,GAAc,qBAAVV,EACF,OAAOM,MAAME,KAAKtB,GAAMuB,IAAIC,GAAKb,EAAQ/B,0BAA0B4C,IAErE,GAAc,qBAAVV,EACF,OAAOM,MAAME,KAAKtB,GAAMuB,IAAIC,GAAKb,EAAQ3B,0BAA0BwC,IAErE,GAAc,SAAVV,EACF,OAAOd,EAAKuB,IAAIC,GAAKE,KAAKC,MAAMlD,EAAQe,OAAOgC,KAEjD,GAAc,SAAVV,EACF,MAAM,IAAInD,MAAM,8BAElB,GAAc,aAAVmD,EACF,MAAM,IAAInD,MAAM,kCAElB,GAAoB,aAAhBqD,GAAOjE,KACT,OAAOiD,EAAKuB,IAAIC,GAAKb,EAAQlB,kBAAkB+B,IAEjD,GAAoB,cAAhBR,GAAOjE,KACT,OAAOiD,EAAKuB,IAAIC,GAAKb,EAAQb,mBAAmB0B,IAElD,GAAc,SAAVV,GAAoC,WAAhBE,GAAOjE,MAAqB6D,GAAiB,eAAT7D,EAC1D,OAAOiD,EAAKuB,IAAIC,GAAKb,EAAQrB,gBAAgBkC,IAE/C,GAAc,YAAVV,GAAuC,YAAhBE,GAAOjE,MAAyC,KAAnBiE,EAAMY,WAAoBZ,EAAMa,SAAU,CAChG,GAAI7B,aAAgB8B,cAClB,OAAO,IAAIC,eAAe/B,EAAKL,OAAQK,EAAKJ,WAAYI,EAAKQ,QAE/D,MAAMW,EAAM,IAAIY,eAAe/B,EAAKQ,QACpC,IAAK,IAAInD,EAAI,EAAGA,EAAI8D,EAAIX,OAAQnD,IAAK8D,EAAI9D,GAAK2E,OAAOhC,EAAK3C,IAC1D,OAAO8D,CACT,CACA,GAAc,YAAVL,GAAuC,YAAhBE,GAAOjE,MAAyC,KAAnBiE,EAAMY,WAAoBZ,EAAMa,SAAU,CAChG,GAAI7B,aAAgBiC,WAClB,OAAO,IAAIC,YAAYlC,EAAKL,OAAQK,EAAKJ,WAAYI,EAAKQ,QAE5D,MAAMW,EAAM,IAAIe,YAAYlC,EAAKQ,QACjC,IAAK,IAAInD,EAAI,EAAGA,EAAI8D,EAAIX,OAAQnD,IAAK8D,EAAI9D,GAAK2C,EAAK3C,GACnD,OAAO8D,CACT,CACA,GAAoB,YAAhBH,GAAOjE,KACT,OAAOqE,MAAME,KAAKtB,GAAMuB,IAAIY,GAE9B,GAAoB,cAAhBnB,GAAOjE,KAAsB,CAC/B,MAAMqF,KAAEA,GAASpB,EAEjB,IAAIqB,EAAS1B,EAAQ/B,0BACR,WAATwD,IAAmBC,EAAS1B,EAAQ3B,2BAC3B,UAAToD,IAAkBC,EAAS1B,EAAQzB,0BACvC,MAAMiC,EAAM,IAAIC,MAAMpB,EAAKQ,QAC3B,IAAK,IAAInD,EAAI,EAAGA,EAAI8D,EAAIX,OAAQnD,IAC9B8D,EAAI9D,GAAKgF,EAAOrC,EAAK3C,IAEvB,OAAO8D,CACT,CACA,OAAOnB,CACT,CAMO,SAASqB,EAAa9B,GAC3B,IAAKA,EAAMiB,OAAQ,OAAO,EAE1B,IAAIiB,EAAQ,GACZ,IAAK,MAAMa,KAAQ/C,EACjBkC,EAAgB,KAARA,EAAeO,OAAOM,GAIhC,MAAMC,EAAsB,EAAfhD,EAAMiB,OAKnB,OAJIiB,GAAS,IAAMO,OAAOO,EAAO,KAC/Bd,GAAS,IAAMO,OAAOO,IAGjBxD,OAAO0C,EAChB,CAiBO,SAASU,EAAa5C,GAC3B,IAAKA,EAAO,OACZ,MAAMiD,EAAQjD,EAAM,IAAM,EAAIA,EAA
M,GAC9BkD,EAAOD,GAAS,MAAU,EAC1BE,EAAMF,GAAS,GAAK,GACpBG,EAAe,KAARH,EACb,OAAY,IAARE,EAAkBD,EAAO,IAAK,IAAOE,EAAO,MACpC,KAARD,EAAqBC,EAAOC,IAAMH,GAAOI,KACtCJ,EAAO,IAAMC,EAAM,KAAO,EAAIC,EAAO,KAC9C,CCxLA,SAASG,EAAWC,EAAQC,EAAWC,GACrC,MAAMvC,EAAUqC,EAAOC,GACjBE,EAAW,GACjB,IAAI5F,EAAQ,EAGZ,GAAIoD,EAAQyC,aACV,KAAOD,EAAS1C,OAASE,EAAQyC,cAAc,CAC7C,MAAMC,EAAeL,EAAOC,EAAY1F,GAClC+F,EAAQP,EAAWC,EAAQC,EAAY1F,EAAO,IAAI2F,EAAMG,EAAaE,OAC3EhG,GAAS+F,EAAM/F,MACf4F,EAAS3F,KAAK8F,EAChB,CAGF,MAAO,CAAE/F,QAAOoD,UAASwC,WAAUD,OACrC,CASO,SAASM,EAAcR,EAAQO,GACpC,IAAIE,EAAOV,EAAWC,EAAQ,EAAG,IACjC,MAAME,EAAO,CAACO,GACd,IAAK,MAAMC,KAAQH,EAAM,CACvB,MAAMD,EAAQG,EAAKN,SAASQ,KAAKL,GAASA,EAAM3C,QAAQ4C,OAASG,GACjE,IAAKJ,EAAO,MAAM,IAAI1F,MAAM,qCAAqC2F,KACjEL,EAAK1F,KAAK8F,GACVG,EAAOH,CACT,CACA,OAAOJ,CACT,CAQO,SAASU,EAAsBC,GACpC,IAAIC,EAAW,EACf,IAAK,MAAMnD,QAAEA,KAAakD,EACQ,aAA5BlD,EAAQoD,iBACVD,IAGJ,OAAOA,CACT,CAQO,SAASE,EAAsBH,GACpC,IAAIC,EAAW,EACf,IAAK,MAAMnD,QAAEA,KAAakD,EAAWI,MAAM,GACT,aAA5BtD,EAAQoD,iBACVD,IAGJ,OAAOA,CACT,CAkDO,SAASI,EAAaL,GAC3B,GAA0B,IAAtBA,EAAWpD,OAAc,OAAO,EACpC,MAAM,CAAG0D,GAAUN,EACnB,MAAuC,aAAnCM,EAAOxD,QAAQoD,kBACfI,EAAOhB,SAAS1C,MAEtB,CCnIO,MAAM2D,EACL,EADKA,EAEL,EAFKA,EAGJ,EAHIA,EAIL,EAJKA,EAKN,EALMA,EAMN,EANMA,EAON,EAPMA,EAQH,EARGA,EASH,EATGA,EAUL,EAVKA,EAaH,GAUH,SAASC,EAA4BxH,GAC1C,IAAIyH,EAAU,EAEd,MAAM5C,EAAQ,CAAA,EAEd,KAAO7E,EAAOmB,OAASnB,EAAOgB,KAAKiC,YAAY,CAE7C,MAAO9C,EAAMuH,EAAKC,GAAcC,EAAe5H,EAAQyH,GAGvD,GAFAA,EAAUE,EAENxH,IAASoH,EACX,MAIF1C,EAAM,SAAS6C,KAASG,EAAY7H,EAAQG,EAC9C,CAEA,OAAO0E,CACT,CAUA,SAASgD,EAAY7H,EAAQG,GAC3B,OAAQA,GACR,KAAKoH,EACH,OAAO,EACT,KAAKA,EACH,OAAO,EACT,KAAKA,EAEH,OAAOvH,EAAOgB,KAAK8G,QAAQ9H,EAAOmB,UACpC,KAAKoG,EACL,KAAKA,EACH,OAAOQ,EAAW/H,GACpB,KAAKuH,EACH,OAAOS,EAAiBhI,GAC1B,KAAKuH,EAAoB,CACvB,MAAM1C,EAAQ7E,EAAOgB,KAAKU,WAAW1B,EAAOmB,QAAQ,GAEpD,OADAnB,EAAOmB,QAAU,EACV0D,CACT,CACA,KAAK0C,EAAoB,CACvB,MAAMU,EAAeC,EAAWlI,GAC1BmI,EAAW,IAAIzE,WAAW1D,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAQ8G,GAE5F,OADAjI,EAAOmB,QAAU8G,EACVE,CACT,CACA,KAAKZ,EAAkB,CACrB,MAAM7B,EAAO1F,EAAOgB,KAAKE,SAASlB,EAAOmB,UACnCiH,EAAkB,GAAP1C,EACjB,IAAI2C,EAAW3C,GAAQ,EACN,KAAb2C,IACFA,EAAWH,EAAWlI,IAExB,MAAMsI,EAAWF,IAAab,GAAoBa,IAAab,EACzDgB,EAAS,IAAI/D,MAAM6D,GACzB,IAAK,IAAI5H,EAAI,EAAGA,EAAI4H,EAAU5H,IAC5B8H,EAAO9H,GAAK6H,EAAqD,IAA1CT,EAAY7H,EAAQuH,GAA0BM,EAAY7H,EAAQoI,GAE3F,OAAOG,CACT,CACA,KAAKhB,EAAoB,CAEvB,MAAMiB,EAAe,CAAA,EACrB,IAAIf,EAAU,EACd,OAAa,CACX,MAAOgB,EAAWf,EAAKC,GAAcC,EAAe5H,EAAQyH,GAE5D,GADAA,EAAUE,EACNc,IAAclB,EAChB,MAEFiB,EAAa,SAASd,KAASG,EAAY7H,EAAQyI,EACrD,CACA,OAAOD,CACT,CAEA,QACE,MAAM,IAAIzH,MAAM,0BAA0BZ,KAE9C,CASO,SAAS+H,EAAWlI,GACzB,IAAI0I,EAAS,EACTC,EAAQ,EACZ,OAAa,CACX,MAAMjD,EAAO1F,EAAOgB,KAAKE,SAASlB,EAAOmB,UAEzC,GADAuH,IAAkB,IAAPhD,IAAgBiD,IACd,IAAPjD,GACJ,OAAOgD,EAETC,GAAS,CACX,CACF,CA4BO,SAASZ,EAAW/H,GACzB,MAAM4I,EAASV,EAAWlI,GAE1B,OAAO4I,IAAW,IAAe,EAATA,EAC1B,CASO,SAASZ,EAAiBhI,GAC/B,MAAM4I,EAlCR,SAAuB5I,GACrB,IAAI0I,EAAS,GACTC,EAAQ,GACZ,OAAa,CACX,MAAMjD,EAAO1F,EAAOgB,KAAKE,SAASlB,EAAOmB,UAEzC,GADAuH,GAAUtD,OAAc,IAAPM,IAAgBiD,IACpB,IAAPjD,GACJ,OAAOgD,EAETC,GAAS,EACX,CACF,CAuBiBE,CAAc7I,GAE7B,OAAO4I,GAAU,KAAgB,GAATA,EAC1B,CASA,SAAShB,EAAe5H,EAAQyH,GAC9B,MAAM/B,EAAO1F,EAAOgB,KAAKE,SAASlB,EAAOmB,UACnChB,EAAc,GAAPuF,EACb,GAAIvF,IAASoH,EAEX,MAAO,CAAC,EAAG,EAAGE,GAEhB,MAAMqB,EAAQpD,GAAQ,EAChBgC,EAAMoB,EAAQrB,EAAUqB,EAAQf,EAAW/H,GACjD,MAAO,CAACG,EAAMuH,EAAKA,EACrB,CC7LO,MAED7F,EAAU,IAAIC,YACpB,SAASc,EAAiCiC,GACxC,OAAOA,GAAShD,EAAQe,OAAOiC,EACjC,CA0BOkE,eAAeC,EAAqBC,GAAalF,QAAEA,EAAOmF,iBAAEA,EAAmBC,QAA4B,IAChH,KAAKF,GAAiBA,EAAYhG,YAAc,GAAI,MAAM,IAAIlC,MAAM,gCAG
pE,MAAMqI,EAAe9H,KAAK+H,IAAI,EAAGJ,EAAYhG,WAAaiG,GACpDI,QAAqBL,EAAY7B,MAAMgC,EAAcH,EAAYhG,YAGjEsG,EAAa,IAAIzG,SAASwG,GAChC,GAAgE,YAA5DC,EAAWlI,UAAUiI,EAAarG,WAAa,GAAG,GACpD,MAAM,IAAIlC,MAAM,yCAKlB,MAAMyI,EAAiBD,EAAWlI,UAAUiI,EAAarG,WAAa,GAAG,GACzE,GAAIuG,EAAiBP,EAAYhG,WAAa,EAC5C,MAAM,IAAIlC,MAAM,2BAA2ByI,8BAA2CP,EAAYhG,WAAa,KAIjH,GAAIuG,EAAiB,EAAIN,EAAkB,CAEzC,MAAMO,EAAiBR,EAAYhG,WAAauG,EAAiB,EAC3DE,QAAuBT,EAAY7B,MAAMqC,EAAgBL,GAEzDO,EAAiB,IAAIC,YAAYJ,EAAiB,GAClDK,EAAe,IAAInG,WAAWiG,GAGpC,OAFAE,EAAaC,IAAI,IAAIpG,WAAWgG,IAChCG,EAAaC,IAAI,IAAIpG,WAAW4F,GAAeF,EAAeK,GACvDM,EAAgBJ,EAAgB,CAAE5F,WAC3C,CAEE,OAAOgG,EAAgBT,EAAc,CAAEvF,WAE3C,CASO,SAASgG,EAAgBC,GAAajG,QAAEA,GAAY,CAAA,GACzD,KAAMiG,aAAuBJ,aAAc,MAAM,IAAI7I,MAAM,gCAC3D,MAAMC,EAAO,IAAI8B,SAASkH,GAM1B,GAHAjG,EAAU,IAAKhC,KAAoBgC,GAG/B/C,EAAKiC,WAAa,EACpB,MAAM,IAAIlC,MAAM,6BAElB,GAAkD,YAA9CC,EAAKK,UAAUL,EAAKiC,WAAa,GAAG,GACtC,MAAM,IAAIlC,MAAM,yCAKlB,MAAMkJ,EAAuBjJ,EAAKiC,WAAa,EACzCuG,EAAiBxI,EAAKK,UAAU4I,GAAsB,GAC5D,GAAIT,EAAiBxI,EAAKiC,WAAa,EAErC,MAAM,IAAIlC,MAAM,2BAA2ByI,8BAA2CxI,EAAKiC,WAAa,KAG1G,MAEMiH,EAAW1C,EADF,CAAExG,OAAMG,OADA8I,EAAuBT,IAKxCW,EAAUD,EAASE,QAEnBjE,EAAS+D,EAASG,QAAQ1F,IAAwB2F,IAAK,CAC3DnK,KAAMX,EAAY8K,EAAMF,SACxBG,YAAaD,EAAMD,QACnBnD,gBAAiBxH,EAAoB4K,EAAME,SAC3C9D,KAAM9D,EAAO0H,EAAMG,SACnBlE,aAAc+D,EAAMI,QACpBzG,eAAgBtE,EAAc2K,EAAMK,SACpCrG,MAAOgG,EAAMM,QACbC,UAAWP,EAAMQ,QACjBC,SAAUT,EAAMU,QAChB7G,aAAc8G,EAAYX,EAAMY,aAG5BC,EAAehF,EAAOiF,OAAOC,GAAKA,EAAElL,MACpCmL,EAAWpB,EAASM,QACpBe,EAAarB,EAASO,QAAQ9F,IAAwB6G,IAAQ,CAClEC,QAASD,EAASpB,QAAQzF,IAAI,CAAoB2C,EAA8BoE,KAAW,CACzFC,UAAW/I,EAAO0E,EAAO8C,SACzBwB,YAAatE,EAAO+C,QACpBwB,UAAWvE,EAAOkD,SAAW,CAC3BrK,KAAMX,EAAY8H,EAAOkD,QAAQJ,SACjC0B,UAAWxE,EAAOkD,QAAQH,SAAS1F,IAA2B0G,GAAM5L,EAAS4L,IAC7EU,eAAgBzE,EAAOkD,QAAQA,QAAQ7F,IAAI/B,GAC3CoJ,MAAOpM,EAAiB0H,EAAOkD,QAAQC,SACvCwB,WAAY3E,EAAOkD,QAAQE,QAC3BwB,wBAAyB5E,EAAOkD,QAAQG,QACxCwB,sBAAuB7E,EAAOkD,QAAQI,QACtCwB,mBAAoB9E,EAAOkD,QAAQM,QACnCuB,iBAAkB/E,EAAOkD,QAAQQ,QACjCsB,kBAAmBhF,EAAOkD,QAAQU,SAClCqB,uBAAwBjF,EAAOkD,QAAQgC,SACvCC,WAAYC,EAAapF,EAAOkD,QAAQmC,SAAUxB,EAAaO,GAAc3H,GAC7E6I,eAAgBtF,EAAOkD,QAAQqC,UAAUlI,IAAwBmI,IAAY,CAC3EC,UAAWlN,EAASiN,EAAa1C,SACjC9G,SAAU7D,EAASqN,EAAazC,SAChC3J,MAAOoM,EAAatC,WAEtBwC,oBAAqB1F,EAAOkD,QAAQyC,SACpCC,oBAAqB5F,EAAOkD,QAAQ2C,SACpCC,gBAAiB9F,EAAOkD,QAAQ6C,UAAY,CAC1CC,gCAAiChG,EAAOkD,QAAQ6C,SAASjD,QACzDmD,2BAA4BjG,EAAOkD,QAAQ6C,SAAShD,QACpDmD,2BAA4BlG,EAAOkD,QAAQ6C,SAAS7C,SAEtDiD,sBAAuBnG,EAAOkD,QAAQkD,UAAY,CAChDC,KAAMrG,EAAOkD,QAAQkD,SAAStD,SAAW,CACvCwD,KAAMtG,EAAOkD,QAAQkD,SAAStD,QAAQA,QACtCyD,KAAMvG,EAAOkD,QAAQkD,SAAStD,QAAQC,QACtCyD,KAAMxG,EAAOkD,QAAQkD,SAAStD,QAAQI,QACtCuD,KAAMzG,EAAOkD,QAAQkD,SAAStD,QAAQK,QACtCuD,KAAM1G,EAAOkD,QAAQkD,SAAStD,QAAQM,QACtCuD,KAAM3G,EAAOkD,QAAQkD,SAAStD,QAAQO,QACtCuD,KAAM5G,EAAOkD,QAAQkD,SAAStD,QAAQQ,QACtCuD,KAAM7G,EAAOkD,QAAQkD,SAAStD,QAAQU,SAExCsD,iBAAkB9G,EAAOkD,QAAQkD,SAASrD,UAG9CgE,oBAAqB/G,EAAOmD,QAC5B6D,oBAAqBhH,EAAOoD,QAC5B6D,oBAAqBjH,EAAOqD,QAC5B6D,oBAAqBlH,EAAOsD,QAC5B6D,gBAAiBnH,EAAOwD,QACxB4D,0BAA2BpH,EAAO0D,WAEpC2D,gBAAiBnD,EAASnB,QAC1BiB,SAAUE,EAAShB,QACnBoE,gBAAiBpD,EAASf,SAAS9F,IAAwBkK,IAAa,CACtEC,WAAYD,EAAczE,QAC1B2E,WAAYF,EAAcxE,QAC1B2E,YAAaH,EAAcrE,WAE7BoB,YAAaJ,EAASd,QACtByB,sBAAuBX,EAASb,QAChCsE,QAASzD,EAASZ,WAEdwB,EAAqBlC,EAASQ,SAAS/F,IAAwBuK,IAAQ,CAC3EC,IAAKvM,EAAOsM,EAAS9E,SACrBvF,MAAOjC,EAAOsM,EAAS7E,YAIzB,MAAO,CACLF,UACAhE,SACAmF,WACAC,aACAa,qBACAgD,WARiBxM,EAAOsH,EAASS,SASjC0E,gBAAiB7F,EAErB,CAgBA,SAASyB,EAAYA,GACnB,OAAIA,GAAab,QAAgB,CAAEjK,KAAM,UACrC8K,GAAaZ,QAAgB,CAAElK,KAAM,OACrC8K,GAAaT,QAAgB,CAAErK,KAAM,QACrC8K,GAAaR,QAAgB,CAAEtK
,KAAM,QACrC8K,GAAaP,QAAgB,CAC/BvK,KAAM,UACNmE,MAAO2G,EAAYP,QAAQN,QAC3BS,UAAWI,EAAYP,QAAQL,SAE7BY,GAAaN,QAAgB,CAAExK,KAAM,QACrC8K,GAAaL,QAAgB,CAC/BzK,KAAM,OACNmP,gBAAiBrE,EAAYL,QAAQR,QACrC5E,KAAM+J,EAAStE,EAAYL,QAAQP,UAEjCY,GAAaH,QAAgB,CAC/B3K,KAAM,YACNmP,gBAAiBrE,EAAYH,QAAQV,QACrC5E,KAAM+J,EAAStE,EAAYH,QAAQT,UAEjCY,GAAaC,SAAiB,CAChC/K,KAAM,UACN6E,SAAUiG,EAAYC,SAASd,QAC/BnF,SAAUgG,EAAYC,SAASb,SAE7BY,GAAauB,SAAiB,CAAErM,KAAM,QACtC8K,GAAa0B,SAAiB,CAAExM,KAAM,QACtC8K,GAAa4B,SAAiB,CAAE1M,KAAM,QACtC8K,GAAagC,SAAiB,CAAE9M,KAAM,QACtC8K,GAAakC,SAAiB,CAAEhN,KAAM,WACtC8K,GAAaoC,SAAiB,CAAElN,KAAM,WACtC8K,GAAayC,SAAiB,CAChCvN,KAAM,WACNqP,IAAK5M,EAAOqI,EAAYyC,SAAStD,UAE/Ba,GAAawE,SAAiB,CAChCtP,KAAM,YACNqP,IAAK5M,EAAOqI,EAAYwE,SAASrF,SACjCsF,UAAW5P,EAA2BmL,EAAYwE,SAASpF,UAEtDY,CACT,CAMA,SAASsE,EAAS/J,GAChB,GAAIA,EAAK4E,QAAS,MAAO,SACzB,GAAI5E,EAAK6E,QAAS,MAAO,SACzB,GAAI7E,EAAKgF,QAAS,MAAO,QACzB,MAAM,IAAIzJ,MAAM,6BAClB,CAWA,SAAS2L,EAAaiD,EAAOxJ,EAAQpC,GACnC,OAAO4L,GAAS,CACdtG,IAAKuG,EAAgBD,EAAMvF,QAASjE,EAAQpC,GAC5C8L,IAAKD,EAAgBD,EAAMtF,QAASlE,EAAQpC,GAC5C+L,WAAYH,EAAMnF,QAClBuF,eAAgBJ,EAAMlF,QACtBuF,UAAWJ,EAAgBD,EAAMjF,QAASvE,EAAQpC,GAClDkM,UAAWL,EAAgBD,EAAMhF,QAASxE,EAAQpC,GAClDmM,mBAAoBP,EAAM/E,QAC1BuF,mBAAoBR,EAAM7E,QAE9B,CAQO,SAAS8E,EAAgB/K,EAAOsB,EAAQpC,GAC7C,MAAM5D,KAAEA,EAAI8D,eAAEA,EAAcE,aAAEA,GAAiBgC,EAC/C,QAAciK,IAAVvL,EAAqB,OAAOA,EAChC,GAAa,YAAT1E,EAAoB,OAAoB,IAAb0E,EAAM,GACrC,GAAa,eAAT1E,EAAuB,OAAO4D,EAAQrB,gBAAgBmC,GAC1D,MAAM7D,EAAO,IAAI8B,SAAS+B,EAAM9B,OAAQ8B,EAAM7B,WAAY6B,EAAM5B,YAChE,MAAa,UAAT9C,GAAwC,IAApBa,EAAKiC,WAAyBjC,EAAKqP,WAAW,GAAG,GAC5D,WAATlQ,GAAyC,IAApBa,EAAKiC,WAAyBjC,EAAKU,WAAW,GAAG,GAC7D,UAATvB,GAAuC,SAAnB8D,EAAkCF,EAAQvB,aAAaxB,EAAKsP,SAAS,GAAG,IACnF,UAATnQ,GAAuC,qBAAnB8D,EAA8CF,EAAQ/B,0BAA0BhB,EAAKuP,YAAY,GAAG,IAC/G,UAATpQ,GAAuC,qBAAnB8D,EAA8CF,EAAQ3B,0BAA0BpB,EAAKuP,YAAY,GAAG,IAC/G,UAATpQ,GAA2C,cAAvBgE,GAAchE,MAA+C,UAAvBgE,GAAcqB,KAAyBzB,EAAQzB,yBAAyBtB,EAAKuP,YAAY,GAAG,IAC7I,UAATpQ,GAA2C,cAAvBgE,GAAchE,MAA+C,WAAvBgE,GAAcqB,KAA0BzB,EAAQ3B,0BAA0BpB,EAAKuP,YAAY,GAAG,IAC/I,UAATpQ,GAA2C,cAAvBgE,GAAchE,KAA6B4D,EAAQ/B,0BAA0BhB,EAAKuP,YAAY,GAAG,IAC5G,UAATpQ,GAAwC,IAApBa,EAAKiC,WAAyBjC,EAAKsP,SAAS,GAAG,GAC1D,UAATnQ,GAAwC,IAApBa,EAAKiC,WAAyBjC,EAAKuP,YAAY,GAAG,GACnD,YAAnBtM,EAAqCQ,EAAaI,GAAS,MAAQsB,EAAO7B,OAAS,GAC5D,YAAvBH,GAAchE,KAA2BoF,EAAaV,GACdA,CAG9C,CClSO,SAAS2L,EAAOC,EAAKC,GAE1B,IAAK,IAAIjQ,EAAI,EAAGA,EAAIiQ,EAAI9M,OAAQnD,GADlB,IAEZgQ,EAAI9P,QAAQ+P,EAAItJ,MAAM3G,EAAGA,EAFb,KAIhB,CAmDOsI,eAAe4H,GAAmBC,IAAEA,EAAG3N,WAAEA,EAAU4N,YAAEA,EAAaC,MAAOC,IAE9E,MAAMD,EAAQC,GAAeC,WAAWF,MAQxC,IAAI/N,EANJE,UA5BK8F,eAAiC6H,EAAKC,EAAaE,GACxD,MAAMD,EAAQC,GAAeC,WAAWF,MACxC,aAAaA,EAAMF,EAAK,IAAKC,EAAaI,OAAQ,SAC/CC,KAAKC,IACJ,IAAKA,EAAIC,GAAI,MAAM,IAAIrQ,MAAM,qBAAqBoQ,EAAIE,UACtD,MAAMzN,EAASuN,EAAIG,QAAQC,IAAI,kBAC/B,IAAK3N,EAAQ,MAAM,IAAI7C,MAAM,0BAC7B,OAAOyQ,SAAS5N,IAEtB,CAmBuB6N,CAAkBb,EAAKC,EAAaC,GAOzD,MAAMY,EAAOb,GAAe,CAAA,EAE5B,MAAO,CACL5N,aACA,WAAMmE,CAAMuK,EAAOC,GACjB,GAAI7O,EACF,OAAOA,EAAOmO,KAAKnO,GAAUA,EAAOqE,MAAMuK,EAAOC,IAGnD,MAAMN,EAAU,IAAIO,QAAQH,EAAKJ,SAC3BQ,OAAiB1B,IAARwB,EAAoB,GAAKA,EAAM,EAC9CN,EAAQxH,IAAI,QAAS,SAAS6H,KAASG,KAEvC,MAAMX,QAAYL,EAAMF,EAAK,IAAKc,EAAMJ,YACxC,IAAKH,EAAIC,KAAOD,EAAIY,KAAM,MAAM,IAAIhR,MAAM,gBAAgBoQ,EAAIE,UAE9D,GAAmB,MAAfF,EAAIE,OAGN,OADAtO,EAASoO,EAAInH,cACNjH,EAAOmO,KAAKnO,GAAUA,EAAOqE,MAAMuK,EAAOC,IAC5C,GAAmB,MAAfT,EAAIE,OAEb,OAAOF,EAAInH,cAEX,MAAM,IAAIjJ,MAAM,yCAAyCoQ,EAAIE,SAEjE,EAEJ,CAUO,SAASW,GAAkB/O,WAAEA,EAAUmE,MAAEA,IAAS6K,QAAEA,EAAU9I,QAA4B,IAC/F,GAAIlG,EAAagP,EAAS,CAExB,MAAMlP,EAASqE,EAAM,EAAGnE,GACxB,MAAO,CACLA,aACA8F,MAAW,MAAC4I,EAAOC,WACH7
O,GAAQqE,MAAMuK,EAAOC,GAGzC,CACA,MAAMM,EAAQ,IAAIC,IAClB,MAAO,CACLlP,aAMA,KAAAmE,CAAMuK,EAAOC,GACX,MAAMzC,EAsBZ,SAAkBwC,EAAOC,EAAKQ,GAC5B,GAAIT,EAAQ,EAAG,CACb,QAAYvB,IAARwB,EAAmB,MAAM,IAAI7Q,MAAM,yBAAyB4Q,MAAUC,MAC1E,YAAaxB,IAATgC,EAA2B,GAAGT,KAC3B,GAAGS,EAAOT,KAASS,GAC5B,CAAO,QAAYhC,IAARwB,EAAmB,CAC5B,GAAID,EAAQC,EAAK,MAAM,IAAI7Q,MAAM,wBAAwB4Q,MAAUC,MACnE,MAAO,GAAGD,KAASC,GACrB,CAAO,YAAaxB,IAATgC,EACF,GAAGT,KAEH,GAAGA,KAASS,GAEvB,CAnCkBC,CAASV,EAAOC,EAAK3O,GAC3BqP,EAASJ,EAAMX,IAAIpC,GACzB,GAAImD,EAAQ,OAAOA,EAEnB,MAAMC,EAAUnL,EAAMuK,EAAOC,GAE7B,OADAM,EAAMpI,IAAIqF,EAAKoD,GACRA,CACT,EAEJ,CAkCO,SAASC,EAAQC,GACtB,IAAKA,EAAQ,MAAO,GACpB,GAAsB,IAAlBA,EAAO7O,OAAc,OAAO6O,EAAO,GAEvC,MAAMhP,EAAS,GACf,IAAK,MAAMiP,KAASD,EAClBjC,EAAO/M,EAAQiP,GAEjB,OAAOjP,CACT,CC3IO,SAASkP,GAAepG,uBAAEA,EAAsBF,iBAAEA,EAAgBF,sBAAEA,IACzE,MAAMyG,EAAerG,GAA0BF,EAC/C,MAAO,CACLwG,UAAW1Q,OAAOyQ,GAClBE,QAAS3Q,OAAOyQ,EAAezG,GAEnC,CC/DO,SAAS4G,EAActP,EAAQuP,EAAkBC,EAAkB1K,EAAQvB,GAChF,MAAMkM,EAAIF,GAAkBpP,QAAUqP,EAAiBrP,OACvD,IAAKsP,EAAG,OAAO3K,EACf,MAAM4K,EAAqBhM,EAAsBH,GAC3CoM,EAAiBpM,EAAWrC,IAAI,EAAGb,aAAcA,EAAQoD,iBAC/D,IAAImM,EAAa,EAGjB,MAAMC,EAAiB,CAAC7P,GACxB,IAAI8P,EAAmB9P,EACnB+P,EAAe,EACfC,EAAkB,EAClBC,EAAkB,EAEtB,GAAIT,EAAiB,GAEnB,KAAOO,EAAeJ,EAAexP,OAAS,GAAK8P,EAAkBT,EAAiB,IACpFO,IACqC,aAAjCJ,EAAeI,KAEjBD,EAAmBA,EAAiBI,IAAG,GACvCL,EAAe3S,KAAK4S,GACpBE,KAEmC,aAAjCL,EAAeI,IAA8BE,IAIrD,IAAK,IAAIjT,EAAI,EAAGA,EAAIyS,EAAGzS,IAAK,CAE1B,MAAMmT,EAAMZ,GAAkBpP,OAASoP,EAAiBvS,GAAK0S,EACvDU,EAAMZ,EAAiBxS,GAG7B,KAAO+S,IAAiBK,EAAMH,GAAoD,aAAjCN,EAAeI,KACzB,aAAjCJ,EAAeI,KACjBF,EAAeQ,MACfL,KAEmC,aAAjCL,EAAeI,IAA8BE,IACjDF,IAMF,IAHAD,EAAmBD,EAAeK,IAAG,IAIlCH,EAAeJ,EAAexP,OAAS,GAA0C,aAArCwP,EAAeI,EAAe,MAC1EC,EAAkBG,GAA4C,aAArCR,EAAeI,EAAe,KACxD,CAEA,GADAA,IACqC,aAAjCJ,EAAeI,GAA8B,CAE/C,MAAMO,EAAU,GAChBR,EAAiB5S,KAAKoT,GACtBR,EAAmBQ,EACnBT,EAAe3S,KAAKoT,GACpBN,GACF,CACqC,aAAjCL,EAAeI,IAA8BE,GACnD,CAGIE,IAAQT,EAEVI,EAAiB5S,KAAK4H,EAAO8K,MACpBG,IAAiBJ,EAAexP,OAAS,EAClD2P,EAAiB5S,KAAK,MAEtB4S,EAAiB5S,KAAK,GAE1B,CAGA,IAAK8C,EAAOG,OAEV,IAAK,IAAInD,EAAI,EAAGA,EAAI0S,EAAoB1S,IAAK,CAE3C,MAAMsT,EAAU,GAChBR,EAAiB5S,KAAKoT,GACtBR,EAAmBQ,CACrB,CAGF,OAAOtQ,CACT,CAUO,SAASuQ,GAAeC,EAAe9N,EAAQ+N,EAAQ,GAC5D,MAAM7N,EAAOF,EAAOE,KAAK8N,KAAK,KACxBC,EAA8C,aAAnCjO,EAAOrC,QAAQoD,gBAC1BmN,EAAYD,EAAWF,EAAQ,EAAIA,EAEzC,GL7BK,SAAoB/N,GACzB,IAAKA,EAAQ,OAAO,EACpB,GAAsC,SAAlCA,EAAOrC,QAAQG,eAA2B,OAAO,EACrD,GAAIkC,EAAOG,SAAS1C,OAAS,EAAG,OAAO,EAEvC,MAAM0Q,EAAanO,EAAOG,SAAS,GACnC,QAAIgO,EAAWhO,SAAS1C,OAAS,IACU,aAAvC0Q,EAAWxQ,QAAQoD,eAGzB,CKmBMqN,CAAWpO,GAAS,CACtB,IAAIqO,EAAUrO,EAAOG,SAAS,GAC1BmO,EAAWJ,EACiB,IAA5BG,EAAQlO,SAAS1C,SACnB4Q,EAAUA,EAAQlO,SAAS,GAC3BmO,KAEFT,GAAeC,EAAeO,EAASC,GAEvC,MAAMC,EAAYF,EAAQnO,KAAK8N,KAAK,KAC9B5L,EAAS0L,EAAc1C,IAAImD,GACjC,IAAKnM,EAAQ,MAAM,IAAIxH,MAAM,sCAI7B,OAHIqT,GAAUO,GAAepM,EAAQ2L,GACrCD,EAAcnK,IAAIzD,EAAMkC,QACxB0L,EAAcW,OAAOF,EAEvB,CAEA,GL7BK,SAAmBvO,GACxB,IAAKA,EAAQ,OAAO,EACpB,GAAsC,QAAlCA,EAAOrC,QAAQG,eAA0B,OAAO,EACpD,GAAIkC,EAAOG,SAAS1C,OAAS,EAAG,OAAO,EAEvC,MAAM0Q,EAAanO,EAAOG,SAAS,GACnC,GAAmC,IAA/BgO,EAAWhO,SAAS1C,OAAc,OAAO,EAC7C,GAA2C,aAAvC0Q,EAAWxQ,QAAQoD,gBAAgC,OAAO,EAE9D,MAAM2N,EAAWP,EAAWhO,SAASQ,KAAKL,GAAgC,QAAvBA,EAAM3C,QAAQ4C,MACjE,GAA0C,aAAtCmO,GAAU/Q,QAAQoD,gBAAgC,OAAO,EAE7D,MAAM4N,EAAaR,EAAWhO,SAASQ,KAAKL,GAAgC,UAAvBA,EAAM3C,QAAQ4C,MACnE,MAA4C,aAAxCoO,GAAYhR,QAAQoD,eAG1B,CKaM6N,CAAU5O,GAAS,CACrB,MAAM6O,EAAU7O,EAAOG,SAAS,GAAGxC,QAAQ4C,KAG3CsN,GAAeC,EAAe9N,EAAOG,SAAS,GAAGA,SAAS,GAAI+N,EAAY,GAC1EL,GAAeC,EAAe9N,EAAOG,SAAS,GAAGA,SAAS,GAAI+N,EAAY,GAE1E,MAAMY,EAAOhB,EAAc1C,IAAI,GAAGlL,KAAQ2O,SACpCzM,EAAS0L,EAAc1C,IAAI,GAAGlL
,KAAQ2O,WAE5C,IAAKC,EAAM,MAAM,IAAIlU,MAAM,mCAC3B,IAAKwH,EAAQ,MAAM,IAAIxH,MAAM,qCAC7B,GAAIkU,EAAKrR,SAAW2E,EAAO3E,OACzB,MAAM,IAAI7C,MAAM,gDAGlB,MAAMmU,EAAMC,GAAaF,EAAM1M,EAAQ8L,GAMvC,OALID,GAAUO,GAAeO,EAAKhB,GAElCD,EAAcW,OAAO,GAAGvO,KAAQ2O,SAChCf,EAAcW,OAAO,GAAGvO,KAAQ2O,gBAChCf,EAAcnK,IAAIzD,EAAM6O,EAE1B,CAGA,GAAI/O,EAAOG,SAAS1C,OAAQ,CAE1B,MAAMwR,EAAiD,aAAnCjP,EAAOrC,QAAQoD,gBAAiCgN,EAAQA,EAAQ,EAE9EmB,EAAS,CAAA,EACf,IAAK,MAAM5O,KAASN,EAAOG,SAAU,CACnC0N,GAAeC,EAAexN,EAAO2O,GACrC,MAAME,EAAYrB,EAAc1C,IAAI9K,EAAMJ,KAAK8N,KAAK,MACpD,IAAKmB,EAAW,MAAM,IAAIvU,MAAM,qCAChCsU,EAAO5O,EAAM3C,QAAQ4C,MAAQ4O,CAC/B,CAEA,IAAK,MAAM7O,KAASN,EAAOG,SACzB2N,EAAcW,OAAOnO,EAAMJ,KAAK8N,KAAK,MAGvC,MAAMoB,EAAWC,GAAaH,EAAQD,GAClChB,GAAUO,GAAeY,EAAUrB,GACvCD,EAAcnK,IAAIzD,EAAMkP,EAC1B,CACF,CAOA,SAASZ,GAAepQ,EAAK2P,GAC3B,IAAK,IAAIzT,EAAI,EAAGA,EAAI8D,EAAIX,OAAQnD,IAC1ByT,EACFS,GAAepQ,EAAI9D,GAAIyT,EAAQ,GAE/B3P,EAAI9D,GAAK8D,EAAI9D,GAAG,EAGtB,CAQA,SAAS0U,GAAaF,EAAM1M,EAAQ2L,GAClC,MAAMgB,EAAM,GACZ,IAAK,IAAIzU,EAAI,EAAGA,EAAIwU,EAAKrR,OAAQnD,IAC/B,GAAIyT,EACFgB,EAAIvU,KAAKwU,GAAaF,EAAKxU,GAAI8H,EAAO9H,GAAIyT,EAAQ,SAElD,GAAIe,EAAKxU,GAAI,CAEX,MAAMgV,EAAM,CAAA,EACZ,IAAK,IAAIC,EAAI,EAAGA,EAAIT,EAAKxU,GAAGmD,OAAQ8R,IAAK,CACvC,MAAM7Q,EAAQ0D,EAAO9H,GAAGiV,GACxBD,EAAIR,EAAKxU,GAAGiV,SAAgBtF,IAAVvL,EAAsB,KAAOA,CACjD,CACAqQ,EAAIvU,KAAK8U,EACX,MACEP,EAAIvU,UAAKyP,GAIf,OAAO8E,CACT,CASA,SAASM,GAAaH,EAAQnB,GAC5B,MAAMe,EAAOU,OAAOV,KAAKI,GACnBzR,EAASyR,EAAOJ,EAAK,KAAKrR,OAC1BsR,EAAM,GACZ,IAAK,IAAIzU,EAAI,EAAGA,EAAImD,EAAQnD,IAAK,CAE/B,MAAMgV,EAAM,CAAA,EACZ,IAAK,MAAMtG,KAAO8F,EAAM,CACtB,GAAII,EAAOlG,GAAKvL,SAAWA,EAAQ,MAAM,IAAI7C,MAAM,gCACnD0U,EAAItG,GAAOkG,EAAOlG,GAAK1O,EACzB,CACIyT,EACFgB,EAAIvU,KAAK6U,GAAaC,EAAKvB,EAAQ,IAEnCgB,EAAIvU,KAAK8U,EAEb,CACA,OAAOP,CACT,CC/OO,SAASU,GAAkB5V,EAAQU,EAAO+C,GAC/C,MAAMoS,EAAQpS,aAAkB4B,WAC1ByQ,EAAY5N,EAAWlI,GACvB+V,EAAoB7N,EAAWlI,GACrCkI,EAAWlI,GACX,IAAI6E,EAAQmD,EAAiBhI,GACzBgW,EAAc,EAClBvS,EAAOuS,KAAiBH,EAAQ1T,OAAO0C,GAASA,EAEhD,MAAMoR,EAAqBH,EAAYC,EAEvC,KAAOC,EAActV,GAAO,CAE1B,MAAMwV,EAAWlO,EAAiBhI,GAC5BmW,EAAY,IAAIzS,WAAWqS,GACjC,IAAK,IAAItV,EAAI,EAAGA,EAAIsV,EAAmBtV,IACrC0V,EAAU1V,GAAKT,EAAOgB,KAAKE,SAASlB,EAAOmB,UAG7C,IAAK,IAAIV,EAAI,EAAGA,EAAIsV,GAAqBC,EAActV,EAAOD,IAAK,CAEjE,MAAMuE,EAAWI,OAAO+Q,EAAU1V,IAClC,GAAIuE,EAAU,CACZ,IAAIoR,EAAa,GACbC,EAAiBJ,EACrB,MAAMK,GAAQ,IAAMtR,GAAY,GAChC,KAAOqR,GAAkBL,EAActV,GAAO,CAC5C,IAAIiF,EAAOP,OAAOpF,EAAOgB,KAAKE,SAASlB,EAAOmB,UAAYiV,EAAaE,EAEvE,IADAF,GAAcpR,EACPoR,GAAc,GACnBA,GAAc,GACdpW,EAAOmB,SACHiV,IACFzQ,GAAQP,OAAOpF,EAAOgB,KAAKE,SAASlB,EAAOmB,UAAY6D,EAAWoR,EAAaE,GAInFzR,GADcqR,EAAWvQ,EAEzBlC,EAAOuS,KAAiBH,EAAQ1T,OAAO0C,GAASA,EAChDwR,GACF,CACIA,IAEFrW,EAAOmB,QAAUG,KAAKiV,MAAMF,EAAiBlU,OAAO6C,GAAY7C,OAAOiU,IAAe,GAE1F,MACE,IAAK,IAAIV,EAAI,EAAGA,EAAIO,GAAsBD,EAActV,EAAOgV,IAC7D7Q,GAASqR,EACTzS,EAAOuS,KAAiBH,EAAQ1T,OAAO0C,GAASA,CAGtD,CACF,CACF,CAOO,SAAS2R,GAAqBxW,EAAQU,EAAO+C,GAClD,MAAMgT,EAAU,IAAIpR,WAAW3E,GAC/BkV,GAAkB5V,EAAQU,EAAO+V,GACjC,IAAK,IAAIhW,EAAI,EAAGA,EAAIC,EAAOD,IACzBgD,EAAOhD,GAAK,IAAIiD,WAAW1D,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAQsV,EAAQhW,IAC/FT,EAAOmB,QAAUsV,EAAQhW,EAE7B,CCnEO,SAASuE,GAASH,GACvB,OAAO,GAAKvD,KAAKoV,MAAM7R,EACzB,CAYO,SAAS8R,GAAuB3W,EAAQ4W,EAAOnT,EAAQG,QAC7CwM,IAAXxM,IACFA,EAAS5D,EAAOgB,KAAKK,UAAUrB,EAAOmB,QAAQ,GAC9CnB,EAAOmB,QAAU,GAEnB,MAAM0V,EAAc7W,EAAOmB,OAC3B,IAAI2V,EAAO,EACX,KAAOA,EAAOrT,EAAOG,QAAQ,CAC3B,MAAMmT,EAAS7O,EAAWlI,GAC1B,GAAa,EAAT+W,EAEFD,EAAOE,GAAchX,EAAQ+W,EAAQH,EAAOnT,EAAQqT,OAC/C,CAEL,MAAMpW,EAAQqW,IAAW,EACzBE,GAAQjX,EAAQU,EAAOkW,EAAOnT,EAAQqT,GACtCA,GAAQpW,CACV,CACF,CACAV,EAAOmB,OAAS0V,EAAcjT,
CAChC,CAWA,SAASqT,GAAQjX,EAAQU,EAAOsE,EAAUvB,EAAQqT,GAChD,MAAMF,EAAQ5R,EAAW,GAAK,EAC9B,IAAIH,EAAQ,EACZ,IAAK,IAAIpE,EAAI,EAAGA,EAAImW,EAAOnW,IACzBoE,GAAS7E,EAAOgB,KAAKE,SAASlB,EAAOmB,YAAcV,GAAK,GAK1D,IAAK,IAAIA,EAAI,EAAGA,EAAIC,EAAOD,IACzBgD,EAAOqT,EAAOrW,GAAKoE,CAEvB,CAaA,SAASmS,GAAchX,EAAQ+W,EAAQ/R,EAAUvB,EAAQqT,GACvD,IAAIpW,EAAQqW,GAAU,GAAK,EAC3B,MAAMT,GAAQ,GAAKtR,GAAY,EAE/B,IAAI5B,EAAO,EACX,GAAIpD,EAAOmB,OAASnB,EAAOgB,KAAKiC,WAC9BG,EAAOpD,EAAOgB,KAAKE,SAASlB,EAAOmB,eAC9B,GAAImV,EAET,MAAM,IAAIvV,MAAM,0BAA0Bf,EAAOmB,uBAEnD,IAAI+V,EAAO,EACPC,EAAQ,EAGZ,KAAOzW,GAEDyW,EAAQ,GACVA,GAAS,EACTD,GAAQ,EACR9T,KAAU,GACD8T,EAAOC,EAAQnS,GAExB5B,GAAQpD,EAAOgB,KAAKE,SAASlB,EAAOmB,SAAW+V,EAC/ClX,EAAOmB,SACP+V,GAAQ,IAEJJ,EAAOrT,EAAOG,SAEhBH,EAAOqT,KAAU1T,GAAQ+T,EAAQb,GAEnC5V,IACAyW,GAASnS,GAIb,OAAO8R,CACT,CASO,SAASM,GAAgBpX,EAAQU,EAAOP,EAAMkX,GACnD,MAAMT,EA6BR,SAAmBzW,EAAMkX,GACvB,OAAQlX,GACR,IAAK,QACL,IAAK,QACH,OAAO,EACT,IAAK,QACL,IAAK,SACH,OAAO,EACT,IAAK,uBACH,IAAKkX,EAAY,MAAM,IAAItW,MAAM,yCACjC,OAAOsW,EACT,QACE,MAAM,IAAItW,MAAM,6BAA6BZ,KAEjD,CA3CgBmX,CAAUnX,EAAMkX,GACxB1U,EAAQ,IAAIe,WAAWhD,EAAQkW,GACrC,IAAK,IAAIW,EAAI,EAAGA,EAAIX,EAAOW,IACzB,IAAK,IAAI9W,EAAI,EAAGA,EAAIC,EAAOD,IACzBkC,EAAMlC,EAAImW,EAAQW,GAAKvX,EAAOgB,KAAKE,SAASlB,EAAOmB,UAIvD,GAAa,UAAThB,EAAkB,OAAO,IAAIqX,aAAa7U,EAAMI,QAC/C,GAAa,WAAT5C,EAAmB,OAAO,IAAIsX,aAAa9U,EAAMI,QACrD,GAAa,UAAT5C,EAAkB,OAAO,IAAIkF,WAAW1C,EAAMI,QAClD,GAAa,UAAT5C,EAAkB,OAAO,IAAI+E,cAAcvC,EAAMI,QACrD,GAAa,yBAAT5C,EAAiC,CAExC,MAAMuX,EAAQ,IAAIlT,MAAM9D,GACxB,IAAK,IAAID,EAAI,EAAGA,EAAIC,EAAOD,IACzBiX,EAAMjX,GAAKkC,EAAMgV,SAASlX,EAAImW,GAAQnW,EAAI,GAAKmW,GAEjD,OAAOc,CACT,CACA,MAAM,IAAI3W,MAAM,+CAA+CZ,IACjE,CCzIO,SAASyX,GAAU5X,EAAQG,EAAMO,EAAOmX,GAC7C,GAAc,IAAVnX,EAAa,MAAO,GACxB,GAAa,YAATP,EACF,OA4BJ,SAA0BH,EAAQU,GAChC,MAAM6H,EAAS,IAAI/D,MAAM9D,GACzB,IAAK,IAAID,EAAI,EAAGA,EAAIC,EAAOD,IAAK,CAC9B,MAAMuC,EAAahD,EAAOmB,QAAUV,EAAI,EAAI,GACtCqX,EAAYrX,EAAI,EAChBiF,EAAO1F,EAAOgB,KAAKE,SAAS8B,GAClCuF,EAAO9H,MAAMiF,EAAO,GAAKoS,EAC3B,CAEA,OADA9X,EAAOmB,QAAUG,KAAKiV,KAAK7V,EAAQ,GAC5B6H,CACT,CAtCWwP,CAAiB/X,EAAQU,GAC3B,GAAa,UAATP,EACT,OA6CJ,SAAwBH,EAAQU,GAC9B,MAAM6H,GAAUvI,EAAOgB,KAAKgC,WAAahD,EAAOmB,QAAU,EACtD,IAAIkE,WAAW2S,GAAMhY,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAgB,EAART,IACjF,IAAI2E,WAAWrF,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAQT,GAE/E,OADAV,EAAOmB,QAAkB,EAART,EACV6H,CACT,CAnDW0P,CAAejY,EAAQU,GACzB,GAAa,UAATP,EACT,OA0DJ,SAAwBH,EAAQU,GAC9B,MAAM6H,GAAUvI,EAAOgB,KAAKgC,WAAahD,EAAOmB,QAAU,EACtD,IAAI+D,cAAc8S,GAAMhY,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAgB,EAART,IACpF,IAAIwE,cAAclF,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAQT,GAElF,OADAV,EAAOmB,QAAkB,EAART,EACV6H,CACT,CAhEW2P,CAAelY,EAAQU,GACzB,GAAa,UAATP,EACT,OAuEJ,SAAwBH,EAAQU,GAC9B,MAAM6H,EAAS,IAAI/D,MAAM9D,GACzB,IAAK,IAAID,EAAI,EAAGA,EAAIC,EAAOD,IAAK,CAC9B,MAAM0X,EAAMnY,EAAOgB,KAAKuP,YAAYvQ,EAAOmB,OAAa,GAAJV,GAAQ,GACtD2X,EAAOpY,EAAOgB,KAAKsP,SAAStQ,EAAOmB,OAAa,GAAJV,EAAS,GAAG,GAC9D8H,EAAO9H,GAAK2E,OAAOgT,IAAS,IAAMD,CACpC,CAEA,OADAnY,EAAOmB,QAAkB,GAART,EACV6H,CACT,CAhFW8P,CAAerY,EAAQU,GACzB,GAAa,UAATP,EACT,OAuFJ,SAAwBH,EAAQU,GAC9B,MAAM6H,GAAUvI,EAAOgB,KAAKgC,WAAahD,EAAOmB,QAAU,EACtD,IAAIqW,aAAaQ,GAAMhY,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAgB,EAART,IACnF,IAAI8W,aAAaxX,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAQT,GAEjF,OADAV,EAAOmB,QAAkB,EAART,EACV6H,CACT,CA7FW+P,CAAetY,EAAQU,GACzB,GAAa,WAATP,EACT,OAoGJ,SAAyBH,EAAQU,GAC/B,MAAM6H,GAAUvI,EAAOgB,KAAKgC,WAAahD,EAAOmB,QAAU,EACtD,IAAIsW,aAAaO,GAAMhY,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAgB,EAART,IACn
F,IAAI+W,aAAazX,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAQT,GAEjF,OADAV,EAAOmB,QAAkB,EAART,EACV6H,CACT,CA1GWgQ,CAAgBvY,EAAQU,GAC1B,GAAa,eAATP,EACT,OAiHJ,SAA4BH,EAAQU,GAClC,MAAM6H,EAAS,IAAI/D,MAAM9D,GACzB,IAAK,IAAID,EAAI,EAAGA,EAAIC,EAAOD,IAAK,CAC9B,MAAMmD,EAAS5D,EAAOgB,KAAKK,UAAUrB,EAAOmB,QAAQ,GACpDnB,EAAOmB,QAAU,EACjBoH,EAAO9H,GAAK,IAAIiD,WAAW1D,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAQyC,GACvF5D,EAAOmB,QAAUyC,CACnB,CACA,OAAO2E,CACT,CA1HWiQ,CAAmBxY,EAAQU,GAC7B,GAAa,yBAATP,EAAiC,CAC1C,IAAK0X,EAAa,MAAM,IAAI9W,MAAM,gCAClC,OAiIJ,SAAiCf,EAAQU,EAAOmX,GAE9C,MAAMtP,EAAS,IAAI/D,MAAM9D,GACzB,IAAK,IAAID,EAAI,EAAGA,EAAIC,EAAOD,IACzB8H,EAAO9H,GAAK,IAAIiD,WAAW1D,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAQ0W,GACvF7X,EAAOmB,QAAU0W,EAEnB,OAAOtP,CACT,CAzIWkQ,CAAwBzY,EAAQU,EAAOmX,EAChD,CACE,MAAM,IAAI9W,MAAM,2BAA2BZ,IAE/C,CAgJA,SAAS6X,GAAMjV,EAAQ5B,EAAQiR,GAC7B,MAAMsG,EAAU,IAAI9O,YAAYwI,GAEhC,OADA,IAAI1O,WAAWgV,GAAS5O,IAAI,IAAIpG,WAAWX,EAAQ5B,EAAQiR,IACpDsG,CACT,CC7KA,MAAMC,GAAY,CAAC,EAAG,IAAM,MAAQ,SAAU,YAW9C,SAASC,GAAUC,EAAWC,EAASC,EAASC,EAAOpV,GACrD,IAAK,IAAInD,EAAI,EAAGA,EAAImD,EAAQnD,IAC1BsY,EAAQC,EAAQvY,GAAKoY,EAAUC,EAAUrY,EAE7C,CCPO,SAASwY,GAAatW,EAAOuW,GAAM/Y,KAAEA,EAAI2D,QAAEA,EAAOkD,WAAEA,IACzD,MAAMhG,EAAO,IAAI8B,SAASH,EAAMI,OAAQJ,EAAMK,WAAYL,EAAMM,YAC1DjD,EAAS,CAAEgB,OAAMG,OAAQ,GAE/B,IAAIgY,EAGJ,MAAMlG,EAkDR,SAA8BjT,EAAQkZ,EAAMlS,GAC1C,GAAIA,EAAWpD,OAAS,EAAG,CACzB,MAAMwV,EAAqBrS,EAAsBC,GACjD,GAAIoS,EAAoB,CACtB,MAAM7Q,EAAS,IAAI/D,MAAM0U,EAAKjN,YAE9B,OADA0K,GAAuB3W,EAAQgF,GAASoU,GAAqB7Q,GACtDA,CACT,CACF,CACA,MAAO,EACT,CA5D2B8Q,CAAqBrZ,EAAQkZ,EAAMlS,IAEtDgM,iBAAEA,EAAgBsG,SAAEA,GAkE5B,SAA8BtZ,EAAQkZ,EAAMlS,GAC1C,MAAMmM,EAAqBhM,EAAsBH,GACjD,IAAKmM,EAAoB,MAAO,CAAEH,iBAAkB,GAAIsG,SAAU,GAElE,MAAMtG,EAAmB,IAAIxO,MAAM0U,EAAKjN,YACxC0K,GAAuB3W,EAAQgF,GAASmO,GAAqBH,GAG7D,IAAIsG,EAAWJ,EAAKjN,WACpB,IAAK,MAAM2H,KAAOZ,EACZY,IAAQT,GAAoBmG,IAEjB,IAAbA,IAAgBtG,EAAiBpP,OAAS,GAE9C,MAAO,CAAEoP,mBAAkBsG,WAC7B,CAjFyCC,CAAqBvZ,EAAQkZ,EAAMlS,GAIpEwS,EAAUN,EAAKjN,WAAaqN,EAClC,GAAsB,UAAlBJ,EAAK5V,SACP6V,EAAWvB,GAAU5X,EAAQG,EAAMqZ,EAAS1V,EAAQyG,kBAC/C,GACa,qBAAlB2O,EAAK5V,UACa,mBAAlB4V,EAAK5V,UACa,QAAlB4V,EAAK5V,SACL,CACA,MAAM0B,EAAoB,YAAT7E,EAAqB,EAAIa,EAAKE,SAASlB,EAAOmB,UAC3D6D,GACFmU,EAAW,IAAI3U,MAAMgV,GACR,YAATrZ,GACFwW,GAAuB3W,EAAQgF,EAAUmU,GACzCA,EAAWA,EAASxU,IAAI8U,KAAOA,IAG/B9C,GAAuB3W,EAAQgF,EAAUmU,EAAUnY,EAAKiC,WAAajD,EAAOmB,SAG9EgY,EAAW,IAAIzV,WAAW8V,EAE9B,MAAO,GAAsB,sBAAlBN,EAAK5V,SACd6V,EAAW/B,GAAgBpX,EAAQwZ,EAASrZ,EAAM2D,EAAQyG,kBACrD,GAAsB,wBAAlB2O,EAAK5V,SAAoC,CAElD6V,EADuB,UAAThZ,EACK,IAAIkF,WAAWmU,GAAW,IAAItU,cAAcsU,GAC/D5D,GAAkB5V,EAAQwZ,EAASL,EACrC,KAAO,IAAsB,4BAAlBD,EAAK5V,SAId,MAAM,IAAIvC,MAAM,iCAAiCmY,EAAK5V,YAHtD6V,EAAW,IAAI3U,MAAMgV,GACrBhD,GAAqBxW,EAAQwZ,EAASL,EAGxC,CAEA,MAAO,CAAEnG,mBAAkBC,mBAAkBkG,WAC/C,CAmDO,SAASO,GAAeC,EAAiBC,EAAwB5N,EAAO6N,GAE7E,IAAIC,EACJ,MAAMC,EAAqBF,IAAc7N,GACzC,GAAc,iBAAVA,EACF8N,EAAOH,OACF,GAAII,EACTD,EAAOC,EAAmBJ,EAAiBC,OACtC,IAAc,WAAV5N,EAIT,MAAM,IAAIjL,MAAM,0CAA0CiL,KAH1D8N,EAAO,IAAIpW,WAAWkW,GD5FnB,SAA0BI,EAAOvW,GACtC,MAAMwW,EAAcD,EAAM/W,WACpBiX,EAAezW,EAAOR,WAC5B,IAAIkX,EAAM,EACNC,EAAS,EAGb,KAAOD,EAAMF,GAAa,CACxB,MAAMI,EAAIL,EAAMG,GAEhB,GADAA,IACIE,EAAI,IACN,KAEJ,CACA,GAAIH,GAAgBC,GAAOF,EACzB,MAAM,IAAIlZ,MAAM,gCAGlB,KAAOoZ,EAAMF,GAAa,CACxB,MAAMI,EAAIL,EAAMG,GAChB,IAAIG,EAAM,EAGV,GAFAH,IAEIA,GAAOF,EACT,MAAM,IAAIlZ,MAAM,sBAIlB,GAAS,EAAJsZ,EAsBE,CAEL,IAAIlZ,EAAS,EACb,OAAY,EAAJkZ,GACR,KAAK,EAEHC,EAAwB,GAAjBD,IAAM,EAAI,GACjBlZ,EAAS6Y,EAAMG,IAAQE,IAAM,GAAK,GAClCF,IACA,MACF,KAAK,EAEH,GAAIF,GAAeE,EAAM,EACvB,MAAM,IAAIpZ,MAAM,6BAElBuZ,GAAOD,IAAM,
GAAK,EAClBlZ,EAAS6Y,EAAMG,IAAQH,EAAMG,EAAM,IAAM,GACzCA,GAAO,EACP,MACF,KAAK,EAEH,GAAIF,GAAeE,EAAM,EACvB,MAAM,IAAIpZ,MAAM,6BAElBuZ,GAAOD,IAAM,GAAK,EAClBlZ,EAAS6Y,EAAMG,IACVH,EAAMG,EAAM,IAAM,IAClBH,EAAMG,EAAM,IAAM,KAClBH,EAAMG,EAAM,IAAM,IACvBA,GAAO,EAKT,GAAe,IAAXhZ,GAAgBoZ,MAAMpZ,GACxB,MAAM,IAAIJ,MAAM,kBAAkBI,SAAcgZ,iBAAmBF,KAErE,GAAI9Y,EAASiZ,EACX,MAAM,IAAIrZ,MAAM,2CAElB6X,GAAUnV,EAAQ2W,EAASjZ,EAAQsC,EAAQ2W,EAAQE,GACnDF,GAAUE,CACZ,KAhEqB,CAEnB,IAAIA,GAAOD,IAAM,GAAK,EAEtB,GAAIC,EAAM,GAAI,CACZ,GAAIH,EAAM,GAAKF,EACb,MAAM,IAAIlZ,MAAM,+CAElB,MAAMyZ,EAAaF,EAAM,GACzBA,EAAMN,EAAMG,IACPH,EAAMG,EAAM,IAAM,IAClBH,EAAMG,EAAM,IAAM,KAClBH,EAAMG,EAAM,IAAM,IACvBG,EAAsC,GAA/BA,EAAM3B,GAAU6B,IACvBL,GAAOK,CACT,CACA,GAAIL,EAAMG,EAAML,EACd,MAAM,IAAIlZ,MAAM,6CAElB6X,GAAUoB,EAAOG,EAAK1W,EAAQ2W,EAAQE,GACtCH,GAAOG,EACPF,GAAUE,CACZ,CA2CF,CAEA,GAAIF,IAAWF,EAAc,MAAM,IAAInZ,MAAM,yBAC/C,CCHI0Z,CAAiBd,EAAiBG,EAGpC,CACA,GAAIA,GAAMlW,SAAWgW,EACnB,MAAM,IAAI7Y,MAAM,oCAAoC+Y,GAAMlW,gCAAgCgW,KAE5F,OAAOE,CACT,CAWO,SAASY,GAAef,EAAiBgB,EAAIpX,GAClD,MACMvD,EAAS,CAAEgB,KADJ,IAAI8B,SAAS6W,EAAgB5W,OAAQ4W,EAAgB3W,WAAY2W,EAAgB1W,YACvE9B,OAAQ,IACzBhB,KAAEA,EAAI2D,QAAEA,EAAOkD,WAAEA,EAAUgF,MAAEA,EAAK6N,YAAEA,GAAgBtW,EACpDqX,EAAQD,EAAGE,oBACjB,IAAKD,EAAO,MAAM,IAAI7Z,MAAM,4CAG5B,MAAMkS,EA2DR,SAAgCjT,EAAQ4a,EAAO5T,GAC7C,MAAMoS,EAAqBrS,EAAsBC,GACjD,IAAKoS,EAAoB,MAAO,GAEhC,MAAM7Q,EAAS,IAAI/D,MAAMoW,EAAM3O,YAE/B,OADA0K,GAAuB3W,EAAQgF,GAASoU,GAAqB7Q,EAAQqS,EAAME,+BACpEvS,CACT,CAlE2BwS,CAAuB/a,EAAQ4a,EAAO5T,GAC/DhH,EAAOmB,OAASyZ,EAAME,8BAGtB,MAAM9H,EAsER,SAAgChT,EAAQ4a,EAAO5T,GAC7C,MAAMmM,EAAqBhM,EAAsBH,GACjD,GAAImM,EAAoB,CAEtB,MAAM5K,EAAS,IAAI/D,MAAMoW,EAAM3O,YAE/B,OADA0K,GAAuB3W,EAAQgF,GAASmO,GAAqB5K,EAAQqS,EAAMI,+BACpEzS,CACT,CACF,CA9E2B0S,CAAuBjb,EAAQ4a,EAAO5T,GAGzDkU,EAAuBP,EAAGf,uBAAyBgB,EAAMI,8BAAgCJ,EAAME,8BAErG,IAAIhB,EAAOH,EAAgBhC,SAAS3X,EAAOmB,SACf,IAAxByZ,EAAMO,gBACRrB,EAAOJ,GAAeI,EAAMoB,EAAsBlP,EAAO6N,IAE3D,MAAMuB,EAAW,IAAItY,SAASgX,EAAK/W,OAAQ+W,EAAK9W,WAAY8W,EAAK7W,YAC3DoY,EAAa,CAAEra,KAAMoa,EAAUja,OAAQ,GAI7C,IAAIgY,EACJ,MAAMK,EAAUoB,EAAM3O,WAAa2O,EAAMU,UACzC,GAAuB,UAAnBV,EAAMtX,SACR6V,EAAWvB,GAAUyD,EAAYlb,EAAMqZ,EAAS1V,EAAQyG,kBACnD,GAAuB,QAAnBqQ,EAAMtX,SAEf6V,EAAW,IAAI3U,MAAMgV,GACrB7C,GAAuB0E,EAAY,EAAGlC,GACtCA,EAAWA,EAASxU,IAAI8U,KAAOA,QAC1B,GACc,qBAAnBmB,EAAMtX,UACa,mBAAnBsX,EAAMtX,SACN,CACA,MAAM0B,EAAWoW,EAASla,SAASma,EAAWla,UAC9CgY,EAAW,IAAI3U,MAAMgV,GACrB7C,GAAuB0E,EAAYrW,EAAUmU,EAAU+B,EAAuB,EAChF,MAAO,GAAuB,wBAAnBN,EAAMtX,SAAoC,CAEnD6V,EADuB,UAAThZ,EACK,IAAIkF,WAAWmU,GAAW,IAAItU,cAAcsU,GAC/D5D,GAAkByF,EAAY7B,EAASL,EACzC,MAAO,GAAuB,4BAAnByB,EAAMtX,SACf6V,EAAW,IAAI3U,MAAMgV,GACrBhD,GAAqB6E,EAAY7B,EAASL,QACrC,GAAuB,qBAAnByB,EAAMtX,SACf6V,EAAW,IAAI3U,MAAMgV,GJ9GlB,SAAwBxZ,EAAQU,EAAO+C,GAC5C,MAAM8X,EAAa,IAAIlW,WAAW3E,GAClCkV,GAAkB5V,EAAQU,EAAO6a,GACjC,MAAMC,EAAa,IAAInW,WAAW3E,GAClCkV,GAAkB5V,EAAQU,EAAO8a,GAEjC,IAAK,IAAI/a,EAAI,EAAGA,EAAIC,EAAOD,IAAK,CAC9B,MAAMgb,EAAS,IAAI/X,WAAW1D,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAQqa,EAAW/a,IACjG8a,EAAW9a,IAEbgD,EAAOhD,GAAK,IAAIiD,WAAW6X,EAAW9a,GAAK+a,EAAW/a,IACtDgD,EAAOhD,GAAGqJ,IAAIrG,EAAOhD,EAAI,GAAGkX,SAAS,EAAG4D,EAAW9a,KACnDgD,EAAOhD,GAAGqJ,IAAI2R,EAAQF,EAAW9a,KAEjCgD,EAAOhD,GAAKgb,EAEdzb,EAAOmB,QAAUqa,EAAW/a,EAC9B,CACF,CI6FIib,CAAeL,EAAY7B,EAASL,OAC/B,IAAuB,sBAAnByB,EAAMtX,SAGf,MAAM,IAAIvC,MAAM,iCAAiC6Z,EAAMtX,YAFvD6V,EAAW/B,GAAgBpX,EAAQwZ,EAASrZ,EAAM2D,EAAQyG,YAG5D,CAEA,MAAO,CAAEyI,mBAAkBC,mBAAkBkG,WAC/C,CCxLO,SAASwC,GAAW3b,GAAQ4b,WAAEA,EAAUC,YAAEA,EAAWC,UAAEA,GAAavY,EAAewY,GACxF,MAAMC,WAAEA,EAAUhV,WAAEA,GAAezD,EAC7B0Y,EAAS5U,EAAaL,GAEtByL,EAAS,GAEf,IAAIpP,EAEA6Y,EACAC,E
AAW,EAEf,MAAMC,EAAgBL,SACpBG,GAAaH,EAAO,CAClBC,aACAK,WAAYH,EACZI,SAAUV,EAAaO,EAAWD,EAAUtY,OAC5C2Y,OAAQX,EAAaO,GAExB,GAED,MAAOF,EAASE,EAAWL,EAAY9b,EAAOmB,OAASnB,EAAOgB,KAAKiC,WAAa,MAC1EjD,EAAOmB,QAAUnB,EAAOgB,KAAKiC,WAAa,IADmC,CAIjF,MAAM8T,EAASyF,GAAcxc,GAC7B,GAAoB,oBAAhB+W,EAAO5W,KAETkD,EAAaoZ,GAASzc,EAAQ+W,EAAQxT,EAAeF,OAAY+M,EAAW,GAC5E/M,EAAaQ,EAAQR,EAAYE,OAC5B,CACL,MAAMmZ,EAAkBR,GAAWtY,QAAU,EACvC2E,EAASkU,GAASzc,EAAQ+W,EAAQxT,EAAeF,EAAY6Y,EAAWL,EAAcM,GACxFD,IAAc3T,EAEhB4T,GAAY5T,EAAO3E,OAAS8Y,GAE5BN,MACA3J,EAAO9R,KAAK4H,GACZ4T,GAAY5T,EAAO3E,OACnBsY,EAAY3T,EAEhB,CACF,CAOA,OANA6T,MAEID,EAAWL,GAAaI,IAE1BzJ,EAAOA,EAAO7O,OAAS,GAAKsY,EAAU9U,MAAM,EAAG0U,GAAaK,EAAWD,EAAUtY,UAE5E6O,CACT,CAaO,SAASgK,GAASzc,EAAQ+W,EAAQxT,EAAeF,EAAYsZ,EAAeC,GACjF,MAAMzc,KAAEA,EAAI2D,QAAEA,EAAOkD,WAAEA,EAAUgF,MAAEA,EAAK6N,YAAEA,GAAgBtW,EAEpDoW,EAAkB,IAAIjW,WAC1B1D,EAAOgB,KAAK+B,OAAQ/C,EAAOgB,KAAKgC,WAAahD,EAAOmB,OAAQ4V,EAAO8F,sBAKrE,GAHA7c,EAAOmB,QAAU4V,EAAO8F,qBAGJ,cAAhB9F,EAAO5W,KAAsB,CAC/B,MAAM+Y,EAAOnC,EAAO+F,iBACpB,IAAK5D,EAAM,MAAM,IAAInY,MAAM,yCAG3B,GAAI6b,EAAY1D,EAAKjN,YAAc5E,EAAaL,GAC9C,OAAO,IAAIxC,MAAM0U,EAAKjN,YAGxB,MAAM6N,EAAOJ,GAAeC,EAAiBxX,OAAO4U,EAAO6C,wBAAyB5N,EAAO6N,IACrF7G,iBAAEA,EAAgBC,iBAAEA,EAAgBkG,SAAEA,GAAaF,GAAaa,EAAMZ,EAAM3V,GAIlF,IAAIgF,EAASpF,EAAsBgW,EAAU9V,EAAY6V,EAAK5V,SAAUC,GACxE,GAAI0P,EAAiBrP,QAAUoP,GAAkBpP,OAAQ,CAEvD,OAAOmP,EADQvO,MAAMuY,QAAQJ,GAAiBA,EAAgB,GACjC3J,EAAkBC,EAAkB1K,EAAQvB,EAC3E,CAEE,IAAK,IAAIvG,EAAI,EAAGA,EAAIuG,EAAWpD,OAAQnD,IACS,aAA1CuG,EAAWvG,GAAGqD,QAAQoD,kBACxBqB,EAAS/D,MAAME,KAAK6D,EAAQ8C,GAAK,CAACA,KAGtC,OAAO9C,CAEX,CAAO,GAAoB,iBAAhBwO,EAAO5W,KAAyB,CACzC,MAAMya,EAAQ7D,EAAO8D,oBACrB,IAAKD,EAAO,MAAM,IAAI7Z,MAAM,4CAG5B,GAAI6b,EAAYhC,EAAMtP,SACpB,OAAO,IAAI9G,MAAMoW,EAAM3O,YAGzB,MAAM+G,iBAAEA,EAAgBC,iBAAEA,EAAgBkG,SAAEA,GAC1CuB,GAAef,EAAiB5C,EAAQxT,GAGpCgF,EAASpF,EAAsBgW,EAAU9V,EAAYuX,EAAMtX,SAAUC,GAE3E,OAAOwP,EADQvO,MAAMuY,QAAQJ,GAAiBA,EAAgB,GACjC3J,EAAkBC,EAAkB1K,EAAQvB,EAC3E,CAAO,GAAoB,oBAAhB+P,EAAO5W,KAA4B,CAC5C,MAAM6c,EAAOjG,EAAOkG,uBACpB,IAAKD,EAAM,MAAM,IAAIjc,MAAM,+CAE3B,MAAM+Y,EAAOJ,GACXC,EAAiBxX,OAAO4U,EAAO6C,wBAAyB5N,EAAO6N,GAIjE,OAAOjC,GADQ,CAAE5W,KAAM,IAAI8B,SAASgX,EAAK/W,OAAQ+W,EAAK9W,WAAY8W,EAAK7W,YAAa9B,OAAQ,GACnEhB,EAAM6c,EAAK/Q,WAAYnI,EAAQyG,YAC1D,CACE,MAAM,IAAIxJ,MAAM,kCAAkCgW,EAAO5W,OAE7D,CASA,SAASqc,GAAcxc,GACrB,MAAM+W,EAASvP,EAA4BxH,GAsC3C,MAAO,CACLG,KApCWN,EAASkX,EAAO3M,SAqC3BwP,uBApC6B7C,EAAO1M,QAqCpCwS,qBApC2B9F,EAAOvM,QAqClC0S,IApCUnG,EAAOtM,QAqCjBqS,iBApCuB/F,EAAOrM,SAAW,CACzCuB,WAAY8K,EAAOrM,QAAQN,QAC3B9G,SAAU7D,EAASsX,EAAOrM,QAAQL,SAClC8S,0BAA2B1d,EAASsX,EAAOrM,QAAQF,SACnD4S,0BAA2B3d,EAASsX,EAAOrM,QAAQD,SACnDgC,WAAYsK,EAAOrM,QAAQA,SAAW,CACpCrB,IAAK0N,EAAOrM,QAAQA,QAAQN,QAC5ByF,IAAKkH,EAAOrM,QAAQA,QAAQL,QAC5ByF,WAAYiH,EAAOrM,QAAQA,QAAQF,QACnCuF,eAAgBgH,EAAOrM,QAAQA,QAAQD,QACvCuF,UAAW+G,EAAOrM,QAAQA,QAAQA,QAClCuF,UAAW8G,EAAOrM,QAAQA,QAAQC,UA0BpC0S,kBAvBwBtG,EAAOpM,QAwB/BsS,uBAvB6BlG,EAAOnM,SAAW,CAC/CqB,WAAY8K,EAAOnM,QAAQR,QAC3B9G,SAAU7D,EAASsX,EAAOnM,QAAQP,SAClCiT,UAAWvG,EAAOnM,QAAQJ,SAqB1BqQ,oBAnB0B9D,EAAOjM,SAAW,CAC5CmB,WAAY8K,EAAOjM,QAAQV,QAC3BkR,UAAWvE,EAAOjM,QAAQT,QAC1BiB,SAAUyL,EAAOjM,QAAQN,QACzBlH,SAAU7D,EAASsX,EAAOjM,QAAQL,SAClCuQ,8BAA+BjE,EAAOjM,QAAQJ,QAC9CoQ,8BAA+B/D,EAAOjM,QAAQH,QAC9CwQ,mBAA0C/K,IAA3B2G,EAAOjM,QAAQF,SAA+BmM,EAAOjM,QAAQF,QAC5E6B,WAAYsK,EAAOjM,QAAQA,SAa/B,CCtGO/B,eAAewU,IAAiBC,aAAEA,GAAgB3B,EAAaC,EAAWrQ,EAASgS,GAGxF,MAAMC,QAAoBC,QAAQC,IAAIJ,EAAa7Y,IAAI,EAAGvB,UAAWA,EAAK8N,KAAKsB,KAGzEqL,EAAsBL,EACzB7Y,IAAI8B,GAASA,EAAMqX,aAAa,IAChC1S,OAAO1E,IAAS+E,GAAWA,EAAQsS,SAASrX,IACzCsX,EAAcvS,GAAWoS,EACzBI,EAAgBD,EAAYrZ,IAAI+B,GA
AQ8W,EAAaU,UAAU5W,GAAUA,EAAOwW,aAAa,KAAOpX,IAGpGyX,EAAcrC,EAAYD,EAChC,GAAkB,WAAd4B,EAAwB,CAE1B,MAAMW,EAAY,IAAI5Z,MAAM2Z,GAC5B,IAAK,IAAIE,EAAY,EAAGA,EAAYF,EAAaE,IAAa,CAC5D,MAAMC,EAAMzC,EAAcwC,EAGpBE,EAAU,CAAA,EAChB,IAAK,IAAI9d,EAAI,EAAGA,EAAI+c,EAAa5Z,OAAQnD,IACvC8d,EAAQf,EAAa/c,GAAGqd,aAAa,IAAMJ,EAAYjd,GAAG6d,GAE5DF,EAAUC,GAAaE,CACzB,CACA,OAAOH,CACT,CAGA,MAAMA,EAAY,IAAI5Z,MAAM2Z,GAC5B,IAAK,IAAIE,EAAY,EAAGA,EAAYF,EAAaE,IAAa,CAC5D,MAAMC,EAAMzC,EAAcwC,EAEpBE,EAAU,IAAI/Z,MAAMgZ,EAAa5Z,QACvC,IAAK,IAAInD,EAAI,EAAGA,EAAIud,EAAYpa,OAAQnD,IAClCwd,EAAcxd,IAAM,IACtB8d,EAAQ9d,GAAKid,EAAYO,EAAcxd,IAAI6d,IAG/CF,EAAUC,GAAaE,CACzB,CACA,OAAOH,CACT,CC7HOrV,eAAeyV,GAAYC,GAEhCA,EAAQvU,iBAAmBlB,EAAqByV,EAAQC,MAGxD,MAAMC,EAgED,SAA0BF,GAC/B,IAAKA,EAAQvU,SAAU,MAAM,IAAInJ,MAAM,6BAIvC,MAAM6d,ET/ED,UAAqB1U,SAAEA,EAAQoS,SAAEA,EAAW,EAACC,OAAEA,EAAStW,IAAQwF,QAAEA,IACvE,IAAKvB,EAAU,MAAM,IAAInJ,MAAM,iCAE/B,MAAM8d,EAAS,GAETC,EAAU,GAGhB,IAAIlD,EAAa,EACjB,IAAK,MAAMpQ,KAAYtB,EAASqB,WAAY,CAC1C,MAAMwT,EAAY5c,OAAOqJ,EAASF,UAC5B0T,EAAWpD,EAAamD,EAE9B,GAAIA,EAAY,GAAKC,GAAY1C,GAAYV,EAAaW,EAAQ,CAEhE,MAAM0C,EAAS,GAEf,IAAK,MAAMtT,UAAEA,EAASE,UAAEA,KAAeL,EAASC,QAAS,CACvD,GAAIE,EAAW,MAAM,IAAI5K,MAAM,mCAC/B,IAAK8K,EAAW,MAAM,IAAI9K,MAAM,wCAE3B0K,IAAWA,EAAQsS,SAASlS,EAAUE,eAAe,KACxDkT,EAAOte,KAAKgS,EAAe9G,GAE/B,CACA,MAAMgQ,EAAcva,KAAK+H,IAAIiT,EAAWV,EAAY,GAC9CE,EAAYxa,KAAKuO,IAAI0M,EAASX,EAAYmD,GAChDF,EAAOle,KAAK,CAAEse,SAAQzT,WAAUoQ,aAAYmD,YAAWlD,cAAaC,cAGpE,MAAMoD,EAAYD,EAAOA,EAAOrb,OAAS,IAAIkP,QAAUmM,EAAO,IAAIpM,UAClE,IAAKpH,GAAWyT,EA3CS,SA6CvBJ,EAAQne,KAAK,CACXkS,UAAWoM,EAAO,GAAGpM,UACrBC,QAASmM,EAAOA,EAAOrb,OAAS,GAAGkP,eAEhC,GAAImM,EAAOrb,OAChB4M,EAAOsO,EAASG,QACX,GAAIxT,GAAS7H,OAClB,MAAM,IAAI7C,MAAM,8BAA8B0K,EAAQ0I,KAAK,QAE/D,CAEAyH,EAAaoD,CACf,CAGA,OAFKG,SAAS5C,KAASA,EAASX,GAEzB,CAAE1R,WAAUoS,WAAUC,SAAQ9Q,UAASqT,UAASD,SACzD,CS8BeO,CAAYX,GAIzB,OAHAA,EAAQC,KTVH,SAA6BA,GAAMI,QAAEA,IAE1C,MAAMO,EAAWP,EAAQna,IAAI,EAAGkO,YAAWC,aAAc4L,EAAKtX,MAAMyL,EAAWC,IAC/E,MAAO,CACL7P,WAAYyb,EAAKzb,WACjB,KAAAmE,CAAMuK,EAAOC,EAAM8M,EAAKzb,YAEtB,MAAMqc,EAAQR,EAAQZ,UAAU,EAAGrL,YAAWC,aAAcD,GAAalB,GAASC,GAAOkB,GACzF,GAAIwM,EAAQ,EAAG,MAAM,IAAIve,MAAM,0BAA0B4Q,MAAUC,MACnE,GAAIkN,EAAQQ,GAAOzM,YAAclB,GAASmN,EAAQQ,GAAOxM,UAAYlB,EAAK,CAExE,MAAMiF,EAAclF,EAAQmN,EAAQQ,GAAOzM,UACrC0M,EAAY3N,EAAMkN,EAAQQ,GAAOzM,UACvC,OAAIwM,EAASC,aAAkB3B,QACtB0B,EAASC,GAAOpO,KAAKnO,GAAUA,EAAOqE,MAAMyP,EAAa0I,IAEzDF,EAASC,GAAOlY,MAAMyP,EAAa0I,EAE9C,CACE,OAAOF,EAASC,EAEpB,EAEJ,CSbiBE,CAAoBf,EAAQC,KAAME,GAG1CA,EAAKC,OAAOla,IAAI8a,GDhFlB,SAAsBhB,GAASvU,SAAEA,EAAQuB,QAAEA,GAAWgU,GAC3D,MAAMf,KAAEA,EAAI7E,YAAEA,EAAW7V,KAAEA,GAASya,EAG9BjB,EAAe,GAEfzZ,EAAU,IAAKhC,KAAoB0c,EAAQ1a,SAGjD,IAAK,MAAM4H,UAAEA,EAASE,UAAEA,KAAe4T,EAAUjU,SAASC,QAAS,CACjE,GAAIE,EAAW,MAAM,IAAI5K,MAAM,mCAC/B,IAAK8K,EAAW,MAAM,IAAI9K,MAAM,wCAGhC,MAAMib,EAAanQ,EAAUE,eAAe,GAC5C,GAAIN,IAAYA,EAAQsS,SAAS/B,GAAa,SAE9C,MAAMnJ,UAAEA,EAASC,QAAEA,GAAYH,EAAe9G,GACxC6T,EAAc5M,EAAUD,EAI9B,GAAI6M,EAAc,GAAK,GAAI,CACzBC,QAAQC,KAAK,iCAAiC/T,EAAUE,mBAAmB2T,WAE3E,QACF,CAIA,MAAM3c,EAAS4a,QAAQkC,QAAQnB,EAAKtX,MAAMyL,EAAWC,IAGrD0K,EAAa7c,KAAK,CAChBmd,aAAcjS,EAAUE,eACxB3I,KAAML,EAAOmO,KAAKlH,IAChB,MAAMhD,EAAaL,EAAcuD,EAAS/D,OAAQ0F,EAAUE,gBACtD/L,EAAS,CAAEgB,KAAM,IAAI8B,SAASkH,GAAc7I,OAAQ,GAEpDoC,EAAgB,CACpByY,WAFgBnQ,EAAUE,eAAeoI,KAAK,KAG9ChU,KAAM0L,EAAU1L,KAChB2D,QAASkD,EAAWA,EAAWpD,OAAS,GAAGE,QAC3CkD,aACAgF,MAAOH,EAAUG,MACjBjI,UACA8V,cACA7V,QAEF,OAAO2X,GAAW3b,EAAQyf,EAAWlc,EAAekb,EAAQ1C,WAGlE,CAEA,MAAO,CAAEH,WAAY6D,EAAU7D,WAAYmD,UAAWU,EAAUV,UAAWvB,eAC7E,CCyBsCsC,CAAarB,EAASG,EAAMa,GAClE,CA1EsBM,CAAiBtB,IAE/BnC,SAAEA,EAAW,EAACC,OAAEA,EAAM9Q,QAAE
A,EAAOuU,QAAEA,EAAOC,WAAEA,EAAUxC,UAAEA,GAAcgB,EAG1E,IAAKwB,IAAeD,EAAS,CAC3B,IAAK,MAAMxC,aAAEA,KAAkBmB,EAC7B,IAAK,MAAMvb,KAAEA,KAAUoa,QAAoBpa,EAE7C,MACF,CAGA,MAAM8C,EX4KD,UAAuBC,OAAEA,IAC9B,OAAOQ,EAAcR,EAAQ,IAAI,EACnC,CW9KqB+Z,CAAczB,EAAQvU,UACnCiW,EAAYxB,EAAYha,IAAIyb,GDmH7B,SAAuBC,EAAena,GAC3C,MAAMsX,aAAEA,GAAiB6C,EAEnBF,EAAY,GAClB,IAAK,MAAM1Z,KAASP,EAAWI,SAC7B,GAAIG,EAAMH,SAAS1C,OAAQ,CACzB,MAAM0c,EAAe9C,EAAapS,OAAO9D,GAAUA,EAAOwW,aAAa,KAAOrX,EAAM3C,QAAQ4C,MAC5F,IAAK4Z,EAAa1c,OAAQ,SAI1B,MAAM2c,EAAW,IAAIpO,IACf/O,EAAOua,QAAQC,IAAI0C,EAAa3b,IAAI2C,GACjCA,EAAOlE,KAAK8N,KAAKmL,IACtBkE,EAASzW,IAAIxC,EAAOwW,aAAa3J,KAAK,KAAM3B,EAAQ6J,QAEpDnL,KAAK,KAEP8C,GAAeuM,EAAU9Z,GACzB,MAAM+Z,EAAaD,EAAShP,IAAI9K,EAAMJ,KAAK8N,KAAK,MAChD,IAAKqM,EAAY,MAAM,IAAIzf,MAAM,qCACjC,MAAO,CAACyf,KAGVL,EAAUxf,KAAK,CAAEmd,aAAcrX,EAAMJ,KAAMjD,QAC7C,KAAO,CAEL,MAAMqd,EAAcjD,EAAa1W,KAAKQ,GAAUA,EAAOwW,aAAa,KAAOrX,EAAM3C,QAAQ4C,MACrF+Z,GACFN,EAAUxf,KAAK8f,EAEnB,CAEF,MAAO,IAAKJ,EAAe7C,aAAc2C,EAC3C,CCrJ2CO,CAAcN,EAAKla,IAG5D,GAAI8Z,EACF,IAAK,MAAMW,KAAcR,EACvB,IAAK,MAAMM,KAAeE,EAAWnD,aACnCiD,EAAYrd,KAAK8N,KAAKwM,IACpB,IAAIpB,EAAWqE,EAAW/E,WAC1B,IAAK,MAAMS,KAAcqB,EACvBsC,EAAQ,CACNhE,WAAYyE,EAAY3C,aAAa,GACrCzB,aACAC,WACAC,OAAQD,EAAWD,EAAWzY,SAEhC0Y,GAAYD,EAAWzY,SAQjC,GAAIqc,EAAY,CAGd,MAAMW,EAAO,GACb,IAAK,MAAMD,KAAcR,EAAW,CAElC,MAAMtE,EAAcva,KAAK+H,IAAIiT,EAAWqE,EAAW/E,WAAY,GACzDE,EAAYxa,KAAKuO,KAAK0M,GAAUtW,KAAY0a,EAAW/E,WAAY+E,EAAW5B,WAKpFvO,EAAOoQ,EAHyB,WAAdnD,QACVF,GAAiBoD,EAAY9E,EAAaC,EAAWrQ,EAAS,gBAC9D8R,GAAiBoD,EAAY9E,EAAaC,EAAWrQ,EAAS,SAExE,CACAwU,EAAWW,EACb,MAEE,IAAK,MAAMpD,aAAEA,KAAkB2C,EAC7B,IAAK,MAAM/c,KAAEA,KAAUoa,QAAoBpa,CAGjD,CCpEO2F,eAAe8X,IAAUnC,KAAEA,EAAI7E,YAAEA,IACtC,MAAM3P,QAAiBlB,EAAqB0V,GACtCoC,EAAc5W,EAASkC,oBAAoBtF,KAAKia,GAAiB,QAAXA,EAAG5R,KAC/D,IAAK2R,EACH,MAAM,IAAI/f,MAAM,mDAIlB,MAAMigB,EAAYlc,KAAKC,MAAM+b,EAAYjc,OAAS,MAG5CzB,QD2G2Bqb,EC3GK,CAAEC,OAAMxU,WAAUlG,MAAM,EAAO6V,eD4G9D,IAAI8D,QAAQ,CAACsC,EAAYgB,KAC9BzC,GAAY,IACPC,EACHhB,UAAW,SACXwC,eACCiB,MAAMD,MANN,IAA4BxC,ECxGjC,MAAM0C,EAAW,GACXC,EAAgBJ,EAAUK,gBAAkB,WAClD,IAAK,MAAM/C,KAAOlb,EAAM,CACtB,MAAMke,EAAMhD,EAAI8C,GAChB,IAAKE,EAEH,SAGF,MAAMC,EAAWC,GAAUF,GAIrBG,EAAa,CAAA,EACnB,IAAK,MAAMtS,KAAOwG,OAAOV,KAAKqJ,GAAM,CAClC,MAAMzZ,EAAQyZ,EAAInP,GACdA,IAAQiS,GAA2B,OAAVvc,IAC3B4c,EAAWtS,GAAOtK,EAEtB,CAGA,MAAM6c,EAAU,CACdvhB,KAAM,UACNohB,WACAE,cAGFN,EAASxgB,KAAK+gB,EAChB,CAEA,MAAO,CACLvhB,KAAM,oBACNghB,WAEJ,CAKO,SAASK,GAAUze,GACxB,OAAOhD,EAAa,CAAEiB,KAAM,IAAI8B,SAASC,EAAOA,OAAQA,EAAOC,WAAYD,EAAOE,YAAa9B,OAAQ,GACzG,ECpEA4H,iBAEE,MAAMoJ,IAAEA,SAAcwP,OAAOC,KAAKC,cAAc,QAG1Cld,EAAM,IAAIwN,EAFsB2P,SAASC,eAAe,OAErC,CACvBC,OAAQ,CAAEC,IAAK,GAAIC,KAAK,IACxBC,KAAM,IAMR,IAEE,MAAMzD,EAAO1M,QACLrB,EAAmB,CAAEC,IALZ,0DAK6B3N,WAAY,SAE1D0c,QAAQyC,IAAI,mBAAoB1D,GAChC,MAAM2D,QAAgBxB,GAAU,CAAEnC,SAElCiB,QAAQyC,IAAI,WAAYC,GAGxB1d,EAAIvB,KAAKkf,WAAWD,EACtB,CAAE,MAAOE,GACP5C,QAAQ4C,MAAM,4CAA6CA,EAC7D,CACF,CACAC","x_google_ignoreList":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]} \ No newline at end of file diff --git a/package.json b/package.json index ef6b84f..fd58b5c 100644 --- a/package.json +++ b/package.json @@ -18,19 +18,19 @@ "url": "git+https://github.com/hyparam/geoparquet.git" }, "dependencies": { - "hyparquet": "1.17.3" + "hyparquet": "1.19.0" }, "devDependencies": { - "@rollup/plugin-node-resolve": "16.0.1", + "@rollup/plugin-node-resolve": "16.0.2", "@rollup/plugin-terser": "0.4.4", "@types/google.maps": "3.58.1", - "@types/node": "24.3.0", + "@types/node": "24.7.0", "@vitest/coverage-v8": "3.2.4", - "eslint": "9.33.0", + "eslint": "9.37.0", "http-server": "14.1.1", 
"hyparquet-compressors": "1.1.1", - "rollup": "4.46.2", - "typescript": "5.9.2", + "rollup": "4.52.4", + "typescript": "5.9.3", "vitest": "3.2.4" }, "files": [ diff --git a/src/index.js b/src/index.js index 51bc3ea..3b2818f 100644 --- a/src/index.js +++ b/src/index.js @@ -1,3 +1,2 @@ export { asyncBufferFromUrl } from 'hyparquet' -export { toGeoJson } from './toGeoJson.js' -export { decodeWKB } from './wkb.js' +export { toGeoJson, decodeWKB } from './toGeoJson.js' diff --git a/src/toGeoJson.js b/src/toGeoJson.js index a323e3f..279fcb1 100644 --- a/src/toGeoJson.js +++ b/src/toGeoJson.js @@ -1,5 +1,5 @@ import { parquetMetadataAsync, parquetReadObjects } from 'hyparquet' -import { decodeWKB } from './wkb.js' +import { wkbToGeojson } from 'hyparquet/src/wkb.js' /** * Convert a GeoParquet file to GeoJSON. @@ -63,3 +63,10 @@ export async function toGeoJson({ file, compressors }) { features, } } + +/** + * @param {Uint8Array} buffer + */ +export function decodeWKB(buffer) { + return wkbToGeojson({ view: new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength), offset: 0 }) +} diff --git a/src/wkb.js b/src/wkb.js deleted file mode 100644 index 6c5510d..0000000 --- a/src/wkb.js +++ /dev/null @@ -1,144 +0,0 @@ - -const geometryTypePoint = 1 -const geometryTypeLineString = 2 -const geometryTypePolygon = 3 -const geometryTypeMultiPoint = 4 -const geometryTypeMultiLineString = 5 -const geometryTypeMultiPolygon = 6 -const geometryTypeGeometryCollection = 7 -const geometryTypeCircularString = 8 -const geometryTypeCompoundCurve = 9 -const geometryTypeCurvePolygon = 10 -const geometryTypeMultiCurve = 11 -const geometryTypeMultiSurface = 12 -const geometryTypeCurve = 13 -const geometryTypeSurface = 14 -const geometryTypePolyhedralSurface = 15 -const geometryTypeTIN = 16 -const geometryTypeTriangle = 17 -const geometryTypeCircle = 18 -const geometryTypeGeodesicString = 19 -const geometryTypeEllipticalCurve = 20 -const geometryTypeNurbsCurve = 21 -const geometryTypeClothoid = 22 -const geometryTypeSpiralCurve = 23 -const geometryTypeCompoundSurface = 24 - -/** - * WKB (Well Known Binary) decoder for geometry objects. 
diff --git a/src/wkb.js b/src/wkb.js
deleted file mode 100644
index 6c5510d..0000000
--- a/src/wkb.js
+++ /dev/null
@@ -1,144 +0,0 @@
-
-const geometryTypePoint = 1
-const geometryTypeLineString = 2
-const geometryTypePolygon = 3
-const geometryTypeMultiPoint = 4
-const geometryTypeMultiLineString = 5
-const geometryTypeMultiPolygon = 6
-const geometryTypeGeometryCollection = 7
-const geometryTypeCircularString = 8
-const geometryTypeCompoundCurve = 9
-const geometryTypeCurvePolygon = 10
-const geometryTypeMultiCurve = 11
-const geometryTypeMultiSurface = 12
-const geometryTypeCurve = 13
-const geometryTypeSurface = 14
-const geometryTypePolyhedralSurface = 15
-const geometryTypeTIN = 16
-const geometryTypeTriangle = 17
-const geometryTypeCircle = 18
-const geometryTypeGeodesicString = 19
-const geometryTypeEllipticalCurve = 20
-const geometryTypeNurbsCurve = 21
-const geometryTypeClothoid = 22
-const geometryTypeSpiralCurve = 23
-const geometryTypeCompoundSurface = 24
-
-/**
- * WKB (Well Known Binary) decoder for geometry objects.
- *
- * @import { Geometry } from '../src/geojson.js'
- * @param {Uint8Array} wkb
- * @returns {Geometry} GeoJSON geometry object
- */
-export function decodeWKB(wkb) {
-  const dv = new DataView(wkb.buffer, wkb.byteOffset, wkb.byteLength)
-  let offset = 0
-
-  // Byte order: 0 = big-endian, 1 = little-endian
-  const byteOrder = wkb[offset]; offset += 1
-  const isLittleEndian = byteOrder === 1
-
-  // Read geometry type
-  const geometryType = dv.getUint32(offset, isLittleEndian)
-  offset += 4
-
-  // WKB geometry types (OGC):
-  if (geometryType === geometryTypePoint) {
-    // Point
-    const x = dv.getFloat64(offset, isLittleEndian); offset += 8
-    const y = dv.getFloat64(offset, isLittleEndian); offset += 8
-    return { type: 'Point', coordinates: [x, y] }
-  } else if (geometryType === geometryTypeLineString) {
-    // LineString
-    const numPoints = dv.getUint32(offset, isLittleEndian); offset += 4
-    const coords = []
-    for (let i = 0; i < numPoints; i++) {
-      const x = dv.getFloat64(offset, isLittleEndian); offset += 8
-      const y = dv.getFloat64(offset, isLittleEndian); offset += 8
-      coords.push([x, y])
-    }
-    return { type: 'LineString', coordinates: coords }
-  } else if (geometryType === geometryTypePolygon) {
-    // Polygon
-    const numRings = dv.getUint32(offset, isLittleEndian); offset += 4
-    const coords = []
-    for (let r = 0; r < numRings; r++) {
-      const numPoints = dv.getUint32(offset, isLittleEndian); offset += 4
-      const ring = []
-      for (let p = 0; p < numPoints; p++) {
-        const x = dv.getFloat64(offset, isLittleEndian); offset += 8
-        const y = dv.getFloat64(offset, isLittleEndian); offset += 8
-        ring.push([x, y])
-      }
-      coords.push(ring)
-    }
-    return { type: 'Polygon', coordinates: coords }
-  } else if (geometryType === geometryTypeMultiPolygon) {
-    // MultiPolygon
-    const numPolygons = dv.getUint32(offset, isLittleEndian); offset += 4
-    const polygons = []
-    for (let i = 0; i < numPolygons; i++) {
-      // Each polygon has its own byte order & geometry type
-      const polyIsLittleEndian = wkb[offset] === 1; offset += 1
-      const polyType = dv.getUint32(offset, polyIsLittleEndian); offset += 4
-      if (polyType !== geometryTypePolygon) {
-        throw new Error(`Expected Polygon in MultiPolygon, got ${polyType}`)
-      }
-      const numRings = dv.getUint32(offset, polyIsLittleEndian); offset += 4
-
-      const pgCoords = []
-      for (let r = 0; r < numRings; r++) {
-        const numPoints = dv.getUint32(offset, polyIsLittleEndian); offset += 4
-        const ring = []
-        for (let p = 0; p < numPoints; p++) {
-          const x = dv.getFloat64(offset, polyIsLittleEndian); offset += 8
-          const y = dv.getFloat64(offset, polyIsLittleEndian); offset += 8
-          ring.push([x, y])
-        }
-        pgCoords.push(ring)
-      }
-      polygons.push(pgCoords)
-    }
-    return { type: 'MultiPolygon', coordinates: polygons }
-  } else if (geometryType === geometryTypeMultiPoint) {
-    // MultiPoint
-    const numPoints = dv.getUint32(offset, isLittleEndian); offset += 4
-    const points = []
-    for (let i = 0; i < numPoints; i++) {
-      // Each point has its own byte order & geometry type
-      const pointIsLittleEndian = wkb[offset] === 1; offset += 1
-      const pointType = dv.getUint32(offset, pointIsLittleEndian); offset += 4
-      if (pointType !== geometryTypePoint) {
-        throw new Error(`Expected Point in MultiPoint, got ${pointType}`)
-      }
-      const x = dv.getFloat64(offset, pointIsLittleEndian); offset += 8
-      const y = dv.getFloat64(offset, pointIsLittleEndian); offset += 8
-      points.push([x, y])
-    }
-    return { type: 'MultiPoint', coordinates: points }
-  } else if (geometryType === geometryTypeMultiLineString) {
-    // MultiLineString
-    const numLineStrings = dv.getUint32(offset, isLittleEndian); offset += 4
-    const lineStrings = []
-    for (let i = 0; i < numLineStrings; i++) {
-      // Each line has its own byte order & geometry type
-      const lineIsLittleEndian = wkb[offset] === 1; offset += 1
-      const lineType = dv.getUint32(offset, lineIsLittleEndian); offset += 4
-      if (lineType !== geometryTypeLineString) {
-        throw new Error(`Expected LineString in MultiLineString, got ${lineType}`)
-      }
-      const numPoints = dv.getUint32(offset, isLittleEndian); offset += 4
-      const coords = []
-      for (let p = 0; p < numPoints; p++) {
-        const x = dv.getFloat64(offset, lineIsLittleEndian); offset += 8
-        const y = dv.getFloat64(offset, lineIsLittleEndian); offset += 8
-        coords.push([x, y])
-      }
-      lineStrings.push(coords)
-    }
-    return { type: 'MultiLineString', coordinates: lineStrings }
-  } else {
-    throw new Error(`Unsupported geometry type: ${geometryType}`)
-  }
-}
diff --git a/test/wkb.test.js b/test/wkb.test.js
index 64f40d4..9c11cd0 100644
--- a/test/wkb.test.js
+++ b/test/wkb.test.js
@@ -1,5 +1,5 @@
 import { describe, expect, it } from 'vitest'
-import { decodeWKB } from '../src/wkb.js'
+import { decodeWKB } from '../src/index.js'
 
 describe('WKB decoding', () => {
   it('should decoding well-known binary Point', () => {