 import oldFs, { createReadStream, promises as fs } from "node:fs";
-import { extendsFS } from "@sirherobrine23/extends";
+import { extendsFS, extendStream as stream } from "@sirherobrine23/extends";
 import { finished } from "node:stream/promises";
 import { format } from "node:util";
-import stream from "node:stream";
 import path from "node:path";
+import { EventMap, defineEvents } from "@sirherobrine23/extends/src/stream.js";

 export type arHeader = {
   name: string,
@@ -48,139 +48,124 @@ export function createHead(filename: string, info: fileInfo) {
   return controlHead;
 }

-export class arParseAbstract extends stream.Writable {
+export class arParse<T extends EventMap = {}> extends stream.Writable<defineEvents<{ entry(header: arHeader, stream: stream.nodeStream.Readable): void }>, [T]> {
   #fileStreamSize: number;
   #fileStream?: stream.Readable;
   #oldBuffer?: Buffer;
   #initialHead = true;
-  constructor(entry: (header: arHeader, stream: stream.Readable) => void) {
+  constructor(private entry?: (header: arHeader, stream: stream.nodeStream.Readable) => void) {
     super({
       defaultEncoding: "binary",
       objectMode: false,
       autoDestroy: true,
       decodeStrings: true,
       emitClose: true,
-      highWaterMark: 1024,
-      final: (callback) => {
-        if (this.#fileStream && this.#oldBuffer) {
-          if (!this.#fileStream.destroyed || this.#fileStream.readable) {
-            this.#fileStream.push(this.#oldBuffer.subarray(0, this.#fileStreamSize));
-            this.#fileStream.push(null);
-          }
-        }
-        this.#oldBuffer = undefined;
-        callback();
-      },
-      destroy: (error, callback) => {
-        if (this.#fileStream && error) this.#fileStream.destroy(error);
-        callback(error);
-      },
-      write: (remoteChunk, encoding, callback) => {
-        let chunk = Buffer.isBuffer(remoteChunk) ? remoteChunk : Buffer.from(remoteChunk, encoding);
-        if (this.#oldBuffer) chunk = Buffer.concat([this.#oldBuffer, chunk]);
-        this.#oldBuffer = undefined;
-        // file signature
-        if (this.#initialHead) {
-          // More buffer to maneger correctly
-          if (chunk.length < 70) {
-            this.#oldBuffer = chunk;
-            return callback();
-          }
-          const signature = chunk.subarray(0, 8).toString("ascii");
-          if (signature !== "!<arch>\n") return callback(new Error(format("Invalid ar file, recived: %O", signature)));
-          this.#initialHead = false;
-          chunk = chunk.subarray(8);
+      highWaterMark: 1024
+    });
+  }
+
+  _destroy(error: Error, callback: (error?: Error) => void): void {
+    if (this.#fileStream && error) this.#fileStream.destroy(error);
+    callback(error);
+  }
+
+  _final(callback: (error?: Error) => void): void {
+    if (this.#fileStream && this.#oldBuffer) {
+      if (!this.#fileStream.destroyed || this.#fileStream.readable) {
+        this.#fileStream.push(this.#oldBuffer.subarray(0, this.#fileStreamSize));
+        this.#fileStream.push(null);
+      }
+    }
+    this.#oldBuffer = undefined;
+    callback();
+  }
+
+  _write(remoteChunk: Buffer, encoding: BufferEncoding, callback: (error?: Error) => void) {
+    let chunk = Buffer.isBuffer(remoteChunk) ? remoteChunk : Buffer.from(remoteChunk, encoding);
+    if (this.#oldBuffer) chunk = Buffer.concat([this.#oldBuffer, chunk]);
+    this.#oldBuffer = undefined;
+    // file signature
+    if (this.#initialHead) {
+      // More buffer to maneger correctly
+      if (chunk.length < 70) {
+        this.#oldBuffer = chunk;
+        return callback();
+      }
+      const signature = chunk.subarray(0, 8).toString("ascii");
+      if (signature !== "!<arch>\n") return callback(new Error(format("Invalid ar file, recived: %O", signature)));
+      this.#initialHead = false;
+      chunk = chunk.subarray(8);
+    }
+
+    // if exists chunk and is not empty save to next request
+    if (chunk.length > 0) {
+      // if exist file stream and chunk is not empty
+      if (this.#fileStream) {
+        const fixedChunk = chunk.subarray(0, this.#fileStreamSize);
+        if (!this.#fileStream.destroyed || this.#fileStream.readable) this.#fileStream.push(fixedChunk);
+        this.#fileStreamSize -= fixedChunk.length;
+        chunk = chunk.subarray(fixedChunk.length);
+        if (this.#fileStreamSize <= 0) {
+          this.#fileStream.push(null);
+          this.#fileStream = undefined;
         }
+        if (chunk.length <= 0) return callback();
+      }
+    }

-        // if exists chunk and is not empty save to next request
-        if (chunk.length > 0) {
-          // if exist file stream and chunk is not empty
-          if (this.#fileStream) {
-            const fixedChunk = chunk.subarray(0, this.#fileStreamSize);
-            if (!this.#fileStream.destroyed || this.#fileStream.readable) this.#fileStream.push(fixedChunk);
-            this.#fileStreamSize -= fixedChunk.length;
-            chunk = chunk.subarray(fixedChunk.length);
-            if (this.#fileStreamSize <= 0) {
-              this.#fileStream.push(null);
-              this.#fileStream = undefined;
-            }
-            if (chunk.length <= 0) return callback();
+    // more buffer
+    if (chunk.length >= 60) {
+      for (let chunkByte = 0; chunkByte < chunk.length; chunkByte++) {
+        const lastByteHead = chunkByte;
+        const fistCharByte = lastByteHead - 60;
+        if (fistCharByte < 0) continue;
+        const head = chunk.subarray(fistCharByte, lastByteHead);
+        const name = head.subarray(0, 16).toString("ascii").trim();
+        const time = new Date(parseInt(head.subarray(16, 28).toString("ascii").trim()) * 1000);
+        const owner = parseInt(head.subarray(28, 34).toString("ascii").trim());
+        const group = parseInt(head.subarray(34, 40).toString("ascii").trim());
+        const mode = parseInt(head.subarray(40, 48).toString("ascii").trim());
+        const size = parseInt(head.subarray(48, 58).toString("ascii").trim());
+
+        // One to error
+        if ((!name) || (time.toString() === "Invalid Date") || (isNaN(owner)) || (isNaN(group)) || (isNaN(mode)) || (isNaN(size))) continue;
+        if (head.subarray(58, 60).toString("ascii") !== "`\n") continue;
+
+        if (fistCharByte >= 1) {
+          const chucked = chunk.subarray(0, fistCharByte);
+          if (this.#fileStream && chucked[0] !== 0x0A) {
+            this.#fileStream.push(chucked);
+            this.#fileStream.push(null);
           }
         }

-          // more buffer
-          if (chunk.length >= 60) {
-            for (let chunkByte = 0; chunkByte < chunk.length; chunkByte++) {
-              const lastByteHead = chunkByte;
-              const fistCharByte = lastByteHead - 60;
-              if (fistCharByte < 0) continue;
-              const head = chunk.subarray(fistCharByte, lastByteHead);
-              const name = head.subarray(0, 16).toString("ascii").trim();
-              const time = new Date(parseInt(head.subarray(16, 28).toString("ascii").trim()) * 1000);
-              const owner = parseInt(head.subarray(28, 34).toString("ascii").trim());
-              const group = parseInt(head.subarray(34, 40).toString("ascii").trim());
-              const mode = parseInt(head.subarray(40, 48).toString("ascii").trim());
-              const size = parseInt(head.subarray(48, 58).toString("ascii").trim());
-
-              // One to error
-              if ((!name) || (time.toString() === "Invalid Date") || (isNaN(owner)) || (isNaN(group)) || (isNaN(mode)) || (isNaN(size))) continue;
-              if (head.subarray(58, 60).toString("ascii") !== "`\n") continue;
-
-              if (fistCharByte >= 1) {
-                const chucked = chunk.subarray(0, fistCharByte);
-                if (this.#fileStream && chucked[0] !== 0x0A) {
-                  this.#fileStream.push(chucked);
-                  this.#fileStream.push(null);
-                }
-              }
+        // Cut post header from chunk
+        chunk = chunk.subarray(lastByteHead);
+        if (typeof this.entry === "function" && this.entry.length >= 1) this.entry({ name, time, owner, group, mode, size }, (this.#fileStream = new stream.Readable({ read() {} })));
+        else this.emit("entry", { name, time, owner, group, mode, size }, (this.#fileStream = new stream.Readable({ read() {} })));

-            // Cut post header from chunk
-            chunk = chunk.subarray(lastByteHead);
-            entry({ name, time, owner, group, mode, size }, (this.#fileStream = new stream.Readable({ read() {} })));
-            this.#fileStreamSize = size;
+        this.#fileStreamSize = size;

-            const fileSize = chunk.subarray(0, size);
-            chunk = chunk.subarray(fileSize.length);
-            if (!this.#fileStream.destroyed || this.#fileStream.readable) this.#fileStream.push(fileSize);
-            this.#fileStreamSize -= fileSize.length;
-
-            if (this.#fileStreamSize <= 0) {
-              if (!this.#fileStream.destroyed || this.#fileStream.readable) this.#fileStream.push(null);
-              this.#fileStream = undefined;
-              this.#fileStreamSize = -1;
-            }
+        const fileSize = chunk.subarray(0, size);
+        chunk = chunk.subarray(fileSize.length);
+        if (!this.#fileStream.destroyed || this.#fileStream.readable) this.#fileStream.push(fileSize);
+        this.#fileStreamSize -= fileSize.length;

-            // Restart loop to check if chunk has more headers
-            chunkByte = 0;
-          }
+        if (this.#fileStreamSize <= 0) {
+          if (!this.#fileStream.destroyed || this.#fileStream.readable) this.#fileStream.push(null);
+          this.#fileStream = undefined;
+          this.#fileStreamSize = -1;
         }

-        // Get more buffer data
-        if (chunk.length > 0) this.#oldBuffer = chunk;
-        return callback();
+        // Restart loop to check if chunk has more headers
+        chunkByte = 0;
       }
-    });
-  }
-}
+    }

-export declare interface arParse extends stream.Writable {
-  on(event: "close", listener: () => void): this;
-  on(event: "drain", listener: () => void): this;
-  on(event: "error", listener: (err: Error) => void): this;
-  on(event: "finish", listener: () => void): this;
-  on(event: "pipe", listener: (src: stream.Readable) => void): this;
-  on(event: "unpipe", listener: (src: stream.Readable) => void): this;
-  on(event: "entry", listener: (header: arHeader, stream: stream.Readable) => void): this;
-  on(event: string | symbol, listener: (...args: any[]) => void): this;
-
-  once(event: "close", listener: () => void): this;
-  once(event: "drain", listener: () => void): this;
-  once(event: "error", listener: (err: Error) => void): this;
-  once(event: "finish", listener: () => void): this;
-  once(event: "pipe", listener: (src: stream.Readable) => void): this;
-  once(event: "unpipe", listener: (src: stream.Readable) => void): this;
-  once(event: "entry", listener: (header: arHeader, stream: stream.Readable) => void): this;
-  once(event: string | symbol, listener: (...args: any[]) => void): this;
+    // Get more buffer data
+    if (chunk.length > 0) this.#oldBuffer = chunk;
+    return callback();
+  }
 }

 /**
@@ -195,12 +180,8 @@ export declare interface arParse extends stream.Writable {
 });
  *
  */
-export function parseArStream(): arParse {
-  return new class arParse extends arParseAbstract {
-    constructor() {
-      super((head, str) => this.emit("entry", head, str));
-    }
-  };
+export function parseArStream() {
+  return new arParse();
 }

 export class arStream extends stream.Readable {
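// ---------------------------------------------------------------------------
// Usage sketch (not part of the commit above): one way the reworked arParse
// class might be consumed through parseArStream(). The "entry" event name and
// the arHeader fields come from the diff; the import path "./ar.js" and the
// input file "example.deb" are placeholders, and this assumes the Writable and
// Readable classes from "@sirherobrine23/extends" remain compatible with
// Node.js stream piping and events.
import { createReadStream } from "node:fs";
import { parseArStream } from "./ar.js"; // hypothetical path to this module

const parser = parseArStream();
parser.on("entry", (header, entry) => {
  // header.name and header.size are parsed from each 60-byte ar member header
  console.log("entry %s (%d bytes)", header.name, header.size);
  entry.on("data", () => {}); // drain each member stream so parsing continues
});
createReadStream("example.deb").pipe(parser);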