@@ -5,16 +5,19 @@ import assert from 'assert';
 import path from 'path';
 
 const TAR_NAME = 'openai.tgz';
-const PACK_FILE = `.pack/${TAR_NAME}`;
+const PACK_FOLDER = '.pack';
+const PACK_FILE = `${PACK_FOLDER}/${TAR_NAME}`;
 const IS_CI = Boolean(process.env['CI'] && process.env['CI'] !== 'false');
 
 async function defaultNodeRunner() {
   await installPackage();
   await run('npm', ['run', 'tsc']);
-  if (state.live) await run('npm', ['test']);
+  if (state.live) {
+    await run('npm', ['test']);
+  }
 }
 
-const projects = {
+const projectRunners = {
   'node-ts-cjs': defaultNodeRunner,
   'node-ts-cjs-web': defaultNodeRunner,
   'node-ts-cjs-auto': defaultNodeRunner,
@@ -76,30 +79,17 @@ const projects = {
     }
   },
   deno: async () => {
+    // we don't need to explicitly install the package here
+    // because our deno setup relies on `rootDir/deno` to exist
+    // which is an artifact produced from our build process
     await run('deno', ['task', 'install']);
-    await installPackage();
-    const packFile = getPackFile();
-
-    const openaiDir = path.resolve(
-      process.cwd(),
-      'node_modules',
-      '.deno',
-      'openai@3.3.0',
-      'node_modules',
-      'openai',
-    );
-
-    await run('sh', ['-c', 'rm -rf *'], { cwd: openaiDir, stdio: 'inherit' });
-    await run('tar', ['xzf', path.resolve(packFile)], { cwd: openaiDir, stdio: 'inherit' });
-    await run('sh', ['-c', 'mv package/* .'], { cwd: openaiDir, stdio: 'inherit' });
-    await run('sh', ['-c', 'rm -rf package'], { cwd: openaiDir, stdio: 'inherit' });
-
     await run('deno', ['task', 'check']);
+
     if (state.live) await run('deno', ['task', 'test']);
   },
 };
 
-const projectNames = Object.keys(projects) as Array<keyof typeof projects>;
+let projectNames = Object.keys(projectRunners) as Array<keyof typeof projectRunners>;
 const projectNamesSet = new Set(projectNames);
 
 function parseArgs() {
@@ -118,6 +108,11 @@ function parseArgs() {
         type: 'boolean',
         default: false,
       },
+      skip: {
+        type: 'array',
+        default: [],
+        description: 'Skip one or more projects. Separate project names with a space.',
+      },
       skipPack: {
         type: 'boolean',
         default: false,
@@ -156,6 +151,10 @@ function parseArgs() {
         default: false,
         description: 'run all projects in parallel (jobs = # projects)',
       },
+      noCleanup: {
+        type: 'boolean',
+        default: false,
+      },
     })
     .help().argv;
 }
@@ -165,9 +164,32 @@ type Args = Awaited<ReturnType<typeof parseArgs>>;
 let state: Args & { rootDir: string };
 
 async function main() {
+  if (!process.env['OPENAI_API_KEY']) {
+    console.error(`Error: The environment variable OPENAI_API_KEY must be set. Run the command
+      $echo 'OPENAI_API_KEY = "'"\${OPENAI_API_KEY}"'"' >> ecosystem-tests/cloudflare-worker/wrangler.toml`);
+    process.exit(0);
+  }
+
   const args = (await parseArgs()) as Args;
   console.error(`args:`, args);
 
+  // Some projects, e.g. Deno can be slow to run, so offer the option to skip them. Example:
+  // --skip=deno node-ts-cjs
+  if (args.skip.length > 0) {
+    args.skip.forEach((projectName, idx) => {
+      // Ensure the inputted project name is lower case
+      args.skip[idx] = (projectName + '').toLowerCase();
+    });
+
+    projectNames = projectNames.filter((projectName) => (args.skip as string[]).indexOf(projectName) < 0);
+
+    args.skip.forEach((projectName) => {
+      projectNamesSet.delete(projectName as any);
+    });
+  }
+
+  const tmpFolderPath = path.resolve(process.cwd(), 'tmp');
+
   const rootDir = await packageDir();
   console.error(`rootDir:`, rootDir);
 
@@ -191,8 +213,63 @@ async function main() {
 
   const failed: typeof projectNames = [];
 
+  let cleanupWasRun = false;
+
+  // Cleanup the various artifacts created as part of executing this script
+  async function runCleanup() {
+    if (cleanupWasRun) {
+      return;
+    }
+    cleanupWasRun = true;
+
+    // Restore the original files in the ecosystem-tests folders from before
+    // npm install was run
+    await fileCache.restoreFiles(tmpFolderPath);
+
+    const packFolderPath = path.join(process.cwd(), PACK_FOLDER);
+
+    try {
+      // Clean up the .pack folder if this was the process that created it.
+      await fs.unlink(PACK_FILE);
+      await fs.rmdir(packFolderPath);
+    } catch (err) {
+      console.log('Failed to delete .pack folder', err);
+    }
+
+    for (let i = 0; i < projectNames.length; i++) {
+      const projectName = (projectNames as any)[i] as string;
+
+      await defaultNodeCleanup(projectName).catch((err: any) => {
+        console.error('Error: Cleanup of file artifacts failed for project', projectName, err);
+      });
+    }
+  }
+
+  async function runCleanupAndExit() {
+    await runCleanup();
+
+    process.exit(1);
+  }
+
+  if (!(await fileExists(tmpFolderPath))) {
+    await fs.mkdir(tmpFolderPath);
+  }
+
   let { jobs } = args;
-  if (args.parallel) jobs = projectsToRun.length;
+  if (args.parallel) {
+    jobs = projectsToRun.length;
+  }
+
+  if (!args.noCleanup) {
+    // The cleanup code is only executed from the parent script that runs
+    // multiple projects.
+    process.on('SIGINT', runCleanupAndExit);
+    process.on('SIGTERM', runCleanupAndExit);
+    process.on('exit', runCleanup);
+
+    await fileCache.cacheFiles(tmpFolderPath);
+  }
+
   if (jobs > 1) {
     const queue = [...projectsToRun];
     const runningProjects = new Set();
@@ -225,7 +302,9 @@
       [...Array(jobs).keys()].map(async () => {
         while (queue.length) {
           const project = queue.shift();
-          if (!project) break;
+          if (!project) {
+            break;
+          }
 
           // preserve interleaved ordering of writes to stdout/stderr
           const chunks: { dest: 'stdout' | 'stderr'; data: string | Buffer }[] = [];
@@ -238,6 +317,7 @@
               __filename,
               project,
               '--skip-pack',
+              '--noCleanup',
               `--retry=${args.retry}`,
               ...(args.live ? ['--live'] : []),
               ...(args.verbose ? ['--verbose'] : []),
@@ -248,14 +328,18 @@
           );
           child.stdout?.on('data', (data) => chunks.push({ dest: 'stdout', data }));
           child.stderr?.on('data', (data) => chunks.push({ dest: 'stderr', data }));
+
           await child;
         } catch (error) {
           failed.push(project);
         } finally {
           runningProjects.delete(project);
         }
 
-        if (IS_CI) console.log(`::group::${failed.includes(project) ? '❌' : '✅'} ${project}`);
+        if (IS_CI) {
+          console.log(`::group::${failed.includes(project) ? '❌' : '✅'} ${project}`);
+        }
+
         for (const { data } of chunks) {
           process.stdout.write(data);
         }
@@ -268,7 +352,7 @@
     clearProgress();
   } else {
     for (const project of projectsToRun) {
-      const fn = projects[project];
+      const fn = projectRunners[project];
 
       await withChdir(path.join(rootDir, 'ecosystem-tests', project), async () => {
         console.error('\n');
@@ -294,6 +378,10 @@
     }
   }
 
+  if (!args.noCleanup) {
+    await runCleanup();
+  }
+
   if (failed.length) {
     console.error(`${failed.length} project(s) failed - ${failed.join(', ')}`);
     process.exit(1);
@@ -340,10 +428,15 @@ async function buildPackage() {
     return;
   }
 
-  if (!(await pathExists('.pack'))) {
-    await fs.mkdir('.pack');
+  if (!(await pathExists(PACK_FOLDER))) {
+    await fs.mkdir(PACK_FOLDER);
   }
 
+  // Run our build script to ensure all of our build artifacts are up to date.
+  // This matters the most for deno as it directly relies on build artifacts
+  // instead of the pack file
+  await run('yarn', ['build']);
+
   const proc = await run('npm', ['pack', '--ignore-scripts', '--json'], {
     cwd: path.join(process.cwd(), 'dist'),
     alwaysPipe: true,
@@ -366,6 +459,11 @@ async function installPackage() {
     return;
   }
 
+  try {
+    // Ensure that there is a clean node_modules folder.
+    await run('rm', ['-rf', `./node_modules`]);
+  } catch (err) {}
+
   const packFile = getPackFile();
   await fs.copyFile(packFile, `./${TAR_NAME}`);
   return await run('npm', ['install', '-D', `./${TAR_NAME}`]);
@@ -440,6 +538,80 @@ export const packageDir = async (): Promise<string> => {
   throw new Error('Package directory not found');
 };
 
+// Caches files that are modified by this script, e.g. package.json,
+// so that they can be restored when the script either finishes or is
+// terminated
+const fileCache = (() => {
+  const filesToCache: Array<string> = ['package.json', 'package-lock.json', 'deno.lock', 'bun.lockb'];
+
+  return {
+    // Copy existing files from each ecosystem-tests project folder to the ./tmp folder
+    cacheFiles: async (tmpFolderPath: string) => {
+      for (let i = 0; i < projectNames.length; i++) {
+        const projectName = (projectNames as any)[i] as string;
+        const projectPath = path.resolve(process.cwd(), 'ecosystem-tests', projectName);
+
+        for (let j = 0; j < filesToCache.length; j++) {
+          const fileName = filesToCache[j] || '';
+
+          const filePath = path.resolve(projectPath, fileName);
+          if (await fileExists(filePath)) {
+            const tmpProjectPath = path.resolve(tmpFolderPath, projectName);
+
+            if (!(await fileExists(tmpProjectPath))) {
+              await fs.mkdir(tmpProjectPath);
+            }
+            await fs.copyFile(filePath, path.resolve(tmpProjectPath, fileName));
+          }
+        }
+      }
+    },
+
+    // Restore the original files to each ecosystem-tests project folder from the ./tmp folder
+    restoreFiles: async (tmpFolderPath: string) => {
+      for (let i = 0; i < projectNames.length; i++) {
+        const projectName = (projectNames as any)[i] as string;
+
+        const projectPath = path.resolve(process.cwd(), 'ecosystem-tests', projectName);
+        const tmpProjectPath = path.resolve(tmpFolderPath, projectName);
+
+        for (let j = 0; j < filesToCache.length; j++) {
+          const fileName = filesToCache[j] || '';
+
+          const filePath = path.resolve(tmpProjectPath, fileName);
+          if (await fileExists(filePath)) {
+            await fs.rename(filePath, path.resolve(projectPath, fileName));
+          }
+        }
+        await fs.rmdir(tmpProjectPath);
+      }
+    },
+  };
+})();
+
+async function defaultNodeCleanup(projectName: string) {
+  try {
+    const projectPath = path.resolve(process.cwd(), 'ecosystem-tests', projectName);
+
+    const packFilePath = path.resolve(projectPath, TAR_NAME);
+
+    if (await fileExists(packFilePath)) {
+      await fs.unlink(packFilePath);
+    }
+  } catch (err) {
+    console.error('Cleanup failed for project', projectName, err);
+  }
+}
+
+async function fileExists(filePath: string) {
+  try {
+    await fs.stat(filePath);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
 main().catch((err) => {
   console.error(err);
   process.exit(1);