-
Notifications
You must be signed in to change notification settings - Fork 116
/
Copy pathstreams.ts
309 lines (280 loc) · 11.9 KB
/
streams.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
/*
* Copyright (c) 2020, salesforce.com, inc.
* All rights reserved.
* Licensed under the BSD 3-Clause license.
* For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
import { isAbsolute, join } from 'node:path';
import { pipeline as cbPipeline, Readable, Stream, Transform, Writable } from 'node:stream';
import { promisify } from 'node:util';
import { Messages, SfError } from '@salesforce/core';
import JSZip from 'jszip';
import { createWriteStream, existsSync, promises as fsPromises } from 'graceful-fs';
import { JsonMap } from '@salesforce/ts-types';
import { XMLBuilder } from 'fast-xml-parser';
import { Logger } from '@salesforce/core';
import { SourceComponent } from '../resolve/sourceComponent';
import { SourcePath } from '../common/types';
import { XML_COMMENT_PROP_NAME, XML_DECL } from '../common/constants';
import { ComponentSet } from '../collections/componentSet';
import { RegistryAccess } from '../registry/registryAccess';
import { ensureFileExists } from '../utils/fileSystemHandler';
import { ComponentStatus, FileResponseSuccess } from '../client/types';
import { ForceIgnore } from '../resolve';
import { MetadataTransformerFactory } from './transformers/metadataTransformerFactory';
import { ConvertContext } from './convertContext/convertContext';
import { SfdxFileFormat, WriteInfo, WriterFormat } from './types';
// Register this package's message directory so localized messages resolve relative to the compiled module.
Messages.importMessagesDirectory(__dirname);
// User-facing message bundle for this library (used for conversion error text below).
const messages = Messages.loadMessages('@salesforce/source-deploy-retrieve', 'sdr');
/** Promisified version of node's callback-style stream pipeline, shared across the conversion code. */
export const pipeline = promisify(cbPipeline);
/**
 * Drain a stream into a single Buffer.
 *
 * Collects every `data` chunk and resolves with the concatenation once the
 * stream emits `end`.
 *
 * @param stream - readable stream to consume
 * @returns the stream's complete contents as one Buffer
 * @throws rejects with an `Error` (carrying a stack trace, rather than the
 * bare string the previous implementation rejected with) if the stream errors
 */
export const stream2buffer = async (stream: Stream): Promise<Buffer> =>
  new Promise<Buffer>((resolve, reject) => {
    const chunks: Buffer[] = [];
    // chunks are Buffers unless an encoding was set on the stream, in which
    // case they arrive as strings and must be converted before Buffer.concat.
    stream.on('data', (chunk: Buffer | string) => {
      chunks.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
    });
    stream.on('end', () => resolve(Buffer.concat(chunks)));
    stream.on('error', (err) => reject(new Error(`error converting stream - ${err}`)));
  });
/**
 * Transform stream that converts SourceComponents between the source and
 * metadata formats, emitting one WriterFormat per component.
 */
export class ComponentConverter extends Transform {
  public readonly context = new ConvertContext();
  private transformerFactory: MetadataTransformerFactory;

  public constructor(
    private targetFormat: SfdxFileFormat,
    registry: RegistryAccess,
    private mergeSet?: ComponentSet,
    private defaultDirectory?: string
  ) {
    super({ objectMode: true });
    this.transformerFactory = new MetadataTransformerFactory(registry, this.context);
  }

  public async _transform(
    chunk: SourceComponent,
    encoding: string,
    callback: (err: Error | undefined, data: WriterFormat) => void
  ): Promise<void> {
    let failure: Error | undefined;
    let writeInfos: WriteInfo[] = [];
    // Components flagged for deletion pass straight through with no write infos.
    if (!chunk.isMarkedForDelete()) {
      try {
        const transformer = this.transformerFactory.getTransformer(chunk);
        transformer.defaultDirectory = this.defaultDirectory;
        const mergeWith = this.mergeSet?.getSourceComponents(chunk);
        const conversions: Array<Promise<WriteInfo[]>> = [];
        if (this.targetFormat === 'source') {
          // one conversion per merge target, when merge targets exist
          if (mergeWith) {
            for (const mergeComponent of mergeWith) {
              conversions.push(
                transformer.toSourceFormat({ component: chunk, mergeWith: mergeComponent, mergeSet: this.mergeSet })
              );
            }
          }
          // no merge targets matched — convert without merging
          if (conversions.length === 0) {
            conversions.push(transformer.toSourceFormat({ component: chunk, mergeSet: this.mergeSet }));
          }
        } else if (this.targetFormat === 'metadata') {
          conversions.push(transformer.toMetadataFormat(chunk));
        } else {
          throw new SfError(messages.getMessage('error_convert_invalid_format', [this.targetFormat]), 'LibraryError');
        }
        // could maybe improve all this with lazy async collections...
        writeInfos = (await Promise.all(conversions)).flat();
      } catch (e) {
        failure = e as Error;
      }
    }
    callback(failure, { component: chunk, writeInfos });
  }

  /**
   * Called at the end when all components have passed through the pipeline.
   * Finalizers take care of any additional work to be done at this stage,
   * e.g. recomposing child components.
   */
  public async _flush(callback: (err: Error | undefined, data?: WriterFormat) => void): Promise<void> {
    let failure: Error | undefined;
    try {
      for await (const finalizerResults of this.context.executeFinalizers(this.defaultDirectory)) {
        for (const result of finalizerResults) {
          this.push(result);
        }
      }
    } catch (e) {
      failure = e as Error;
    }
    callback(failure);
  }
}
/**
 * Base class for the writable end of the conversion pipeline.
 * Subclasses receive WriterFormat chunks in object mode and persist them.
 */
export abstract class ComponentWriter extends Writable {
  protected logger: Logger;

  public constructor(protected rootDestination?: SourcePath) {
    super({ objectMode: true });
    // child logger named after the concrete subclass (StandardWriter, ZipWriter, ...)
    this.logger = Logger.childFromRoot(this.constructor.name);
  }
}
/**
 * ComponentWriter that writes converted files directly to the file system under
 * `rootDestination`, honoring the project's forceignore rules, and records which
 * paths were written (`converted`) or removed (`deleted`).
 */
export class StandardWriter extends ComponentWriter {
  /** filepaths that converted files were written to */
  public readonly converted: string[] = [];
  /** success responses for components whose files were deleted instead of written */
  public readonly deleted: FileResponseSuccess[] = [];
  /** ignore rules resolved from rootDestination, used to filter write outputs */
  public readonly forceignore: ForceIgnore;

  public constructor(rootDestination: SourcePath) {
    super(rootDestination);
    this.forceignore = ForceIgnore.findAndCreate(rootDestination);
  }

  /**
   * Writes (or deletes) every file described by the chunk's writeInfos.
   * Outputs are made absolute against rootDestination, then filtered: a path is
   * processed if it already exists on disk or is not force-ignored.
   */
  public async _write(chunk: WriterFormat, encoding: string, callback: (err?: Error) => void): Promise<void> {
    let err: Error | undefined;
    if (chunk.writeInfos.length !== 0) {
      try {
        const toResolve = new Set<string>();
        // it is a reasonable expectation that when a conversion call exits, the files of
        // every component has been written to the destination. This await ensures the microtask
        // queue is empty when that call exits and overall less memory is consumed.
        await Promise.all(
          chunk.writeInfos
            .map(makeWriteInfoAbsolute(this.rootDestination))
            .filter(existsOrDoesntMatchIgnored(this.forceignore))
            .map((info) => {
              // delete-marked infos: record the response and remove the path from disk
              if (info.shouldDelete) {
                this.deleted.push({
                  filePath: info.output,
                  state: ComponentStatus.Deleted,
                  type: info.type,
                  fullName: info.fullName,
                });
                return fsPromises.rm(info.output, { force: true, recursive: true });
              }
              // if there are children, resolve each file. o/w just pick one of the files to resolve
              // "resolve" means "make these show up in the FileResponses"
              if (
                toResolve.size === 0 ||
                chunk.component.type.children !== undefined ||
                // make each decomposed label show up in the fileResponses
                chunk.component.type.strategies?.transformer === 'decomposedLabels'
              ) {
                // This is a workaround for a server side ListViews bug where
                // duplicate components are sent. W-9614275
                if (toResolve.has(info.output)) {
                  this.logger.debug(`Ignoring duplicate metadata for: ${info.output}`);
                  return;
                }
                toResolve.add(info.output);
              }
              ensureFileExists(info.output);
              // NOTE(review): assumes info.source is defined for non-delete infos — confirm upstream guarantees this
              return pipeline(info.source, createWriteStream(info.output));
            })
        );
        this.converted.push(...toResolve);
      } catch (e) {
        err = e as Error;
      }
    }
    callback(err);
  }
}
export class ZipWriter extends ComponentWriter {
/**
* Count of files (not directories) added to the zip file.
*/
public fileCount: number = 0;
private zip = JSZip();
private zipBuffer?: Buffer;
public constructor(rootDestination?: SourcePath) {
super(rootDestination);
const destination = rootDestination ? `for: ${rootDestination}` : 'in memory';
this.logger.debug(`generating zip ${destination}`);
}
public get buffer(): Buffer | undefined {
return this.zipBuffer;
}
public async _write(chunk: WriterFormat, encoding: string, callback: (err?: Error) => void): Promise<void> {
let err: Error | undefined;
try {
await Promise.all(
chunk.writeInfos.filter(isWriteInfoWithSource).map(async (writeInfo) => {
// we don't want to prematurely zip folder types when their children might still be not in the zip
// those files we'll leave open as ReadableStreams until the zip finalizes
if (Boolean(chunk.component.type.folderType) || Boolean(chunk.component.type.folderContentType)) {
return this.addToZip(writeInfo.source, writeInfo.output);
}
// everything else can be zipped immediately to reduce the number of open files (windows has a low limit!) and help perf
const streamAsBuffer = await stream2buffer(writeInfo.source);
return this.addToZip(streamAsBuffer, writeInfo.output);
})
);
} catch (e) {
err = e as Error;
}
callback(err);
}
public async _final(callback: (err?: Error) => void): Promise<void> {
let err: Error | undefined;
try {
this.zipBuffer = await this.zip.generateAsync({
type: 'nodebuffer',
compression: 'DEFLATE',
compressionOptions: { level: 3 },
});
this.logger.debug('Generated zip complete');
} catch (e) {
err = e as Error;
}
callback(err);
}
public addToZip(contents: string | Readable | Buffer, path: SourcePath): void {
// Ensure only posix paths are added to zip files
const posixPath = path.replace(/\\/g, '/');
this.zip.file(posixPath, contents);
this.fileCount++;
}
}
/**
 * Convenient wrapper to serialize a js object to XML content. Implemented as a stream
 * to be used as a valid source for ComponentWriters in the conversion pipeline,
 * even though it's not beneficial in the typical way a stream is.
 *
 * Pushes the entire serialized document in a single chunk, then ends the stream.
 */
export class JsToXml extends Readable {
  public constructor(private xmlObject: JsonMap) {
    super();
  }

  public _read(): void {
    const builder = new XMLBuilder({
      format: true,
      indentBy: '    ',
      ignoreAttributes: false,
      cdataPropName: '__cdata',
      commentPropName: XML_COMMENT_PROP_NAME,
    });
    const builtXml = String(builder.build(this.xmlObject));
    // prepend the XML declaration, then undo xmlBuilder's entity escaping and
    // comment whitespace (see the helpers below)
    const xmlContent = correctComments(XML_DECL.concat(handleSpecialEntities(builtXml)));
    this.push(xmlContent);
    // signal end-of-stream: the whole document was emitted in one chunk
    this.push(null);
  }
}
/**
 * xmlBuilder inserts a newline and indent before/after comments (hypothesis: it
 * uses `<` as a hint to newline/indent). Strip that surrounding whitespace so
 * comments sit flush against their neighbors.
 */
const correctComments = (xml: string): string => {
  if (!xml.includes('<!--')) {
    return xml;
  }
  return xml.replace(/\s+<!--(.*?)-->\s+/g, '<!--$1-->');
};
/**
 * Undo XmlBuilder's escaping of special html entities.
 * The builder replaces a non-breaking space with `'&#160;'` (escaping the `&`);
 * this converts those back to the literal character. Kept as its own function
 * so other special entities can be handled here in the future.
 *
 * See https://github.com/NaturalIntelligence/fast-xml-parser/blob/fa5a7339a5ae2ca4aea8a256179b82464dbf510e/docs/v4/5.Entities.md
 * The parser can call addEntities to support more, but the Builder does not have that option.
 * You also can't use Builder.tagValueProcessor to use this function
 * because the escaping of `&` happens AFTER that is called.
 * */
const handleSpecialEntities = (xml: string): string => xml.split('&#160;').join('\u00a0');
/** discriminate between the shouldDelete and the regular WriteInfo: true only when a source stream is present */
const isWriteInfoWithSource = (writeInfo: WriteInfo): writeInfo is WriteInfo & { source: Readable } =>
  typeof writeInfo.source !== 'undefined';
/**
 * Produce a copy of a WriteInfo whose output path is resolved against
 * rootDestination, unless the output is already absolute. Always returns a
 * fresh object; the input WriteInfo is never mutated.
 */
const makeWriteInfoAbsolute =
  (rootDestination = '') =>
  (writeInfo: WriteInfo): WriteInfo => {
    const { output } = writeInfo;
    return {
      ...writeInfo,
      output: isAbsolute(output) ? output : join(rootDestination, output),
    };
  };
/**
 * Keep a WriteInfo when its output already exists on disk, or when the
 * forceignore rules accept (do not ignore) its output path.
 */
const existsOrDoesntMatchIgnored =
  (forceignore: ForceIgnore) =>
  (writeInfo: WriteInfo): boolean => {
    if (existsSync(writeInfo.output)) {
      return true;
    }
    return forceignore.accepts(writeInfo.output);
  };