Framework: Add build caching
aduth committed Jun 10, 2019
1 parent 42b2748 commit d2cf89e
Showing 3 changed files with 135 additions and 67 deletions.
66 changes: 23 additions & 43 deletions bin/packages/build-worker.js
@@ -2,10 +2,9 @@
* External dependencies
*/
const { promisify } = require( 'util' );
const fs = require( 'fs' );
const fs = require( 'fs-extra' );
const path = require( 'path' );
const babel = require( '@babel/core' );
const makeDir = require( 'make-dir' );
const sass = require( 'node-sass' );
const postcss = require( 'postcss' );

@@ -31,20 +30,6 @@ const JS_ENVIRONMENTS = {
module: 'build-module',
};

/**
* Promisified fs.readFile.
*
* @type {Function}
*/
const readFile = promisify( fs.readFile );

/**
* Promisified fs.writeFile.
*
* @type {Function}
*/
const writeFile = promisify( fs.writeFile );

/**
* Promisified sass.render.
*
@@ -74,7 +59,8 @@ function getBuildPath( file, buildFolder ) {
const pkgSrcPath = path.resolve( PACKAGES_DIR, pkgName, 'src' );
const pkgBuildPath = path.resolve( PACKAGES_DIR, pkgName, buildFolder );
const relativeToSrcPath = path.relative( pkgSrcPath, file );
return path.resolve( pkgBuildPath, relativeToSrcPath );
const buildPath = path.resolve( pkgBuildPath, relativeToSrcPath );
return path.relative( PACKAGES_DIR, buildPath );
}

/**
@@ -87,11 +73,7 @@ const BUILD_TASK_BY_EXTENSION = {
const outputFile = getBuildPath( file.replace( '.scss', '.css' ), 'build-style' );
const outputFileRTL = getBuildPath( file.replace( '.scss', '-rtl.css' ), 'build-style' );

const [ , contents ] = await Promise.all( [
makeDir( path.dirname( outputFile ) ),
readFile( file, 'utf8' ),
] );

const contents = await fs.readFile( file, 'utf8' );
const builtSass = await renderSass( {
file,
includePaths: [ path.resolve( __dirname, '../../assets/stylesheets' ) ],
@@ -118,42 +100,40 @@
to: 'dest/app.css',
} );

await Promise.all( [
writeFile( outputFile, result.css ),
writeFile( outputFileRTL, resultRTL.css ),
] );
return {
[ outputFile ]: result.css,
[ outputFileRTL ]: resultRTL.css,
};
},

async '.js'( file ) {
const built = {};

for ( const [ environment, buildDir ] of Object.entries( JS_ENVIRONMENTS ) ) {
const destPath = getBuildPath( file, buildDir );
const babelOptions = getBabelConfig( environment, file.replace( PACKAGES_DIR, '@wordpress' ) );

const [ , transformed ] = await Promise.all( [
makeDir( path.dirname( destPath ) ),
babel.transformFileAsync( file, babelOptions ),
] );
const transformed = await babel.transformFileAsync( file, babelOptions );

await Promise.all( [
writeFile( destPath + '.map', JSON.stringify( transformed.map ) ),
writeFile( destPath, transformed.code + '\n//# sourceMappingURL=' + path.basename( destPath ) + '.map' ),
] );
built[ destPath + '.map' ] = JSON.stringify( transformed.map );
built[ destPath ] = transformed.code + '\n//# sourceMappingURL=' + path.basename( destPath ) + '.map';
}

return built;
},
};

module.exports = async ( file, callback ) => {
const extension = path.extname( file );
const task = BUILD_TASK_BY_EXTENSION[ extension ];

if ( ! task ) {
return;
}

try {
await task( file );
callback();
} catch ( error ) {
callback( error );
if ( task ) {
try {
callback( null, await task( file ) );
} catch ( error ) {
callback( error );
}
} else {
callback( null, {} );
}
};
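
For orientation, a minimal standalone sketch of the reworked worker contract (the input path and logging below are assumptions, not part of the commit): instead of writing files itself, the worker now resolves each source file to a map of package-relative destinations and their generated contents, returned through the Node-style callback.

const buildWorker = require( './build-worker' );

// Hypothetical invocation; any absolute path under packages/*/src would do.
buildWorker( '/abs/path/to/packages/components/src/index.js', ( error, built ) => {
	if ( error ) {
		process.exitCode = 1;
		console.error( error );
		return;
	}

	// `built` maps destinations relative to the packages directory to output
	// contents, e.g. 'components/build/index.js' and 'components/build/index.js.map'.
	for ( const [ relativeDestination, contents ] of Object.entries( built ) ) {
		console.log( `${ relativeDestination }: ${ contents.length } bytes` );
	}
} );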
135 changes: 111 additions & 24 deletions bin/packages/build.js
@@ -3,13 +3,14 @@
/**
* External dependencies
*/
const crypto = require( 'crypto' );
const fs = require( 'fs-extra' );
const path = require( 'path' );
const glob = require( 'fast-glob' );
const ProgressBar = require( 'progress' );
const workerFarm = require( 'worker-farm' );
const { Readable, Transform } = require( 'stream' );

const files = process.argv.slice( 2 );
const { Readable, Writable, Transform } = require( 'stream' );
const memoize = require( 'memize' );

/**
* Path to packages directory.
@@ -18,6 +19,37 @@ const files = process.argv.slice( 2 );
*/
const PACKAGES_DIR = path.resolve( __dirname, '../../packages' );

const { BUILD_CACHE_TARGET = 'node_modules/.cache/gutenbuild' } = process.env;

const BUILD_CACHE_PATH = path.resolve( process.cwd(), BUILD_CACHE_TARGET );

const BUILD_CONFIGURATION_FILES = [
// path.resolve( __dirname, './build.js' ),
// path.resolve( __dirname, './build-worker.js' ),
path.resolve( __dirname, './get-babel-config.js' ),
path.resolve( __dirname, './get-packages.js' ),
path.resolve( __dirname, './post-css-config.js' ),
path.resolve( __dirname, '../../babel.config.js' ),
path.resolve( __dirname, '../../package-lock.json' ),
path.resolve( __dirname, '../../assets/stylesheets/_colors.scss' ),
path.resolve( __dirname, '../../assets/stylesheets/_breakpoints.scss' ),
path.resolve( __dirname, '../../assets/stylesheets/_variables.scss' ),
path.resolve( __dirname, '../../assets/stylesheets/_mixins.scss' ),
path.resolve( __dirname, '../../assets/stylesheets/_animations.scss' ),
path.resolve( __dirname, '../../assets/stylesheets/_z-index.scss' ),
];

function getFilesChecksum( files ) {
return files.reduce(
( hash, file ) => hash.update( fs.readFileSync( file ) ),
crypto.createHash( 'md5' )
).digest( 'hex' );
}

const getPackageChecksum = memoize( ( file ) => getFilesChecksum( [
path.resolve( PACKAGES_DIR, getPackageName( file ), 'package.json' ),
] ) );

/**
* Get the package name for a specified file
*
@@ -68,9 +100,11 @@ let onFileComplete = () => {};

let stream;

if ( files.length ) {
const buildFiles = process.argv.slice( 2 );

if ( buildFiles.length ) {
stream = new Readable( { encoding: 'utf8' } );
files.forEach( ( file ) => stream.push( file ) );
buildFiles.forEach( ( file ) => stream.push( file ) );
stream.push( null );
stream = stream.pipe( createStyleEntryTransform() );
} else {
@@ -100,7 +134,7 @@ if ( files.length ) {
stream
.pause()
.on( 'data', ( file ) => {
bar.total = files.push( file );
bar.total = buildFiles.push( file );
} );

onFileComplete = () => {
@@ -110,29 +144,82 @@

const worker = workerFarm( require.resolve( './build-worker' ) );

const buildConfigurationChecksum = getFilesChecksum( BUILD_CONFIGURATION_FILES );

let ended = false,
complete = 0;

stream
.on( 'data', ( file ) => worker( file, ( error ) => {
onFileComplete();

if ( error ) {
// If an error occurs, the process can't be ended immediately since
// other workers are likely pending. Optimally, it would end at the
// earliest opportunity (after the current round of workers has had
// the chance to complete), but this is not made directly possible
// through `worker-farm`. Instead, ensure at least that when the
// process does exit, it exits with a non-zero code to reflect the
// fact that an error had occurred.
process.exitCode = 1;

console.error( error );
const build = new Transform( {
objectMode: true,
async transform( file, encoding, callback ) {
const checksum = crypto
.createHash( 'md5' )
.update( buildConfigurationChecksum )
.update( getPackageChecksum( file ) )
.update( getFilesChecksum( [ file ] ) )
.digest( 'hex' );

let cached;
try {
cached = await fs.readdir( path.resolve( BUILD_CACHE_PATH, checksum ) );
} catch ( error ) {}

if ( ! Array.isArray( cached ) ) {
cached = [];

await new Promise( ( resolve, reject ) => {
worker( file, async ( error, built ) => {
if ( error ) {
// If an error occurs, the process can't be ended immediately since
// other workers are likely pending. Optimally, it would end at the
// earliest opportunity (after the current round of workers has had
// the chance to complete), but this is not made directly possible
// through `worker-farm`. Instead, ensure at least that when the
// process does exit, it exits with a non-zero code to reflect the
// fact that an error had occurred.
process.exitCode = 1;

console.error( error );
reject( error );
} else {
for ( const [ relativeDestination, contents ] of Object.entries( built ) ) {
const absoluteDestination = path.resolve( BUILD_CACHE_PATH, checksum, relativeDestination );
await fs.mkdirp( path.dirname( absoluteDestination ) );
await fs.writeFile( absoluteDestination, contents );
cached.push( [ checksum, relativeDestination ] );
}

resolve();
}
} );
} );
}

if ( ended && ++complete === files.length ) {
cached = await fs.readdir( path.resolve( BUILD_CACHE_PATH, checksum ) );
cached.forEach( ( entry ) => this.push( [ checksum, entry ] ) );
if ( ended && ++complete === buildFiles.length ) {
workerFarm.end( worker );
}
} ) )

onFileComplete();
callback();
},
} );

const write = new Writable( {
objectMode: true,
async write( [ checksum, relativePath ], encoding, callback ) {
await fs.copy(
path.resolve( BUILD_CACHE_PATH, checksum, relativePath ),
path.resolve( PACKAGES_DIR, relativePath ),
);

callback();
},
} );

stream
.pipe( build )
.on( 'end', () => ended = true )
.resume();
.resume()
.pipe( write );
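
To recap the caching scheme implemented above, here is a simplified sketch of how a cache entry's key is derived; the helper mirrors getFilesChecksum, and the concrete paths are assumptions for illustration only.

const crypto = require( 'crypto' );
const fs = require( 'fs' );

function checksumOf( files ) {
	return files
		.reduce( ( hash, file ) => hash.update( fs.readFileSync( file ) ), crypto.createHash( 'md5' ) )
		.digest( 'hex' );
}

// A change to any of the three inputs below invalidates the cached output for
// this file: the shared build configuration, the owning package's
// package.json, and the source file itself.
const cacheKey = crypto
	.createHash( 'md5' )
	.update( checksumOf( [ 'babel.config.js' ] ) ) // plus the other configuration files
	.update( checksumOf( [ 'packages/components/package.json' ] ) )
	.update( checksumOf( [ 'packages/components/src/index.js' ] ) )
	.digest( 'hex' );

// Built outputs land under node_modules/.cache/gutenbuild/<cacheKey>/… (or
// BUILD_CACHE_TARGET, if set) and are copied back into packages/ on a hit.
console.log( cacheKey );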
1 change: 1 addition & 0 deletions package.json
@@ -113,6 +113,7 @@
"lint-staged": "8.1.5",
"lodash": "4.17.11",
"make-dir": "3.0.0",
"memize": "1.0.5",
"mkdirp": "0.5.1",
"node-sass": "4.12.0",
"node-watch": "0.6.0",
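
The new memize dependency backs the memoized getPackageChecksum above; a minimal illustration of its behavior (the example function and values are assumed):

const memize = require( 'memize' );

let calls = 0;
const hashOnce = memize( ( input ) => {
	calls++;
	return input.toUpperCase();
} );

// Repeated calls with the same argument reuse the first result, which is why
// each package's package.json is only hashed once per build run.
hashOnce( 'packages/components/package.json' );
hashOnce( 'packages/components/package.json' );
console.log( calls ); // 1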
