From 5a7c1a7af48316852b46d581966dd40e8fff42e4 Mon Sep 17 00:00:00 2001
From: Greg Magolan
Date: Fri, 3 Apr 2020 21:04:26 -0700
Subject: [PATCH] fix(builtin): fix for pkg_npm single directory artifact dep case

Also fixes the dependency of the .pack & .publish scripts on bazel-bin.

This fixes an issue where, when pkg_npm has a single dep which is a
directory artifact, the contents of that dep are not re-rooted. Re-rooting
to the output directory of the dep is the expected behaviour, as that is
the only way to create a valid npm package with the package.json at its
root. The fix is to not copy anything at all and instead forward the
directory artifact dep on as the DefaultInfo of pkg_npm.

Some refactoring was done so that the npm scripts are generated in a
separate action from the packager.js action, which is not run in the
single directory artifact dep case.
---
 internal/pkg_npm/BUILD.bazel             | 13 ++--
 internal/pkg_npm/npm_script_generator.js | 39 +++++++++++
 internal/pkg_npm/packager.js             | 25 +++----
 internal/pkg_npm/pkg_npm.bzl             | 85 +++++++++++++++++-------
 internal/pkg_npm/test/BUILD.bazel        | 27 +++++++-
 internal/pkg_npm/test/pkg_npm.spec.js    | 64 ++++++++++++------
 6 files changed, 190 insertions(+), 63 deletions(-)
 create mode 100644 internal/pkg_npm/npm_script_generator.js

diff --git a/internal/pkg_npm/BUILD.bazel b/internal/pkg_npm/BUILD.bazel
index 75f210358d..98207815fa 100644
--- a/internal/pkg_npm/BUILD.bazel
+++ b/internal/pkg_npm/BUILD.bazel
@@ -14,14 +14,15 @@ exports_files(["pkg_npm.bzl"])
 
 nodejs_binary(
     name = "packager",
-    data = [
-        "packager.js",
-        "//third_party/github.com/gjtorikian/isBinaryFile",
-        "@nodejs//:run_npm.sh.template",
-    ],
+    data = ["//third_party/github.com/gjtorikian/isBinaryFile"],
     entry_point = ":packager.js",
     install_source_map_support = False,
-    visibility = ["//visibility:public"],
+)
+
+nodejs_binary(
+    name = "npm_script_generator",
+    entry_point = ":npm_script_generator.js",
+    install_source_map_support = False,
 )
 
 filegroup(
diff --git a/internal/pkg_npm/npm_script_generator.js b/internal/pkg_npm/npm_script_generator.js
new file mode 100644
index 0000000000..098e3d49d6
--- /dev/null
+++ b/internal/pkg_npm/npm_script_generator.js
@@ -0,0 +1,39 @@
+/**
+ * @license
+ * Copyright 2018 The Bazel Authors. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ *
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * @fileoverview This script generates npm pack & publish shell scripts from
+ * a template.
+ */ +'use strict'; + +const fs = require('fs'); + +function main(args) { + const [outDir, packPath, publishPath, runNpmTemplatePath] = args; + const npmTemplate = fs.readFileSync(runNpmTemplatePath, {encoding: 'utf-8'}); + const cwd = process.cwd(); + if (/[\//]sandbox[\//]/.test(cwd)) { + console.error('Error: npm_script_generator must be run with no sandbox'); + process.exit(1); + } + fs.writeFileSync(packPath, npmTemplate.replace('TMPL_args', `pack "${cwd}/${outDir}"`)); + fs.writeFileSync(publishPath, npmTemplate.replace('TMPL_args', `publish "${cwd}/${outDir}"`)); +} + +if (require.main === module) { + process.exitCode = main(process.argv.slice(2)); +} diff --git a/internal/pkg_npm/packager.js b/internal/pkg_npm/packager.js index 252de4613c..5ee4381071 100644 --- a/internal/pkg_npm/packager.js +++ b/internal/pkg_npm/packager.js @@ -14,6 +14,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +/** + * @fileoverview This script copies files and nested packages into the output + * directory of the pkg_npm rule. + */ +'use strict'; + const fs = require('fs'); const path = require('path'); const isBinary = require('isbinaryfile').isBinaryFileSync; @@ -66,9 +72,8 @@ function unquoteArgs(s) { function main(args) { args = fs.readFileSync(args[0], {encoding: 'utf-8'}).split('\n').map(unquoteArgs); const - [outDir, baseDir, srcsArg, binDir, genDir, depsArg, packagesArg, substitutionsArg, packPath, - publishPath, replaceWithVersion, stampFile, vendorExternalArg, hideBuildFilesArg, - runNpmTemplatePath] = args; + [outDir, baseDir, srcsArg, binDir, genDir, depsArg, packagesArg, substitutionsArg, + replaceWithVersion, stampFile, vendorExternalArg, hideBuildFilesArg] = args; const renameBuildFiles = parseInt(hideBuildFilesArg); const substitutions = [ @@ -76,7 +81,7 @@ function main(args) { [/(#|\/\/)\s+BEGIN-INTERNAL[\w\W]+?END-INTERNAL/g, ''], ]; const rawReplacements = JSON.parse(substitutionsArg); - for (let key of Object.keys(rawReplacements)) { + for (const key of Object.keys(rawReplacements)) { substitutions.push([new RegExp(key, 'g'), rawReplacements[key]]) } // Replace version last so that earlier substitutions can add @@ -123,7 +128,7 @@ function main(args) { } function outPath(f) { - for (ext of vendorExternalArg.split(',').filter(s => !!s)) { + for (const ext of vendorExternalArg.split(',').filter(s => !!s)) { const candidate = path.join(binDir, 'external', ext); if (!path.relative(candidate, f).startsWith('..')) { return path.join(outDir, path.relative(candidate, f)); @@ -145,7 +150,7 @@ function main(args) { } // Deps like bazel-bin/baseDir/my/path is copied to outDir/my/path. 
- for (dep of depsArg.split(',').filter(s => !!s)) { + for (const dep of depsArg.split(',').filter(s => !!s)) { try { copyWithReplace(dep, outPath(dep), substitutions, renameBuildFiles); } catch (e) { @@ -156,7 +161,7 @@ function main(args) { // package contents like bazel-bin/baseDir/my/directory/* is // recursively copied to outDir/my/* - for (pkg of packagesArg.split(',').filter(s => !!s)) { + for (const pkg of packagesArg.split(',').filter(s => !!s)) { const outDir = outPath(path.dirname(pkg)); function copyRecursive(base, file) { file = file.replace(/\\/g, '/'); @@ -167,7 +172,7 @@ function main(args) { }); } else { function outFile() { - for (ext of vendorExternalArg.split(',').filter(s => !!s)) { + for (const ext of vendorExternalArg.split(',').filter(s => !!s)) { if (file.startsWith(`external/${ext}`)) { return file.substr(`external/${ext}`.length); } @@ -182,10 +187,6 @@ function main(args) { copyRecursive(pkg, f); }); } - - const npmTemplate = fs.readFileSync(require.resolve(runNpmTemplatePath), {encoding: 'utf-8'}); - fs.writeFileSync(packPath, npmTemplate.replace('TMPL_args', `pack "${outDir}"`)); - fs.writeFileSync(publishPath, npmTemplate.replace('TMPL_args', `publish "${outDir}"`)); } if (require.main === module) { diff --git a/internal/pkg_npm/pkg_npm.bzl b/internal/pkg_npm/pkg_npm.bzl index 0b87b906a2..f724bcbe15 100644 --- a/internal/pkg_npm/pkg_npm.bzl +++ b/internal/pkg_npm/pkg_npm.bzl @@ -7,7 +7,6 @@ to the `deps` of one of their targets. """ load("//:providers.bzl", "DeclarationInfo", "JSNamedModuleInfo", "LinkablePackageInfo", "NodeContextInfo") -load("//internal/common:path_utils.bzl", "strip_external") _DOC = """The pkg_npm rule creates a directory containing a publishable npm artifact. @@ -68,14 +67,12 @@ $ bazel run @nodejs//:npm_node_repositories who $ bazel run :my_package.publish ``` -> Note that the `.pack` and `.publish` commands require that the `bazel-out` symlink exists in your project. -> Also, you must run the command from the workspace root directory containing the `bazel-out` symlink. 
-
 You can pass arguments to npm by escaping them from Bazel using a double-hyphen, for example:
 `bazel run my_package.publish -- --tag=next`
 """
 
+# Used in angular/angular /packages/bazel/src/ng_package/ng_package.bzl
 PKG_NPM_ATTRS = {
     "package_name": attr.string(
         doc = """Optional package_name that this npm package may be imported as.""",
@@ -119,6 +116,11 @@ PKG_NPM_ATTRS = {
         doc = """Other targets which produce files that should be included in the package, such as `rollup_bundle`""",
         allow_files = True,
     ),
+    "_npm_script_generator": attr.label(
+        default = Label("//internal/pkg_npm:npm_script_generator"),
+        cfg = "host",
+        executable = True,
+    ),
     "_packager": attr.label(
         default = Label("//internal/pkg_npm:packager"),
         cfg = "host",
@@ -130,6 +132,7 @@ PKG_NPM_ATTRS = {
     ),
 }
 
+# Used in angular/angular /packages/bazel/src/ng_package/ng_package.bzl
 PKG_NPM_OUTPUTS = {
     "pack": "%{name}.pack",
     "publish": "%{name}.publish",
@@ -141,6 +144,7 @@ PKG_NPM_OUTPUTS = {
 def _filter_out_external_files(ctx, files, package_path):
     result = []
     for file in files:
+        # NB: package_path may be an empty string
         if file.short_path.startswith(package_path) and not file.short_path.startswith("../"):
             result.append(file.path)
         else:
@@ -149,7 +153,8 @@
             result.append(file.path)
     return result
 
-def create_package(ctx, deps_sources, nested_packages):
+# Used in angular/angular /packages/bazel/src/ng_package/ng_package.bzl
+def create_package(ctx, deps_files, nested_packages):
     """Creates an action that produces the npm package.
 
     It copies srcs and deps into the artifact and produces the .pack and .publish
@@ -157,8 +162,8 @@ def create_package(ctx, deps_sources, nested_packages):
 
     Args:
       ctx: the skylark rule context
-      deps_sources: Files which have been specified as dependencies. Usually ".js" or ".d.ts"
-          generated files.
+      deps_files: list of files to include in the package which have been
+          specified as dependencies
       nested_packages: list of TreeArtifact outputs from other actions which are
           to be nested inside this package
 
@@ -167,6 +172,19 @@
     """
     stamp = ctx.attr.node_context_data[NodeContextInfo].stamp
+
+    all_files = deps_files + ctx.files.srcs
+
+    if not stamp and len(all_files) == 1 and all_files[0].is_directory and len(ctx.files.nested_packages) == 0:
+        # Special case where there is a single dep that is a directory artifact and there are no
+        # source files or nested_packages; in that case we assume the package is contained within
+        # that single directory and there is no work to do
+        package_dir = all_files[0]
+
+        _create_npm_scripts(ctx, package_dir)
+
+        return package_dir
+
     package_dir = ctx.actions.declare_directory(ctx.label.name)
     package_path = ctx.label.package
 
@@ -174,7 +192,7 @@
     # target. Also include files from external repositories that explicitly specified in
     # the vendor_external list. We only want to package deps files which are inside of the
    # current package unless explicitely specified.
- filtered_deps_sources = _filter_out_external_files(ctx, deps_sources, package_path) + filtered_deps_sources = _filter_out_external_files(ctx, deps_files, package_path) args = ctx.actions.args() args.use_param_file("%s", use_always = True) @@ -186,17 +204,12 @@ def create_package(ctx, deps_sources, nested_packages): args.add_joined(filtered_deps_sources, join_with = ",", omit_if_empty = False) args.add_joined([p.path for p in nested_packages], join_with = ",", omit_if_empty = False) args.add(ctx.attr.substitutions) - args.add_all([ctx.outputs.pack.path, ctx.outputs.publish.path]) args.add(ctx.attr.replace_with_version) args.add(ctx.version_file.path if stamp else "") args.add_joined(ctx.attr.vendor_external, join_with = ",", omit_if_empty = False) args.add("1" if ctx.attr.hide_build_files else "0") - # require.resolve expects the path to start with the workspace name and not "external" - run_npm_template_path = strip_external(ctx.file._run_npm_template.path) - args.add(run_npm_template_path) - - inputs = ctx.files.srcs + deps_sources + nested_packages + [ctx.file._run_npm_template] + inputs = ctx.files.srcs + deps_files + nested_packages # The version_file is an undocumented attribute of the ctx that lets us read the volatile-status.txt file # produced by the --workspace_status_command. That command will be executed whenever @@ -210,34 +223,60 @@ def create_package(ctx, deps_sources, nested_packages): mnemonic = "AssembleNpmPackage", executable = ctx.executable._packager, inputs = inputs, - outputs = [package_dir, ctx.outputs.pack, ctx.outputs.publish], + outputs = [package_dir], arguments = [args], ) + + _create_npm_scripts(ctx, package_dir) + return package_dir +def _create_npm_scripts(ctx, package_dir): + args = ctx.actions.args() + args.add_all([ + package_dir.path, + ctx.outputs.pack.path, + ctx.outputs.publish.path, + ctx.file._run_npm_template.path, + ]) + + ctx.actions.run( + progress_message = "Generating npm pack & publish scripts", + mnemonic = "GenerateNpmScripts", + executable = ctx.executable._npm_script_generator, + inputs = [ctx.file._run_npm_template, package_dir], + outputs = [ctx.outputs.pack, ctx.outputs.publish], + arguments = [args], + # Must be run local (no sandbox) so that the pwd is the actual execroot + # in the script which is used to generate the path in the pack & publish + # scripts. + execution_requirements = {"local": "1"}, + ) + def _pkg_npm(ctx): - sources_depsets = [] + deps_files_depsets = [] for dep in ctx.attr.deps: # Collect whatever is in the "data" - sources_depsets.append(dep.data_runfiles.files) + deps_files_depsets.append(dep.data_runfiles.files) # Only collect DefaultInfo files (not transitive) - sources_depsets.append(dep.files) + deps_files_depsets.append(dep.files) # All direct & transitive JavaScript-producing deps # TODO: switch to JSModuleInfo when it is available if JSNamedModuleInfo in dep: - sources_depsets.append(dep[JSNamedModuleInfo].sources) + deps_files_depsets.append(dep[JSNamedModuleInfo].sources) # Include all transitive declerations if DeclarationInfo in dep: - sources_depsets.append(dep[DeclarationInfo].transitive_declarations) - - sources = depset(transitive = sources_depsets) + deps_files_depsets.append(dep[DeclarationInfo].transitive_declarations) # Note: to_list() should be called once per rule! 
-    package_dir = create_package(ctx, sources.to_list(), ctx.files.nested_packages)
+    deps_files = depset(transitive = deps_files_depsets).to_list()
+
+    package_dir = create_package(ctx, deps_files, ctx.files.nested_packages)
+
     package_dir_depset = depset([package_dir])
 
     result = [
diff --git a/internal/pkg_npm/test/BUILD.bazel b/internal/pkg_npm/test/BUILD.bazel
index 92704596f5..cd21ec3bd8 100644
--- a/internal/pkg_npm/test/BUILD.bazel
+++ b/internal/pkg_npm/test/BUILD.bazel
@@ -60,10 +60,35 @@ pkg_npm(
     ],
 )
 
+pkg_npm(
+    name = "test_noop_pkg",
+    # Special case where there is a single dep that is a directory artifact;
+    # in that case we assume the package is contained within that single directory
+    # and the pkg_npm rule does not need to copy any files
+    deps = [":rollup/bundle/subdirectory"],
+)
+
+pkg_npm(
+    name = "test_noop2_pkg",
+    # Special case where there is a single src that is a directory artifact;
+    # in that case we assume the package is contained within that single directory
+    # and the pkg_npm rule does not need to copy any files
+    srcs = [":rollup/bundle/subdirectory"],
+)
+
 jasmine_node_test(
     name = "test",
     srcs = ["pkg_npm.spec.js"],
-    data = [":test_pkg"],
+    data = [
+        ":test_noop2_pkg",
+        ":test_noop_pkg",
+        ":test_pkg",
+    ],
+    templated_args = [
+        "$(rootpath :test_pkg)",
+        "$(rootpath :test_noop_pkg)",
+        "$(rootpath :test_noop2_pkg)",
+    ],
 )
 
 genrule(
diff --git a/internal/pkg_npm/test/pkg_npm.spec.js b/internal/pkg_npm/test/pkg_npm.spec.js
index 99e5f00727..bcc22da6b0 100644
--- a/internal/pkg_npm/test/pkg_npm.spec.js
+++ b/internal/pkg_npm/test/pkg_npm.spec.js
@@ -1,50 +1,62 @@
 const fs = require('fs');
-const path = require('path');
 const runfiles = require(process.env['BAZEL_NODE_RUNFILES_HELPER']);
+const args = process.argv.slice(2);
+const testPkgRootpath = args[0];
+const testNoopPkgRootpath = args[1];
+const testNoop2PkgRootpath = args[2];
+const testPkgPath = runfiles.resolveWorkspaceRelative(testPkgRootpath);
+const testNoopPkgPath = runfiles.resolveWorkspaceRelative(testNoopPkgRootpath);
+const testNoop2PkgPath = runfiles.resolveWorkspaceRelative(testNoop2PkgRootpath);
 
-function read(p) {
-  // We want to look up the test_pkg directory artifact in the runfiles.
-  // The manifest does have an entry for it, but since it's a directory we cannot use
-  // require.resolve to lookup that entry. So instead we lookup the sibling file in runfiles and
-  // bootstrap the filesystem lookup from there.
-  const dir = path.dirname(runfiles.resolvePackageRelative('test_loader.js'));
-  return fs.readFileSync(path.join(dir, 'test_pkg', p), {encoding: 'utf-8'}).trim();
+function readFromPkg(p) {
+  return fs.readFileSync(`${testPkgPath}/${p}`, {encoding: 'utf-8'}).trim();
 }
 
-describe('pkg_npm srcs', () => {
+function readFromNoopPkg(p) {
+  return fs.readFileSync(`${testNoopPkgPath}/${p}`, {encoding: 'utf-8'}).trim();
+}
+
+function readFromNoop2Pkg(p) {
+  return fs.readFileSync(`${testNoop2PkgPath}/${p}`, {encoding: 'utf-8'}).trim();
+}
+
+describe('pkg_npm', () => {
+  it('creates an output directory named after the rule', () => {
+    expect(testPkgRootpath).toEqual('internal/pkg_npm/test/test_pkg');
+  });
   it('copies srcs and replaces contents', () => {
-    expect(read('some_file')).toEqual('replaced');
+    expect(readFromPkg('some_file')).toEqual('replaced');
   });
   it('copies dependencies from bazel-genfiles', () => {
-    expect(read('a_dep')).toEqual('a_dep content');
+    expect(readFromPkg('a_dep')).toEqual('a_dep content');
   });
   it('copies files from other packages', () => {
-    expect(read('dependent_file')).toEqual('dependent_file content');
+    expect(readFromPkg('dependent_file')).toEqual('dependent_file content');
   });
   it('copies js files from ts_library', () => {
-    expect(read('foo.js')).toContain('exports.a = \'\';');
+    expect(readFromPkg('foo.js')).toContain('exports.a = \'\';');
   });
   it('copies declaration files from ts_library', () => {
-    expect(read('foo.d.ts')).toContain('export declare const a: string;');
+    expect(readFromPkg('foo.d.ts')).toContain('export declare const a: string;');
   });
   it('copies data dependencies', () => {
-    expect(read('data.json')).toEqual('[]');
+    expect(readFromPkg('data.json')).toEqual('[]');
   });
   it('replaced 0.0.0-PLACEHOLDER', () => {
-    expect(JSON.parse(read('package.json')).version).toEqual('1.2.3');
+    expect(JSON.parse(readFromPkg('package.json')).version).toEqual('1.2.3');
   });
   it('copies files from deps', () => {
-    expect(read('bundle.min.js')).toBe('bundle content');
+    expect(readFromPkg('bundle.min.js')).toBe('bundle content');
   });
   it('copies files from external workspace if included in srcs', () => {
-    expect(read('vendored_external_file')).toEqual('vendored_external_file content');
+    expect(readFromPkg('vendored_external_file')).toEqual('vendored_external_file content');
   });
   it('copies js files from external workspace ts_library if included in vendor_external', () => {
-    expect(read('external.js')).toContain('exports.b = \'\';');
+    expect(readFromPkg('external.js')).toContain('exports.b = \'\';');
   });
   it('copies declaration files from external workspace ts_library if included in vendor_external', () => {
-    expect(read('external.d.ts')).toContain('export declare const b: string;');
+    expect(readFromPkg('external.d.ts')).toContain('export declare const b: string;');
   });
 
   it('vendors external workspaces', () => {
@@ -52,6 +64,16 @@ describe('pkg_npm srcs', () => {
     // has to live in the root of the repository in order for external/foo to appear inside it
   });
   it('copies entire contents of directories',
-     () => {expect(read('rollup/bundle/subdirectory/index.js'))
+     () => {expect(readFromPkg('rollup/bundle/subdirectory/index.js'))
          .toContain(`const a = '';\n\nexport { a }`)});
+  it('does not create an output directory if single dep is a directory artifact', () => {
+    expect(testNoopPkgRootpath).toEqual('internal/pkg_npm/test/rollup/bundle/subdirectory');
+  });
+  it('re-roots to directory artifact if single dep is a directory artifact',
+     () => {expect(readFromNoopPkg('index.js')).toContain(`const a = '';\n\nexport { a }`)});
+  it('does not create an output directory if single src is a directory artifact', () => {
+    expect(testNoop2PkgRootpath).toEqual('internal/pkg_npm/test/rollup/bundle/subdirectory');
+  });
+  it('re-roots to directory artifact if single src is a directory artifact',
+     () => {expect(readFromNoop2Pkg('index.js')).toContain(`const a = '';\n\nexport { a }`)});
 });
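
Illustration of the case this change targets, as a minimal BUILD sketch. It is not part of the patch; the target names are assumptions, and it presumes a rollup_bundle (with output_dir = True) whose single output directory already contains the finished package contents, including its package.json.

load("@build_bazel_rules_nodejs//:index.bzl", "pkg_npm")
load("@npm//@bazel/rollup:index.bzl", "rollup_bundle")

# Assumed: emits a single directory artifact whose contents, including
# package.json, form the ready-to-publish npm package.
rollup_bundle(
    name = "bundle",
    entry_point = "index.js",
    output_dir = True,
)

pkg_npm(
    name = "pkg",
    # Single dep that is a directory artifact, with no srcs or nested_packages:
    # with this fix, pkg_npm forwards that directory as its output instead of
    # copying files, and the generated .pack/.publish scripts point at it.
    deps = [":bundle"],
)

In this sketch, `bazel run :pkg.pack` and `bazel run :pkg.publish` use the absolute execroot path recorded by npm_script_generator, so they no longer depend on the bazel-bin symlink being present in the workspace.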