Skip to content

Commit

Permalink
Fix 607 Use s3ForcePathStyle Appropriately (#650)
Browse files Browse the repository at this point in the history
* Refactor S3 Setup detect function
- Modified function signature. Instead of augmenting an object (side effect) the function now gets an options object and returns a configuration object.
- Cleaned up the code used to auto-detect the bucket and region from the s3 url.
- Added comments.
- Increased test coverage.
- All tests pass.

* Fixing issue 607
- Modified versioning to add bucket name to hosted_path when s3ForcePathStyle is true.
- Modified s3_setup to remove bucket name from prefix when s3ForcePathStyle is true.
- Added test coverage for s3ForcePathStyle cases.

* Fixed console logs to report the paths actually used (which may not be at amazonaws.com).

* Various tweaks to tests so that they pass in various configurations.

* Modified test to account for path issues in evaluation on windows latest with node 20, 22.
  • Loading branch information
ronilan authored Jun 29, 2024
1 parent 9ad41ee commit 60d6a89
Show file tree
Hide file tree
Showing 11 changed files with 307 additions and 192 deletions.
9 changes: 4 additions & 5 deletions lib/info.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,26 +11,25 @@ const s3_setup = require('./util/s3_setup.js');
function info(gyp, argv, callback) {
const package_json = gyp.package_json;
const opts = versioning.evaluate(package_json, gyp.opts);
const config = {};
s3_setup.detect(opts, config);
const config = s3_setup.detect(opts);
const s3 = s3_setup.get_s3(config);
const s3_opts = {
Bucket: config.bucket,
Prefix: config.prefix
};
s3.listObjects(s3_opts, (err, meta) => {
if (err && err.code === 'NotFound') {
return callback(new Error('[' + package_json.name + '] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + config.prefix));
return callback(new Error('[' + package_json.name + '] Not found: https://' + opts.hosted_path));
} else if (err) {
return callback(err);
} else {
log.verbose(JSON.stringify(meta, null, 1));
if (meta && meta.Contents) {
if (meta && meta.Contents.length) {
meta.Contents.forEach((obj) => {
console.log(obj.Key);
});
} else {
console.error('[' + package_json.name + '] No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + config.prefix);
console.error('[' + package_json.name + '] Not found: No objects at https://' + opts.hosted_path);
}
return callback();
}
Expand Down
5 changes: 2 additions & 3 deletions lib/publish.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,7 @@ function publish(gyp, argv, callback) {
}

log.info('publish', 'Detecting s3 credentials');
const config = {};
s3_setup.detect(opts, config);
const config = s3_setup.detect(opts);
const s3 = s3_setup.get_s3(config);

const key_name = url.resolve(config.prefix, opts.package_name);
Expand Down Expand Up @@ -59,7 +58,7 @@ function publish(gyp, argv, callback) {
}
if (resp) log.info('publish', 's3 putObject response: "' + JSON.stringify(resp) + '"');
log.info('publish', 'successfully put object');
console.log('[' + package_json.name + '] published to ' + opts.hosted_path);
console.log('[' + package_json.name + '] Success: published to ' + opts.hosted_path);
return callback();
});
} catch (err3) {
Expand Down
7 changes: 3 additions & 4 deletions lib/unpublish.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,7 @@ function unpublish(gyp, argv, callback) {
const package_json = gyp.package_json;
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
const config = {};
s3_setup.detect(opts, config);
const config = s3_setup.detect(opts);
const s3 = s3_setup.get_s3(config);
const key_name = url.resolve(config.prefix, opts.package_name);
const s3_opts = {
Expand All @@ -24,7 +23,7 @@ function unpublish(gyp, argv, callback) {
};
s3.headObject(s3_opts, (err, meta) => {
if (err && err.code === 'NotFound') {
console.log('[' + package_json.name + '] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
console.log('[' + package_json.name + '] Not found: ' + opts.hosted_tarball);
return callback();
} else if (err) {
return callback(err);
Expand All @@ -33,7 +32,7 @@ function unpublish(gyp, argv, callback) {
s3.deleteObject(s3_opts, (err2, resp) => {
if (err2) return callback(err2);
log.info(JSON.stringify(resp));
console.log('[' + package_json.name + '] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
console.log('[' + package_json.name + '] Success: removed ' + opts.hosted_tarball);
return callback();
});
}
Expand Down
56 changes: 40 additions & 16 deletions lib/util/s3_setup.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,33 +6,57 @@ const url = require('url');
const fs = require('fs');
const path = require('path');

module.exports.detect = function(opts, config) {
module.exports.detect = function(opts) {
const config = {};

const to = opts.hosted_path;
const uri = url.parse(to);
config.prefix = (!uri.pathname || uri.pathname === '/') ? '' : uri.pathname.replace('/', '');

if (opts.bucket && opts.region) {
// use user defined settings for host, region, bucket
config.endpoint = opts.host;
config.bucket = opts.bucket;
config.region = opts.region;
config.endpoint = opts.host;
config.s3ForcePathStyle = opts.s3ForcePathStyle;

// if using s3ForcePathStyle the bucket is part of the http object path
// but not the S3 key prefix path.
// remove it
const bucketPath = config.s3ForcePathStyle ? `/${config.bucket}/` : '/';
config.prefix = (!uri.pathname || uri.pathname === bucketPath) ? '' : uri.pathname.replace(bucketPath, '');
} else {
// auto detect region and bucket from url
// only virtual-hosted–style access can be auto detected
// the uri will have the following format:
// https://bucket-name.s3.Region.amazonaws.com/key-name (dot Region)
// or in some legacy regions of this format:
// https://bucket-name.s3-Region.amazonaws.com/key-name (dash Region)
const parts = uri.hostname.split('.s3');
const bucket = parts[0];
if (!bucket) {
return;
}
if (!config.bucket) {
config.bucket = bucket;

// there is nothing before the .s3
// not a valid s3 virtual host bucket url
if (parts.length === 1) {
throw new Error('Could not parse s3 bucket name from virtual host url.');
}
if (!config.region) {
const region = parts[1].slice(1).split('.')[0];
if (region === 'amazonaws') {
config.region = 'us-east-1';
} else {
config.region = region;
}

// everything before .s3 is the bucket
config.bucket = parts[0];

// from everything that comes after the s3
// first char is connecting dot or dash
// everything up to the domain should be the region name
const region = parts[1].slice(1).split('.')[0];
// if user provided url does not include region, default to us-east-1.
if (region === 'amazonaws') {
config.region = 'us-east-1';
} else {
config.region = region;
}

config.prefix = (!uri.pathname || uri.pathname === '/') ? '' : uri.pathname.replace('/', '');
}

return config;
};

module.exports.get_s3 = function(config) {
Expand Down
8 changes: 7 additions & 1 deletion lib/util/versioning.js
Original file line number Diff line number Diff line change
Expand Up @@ -329,7 +329,13 @@ module.exports.evaluate = function(package_json, options, napi_build_version) {
const package_name = package_json.binary.package_name ? package_json.binary.package_name : default_package_name;
opts.package_name = eval_template(package_name, opts);
opts.staged_tarball = path.join('build/stage', opts.remote_path, opts.package_name);
opts.hosted_path = url.resolve(opts.host, opts.remote_path);
// when using s3ForcePathStyle the bucket is part of the http object path
// add it
if (opts.s3ForcePathStyle) {
opts.hosted_path = url.resolve(opts.host, drop_double_slashes(`${opts.bucket}/${opts.remote_path}`));
} else {
opts.hosted_path = url.resolve(opts.host, opts.remote_path);
}
opts.hosted_tarball = url.resolve(opts.hosted_path, opts.package_name);
return opts;
};
98 changes: 8 additions & 90 deletions test/build.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@ const test = require('tape');
const run = require('./run.util.js');
const existsSync = require('fs').existsSync || require('path').existsSync;
const fs = require('fs');
const os = require('os');
const { rimrafSync } = require('rimraf');
const rm = require('rimraf');

const path = require('path');
const napi = require('../lib/util/napi.js');
const versioning = require('../lib/util/versioning.js');
Expand All @@ -14,10 +14,6 @@ const tar = require('tar');
const localVer = [versioning.get_runtime_abi('node'), process.platform, process.arch].join('-');
const SOEXT = { 'darwin': 'dylib', 'linux': 'so', 'win32': 'dll' }[process.platform];

if (process.env.node_pre_gyp_mock_s3) {
process.env.node_pre_gyp_mock_s3 = `${os.tmpdir()}/mock`;
}

// The list of different sample apps that we use to test
const apps = [
{
Expand Down Expand Up @@ -116,6 +112,7 @@ test(appOne.name + ' passes --dist-url down to node-gyp via npm ' + appOne.args,

apps.forEach((app) => {


if (app.name === 'app7' && !napi.get_napi_version()) return;

// clear out entire binding directory
Expand All @@ -125,7 +122,7 @@ apps.forEach((app) => {
test('cleanup of app', (t) => {
const binding_directory = path.join(__dirname, app.name, 'lib/binding');
if (fs.existsSync(binding_directory)) {
rimrafSync(binding_directory);
rm.rimrafSync(binding_directory);
}
t.end();
});
Expand All @@ -140,7 +137,8 @@ apps.forEach((app) => {
test(app.name + ' configures with unparsed options ' + app.args, (t) => {
run('node-pre-gyp', 'configure', '--loglevel=info -- -Dfoo=bar', app, {}, (err, stdout, stderr) => {
t.ifError(err);
t.equal(stdout, '');
const clean = stdout.replaceAll('\n', '')
t.equal(clean, '');
t.ok(stderr.search(/(gyp info spawn args).*(-Dfoo=bar)/) > -1);
t.end();
});
Expand Down Expand Up @@ -223,6 +221,8 @@ apps.forEach((app) => {
});
});



test(app.name + ' packages ' + app.args, (t) => {
run('node-pre-gyp', 'package', '', app, {}, (err) => {
t.ifError(err);
Expand Down Expand Up @@ -276,88 +276,6 @@ apps.forEach((app) => {
});
});

const env = process.env;
if (env.AWS_ACCESS_KEY_ID || env.node_pre_gyp_accessKeyId || env.node_pre_gyp_mock_s3) {

test(app.name + ' publishes ' + app.args, (t) => {
run('node-pre-gyp', 'unpublish publish', '', app, {}, (err, stdout) => {
t.ifError(err);
t.notEqual(stdout, '');
t.end();
});
});

test(app.name + ' info shows it ' + app.args, (t) => {
run('node-pre-gyp', 'reveal', 'package_name', app, {}, (err, stdout) => {
t.ifError(err);
let package_name = stdout.trim();
if (package_name.indexOf('\n') !== -1) { // take just the first line
package_name = package_name.substr(0, package_name.indexOf('\n'));
}
run('node-pre-gyp', 'info', '', app, {}, (err2, stdout2) => {
t.ifError(err2);
t.stringContains(stdout2, package_name);
t.end();
});
});
});

test(app.name + ' can be uninstalled ' + app.args, (t) => {
run('node-pre-gyp', 'clean', '', app, {}, (err, stdout) => {
t.ifError(err);
t.notEqual(stdout, '');
t.end();
});
});

test(app.name + ' can be installed via remote ' + app.args, (t) => {
const opts = {
cwd: path.join(__dirname, app.name),
npg_debug: false
};
run('npm', 'install', '--fallback-to-build=false', app, opts, (err, stdout) => {
t.ifError(err);
t.notEqual(stdout, '');
t.end();
});
});

test(app.name + ' can be reinstalled via remote ' + app.args, (t) => {
const opts = {
cwd: path.join(__dirname, app.name),
npg_debug: false
};
run('npm', 'install', '--update-binary --fallback-to-build=false', app, opts, (err, stdout) => {
t.ifError(err);
t.notEqual(stdout, '');
t.end();
});
});

test(app.name + ' via remote passes tests ' + app.args, (t) => {
const opts = {
cwd: path.join(__dirname, app.name),
npg_debug: false
};
run('npm', 'install', '', app, opts, (err, stdout) => {
t.ifError(err);
t.notEqual(stdout, '');
t.end();
});
});

test(app.name + ' unpublishes ' + app.args, (t) => {
run('node-pre-gyp', 'unpublish', '', app, {}, (err, stdout) => {
t.ifError(err);
t.notEqual(stdout, '');
t.end();
});
});

} else {
test.skip(app.name + ' publishes ' + app.args, () => {});
}

// note: the above test will result in a non-runnable binary, so the below test must succeed otherwise all following tests will fail

test(app.name + ' builds with custom --target ' + app.args, (t) => {
Expand Down
38 changes: 20 additions & 18 deletions test/fetch.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,25 +3,25 @@
const fs = require('fs');

const test = require('tape');
const { mockS3Http } = require('../lib/node-pre-gyp.js');
const nock = require('nock');
const install = require('../lib/install.js');

test.onFinish(() => {
mockS3Http('on');
});

test('should follow redirects', (t) => {
// clear existing mocks
const was = mockS3Http('off');
// always use mock host for this test
const origin = 'https://npg-mock-bucket.s3.us-east-1.amazonaws.com';

// Dummy tar.gz data, contains a blank directory.
// dummy tar.gz data, contains a blank directory.
const targz = 'H4sICPr8u1oCA3kudGFyANPTZ6A5MDAwMDc1VQDTZhAaCGA0hGNobGRqZm5uZmxupGBgaGhiZsKgYMpAB1BaXJJYBHRKYk5pcioedeUZqak5+D2J5CkFhlEwCkbBKBjkAAAyG1ofAAYAAA==';
// Mock an HTTP redirect
const scope = nock('https://mapbox-node-pre-gyp-public-testing-bucket.s3.us-east-1.amazonaws.com')


// clear existing mocks
nock.cleanAll();
// and create mock for an HTTP redirect
const scope = nock(origin)
.persist()
.get(/\/node-pre-gyp\/node-pre-gyp-test-app1\/v0.1.0\/Release\/node-v\d+-\S+.tar.gz/)
.reply(302, '', {
'Location': 'https://mapbox-node-pre-gyp-public-testing-bucket.s3.us-east-1.amazonaws.com/otherapp.tar.gz'
'Location': `${origin}/otherapp.tar.gz`
})
.get('/otherapp.tar.gz')
.reply(200, Buffer.from(targz, 'base64'));
Expand All @@ -33,18 +33,20 @@ test('should follow redirects', (t) => {
}
};

// cd into app directory
process.chdir('test/app1');

// commands no longer read package.json so it must be passed to them.
// get data from package.json
opts.package_json = JSON.parse(fs.readFileSync('./package.json'));
// data in package.json may be changed from mock to real bucket by user.
// make sure host is always pointing to the mocked bucket as set above
opts.package_json.binary.host = origin;

console.log('mockS3Http was', was, 'is', mockS3Http('get'));

// run the command by calling the function
install(opts, [], (err) => {
t.ifError(err); // Worked fine
t.ok(scope.isDone()); // All mocks consumed
t.ifError(err); // worked fine
t.ok(scope.isDone()); // all mocks consumed
nock.cleanAll(); // clean this mock
t.end();
});

});

Loading

0 comments on commit 60d6a89

Please sign in to comment.