diff --git a/lib/install.js b/lib/install.js index 82705428..92c80d57 100644 --- a/lib/install.js +++ b/lib/install.js @@ -11,7 +11,7 @@ const existsAsync = fs.exists || path.exists; const versioning = require('./util/versioning.js'); const napi = require('./util/napi.js'); const s3_setup = require('./util/s3_setup.js'); -const url = require('url'); +const url = require('./util/url.js'); // for fetching binaries const fetch = require('node-fetch'); const tar = require('tar'); diff --git a/lib/publish.js b/lib/publish.js index 23ef53b8..3727f2b3 100644 --- a/lib/publish.js +++ b/lib/publish.js @@ -11,7 +11,7 @@ const versioning = require('./util/versioning.js'); const napi = require('./util/napi.js'); const s3_setup = require('./util/s3_setup.js'); const existsAsync = fs.exists || path.exists; -const url = require('url'); +const url = require('./util/url.js'); function publish(gyp, argv, callback) { const package_json = gyp.package_json; diff --git a/lib/unpublish.js b/lib/unpublish.js index cb198e39..99da3abb 100644 --- a/lib/unpublish.js +++ b/lib/unpublish.js @@ -8,7 +8,7 @@ const log = require('./util/log.js'); const versioning = require('./util/versioning.js'); const napi = require('./util/napi.js'); const s3_setup = require('./util/s3_setup.js'); -const url = require('url'); +const url = require('./util/url.js'); function unpublish(gyp, argv, callback) { const package_json = gyp.package_json; diff --git a/lib/util/s3_setup.js b/lib/util/s3_setup.js index ccbfa74e..fc9e1bc3 100644 --- a/lib/util/s3_setup.js +++ b/lib/util/s3_setup.js @@ -2,13 +2,12 @@ module.exports = exports; -const url = require('url'); module.exports.detect = function(opts) { const config = {}; const to = opts.hosted_path; - const uri = url.parse(to); + const uri = new URL(to); if (opts.bucket && opts.region) { // use user defined settings for host, region, bucket @@ -29,7 +28,7 @@ module.exports.detect = function(opts) { // https://bucket-name.s3.Region.amazonaws.com/key-name (dash Region) 
// or in some legacy region of this format: // https://bucket-name.s3-Region.amazonaws.com/key-name (dot Region) - const parts = uri.hostname.split('.s3'); + const parts = uri.hostname.replace(/^\[|\]$/g, '').split('.s3'); // there is nothing before the .s3 // not a valid s3 virtual host bucket url diff --git a/lib/util/url.js b/lib/util/url.js new file mode 100644 index 00000000..d548a906 --- /dev/null +++ b/lib/util/url.js @@ -0,0 +1,19 @@ +'use strict'; + +// url.resolve is deprecated because it invokes the deprecated url.parse() internally +// https://nodejs.org/api/url.html#urlresolvefrom-to +module.exports.resolve = function(from, to) { + const resolvedUrl = new URL(to, new URL(from, 'resolve://')); + if (resolvedUrl.protocol === 'resolve:') { + // `from` is a relative URL. + const { pathname, search, hash } = resolvedUrl; + + // To keep consistency with deprecated url.resolve(), we need to remove the leading '/' from pathname + if (pathname[0] === '/') { + return pathname.slice(1) + search + hash; + } + + return pathname + search + hash; + } + return resolvedUrl.toString(); +}; diff --git a/lib/util/versioning.js b/lib/util/versioning.js index b69b367e..7a5a52f2 100644 --- a/lib/util/versioning.js +++ b/lib/util/versioning.js @@ -4,9 +4,9 @@ module.exports = exports; const path = require('path'); const semver = require('semver'); -const url = require('url'); const detect_libc = require('detect-libc'); const napi = require('./napi.js'); +const url = require('./url.js'); let abi_crosswalk; @@ -221,9 +221,13 @@ function validate_config(package_json, opts) { } if (o) { // enforce https over http - const protocol = url.parse(o.host).protocol; - if (protocol === 'http:') { - throw new Error("'host' protocol (" + protocol + ") is invalid - only 'https:' is accepted"); + try { + const protocol = new URL(o.host).protocol; + if (protocol === 'http:') { + throw new Error("'host' protocol (" + protocol + ") is invalid - only 'https:' is accepted"); + } + } catch (err) { 
+ // 'host' is not parsable as an absolute URL: new URL() throws where the old url.parse() was lenient, so deliberately skip the protocol check here rather than hard-fail — later stages surface a bad host when it is actually used } } napi.validate_package_json(package_json, opts);