Skip to content

Commit

Permalink
pack/publish: --dry-run, --json, and package previews (#19908)
Browse files Browse the repository at this point in the history
PR-URL: npm/npm#19908
Credit: @zkat
Reviewed-By: @iarna
  • Loading branch information
zkat authored and iarna committed Mar 23, 2018
1 parent 5ebe997 commit 116e9d8
Show file tree
Hide file tree
Showing 5 changed files with 440 additions and 26 deletions.
145 changes: 130 additions & 15 deletions lib/pack.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,9 @@

const BB = require('bluebird')

const byteSize = require('byte-size')
const cacache = require('cacache')
const columnify = require('columnify')
const cp = require('child_process')
const deprCheck = require('./utils/depr-check')
const fpm = require('./fetch-package-metadata')
Expand Down Expand Up @@ -46,15 +48,17 @@ function pack (args, silent, cb) {

BB.all(
args.map((arg) => pack_(arg, cwd))
).then((files) => {
if (!silent) {
output(files.map((f) => path.relative(cwd, f)).join('\n'))
).then((tarballs) => {
if (!silent && npm.config.get('json')) {
output(JSON.stringify(tarballs, null, 2))
} else if (!silent) {
tarballs.forEach(logContents)
output(tarballs.map((f) => path.relative(cwd, f.filename)).join('\n'))
}
cb(null, files)
}, cb)
return tarballs
}).nodeify(cb)
}

// add to cache, then cp to the cwd
function pack_ (pkg, dir) {
return BB.fromNode((cb) => fpm(pkg, dir, cb)).then((mani) => {
let name = mani.name[0] === '@'
Expand All @@ -64,17 +68,44 @@ function pack_ (pkg, dir) {
const target = `${name}-${mani.version}.tgz`
return pinflight(target, () => {
if (mani._requested.type === 'directory') {
return prepareDirectory(mani._resolved).then(() => {
return packDirectory(mani, mani._resolved, target)
return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
const tmpTarget = path.join(tmp, path.basename(target))
return prepareDirectory(mani._resolved)
.then(() => {
return packDirectory(mani, mani._resolved, tmpTarget, target, true)
})
.tap(() => {
if (npm.config.get('dry-run')) {
log.verbose('pack', '--dry-run mode enabled. Skipping write.')
} else {
return move(tmpTarget, target, {Promise: BB, fs})
}
})
})
} else if (npm.config.get('dry-run')) {
log.verbose('pack', '--dry-run mode enabled. Skipping write.')
return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
const tmpTarget = path.join(tmp, path.basename(target))
return packFromPackage(pkg, tmpTarget, target)
})
} else {
return pacote.tarball.toFile(pkg, target, pacoteOpts())
.then(() => target)
return packFromPackage(pkg, target, target)
}
})
})
}

// Packs a tarball from a non-directory package spec (registry, git, remote
// tarball, etc.) by fetching it into `target`, then extracting it to a temp
// dir so its package.json can be read for the contents report.
//
// arg      - package spec/argument accepted by pacote
// target   - absolute path the fetched .tgz is written to
// filename - display filename threaded into the contents object
//
// Returns a promise for the contents summary from getContents().
function packFromPackage (arg, target, filename) {
  // Build the pacote options once and reuse them for both the fetch and the
  // extract (the original constructed them twice).
  const opts = pacoteOpts()
  return pacote.tarball.toFile(arg, target, opts)
    .then(() => cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'unpacking'}, (tmp) => {
      const tmpTarget = path.join(tmp, filename)
      return pacote.extract(arg, tmpTarget, opts)
        .then(() => readJson(path.join(tmpTarget, 'package.json')))
    }))
    .then((pkg) => getContents(pkg, target, filename))
}

module.exports.prepareDirectory = prepareDirectory
function prepareDirectory (dir) {
return readJson(path.join(dir, 'package.json')).then((pkg) => {
Expand Down Expand Up @@ -105,7 +136,7 @@ function prepareDirectory (dir) {
}

module.exports.packDirectory = packDirectory
function packDirectory (mani, dir, target) {
function packDirectory (mani, dir, target, filename, logIt) {
deprCheck(mani)
return readJson(path.join(dir, 'package.json')).then((pkg) => {
return lifecycle(pkg, 'prepack', dir)
Expand All @@ -126,18 +157,102 @@ function packDirectory (mani, dir, target) {
gzip: true
}

return packlist({ path: dir })
return BB.resolve(packlist({ path: dir }))
// NOTE: node-tar does some Magic Stuff depending on prefixes for files
// specifically with @ signs, so we just neutralize that one
// and any such future "features" by prepending `./`
.then((files) => tar.create(tarOpt, files.map((f) => `./${f}`)))
.then(() => move(tmpTarget, target, {Promise: BB, fs}))
.then(() => lifecycle(pkg, 'postpack', dir))
.then(() => target)
.then(() => getContents(pkg, tmpTarget, filename, logIt))
// thread the content info through
.tap(() => move(tmpTarget, target, {Promise: BB, fs}))
.tap(() => lifecycle(pkg, 'postpack', dir))
})
})
}

module.exports.logContents = logContents
// Pretty-prints a summary of a packed tarball (the object produced by
// getContents) at `notice` log level: a banner line, the per-file listing,
// any bundled dependencies, and a details table of sizes and counts.
function logContents (tarball) {
  const banner = npm.config.get('unicode') ? '📦 ' : 'package:'
  log.notice('')
  log.notice('', `${banner} ${tarball.name}@${tarball.version}`)
  log.notice('=== Tarball Contents ===')
  if (tarball.files.length) {
    // One row per file, with a human-readable size column.
    const fileRows = tarball.files.map((f) => {
      const bytes = byteSize(f.size)
      return {path: f.path, size: `${bytes.value}${bytes.unit}`}
    })
    log.notice('', columnify(fileRows, {
      include: ['size', 'path'],
      showHeaders: false
    }))
  }
  if (tarball.bundled.length) {
    log.notice('=== Bundled Dependencies ===')
    for (const depName of tarball.bundled) {
      log.notice('', depName)
    }
  }
  // Rows guarded by `tarball.bundled.length`/`tarball.filename` evaluate to a
  // falsy value when absent and are dropped by the filter below.
  const detailRows = [
    {name: 'name:', value: tarball.name},
    {name: 'version:', value: tarball.version},
    tarball.filename && {name: 'filename:', value: tarball.filename},
    {name: 'package size:', value: byteSize(tarball.size)},
    {name: 'unpacked size:', value: byteSize(tarball.unpackedSize)},
    tarball.bundled.length && {name: 'bundled deps:', value: tarball.bundled.length},
    tarball.bundled.length && {name: 'bundled files:', value: tarball.entryCount - tarball.files.length},
    tarball.bundled.length && {name: 'own files:', value: tarball.files.length},
    {name: 'total files:', value: tarball.entryCount}
  ].filter((row) => row)
  log.notice('=== Tarball Details ===')
  log.notice('', columnify(detailRows, {
    include: ['name', 'value'],
    showHeaders: false
  }))
  log.notice('', '')
}

module.exports.getContents = getContents
// Builds a summary object describing a packed tarball on disk: its files,
// which wanted bundled dependencies actually made it in, entry counts, and
// packed/unpacked sizes. Returns a promise for that plain object.
//
// pkg      - the package.json contents for the packed package
// target   - path to the .tgz file to inspect
// filename - display filename to carry through into the result
// silent   - NOTE(review): accepted but never read in this body — presumably
//            reserved for callers; confirm before removing
function getContents (pkg, target, filename, silent) {
  // Dependencies the package *wants* bundled; both historical spellings of
  // the package.json field are honored.
  const bundledWanted = new Set(
    pkg.bundleDependencies ||
    pkg.bundledDependencies ||
    []
  )
  const files = []
  const bundled = new Set()
  let totalEntries = 0
  let totalEntrySize = 0
  // List the tarball without extracting; onentry fires once per entry.
  return tar.t({
    file: target,
    onentry (entry) {
      totalEntries++
      totalEntrySize += entry.size
      const p = entry.path
      if (p.startsWith('package/node_modules/')) {
        // Capture the dependency name, including a scope segment when the
        // path is package/node_modules/@scope/name/...
        const name = p.match(/^package\/node_modules\/((?:@[^/]+)?[^/]+)/)[1]
        if (bundledWanted.has(name)) {
          // Only record bundled deps the package actually asked for.
          bundled.add(name)
        }
      } else {
        // Regular package file: record it with the leading "package/"
        // directory stripped from the display path.
        files.push({
          path: entry.path.replace(/^package\//, ''),
          size: entry.size,
          mode: entry.mode
        })
      }
    },
    strip: 1
  })
  // Packed size comes from the tarball file itself, not the entries.
  .then(() => BB.fromNode((cb) => fs.stat(target, cb)))
  .then((stat) => ({
    id: pkg._id,
    name: pkg.name,
    version: pkg.version,
    from: pkg._from,
    size: stat.size,
    unpackedSize: totalEntrySize,
    filename,
    files,
    entryCount: totalEntries,
    bundled: Array.from(bundled)
  }))
}

const PASSTHROUGH_OPTS = [
'always-auth',
'auth-type',
Expand Down
39 changes: 28 additions & 11 deletions lib/publish.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@ const pacote = require('pacote')
const pacoteOpts = require('./config/pacote')
const path = require('path')
const readJson = BB.promisify(require('read-package-json'))
const readUserInfo = require('./utils/read-user-info.js')
const semver = require('semver')
const statAsync = BB.promisify(require('graceful-fs').stat)
const readUserInfo = require('./utils/read-user-info.js')

publish.usage = 'npm publish [<tarball>|<folder>] [--tag <tag>] [--access <public|restricted>]' +
"\n\nPublishes '.' if no argument supplied" +
Expand Down Expand Up @@ -47,10 +47,16 @@ function publish (args, isRetry, cb) {
return cb(new Error('Tag name must not be a valid SemVer range: ' + t))
}

publish_(args[0]).then((pkg) => {
output(`+ ${pkg._id}`)
cb()
}, cb)
return publish_(args[0])
.then((tarball) => {
const silent = log.level === 'silent'
if (!silent && npm.config.get('json')) {
output(JSON.stringify(tarball, null, 2))
} else if (!silent) {
output(`+ ${tarball.id}`)
}
})
.nodeify(cb)
}

function publish_ (arg) {
Expand All @@ -76,6 +82,7 @@ function publish_ (arg) {
function publishFromDirectory (arg) {
// All this readJson is because any of the given scripts might modify the
// package.json in question, so we need to refresh after every step.
let contents
return pack.prepareDirectory(arg).then(() => {
return readJson(path.join(arg, 'package.json'))
}).then((pkg) => {
Expand All @@ -85,9 +92,10 @@ function publishFromDirectory (arg) {
}).then((pkg) => {
return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'fromDir'}, (tmpDir) => {
const target = path.join(tmpDir, 'package.tgz')
return pack.packDirectory(pkg, arg, target).then(() => {
return upload(arg, pkg, false, target)
})
return pack.packDirectory(pkg, arg, target, null, true)
.tap((c) => { contents = c })
.then((c) => !npm.config.get('json') && pack.logContents(c))
.then(() => upload(arg, pkg, false, target))
})
}).then(() => {
return readJson(path.join(arg, 'package.json'))
Expand All @@ -96,6 +104,7 @@ function publishFromDirectory (arg) {
}).tap((pkg) => {
return lifecycle(pkg, 'postpublish', arg)
})
.then(() => contents)
}

function publishFromPackage (arg) {
Expand All @@ -106,7 +115,11 @@ function publishFromPackage (arg) {
return pacote.tarball.toFile(arg, target, opts)
.then(() => pacote.extract(arg, extracted, opts))
.then(() => readJson(path.join(extracted, 'package.json')))
.tap((pkg) => upload(arg, pkg, false, target))
.then((pkg) => {
return BB.resolve(pack.getContents(pkg, target))
.tap((c) => !npm.config.get('json') && pack.logContents(c))
.tap(() => upload(arg, pkg, false, target))
})
})
}

Expand All @@ -120,7 +133,6 @@ function upload (arg, pkg, isRetry, cached) {
"Remove the 'private' field from the package.json to publish it."
))
}

const mappedConfig = getPublishConfig(
pkg.publishConfig,
npm.config,
Expand Down Expand Up @@ -151,7 +163,7 @@ function upload (arg, pkg, isRetry, cached) {

const params = {
metadata: pkg,
body: createReadStream(cached),
body: !npm.config.get('dry-run') && createReadStream(cached),
auth: auth
}

Expand All @@ -165,6 +177,11 @@ function upload (arg, pkg, isRetry, cached) {
params.access = config.get('access')
}

if (npm.config.get('dry-run')) {
log.info('publish', '--dry-run mode enabled. Skipping upload.')
return BB.resolve()
}

log.showProgress('publish:' + pkg._id)
return BB.fromNode((cb) => {
registry.publish(registryBase, params, cb)
Expand Down
File renamed without changes.
Loading

0 comments on commit 116e9d8

Please sign in to comment.