install: better logging, no more readJSON caching
A bunch of the tests now get more complete `from` / `resolved` information,
because the contents of rewritten package.json files on disk are no longer
masked by `read-package-json` memoizing fs reads of package.json files.
Part of fixing npm#7074.
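
For illustration, a minimal sketch of the masking problem (hypothetical
readJsonMemoized helper, not read-package-json's actual internals): once a
package.json read is memoized, a later rewrite of the file on disk is
invisible to any caller that goes through the cache.

    var fs = require('fs')

    var cache = Object.create(null)

    // memoized read: the first parse of a path is reused for every later call
    function readJsonMemoized (file, cb) {
      if (cache[file]) {
        return process.nextTick(function () { cb(null, cache[file]) })
      }
      fs.readFile(file, 'utf8', function (er, raw) {
        if (er) return cb(er)
        cache[file] = JSON.parse(raw)
        cb(null, cache[file])
      })
    }

    // install rewrites package.json on disk (filling in fields such as _from
    // and _resolved), but a subsequent memoized read still returns the
    // pre-rewrite object, so code comparing against the file's real contents
    // sees stale data.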
othiym23 committed Apr 3, 2015
1 parent da015ee commit c3744ba
Showing 18 changed files with 339 additions and 280 deletions.
34 changes: 20 additions & 14 deletions lib/build.js
@@ -85,20 +85,26 @@ function linkStuff (pkg, folder, global, didRB, cb) {
// if it's global, and folder is in {prefix}/node_modules,
// then bins are in {prefix}/bin
// otherwise, then bins are in folder/../.bin
var parent = pkg.name[0] === "@" ? path.dirname(path.dirname(folder)) : path.dirname(folder)
, gnm = global && npm.globalDir
, gtop = parent === gnm

log.verbose("linkStuff", [global, gnm, gtop, parent])
log.info("linkStuff", pkg._id)

shouldWarn(pkg, folder, global, function() {
asyncMap( [linkBins, linkMans, !didRB && rebuildBundles]
, function (fn, cb) {
if (!fn) return cb()
log.verbose(fn.name, pkg._id)
fn(pkg, folder, parent, gtop, cb)
}, cb)
var parent = pkg.name[0] === '@' ? path.dirname(path.dirname(folder)) : path.dirname(folder)
var gnm = global && npm.globalDir
var gtop = parent === gnm

log.info('linkStuff', pkg._id)
log.silly('linkStuff', pkg._id, 'has', parent, 'as its parent node_modules')
if (global) log.silly('linkStuff', pkg._id, 'is part of a global install')
if (gnm) log.silly('linkStuff', pkg._id, 'is installed into a global node_modules')
if (gtop) log.silly('linkStuff', pkg._id, 'is installed into the top-level global node_modules')

shouldWarn(pkg, folder, global, function () {
asyncMap(
[linkBins, linkMans, !didRB && rebuildBundles],
function (fn, cb) {
if (!fn) return cb()
log.verbose(fn.name, pkg._id)
fn(pkg, folder, parent, gtop, cb)
},
cb
)
})
}

2 changes: 0 additions & 2 deletions lib/cache.js
@@ -135,9 +135,7 @@ function read (name, ver, forceBypass, cb) {

var root = cachedPackageRoot({name : name, version : ver})
function c (er, data) {
log.silly("cache", "addNamed cb", name+"@"+ver)
if (er) log.verbose("cache", "addNamed error for", name+"@"+ver, er)

if (data) deprCheck(data)

return cb(er, data)
19 changes: 11 additions & 8 deletions lib/cache/add-named.js
@@ -34,20 +34,23 @@ function addNamed (name, version, data, cb_) {
assert(typeof cb_ === "function", "must have callback")

var key = name + "@" + version
log.verbose("addNamed", key)
log.silly("addNamed", key)

function cb (er, data) {
if (data && !data._fromGithub) data._from = key
cb_(er, data)
}

log.silly("addNamed", "semver.valid", semver.valid(version))
log.silly("addNamed", "semver.validRange", semver.validRange(version))
var fn = ( semver.valid(version, true) ? addNameVersion
: semver.validRange(version, true) ? addNameRange
: addNameTag
)
fn(name, version, data, cb)
if (semver.valid(version, true)) {
log.verbose('addNamed', JSON.stringify(version), 'is a plain semver version for', name)
addNameVersion(name, version, data, cb)
} else if (semver.validRange(version, true)) {
log.verbose('addNamed', JSON.stringify(version), 'is a valid semver range for', name)
addNameRange(name, version, data, cb)
} else {
log.verbose('addNamed', JSON.stringify(version), 'is being treated as a dist-tag for', name)
addNameTag(name, version, data, cb)
}
}

function addNameTag (name, tag, data, cb) {
76 changes: 43 additions & 33 deletions lib/install.js
@@ -223,8 +223,9 @@ function install (args, cb_) {

// initial "family" is the name:version of the root, if it's got
// a package.json file.
var jsonFile = path.resolve(where, "package.json")
readJson(jsonFile, log.warn, function (er, data) {
var jsonPath = path.resolve(where, "package.json")
log.verbose('install', 'initial load of', jsonPath)
readJson(jsonPath, log.warn, function (er, data) {
if (er
&& er.code !== "ENOENT"
&& er.code !== "ENOTDIR") return cb(er)
@@ -246,7 +247,9 @@ function install (args, cb_) {
}

function validateInstall (where, cb) {
readJson(path.resolve(where, 'package.json'), log.warn, function (er, data) {
var jsonPath = path.resolve(where, 'package.json')
log.verbose('validateInstall', 'loading', jsonPath, 'for validation')
readJson(jsonPath, log.warn, function (er, data) {
if (er
&& er.code !== 'ENOENT'
&& er.code !== 'ENOTDIR') return cb(er)
@@ -314,11 +317,11 @@ function findPeerInvalid_ (packageMap, fpiList) {
function readDependencies (context, where, opts, cb) {
var wrap = context ? context.wrap : null

readJson( path.resolve(where, "package.json")
, log.warn
, function (er, data) {
var jsonPath = path.resolve(where, 'package.json')
log.verbose('readDependencies', 'loading dependencies from', jsonPath)
readJson(jsonPath, log.warn, function (er, data) {
if (er && er.code === "ENOENT") er.code = "ENOPACKAGEJSON"
if (er) return cb(er)
if (er) return cb(er)

if (opts && opts.dev) {
if (!data.dependencies) data.dependencies = {}
@@ -472,7 +475,7 @@ function save (where, installed, tree, pretty, hasArguments, cb) {
data.bundleDependencies = bundle.sort()
}

log.verbose("saving", things)
log.verbose("save", "saving", things)
data[deps] = data[deps] || {}
Object.keys(things).forEach(function (t) {
data[deps][t] = things[t]
@@ -485,6 +488,7 @@ function save (where, installed, tree, pretty, hasArguments, cb) {

data[deps] = sortedObject(data[deps])

log.silly("save", "writing", saveTarget)
data = JSON.stringify(data, null, 2) + "\n"
writeFileAtomic(saveTarget, data, function (er) {
cb(er, installed, tree, pretty)
@@ -601,7 +605,9 @@ function installManyTop (what, where, context, cb_) {

if (context.explicit) return next()

readJson(path.join(where, "package.json"), log.warn, function (er, data) {
var jsonPath = path.join(where, 'package.json')
log.verbose('installManyTop', 'reading for lifecycle', jsonPath)
readJson(jsonPath, log.warn, function (er, data) {
if (er) return next(er)
lifecycle(data, "preinstall", where, next)
})
@@ -636,8 +642,9 @@ function installManyTop_ (what, where, context, cb) {
// recombine unscoped with @scope/package packages
asyncMap(unscoped.concat(scoped).map(function (p) {
return path.resolve(nm, p, "package.json")
}), function (jsonfile, cb) {
readJson(jsonfile, log.warn, function (er, data) {
}), function (jsonPath, cb) {
log.verbose('installManyTop', 'reading scoped package data from', jsonPath)
readJson(jsonPath, log.warn, function (er, data) {
if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
if (er) return cb(null, [])
cb(null, [[data.name, data.version]])
@@ -789,7 +796,9 @@ function targetResolver (where, context, deps) {
})

asyncMap(inst, function (pkg, cb) {
readJson(path.resolve(name, pkg, "package.json"), log.warn, function (er, d) {
var jsonPath = path.resolve(name, pkg, 'package.json')
log.verbose('targetResolver', 'reading package data from', jsonPath)
readJson(jsonPath, log.warn, function (er, d) {
if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
// error means it's not a package, most likely.
if (er) return cb(null, [])
@@ -930,11 +939,11 @@ function installOne (target, where, context, cb) {

function localLink (target, where, context, cb) {
log.verbose("localLink", target._id)
var jsonFile = path.resolve( npm.globalDir, target.name
, "package.json" )
, parent = context.parent
var jsonPath = path.resolve(npm.globalDir, target.name , 'package.json')
var parent = context.parent

readJson(jsonFile, log.warn, function (er, data) {
log.verbose('localLink', 'reading data to link', target.name, 'from', jsonPath)
readJson(jsonPath, log.warn, function (er, data) {
function thenLink () {
npm.commands.link([target.name], function (er, d) {
log.silly("localLink", "back from link", [er, d])
@@ -1057,23 +1066,24 @@ function write (target, targetFolder, context, cb_) {

log.silly("install write", "writing", target.name, target.version, "to", targetFolder)
chain(
[ [ cache.unpack, target.name, target.version, targetFolder
, null, null, user, group ]
, [ fs, "writeFile"
, path.resolve(targetFolder, "package.json")
, JSON.stringify(target, null, 2) + "\n" ]
, [ lifecycle, target, "preinstall", targetFolder ]
, function (cb) {
if (!target.bundleDependencies) return cb()

var bd = path.resolve(targetFolder, "node_modules")
fs.readdir(bd, function (er, b) {
// nothing bundled, maybe
if (er) return cb()
bundled = b || []
cb()
})
} ]
[ [ cache.unpack, target.name, target.version, targetFolder, null, null, user, group ],
function writePackageJSON (cb) {
var jsonPath = path.resolve(targetFolder, 'package.json')
log.verbose('write', 'writing to', jsonPath)
writeFileAtomic(jsonPath, JSON.stringify(target, null, 2) + '\n', cb)
},
[ lifecycle, target, "preinstall", targetFolder ],
function collectBundled (cb) {
if (!target.bundleDependencies) return cb()

var bd = path.resolve(targetFolder, "node_modules")
fs.readdir(bd, function (er, b) {
// nothing bundled, maybe
if (er) return cb()
bundled = b || []
cb()
})
} ]

// nest the chain so that we can throw away the results returned
// up until this point, since we really don't care about it.
9 changes: 0 additions & 9 deletions test/tap/dev-dep-duplicate/desired-ls-results.json

This file was deleted.

11 changes: 0 additions & 11 deletions test/tap/dev-dep-duplicate/package.json

This file was deleted.

82 changes: 55 additions & 27 deletions test/tap/install-with-dev-dep-duplicate.js
@@ -1,57 +1,85 @@
var npm = npm = require("../../")
var test = require("tap").test
var path = require("path")
var fs = require("fs")
var osenv = require("osenv")
var rimraf = require("rimraf")
var mr = require("npm-registry-mock")
var common = require("../common-tap.js")

var pkg = path.resolve(__dirname, "dev-dep-duplicate")
var desiredResultsPath = path.resolve(pkg, "desired-ls-results.json")

test("prefers version from dependencies over devDependencies", function (t) {
var fs = require('graceful-fs')
var path = require('path')

var mkdirp = require('mkdirp')
var mr = require('npm-registry-mock')
var osenv = require('osenv')
var rimraf = require('rimraf')
var test = require('tap').test

var common = require('../common-tap.js')
var npm = npm = require('../../')

var pkg = path.resolve(__dirname, 'dev-dep-duplicate')

var json = {
author: 'Anders Janmyr',
name: 'dev-dep-duplicate',
version: '0.0.0',
dependencies: {
underscore: '1.5.1'
},
devDependencies: {
underscore: '1.3.1'
}
}

var expected = {
name: 'dev-dep-duplicate',
version: '0.0.0',
dependencies: {
underscore: {
version: '1.5.1',
from: '[email protected]',
resolved: common.registry + '/underscore/-/underscore-1.5.1.tgz'
}
}
}

test('prefers version from dependencies over devDependencies', function (t) {
t.plan(1)

mr({port : common.port}, function (er, s) {
mr({ port: common.port }, function (er, s) {
setup(function (err) {
if (err) return t.fail(err)

npm.install(".", function (err) {
npm.install('.', function (err) {
if (err) return t.fail(err)

npm.commands.ls([], true, function (err, _, results) {
if (err) return t.fail(err)

fs.readFile(desiredResultsPath, function (err, desired) {
if (err) return t.fail(err)

t.deepEqual(results, JSON.parse(desired))
s.close()
t.end()
})
t.deepEqual(results, expected)
s.close()
t.end()
})
})
})
})
})

test("cleanup", function (t) {
test('cleanup', function (t) {
cleanup()
t.end()
})


function setup (cb) {
cleanup()
mkdirp.sync(pkg)
fs.writeFileSync(
path.join(pkg, 'package.json'),
JSON.stringify(json, null, 2)
)
process.chdir(pkg)

var opts = { cache: path.resolve(pkg, "cache"), registry: common.registry}
var opts = {
cache: path.resolve(pkg, 'cache'),
registry: common.registry
}
npm.load(opts, cb)
}

function cleanup () {
process.chdir(osenv.tmpdir())
rimraf.sync(path.resolve(pkg, "node_modules"))
rimraf.sync(path.resolve(pkg, "cache"))
rimraf.sync(pkg)
}