From b6953183b9ef514296e171a3a031aa02a8430335 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 19 Apr 2018 09:48:29 -0400 Subject: [PATCH 001/328] version bump for bug fixes --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3601635a..87e201c5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.0.0", + "version": "4.0.1", "engines": { "node": ">=6.0" }, From cef29b59e55a1749d3ac6af12b00dcb2820469d1 Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Fri, 20 Apr 2018 08:40:54 -0400 Subject: [PATCH 002/328] Use JSZip 3.x to handle zip files (#178) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update to JSZip 3.x and remove zip-local - Restores ability to store unix permissions. - So async, much promises. * Fix some linter issues after merge. * Actually run prettier… --- lib/inject.js | 164 +++++++++++++++++++++++-------------------------- lib/zip.js | 116 +++++++++++++++++----------------- lib/zipTree.js | 75 ++++++++++++++++++++++ package.json | 6 +- test.bats | 13 ++++ 5 files changed, 222 insertions(+), 152 deletions(-) create mode 100644 lib/zipTree.js diff --git a/lib/inject.js b/lib/inject.js index e862e769..1c10a615 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -1,97 +1,76 @@ +const BbPromise = require('bluebird'); const fse = require('fs-extra'); const glob = require('glob-all'); const get = require('lodash.get'); const set = require('lodash.set'); const path = require('path'); const values = require('lodash.values'); -const zipper = require('zip-local'); const JSZip = require('jszip'); +const { writeZip, zipFile } = require('./zipTree'); -/** - * write zip contents to a file - * @param {Object} zip - * @param {string} path - */ -function writeZip(zip, path) { - const buff = zip.generate({ - type: 'nodebuffer', - compression: 'DEFLATE' - }); - - fse.writeFileSync(path, buff); -} - -/** - * add a new file to a zip file from a buffer - * @param {Object} zip - * @param {string} path path to put in zip - * @param {string} buffer file contents - */ -function zipFile(zip, path, buffer) { - zip.file(path, buffer, { - date: new Date(0) // necessary to get the same hash when zipping the same content - }); -} +BbPromise.promisifyAll(fse); /** - * inject requirements into packaged application + * Inject requirements into packaged application. * @param {string} requirementsPath requirements folder path * @param {string} packagePath target package path * @param {Object} options our options object + * @return {Promise} the JSZip object constructed. 
*/ function injectRequirements(requirementsPath, packagePath, options) { const noDeploy = new Set(options.noDeploy || []); - const zip = zipper.sync.unzip(packagePath).lowLevel(); - - glob - .sync([path.join(requirementsPath, '**')], { mark: true, dot: true }) - .forEach(file => { - if (file.endsWith('/')) { - return; - } - - const relativeFile = path.relative(requirementsPath, file); - - if (relativeFile.match(/^__pycache__[\\/]/)) { - return; - } - if (noDeploy.has(relativeFile.split(/([-\\/]|\.py$|\.pyc$)/, 1)[0])) { - return; - } - - zipFile(zip, relativeFile, fse.readFileSync(file)); - }); - - writeZip(zip, packagePath); + return fse + .readFileAsync(packagePath) + .then(buffer => JSZip.loadAsync(buffer)) + .then(zip => + BbPromise.resolve( + glob.sync([path.join(requirementsPath, '**')], { + mark: true, + dot: true + }) + ) + .map(file => [file, path.relative(requirementsPath, file)]) + .filter( + ([file, relativeFile]) => + !file.endsWith('/') && + !relativeFile.match(/^__pycache__[\\/]/) && + !noDeploy.has(relativeFile.split(/([-\\/]|\.py$|\.pyc$)/, 1)[0]) + ) + .map(([file, relativeFile]) => + zipFile(zip, relativeFile, fse.readFileAsync(file)) + ) + .then(() => writeZip(zip, packagePath)) + ); } /** - * remove all modules but the selected module from a package - * @param {string} source original package - * @param {string} target result package + * Remove all modules but the selected module from a package. + * @param {string} source path to original package + * @param {string} target path to result package * @param {string} module module to keep + * @return {Promise} the JSZip object written out. */ function moveModuleUp(source, target, module) { - const sourceZip = zipper.sync.unzip(source).memory(); - const targetZip = JSZip.make(); - - sourceZip.contents().forEach(file => { - if (!file.startsWith(module + '/')) { - return; - } - zipFile( - targetZip, - file.replace(module + '/', ''), - sourceZip.read(file, 'buffer') - ); - }); - - writeZip(targetZip, target); + const targetZip = new JSZip(); + + return fse + .readFileAsync(source) + .then(buffer => JSZip.loadAsync(buffer)) + .then(sourceZip => sourceZip.filter(file => file.startsWith(module + '/'))) + .map(srcZipObj => + zipFile( + targetZip, + srcZipObj.name.replace(module + '/', ''), + srcZipObj.async('nodebuffer') + ) + ) + .then(() => writeZip(targetZip, target)); } /** - * inject requirements into packaged application + * Inject requirements into packaged application. + * @return {Promise} the combined promise for requirements injection. 
*/ function injectAllRequirements() { this.serverless.cli.log('Injecting required Python packages to package...'); @@ -101,30 +80,39 @@ function injectAllRequirements() { } if (this.serverless.service.package.individually) { - values(this.serverless.service.functions).forEach(f => { - if ( - !(f.runtime || this.serverless.service.provider.runtime).match( + return BbPromise.resolve(values(this.serverless.service.functions)) + .filter(func => + (func.runtime || this.serverless.service.provider.runtime).match( /^python.*/ ) - ) { - return; - } - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - if (f.module !== '.') { - const artifactPath = path.join('.serverless', `${f.module}.zip`); - moveModuleUp(f.package.artifact, artifactPath, f.module); - f.package.artifact = artifactPath; - } - injectRequirements( - path.join('.serverless', f.module, 'requirements'), - f.package.artifact, - this.options + ) + .map(func => { + if (!get(func, 'module')) { + set(func, ['module'], '.'); + } + return func; + }) + .map(func => { + if (func.module !== '.') { + const artifact = func.package.artifact; + const newArtifact = path.join('.serverless', `${func.module}.zip`); + func.package.artifact = newArtifact; + return moveModuleUp(artifact, newArtifact, func.module).then( + () => func + ); + } else { + return func; + } + }) + .map(func => + injectRequirements( + path.join('.serverless', func.module, 'requirements'), + func.package.artifact, + this.options + ) ); - }); } else { - injectRequirements( + return injectRequirements( path.join('.serverless', 'requirements'), this.serverless.service.package.artifact, this.options diff --git a/lib/zip.js b/lib/zip.js index 3dd21a51..eba21976 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -2,47 +2,43 @@ const fse = require('fs-extra'); const path = require('path'); const get = require('lodash.get'); const set = require('lodash.set'); -const zipper = require('zip-local'); -const BbPromise = require('bluebird'); const values = require('lodash.values'); +const uniqBy = require('lodash.uniqby'); +const BbPromise = require('bluebird'); +const JSZip = require('jszip'); +const { addTree, writeZip } = require('./zipTree'); BbPromise.promisifyAll(fse); /** - * add the vendor helper to the current service tree + * Add the vendor helper to the current service tree. 
* @return {Promise} */ function addVendorHelper() { if (this.options.zip) { if (this.serverless.service.package.individually) { - let promises = []; - let doneModules = []; - values(this.serverless.service.functions).forEach(f => { - if (!get(f, 'package.include')) { - set(f, ['package', 'include'], []); - } - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - - f.package.include.push('unzip_requirements.py'); - - if (!doneModules.includes(f.module)) { + return BbPromise.resolve(values(this.serverless.service.functions)) + .map(f => { + if (!get(f, 'package.include')) { + set(f, ['package', 'include'], []); + } + if (!get(f, 'module')) { + set(f, ['module'], '.'); + } + f.package.include.push('unzip_requirements.py'); + return f; + }) + .then(functions => uniqBy(functions, func => func.module)) + .map(f => { this.serverless.cli.log( `Adding Python requirements helper to ${f.module}...` ); - promises.push( - fse.copyAsync( - path.resolve(__dirname, '../unzip_requirements.py'), - path.join(this.servicePath, f.module, 'unzip_requirements.py') - ) + return fse.copyAsync( + path.resolve(__dirname, '../unzip_requirements.py'), + path.join(this.servicePath, f.module, 'unzip_requirements.py') ); - - doneModules.push(f.module); - } - }); - return BbPromise.all(promises); + }); } else { this.serverless.cli.log('Adding Python requirements helper...'); @@ -61,31 +57,28 @@ function addVendorHelper() { } /** - * remove the vendor helper from the current service tree - * @return {Promise} + * Remove the vendor helper from the current service tree. + * @return {Promise} the promise to remove the vendor helper. */ function removeVendorHelper() { if (this.options.zip && this.options.cleanupZipHelper) { if (this.serverless.service.package.individually) { - let promises = []; - let doneModules = []; - values(this.serverless.service.functions).forEach(f => { - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - if (!doneModules.includes(f.module)) { + return BbPromise.resolve(values(this.serverless.service.functions)) + .map(f => { + if (!get(f, 'module')) { + set(f, ['module'], '.'); + } + return f; + }) + .then(funcs => uniqBy(funcs, f => f.module)) + .map(f => { this.serverless.cli.log( `Removing Python requirements helper from ${f.module}...` ); - promises.push( - fse.removeAsync( - path.join(this.servicePath, f.module, 'unzip_requirements.py') - ) + return fse.removeAsync( + path.join(this.servicePath, f.module, 'unzip_requirements.py') ); - doneModules.push(f.module); - } - }); - return BbPromise.all(promises); + }); } else { this.serverless.cli.log('Removing Python requirements helper...'); return fse.removeAsync( @@ -96,35 +89,36 @@ function removeVendorHelper() { } /** - * zip up .serverless/requirements + * Zip up .serverless/requirements or .serverless/[MODULE]/requirements. + * @return {Promise} the promise to pack requirements. 
*/ function packRequirements() { if (this.options.zip) { if (this.serverless.service.package.individually) { - let doneModules = []; - values(this.serverless.service.functions).forEach(f => { - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - if (!doneModules.includes(f.module)) { + return BbPromise.resolve(values(this.serverless.service.functions)) + .map(f => { + if (!get(f, 'module')) { + set(f, ['module'], '.'); + } + return f; + }) + .then(funcs => uniqBy(funcs, f => f.module)) + .map(f => { this.serverless.cli.log( `Zipping required Python packages for ${f.module}...` ); f.package.include.push(`${f.module}/.requirements.zip`); - zipper.sync - .zip(`.serverless/${f.module}/requirements`) - .compress() - .save(`${f.module}/.requirements.zip`); - doneModules.push(f.module); - } - }); + return addTree( + new JSZip(), + `.serverless/${f.module}/requirements` + ).then(zip => writeZip(zip, `${f.module}/.requirements.zip`)); + }); } else { this.serverless.cli.log('Zipping required Python packages...'); this.serverless.service.package.include.push('.requirements.zip'); - zipper.sync - .zip(path.join(this.servicePath, '.serverless/requirements')) - .compress() - .save(path.join(this.servicePath, '.requirements.zip')); + return addTree(new JSZip(), '.serverless/requirements').then(zip => + writeZip(zip, path.join(this.servicePath, '.requirements.zip')) + ); } } } diff --git a/lib/zipTree.js b/lib/zipTree.js new file mode 100644 index 00000000..ea7a9df0 --- /dev/null +++ b/lib/zipTree.js @@ -0,0 +1,75 @@ +const BbPromise = require('bluebird'); +const fse = require('fs-extra'); +const path = require('path'); + +BbPromise.promisifyAll(fse); + +/** + * Add a directory recursively to a zip file. Files in src will be added to the top folder of zip. + * @param {JSZip} zip a zip object in the folder you want to add files to. + * @param {string} src the source folder. + * @return {Promise} a promise offering the original JSZip object. + */ +function addTree(zip, src) { + const srcN = path.normalize(src); + + return fse + .readdirAsync(srcN) + .map(name => { + const srcPath = path.join(srcN, name); + + return fse.statAsync(srcPath).then(stat => { + if (stat.isDirectory()) { + return addTree(zip.folder(name), srcPath); + } else { + const opts = { date: stat.mtime, unixPermissions: stat.mode }; + return fse + .readFileAsync(srcPath) + .then(data => zip.file(name, data, opts)); + } + }); + }) + .then(() => zip); // Original zip for chaining. +} + +/** + * Write zip contents to a file. + * @param {JSZip} zip the zip object + * @param {string} targetPath path to write the zip file to. + * @return {Promise} a promise resolving to null. + */ +function writeZip(zip, targetPath) { + const opts = { + platform: process.platform == 'win32' ? 'DOS' : 'UNIX', + compression: 'DEFLATE', + compressionOptions: { + level: 9 + } + }; + return new BbPromise(resolve => + zip + .generateNodeStream(opts) + .pipe(fse.createWriteStream(targetPath)) + .on('finish', resolve) + ).then(() => null); +} + +/** + * Add a new file to a zip file from a buffer. + * @param {JSZip} zip the zip object to add the file to. + * @param {string} zipPath the target path in the zip. + * @param {Promise} bufferPromise a promise providing a nodebuffer. + * @return {Promise} a promise providing the JSZip object. 
+ */ +function zipFile(zip, zipPath, bufferPromise) { + return bufferPromise + .then(buffer => + zip.file(zipPath, buffer, { + // necessary to get the same hash when zipping the same content + date: new Date(0) + }) + ) + .then(() => zip); +} + +module.exports = { addTree, writeZip, zipFile }; diff --git a/package.json b/package.json index 87e201c5..ce37c8e8 100644 --- a/package.json +++ b/package.json @@ -51,12 +51,12 @@ "fs-extra": "^5.0.0", "glob-all": "^3.1.0", "is-wsl": "^1.1.0", - "jszip": "^2.5.0", + "jszip": "^3.1.0", "lodash.get": "^4.4.2", "lodash.set": "^4.3.2", "lodash.values": "^4.3.0", - "rimraf": "^2.6.2", - "zip-local": "^0.3.4" + "lodash.uniqby": "^4.0.0", + "rimraf": "^2.6.2" }, "eslintConfig": { "extends": "eslint:recommended", diff --git a/test.bats b/test.bats index adb24e73..49093fcc 100755 --- a/test.bats +++ b/test.bats @@ -217,3 +217,16 @@ teardown() { ls puck/flask ls puck/lambda_decorators.py } + +@test "Don't nuke execute perms" { + cd tests/base + npm i $(npm pack ../..) + touch foobar + chmod +x foobar + perl -p -i'.bak' -e 's/(handler.py$)/\1\n - foobar/' serverless.yml + sls --vendor=./vendor package + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + ls puck/lambda_decorators.py + ./puck/foobar +} From 94e9894ebb0902b4fefacf65c46459359af753a8 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Fri, 20 Apr 2018 08:41:41 -0400 Subject: [PATCH 003/328] version bump for #178 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ce37c8e8..19cebf37 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.0.1", + "version": "4.0.2", "engines": { "node": ">=6.0" }, From 5a72d5cd1c1c8445adb5156eeef00f4bd010b2b7 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Fri, 20 Apr 2018 10:07:59 -0400 Subject: [PATCH 004/328] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index bb2fc843..b3c8ab5a 100644 --- a/README.md +++ b/README.md @@ -68,7 +68,7 @@ failure. ## Pipenv support :sparkles::cake::sparkles: If you include a `Pipfile` and have `pipenv` installed instead of a `requirements.txt` this will use -`pipenv lock --r` to generate them. It is fully compatible with all options such as `zip` and +`pipenv lock -r` to generate them. It is fully compatible with all options such as `zip` and `dockerizePip`. If you don't want this plugin to generate it for you, set the following option: ```yaml custom: From 21f031bf1828589e3a74b301ad34326b6961a4e4 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 26 Apr 2018 08:52:05 -0400 Subject: [PATCH 005/328] Better mixed runtime & function deploy handling (#180) * Better mixed runtime & function deploy handling fixes #161 and fixes #179 * another tweak * fix again? * Fix corrupted zip archive in case of same module * Do not try to install requirements for non-python runtime * Fix lint * format * update test for merging #181 * @andrewFarley's fix * huh. depcheck sucks. 
* fixix syntax error --- index.js | 29 +++++++++++++++++++++----- lib/inject.js | 11 ++++++---- lib/pip.js | 46 +++++++++++++++++++++++------------------ test.bats | 4 ++-- tests/base/package.json | 2 +- 5 files changed, 60 insertions(+), 32 deletions(-) diff --git a/index.js b/index.js index dda2fe8a..33e6a3da 100644 --- a/index.js +++ b/index.js @@ -108,17 +108,36 @@ class ServerlessPythonRequirements { } }; - const before = () => - BbPromise.bind(this) + const before = () => { + if ( + arguments[1].functionObj && + arguments[1].functionObj.runtime && + !arguments[1].functionObj.runtime.startsWith('python') + ) + return; + return BbPromise.bind(this) .then(pipfileToRequirements) .then(addVendorHelper) .then(installAllRequirements) .then(packRequirements); + }; - const after = () => - BbPromise.bind(this) + const after = () => { + if ( + arguments[1].functionObj && + arguments[1].functionObj.runtime && + !arguments[1].functionObj.runtime.startsWith('python') + ) + return; + return BbPromise.bind(this) .then(removeVendorHelper) - .then(injectAllRequirements); + .then(() => + injectAllRequirements.bind(this)( + arguments[1].functionObj && + arguments[1].functionObj.package.artifact + ) + ); + }; const invalidateCaches = () => { if (this.options.invalidateCaches) { diff --git a/lib/inject.js b/lib/inject.js index 1c10a615..337813a3 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -72,7 +72,7 @@ function moveModuleUp(source, target, module) { * Inject requirements into packaged application. * @return {Promise} the combined promise for requirements injection. */ -function injectAllRequirements() { +function injectAllRequirements(funcArtifact) { this.serverless.cli.log('Injecting required Python packages to package...'); if (this.options.zip) { @@ -94,8 +94,11 @@ function injectAllRequirements() { }) .map(func => { if (func.module !== '.') { - const artifact = func.package.artifact; - const newArtifact = path.join('.serverless', `${func.module}.zip`); + const artifact = func.package ? 
func.package.artifact : funcArtifact; + const newArtifact = path.join( + '.serverless', + `${func.module}-${func.name}.zip` + ); func.package.artifact = newArtifact; return moveModuleUp(artifact, newArtifact, func.module).then( () => func @@ -114,7 +117,7 @@ function injectAllRequirements() { } else { return injectRequirements( path.join('.serverless', 'requirements'), - this.serverless.service.package.artifact, + this.serverless.service.package.artifact || funcArtifact, this.options ); } diff --git a/lib/pip.js b/lib/pip.js index a5647d84..fc8a08a6 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -201,29 +201,35 @@ function installAllRequirements() { fse.ensureDirSync(path.join(this.servicePath, '.serverless')); if (this.serverless.service.package.individually) { let doneModules = []; - values(this.serverless.service.functions).forEach(f => { - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - if (!doneModules.includes(f.module)) { - installRequirements( - path.join(f.module, this.options.fileName), - path.join('.serverless', f.module), - this.serverless, - this.servicePath, - this.options - ); - if (f.vendor) { - // copy vendor libraries to requirements folder - copyVendors( - f.vendor, + values(this.serverless.service.functions) + .filter(func => + (func.runtime || this.serverless.service.provider.runtime).match( + /^python.*/ + ) + ) + .map(f => { + if (!get(f, 'module')) { + set(f, ['module'], '.'); + } + if (!doneModules.includes(f.module)) { + installRequirements( + path.join(f.module, this.options.fileName), path.join('.serverless', f.module), - this.serverless + this.serverless, + this.servicePath, + this.options ); + if (f.vendor) { + // copy vendor libraries to requirements folder + copyVendors( + f.vendor, + path.join('.serverless', f.module), + this.serverless + ); + } + doneModules.push(f.module); } - doneModules.push(f.module); - } - }); + }); } else { installRequirements( this.options.fileName, diff --git a/test.bats b/test.bats index 49093fcc..b4b6a237 100755 --- a/test.bats +++ b/test.bats @@ -197,8 +197,8 @@ teardown() { cd tests/individually npm i $(npm pack ../..) sls package - unzip .serverless/module1.zip -d puck - unzip .serverless/module2.zip -d puck2 + unzip .serverless/module1-sls-py-req-test-indiv-dev-hello1.zip -d puck + unzip .serverless/module2-sls-py-req-test-indiv-dev-hello2.zip -d puck2 ls puck/handler1.py ls puck2/handler2.py ls puck/pyaml diff --git a/tests/base/package.json b/tests/base/package.json index c53d13ee..1130d4b8 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.0.2.tgz" } } From d8b6ff0c143875ad1dfa03b0d48d1c2e9e59cf86 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" Date: Wed, 2 May 2018 07:47:33 -0400 Subject: [PATCH 006/328] Update fs-extra requirement to ^6.0.0 (#185) Updates the requirements on [fs-extra](https://github.com/jprichardson/node-fs-extra) to permit the latest version. 
- [Release notes](https://github.com/jprichardson/node-fs-extra/releases) - [Changelog](https://github.com/jprichardson/node-fs-extra/blob/master/CHANGELOG.md) - [Commits](https://github.com/jprichardson/node-fs-extra/commits/6.0.0) Signed-off-by: dependabot[bot] --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 19cebf37..18fc2a7c 100644 --- a/package.json +++ b/package.json @@ -48,7 +48,7 @@ }, "dependencies": { "bluebird": "^3.0.6", - "fs-extra": "^5.0.0", + "fs-extra": "^6.0.0", "glob-all": "^3.1.0", "is-wsl": "^1.1.0", "jszip": "^3.1.0", From 37661bb9f569c574f6ac936c4905fd8f13f9c71b Mon Sep 17 00:00:00 2001 From: Jonny Fuller Date: Mon, 7 May 2018 08:28:56 -0400 Subject: [PATCH 007/328] Fixed bindPath so paths with white space will work. (#187) --- lib/pip.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pip.js b/lib/pip.js index fc8a08a6..9e724d07 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -89,7 +89,7 @@ function installRequirements( // Prepare bind path depending on os platform const bindPath = getBindPath(servicePath); - cmdOptions = ['run', '--rm', '-v', `${bindPath}:/var/task:z`]; + cmdOptions = ['run', '--rm', '-v', `"${bindPath}:/var/task:z"`]; if (options.dockerSsh) { // Mount necessary ssh files to work with private repos cmdOptions.push( From c8e7475d79c9ce5acdcd526b37b367ef3b9e37c1 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 9 May 2018 09:55:32 -0400 Subject: [PATCH 008/328] v4.0.3 for deploy single function fixes --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 18fc2a7c..12249c73 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.0.2", + "version": "4.0.3", "engines": { "node": ">=6.0" }, From d4e199c6d26e33fed1dbd3c37dea3259ad4e327b Mon Sep 17 00:00:00 2001 From: Tomoaki Abe Date: Tue, 22 May 2018 04:29:24 +0900 Subject: [PATCH 009/328] Fix related to test (#193) * Add file to clean in test teardown * Because git is displayed in an untracked file * Fix package.json of test according to the current version * Because it was displayed in `git diff` --- test.bats | 3 ++- tests/base/package.json | 2 +- tests/individually/package.json | 2 +- tests/pipenv/package.json | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/test.bats b/test.bats index b4b6a237..3ce2b990 100755 --- a/test.bats +++ b/test.bats @@ -10,7 +10,8 @@ setup() { } teardown() { - rm -rf puck puck2 puck3 node_modules .serverless .requirements.zip .requirements-cache + rm -rf puck puck2 puck3 node_modules .serverless .requirements.zip .requirements-cache \ + foobar package-lock.json serverless-python-requirements-*.tgz if [ -f serverless.yml.bak ]; then mv serverless.yml.bak serverless.yml; fi } diff --git a/tests/base/package.json b/tests/base/package.json index 1130d4b8..73634414 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.2.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.0.3.tgz" } } diff --git a/tests/individually/package.json b/tests/individually/package.json index c53d13ee..73634414 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - 
"serverless-python-requirements": "file:serverless-python-requirements-4.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.0.3.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index c53d13ee..73634414 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.0.3.tgz" } } From a7769a3cb8a5949ff460e142024d4468e5fa2820 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 4 Jun 2018 09:28:53 -0400 Subject: [PATCH 010/328] add --keep-outdated to pipenv lock -r. fixes #195 --- lib/pipenv.js | 10 +++++++--- package.json | 2 +- tests/base/package.json | 2 +- tests/individually/package.json | 2 +- tests/pipenv/package.json | 2 +- 5 files changed, 11 insertions(+), 7 deletions(-) diff --git a/lib/pipenv.js b/lib/pipenv.js index 4fd82a90..f131620c 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -15,9 +15,13 @@ function pipfileToRequirements() { this.serverless.cli.log('Generating requirements.txt from Pipfile...'); - const res = spawnSync('pipenv', ['lock', '--requirements'], { - cwd: this.servicePath - }); + const res = spawnSync( + 'pipenv', + ['lock', '--requirements', '--keep-outdated'], + { + cwd: this.servicePath + } + ); if (res.error) { if (res.error.code === 'ENOENT') { throw new Error( diff --git a/package.json b/package.json index 12249c73..79ce6aad 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.0.3", + "version": "4.0.4", "engines": { "node": ">=6.0" }, diff --git a/tests/base/package.json b/tests/base/package.json index 73634414..f135b421 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.3.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.0.4.tgz" } } diff --git a/tests/individually/package.json b/tests/individually/package.json index 73634414..f135b421 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.3.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.0.4.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index 73634414..f135b421 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.3.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.0.4.tgz" } } From 5d8bbf89aa1e00c2e79521ceec480441326c6ca3 Mon Sep 17 00:00:00 2001 From: Milan Suk Date: Tue, 5 Jun 2018 15:57:19 +0200 Subject: [PATCH 011/328] README link typo (#196) Just a typo. 
:) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b3c8ab5a..a4e40d1d 100644 --- a/README.md +++ b/README.md @@ -224,7 +224,7 @@ custom: ## :apple::beer::snake: Mac Brew installed Python notes [Brew wilfully breaks the `--target` option with no seeming intention to fix it](https://github.com/Homebrew/brew/pull/821) which causes issues since this uses that option. There are a few easy workarounds for this: -* Install Python from [python.org](https://wwwpython.org/downloads/) and specify it with the +* Install Python from [python.org](https://www.python.org/downloads/) and specify it with the [`pythonBin` option](#customize-python-executable). OR From 4e0bb409d3b4e2cca6c0cb9c648d2bc7d27b7d83 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 11 Jun 2018 16:52:44 -0400 Subject: [PATCH 012/328] Fix / -> \ replacement for windows. Closes #200 --- lib/pip.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pip.js b/lib/pip.js index 9e724d07..d9cbee34 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -147,7 +147,7 @@ function installRequirements( */ function dockerPathForWin(options, path) { if (process.platform === 'win32' && options.dockerizePip) { - return path.replace('\\', '/'); + return path.replace(/\\/g, '/'); } return path; } From 5c5ff97637c1fce9dbd669fe558784897c30d550 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 13 Jun 2018 10:58:06 -0400 Subject: [PATCH 013/328] set parser for prettier so it works via stdin --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 79ce6aad..b191acf5 100644 --- a/package.json +++ b/package.json @@ -67,6 +67,7 @@ }, "prettier": { "semi": true, - "singleQuote": true + "singleQuote": true, + "parser": "babylon" } } From 56beaf5c89b28a0b32d266c41f75795221e0da07 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 14 Jun 2018 15:56:26 -0400 Subject: [PATCH 014/328] version bump for #200 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index b191acf5..79ffcef5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.0.4", + "version": "4.0.5", "engines": { "node": ">=6.0" }, From 5614ab0fee4fa81504d5fe130028996e3975e903 Mon Sep 17 00:00:00 2001 From: Dmitry Orlov Date: Fri, 15 Jun 2018 20:28:36 +0200 Subject: [PATCH 015/328] Reduce size of installed dependencies by stripping (#191) --- README.md | 25 ++++- index.js | 1 + lib/pip.js | 9 ++ lib/slim.js | 59 ++++++++++++ test.bats | 166 +++++++++++++++++++++++++++++++++ tests/base/_slimPatterns.yml | 2 + tests/base/serverless.yml | 4 + tests/pipenv/_slimPatterns.yml | 2 + tests/pipenv/serverless.yml | 4 + 9 files changed, 271 insertions(+), 1 deletion(-) create mode 100644 lib/slim.js create mode 100644 tests/base/_slimPatterns.yml create mode 100644 tests/pipenv/_slimPatterns.yml diff --git a/README.md b/README.md index a4e40d1d..8c9bf04f 100644 --- a/README.md +++ b/README.md @@ -95,7 +95,28 @@ try: except ImportError: pass ``` - +### Slim Package +_Works on non 'win32' environments: Docker, WSL are included_ +To remove the tests, information and caches from the installed packages, +enable the `slim` option. This will: `strip` the `.so` files, remove `__pycache__` +directories and `dist-info` directories. 
+```yaml +custom: + pythonRequirements: + slim: true +``` +#### Custom Removal Patterns +To specify additional directories to remove from the installed packages, +define the patterns using regex as a `slimPatterns` option in serverless config: +```yaml +custom: + pythonRequirements: + slim: true + slimPatterns: + - "*.egg-info*" +``` +This will remove all folders within the installed requirements that match +the names in `slimPatterns` ## Omitting Packages You can omit a package from deployment with the `noDeploy` option. Note that dependencies of omitted packages must explicitly be omitted too. @@ -266,3 +287,5 @@ For usage of `dockerizePip` on Windows do Step 1 only if running serverless on w * [@kichik](https://github.com/kichik) - Imposed windows & `noDeploy` support, switched to adding files straight to zip instead of creating symlinks, and improved pip chache support when using docker. + * [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option + diff --git a/index.js b/index.js index 33e6a3da..bff0994f 100644 --- a/index.js +++ b/index.js @@ -26,6 +26,7 @@ class ServerlessPythonRequirements { get options() { const options = Object.assign( { + slim: false, zip: false, cleanupZipHelper: true, invalidateCaches: false, diff --git a/lib/pip.js b/lib/pip.js index d9cbee34..2b061371 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -6,6 +6,7 @@ const set = require('lodash.set'); const { spawnSync } = require('child_process'); const values = require('lodash.values'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); +const { getSlimPackageCommands } = require('./slim'); /** * Install requirements described in requirementsPath to targetFolder @@ -122,6 +123,14 @@ function installRequirements( cmd = pipCmd[0]; cmdOptions = pipCmd.slice(1); } + + // If enabled slimming, strip out the caches, tests and dist-infos + if (options.slim === true || options.slim === 'true') { + const preparedPath = dockerPathForWin(options, targetRequirementsFolder); + const slimCmd = getSlimPackageCommands(options, preparedPath); + cmdOptions.push(...slimCmd); + } + const res = spawnSync(cmd, cmdOptions, { cwd: servicePath, shell: true }); if (res.error) { if (res.error.code === 'ENOENT') { diff --git a/lib/slim.js b/lib/slim.js new file mode 100644 index 00000000..e42a884f --- /dev/null +++ b/lib/slim.js @@ -0,0 +1,59 @@ +const isWsl = require('is-wsl'); + +/** + * Get commands to slim the installed requirements + * only for non-windows platforms: + * works for docker builds and when run on UNIX platforms (wsl included) + * @param {Object} options + * @param {string} folderPath + * @return {Array.} + */ +function getSlimPackageCommands(options, folderPath) { + let stripCmd = []; + + // Default stripping is done for non-windows environments + if (process.platform !== 'win32' || isWsl) { + stripCmd = getDefaultSLimOptions(folderPath); + + // If specified any custom patterns to remove + if (options.slimPatterns instanceof Array) { + // Add the custom specified patterns to remove to the default commands + const customPatterns = options.slimPatterns.map(pattern => { + return getRemovalCommand(folderPath, pattern); + }); + stripCmd = stripCmd.concat(customPatterns); + } + } + return stripCmd; +} + +/** + * Gets the commands to slim the default (safe) files: + * including removing caches, stripping compiled files, removing dist-infos + * @param {String} folderPath + * @return {Array} + */ +function getDefaultSLimOptions(folderPath) { + return [ + `&& find ${folderPath} 
-name "*.so" -exec strip {} \\;`, + `&& find ${folderPath} -name "*.py[c|o]" -exec rm -rf {} +`, + `&& find ${folderPath} -type d -name "__pycache__*" -exec rm -rf {} +`, + `&& find ${folderPath} -type d -name "*.dist-info*" -exec rm -rf {} +` + ]; +} + +/** + * Get the command created fromt he find and remove template: + * returns a string in form `&& find -name "" -exec rm -rf {} +` + * @param {String} folderPath + * @param {String} removalMatch + * @return {String} + */ +function getRemovalCommand(folderPath, removalMatch) { + return `&& find ${folderPath} -type d -name "${removalMatch}" -exec rm -rf {} +`; +} + +module.exports = { + getSlimPackageCommands, + getDefaultSLimOptions +}; diff --git a/test.bats b/test.bats index 3ce2b990..4501ba52 100755 --- a/test.bats +++ b/test.bats @@ -32,6 +32,27 @@ teardown() { ! ls puck/flask } +@test "py3.6 can package flask with slim options" { + cd tests/base + npm i $(npm pack ../..) + sls --slim=true package + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + test $(find puck -name "*.pyc" | wc -l) -eq 0 +} + +@test "py3.6 can package flask with slim & slimPatterns options" { + cd tests/base + mv _slimPatterns.yml slimPatterns.yml + npm i $(npm pack ../..) + sls --slim=true package + mv slimPatterns.yml _slimPatterns.yml + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + test $(find puck -name "*.pyc" | wc -l) -eq 0 + test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 +} + @test "py3.6 doesn't package boto3 by default" { cd tests/base npm i $(npm pack ../..) @@ -59,6 +80,15 @@ teardown() { ls puck/.requirements.zip puck/unzip_requirements.py } +@test "py3.6 can package flask with zip & slim & dockerizePip option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + sls --dockerizePip=true --zip=true --slim=true package + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/.requirements.zip puck/unzip_requirements.py +} + @test "py3.6 can package flask with dockerizePip option" { cd tests/base npm i $(npm pack ../..) @@ -68,6 +98,29 @@ teardown() { ls puck/flask } +@test "py3.6 can package flask with slim & dockerizePip option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + sls --dockerizePip=true --slim=true package + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + test $(find puck -name "*.pyc" | wc -l) -eq 0 +} + +@test "py3.6 can package flask with slim & dockerizePip & slimPatterns options" { + cd tests/base + mv _slimPatterns.yml slimPatterns.yml + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + sls --dockerizePip=true --slim=true package + mv slimPatterns.yml _slimPatterns.yml + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + test $(find puck -name "*.pyc" | wc -l) -eq 0 + test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 +} + @test "py3.6 uses cache with dockerizePip option" { cd tests/base npm i $(npm pack ../..) @@ -77,6 +130,17 @@ teardown() { ls .requirements-cache/http } +@test "py3.6 uses cache with dockerizePip & slim option" { + cd tests/base + npm i $(npm pack ../..) + ! 
uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n pipCmdExtraArgs: ["--cache-dir", ".requirements-cache"]/' serverless.yml + sls --dockerizePip=true --slim=true package + ls .requirements-cache/http + test $(find puck -name "*.pyc" | wc -l) -eq 0 +} + + @test "py2.7 can package flask with default options" { cd tests/base npm i $(npm pack ../..) @@ -85,6 +149,15 @@ teardown() { ls puck/flask } +@test "py2.7 can package flask with slim option" { + cd tests/base + npm i $(npm pack ../..) + sls --runtime=python2.7 --slim=true package + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + test $(find puck -name "*.pyc" | wc -l) -eq 0 +} + @test "py2.7 can package flask with zip option" { cd tests/base npm i $(npm pack ../..) @@ -93,6 +166,18 @@ teardown() { ls puck/.requirements.zip puck/unzip_requirements.py } +@test "py2.7 can package flask with slim & dockerizePip & slimPatterns options" { + cd tests/base + mv _slimPatterns.yml slimPatterns.yml + npm i $(npm pack ../..) + sls --runtime=python2.7 --slim=true packag + mv slimPatterns.yml _slimPatterns.yml + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + test $(find puck -name "*.pyc" | wc -l) -eq 0 + test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 +} + @test "py2.7 doesn't package boto3 by default" { cd tests/base npm i $(npm pack ../..) @@ -119,6 +204,15 @@ teardown() { ls puck/.requirements.zip puck/unzip_requirements.py } +@test "py2.7 can package flask with zip & slim & dockerizePip option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + sls --dockerizePip=true --runtime=python2.7 --zip=true --slim=true package + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/.requirements.zip puck/unzip_requirements.py +} + @test "py2.7 can package flask with dockerizePip option" { cd tests/base npm i $(npm pack ../..) @@ -128,6 +222,29 @@ teardown() { ls puck/flask } +@test "py2.7 can package flask with slim & dockerizePip option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + sls --dockerizePip=true --slim=true --runtime=python2.7 package + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + test $(find puck -name "*.pyc" | wc -l) -eq 0 +} + +@test "py2.7 can package flask with slim & dockerizePip & slimPatterns options" { + cd tests/base + mv _slimPatterns.yml slimPatterns.yml + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + sls --dockerizePip=true --slim=true --runtime=python2.7 package + mv slimPatterns.yml _slimPatterns.yml + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + test $(find puck -name "*.pyc" | wc -l) -eq 0 + test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 +} + @test "pipenv py3.6 can package flask with default options" { cd tests/pipenv npm i $(npm pack ../..) @@ -136,6 +253,27 @@ teardown() { ls puck/flask } +@test "pipenv py3.6 can package flask with slim option" { + cd tests/pipenv + npm i $(npm pack ../..) 
+ sls --slim=true package + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + test $(find puck -name "*.pyc" | wc -l) -eq 0 +} + +@test "pipenv py3.6 can package flask with slim & slimPatterns option" { + cd tests/pipenv + npm i $(npm pack ../..) + mv _slimPatterns.yml slimPatterns.yml + sls --slim=true package + mv slimPatterns.yml _slimPatterns.yml + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask + test $(find puck -name "*.pyc" | wc -l) -eq 0 + test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 +} + @test "pipenv py3.6 can package flask with zip option" { cd tests/pipenv npm i $(npm pack ../..) @@ -182,6 +320,20 @@ teardown() { ! ls puck3/flask } +@test "py3.6 can package flask with package individually & slim option" { + cd tests/base + npm i $(npm pack ../..) + sls --individually=true --slim=true package + unzip .serverless/hello.zip -d puck + unzip .serverless/hello2.zip -d puck2 + unzip .serverless/hello3.zip -d puck3 + ls puck/flask + ls puck2/flask + ! ls puck3/flask + test $(find "puck*" -name "*.pyc" | wc -l) -eq 0 +} + + @test "py2.7 can package flask with package individually option" { cd tests/base npm i $(npm pack ../..) @@ -194,6 +346,20 @@ teardown() { ! ls puck3/flask } +@test "py2.7 can package flask with package individually & slim option" { + cd tests/base + npm i $(npm pack ../..) + sls --individually=true --slim=true --runtime=python2.7 package + unzip .serverless/hello.zip -d puck + unzip .serverless/hello2.zip -d puck2 + unzip .serverless/hello3.zip -d puck3 + ls puck/flask + ls puck2/flask + ! ls puck3/flask + test $(find puck* -name "*.pyc" | wc -l) -eq 0 +} + + @test "py3.6 can package only requirements of module" { cd tests/individually npm i $(npm pack ../..) diff --git a/tests/base/_slimPatterns.yml b/tests/base/_slimPatterns.yml new file mode 100644 index 00000000..ffc3c134 --- /dev/null +++ b/tests/base/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - "*.egg-info*" \ No newline at end of file diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index ea22c46e..7c864714 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -10,8 +10,12 @@ custom: pythonRequirements: zip: ${opt:zip, self:custom.defaults.zip} dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + slim: ${opt:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} vendor: ${opt:vendor, ''} defaults: + slim: false + slimPatterns: false zip: false dockerizePip: false individually: false diff --git a/tests/pipenv/_slimPatterns.yml b/tests/pipenv/_slimPatterns.yml new file mode 100644 index 00000000..ffc3c134 --- /dev/null +++ b/tests/pipenv/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - "*.egg-info*" \ No newline at end of file diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index b8ebb38b..feb7f9de 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -9,9 +9,13 @@ plugins: custom: pythonRequirements: zip: ${opt:zip, self:custom.defaults.zip} + slim: ${opt:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} defaults: zip: false + slimPatterns: false + slim: false dockerizePip: false package: From d88b0fcaa47ce509508ef79f9976b9d4c9af0d33 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 18 Jun 2018 14:34:35 -0400 Subject: [PATCH 016/328] 
version bump for `slim`! --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 79ffcef5..a925b742 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.0.5", + "version": "4.1.0", "engines": { "node": ">=6.0" }, From c4901fe7567c35e792baf67ed21e9912cfa212e5 Mon Sep 17 00:00:00 2001 From: Tomoaki Abe Date: Tue, 19 Jun 2018 04:47:15 +0900 Subject: [PATCH 017/328] Improve function runtime check (#202) - Add function to check functionObj.runtime - Add brace to if statement --- index.js | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/index.js b/index.js index bff0994f..6e55fdb3 100644 --- a/index.js +++ b/index.js @@ -109,13 +109,18 @@ class ServerlessPythonRequirements { } }; + const isFunctionRuntimePython = args => { + // If functionObj.runtime is undefined, python. + if (!args[1].functionObj || !args[1].functionObj.runtime) { + return true; + } + return args[1].functionObj.runtime.startsWith('python'); + }; + const before = () => { - if ( - arguments[1].functionObj && - arguments[1].functionObj.runtime && - !arguments[1].functionObj.runtime.startsWith('python') - ) + if (!isFunctionRuntimePython(arguments)) { return; + } return BbPromise.bind(this) .then(pipfileToRequirements) .then(addVendorHelper) @@ -124,12 +129,9 @@ class ServerlessPythonRequirements { }; const after = () => { - if ( - arguments[1].functionObj && - arguments[1].functionObj.runtime && - !arguments[1].functionObj.runtime.startsWith('python') - ) + if (!isFunctionRuntimePython(arguments)) { return; + } return BbPromise.bind(this) .then(removeVendorHelper) .then(() => From 71f36e7e938966a6758522c8f73e3994b4e5d11e Mon Sep 17 00:00:00 2001 From: Amir Szekely Date: Wed, 20 Jun 2018 17:57:29 -0700 Subject: [PATCH 018/328] Reorder `bindPaths` for faster detection (#211) --- lib/docker.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/docker.js b/lib/docker.js index 4b914942..ddfababc 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -92,11 +92,12 @@ function getBindPath(servicePath) { throw new Error(`Unknown path format ${baseBindPath.substr(10)}...`); } - bindPaths.push(`/${drive.toLowerCase()}/${path}`); + bindPaths.push(`/${drive.toLowerCase()}/${path}`); // Docker Toolbox (seems like Docker for Windows can support this too) + bindPaths.push(`${drive.toLowerCase()}:/${path}`); // Docker for Windows + // other options just in case bindPaths.push(`/${drive.toUpperCase()}/${path}`); bindPaths.push(`/mnt/${drive.toLowerCase()}/${path}`); bindPaths.push(`/mnt/${drive.toUpperCase()}/${path}`); - bindPaths.push(`${drive.toLowerCase()}:/${path}`); bindPaths.push(`${drive.toUpperCase()}:/${path}`); for (let i = 0; i < bindPaths.length; i++) { From 37adf9e7f481f2dc9733f8ec0ceea418bf1b61ce Mon Sep 17 00:00:00 2001 From: Jongbin Park Date: Tue, 10 Jul 2018 23:57:28 +0900 Subject: [PATCH 019/328] Properly supporting `sls deploy function` command and zip option (#217) This PR fixes two issues I encountered while using `individually: true` option. **1. Should package only the given function when invoked by `sls deploy function --function $FN_NAME`** Suppose using `individually: true`. If you'd like to deploy a single function using `sls deploy function` command you'd expect the plugin to package only the module that contains the function specified. 
The current implementation, however, runs packaging for every module in `serverless.yml` definition, taking as much time as deploying all applications just to deploy a single function. I simply added explicit `targetFuncs` property to filter target function if any specified in command line option. **2. `moveModulesUp` should be called regardless of `zip` option** When `individually: true`, each deployed function has its module as a root directory. This is achieved by calling `moveModulesUp`, but this method call is ignored when `zip` option is enabled. This certainly seems to be a wrong implementation (causing #203). If `injectModule` is ensured not to be called when `zip: true`, it is safe to call `moveModulesUp` whether `zip` is enabled or not. The fix seems to work well on my case, but it lacks new unit tests (I couldn't figure out how to compose one.) If you let me known how to add a test, I'll update the PR as soon as I can. --- index.js | 8 ++++++++ lib/clean.js | 3 +-- lib/inject.js | 25 +++++++++++-------------- lib/pip.js | 3 +-- lib/zip.js | 7 +++---- 5 files changed, 24 insertions(+), 22 deletions(-) diff --git a/index.js b/index.js index 6e55fdb3..1857b239 100644 --- a/index.js +++ b/index.js @@ -3,6 +3,7 @@ const BbPromise = require('bluebird'); const fse = require('fs-extra'); +const values = require('lodash.values'); const { addVendorHelper, removeVendorHelper, @@ -83,6 +84,13 @@ class ServerlessPythonRequirements { return options; } + get targetFuncs() { + let inputOpt = this.serverless.processedInput.options; + return inputOpt.function + ? [inputOpt.functionObj] + : values(this.serverless.service.functions); + } + /** * The plugin constructor * @param {Object} serverless diff --git a/lib/clean.js b/lib/clean.js index f3c4fbef..332ceb37 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -1,7 +1,6 @@ const BbPromise = require('bluebird'); const fse = require('fs-extra'); const path = require('path'); -const values = require('lodash.values'); BbPromise.promisifyAll(fse); @@ -13,7 +12,7 @@ function cleanup() { const artifacts = ['.requirements']; if (this.options.zip) { if (this.serverless.service.package.individually) { - values(this.serverless.service.functions).forEach(f => { + this.targetFuncs.forEach(f => { artifacts.push(path.join(f.module, '.requirements.zip')); artifacts.push(path.join(f.module, 'unzip_requirements.py')); }); diff --git a/lib/inject.js b/lib/inject.js index 337813a3..973ba99b 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -4,7 +4,6 @@ const glob = require('glob-all'); const get = require('lodash.get'); const set = require('lodash.set'); const path = require('path'); -const values = require('lodash.values'); const JSZip = require('jszip'); const { writeZip, zipFile } = require('./zipTree'); @@ -75,12 +74,8 @@ function moveModuleUp(source, target, module) { function injectAllRequirements(funcArtifact) { this.serverless.cli.log('Injecting required Python packages to package...'); - if (this.options.zip) { - return; - } - if (this.serverless.service.package.individually) { - return BbPromise.resolve(values(this.serverless.service.functions)) + return BbPromise.resolve(this.targetFuncs) .filter(func => (func.runtime || this.serverless.service.provider.runtime).match( /^python.*/ @@ -107,14 +102,16 @@ function injectAllRequirements(funcArtifact) { return func; } }) - .map(func => - injectRequirements( - path.join('.serverless', func.module, 'requirements'), - func.package.artifact, - this.options - ) - ); - } else { + .map(func => { + return this.options.zip 
+ ? func + : injectRequirements( + path.join('.serverless', func.module, 'requirements'), + func.package.artifact, + this.options + ); + }); + } else if (!this.options.zip) { return injectRequirements( path.join('.serverless', 'requirements'), this.serverless.service.package.artifact || funcArtifact, diff --git a/lib/pip.js b/lib/pip.js index 2b061371..7d555b31 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -4,7 +4,6 @@ const path = require('path'); const get = require('lodash.get'); const set = require('lodash.set'); const { spawnSync } = require('child_process'); -const values = require('lodash.values'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); const { getSlimPackageCommands } = require('./slim'); @@ -210,7 +209,7 @@ function installAllRequirements() { fse.ensureDirSync(path.join(this.servicePath, '.serverless')); if (this.serverless.service.package.individually) { let doneModules = []; - values(this.serverless.service.functions) + this.targetFuncs .filter(func => (func.runtime || this.serverless.service.provider.runtime).match( /^python.*/ diff --git a/lib/zip.js b/lib/zip.js index eba21976..1139d0d9 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -2,7 +2,6 @@ const fse = require('fs-extra'); const path = require('path'); const get = require('lodash.get'); const set = require('lodash.set'); -const values = require('lodash.values'); const uniqBy = require('lodash.uniqby'); const BbPromise = require('bluebird'); const JSZip = require('jszip'); @@ -17,7 +16,7 @@ BbPromise.promisifyAll(fse); function addVendorHelper() { if (this.options.zip) { if (this.serverless.service.package.individually) { - return BbPromise.resolve(values(this.serverless.service.functions)) + return BbPromise.resolve(this.targetFuncs) .map(f => { if (!get(f, 'package.include')) { set(f, ['package', 'include'], []); @@ -63,7 +62,7 @@ function addVendorHelper() { function removeVendorHelper() { if (this.options.zip && this.options.cleanupZipHelper) { if (this.serverless.service.package.individually) { - return BbPromise.resolve(values(this.serverless.service.functions)) + return BbPromise.resolve(this.targetFuncs) .map(f => { if (!get(f, 'module')) { set(f, ['module'], '.'); @@ -95,7 +94,7 @@ function removeVendorHelper() { function packRequirements() { if (this.options.zip) { if (this.serverless.service.package.individually) { - return BbPromise.resolve(values(this.serverless.service.functions)) + return BbPromise.resolve(this.targetFuncs) .map(f => { if (!get(f, 'module')) { set(f, ['module'], '.'); From fa12ab4ada86efbb701b6cec2776b16ad8fae70a Mon Sep 17 00:00:00 2001 From: Amir Szekely Date: Mon, 16 Jul 2018 06:10:41 -0700 Subject: [PATCH 020/328] Support serverless.yml and serverless.yaml in getBindPath() (#213) This should fix the issue reported by @mrpgraae in #210. I've also added support for `SLS_DEBUG` so we can more easily get some information on future errors. 
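For reference, the log lines added here only appear when the Serverless Framework debug flag is exported, e.g. `SLS_DEBUG=* sls package --dockerizePip=true`. The probe can also be reproduced by hand outside the plugin; the following is a minimal standalone sketch of what `tryBindPath()` does, assuming Docker is on the PATH and the working directory contains a `serverless.yml` (both assumptions, not part of this patch):

```js
// Standalone sketch (assumes: docker on PATH, ./serverless.yml exists).
// Mount the candidate path into a throwaway Alpine container and check
// whether the service file is visible from inside it.
const { spawnSync } = require('child_process');

const bindPath = process.cwd(); // candidate path, e.g. an /mnt/c/... variant on WSL
const ps = spawnSync(
  'docker',
  ['run', '--rm', '-v', `${bindPath}:/test`, 'alpine', 'ls', '/test/serverless.yml'],
  { encoding: 'utf-8' }
);

console.log(
  ps.stdout && ps.stdout.trim() === '/test/serverless.yml'
    ? `bind path visible to Docker: ${bindPath}`
    : `bind path NOT visible to Docker: ${bindPath}`
);
```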
--- lib/docker.js | 39 ++++++++++++++++++++++++++++++++++----- lib/pip.js | 2 +- 2 files changed, 35 insertions(+), 6 deletions(-) diff --git a/lib/docker.js b/lib/docker.js index ddfababc..26cbf6de 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -1,5 +1,7 @@ const { spawnSync } = require('child_process'); const isWsl = require('is-wsl'); +const fse = require('fs-extra'); +const path = require('path'); /** * Helper function to run a docker command @@ -32,12 +34,32 @@ function buildImage(dockerFile) { return imageName; } +/** + * Find a file that exists on all projects so we can test if Docker can see it too + * @param {string} servicePath + * @return {string} file name + */ +function findTestFile(servicePath) { + if (fse.pathExistsSync(path.join(servicePath, 'serverless.yml'))) { + return 'serverless.yml'; + } + if (fse.pathExistsSync(path.join(servicePath, 'serverless.yaml'))) { + return 'serverless.yaml'; + } + if (fse.pathExistsSync(path.join(servicePath, 'serverless.json'))) { + return 'serverless.json'; + } + throw new Error( + 'Unable to find serverless.yml or serverless.yaml or serverless.json for getBindPath()' + ); +} + /** * Test bind path to make sure it's working * @param {string} bindPath * @return {boolean} */ -function tryBindPath(bindPath) { +function tryBindPath(serverless, bindPath, testFile) { const options = [ 'run', '--rm', @@ -45,11 +67,15 @@ function tryBindPath(bindPath) { `${bindPath}:/test`, 'alpine', 'ls', - '/test/serverless.yml' + `/test/${testFile}` ]; try { const ps = dockerCommand(options); - return ps.stdout.trim() === '/test/serverless.yml'; + if (process.env.SLS_DEBUG) { + serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); + serverless.cli.log(ps.stdout.trim()); + } + return ps.stdout.trim() === `/test/${testFile}`; } catch (err) { return false; } @@ -57,10 +83,11 @@ function tryBindPath(bindPath) { /** * Get bind path depending on os platform + * @param {object} serverless * @param {string} servicePath * @return {string} The bind path. */ -function getBindPath(servicePath) { +function getBindPath(serverless, servicePath) { // Determine bind path if (process.platform !== 'win32' && !isWsl) { return servicePath; @@ -100,9 +127,11 @@ function getBindPath(servicePath) { bindPaths.push(`/mnt/${drive.toUpperCase()}/${path}`); bindPaths.push(`${drive.toUpperCase()}:/${path}`); + const testFile = findTestFile(servicePath); + for (let i = 0; i < bindPaths.length; i++) { const bindPath = bindPaths[i]; - if (tryBindPath(bindPath)) { + if (tryBindPath(serverless, bindPath, testFile)) { return bindPath; } } diff --git a/lib/pip.js b/lib/pip.js index 7d555b31..f339d364 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -87,7 +87,7 @@ function installRequirements( serverless.cli.log(`Docker Image: ${dockerImage}`); // Prepare bind path depending on os platform - const bindPath = getBindPath(servicePath); + const bindPath = getBindPath(serverless, servicePath); cmdOptions = ['run', '--rm', '-v', `"${bindPath}:/var/task:z"`]; if (options.dockerSsh) { From 19960e964ad21317897df6969cd459c37fc5ee5d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" Date: Tue, 17 Jul 2018 10:11:09 -0400 Subject: [PATCH 021/328] Update fs-extra requirement to ^7.0.0 (#219) Updates the requirements on [fs-extra](https://github.com/jprichardson/node-fs-extra) to permit the latest version.
Changelog *Sourced from [fs-extra's changelog](https://github.com/jprichardson/node-fs-extra/blob/master/CHANGELOG.md).* > 7.0.0 / 2018-07-16 > ------------------ > > - **BREAKING:** Refine `copy*()` handling of symlinks to properly detect symlinks that point to the same file. ([#582](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/582)) > - Fix bug with copying write-protected directories ([#600](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/600)) > - Universalify `fs.lchmod()` ([#596](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/596)) > - Add `engines` field to `package.json` ([#580](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/580)) > > 6.0.1 / 2018-05-09 > ------------------ > > - Fix `fs.promises` `ExperimentalWarning` on Node v10.1.0 ([#578](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/578)) > > 6.0.0 / 2018-05-01 > ------------------ > > - Drop support for Node.js versions 4, 5, & 7 ([#564](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/564)) > - Rewrite `move` to use `fs.rename` where possible ([#549](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/549)) > - Don't convert relative paths to absolute paths for `filter` ([#554](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/554)) > - `copy*`'s behavior when `preserveTimestamps` is `false` has been OS-dependent since 5.0.0, but that's now explicitly noted in the docs ([#563](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/563)) > - Fix subdirectory detection for `copy*` & `move*` ([#541](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/541)) > - Handle case-insensitive paths correctly in `copy*` ([#568](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/568)) > > 5.0.0 / 2017-12-11 > ------------------ > > Significant refactor of `copy()` & `copySync()`, including breaking changes. No changes to other functions in this release. > > Huge thanks to **[[**manidlou**](https://github.com/manidlou)](https://github.com/manidlou)** for doing most of the work on this release. > > - The `filter` option can no longer be a RegExp (must be a function). This was deprecated since fs-extra v1.0.0. [#512](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/512) > - `copy()`'s `filter` option can now be a function that returns a Promise. [#518](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/518) > - `copy()` & `copySync()` now use `fs.copyFile()`/`fs.copyFileSync()` in environments that support it (currently Node 8.5.0+). Older Node versions still get the old implementation. [#505](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/505) > - Don't allow copying a directory into itself. [#83](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/issues/83) > - Handle copying between identical files. [#198](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/issues/198) > - Error out when copying an empty folder to a path that already exists. [#464](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/issues/464) > - Don't create `dest`'s parent if the `filter` function aborts the `copy()` operation. [#517](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/517) > - Fix `writeStream` not being closed if there was an error in `copy()`. 
[#516](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/516) > > 4.0.3 / 2017-12-05 > ------------------ > > - Fix wrong `chmod` values in `fs.remove()` [#501](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/501) > - Fix `TypeError` on systems that don't have some `fs` operations like `lchown` [#520](https://github-redirect.dependabot.com/jprichardson/node-fs-extra/pull/520) > > 4.0.2 / 2017-09-12 > ------------------ > > - Added `EOL` option to `writeJson*` & `outputJson*` (via upgrade to jsonfile v4) > - Added promise support to [`fs.copyFile()`](https://nodejs.org/api/fs.html#fs_fs_copyfile_src_dest_flags_callback) in Node 8.5+ > ... (truncated)
Commits - See full diff in [compare view](https://github.com/jprichardson/node-fs-extra/commits/7.0.0)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot ignore this [patch|minor|major] version` will close this PR and stop Dependabot creating any more for this minor/major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) - `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language - `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language - `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language - `@dependabot badge me` will comment on this PR with code to add a "Dependabot enabled" badge to your readme Additionally, you can set the following in your Dependabot [dashboard](https://app.dependabot.com): - Update frequency (including time of day and day of week) - Automerge options (never/patch/minor, and dev/runtime dependencies) - Pull request limits (per update run and/or open at any time) - Out-of-range updates (receive only lockfile updates, if desired) - Security updates (receive only security updates, if desired) Finally, you can contact us by mentioning @dependabot.
--- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index a925b742..2d8dc5d1 100644 --- a/package.json +++ b/package.json @@ -48,7 +48,7 @@ }, "dependencies": { "bluebird": "^3.0.6", - "fs-extra": "^6.0.0", + "fs-extra": "^7.0.0", "glob-all": "^3.1.0", "is-wsl": "^1.1.0", "jszip": "^3.1.0", From 45e180b00fc6b266c2e7172ee76849d2456a43d0 Mon Sep 17 00:00:00 2001 From: benjipott Date: Wed, 18 Jul 2018 19:31:21 +0200 Subject: [PATCH 022/328] Update slim.js (#220) Update slim option to match regex folder like "*tensorflo/core" --- lib/slim.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/slim.js b/lib/slim.js index e42a884f..2eaf890f 100644 --- a/lib/slim.js +++ b/lib/slim.js @@ -50,7 +50,7 @@ function getDefaultSLimOptions(folderPath) { * @return {String} */ function getRemovalCommand(folderPath, removalMatch) { - return `&& find ${folderPath} -type d -name "${removalMatch}" -exec rm -rf {} +`; + return `&& find ${folderPath} -type d -wholename "${removalMatch}" -exec rm -rf {} +`; } module.exports = { From 825cf614bb0dd1658087bf1600e239b048ee7f1a Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 18 Jul 2018 13:37:25 -0400 Subject: [PATCH 023/328] Version 4.1.1 * improve function runtime check for mixed runtime services * reorder docker bindPath detection in windows to improve performance * fix `sls deploy function` when using zip option and individual packaging * support `serverless.yaml` and `serverless.json` in addition to `serverless.yml` in getBindPath on windows * update fs-extra dep to v7 * match whole path (instead of filename only) expressions when using `strip` --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2d8dc5d1..5487c04c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.1.0", + "version": "4.1.1", "engines": { "node": ">=6.0" }, From ce04ef595a5b431c5ac3b90334f43565f44793f0 Mon Sep 17 00:00:00 2001 From: GyuYong Jung Date: Sun, 29 Jul 2018 00:25:11 +0900 Subject: [PATCH 024/328] Add description for command (#221) When typing `sls`: * As is ![](http://i64.tinypic.com/2i7ofae.png) * To be ![](http://i67.tinypic.com/1qrhqo.png) When typing `sls requirements`: * As is _nothing_ * To be ![](http://i66.tinypic.com/1gq9ty.png) --- index.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/index.js b/index.js index 1857b239..60ac9f59 100644 --- a/index.js +++ b/index.js @@ -104,6 +104,8 @@ class ServerlessPythonRequirements { this.commands = { requirements: { + usage: 'Serverless plugin to bundle Python packages', + lifecycleEvents: ['requirements'], commands: { clean: { usage: 'Remove .requirements and requirements.zip', From 3e0980605be8f0e72bc49792907868ed7f7ef410 Mon Sep 17 00:00:00 2001 From: Christopher Zorn Date: Wed, 8 Aug 2018 12:04:13 -0700 Subject: [PATCH 025/328] When using SLS_DEBUG environment variable output pip requirements command (#222) closes #169 --- lib/pip.js | 7 +++++-- 1 file changed, 5 insertions(+),
2 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index f339d364..826e52dc 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -129,8 +129,11 @@ function installRequirements( const slimCmd = getSlimPackageCommands(options, preparedPath); cmdOptions.push(...slimCmd); } - - const res = spawnSync(cmd, cmdOptions, { cwd: servicePath, shell: true }); + let spawnArgs = { cwd: servicePath, shell: true }; + if (process.env.SLS_DEBUG) { + spawnArgs.stdio = 'inherit'; + } + const res = spawnSync(cmd, cmdOptions, spawnArgs); if (res.error) { if (res.error.code === 'ENOENT') { if (options.dockerizePip) { From 69031d807ac5b7aea5eec8dff14652b0f25bdfb8 Mon Sep 17 00:00:00 2001 From: Alex Jurkiewicz Date: Tue, 14 Aug 2018 12:12:42 +1000 Subject: [PATCH 026/328] Add custom Dockerfile workflow example (#228) As per #101. --- README.md | 51 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/README.md b/README.md index 8c9bf04f..d1414de4 100644 --- a/README.md +++ b/README.md @@ -266,6 +266,57 @@ For usage of `dockerizePip` on Windows do Step 1 only if running serverless on w 1. [Installing the Docker client on Windows Subsystem for Linux (Ubuntu)](https://medium.com/@sebagomez/installing-the-docker-client-on-ubuntus-windows-subsystem-for-linux-612b392a44c4) +## Native Code Dependencies During Build + +Some Python packages require extra OS dependencies to build successfully. To deal with this, replace the default image (`lambci/lambda:python3.6`) with a `Dockerfile` like: + +```dockerfile +# AWS Lambda execution environment is based on Amazon Linux 1 +FROM amazonlinux:1 + +# Install Python 3.6 +RUN yum -y install python36 python36-pip + +# Install your dependencies +RUN curl -s https://bootstrap.pypa.io/get-pip.py | python3 +RUN yum -y install python3-devel mysql-devel gcc + +# Set the same WORKDIR as default image +RUN mkdir /var/task +WORKDIR /var/task +``` + +Then update your `serverless.yml`: + +```yaml +custom: + pythonRequirements: + dockerFile: Dockerfile +``` + +## Native Code Dependencies During Runtime + +Some Python packages require extra OS libraries (`*.so` files) at runtime. You need to manually include these files in the root directory of your Serverless package. The simplest way to do this is to commit the files to your repository: + +For instance, the `mysqlclient` package requires `libmysqlclient.so.1020`. If you use the Dockerfile from the previous section, you can extract this file from the builder Dockerfile: + +1. Extract the library: +```bash +docker run --rm -v "$(pwd):/var/task" sls-py-reqs-custom cp -v /usr/lib64/mysql57/libmysqlclient.so.1020 . +``` +(If you get the error `Unable to find image 'sls-py-reqs-custom:latest' locally`, run `sls package` to build the image.) +2. Commit to your repo: +```bash +git add libmysqlclient.so.1020 +git commit -m "Add libmysqlclient.so.1020" +``` +3. Verify the library gets included in your package: +```bash +sls package +zipinfo .serverless/xxx.zip +``` +(If you can't see the library, you might need to adjust your package include/exclude configuration in `serverless.yml`.) 
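If you do need to adjust that configuration, a minimal sketch could look like the following (the exact file name and patterns are assumptions that depend on your project):

```yaml
# Hypothetical serverless.yml fragment: force the extracted library into the
# deployment package even if broader exclude rules would otherwise drop it.
package:
  include:
    - libmysqlclient.so.1020
```

In Serverless v1, `include` entries are applied after `exclude`, so this should keep the library in the artifact regardless of your other exclude patterns.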
+ ## Contributors * [@dschep](https://github.com/dschep) - Lead developer & maintainer * [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes From d00d095406cfe5845ada740efe03a3dca0190264 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 13 Aug 2018 22:14:31 -0400 Subject: [PATCH 027/328] Update README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index d1414de4..56e7c71a 100644 --- a/README.md +++ b/README.md @@ -339,4 +339,5 @@ zipinfo .serverless/xxx.zip switched to adding files straight to zip instead of creating symlinks, and improved pip cache support when using docker. * [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option + * [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) From a7232c47fd6362a18055578da8fec149eb22662c Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Tue, 28 Aug 2018 09:06:49 -0400 Subject: [PATCH 028/328] clean up slim code using glob-all to make it more cross platform (#227) * basic cleanup around slim option * clean up slim code using glob-all to make it more cross platform * fix use of Array.concat * try omitting \\ in strip command. might need to differ command for windows * prettier * use + with -exec for strip bc strip supports multiple files and + doesn't need to be escaped * doh prettier again * damn... find in docker on windows is finicky * Update README.md --- README.md | 6 ++- index.js | 1 + lib/pip.js | 17 ++++----- lib/slim.js | 68 +++++++------------------------- tests/base/_slimPatterns.yml | 2 +- tests/pipenv/_slimPatterns.yml | 2 +- 6 files changed, 31 insertions(+), 65 deletions(-) diff --git a/README.md b/README.md index 56e7c71a..93032ac5 100644 --- a/README.md +++ b/README.md @@ -107,13 +107,15 @@ custom: ``` #### Custom Removal Patterns To specify additional directories to remove from the installed packages, -define the patterns using regex as a `slimPatterns` option in serverless config: +define a list of patterns in the serverless config using the `slimPatterns` +option and glob syntax. Note that it matches against whole paths, so to match a file in any +directory, start your pattern with `**/`.
```yaml custom: pythonRequirements: slim: true slimPatterns: - - "*.egg-info*" + - "**/*.egg-info*" ``` This will remove all folders within the installed requirements that match the names in `slimPatterns` diff --git a/index.js b/index.js index 60ac9f59..f64c0680 100644 --- a/index.js +++ b/index.js @@ -28,6 +28,7 @@ class ServerlessPythonRequirements { const options = Object.assign( { slim: false, + slimPatterns: false, zip: false, cleanupZipHelper: true, invalidateCaches: false, diff --git a/lib/pip.js b/lib/pip.js index 826e52dc..4e5e24e1 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -5,7 +5,7 @@ const get = require('lodash.get'); const set = require('lodash.set'); const { spawnSync } = require('child_process'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); -const { getSlimPackageCommands } = require('./slim'); +const { getStripCommand, deleteFiles } = require('./slim'); /** * Install requirements described in requirementsPath to targetFolder @@ -106,12 +106,6 @@ function installRequirements( if (process.platform === 'linux') { // Use same user so requirements folder is not root and so --cache-dir works cmdOptions.push('-u', `${process.getuid()}`); - // const stripCmd = quote([ - // 'find', targetRequirementsFolder, - // '-name', '"*.so"', - // '-exec', 'strip', '{}', '\;', - // ]); - // pipCmd = ['/bin/bash', '-c', '"' + pipCmd + ' && ' + stripCmd + ' && ' + chownCmd + '"']; } else { // Use same user so --cache-dir works cmdOptions.push('-u', getDockerUid(bindPath)); @@ -123,11 +117,10 @@ function installRequirements( cmdOptions = pipCmd.slice(1); } - // If enabled slimming, strip out the caches, tests and dist-infos + // If enabled slimming, strip so files if (options.slim === true || options.slim === 'true') { const preparedPath = dockerPathForWin(options, targetRequirementsFolder); - const slimCmd = getSlimPackageCommands(options, preparedPath); - cmdOptions.push(...slimCmd); + cmdOptions.push(getStripCommand(options, preparedPath)); } let spawnArgs = { cwd: servicePath, shell: true }; if (process.env.SLS_DEBUG) { @@ -148,6 +141,10 @@ function installRequirements( if (res.status !== 0) { throw new Error(res.stderr); } + // If enabled slimming, delete files in slimPatterns + if (options.slim === true || options.slim === 'true') { + deleteFiles(options, targetRequirementsFolder); + } } /** diff --git a/lib/slim.js b/lib/slim.js index 2eaf890f..ede926ce 100644 --- a/lib/slim.js +++ b/lib/slim.js @@ -1,59 +1,25 @@ const isWsl = require('is-wsl'); +const glob = require('glob-all'); +const fse = require('fs-extra'); -/** - * Get commands to slim the installed requirements - * only for non-windows platforms: - * works for docker builds and when run on UNIX platforms (wsl included) - * @param {Object} options - * @param {string} folderPath - * @return {Array.} - */ -function getSlimPackageCommands(options, folderPath) { - let stripCmd = []; +const getStripCommand = (options, folderPath) => + process.platform !== 'win32' || isWsl || options.dockerizePip + ? 
` && find ${folderPath} -name "*.so" -exec strip {} ';'` + : ''; - // Default stripping is done for non-windows environments - if (process.platform !== 'win32' || isWsl) { - stripCmd = getDefaultSLimOptions(folderPath); - - // If specified any custom patterns to remove - if (options.slimPatterns instanceof Array) { - // Add the custom specified patterns to remove to the default commands - const customPatterns = options.slimPatterns.map(pattern => { - return getRemovalCommand(folderPath, pattern); - }); - stripCmd = stripCmd.concat(customPatterns); +const deleteFiles = (options, folderPath) => { + let patterns = ['**/*.py[c|o]', '**/__pycache__*', '**/*.dist-info*']; + if (options.slimPatterns) { + patterns = patterns.concat(options.slimPatterns); + } + for (const pattern of patterns) { + for (const file of glob.sync(`${folderPath}/${pattern}`)) { + fse.removeSync(file); } } - return stripCmd; -} - -/** - * Gets the commands to slim the default (safe) files: - * including removing caches, stripping compiled files, removing dist-infos - * @param {String} folderPath - * @return {Array} - */ -function getDefaultSLimOptions(folderPath) { - return [ - `&& find ${folderPath} -name "*.so" -exec strip {} \\;`, - `&& find ${folderPath} -name "*.py[c|o]" -exec rm -rf {} +`, - `&& find ${folderPath} -type d -name "__pycache__*" -exec rm -rf {} +`, - `&& find ${folderPath} -type d -name "*.dist-info*" -exec rm -rf {} +` - ]; -} - -/** - * Get the command created fromt he find and remove template: - * returns a string in form `&& find -name "" -exec rm -rf {} +` - * @param {String} folderPath - * @param {String} removalMatch - * @return {String} - */ -function getRemovalCommand(folderPath, removalMatch) { - return `&& find ${folderPath} -type d -wholename "${removalMatch}" -exec rm -rf {} +`; -} +}; module.exports = { - getSlimPackageCommands, - getDefaultSLimOptions + getStripCommand, + deleteFiles }; diff --git a/tests/base/_slimPatterns.yml b/tests/base/_slimPatterns.yml index ffc3c134..70f863cc 100644 --- a/tests/base/_slimPatterns.yml +++ b/tests/base/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "*.egg-info*" \ No newline at end of file + - "**/*.egg-info*" diff --git a/tests/pipenv/_slimPatterns.yml b/tests/pipenv/_slimPatterns.yml index ffc3c134..70f863cc 100644 --- a/tests/pipenv/_slimPatterns.yml +++ b/tests/pipenv/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "*.egg-info*" \ No newline at end of file + - "**/*.egg-info*" From 401ec2bb8048746c0c5c364b0377675e194472e3 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 30 Aug 2018 20:55:27 -0400 Subject: [PATCH 029/328] Update README.md --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 93032ac5..7fd5bffe 100644 --- a/README.md +++ b/README.md @@ -19,6 +19,9 @@ sls plugin install -n serverless-python-requirements [:apple::beer::snake: Mac Brew installed Python notes](#applebeersnake-mac-brew-installed-python-notes) +## Intro Guide +For an introduction on how to user this plugin, check out [this post on the Serverless Blog](https://serverless.com/blog/serverless-python-packaging/) + ## Cross compiling! 
Compiling non-pure-Python modules or fetching their manylinux wheels is supported on non-linux OSs via the use of Docker and the From 991fd9fd02b084df197156c32c00218a1f79e872 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 30 Aug 2018 20:57:46 -0400 Subject: [PATCH 030/328] Update README.md --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 7fd5bffe..d2c85153 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,10 @@ A Serverless v1.x plugin to automatically bundle dependencies from sls plugin install -n serverless-python-requirements ``` +This will automatically add the plugin to your project's `package.json` and the plugins section of its +`serverless.yml`. That's all that's needed for basic use! The plugin will now bundle your python +dependencies specified in your `requirements.txt` or `Pipfile` when you run `sls deploy`. + [:apple::beer::snake: Mac Brew installed Python notes](#applebeersnake-mac-brew-installed-python-notes) ## Intro Guide From 01c1fb1f4aa476b0b86fb3643b2e0957a9b4423b Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 30 Aug 2018 20:59:57 -0400 Subject: [PATCH 031/328] Update README.md --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index d2c85153..16547837 100644 --- a/README.md +++ b/README.md @@ -21,10 +21,11 @@ This will automatically add the plugin to your project's `package.json` and the `serverless.yml`. That's all that's needed for basic use! The plugin will now bundle your python dependencies specified in your `requirements.txt` or `Pipfile` when you run `sls deploy`. -[:apple::beer::snake: Mac Brew installed Python notes](#applebeersnake-mac-brew-installed-python-notes) +For a more in depth introduction on how to user this plugin, check out +[this post on the Serverless Blog](https://serverless.com/blog/serverless-python-packaging/) + +If you're on a mac, check out [these notes](#applebeersnake-mac-brew-installed-python-notes) about using python installed by brew. -## Intro Guide -For an introduction on how to user this plugin, check out [this post on the Serverless Blog](https://serverless.com/blog/serverless-python-packaging/) ## Cross compiling! Compiling non-pure-Python modules or fetching their manylinux wheels is From 137f8e1b1e10579a2b8db88a49c335329a3307dd Mon Sep 17 00:00:00 2001 From: Farley Date: Sat, 8 Sep 2018 19:25:30 +0200 Subject: [PATCH 032/328] Download and static caching as a feature (#165) Fixes #157 (filed by me) ## What this does * Makes the download caching of pip a "first-class citizen" as an option directly in this plugin's options. This will "fix" a few (attempts) at using the pip cache, specifically in Docker, and will simplify this feature (as the user simply has to enable it, not specify a folder). In a future MR, I'd highly suggest enabling this by default. * Second, it adds a new type of caching called "static caching" which allows you to cache the outputs of this plugin. This greatly speeds up every single build as long as you have the feature enabled and do not change your requirements.txt file. In a future MR, I'd highly suggest enabling this by default also. * The pip download and static cache are shared between any projects of the same user through an [appdir](https://www.npmjs.com/package/appdirectory) cache folder when packaging your service. 
This _especially_ helps on projects that heavily use Docker (Win/Mac) for deployments or development, or for pip modules that need to compile every time, and _especially_ for projects with long requirements.txt files. This will also greatly help the longer and more complex your requirements.txt is, and/or if you use the same requirements.txt on multiple projects (common in team environments). ## Implementation details * When either cache is enabled, this plugin now caches those requirements (download or static) to an "appdir" cache folder (per the [appdirectory](https://www.npmjs.com/package/appdirectory) node module). * When this feature is NOT enabled, nothing changes * Injection happens directly from the new cached requirements directory via a symlink created in the right place in `.serverless` or `.serverless/functionname` if deploying individually. * As mentioned above, there is a symlink into the .serverless folder when the static cache is enabled pointing to it, so you still "know" where your cache is (for both individually and non-individually packaged functions). * The requirements.txt "generator" was improved to remove comments, empty lines, and sort the list of items before trying to use it (or check its md5 sum). This allows for more actual md5 matches between projects, in-case of comments and such in the requirements file. * A new command was added to the command-line to flush the download/static cache, called cleanCache invokable with: `serverless requirements cleanCache`. This clears all items including the download and static cache. * A handful of new tests were created for various edge conditions I've found while doing this refactoring, some were based on bugs other people found while using this plugin with some combination of options, some are not directly related to this merge's intent, but it's just part of my stream of work/consciousness. Sorry tests take a lot longer to run now since there are lots more now. * A UID bug fix related to docker + pip was implemented (seen on a few other bugs) from @cgrimal * The following new configurable custom options were added to this plugin... Variable Name | Value | Description --- | --- | --- useStaticCache | `false/true` | Default: false. This will enable or disable the static cache. After some testing I would like to make this default: true, as this will greatly help everyone, and there's no reason to not enable this. Possibly making this default: true will help weed out issues faster. I'll gladly step-up to quickly fix any bugs people have with it since I'm now well accustomed with the code. useDownloadCache | `false/true` | Default: false. This will enable or disable the pip download cache. This was previously the "example" code using a pipEnvExtraCmd to specify a local folder to cache downloads to. This does not require a cache location to be set, if not specified it will use an appdirs.usercache() location. cacheLocation | `` | Default: [appdirectory](https://www.npmjs.com/package/appdirectory).userCache(appName: serverless-python-requirements) This will allow the user to specify where the caches (both static and download) should be stored. This will be useful for people who want to do advanced things like storing cache globally shared between users, or for CI/CD build servers on shared-storage to allow multiple build machines to leverage a cache to speed builds up. An example would be to mount a shared NFS store on all your CI/CD runners to `/mnt/shared` and set this value to `/mnt/shared/sls-py-cache`. 
staticCacheMaxVersions | `` | Default: 0. This will restrict the maximum number of caches in the cache folder. Setting this to 0 means there is no maximum number of versions. This will be useful for build/CI/CD machines that have limited disk space and don't want to (potentially) infinitely cache hundreds/thousands of versions of items in cache. Although, I would be disturbed if a project had hundreds of changes to their requirements.txt file. ## TODO - [X] Feature Implementation - [X] BUG: Deploying single-functions fails (Packaging works, but fails because of #161 ) - [X] Code Styling / Linting - [X] Test to be sure Pipfile / generated requirements.txt still works - [X] Tested a bunch on Mac / Linux with and without Docker - [X] Adding Tests for Download Cache - [X] Make sure zip feature still works - [X] Ensure all existing tests pass - [X] Adding Tests for static cache - [X] Updating README.md to inform users how to use it - [X] Make sure dockerSsh works - [X] Implement error when trying to use --cache-dir with dockerizePip (won't work) - [X] Implement suggestion when trying to use --cache-dir without dockerizePip - [x] Test on Windows - [x] Iterate through any feedback - [x] Rebase with master constantly, awaiting merge... :) Replaces #162 --- .gitignore | 3 + README.md | 37 ++++-- index.js | 35 +++-- lib/clean.js | 32 ++++- lib/docker.js | 7 +- lib/pip.js | 362 +++++++++++++++++++++++++++++++++++++++++++------- lib/shared.js | 108 +++++++++++++++ package.json | 7 +- test.bats | 162 +++++++++++++++++++--- 9 files changed, 658 insertions(+), 95 deletions(-) create mode 100644 lib/shared.js diff --git a/.gitignore b/.gitignore index 53d9d5b0..85e60616 100644 --- a/.gitignore +++ b/.gitignore @@ -42,3 +42,6 @@ admin.env #PYTHON STUFF *.py[co] __pycache__ + +#NODE STUFF +package-lock.json diff --git a/README.md b/README.md index 16547837..119553fe 100644 --- a/README.md +++ b/README.md @@ -140,25 +140,40 @@ custom: ``` ## Extra Config Options -### extra pip arguments -You can specify extra arguments to be passed to pip like this: +### Caching +You can enable two kinds of caching with this plugin, both of which are currently DISABLED by default. First, a download cache that will cache downloads that pip needs to compile the packages. And second, what we call "static caching", which caches the output of pip after compiling everything for your requirements file. Since requirements.txt files rarely change, you will often see large speed improvements when enabling the static cache feature. These caches will be shared between all your projects if no custom cacheLocation is specified (see below). + + _**Please note:** This has replaced the previously recommended usage of "--cache-dir" in the pipCmdExtraArgs_ ```yaml custom: pythonRequirements: - dockerizePip: true - pipCmdExtraArgs: - - --cache-dir - - .requirements-cache + useDownloadCache: true + useStaticCache: true ``` +_Additionally, in future versions of this plugin, both caching features will probably be enabled by default_ -When using `--cache-dir` don't forget to also exclude it from the package. +### Other caching options... +There are two additional options related to caching. You can specify where on your system this plugin caches with the `cacheLocation` option. By default it will automatically determine where to store the cache based on your username and your OS, via the [appdirectory](https://www.npmjs.com/package/appdirectory) module.
Additionally, you can specify how many max static caches to store with `staticCacheMaxVersions`, as a simple attempt to limit disk space usage for caching. This is DISABLED (set to 0) by default. Example: +```yaml +custom: + pythonRequirements: + useStaticCache: true + useDownloadCache: true + cacheLocation: '/home/user/.my_cache_goes_here' + staticCacheMaxVersions: 10 + +``` +### Extra pip arguments +You can specify extra arguments [supported by pip](https://pip.pypa.io/en/stable/reference/pip_install/#options) to be passed to pip like this: ```yaml -package: - exclude: - - .requirements-cache/** +custom: + pythonRequirements: + pipCmdExtraArgs: + - --compile ``` + ### Customize requirements file name [Some `pip` workflows involve using requirements files not named `requirements.txt`](https://www.kennethreitz.org/essays/a-better-pip-workflow). @@ -350,4 +365,4 @@ zipinfo .serverless/xxx.zip improved pip chache support when using docker. * [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option * [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) - + * [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching diff --git a/index.js b/index.js index f64c0680..ff73b1f7 100644 --- a/index.js +++ b/index.js @@ -12,7 +12,7 @@ const { const { injectAllRequirements } = require('./lib/inject'); const { installAllRequirements } = require('./lib/pip'); const { pipfileToRequirements } = require('./lib/pipenv'); -const { cleanup } = require('./lib/clean'); +const { cleanup, cleanupCache } = require('./lib/clean'); BbPromise.promisifyAll(fse); @@ -39,6 +39,10 @@ class ServerlessPythonRequirements { dockerSsh: false, dockerImage: null, dockerFile: null, + useStaticCache: false, + useDownloadCache: false, + cacheLocation: false, + staticCacheMaxVersions: 0, pipCmdExtraArgs: [], noDeploy: [ 'boto3', @@ -115,6 +119,11 @@ class ServerlessPythonRequirements { install: { usage: 'install requirements manually', lifecycleEvents: ['install'] + }, + cleanCache: { + usage: + 'Removes all items in the pip download/static cache (if present)', + lifecycleEvents: ['cleanCache'] } } } @@ -128,6 +137,11 @@ class ServerlessPythonRequirements { return args[1].functionObj.runtime.startsWith('python'); }; + const clean = () => + BbPromise.bind(this) + .then(cleanup) + .then(removeVendorHelper); + const before = () => { if (!isFunctionRuntimePython(arguments)) { return; @@ -155,13 +169,13 @@ class ServerlessPythonRequirements { const invalidateCaches = () => { if (this.options.invalidateCaches) { - return BbPromise.bind(this) - .then(cleanup) - .then(removeVendorHelper); + return clean; } return BbPromise.resolve(); }; + const cleanCache = () => BbPromise.bind(this).then(cleanupCache); + this.hooks = { 'after:package:cleanup': invalidateCaches, 'before:package:createDeploymentArtifacts': before, @@ -172,16 +186,9 @@ class ServerlessPythonRequirements { this.serverless.cli.generateCommandsHelp(['requirements']); return BbPromise.resolve(); }, - 'requirements:install:install': () => - BbPromise.bind(this) - .then(pipfileToRequirements) - .then(addVendorHelper) - .then(installAllRequirements) - .then(packRequirements), - 'requirements:clean:clean': () => - BbPromise.bind(this) - .then(cleanup) - .then(removeVendorHelper) + 'requirements:install:install': before, + 'requirements:clean:clean': clean, + 'requirements:cleanCache:cleanCache': cleanCache }; } } diff --git a/lib/clean.js 
b/lib/clean.js index 332ceb37..119ab586 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -1,6 +1,8 @@ const BbPromise = require('bluebird'); const fse = require('fs-extra'); const path = require('path'); +const glob = require('glob-all'); +const { getUserCachePath } = require('./shared'); BbPromise.promisifyAll(fse); @@ -29,4 +31,32 @@ function cleanup() { ); } -module.exports = { cleanup }; +/** + * Clean up static cache, remove all items in there + * @return {Promise} + */ +function cleanupCache() { + const cacheLocation = getUserCachePath(this.options); + if (fse.existsSync(cacheLocation)) { + if (this.serverless) { + this.serverless.cli.log(`Removing static caches at: ${cacheLocation}`); + } + + // Only remove cache folders that we added, just incase someone accidentally puts a weird + // static cache location so we don't remove a bunch of personal stuff + const promises = []; + glob + .sync([path.join(cacheLocation, '*slspyc/')], { mark: true, dot: false }) + .forEach(file => { + promises.push(fse.removeAsync(file)); + }); + return BbPromise.all(promises); + } else { + if (this.serverless) { + this.serverless.cli.log(`No static cache found`); + } + return BbPromise.resolve(); + } +} + +module.exports = { cleanup, cleanupCache }; diff --git a/lib/docker.js b/lib/docker.js index 26cbf6de..db2e81b5 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -49,8 +49,11 @@ function findTestFile(servicePath) { if (fse.pathExistsSync(path.join(servicePath, 'serverless.json'))) { return 'serverless.json'; } + if (fse.pathExistsSync(path.join(servicePath, 'requirements.txt'))) { + return 'requirements.txt'; + } throw new Error( - 'Unable to find serverless.yml or serverless.yaml or serverless.json for getBindPath()' + 'Unable to find serverless.{yml|yaml|json} or requirements.txt for getBindPath()' ); } @@ -154,7 +157,7 @@ function getDockerUid(bindPath) { 'stat', '-c', '%u', - '/test/.serverless' + '/bin/sh' ]; const ps = dockerCommand(options); return ps.stdout.trim(); diff --git a/lib/pip.js b/lib/pip.js index 4e5e24e1..044f6c78 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -4,38 +4,61 @@ const path = require('path'); const get = require('lodash.get'); const set = require('lodash.set'); const { spawnSync } = require('child_process'); +const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); const { getStripCommand, deleteFiles } = require('./slim'); +const { + checkForAndDeleteMaxCacheVersions, + md5Path, + getRequirementsWorkingPath, + getUserCachePath +} = require('./shared'); /** - * Install requirements described in requirementsPath to targetFolder + * Just generate the requirements file in the .serverless folder * @param {string} requirementsPath - * @param {string} targetFolder + * @param {string} targetFile * @param {Object} serverless * @param {string} servicePath * @param {Object} options * @return {undefined} */ -function installRequirements( +function installRequirementsFile( requirementsPath, - targetFolder, + targetFile, serverless, servicePath, options ) { - // Create target folder if it does not exist - const targetRequirementsFolder = path.join(targetFolder, 'requirements'); - fse.ensureDirSync(targetRequirementsFolder); - - const dotSlsReqs = path.join(targetFolder, 'requirements.txt'); if (options.usePipenv && fse.existsSync(path.join(servicePath, 'Pipfile'))) { - generateRequirementsFile(dotSlsReqs, dotSlsReqs, options); + generateRequirementsFile( + path.join(servicePath, '.serverless/requirements.txt'), + targetFile, + 
options + ); + serverless.cli.log( + `Parsed requirements.txt from Pipfile in ${targetFile}...` + ); } else { - generateRequirementsFile(requirementsPath, dotSlsReqs, options); + generateRequirementsFile(requirementsPath, targetFile, options); + serverless.cli.log( + `Generated requirements from ${requirementsPath} in ${targetFile}...` + ); } +} + +/** + * Install requirements described from requirements in the targetFolder into that same targetFolder + * @param {string} targetFolder + * @param {Object} serverless + * @param {Object} options + * @return {undefined} + */ +function installRequirements(targetFolder, serverless, options) { + const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); serverless.cli.log( - `Installing requirements of ${requirementsPath} in ${targetFolder}...` + `Installing requirements from ${targetRequirementsTxt} ...` ); let cmd; @@ -45,13 +68,41 @@ function installRequirements( '-m', 'pip', 'install', - '-t', - dockerPathForWin(options, targetRequirementsFolder), - '-r', - dockerPathForWin(options, dotSlsReqs), ...options.pipCmdExtraArgs ]; + // Check if we're using the legacy --cache-dir command... + if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { + if (options.dockerizePip) { + throw 'Error: You can not use --cache-dir with Docker any more, please\n' + + ' use the new option useDownloadCache instead. Please see:\n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching'; + } else { + serverless.cli.log('=================================================='); + serverless.cli.log( + 'Warning: You are using a deprecated --cache-dir inside\n' + + ' your pipCmdExtraArgs which may not work properly, please use the\n' + + ' useDownloadCache option instead. Please see: \n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + serverless.cli.log('=================================================='); + } + } + if (!options.dockerizePip) { + // Push our local OS-specific paths for requirements and target directory + pipCmd.push('-t', dockerPathForWin(options, targetFolder)); + pipCmd.push('-r', dockerPathForWin(options, targetRequirementsTxt)); + // If we want a download cache... 
+ if (options.useDownloadCache) { + const downloadCacheDir = path.join( + getUserCachePath(options), + 'downloadCacheslspyc' + ); + serverless.cli.log(`Using download cache directory ${downloadCacheDir}`); + fse.ensureDirSync(downloadCacheDir); + pipCmd.push('--cache-dir', downloadCacheDir); + } + // Check if pip has Debian's --system option and set it if so const pipTestRes = spawnSync(options.pythonBin, [ '-m', @@ -71,9 +122,14 @@ function installRequirements( pipCmd.push('--system'); } } + // If we are dockerizing pip if (options.dockerizePip) { cmd = 'docker'; + // Push docker-specific paths for requirements and target directory + pipCmd.push('-t', '/var/task/'); + pipCmd.push('-r', '/var/task/requirements.txt'); + // Build docker image if required let dockerImage; if (options.dockerFile) { @@ -87,25 +143,74 @@ function installRequirements( serverless.cli.log(`Docker Image: ${dockerImage}`); // Prepare bind path depending on os platform - const bindPath = getBindPath(serverless, servicePath); + const bindPath = getBindPath(serverless, targetFolder); cmdOptions = ['run', '--rm', '-v', `"${bindPath}:/var/task:z"`]; if (options.dockerSsh) { // Mount necessary ssh files to work with private repos cmdOptions.push( '-v', - `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z` + `"${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z"` ); cmdOptions.push( '-v', - `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z` + `"${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z"` ); - cmdOptions.push('-v', `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`); + cmdOptions.push('-v', `"${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z"`); cmdOptions.push('-e', 'SSH_AUTH_SOCK=/tmp/ssh_sock'); } + + // If we want a download cache... + const dockerDownloadCacheDir = '/var/useDownloadCache'; + if (options.useDownloadCache) { + const downloadCacheDir = path.join( + getUserCachePath(options), + 'downloadCacheslspyc' + ); + serverless.cli.log(`Using download cache directory ${downloadCacheDir}`); + fse.ensureDirSync(downloadCacheDir); + // This little hack is necessary because getBindPath requires something inside of it to test... + // Ugh, this is so ugly, but someone has to fix getBindPath in some other way (eg: make it use + // its own temp file) + fse.closeSync( + fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') + ); + const windowsized = getBindPath(serverless, downloadCacheDir); + // And now push it to a volume mount and to pip... 
+ cmdOptions.push('-v', `"${windowsized}:${dockerDownloadCacheDir}:z"`); + pipCmd.push('--cache-dir', dockerDownloadCacheDir); + } + if (process.platform === 'linux') { // Use same user so requirements folder is not root and so --cache-dir works - cmdOptions.push('-u', `${process.getuid()}`); + var commands = []; + if (options.useDownloadCache) { + // Set the ownership of the download cache dir to root + commands.push(quote(['chown', '-R', '0:0', dockerDownloadCacheDir])); + } + // Install requirements with pip + commands.push(quote(pipCmd)); + // Set the ownership of the current folder to user + commands.push( + quote([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + '/var/task' + ]) + ); + if (options.useDownloadCache) { + // Set the ownership of the download cache dir back to user + commands.push( + quote([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + dockerDownloadCacheDir + ]) + ); + } + pipCmd = ['/bin/bash', '-c', '"' + commands.join(' && ') + '"']; } else { // Use same user so --cache-dir works cmdOptions.push('-u', getDockerUid(bindPath)); @@ -119,10 +224,10 @@ function installRequirements( // If enabled slimming, strip so files if (options.slim === true || options.slim === 'true') { - const preparedPath = dockerPathForWin(options, targetRequirementsFolder); + const preparedPath = dockerPathForWin(options, targetFolder); cmdOptions.push(getStripCommand(options, preparedPath)); } - let spawnArgs = { cwd: servicePath, shell: true }; + let spawnArgs = { cwd: targetFolder, shell: true }; if (process.env.SLS_DEBUG) { spawnArgs.stdio = 'inherit'; } @@ -143,7 +248,7 @@ function installRequirements( } // If enabled slimming, delete files in slimPatterns if (options.slim === true || options.slim === 'true') { - deleteFiles(options, targetRequirementsFolder); + deleteFiles(options, targetFolder); } } @@ -161,6 +266,8 @@ function dockerPathForWin(options, path) { } /** create a filtered requirements.txt without anything from noDeploy + * then remove all comments and empty lines, and sort the list which + * assist with matching the static cache * @param {string} source requirements * @param {string} target requirements where results are written * @param {Object} options @@ -171,8 +278,13 @@ function generateRequirementsFile(source, target, options) { .readFileSync(source, { encoding: 'utf-8' }) .split(/\r?\n/); const filteredRequirements = requirements.filter(req => { + req = req.trim(); + if (req.length == 0 || req[0] == '#') { + return false; + } return !noDeploy.has(req.split(/[=<> \t]/)[0].trim()); }); + filteredRequirements.sort(); // Sort them alphabetically fse.writeFileSync(target, filteredRequirements.join('\n')); } @@ -185,15 +297,15 @@ function generateRequirementsFile(source, target, options) { */ function copyVendors(vendorFolder, targetFolder, serverless) { // Create target folder if it does not exist - const targetRequirementsFolder = path.join(targetFolder, 'requirements'); + fse.ensureDirSync(targetFolder); serverless.cli.log( - `Copying vendor libraries from ${vendorFolder} to ${targetRequirementsFolder}...` + `Copying vendor libraries from ${vendorFolder} to ${targetFolder}...` ); fse.readdirSync(vendorFolder).map(file => { let source = path.join(vendorFolder, file); - let dest = path.join(targetRequirementsFolder, file); + let dest = path.join(targetFolder, file); if (fse.existsSync(dest)) { rimraf.sync(dest); } @@ -202,11 +314,129 @@ function copyVendors(vendorFolder, targetFolder, serverless) { } /** - * pip install the requirements 
to the .serverless/requirements directory + * This evaluates if requirements are actually needed to be installed, but fails + * gracefully if no req file is found intentionally. It also assists with code + * re-use for this logic pertaining to individually packaged functions + * @param {string} servicePath + * @param {string} modulePath + * @param {Object} options + * @param {Object} funcOptions + * @param {Object} serverless + * @return {string} + */ +function installRequirementsIfNeeded( + servicePath, + modulePath, + options, + funcOptions, + serverless +) { + // Our source requirements, under our service path, and our module path (if specified) + const fileName = path.join(servicePath, modulePath, options.fileName); + + // First, generate the requirements file to our local .serverless folder + fse.ensureDirSync(path.join(servicePath, '.serverless')); + const slsReqsTxt = path.join(servicePath, '.serverless', 'requirements.txt'); + + installRequirementsFile( + fileName, + slsReqsTxt, + serverless, + servicePath, + options + ); + + // If no requirements file or an empty requirements file, then do nothing + if (!fse.existsSync(slsReqsTxt) || fse.statSync(slsReqsTxt).size == 0) { + serverless.cli.log( + `Skipping empty output requirements.txt file from ${slsReqsTxt}` + ); + return false; + } + + // Copy our requirements to another filename in .serverless (incase of individually packaged) + if (modulePath && modulePath != '.') { + fse.existsSync(path.join(servicePath, '.serverless', modulePath)); + const destinationFile = path.join( + servicePath, + '.serverless', + modulePath, + 'requirements.txt' + ); + serverless.cli.log( + `Copying from ${slsReqsTxt} into ${destinationFile} ...` + ); + fse.copySync(slsReqsTxt, destinationFile); + } + + // Then generate our MD5 Sum of this requirements file to determine where it should "go" to and/or pull cache from + const reqChecksum = md5Path(slsReqsTxt); + + // Then figure out where this cache should be, if we're caching, if we're in a module, etc + const workingReqsFolder = getRequirementsWorkingPath( + reqChecksum, + servicePath, + options + ); + + // Check if our static cache is present and is valid + if (fse.existsSync(workingReqsFolder)) { + if ( + fse.existsSync(path.join(workingReqsFolder, '.completed_requirements')) && + workingReqsFolder.endsWith('_slspyc') + ) { + serverless.cli.log( + `Using static cache of requirements found at ${workingReqsFolder} ...` + ); + // We'll "touch" the folder, as to bring it to the start of the FIFO cache + fse.utimesSync(workingReqsFolder, new Date(), new Date()); + return workingReqsFolder; + } + // Remove our old folder if it didn't complete properly, but _just incase_ only remove it if named properly... + if ( + workingReqsFolder.endsWith('_slspyc') || + workingReqsFolder.endsWith('.requirements') + ) { + rimraf.sync(workingReqsFolder); + } + } + + // Ensuring the working reqs folder exists + fse.ensureDirSync(workingReqsFolder); + + // Copy our requirements.txt into our working folder... 
+ fse.copySync(slsReqsTxt, path.join(workingReqsFolder, 'requirements.txt')); + + // Then install our requirements from this folder + installRequirements(workingReqsFolder, serverless, options); + + // Copy vendor libraries to requirements folder + if (options.vendor) { + copyVendors(options.vendor, workingReqsFolder, serverless); + } + if (funcOptions.vendor) { + copyVendors(funcOptions.vendor, workingReqsFolder, serverless); + } + + // Then touch our ".completed_requirements" file so we know we can use this for static cache + if (options.useStaticCache) { + fse.closeSync( + fse.openSync(path.join(workingReqsFolder, '.completed_requirements'), 'w') + ); + } + return workingReqsFolder; +} + +/** + * pip install the requirements to the requirements directory * @return {undefined} */ function installAllRequirements() { - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + // fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + // First, check and delete cache versions, if enabled + checkForAndDeleteMaxCacheVersions(this.options, this.serverless); + + // Then if we're going to package functions individually... if (this.serverless.service.package.individually) { let doneModules = []; this.targetFuncs @@ -219,36 +449,70 @@ function installAllRequirements() { if (!get(f, 'module')) { set(f, ['module'], '.'); } + // If we didn't already process a module (functions can re-use modules) if (!doneModules.includes(f.module)) { - installRequirements( - path.join(f.module, this.options.fileName), - path.join('.serverless', f.module), - this.serverless, + const reqsInstalledAt = installRequirementsIfNeeded( + this.servicePath, + f.module, + this.options, + f, + this.serverless + ); + // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are + let modulePath = path.join( this.servicePath, - this.options + '.serverless', + `${f.module}`, + 'requirements' ); - if (f.vendor) { - // copy vendor libraries to requirements folder - copyVendors( - f.vendor, - path.join('.serverless', f.module), - this.serverless - ); + // Only do if we didn't already do it + if ( + reqsInstalledAt && + !fse.existsSync(modulePath) && + reqsInstalledAt != modulePath + ) { + if (this.options.useStaticCache) { + // Windows can't symlink so we have to copy on Windows, + // it's not as fast, but at least it works + if (process.platform == 'win32') { + fse.copySync(reqsInstalledAt, modulePath); + } else { + fse.symlink(reqsInstalledAt, modulePath); + } + } else { + fse.rename(reqsInstalledAt, modulePath); + } } doneModules.push(f.module); } }); } else { - installRequirements( - this.options.fileName, - '.serverless', - this.serverless, + const reqsInstalledAt = installRequirementsIfNeeded( this.servicePath, - this.options + '', + this.options, + {}, + this.serverless ); - if (this.options.vendor) { - // copy vendor libraries to requirements folder - copyVendors(this.options.vendor, '.serverless', this.serverless); + // Add symlinks into .serverless for so it's easier for injecting and for users to see where reqs are + let symlinkPath = path.join( + this.servicePath, + '.serverless', + `requirements` + ); + // Only do if we didn't already do it + if ( + reqsInstalledAt && + !fse.existsSync(symlinkPath) && + reqsInstalledAt != symlinkPath + ) { + // Windows can't symlink so we have to copy on Windows, + // it's not as fast, but at least it works + if (process.platform == 'win32') { + fse.copySync(reqsInstalledAt, symlinkPath); + } else { + 
fse.symlink(reqsInstalledAt, symlinkPath); + } } } } diff --git a/lib/shared.js b/lib/shared.js new file mode 100644 index 00000000..b3a1ffaa --- /dev/null +++ b/lib/shared.js @@ -0,0 +1,108 @@ +const Appdir = require('appdirectory'); +const rimraf = require('rimraf'); +const md5File = require('md5-file'); +const glob = require('glob-all'); +const path = require('path'); +const fse = require('fs-extra'); + +/** + * This helper will check if we're using static cache and have max + * versions enabled and will delete older versions in a fifo fashion + * @param {Object} options + * @param {Object} serverless + * @return {undefined} + */ +function checkForAndDeleteMaxCacheVersions(options, serverless) { + // If we're using the static cache, and we have static cache max versions enabled + if ( + options.useStaticCache && + options.staticCacheMaxVersions && + parseInt(options.staticCacheMaxVersions) > 0 + ) { + // Get the list of our cache files + const files = glob.sync( + [path.join(getUserCachePath(options), '*_slspyc/')], + { mark: true } + ); + // Check if we have too many + if (files.length >= options.staticCacheMaxVersions) { + // Sort by modified time + files.sort(function(a, b) { + return ( + fse.statSync(a).mtime.getTime() - fse.statSync(b).mtime.getTime() + ); + }); + // Remove the older files... + var items = 0; + for ( + var i = 0; + i < files.length - options.staticCacheMaxVersions + 1; + i++ + ) { + rimraf.sync(files[i]); + items++; + } + // Log the number of cache files flushed + serverless.cli.log( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } + } +} + +/** + * The working path that all requirements will be compiled into + * @param {string} subfolder + * @param {string} servicePath + * @param {Object} options + * @return {string} + */ +function getRequirementsWorkingPath(subfolder, servicePath, options) { + // If we want to use the static cache + if (options && options.useStaticCache) { + if (subfolder) { + subfolder = subfolder + '_slspyc'; + } + // If we have max number of cache items... 
+ + return path.join(getUserCachePath(options), subfolder); + } + + // If we don't want to use the static cache, then fallback to the way things used to work + return path.join(servicePath, '.serverless', 'requirements'); +} + +/** + * The static cache path that will be used for this system + options, used if static cache is enabled + * @param {Object} options + * @return {string} + */ +function getUserCachePath(options) { + // If we've manually set the static cache location + if (options && options.cacheLocation) { + return path.resolve(options.cacheLocation); + } + + // Otherwise, find/use the python-ey appdirs cache location + const dirs = new Appdir({ + appName: 'serverless-python-requirements', + appAuthor: 'UnitedIncome' + }); + return dirs.userCache(); +} + +/** + * Helper to get the md5 a a file's contents to determine if a requirements has a static cache + * @param {string} fullpath + * @return {string} + */ +function md5Path(fullpath) { + return md5File.sync(fullpath); +} + +module.exports = { + checkForAndDeleteMaxCacheVersions, + getRequirementsWorkingPath, + getUserCachePath, + md5Path +}; diff --git a/package.json b/package.json index 5487c04c..67be6ac4 100644 --- a/package.json +++ b/package.json @@ -47,6 +47,7 @@ "prettier": "*" }, "dependencies": { + "appdirectory": "^0.1.0", "bluebird": "^3.0.6", "fs-extra": "^7.0.0", "glob-all": "^3.1.0", @@ -54,9 +55,11 @@ "jszip": "^3.1.0", "lodash.get": "^4.4.2", "lodash.set": "^4.3.2", - "lodash.values": "^4.3.0", "lodash.uniqby": "^4.0.0", - "rimraf": "^2.6.2" + "lodash.values": "^4.3.0", + "md5-file": "^3.2.3", + "rimraf": "^2.6.2", + "shell-quote": "^1.6.1" }, "eslintConfig": { "extends": "eslint:recommended", diff --git a/test.bats b/test.bats index 4501ba52..07634168 100755 --- a/test.bats +++ b/test.bats @@ -7,12 +7,35 @@ setup() { export LC_ALL=C.UTF-8 export LANG=C.UTF-8 fi + export USR_CACHE_DIR=`node -e 'console.log(require("./lib/shared").getUserCachePath())'` + # Please note: If you update change the requirements.txt in test/base this value will + # change. Run a test which uses this variable manually step by step and list the cache + # folder to find the new hash if you do this + export CACHE_FOLDER_HASH="b8b9d2be59f6f2ea5778e8b2aa4d2ddc_slspyc" + if [ -d "${USR_CACHE_DIR}" ] ; then + rm -Rf "${USR_CACHE_DIR}" + fi } teardown() { rm -rf puck puck2 puck3 node_modules .serverless .requirements.zip .requirements-cache \ foobar package-lock.json serverless-python-requirements-*.tgz if [ -f serverless.yml.bak ]; then mv serverless.yml.bak serverless.yml; fi + if [ -f slimPatterns.yml ]; then rm -f slimPatterns.yml; fi + if [ -d "${USR_CACHE_DIR}" ] ; then + rm -Rf "${USR_CACHE_DIR}" + fi +} + +@test "py3.6 supports custom file name with fileName option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n fileName: puck/' serverless.yml + echo "requests" > puck + sls package + ls .serverless/requirements/requests + ! ls .serverless/requirements/flask } @test "py3.6 can package flask with default options" { @@ -43,10 +66,9 @@ teardown() { @test "py3.6 can package flask with slim & slimPatterns options" { cd tests/base - mv _slimPatterns.yml slimPatterns.yml + cat _slimPatterns.yml > slimPatterns.yml npm i $(npm pack ../..) 
sls --slim=true package - mv slimPatterns.yml _slimPatterns.yml unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 @@ -110,36 +132,132 @@ teardown() { @test "py3.6 can package flask with slim & dockerizePip & slimPatterns options" { cd tests/base - mv _slimPatterns.yml slimPatterns.yml + cat _slimPatterns.yml > slimPatterns.yml npm i $(npm pack ../..) ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --slim=true package - mv slimPatterns.yml _slimPatterns.yml unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 } -@test "py3.6 uses cache with dockerizePip option" { +@test "py3.6 uses download cache with useDownloadCache option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true/' serverless.yml + sls package + USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + ls $USR_CACHE_DIR/downloadCacheslspyc/http +} + +@test "py3.6 uses download cache with cacheLocation option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n cacheLocation: .requirements-cache/' serverless.yml + sls package + ls .requirements-cache/downloadCacheslspyc/http +} + +@test "py3.6 uses download cache with dockerizePip option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true/' serverless.yml + sls --dockerizePip=true package + USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + ls $USR_CACHE_DIR/downloadCacheslspyc/http +} + +@test "py3.6 uses download cache with dockerizePip + cacheLocation option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n cacheLocation: .requirements-cache/' serverless.yml + sls --dockerizePip=true package + ls .requirements-cache/downloadCacheslspyc/http +} + +@test "py3.6 uses static and download cache" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n useStaticCache: true/' serverless.yml + sls package + USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask + ls $USR_CACHE_DIR/downloadCacheslspyc/http +} + +@test "py3.6 uses static and download cache with dockerizePip option" { cd tests/base npm i $(npm pack ../..) ! 
uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n pipCmdExtraArgs: ["--cache-dir", ".requirements-cache"]/' serverless.yml + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n useStaticCache: true/' serverless.yml sls --dockerizePip=true package - ls .requirements-cache/http + USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask + ls $USR_CACHE_DIR/downloadCacheslspyc/http } -@test "py3.6 uses cache with dockerizePip & slim option" { +@test "py3.6 uses static cache" { cd tests/base npm i $(npm pack ../..) ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n pipCmdExtraArgs: ["--cache-dir", ".requirements-cache"]/' serverless.yml + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml + sls package + USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask + ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/.completed_requirements +} + +@test "py3.6 uses static cache with cacheLocation option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true\n cacheLocation: .requirements-cache/' serverless.yml + sls package + USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + ls .requirements-cache/$CACHE_FOLDER_HASH/flask + ls .requirements-cache/$CACHE_FOLDER_HASH/.completed_requirements +} + +@test "py3.6 checking that static cache actually pulls from cache (by poisoning it)" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml + sls package + cp .serverless/sls-py-req-test.zip ./puck + USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + echo "injected new file into static cache folder" > $USR_CACHE_DIR/$CACHE_FOLDER_HASH/injected_file_is_bad_form + sls package + [ `wc -c ./.serverless/sls-py-req-test.zip | awk '{ print $1 }'` -gt `wc -c ./puck | awk '{ print $1 }'` ] +} + +@test "py3.6 uses static cache with dockerizePip & slim option" { + cd tests/base + npm i $(npm pack ../..) + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml sls --dockerizePip=true --slim=true package - ls .requirements-cache/http + ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask + unzip .serverless/sls-py-req-test.zip -d puck test $(find puck -name "*.pyc" | wc -l) -eq 0 } +@test "py3.6 uses download cache with dockerizePip & slim option" { + cd tests/base + npm i $(npm pack ../..) + ! 
uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true/' serverless.yml + sls --dockerizePip=true --slim=true package + ls $USR_CACHE_DIR/downloadCacheslspyc/http + unzip .serverless/sls-py-req-test.zip -d puck + test $(find puck -name "*.pyc" | wc -l) -eq 0 +} @test "py2.7 can package flask with default options" { cd tests/base @@ -168,10 +286,9 @@ teardown() { @test "py2.7 can package flask with slim & dockerizePip & slimPatterns options" { cd tests/base - mv _slimPatterns.yml slimPatterns.yml + cat _slimPatterns.yml > slimPatterns.yml npm i $(npm pack ../..) sls --runtime=python2.7 --slim=true packag - mv slimPatterns.yml _slimPatterns.yml unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 @@ -234,11 +351,10 @@ teardown() { @test "py2.7 can package flask with slim & dockerizePip & slimPatterns options" { cd tests/base - mv _slimPatterns.yml slimPatterns.yml + cat _slimPatterns.yml > slimPatterns.yml npm i $(npm pack ../..) ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --slim=true --runtime=python2.7 package - mv slimPatterns.yml _slimPatterns.yml unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 @@ -265,9 +381,8 @@ teardown() { @test "pipenv py3.6 can package flask with slim & slimPatterns option" { cd tests/pipenv npm i $(npm pack ../..) - mv _slimPatterns.yml slimPatterns.yml + cat _slimPatterns.yml > slimPatterns.yml sls --slim=true package - mv slimPatterns.yml _slimPatterns.yml unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 @@ -385,6 +500,21 @@ teardown() { ls puck/lambda_decorators.py } +@test "py3.6 can package lambda-decorators using vendor and invidiually option" { + cd tests/base + npm i $(npm pack ../..) + sls --individually=true --vendor=./vendor package + unzip .serverless/hello.zip -d puck + unzip .serverless/hello2.zip -d puck2 + unzip .serverless/hello3.zip -d puck3 + ls puck/flask + ls puck2/flask + ! ls puck3/flask + ls puck/lambda_decorators.py + ls puck2/lambda_decorators.py + ! ls puck3/lambda_decorators.py +} + @test "Don't nuke execute perms" { cd tests/base npm i $(npm pack ../..) From 0f8530c9ecc49657e72d236f184f21646a55e01c Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Sat, 8 Sep 2018 19:02:12 +0000 Subject: [PATCH 033/328] Add test covering #233 --- test.bats | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/test.bats b/test.bats index 07634168..c372484b 100755 --- a/test.bats +++ b/test.bats @@ -527,3 +527,14 @@ teardown() { ls puck/lambda_decorators.py ./puck/foobar } + +@test "Don't nuke execute perms when using individually" { + cd tests/individually + npm i $(npm pack ../..) 
+ touch module1/foobar + chmod +x module1/foobar + perl -p -i'.bak' -e 's/(handler.py$)/\1\n - foobar/' serverless.yml + sls package + unzip .serverless/hello1.zip -d puck + ./puck/module1/foobar +} From ee4ce9b850f4dc4af52af34db541d4577729d677 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Sat, 8 Sep 2018 19:16:22 +0000 Subject: [PATCH 034/328] explicitly skip docker tests when it isnt installed --- test.bats | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/test.bats b/test.bats index c372484b..e2080b4a 100755 --- a/test.bats +++ b/test.bats @@ -30,6 +30,7 @@ teardown() { @test "py3.6 supports custom file name with fileName option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n fileName: puck/' serverless.yml echo "requests" > puck @@ -96,6 +97,7 @@ teardown() { @test "py3.6 can package flask with zip & dockerizePip option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --zip=true package unzip .serverless/sls-py-req-test.zip -d puck @@ -105,6 +107,7 @@ teardown() { @test "py3.6 can package flask with zip & slim & dockerizePip option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --zip=true --slim=true package unzip .serverless/sls-py-req-test.zip -d puck @@ -114,6 +117,7 @@ teardown() { @test "py3.6 can package flask with dockerizePip option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true package unzip .serverless/sls-py-req-test.zip -d puck @@ -123,6 +127,7 @@ teardown() { @test "py3.6 can package flask with slim & dockerizePip option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --slim=true package unzip .serverless/sls-py-req-test.zip -d puck @@ -134,6 +139,7 @@ teardown() { cd tests/base cat _slimPatterns.yml > slimPatterns.yml npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --slim=true package unzip .serverless/sls-py-req-test.zip -d puck @@ -145,6 +151,7 @@ teardown() { @test "py3.6 uses download cache with useDownloadCache option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! 
uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true/' serverless.yml sls package @@ -155,6 +162,7 @@ teardown() { @test "py3.6 uses download cache with cacheLocation option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n cacheLocation: .requirements-cache/' serverless.yml sls package @@ -164,6 +172,7 @@ teardown() { @test "py3.6 uses download cache with dockerizePip option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true/' serverless.yml sls --dockerizePip=true package @@ -174,6 +183,7 @@ teardown() { @test "py3.6 uses download cache with dockerizePip + cacheLocation option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n cacheLocation: .requirements-cache/' serverless.yml sls --dockerizePip=true package @@ -183,6 +193,7 @@ teardown() { @test "py3.6 uses static and download cache" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n useStaticCache: true/' serverless.yml sls package @@ -194,6 +205,7 @@ teardown() { @test "py3.6 uses static and download cache with dockerizePip option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n useStaticCache: true/' serverless.yml sls --dockerizePip=true package @@ -205,6 +217,7 @@ teardown() { @test "py3.6 uses static cache" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml sls package @@ -216,6 +229,7 @@ teardown() { @test "py3.6 uses static cache with cacheLocation option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true\n cacheLocation: .requirements-cache/' serverless.yml sls package @@ -227,6 +241,7 @@ teardown() { @test "py3.6 checking that static cache actually pulls from cache (by poisoning it)" { cd tests/base npm i $(npm pack ../..) 
+ docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml sls package @@ -240,6 +255,7 @@ teardown() { @test "py3.6 uses static cache with dockerizePip & slim option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml sls --dockerizePip=true --slim=true package @@ -251,6 +267,7 @@ teardown() { @test "py3.6 uses download cache with dockerizePip & slim option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true/' serverless.yml sls --dockerizePip=true --slim=true package @@ -315,6 +332,7 @@ teardown() { @test "py2.7 can package flask with zip & dockerizePip option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --runtime=python2.7 --zip=true package unzip .serverless/sls-py-req-test.zip -d puck @@ -324,6 +342,7 @@ teardown() { @test "py2.7 can package flask with zip & slim & dockerizePip option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --runtime=python2.7 --zip=true --slim=true package unzip .serverless/sls-py-req-test.zip -d puck @@ -333,6 +352,7 @@ teardown() { @test "py2.7 can package flask with dockerizePip option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --runtime=python2.7 package unzip .serverless/sls-py-req-test.zip -d puck @@ -342,6 +362,7 @@ teardown() { @test "py2.7 can package flask with slim & dockerizePip option" { cd tests/base npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --slim=true --runtime=python2.7 package unzip .serverless/sls-py-req-test.zip -d puck @@ -353,6 +374,7 @@ teardown() { cd tests/base cat _slimPatterns.yml > slimPatterns.yml npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" ! 
uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" sls --dockerizePip=true --slim=true --runtime=python2.7 package unzip .serverless/sls-py-req-test.zip -d puck From c0e40a4161601d0ec5af7dd7f2e78bed80d2c644 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Sat, 8 Sep 2018 19:16:56 +0000 Subject: [PATCH 035/328] more test covering #233 --- test.bats | 13 +++++++++++++ tests/base/package.json | 2 +- tests/individually/package.json | 2 +- tests/individually/serverless.yml | 5 +++++ tests/pipenv/package.json | 2 +- 5 files changed, 21 insertions(+), 3 deletions(-) diff --git a/test.bats b/test.bats index e2080b4a..b1fd6858 100755 --- a/test.bats +++ b/test.bats @@ -560,3 +560,16 @@ teardown() { unzip .serverless/hello1.zip -d puck ./puck/module1/foobar } + +@test "Don't nuke execute perms when using individually w/docker" { + cd tests/individually + docker &> /dev/null || skip "docker not present" + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + npm i $(npm pack ../..) + touch module1/foobar + chmod +x module1/foobar + perl -p -i'.bak' -e 's/(handler.py$)/\1\n - foobar/' serverless.yml + sls package --dockerizePip=true + unzip .serverless/hello1.zip -d puck + ./puck/module1/foobar +} diff --git a/tests/base/package.json b/tests/base/package.json index f135b421..facece60 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.4.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.1.1.tgz" } } diff --git a/tests/individually/package.json b/tests/individually/package.json index f135b421..facece60 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.4.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.1.1.tgz" } } diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index 427dba75..9ae79d6e 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -6,6 +6,11 @@ provider: package: individually: true +custom: + pythonRequirements: + dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + dockerizePip: false functions: hello1: diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index f135b421..facece60 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.4.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.1.1.tgz" } } From a3ecccf7369d561c9cfd170b20a4e66a505ea8f8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" Date: Mon, 10 Sep 2018 10:07:25 -0400 Subject: [PATCH 036/328] Update md5-file requirement from ^3.2.3 to ^4.0.0 (#235) Updates the requirements on [md5-file](https://github.com/roryrjb/md5-file) to permit the latest version.
Commits: see the full diff in [compare view](https://github.com/roryrjb/md5-file/commits/v4.0.0).

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
--- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 67be6ac4..efde7fa6 100644 --- a/package.json +++ b/package.json @@ -57,7 +57,7 @@ "lodash.set": "^4.3.2", "lodash.uniqby": "^4.0.0", "lodash.values": "^4.3.0", - "md5-file": "^3.2.3", + "md5-file": "^4.0.0", "rimraf": "^2.6.2", "shell-quote": "^1.6.1" }, From 95c73980ed998be93710b890af92250518308769 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 10 Sep 2018 10:08:25 -0400 Subject: [PATCH 037/328] version bump for caching!!! --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index efde7fa6..a86f62d3 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.1.1", + "version": "4.2.0", "engines": { "node": ">=6.0" }, From 7235b593ee373124602b4f0f815214ffb971c856 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 10 Sep 2018 10:18:37 -0400 Subject: [PATCH 038/328] Update README.md --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 119553fe..c464dc0a 100644 --- a/README.md +++ b/README.md @@ -148,7 +148,7 @@ You can enable two kinds of caching with this plugin which are currently both DI custom: pythonRequirements: useDownloadCache: true - useStaticCache: true + useStaticCache: true ``` _Additionally, In future versions of this plugin, both caching features will probably be enabled by default_ @@ -157,10 +157,10 @@ There are two additional options related to caching. You can specify where in y ```yaml custom: pythonRequirements: - useStaticCache: true - useDownloadCache: true - cacheLocation: '/home/user/.my_cache_goes_here' - staticCacheMaxVersions: 10 + useStaticCache: true + useDownloadCache: true + cacheLocation: '/home/user/.my_cache_goes_here' + staticCacheMaxVersions: 10 ``` From 437fcbc68012e4bbc7d71220756a94fb39d35670 Mon Sep 17 00:00:00 2001 From: Farley Date: Wed, 12 Sep 2018 18:15:43 +0200 Subject: [PATCH 039/328] Bugfix for requirements.txt options ordering (#237) Fixes #236 @vickeryj Can you please try this? ``` rm -Rf node_modules/serverless-python-requirements npm i github:andrewfarley/serverless-python-requirements#bug-fix-requirements-ordering ``` --- lib/pip.js | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index 044f6c78..3f13ee4e 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -267,7 +267,9 @@ function dockerPathForWin(options, path) { /** create a filtered requirements.txt without anything from noDeploy * then remove all comments and empty lines, and sort the list which - * assist with matching the static cache + * assist with matching the static cache. 
The sorting will skip any + * lines starting with -- as those are typically ordered at the + * start of a file ( eg: --index-url / --extra-index-url ) * @param {string} source requirements * @param {string} target requirements where results are written * @param {Object} options @@ -277,14 +279,24 @@ function generateRequirementsFile(source, target, options) { const requirements = fse .readFileSync(source, { encoding: 'utf-8' }) .split(/\r?\n/); + var prepend = []; const filteredRequirements = requirements.filter(req => { req = req.trim(); - if (req.length == 0 || req[0] == '#') { + if (req.startsWith('#')) { + // Skip comments + return false; + } else if (req.startsWith('--')) { + // If we have options (prefixed with --) keep them for later + prepend.push(req); return false; } return !noDeploy.has(req.split(/[=<> \t]/)[0].trim()); }); - filteredRequirements.sort(); // Sort them alphabetically + filteredRequirements.sort(); // Sort remaining alphabetically + // Then prepend any options from above in the same order + for (let item of prepend.reverse()) { + filteredRequirements.unshift(item); + } fse.writeFileSync(target, filteredRequirements.join('\n')); } From b32f55bb1791c38cbe8de2e30ca61af92e092fc8 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 12 Sep 2018 12:15:59 -0400 Subject: [PATCH 040/328] version bump --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index a86f62d3..65e5f986 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.2.0", + "version": "4.2.1", "engines": { "node": ">=6.0" }, From e14aac9542bfcaa540a5b7c23e1e91999cde890d Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Fri, 14 Sep 2018 09:12:15 -0400 Subject: [PATCH 041/328] Update tests (#239) * Don't skip non-docker tests when docker isn't present * Don't hard code a static cache md5sum @andrewfarley, I noticed that tests were failing on #238 and got the same errors locally on `master`. Do you think it's reasonable to compute the md5sum this way? I don't like hard coding the hash. But, I understand that this makes the test verify less of the process. Best would probably be to externally replicate what `genrerateRequirementsFile` does to create a new file and md5sum that instead of `.serverless/requirements.txt` --- lib/pip.js | 4 +++- test.bats | 21 +++++++-------------- tests/base/package.json | 2 +- tests/individually/package.json | 2 +- tests/pipenv/package.json | 2 +- 5 files changed, 13 insertions(+), 18 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index 3f13ee4e..7d350c6c 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -289,6 +289,8 @@ function generateRequirementsFile(source, target, options) { // If we have options (prefixed with --) keep them for later prepend.push(req); return false; + } else if (req === '') { + return false; } return !noDeploy.has(req.split(/[=<> \t]/)[0].trim()); }); @@ -297,7 +299,7 @@ function generateRequirementsFile(source, target, options) { for (let item of prepend.reverse()) { filteredRequirements.unshift(item); } - fse.writeFileSync(target, filteredRequirements.join('\n')); + fse.writeFileSync(target, filteredRequirements.join('\n') + '\n'); } /** diff --git a/test.bats b/test.bats index b1fd6858..edd8cddc 100755 --- a/test.bats +++ b/test.bats @@ -11,7 +11,6 @@ setup() { # Please note: If you update change the requirements.txt in test/base this value will # change. 
Run a test which uses this variable manually step by step and list the cache # folder to find the new hash if you do this - export CACHE_FOLDER_HASH="b8b9d2be59f6f2ea5778e8b2aa4d2ddc_slspyc" if [ -d "${USR_CACHE_DIR}" ] ; then rm -Rf "${USR_CACHE_DIR}" fi @@ -151,8 +150,6 @@ teardown() { @test "py3.6 uses download cache with useDownloadCache option" { cd tests/base npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true/' serverless.yml sls package USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` @@ -162,8 +159,6 @@ teardown() { @test "py3.6 uses download cache with cacheLocation option" { cd tests/base npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n cacheLocation: .requirements-cache/' serverless.yml sls package ls .requirements-cache/downloadCacheslspyc/http @@ -193,11 +188,10 @@ teardown() { @test "py3.6 uses static and download cache" { cd tests/base npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n useStaticCache: true/' serverless.yml sls package - USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + USR_CACHE_DIR=`node -e 'console.log(require("./node_modules/serverless-python-requirements/lib/shared").getUserCachePath())'` + CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask ls $USR_CACHE_DIR/downloadCacheslspyc/http } @@ -210,6 +204,7 @@ teardown() { perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n useStaticCache: true/' serverless.yml sls --dockerizePip=true package USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask ls $USR_CACHE_DIR/downloadCacheslspyc/http } @@ -217,11 +212,10 @@ teardown() { @test "py3.6 uses static cache" { cd tests/base npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml sls package USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/.completed_requirements } @@ -229,11 +223,10 @@ teardown() { @test "py3.6 uses static cache with cacheLocation option" { cd tests/base npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! 
uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true\n cacheLocation: .requirements-cache/' serverless.yml sls package USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc ls .requirements-cache/$CACHE_FOLDER_HASH/flask ls .requirements-cache/$CACHE_FOLDER_HASH/.completed_requirements } @@ -241,12 +234,11 @@ teardown() { @test "py3.6 checking that static cache actually pulls from cache (by poisoning it)" { cd tests/base npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml sls package cp .serverless/sls-py-req-test.zip ./puck USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` + CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc echo "injected new file into static cache folder" > $USR_CACHE_DIR/$CACHE_FOLDER_HASH/injected_file_is_bad_form sls package [ `wc -c ./.serverless/sls-py-req-test.zip | awk '{ print $1 }'` -gt `wc -c ./puck | awk '{ print $1 }'` ] @@ -259,6 +251,7 @@ teardown() { ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml sls --dockerizePip=true --slim=true package + CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask unzip .serverless/sls-py-req-test.zip -d puck test $(find puck -name "*.pyc" | wc -l) -eq 0 diff --git a/tests/base/package.json b/tests/base/package.json index facece60..f75ba960 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.2.1.tgz" } } diff --git a/tests/individually/package.json b/tests/individually/package.json index facece60..f75ba960 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.2.1.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index facece60..f75ba960 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.2.1.tgz" } } From 837f0631081d46061347061a8441e33c82dd153d Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Fri, 14 Sep 2018 09:26:18 -0400 Subject: [PATCH 042/328] test path with space --- test.bats | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/test.bats b/test.bats index edd8cddc..954f494f 100755 --- a/test.bats +++ b/test.bats @@ 
-24,6 +24,10 @@ teardown() { if [ -d "${USR_CACHE_DIR}" ] ; then rm -Rf "${USR_CACHE_DIR}" fi + cd ../.. + if [ -d "tests/base with a space" ] ; then + rm -Rf "tests/base with a space" + fi } @test "py3.6 supports custom file name with fileName option" { @@ -566,3 +570,12 @@ teardown() { unzip .serverless/hello1.zip -d puck ./puck/module1/foobar } + +@test "py3.6 can package flask in a project with a space in it" { + cp -a tests/base "tests/base with a space" + cd "tests/base with a space" + npm i $(npm pack ../..) + sls package + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask +} From 8a32692d25f88a023920a883a770257e3593746c Mon Sep 17 00:00:00 2001 From: Farley Date: Fri, 14 Sep 2018 22:11:59 +0200 Subject: [PATCH 043/328] Allowing all paths to have spaces (#244) --- lib/pip.js | 32 ++++++++++++++++++++++---------- 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index 7d350c6c..da4dd059 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -14,6 +14,10 @@ const { getUserCachePath } = require('./shared'); +function quote_single(quoteme) { + return quote([quoteme]); +} + /** * Just generate the requirements file in the .serverless folder * @param {string} requirementsPath @@ -100,7 +104,7 @@ function installRequirements(targetFolder, serverless, options) { ); serverless.cli.log(`Using download cache directory ${downloadCacheDir}`); fse.ensureDirSync(downloadCacheDir); - pipCmd.push('--cache-dir', downloadCacheDir); + pipCmd.push('--cache-dir', quote_single(downloadCacheDir)); } // Check if pip has Debian's --system option and set it if so @@ -145,18 +149,23 @@ function installRequirements(targetFolder, serverless, options) { // Prepare bind path depending on os platform const bindPath = getBindPath(serverless, targetFolder); - cmdOptions = ['run', '--rm', '-v', `"${bindPath}:/var/task:z"`]; + cmdOptions = ['run', '--rm', '-v', quote_single(`${bindPath}:/var/task:z`)]; if (options.dockerSsh) { // Mount necessary ssh files to work with private repos cmdOptions.push( '-v', - `"${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z"` + quote_single(`${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`) ); cmdOptions.push( '-v', - `"${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z"` + quote_single( + `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z` + ) + ); + cmdOptions.push( + '-v', + quote_single(`${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`) ); - cmdOptions.push('-v', `"${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z"`); cmdOptions.push('-e', 'SSH_AUTH_SOCK=/tmp/ssh_sock'); } @@ -177,8 +186,11 @@ function installRequirements(targetFolder, serverless, options) { ); const windowsized = getBindPath(serverless, downloadCacheDir); // And now push it to a volume mount and to pip... 
- cmdOptions.push('-v', `"${windowsized}:${dockerDownloadCacheDir}:z"`); - pipCmd.push('--cache-dir', dockerDownloadCacheDir); + cmdOptions.push( + '-v', + quote_single(`${windowsized}:${dockerDownloadCacheDir}:z`) + ); + pipCmd.push('--cache-dir', quote_single(dockerDownloadCacheDir)); } if (process.platform === 'linux') { @@ -189,7 +201,7 @@ function installRequirements(targetFolder, serverless, options) { commands.push(quote(['chown', '-R', '0:0', dockerDownloadCacheDir])); } // Install requirements with pip - commands.push(quote(pipCmd)); + commands.push(pipCmd.join(' ')); // Set the ownership of the current folder to user commands.push( quote([ @@ -213,7 +225,7 @@ function installRequirements(targetFolder, serverless, options) { pipCmd = ['/bin/bash', '-c', '"' + commands.join(' && ') + '"']; } else { // Use same user so --cache-dir works - cmdOptions.push('-u', getDockerUid(bindPath)); + cmdOptions.push('-u', quote_single(getDockerUid(bindPath))); } cmdOptions.push(dockerImage); cmdOptions.push(...pipCmd); @@ -262,7 +274,7 @@ function dockerPathForWin(options, path) { if (process.platform === 'win32' && options.dockerizePip) { return path.replace(/\\/g, '/'); } - return path; + return quote_single(path); } /** create a filtered requirements.txt without anything from noDeploy From b4ecc43ce3969c9191d0e1a359ba185a18caefdd Mon Sep 17 00:00:00 2001 From: Farley Date: Fri, 14 Sep 2018 22:29:20 +0200 Subject: [PATCH 044/328] Adding -f and -i to requirements.txt options ignore list before sorting (#238) Fixes: #236 (again, properly) Problem: requirements.txt options which must be in the proper order at the start of the file were being sorted. This properly skips sorting them and keeps them in order they were. --- lib/pip.js | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index da4dd059..d5017e28 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -281,7 +281,9 @@ function dockerPathForWin(options, path) { * then remove all comments and empty lines, and sort the list which * assist with matching the static cache. 
The sorting will skip any * lines starting with -- as those are typically ordered at the - * start of a file ( eg: --index-url / --extra-index-url ) + * start of a file ( eg: --index-url / --extra-index-url ) or any + * lines that start with -f or -i, Please see: + * https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format * @param {string} source requirements * @param {string} target requirements where results are written * @param {Object} options @@ -297,7 +299,11 @@ function generateRequirementsFile(source, target, options) { if (req.startsWith('#')) { // Skip comments return false; - } else if (req.startsWith('--')) { + } else if ( + req.startsWith('--') || + req.startsWith('-f') || + req.startsWith('-i') + ) { // If we have options (prefixed with --) keep them for later prepend.push(req); return false; @@ -309,7 +315,9 @@ function generateRequirementsFile(source, target, options) { filteredRequirements.sort(); // Sort remaining alphabetically // Then prepend any options from above in the same order for (let item of prepend.reverse()) { - filteredRequirements.unshift(item); + if (item && item.length > 0) { + filteredRequirements.unshift(item); + } } fse.writeFileSync(target, filteredRequirements.join('\n') + '\n'); } From 9da6422711766080ff53ebdc3c2d661c4e4fa64a Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Fri, 14 Sep 2018 16:30:18 -0400 Subject: [PATCH 045/328] version bump for bug fixes --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 65e5f986..7cbc7560 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.2.1", + "version": "4.2.2", "engines": { "node": ">=6.0" }, From 5dd691d30df09d965603f21e65655e9ea16c8d5e Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 17 Sep 2018 16:30:50 -0400 Subject: [PATCH 046/328] Perform space in path test with docker too --- test.bats | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/test.bats b/test.bats index 954f494f..34027577 100755 --- a/test.bats +++ b/test.bats @@ -579,3 +579,12 @@ teardown() { unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask } + +@test "py3.6 can package flask in a project with a space in it with docker" { + cp -a tests/base "tests/base with a space" + cd "tests/base with a space" + npm i $(npm pack ../..) + sls --dockerizePip=true package + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask +} From c5d9315c17b3e54554c8d85f52c3977d33011b50 Mon Sep 17 00:00:00 2001 From: Paul Vecchio Date: Tue, 18 Sep 2018 07:54:45 -0700 Subject: [PATCH 047/328] fix broken windows path (#247) Current version seems to handle windows paths improperly ``` sh docker: Error response from daemon: Mount denied: The source path "C\\:/Users/Paul/Development/solidangle/match-api/app/.serverless/requirements\\:/var/task\\" is not a valid Windows path. See 'docker run --help'. ``` Calling `dockerPathForWin` on `bindPath` creation resolves issue. 
--- lib/pip.js | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index d5017e28..a80135ce 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -147,9 +147,12 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log(`Docker Image: ${dockerImage}`); // Prepare bind path depending on os platform - const bindPath = getBindPath(serverless, targetFolder); + const bindPath = dockerPathForWin( + options, + getBindPath(serverless, targetFolder) + ); - cmdOptions = ['run', '--rm', '-v', quote_single(`${bindPath}:/var/task:z`)]; + cmdOptions = ['run', '--rm', '-v', `${bindPath}:/var/task:z`]; if (options.dockerSsh) { // Mount necessary ssh files to work with private repos cmdOptions.push( From 8b06b7c2e8a5a641559d9ef5cd26ce3b2eae23af Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Tue, 18 Sep 2018 10:55:21 -0400 Subject: [PATCH 048/328] version bump for #247 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 7cbc7560..a2748661 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.2.2", + "version": "4.2.3", "engines": { "node": ">=6.0" }, From 1c603fae9a13dcd1e30d768356984821d9ec9a0b Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Tue, 18 Sep 2018 10:57:46 -0400 Subject: [PATCH 049/328] Don't set pip CWD to target dir (#246) This fixes #245 by making it so that relative paths in dependencies resolve correctly --- lib/pip.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pip.js b/lib/pip.js index a80135ce..39fdb5ab 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -242,7 +242,7 @@ function installRequirements(targetFolder, serverless, options) { const preparedPath = dockerPathForWin(options, targetFolder); cmdOptions.push(getStripCommand(options, preparedPath)); } - let spawnArgs = { cwd: targetFolder, shell: true }; + let spawnArgs = { shell: true }; if (process.env.SLS_DEBUG) { spawnArgs.stdio = 'inherit'; } From d50b7f89ecc0bae66ab6b6ffe38710d34e14c006 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Tue, 18 Sep 2018 10:59:03 -0400 Subject: [PATCH 050/328] version bump for #246 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index a2748661..d332c158 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.2.3", + "version": "4.2.4", "engines": { "node": ">=6.0" }, From 3b2c2452d831c65d99b0f639ba9e4a6d6fa1adf5 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 20 Sep 2018 10:16:55 -0400 Subject: [PATCH 051/328] attempt to fix #184 (#251) --- lib/pip.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pip.js b/lib/pip.js index 39fdb5ab..e1557216 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -275,7 +275,7 @@ function installRequirements(targetFolder, serverless, options) { */ function dockerPathForWin(options, path) { if (process.platform === 'win32' && options.dockerizePip) { - return path.replace(/\\/g, '/'); + return `"${path.replace(/\\/g, '/')}"`; } return quote_single(path); } From 2d13f13e6c65fe3548e9203e1e3698654d8d2f65 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 10 Oct 2018 00:20:26 +0000 Subject: [PATCH 052/328] some test fixes --- test.bats | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test.bats b/test.bats index 34027577..12343bcd 100755 --- a/test.bats +++ b/test.bats @@ -50,13 +50,14 @@ 
teardown() { ls puck/flask } -@test "py3.6 can package flask with zip option" { +@test "py3.6 can package flask & bottle with zip option" { cd tests/base npm i $(npm pack ../..) sls --zip=true package unzip .serverless/sls-py-req-test.zip -d puck ls puck/.requirements.zip puck/unzip_requirements.py ! ls puck/flask + ! ls puck/bottle } @test "py3.6 can package flask with slim options" { @@ -581,6 +582,8 @@ teardown() { } @test "py3.6 can package flask in a project with a space in it with docker" { + docker &> /dev/null || skip "docker not present" + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" cp -a tests/base "tests/base with a space" cd "tests/base with a space" npm i $(npm pack ../..) From 796f64d5741822a66c1756de2706530e9364e597 Mon Sep 17 00:00:00 2001 From: Devin Turner Date: Thu, 11 Oct 2018 08:00:47 -0500 Subject: [PATCH 053/328] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c464dc0a..d424b15c 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ If you're on a mac, check out [these notes](#applebeersnake-mac-brew-installed-p ## Cross compiling! -Compiling non-pure-Python modules or fetching their manylinux wheels is +Compiling non-pure-Python modules or fetching their many linux wheels is supported on non-linux OSs via the use of Docker and the [docker-lambda](https://github.com/lambci/docker-lambda) image. To enable docker usage, add the following to your `serverless.yml`: @@ -115,7 +115,7 @@ custom: ``` #### Custom Removal Patterns To specify additional directories to remove from the installed packages, -define a list of of patterns int he serverless config using the `slimPatterns` +define a list of patterns in the serverless config using the `slimPatterns` option and glob syntax. Note, it matches against whole paths, so to match a file in any directory, start your pattern with `**/`. 
```yaml From a42eee3b9a45b93ceddf8a9d1d7d30453f49d02b Mon Sep 17 00:00:00 2001 From: PatrickBuTaxdoo Date: Thu, 11 Oct 2018 18:52:14 +0200 Subject: [PATCH 054/328] fixed #252 (#253) * fixed #252 * better approach to fix #252 * fixed undone merge --- lib/pip.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/pip.js b/lib/pip.js index e1557216..bc84b4d4 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -276,6 +276,8 @@ function installRequirements(targetFolder, serverless, options) { function dockerPathForWin(options, path) { if (process.platform === 'win32' && options.dockerizePip) { return `"${path.replace(/\\/g, '/')}"`; + } else if (process.platform === 'win32' && !options.dockerizePip) { + return path; } return quote_single(path); } From 63edf6b32807aebac1809404e547ab0ab3b49ede Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 19 Sep 2018 12:22:38 -0400 Subject: [PATCH 055/328] simpler test cleanup precursor to JS version since i want it as simple as possible before port --- test.bats | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/test.bats b/test.bats index 12343bcd..9c581112 100755 --- a/test.bats +++ b/test.bats @@ -18,12 +18,9 @@ setup() { teardown() { rm -rf puck puck2 puck3 node_modules .serverless .requirements.zip .requirements-cache \ - foobar package-lock.json serverless-python-requirements-*.tgz - if [ -f serverless.yml.bak ]; then mv serverless.yml.bak serverless.yml; fi - if [ -f slimPatterns.yml ]; then rm -f slimPatterns.yml; fi - if [ -d "${USR_CACHE_DIR}" ] ; then - rm -Rf "${USR_CACHE_DIR}" - fi + foobar package-lock.json serverless.yml.bak slimPatterns.yml "${USR_CACHE_DIR}" + serverless-python-requirements-*.tgz + git checkout serverless.yml cd ../.. if [ -d "tests/base with a space" ] ; then rm -Rf "tests/base with a space" From eb53a715868bb988e33b89590ceaca018847ce9a Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 19 Sep 2018 12:23:44 -0400 Subject: [PATCH 056/328] move 1st test down.. probably too much but its not the first/primary test --- test.bats | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/test.bats b/test.bats index 9c581112..9b8812dc 100755 --- a/test.bats +++ b/test.bats @@ -27,18 +27,6 @@ teardown() { fi } -@test "py3.6 supports custom file name with fileName option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n fileName: puck/' serverless.yml - echo "requests" > puck - sls package - ls .serverless/requirements/requests - ! ls .serverless/requirements/flask -} - @test "py3.6 can package flask with default options" { cd tests/base npm i $(npm pack ../..) @@ -588,3 +576,15 @@ teardown() { unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask } + +@test "py3.6 supports custom file name with fileName option" { + cd tests/base + npm i $(npm pack ../..) + docker &> /dev/null || skip "docker not present" + ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" + perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n fileName: puck/' serverless.yml + echo "requests" > puck + sls package + ls .serverless/requirements/requests + ! 
ls .serverless/requirements/flask +} From eb8cc1134786c5acf4eff3a77be1cccaa5ae9eb7 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 20 Sep 2018 13:09:09 -0400 Subject: [PATCH 057/328] First pass at tape tests instead of bats tests Main advantage is that these can run on windows more easily, thus fixing #249 --- appveyor.yml | 23 +++---- index.js | 5 +- lib/pip.js | 2 +- package.json | 9 ++- test.js | 136 ++++++++++++++++++++++++++++++++++++++++ tests/base/package.json | 2 +- 6 files changed, 157 insertions(+), 20 deletions(-) create mode 100644 test.js diff --git a/appveyor.yml b/appveyor.yml index 7431e08b..abc3badb 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,14 +1,9 @@ -version: '{build}' -init: -- ps: >- - Install-Product node 6 - - npm i -g serverless -build: off -test_script: -- cmd: >- - cd example - - npm i - - sls package --pythonBin=c:/python36/python.exe \ No newline at end of file +version: '{build}' +init: +- ps: npm i -g serverless +build: off +test_script: +- cmd: >- + npm i + + node test.js diff --git a/index.js b/index.js index ff73b1f7..155897ce 100644 --- a/index.js +++ b/index.js @@ -34,7 +34,10 @@ class ServerlessPythonRequirements { invalidateCaches: false, fileName: 'requirements.txt', usePipenv: true, - pythonBin: this.serverless.service.provider.runtime || 'python', + pythonBin: + process.platform === 'win32' + ? 'python.exe' + : this.serverless.service.provider.runtime || 'python', dockerizePip: false, dockerSsh: false, dockerImage: null, diff --git a/lib/pip.js b/lib/pip.js index bc84b4d4..aed9c03f 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -274,7 +274,7 @@ function installRequirements(targetFolder, serverless, options) { * @return {string} */ function dockerPathForWin(options, path) { - if (process.platform === 'win32' && options.dockerizePip) { + if (process.platform === 'win32') { return `"${path.replace(/\\/g, '/')}"`; } else if (process.platform === 'win32' && !options.dockerizePip) { return path; diff --git a/package.json b/package.json index d332c158..8be5cffa 100644 --- a/package.json +++ b/package.json @@ -38,13 +38,16 @@ "main": "index.js", "bin": {}, "scripts": { - "test": "bats test.bats", + "test": "node test.js && bats test.bats", "lint": "eslint *.js lib/*.js && prettier -l index.js lib/*.js || (echo need formatting ; exit 1)", - "format": "prettier --write index.js lib/*.js" + "format": "prettier --write index.js lib/*.js test.js" }, "devDependencies": { "eslint": "*", - "prettier": "*" + "prettier": "*", + "cross-spawn": "*", + "deasync-promise": "*", + "tape": "*" }, "dependencies": { "appdirectory": "^0.1.0", diff --git a/test.js b/test.js new file mode 100644 index 00000000..ff5d90a4 --- /dev/null +++ b/test.js @@ -0,0 +1,136 @@ +const crossSpawn = require('cross-spawn'); +const deasync = require('deasync-promise'); +const glob = require('glob-all'); +const JSZip = require('jszip'); +const tape = require('tape'); +const { removeSync, readFileSync } = require('fs-extra'); +const { sep } = require('path'); + +const { getUserCachePath } = require('./lib/shared'); + +const initialWorkingDir = process.cwd(); + +const mkCommand = cmd => (args, options = {}) => { + const { error, stdout, stderr, status } = crossSpawn.sync( + cmd, + args, + Object.assign( + { + env: Object.assign( + process.env, + { SLS_DEBUG: 't' }, + process.env.CI ? 
{ LC_ALL: 'C.UTF-8', LANG: 'C.UTF-8' } : {} + ) + }, + options + ) + ); + if (error) throw error; + if (status) { + console.error(stdout.toString()); // eslint-disable-line no-console + console.error(stderr.toString()); // eslint-disable-line no-console + throw new Error(`${cmd} failed with status code ${status}`); + } + return stdout && stdout.toString().trim(); +}; +const sls = mkCommand('sls'); +const git = mkCommand('git'); +const npm = mkCommand('npm'); + +const setup = () => { + removeSync(getUserCachePath()); +}; + +const teardown = () => { + [ + 'puck', + 'puck2', + 'puck3', + 'node_modules', + '.serverless', + '.requirements.zip', + '.requirements-cache', + 'foobar', + 'package-lock.json', + 'slimPatterns.yml', + 'serverless.yml.bak', + getUserCachePath(), + ...glob.sync('serverless-python-requirements-*.tgz') + ].map(path => removeSync(path)); + git(['checkout', 'serverless.yml']); + process.chdir(initialWorkingDir); + removeSync('tests/base with a space'); +}; + +const test = (desc, func) => + tape.test(desc, t => { + setup(); + try { + func(t); + } finally { + teardown(); + } + }); + +const getPythonBin = (version = 3) => { + if (![2, 3].includes(version)) throw new Error('version must be 2 or 3'); + if (process.platform === 'win32') + return `c:/python${version === 2 ? '27' : '36'}-x64/python.exe`; + else return version === 2 ? 'python2.7' : 'python3.6'; +}; + +const listZipFiles = filename => + Object.keys(deasync(new JSZip().loadAsync(readFileSync(filename))).files); + +test('default pythonBin can package flask with default options', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.end(); +}); + +test('py3.6 can package flask with default options', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.end(); +}); + +test('py3.6 can package flask with zip option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test('py3.6 can package flask with slim option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, '--slim=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.end(); +}); diff --git a/tests/base/package.json b/tests/base/package.json index f75ba960..d37ade00 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": 
"file:serverless-python-requirements-4.2.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.2.4.tgz" } } From 6786b9ce42a35db37d0a7b75ea17537358619b64 Mon Sep 17 00:00:00 2001 From: Devin Turner Date: Thu, 11 Oct 2018 17:37:31 -0500 Subject: [PATCH 058/328] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d424b15c..e97d077f 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ If you're on a mac, check out [these notes](#applebeersnake-mac-brew-installed-p ## Cross compiling! -Compiling non-pure-Python modules or fetching their many linux wheels is +Compiling non-pure-Python modules or fetching their manylinux wheels is supported on non-linux OSs via the use of Docker and the [docker-lambda](https://github.com/lambci/docker-lambda) image. To enable docker usage, add the following to your `serverless.yml`: From eaf1b784e2b1271785b20420879bb39c87d4062b Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 1 Nov 2018 15:20:00 -0400 Subject: [PATCH 059/328] honor escaped newlines in requirements.txt (#264) * test proving bug #261 with pip-tools * honor escaped newlines in requirements.txt fixes #261 * fix merge issue --- circle.yml | 2 +- lib/pip.js | 1 + test.bats | 14 ++++++++++---- tests/base/serverless.yml | 1 + 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/circle.yml b/circle.yml index cef66fcb..f86e51af 100644 --- a/circle.yml +++ b/circle.yml @@ -24,7 +24,7 @@ jobs: # other deps - run: sudo apt -y update && sudo apt -y install python-pip python2.7 curl unzip # instal pipenv - - run: sudo python3.6 -m pip install pipenv + - run: sudo python3.6 -m pip install pipenv pip-tools # install nodejs - run: curl -sL https://deb.nodesource.com/setup_6.x | sudo bash - && sudo apt -y install nodejs # install serverless & depcheck diff --git a/lib/pip.js b/lib/pip.js index aed9c03f..a6285d8c 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -297,6 +297,7 @@ function generateRequirementsFile(source, target, options) { const noDeploy = new Set(options.noDeploy || []); const requirements = fse .readFileSync(source, { encoding: 'utf-8' }) + .replace(/\\\n/g, ' ') .split(/\r?\n/); var prepend = []; const filteredRequirements = requirements.filter(req => { diff --git a/test.bats b/test.bats index 9b8812dc..e10d5e1d 100755 --- a/test.bats +++ b/test.bats @@ -35,6 +35,15 @@ teardown() { ls puck/flask } +@test "py3.6 can package flask with hashes" { + cd tests/base + npm i $(npm pack ../..) + pip-compile --output-file requirements-w-hashes.txt --generate-hashes requirements.txt + sls package --fileName requirements-w-hashes.txt + unzip .serverless/sls-py-req-test.zip -d puck + ls puck/flask +} + @test "py3.6 can package flask & bottle with zip option" { cd tests/base npm i $(npm pack ../..) @@ -580,11 +589,8 @@ teardown() { @test "py3.6 supports custom file name with fileName option" { cd tests/base npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n fileName: puck/' serverless.yml echo "requests" > puck - sls package + sls --fileName puck package ls .serverless/requirements/requests ! 
ls .serverless/requirements/flask } diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 7c864714..65078c7c 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -13,6 +13,7 @@ custom: slim: ${opt:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} vendor: ${opt:vendor, ''} + fileName: ${opt:fileName, 'requirements.txt'} defaults: slim: false slimPatterns: false From 411ced4d20cbfd926dc74a660d3b15e885b8213b Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 1 Nov 2018 15:27:19 -0400 Subject: [PATCH 060/328] Version 4.2.5 * Fixes issues with spaces in path on windows #184 * More windows path issues #252 * fix issues when using pip-tools with hashes #261 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 8be5cffa..c426c94f 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.2.4", + "version": "4.2.5", "engines": { "node": ">=6.0" }, From 4447b280270613f23614a0b7f3be7dcae479096d Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 7 Nov 2018 14:23:43 -0500 Subject: [PATCH 061/328] test for zip + noDeploy. #186 --- test.js | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/test.js b/test.js index ff5d90a4..ca732f6e 100644 --- a/test.js +++ b/test.js @@ -36,6 +36,7 @@ const mkCommand = cmd => (args, options = {}) => { const sls = mkCommand('sls'); const git = mkCommand('git'); const npm = mkCommand('npm'); +const perl = mkCommand('perl'); const setup = () => { removeSync(getUserCachePath()); @@ -81,6 +82,12 @@ const getPythonBin = (version = 3) => { const listZipFiles = filename => Object.keys(deasync(new JSZip().loadAsync(readFileSync(filename))).files); +const listRequirementsZipFiles = filename => { + const zip = deasync(new JSZip().loadAsync(readFileSync(filename))); + const reqsBuffer = deasync(zip.file('.requirements.zip').async('nodebuffer')); + const reqsZip = deasync(new JSZip().loadAsync(reqsBuffer)); + return Object.keys(reqsZip.files) +}; test('default pythonBin can package flask with default options', t => { process.chdir('tests/base'); @@ -134,3 +141,30 @@ test('py3.6 can package flask with slim option', t => { ); t.end(); }); + + +/* + * News tests not in test.bats + */ + +test("py3.6 doesn't package bottle with zip option", t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl(['-p', "-i'.bak'", '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml']) + sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = listRequirementsZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true(zippedReqs.includes(`flask${sep}__init__.py`), 'flask is packaged in the .requirements.zip file'); + t.false(zippedReqs.includes(`bottle.py`), 'bottle is not packaged in the .requirements.zip file'); + t.end(); +}); From e5921885f9d0adf4a327bc7258558212844505c1 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 7 Nov 2018 15:05:56 -0500 Subject: [PATCH 062/328] oh, maybe in zips its 
alway `/` not `${sep}` --- test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test.js b/test.js index ca732f6e..82d43722 100644 --- a/test.js +++ b/test.js @@ -164,7 +164,7 @@ test("py3.6 doesn't package bottle with zip option", t => { zipfiles.includes(`flask${sep}__init__.py`), "flask isn't packaged on its own" ); - t.true(zippedReqs.includes(`flask${sep}__init__.py`), 'flask is packaged in the .requirements.zip file'); + t.true(zippedReqs.includes(`flask/__init__.py`), 'flask is packaged in the .requirements.zip file'); t.false(zippedReqs.includes(`bottle.py`), 'bottle is not packaged in the .requirements.zip file'); t.end(); }); From eabbd89d7e1396acceed71899fb0ac519adbfed5 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 8 Nov 2018 08:14:09 -0500 Subject: [PATCH 063/328] Pass env vars to docker (#271) * Add environment variables to docker run cmd * Pass env vars to docker Based on #231 & closes #267 * format * fix docs * Update README.md --- README.md | 9 +++++++++ index.js | 1 + lib/pip.js | 7 +++++++ 3 files changed, 17 insertions(+) diff --git a/README.md b/README.md index e97d077f..f0e35927 100644 --- a/README.md +++ b/README.md @@ -72,6 +72,15 @@ It is important that the host of your private repositories has already been adde `$HOME/.ssh/known_hosts` file, as the install process will fail otherwise due to host authenticity failure. +You can also pass environment variables to docker by specifying them in `dockerEnv` +option: +```yaml +custom: + pythonRequirements: + dockerEnv: + - https_proxy +``` + [:checkered_flag: Windows notes](#checkered_flag-windows-dockerizepip-notes) ## Pipenv support :sparkles::cake::sparkles: diff --git a/index.js b/index.js index 155897ce..aadad43b 100644 --- a/index.js +++ b/index.js @@ -42,6 +42,7 @@ class ServerlessPythonRequirements { dockerSsh: false, dockerImage: null, dockerFile: null, + dockerEnv: false, useStaticCache: false, useDownloadCache: false, cacheLocation: false, diff --git a/lib/pip.js b/lib/pip.js index a6285d8c..e59173d8 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -196,6 +196,13 @@ function installRequirements(targetFolder, serverless, options) { pipCmd.push('--cache-dir', quote_single(dockerDownloadCacheDir)); } + if (options.dockerEnv) { + // Add environment variables to docker run cmd + options.dockerEnv.forEach(function(item) { + cmdOptions.push('-e', item); + }); + } + if (process.platform === 'linux') { // Use same user so requirements folder is not root and so --cache-dir works var commands = []; From 8551233b9a164bcbcd8632baef1621ce67bdc7b4 Mon Sep 17 00:00:00 2001 From: Benjamin Weigel Date: Sat, 17 Nov 2018 01:54:43 +0100 Subject: [PATCH 064/328] override slimPatterns instead of appending (#276) closes #216 --- README.md | 14 ++- appveyor.yml | 9 +- index.js | 1 + lib/slim.js | 9 +- test.bats | 4 +- test.js | 169 ++++++++++++++++++++++++++++++++++-- tests/base/serverless.yml | 2 + tests/pipenv/serverless.yml | 2 + 8 files changed, 191 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index f0e35927..f0418b20 100644 --- a/README.md +++ b/README.md @@ -116,7 +116,7 @@ except ImportError: _Works on non 'win32' environments: Docker, WSL are included_ To remove the tests, information and caches from the installed packages, enable the `slim` option. This will: `strip` the `.so` files, remove `__pycache__` -directories and `dist-info` directories. +and `dist-info` directories as well as `.pyc` and `.pyo` files. 
```yaml custom: pythonRequirements: @@ -125,7 +125,8 @@ custom: #### Custom Removal Patterns To specify additional directories to remove from the installed packages, define a list of patterns in the serverless config using the `slimPatterns` -option and glob syntax. Note, it matches against whole paths, so to match a file in any +option and glob syntax. These paterns will be added to the default ones (`**/*.py[c|o]`, `**/__pycache__*`, `**/*.dist-info*`). +Note, the glob syntax matches against whole paths, so to match a file in any directory, start your pattern with `**/`. ```yaml custom: @@ -134,6 +135,15 @@ custom: slimPatterns: - "**/*.egg-info*" ``` +To overwrite the default patterns set the option `slimPatternsAppendDefaults` to `false` (`true` by default). +```yaml +custom: + pythonRequirements: + slim: true + slimPatternsAppendDefaults: false + slimPatterns: + - "**/*.egg-info*" +``` This will remove all folders within the installed requirements that match the names in `slimPatterns` ## Omitting Packages diff --git a/appveyor.yml b/appveyor.yml index abc3badb..a8301d47 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,9 +1,10 @@ version: '{build}' init: -- ps: npm i -g serverless + - cmd: pip install pipenv + - ps: npm i -g serverless build: off test_script: -- cmd: >- - npm i + - cmd: >- + npm i - node test.js + node test.js diff --git a/index.js b/index.js index aadad43b..1d2854bf 100644 --- a/index.js +++ b/index.js @@ -29,6 +29,7 @@ class ServerlessPythonRequirements { { slim: false, slimPatterns: false, + slimPatternsAppendDefaults: true, zip: false, cleanupZipHelper: true, invalidateCaches: false, diff --git a/lib/slim.js b/lib/slim.js index ede926ce..0c87229a 100644 --- a/lib/slim.js +++ b/lib/slim.js @@ -10,7 +10,14 @@ const getStripCommand = (options, folderPath) => const deleteFiles = (options, folderPath) => { let patterns = ['**/*.py[c|o]', '**/__pycache__*', '**/*.dist-info*']; if (options.slimPatterns) { - patterns = patterns.concat(options.slimPatterns); + if ( + options.slimPatternsAppendDefaults === false || + options.slimPatternsAppendDefaults == 'false' + ) { + patterns = options.slimPatterns; + } else { + patterns = patterns.concat(options.slimPatterns); + } } for (const pattern of patterns) { for (const file of glob.sync(`${folderPath}/${pattern}`)) { diff --git a/test.bats b/test.bats index e10d5e1d..3c0e6c3c 100755 --- a/test.bats +++ b/test.bats @@ -297,7 +297,7 @@ teardown() { cd tests/base cat _slimPatterns.yml > slimPatterns.yml npm i $(npm pack ../..) - sls --runtime=python2.7 --slim=true packag + sls --runtime=python2.7 --slim=true package unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 @@ -462,7 +462,6 @@ teardown() { test $(find "puck*" -name "*.pyc" | wc -l) -eq 0 } - @test "py2.7 can package flask with package individually option" { cd tests/base npm i $(npm pack ../..) @@ -488,7 +487,6 @@ teardown() { test $(find puck* -name "*.pyc" | wc -l) -eq 0 } - @test "py3.6 can package only requirements of module" { cd tests/individually npm i $(npm pack ../..) 
diff --git a/test.js b/test.js index 82d43722..0200fa35 100644 --- a/test.js +++ b/test.js @@ -3,7 +3,7 @@ const deasync = require('deasync-promise'); const glob = require('glob-all'); const JSZip = require('jszip'); const tape = require('tape'); -const { removeSync, readFileSync } = require('fs-extra'); +const { removeSync, readFileSync, copySync } = require('fs-extra'); const { sep } = require('path'); const { getUserCachePath } = require('./lib/shared'); @@ -63,8 +63,8 @@ const teardown = () => { removeSync('tests/base with a space'); }; -const test = (desc, func) => - tape.test(desc, t => { +const test = (desc, func, opts = {}) => + tape.test(desc, opts, t => { setup(); try { func(t); @@ -86,7 +86,17 @@ const listRequirementsZipFiles = filename => { const zip = deasync(new JSZip().loadAsync(readFileSync(filename))); const reqsBuffer = deasync(zip.file('.requirements.zip').async('nodebuffer')); const reqsZip = deasync(new JSZip().loadAsync(reqsBuffer)); - return Object.keys(reqsZip.files) + return Object.keys(reqsZip.files); +}; + +const canUseDocker = () => { + let result; + try { + result = crossSpawn.sync('docker', ['ps']); + } catch (e) { + return false; + } + return result.status === 0; }; test('default pythonBin can package flask with default options', t => { @@ -142,7 +152,6 @@ test('py3.6 can package flask with slim option', t => { t.end(); }); - /* * News tests not in test.bats */ @@ -151,10 +160,18 @@ test("py3.6 doesn't package bottle with zip option", t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - perl(['-p', "-i'.bak'", '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml']) + perl([ + '-p', + "-i'.bak'", + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml' + ]); sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = listRequirementsZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); t.true( zipfiles.includes('.requirements.zip'), 'zipped requirements are packaged' @@ -164,7 +181,141 @@ test("py3.6 doesn't package bottle with zip option", t => { zipfiles.includes(`flask${sep}__init__.py`), "flask isn't packaged on its own" ); - t.true(zippedReqs.includes(`flask/__init__.py`), 'flask is packaged in the .requirements.zip file'); - t.false(zippedReqs.includes(`bottle.py`), 'bottle is not packaged in the .requirements.zip file'); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.false( + zippedReqs.includes(`bottle.py`), + 'bottle is not packaged in the .requirements.zip file' + ); + t.end(); +}); + +test('py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.includes('.egg-info')), + [], + '.egg-info folders are not packaged' + ); + t.end(); +}); + +test( + 'py3.6 can package 
flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--dockerizePip=true', + '--slim=true', + '--slimPatternsAppendDefaults=false', + 'package' + ]); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.includes('.egg-infooo')), + [], + '.egg-info folders are not packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test('py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--runtime=python2.7', + '--slim=true', + '--slimPatternsAppendDefaults=false', + 'package' + ]); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.includes('.egg-info')), + [], + '.egg-info folders are not packaged' + ); + t.end(); +}); + +test( + 'py2.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--dockerizePip=true', + '--runtime=python2.7', + '--slim=true', + '--slimPatternsAppendDefaults=false', + 'package' + ]); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.includes('.egg-info')), + [], + '.egg-info folders are not packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test('pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', t => { + process.chdir('tests/pipenv'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.includes('.egg-info')), + [], + '.egg-info folders are not packaged' + ); t.end(); }); diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 65078c7c..23b3f793 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -12,11 +12,13 @@ custom: dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} slim: ${opt:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: 
${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} vendor: ${opt:vendor, ''} fileName: ${opt:fileName, 'requirements.txt'} defaults: slim: false slimPatterns: false + slimPatternsAppendDefaults: true zip: false dockerizePip: false individually: false diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index feb7f9de..6df76a55 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -11,10 +11,12 @@ custom: zip: ${opt:zip, self:custom.defaults.zip} slim: ${opt:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} defaults: zip: false slimPatterns: false + slimPatternsAppendDefaults: true slim: false dockerizePip: false From f2d73eac40e332e59eec8aa50e5999d9462406af Mon Sep 17 00:00:00 2001 From: Benjamin Weigel Date: Thu, 22 Nov 2018 00:24:02 +0100 Subject: [PATCH 065/328] Fix #194 (#283) Closes #194 ...no clue why git(hub) thinks there are 5 commits & 9 changed files (all those changes are already in the master; on which I rebased before branching) --- lib/pip.js | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/lib/pip.js b/lib/pip.js index e59173d8..fce97071 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -381,6 +381,20 @@ function installRequirementsIfNeeded( // Our source requirements, under our service path, and our module path (if specified) const fileName = path.join(servicePath, modulePath, options.fileName); + // Skip requirements generation, if requirements file doesn't exist + if (options.usePipenv) { + if ( + !fse.existsSync(path.join(servicePath, 'Pipfile')) && + !fse.existsSync(fileName) + ) { + return false; + } + } else { + if (!fse.existsSync(fileName)) { + return false; + } + } + // First, generate the requirements file to our local .serverless folder fse.ensureDirSync(path.join(servicePath, '.serverless')); const slsReqsTxt = path.join(servicePath, '.serverless', 'requirements.txt'); From 337cd460facb1f89bd2bf3680e538627326a60d4 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 21 Nov 2018 18:25:38 -0500 Subject: [PATCH 066/328] add @bweigel to contributors --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index f0418b20..81d7044d 100644 --- a/README.md +++ b/README.md @@ -385,3 +385,4 @@ zipinfo .serverless/xxx.zip * [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option * [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) * [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching + * [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements From 937fa564bf12fcb043678a0b48064bc60cbd2ea2 Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Mon, 26 Nov 2018 20:45:20 -0500 Subject: [PATCH 067/328] Clean up how the commands are built and run, and make sure strip is called correctly. - Make sure we're consistently quoting arguments. - Add mergeCommands function to construct a script for docker to run when needed. - Add getStripMode to run strip correctly for the platform and docker. 
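A minimal sketch of the command merging this patch describes, assuming shell-quote's `quote` as already used in lib/pip.js: empty commands are dropped, a single command passes through unchanged, and multiple commands are quoted and joined with `&&` into one `/bin/sh -c` invocation that docker can run.

```js
const { quote } = require('shell-quote');

// Sketch: drop empty commands, pass a single command through unchanged,
// and join multiple commands with '&&' into one shell invocation.
function mergeCommands(commands) {
  const cmds = commands.filter(cmd => Boolean(cmd) && cmd.length > 0);
  if (cmds.length === 0) {
    throw new Error('Expected at least one non-empty command');
  } else if (cmds.length === 1) {
    return cmds[0];
  }
  return ['/bin/sh', '-c', cmds.map(quote).join(' && ')];
}

// Example: a chown + pip install pair becomes a single argv that can be
// appended to `docker run ... <image>`.
console.log(
  mergeCommands([
    ['chown', '-R', '0:0', '/var/task'],
    ['python', '-m', 'pip', 'install', '-t', '/var/task/', '-r', '/var/task/requirements.txt']
  ])
);
// [ '/bin/sh', '-c',
//   'chown -R 0:0 /var/task && python -m pip install -t /var/task/ -r /var/task/requirements.txt' ]
```
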
--- lib/pip.js | 200 ++++++++++++++++++++++++++++------------------------ lib/slim.js | 18 +++-- test.js | 12 ++-- 3 files changed, 129 insertions(+), 101 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index fce97071..e2bca6fa 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -6,7 +6,7 @@ const set = require('lodash.set'); const { spawnSync } = require('child_process'); const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); -const { getStripCommand, deleteFiles } = require('./slim'); +const { getStripCommand, getStripMode, deleteFiles } = require('./slim'); const { checkForAndDeleteMaxCacheVersions, md5Path, @@ -14,8 +14,34 @@ const { getUserCachePath } = require('./shared'); -function quote_single(quoteme) { - return quote([quoteme]); +/** + * Omit empty commands. + * In this context, a "command" is a list of arguments. An empty list or falsy value is ommitted. + * @param {string[][]} many commands to merge. + * @return {string[][]} a list of valid commands. + */ +function filterCommands(commands) { + return commands.filter((cmd) => Boolean(cmd) && cmd.length > 0); +} + +/** + * Render zero or more commands as a single command for a Unix environment. + * In this context, a "command" is a list of arguments. An empty list or falsy value is ommitted. + * + * @param {string[][]} many commands to merge. + * @return {string[]} a single list of words. + */ +function mergeCommands(commands) { + const cmds = filterCommands(commands); + if (cmds.length === 0) { + throw new Error('Expected at least one non-empty command') + } else if (cmds.length === 1) { + return cmds[0]; + } else { + // Quote the arguments in each command and join them all using &&. + const script = cmds.map(quote).join(' && '); + return ["/bin/sh", "-c", script]; + } } /** @@ -51,6 +77,25 @@ function installRequirementsFile( } } +function pipAcceptsSystem(pythonBin) { + // Check if pip has Debian's --system option and set it if so + const pipTestRes = spawnSync(pythonBin, [ + '-m', + 'pip', + 'help', + 'install' + ]); + if (pipTestRes.error) { + if (pipTestRes.error.code === 'ENOENT') { + throw new Error( + `${pythonBin} not found! Try the pythonBin option.` + ); + } + throw pipTestRes.error; + } + return pipTestRes.stdout.toString().indexOf('--system') >= 0; +} + /** * Install requirements described from requirements in the targetFolder into that same targetFolder * @param {string} targetFolder @@ -65,15 +110,16 @@ function installRequirements(targetFolder, serverless, options) { `Installing requirements from ${targetRequirementsTxt} ...` ); - let cmd; - let cmdOptions; - let pipCmd = [ + const dockerCmd = []; + const pipCmd = [ options.pythonBin, '-m', 'pip', 'install', ...options.pipCmdExtraArgs ]; + const pipCmds = [pipCmd]; + const postCmds = []; // Check if we're using the legacy --cache-dir command... if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { if (options.dockerizePip) { @@ -94,8 +140,8 @@ function installRequirements(targetFolder, serverless, options) { if (!options.dockerizePip) { // Push our local OS-specific paths for requirements and target directory - pipCmd.push('-t', dockerPathForWin(options, targetFolder)); - pipCmd.push('-r', dockerPathForWin(options, targetRequirementsTxt)); + pipCmd.push('-t', dockerPathForWin(targetFolder), + '-r', dockerPathForWin(targetRequirementsTxt)); // If we want a download cache... 
if (options.useDownloadCache) { const downloadCacheDir = path.join( @@ -104,35 +150,17 @@ function installRequirements(targetFolder, serverless, options) { ); serverless.cli.log(`Using download cache directory ${downloadCacheDir}`); fse.ensureDirSync(downloadCacheDir); - pipCmd.push('--cache-dir', quote_single(downloadCacheDir)); + pipCmd.push('--cache-dir', downloadCacheDir); } - // Check if pip has Debian's --system option and set it if so - const pipTestRes = spawnSync(options.pythonBin, [ - '-m', - 'pip', - 'help', - 'install' - ]); - if (pipTestRes.error) { - if (pipTestRes.error.code === 'ENOENT') { - throw new Error( - `${options.pythonBin} not found! ` + 'Try the pythonBin option.' - ); - } - throw pipTestRes.error; - } - if (pipTestRes.stdout.toString().indexOf('--system') >= 0) { + if (pipAcceptsSystem(options.pythonBin)) { pipCmd.push('--system'); } } // If we are dockerizing pip if (options.dockerizePip) { - cmd = 'docker'; - // Push docker-specific paths for requirements and target directory - pipCmd.push('-t', '/var/task/'); - pipCmd.push('-r', '/var/task/requirements.txt'); + pipCmd.push('-t', '/var/task/', '-r', '/var/task/requirements.txt'); // Build docker image if required let dockerImage; @@ -148,28 +176,18 @@ function installRequirements(targetFolder, serverless, options) { // Prepare bind path depending on os platform const bindPath = dockerPathForWin( - options, getBindPath(serverless, targetFolder) ); - cmdOptions = ['run', '--rm', '-v', `${bindPath}:/var/task:z`]; + dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); if (options.dockerSsh) { // Mount necessary ssh files to work with private repos - cmdOptions.push( - '-v', - quote_single(`${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`) - ); - cmdOptions.push( - '-v', - quote_single( - `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z` - ) - ); - cmdOptions.push( - '-v', - quote_single(`${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`) + dockerCmd.push( + '-v', `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`, + '-v', `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, + '-v', `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, + '-e', 'SSH_AUTH_SOCK=/tmp/ssh_sock' ); - cmdOptions.push('-e', 'SSH_AUTH_SOCK=/tmp/ssh_sock'); } // If we want a download cache... @@ -189,85 +207,83 @@ function installRequirements(targetFolder, serverless, options) { ); const windowsized = getBindPath(serverless, downloadCacheDir); // And now push it to a volume mount and to pip... 
- cmdOptions.push( + dockerCmd.push( '-v', - quote_single(`${windowsized}:${dockerDownloadCacheDir}:z`) + `${windowsized}:${dockerDownloadCacheDir}:z` ); - pipCmd.push('--cache-dir', quote_single(dockerDownloadCacheDir)); + pipCmd.push('--cache-dir', dockerDownloadCacheDir); } if (options.dockerEnv) { // Add environment variables to docker run cmd options.dockerEnv.forEach(function(item) { - cmdOptions.push('-e', item); + dockerCmd.push('-e', item); }); } if (process.platform === 'linux') { // Use same user so requirements folder is not root and so --cache-dir works - var commands = []; if (options.useDownloadCache) { // Set the ownership of the download cache dir to root - commands.push(quote(['chown', '-R', '0:0', dockerDownloadCacheDir])); + pipCmds.unshift(['chown', '-R', '0:0', dockerDownloadCacheDir]); } // Install requirements with pip - commands.push(pipCmd.join(' ')); // Set the ownership of the current folder to user - commands.push( - quote([ - 'chown', - '-R', - `${process.getuid()}:${process.getgid()}`, - '/var/task' - ]) - ); + pipCmds.push(['chown', '-R', `${process.getuid()}:${process.getgid()}`, '/var/task']); if (options.useDownloadCache) { // Set the ownership of the download cache dir back to user - commands.push( - quote([ + pipCmds.push( + [ 'chown', '-R', `${process.getuid()}:${process.getgid()}`, dockerDownloadCacheDir - ]) + ] ); } - pipCmd = ['/bin/bash', '-c', '"' + commands.join(' && ') + '"']; } else { // Use same user so --cache-dir works - cmdOptions.push('-u', quote_single(getDockerUid(bindPath))); + dockerCmd.push('-u', getDockerUid(bindPath)); } - cmdOptions.push(dockerImage); - cmdOptions.push(...pipCmd); - } else { - cmd = pipCmd[0]; - cmdOptions = pipCmd.slice(1); + dockerCmd.push(dockerImage); } // If enabled slimming, strip so files - if (options.slim === true || options.slim === 'true') { - const preparedPath = dockerPathForWin(options, targetFolder); - cmdOptions.push(getStripCommand(options, preparedPath)); + switch (getStripMode(options)) { + case 'docker': + pipCmds.push(getStripCommand(options, '/var/task')); + case 'direct': + postCmds.push(getStripCommand(options, dockerPathForWin(targetFolder))); } + let spawnArgs = { shell: true }; if (process.env.SLS_DEBUG) { spawnArgs.stdio = 'inherit'; } - const res = spawnSync(cmd, cmdOptions, spawnArgs); - if (res.error) { - if (res.error.code === 'ENOENT') { - if (options.dockerizePip) { - throw new Error('docker not found! Please install it.'); - } - throw new Error( - `${options.pythonBin} not found! Try the pythonBin option.` - ); - } - throw res.error; - } - if (res.status !== 0) { - throw new Error(res.stderr); + let mainCmds = []; + if (dockerCmd.length) { + dockerCmd.push(...mergeCommands(pipCmds)); + mainCmds = [dockerCmd]; + } else { + mainCmds = pipCmds; } + mainCmds.push(...postCmds); + + serverless.cli.log(`Running ${quote(dockerCmd)}...`); + + filterCommands(mainCmds).forEach(([cmd, ...args]) => { + const res = spawnSync(cmd, args); + if (res.error) { + if (res.error.code === 'ENOENT') { + const advice = cmd.indexOf('python') > -1 ? 'Try the pythonBin option' : 'Please install it'; + throw new Error(`${cmd} not found! 
${advice}`); + } + throw res.error; + } + if (res.status !== 0) { + throw new Error(res.stderr); + } + }); // If enabled slimming, delete files in slimPatterns if (options.slim === true || options.slim === 'true') { deleteFiles(options, targetFolder); @@ -275,18 +291,16 @@ function installRequirements(targetFolder, serverless, options) { } /** - * convert path from Windows style to Linux style, if needed - * @param {Object} options + * Convert path from Windows style to Linux style, if needed. * @param {string} path * @return {string} */ -function dockerPathForWin(options, path) { +function dockerPathForWin(path) { if (process.platform === 'win32') { - return `"${path.replace(/\\/g, '/')}"`; - } else if (process.platform === 'win32' && !options.dockerizePip) { + return path.replace(/\\/g, '/'); + } else { return path; } - return quote_single(path); } /** create a filtered requirements.txt without anything from noDeploy diff --git a/lib/slim.js b/lib/slim.js index 0c87229a..a78f2f57 100644 --- a/lib/slim.js +++ b/lib/slim.js @@ -2,10 +2,19 @@ const isWsl = require('is-wsl'); const glob = require('glob-all'); const fse = require('fs-extra'); -const getStripCommand = (options, folderPath) => - process.platform !== 'win32' || isWsl || options.dockerizePip - ? ` && find ${folderPath} -name "*.so" -exec strip {} ';'` - : ''; +const getStripMode = (options) => { + if (options.slim === false || options.slim === 'false') { + return 'skip'; + } else if (options.dockerizePip) { + return 'docker'; + } else if (!isWsl && process.platform === 'win32' || process.platform === 'darwin') { + return 'skip'; + } else { + return 'direct'; + } +} + +const getStripCommand = (options, folderPath) => (['find', folderPath, '-name', '*.so', '-exec', 'strip', '{}', '+']); const deleteFiles = (options, folderPath) => { let patterns = ['**/*.py[c|o]', '**/__pycache__*', '**/*.dist-info*']; @@ -27,6 +36,7 @@ const deleteFiles = (options, folderPath) => { }; module.exports = { + getStripMode, getStripCommand, deleteFiles }; diff --git a/test.js b/test.js index 0200fa35..93d9d9dd 100644 --- a/test.js +++ b/test.js @@ -3,6 +3,7 @@ const deasync = require('deasync-promise'); const glob = require('glob-all'); const JSZip = require('jszip'); const tape = require('tape'); +const { quote } = require('shell-quote'); const { removeSync, readFileSync, copySync } = require('fs-extra'); const { sep } = require('path'); @@ -25,11 +26,14 @@ const mkCommand = cmd => (args, options = {}) => { options ) ); - if (error) throw error; + if (error) { + console.error(`Error running: ${quote([cmd, ...args])}`); + throw error; + } if (status) { - console.error(stdout.toString()); // eslint-disable-line no-console - console.error(stderr.toString()); // eslint-disable-line no-console - throw new Error(`${cmd} failed with status code ${status}`); + console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console + console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console + throw new Error(`${quote([cmd, ...args])} failed with status code ${status}`); } return stdout && stdout.toString().trim(); }; From 0362deec5e9747edbc092d3b73e92f5a08913263 Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Tue, 27 Nov 2018 12:12:13 -0500 Subject: [PATCH 068/328] Linting and such. 
--- lib/pip.js | 92 ++++++++++++++++++++++++++++------------------------- lib/slim.js | 20 +++++++++--- test.js | 6 ++-- 3 files changed, 68 insertions(+), 50 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index e2bca6fa..29e21cfc 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -21,7 +21,7 @@ const { * @return {string[][]} a list of valid commands. */ function filterCommands(commands) { - return commands.filter((cmd) => Boolean(cmd) && cmd.length > 0); + return commands.filter(cmd => Boolean(cmd) && cmd.length > 0); } /** @@ -34,13 +34,13 @@ function filterCommands(commands) { function mergeCommands(commands) { const cmds = filterCommands(commands); if (cmds.length === 0) { - throw new Error('Expected at least one non-empty command') + throw new Error('Expected at least one non-empty command'); } else if (cmds.length === 1) { return cmds[0]; } else { // Quote the arguments in each command and join them all using &&. const script = cmds.map(quote).join(' && '); - return ["/bin/sh", "-c", script]; + return ['/bin/sh', '-c', script]; } } @@ -79,17 +79,10 @@ function installRequirementsFile( function pipAcceptsSystem(pythonBin) { // Check if pip has Debian's --system option and set it if so - const pipTestRes = spawnSync(pythonBin, [ - '-m', - 'pip', - 'help', - 'install' - ]); + const pipTestRes = spawnSync(pythonBin, ['-m', 'pip', 'help', 'install']); if (pipTestRes.error) { if (pipTestRes.error.code === 'ENOENT') { - throw new Error( - `${pythonBin} not found! Try the pythonBin option.` - ); + throw new Error(`${pythonBin} not found! Try the pythonBin option.`); } throw pipTestRes.error; } @@ -140,8 +133,12 @@ function installRequirements(targetFolder, serverless, options) { if (!options.dockerizePip) { // Push our local OS-specific paths for requirements and target directory - pipCmd.push('-t', dockerPathForWin(targetFolder), - '-r', dockerPathForWin(targetRequirementsTxt)); + pipCmd.push( + '-t', + dockerPathForWin(targetFolder), + '-r', + dockerPathForWin(targetRequirementsTxt) + ); // If we want a download cache... if (options.useDownloadCache) { const downloadCacheDir = path.join( @@ -175,18 +172,20 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log(`Docker Image: ${dockerImage}`); // Prepare bind path depending on os platform - const bindPath = dockerPathForWin( - getBindPath(serverless, targetFolder) - ); + const bindPath = dockerPathForWin(getBindPath(serverless, targetFolder)); dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); if (options.dockerSsh) { // Mount necessary ssh files to work with private repos dockerCmd.push( - '-v', `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`, - '-v', `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, - '-v', `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, - '-e', 'SSH_AUTH_SOCK=/tmp/ssh_sock' + '-v', + `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`, + '-v', + `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, + '-v', + `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, + '-e', + 'SSH_AUTH_SOCK=/tmp/ssh_sock' ); } @@ -207,10 +206,7 @@ function installRequirements(targetFolder, serverless, options) { ); const windowsized = getBindPath(serverless, downloadCacheDir); // And now push it to a volume mount and to pip... 
- dockerCmd.push( - '-v', - `${windowsized}:${dockerDownloadCacheDir}:z` - ); + dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); pipCmd.push('--cache-dir', dockerDownloadCacheDir); } @@ -229,17 +225,20 @@ function installRequirements(targetFolder, serverless, options) { } // Install requirements with pip // Set the ownership of the current folder to user - pipCmds.push(['chown', '-R', `${process.getuid()}:${process.getgid()}`, '/var/task']); + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + '/var/task' + ]); if (options.useDownloadCache) { // Set the ownership of the download cache dir back to user - pipCmds.push( - [ - 'chown', - '-R', - `${process.getuid()}:${process.getgid()}`, - dockerDownloadCacheDir - ] - ); + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + dockerDownloadCacheDir + ]); } } else { // Use same user so --cache-dir works @@ -252,8 +251,10 @@ function installRequirements(targetFolder, serverless, options) { switch (getStripMode(options)) { case 'docker': pipCmds.push(getStripCommand(options, '/var/task')); + break; case 'direct': postCmds.push(getStripCommand(options, dockerPathForWin(targetFolder))); + break; } let spawnArgs = { shell: true }; @@ -272,17 +273,20 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log(`Running ${quote(dockerCmd)}...`); filterCommands(mainCmds).forEach(([cmd, ...args]) => { - const res = spawnSync(cmd, args); - if (res.error) { - if (res.error.code === 'ENOENT') { - const advice = cmd.indexOf('python') > -1 ? 'Try the pythonBin option' : 'Please install it'; - throw new Error(`${cmd} not found! ${advice}`); - } - throw res.error; - } - if (res.status !== 0) { - throw new Error(res.stderr); + const res = spawnSync(cmd, args); + if (res.error) { + if (res.error.code === 'ENOENT') { + const advice = + cmd.indexOf('python') > -1 + ? 'Try the pythonBin option' + : 'Please install it'; + throw new Error(`${cmd} not found! 
${advice}`); } + throw res.error; + } + if (res.status !== 0) { + throw new Error(res.stderr); + } }); // If enabled slimming, delete files in slimPatterns if (options.slim === true || options.slim === 'true') { diff --git a/lib/slim.js b/lib/slim.js index a78f2f57..965be7d0 100644 --- a/lib/slim.js +++ b/lib/slim.js @@ -2,19 +2,31 @@ const isWsl = require('is-wsl'); const glob = require('glob-all'); const fse = require('fs-extra'); -const getStripMode = (options) => { +const getStripMode = options => { if (options.slim === false || options.slim === 'false') { return 'skip'; } else if (options.dockerizePip) { return 'docker'; - } else if (!isWsl && process.platform === 'win32' || process.platform === 'darwin') { + } else if ( + (!isWsl && process.platform === 'win32') || + process.platform === 'darwin' + ) { return 'skip'; } else { return 'direct'; } -} +}; -const getStripCommand = (options, folderPath) => (['find', folderPath, '-name', '*.so', '-exec', 'strip', '{}', '+']); +const getStripCommand = (options, folderPath) => [ + 'find', + folderPath, + '-name', + '*.so', + '-exec', + 'strip', + '{}', + ';' +]; const deleteFiles = (options, folderPath) => { let patterns = ['**/*.py[c|o]', '**/__pycache__*', '**/*.dist-info*']; diff --git a/test.js b/test.js index 93d9d9dd..557b0c5b 100644 --- a/test.js +++ b/test.js @@ -27,13 +27,15 @@ const mkCommand = cmd => (args, options = {}) => { ) ); if (error) { - console.error(`Error running: ${quote([cmd, ...args])}`); + console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console throw error; } if (status) { console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console - throw new Error(`${quote([cmd, ...args])} failed with status code ${status}`); + throw new Error( + `${quote([cmd, ...args])} failed with status code ${status}` + ); } return stdout && stdout.toString().trim(); }; From 14798296182de64fe18c977c9b3d320c6d437d08 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Thu, 29 Nov 2018 08:53:29 -0800 Subject: [PATCH 069/328] Add documentation about how to include the default omitted packages (#290) --- README.md | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 81d7044d..1c52dd1b 100644 --- a/README.md +++ b/README.md @@ -148,9 +148,20 @@ This will remove all folders within the installed requirements that match the names in `slimPatterns` ## Omitting Packages You can omit a package from deployment with the `noDeploy` option. Note that -dependencies of omitted packages must explicitly be omitted too. -By default, this will not install the AWS SDKs that are already installed on -Lambda. This example makes it instead omit pytest: +dependencies of omitted packages must explicitly be omitted too. By default, +the following packages are omitted as they are already installed on Lambda: + + * boto3 + * botocore + * docutils + * jmespath + * pip + * python-dateutil + * s3transfer + * setuptools + * six + +This example makes it instead omit pytest: ```yaml custom: pythonRequirements: @@ -158,6 +169,14 @@ custom: - pytest ``` +To include the default omitted packages, set the `noDeploy` option to an empty +list: +```yaml +custom: + pythonRequirements: + noDeploy: [] +``` + ## Extra Config Options ### Caching You can enable two kinds of caching with this plugin which are currently both DISABLED by default. 
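Tying the refactor in the two patches above together, a rough sketch of the single argv that installRequirements now hands to spawnSync when dockerizePip is enabled. The bind path, uid/gid and image tag below are placeholders for illustration, not values from a real run.

```js
// Roughly what dockerCmd looks like after dockerCmd.push(...mergeCommands(pipCmds))
// on Linux; every concrete value here is a placeholder.
const dockerCmd = [
  'docker', 'run', '--rm',
  '-v', '/home/user/app/.serverless/requirements:/var/task:z',
  'lambci/lambda:build-python3.6', // example image; normally derived from the runtime or dockerImage/dockerFile
  '/bin/sh', '-c',
  'python -m pip install -t /var/task/ -r /var/task/requirements.txt && chown -R 1000:1000 /var/task'
];
// then: const [cmd, ...args] = dockerCmd; spawnSync(cmd, args);
```
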
First, a download cache that will cache downloads that pip needs to compile the packages. And second, a what we call "static caching" which caches output of pip after compiling everything for your requirements file. Since generally requirements.txt files rarely change, you will often see large amounts of speed improvements when enabling the static cache feature. These caches will be shared between all your projects if no custom cacheLocation is specified (see below). From d162be32382c85d32968c9b310d6afb9781efb0d Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Thu, 29 Nov 2018 08:54:33 -0800 Subject: [PATCH 070/328] Trim trailing white space throughout the project (#289) Many editors clean up trailing white space on save. By removing it all in one go, it helps keep future diffs cleaner by avoiding spurious white space changes on unrelated lines. --- README.md | 28 ++++++++++++++-------------- test.bats | 14 +++++++------- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/README.md b/README.md index 1c52dd1b..0bfd4010 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ This will automatically add the plugin to your project's `package.json` and the `serverless.yml`. That's all that's needed for basic use! The plugin will now bundle your python dependencies specified in your `requirements.txt` or `Pipfile` when you run `sls deploy`. -For a more in depth introduction on how to user this plugin, check out +For a more in depth introduction on how to user this plugin, check out [this post on the Serverless Blog](https://serverless.com/blog/serverless-python-packaging/) If you're on a mac, check out [these notes](#applebeersnake-mac-brew-installed-python-notes) about using python installed by brew. @@ -113,17 +113,17 @@ except ImportError: pass ``` ### Slim Package -_Works on non 'win32' environments: Docker, WSL are included_ -To remove the tests, information and caches from the installed packages, -enable the `slim` option. This will: `strip` the `.so` files, remove `__pycache__` -and `dist-info` directories as well as `.pyc` and `.pyo` files. +_Works on non 'win32' environments: Docker, WSL are included_ +To remove the tests, information and caches from the installed packages, +enable the `slim` option. This will: `strip` the `.so` files, remove `__pycache__` +and `dist-info` directories as well as `.pyc` and `.pyo` files. ```yaml custom: pythonRequirements: slim: true -``` -#### Custom Removal Patterns -To specify additional directories to remove from the installed packages, +``` +#### Custom Removal Patterns +To specify additional directories to remove from the installed packages, define a list of patterns in the serverless config using the `slimPatterns` option and glob syntax. These paterns will be added to the default ones (`**/*.py[c|o]`, `**/__pycache__*`, `**/*.dist-info*`). Note, the glob syntax matches against whole paths, so to match a file in any @@ -134,7 +134,7 @@ custom: slim: true slimPatterns: - "**/*.egg-info*" -``` +``` To overwrite the default patterns set the option `slimPatternsAppendDefaults` to `false` (`true` by default). 
```yaml custom: @@ -143,10 +143,10 @@ custom: slimPatternsAppendDefaults: false slimPatterns: - "**/*.egg-info*" -``` -This will remove all folders within the installed requirements that match -the names in `slimPatterns` -## Omitting Packages +``` +This will remove all folders within the installed requirements that match +the names in `slimPatterns` +## Omitting Packages You can omit a package from deployment with the `noDeploy` option. Note that dependencies of omitted packages must explicitly be omitted too. By default, the following packages are omitted as they are already installed on Lambda: @@ -199,7 +199,7 @@ custom: useDownloadCache: true cacheLocation: '/home/user/.my_cache_goes_here' staticCacheMaxVersions: 10 - + ``` ### Extra pip arguments diff --git a/test.bats b/test.bats index 3c0e6c3c..3e42ac4e 100755 --- a/test.bats +++ b/test.bats @@ -8,7 +8,7 @@ setup() { export LANG=C.UTF-8 fi export USR_CACHE_DIR=`node -e 'console.log(require("./lib/shared").getUserCachePath())'` - # Please note: If you update change the requirements.txt in test/base this value will + # Please note: If you update change the requirements.txt in test/base this value will # change. Run a test which uses this variable manually step by step and list the cache # folder to find the new hash if you do this if [ -d "${USR_CACHE_DIR}" ] ; then @@ -71,7 +71,7 @@ teardown() { unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 - test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 + test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 } @test "py3.6 doesn't package boto3 by default" { @@ -143,7 +143,7 @@ teardown() { unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 - test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 + test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 } @test "py3.6 uses download cache with useDownloadCache option" { @@ -279,7 +279,7 @@ teardown() { @test "py2.7 can package flask with slim option" { cd tests/base npm i $(npm pack ../..) 
- sls --runtime=python2.7 --slim=true package + sls --runtime=python2.7 --slim=true package unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 @@ -301,7 +301,7 @@ teardown() { unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 - test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 + test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 } @test "py2.7 doesn't package boto3 by default" { @@ -372,7 +372,7 @@ teardown() { unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 - test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 + test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 } @test "pipenv py3.6 can package flask with default options" { @@ -400,7 +400,7 @@ teardown() { unzip .serverless/sls-py-req-test.zip -d puck ls puck/flask test $(find puck -name "*.pyc" | wc -l) -eq 0 - test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 + test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 } @test "pipenv py3.6 can package flask with zip option" { From 9934bf76c3e7b9e51fb9d5a3e59bca004ca3f5ff Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 29 Nov 2018 11:57:50 -0500 Subject: [PATCH 071/328] =?UTF-8?q?Windows=20Only:=20When=20call=20spawnSy?= =?UTF-8?q?c=20with=20shell=3Dtrue,=20enclosing=20by=20double=E2=80=A6=20(?= =?UTF-8?q?#288)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …-quate -v option. Remove enclosing by double-quate at dockerPathForWin on Windows platform. It's caused bad effect on spawnSync without shell=true or windowsVerbatimArguments=true. closes #274 closes #281 --- index.js | 3 +++ lib/pip.js | 11 +++++++++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/index.js b/index.js index 1d2854bf..470fcd67 100644 --- a/index.js +++ b/index.js @@ -69,6 +69,9 @@ class ServerlessPythonRequirements { if (options.dockerizePip === 'non-linux') { options.dockerizePip = process.platform !== 'linux'; } + if (options.dockerizePip && process.platform === 'win32') { + options.pythonBin = 'python'; + } if ( !options.dockerizePip && (options.dockerSsh || options.dockerImage || options.dockerFile) diff --git a/lib/pip.js b/lib/pip.js index fce97071..94299e97 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -18,6 +18,13 @@ function quote_single(quoteme) { return quote([quoteme]); } +function quoteFroWin(quoteme) { + if (process.platform === 'win32') { + return `"${quoteme}"`; + } + return quoteme; +} + /** * Just generate the requirements file in the .serverless folder * @param {string} requirementsPath @@ -152,7 +159,7 @@ function installRequirements(targetFolder, serverless, options) { getBindPath(serverless, targetFolder) ); - cmdOptions = ['run', '--rm', '-v', `${bindPath}:/var/task:z`]; + cmdOptions = ['run', '--rm', '-v', quoteFroWin(`${bindPath}:/var/task:z`)]; if (options.dockerSsh) { // Mount necessary ssh files to work with private repos cmdOptions.push( @@ -282,7 +289,7 @@ function installRequirements(targetFolder, serverless, options) { */ function dockerPathForWin(options, path) { if (process.platform === 'win32') { - return `"${path.replace(/\\/g, '/')}"`; + return `${path.replace(/\\/g, '/')}`; } else if (process.platform === 'win32' && !options.dockerizePip) { return path; } From d043045d9fd70c700c8478135db9561148e7e54b Mon Sep 17 00:00:00 2001 From: Benjamin Weigel Date: Mon, 3 Dec 2018 20:05:38 +0100 Subject: [PATCH 
072/328] Port tests from bats to tape (#282) hi @dschep I decided to split up and simplify PR #279 a little bit. Started with porting some tests, which also helps towards resolutions of #269 ... --- package.json | 4 +- test.bats | 594 ----------- test.js | 1417 ++++++++++++++++++++++++-- tests/base/_slimPatterns.yml | 2 +- tests/base/package.json | 2 +- tests/base/requirements-w-hashes.txt | 89 ++ tests/base/serverless.yml | 9 +- tests/individually/package.json | 2 +- tests/individually/serverless.yml | 1 + tests/pipenv/_slimPatterns.yml | 2 +- tests/pipenv/package.json | 2 +- 11 files changed, 1443 insertions(+), 681 deletions(-) delete mode 100755 test.bats create mode 100644 tests/base/requirements-w-hashes.txt diff --git a/package.json b/package.json index c426c94f..b56b3037 100644 --- a/package.json +++ b/package.json @@ -38,7 +38,7 @@ "main": "index.js", "bin": {}, "scripts": { - "test": "node test.js && bats test.bats", + "test": "node test.js", "lint": "eslint *.js lib/*.js && prettier -l index.js lib/*.js || (echo need formatting ; exit 1)", "format": "prettier --write index.js lib/*.js test.js" }, @@ -76,4 +76,4 @@ "singleQuote": true, "parser": "babylon" } -} +} \ No newline at end of file diff --git a/test.bats b/test.bats deleted file mode 100755 index 3e42ac4e..00000000 --- a/test.bats +++ /dev/null @@ -1,594 +0,0 @@ -#!/usr/bin/env bats - - -setup() { - export SLS_DEBUG=t - if ! [ -z "$CI" ]; then - export LC_ALL=C.UTF-8 - export LANG=C.UTF-8 - fi - export USR_CACHE_DIR=`node -e 'console.log(require("./lib/shared").getUserCachePath())'` - # Please note: If you update change the requirements.txt in test/base this value will - # change. Run a test which uses this variable manually step by step and list the cache - # folder to find the new hash if you do this - if [ -d "${USR_CACHE_DIR}" ] ; then - rm -Rf "${USR_CACHE_DIR}" - fi -} - -teardown() { - rm -rf puck puck2 puck3 node_modules .serverless .requirements.zip .requirements-cache \ - foobar package-lock.json serverless.yml.bak slimPatterns.yml "${USR_CACHE_DIR}" - serverless-python-requirements-*.tgz - git checkout serverless.yml - cd ../.. - if [ -d "tests/base with a space" ] ; then - rm -Rf "tests/base with a space" - fi -} - -@test "py3.6 can package flask with default options" { - cd tests/base - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "py3.6 can package flask with hashes" { - cd tests/base - npm i $(npm pack ../..) - pip-compile --output-file requirements-w-hashes.txt --generate-hashes requirements.txt - sls package --fileName requirements-w-hashes.txt - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "py3.6 can package flask & bottle with zip option" { - cd tests/base - npm i $(npm pack ../..) - sls --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py - ! ls puck/flask - ! ls puck/bottle -} - -@test "py3.6 can package flask with slim options" { - cd tests/base - npm i $(npm pack ../..) - sls --slim=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - test $(find puck -name "*.pyc" | wc -l) -eq 0 -} - -@test "py3.6 can package flask with slim & slimPatterns options" { - cd tests/base - cat _slimPatterns.yml > slimPatterns.yml - npm i $(npm pack ../..) 
- sls --slim=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - test $(find puck -name "*.pyc" | wc -l) -eq 0 - test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 -} - -@test "py3.6 doesn't package boto3 by default" { - cd tests/base - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/boto3 -} - -@test "py3.6 doesn't package bottle with noDeploy option" { - cd tests/base - npm i $(npm pack ../..) - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n noDeploy: [bottle]/' serverless.yml - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/bottle.py - ! ls puck/__pycache__/bottle.cpython-36.pyc -} - -@test "py3.6 can package flask with zip & dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "py3.6 can package flask with zip & slim & dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --zip=true --slim=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "py3.6 can package flask with dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "py3.6 can package flask with slim & dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --slim=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - test $(find puck -name "*.pyc" | wc -l) -eq 0 -} - -@test "py3.6 can package flask with slim & dockerizePip & slimPatterns options" { - cd tests/base - cat _slimPatterns.yml > slimPatterns.yml - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --slim=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - test $(find puck -name "*.pyc" | wc -l) -eq 0 - test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 -} - -@test "py3.6 uses download cache with useDownloadCache option" { - cd tests/base - npm i $(npm pack ../..) - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true/' serverless.yml - sls package - USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` - ls $USR_CACHE_DIR/downloadCacheslspyc/http -} - -@test "py3.6 uses download cache with cacheLocation option" { - cd tests/base - npm i $(npm pack ../..) 
- perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n cacheLocation: .requirements-cache/' serverless.yml - sls package - ls .requirements-cache/downloadCacheslspyc/http -} - -@test "py3.6 uses download cache with dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true/' serverless.yml - sls --dockerizePip=true package - USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` - ls $USR_CACHE_DIR/downloadCacheslspyc/http -} - -@test "py3.6 uses download cache with dockerizePip + cacheLocation option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n cacheLocation: .requirements-cache/' serverless.yml - sls --dockerizePip=true package - ls .requirements-cache/downloadCacheslspyc/http -} - -@test "py3.6 uses static and download cache" { - cd tests/base - npm i $(npm pack ../..) - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n useStaticCache: true/' serverless.yml - sls package - USR_CACHE_DIR=`node -e 'console.log(require("./node_modules/serverless-python-requirements/lib/shared").getUserCachePath())'` - CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc - ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask - ls $USR_CACHE_DIR/downloadCacheslspyc/http -} - -@test "py3.6 uses static and download cache with dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true\n useStaticCache: true/' serverless.yml - sls --dockerizePip=true package - USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` - CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc - ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask - ls $USR_CACHE_DIR/downloadCacheslspyc/http -} - -@test "py3.6 uses static cache" { - cd tests/base - npm i $(npm pack ../..) - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml - sls package - USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` - CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc - ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask - ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/.completed_requirements -} - -@test "py3.6 uses static cache with cacheLocation option" { - cd tests/base - npm i $(npm pack ../..) 
- perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true\n cacheLocation: .requirements-cache/' serverless.yml - sls package - USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` - CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc - ls .requirements-cache/$CACHE_FOLDER_HASH/flask - ls .requirements-cache/$CACHE_FOLDER_HASH/.completed_requirements -} - -@test "py3.6 checking that static cache actually pulls from cache (by poisoning it)" { - cd tests/base - npm i $(npm pack ../..) - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml - sls package - cp .serverless/sls-py-req-test.zip ./puck - USR_CACHE_DIR=`node -e 'console.log(require("../../lib/shared").getUserCachePath())'` - CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc - echo "injected new file into static cache folder" > $USR_CACHE_DIR/$CACHE_FOLDER_HASH/injected_file_is_bad_form - sls package - [ `wc -c ./.serverless/sls-py-req-test.zip | awk '{ print $1 }'` -gt `wc -c ./puck | awk '{ print $1 }'` ] -} - -@test "py3.6 uses static cache with dockerizePip & slim option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useStaticCache: true/' serverless.yml - sls --dockerizePip=true --slim=true package - CACHE_FOLDER_HASH=$(md5sum <(grep -v boto3 requirements.txt|sort) | cut -d' ' -f1)_slspyc - ls $USR_CACHE_DIR/$CACHE_FOLDER_HASH/flask - unzip .serverless/sls-py-req-test.zip -d puck - test $(find puck -name "*.pyc" | wc -l) -eq 0 -} - -@test "py3.6 uses download cache with dockerizePip & slim option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n useDownloadCache: true/' serverless.yml - sls --dockerizePip=true --slim=true package - ls $USR_CACHE_DIR/downloadCacheslspyc/http - unzip .serverless/sls-py-req-test.zip -d puck - test $(find puck -name "*.pyc" | wc -l) -eq 0 -} - -@test "py2.7 can package flask with default options" { - cd tests/base - npm i $(npm pack ../..) - sls --runtime=python2.7 package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "py2.7 can package flask with slim option" { - cd tests/base - npm i $(npm pack ../..) - sls --runtime=python2.7 --slim=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - test $(find puck -name "*.pyc" | wc -l) -eq 0 -} - -@test "py2.7 can package flask with zip option" { - cd tests/base - npm i $(npm pack ../..) - sls --runtime=python2.7 --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "py2.7 can package flask with slim & dockerizePip & slimPatterns options" { - cd tests/base - cat _slimPatterns.yml > slimPatterns.yml - npm i $(npm pack ../..) 
- sls --runtime=python2.7 --slim=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - test $(find puck -name "*.pyc" | wc -l) -eq 0 - test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 -} - -@test "py2.7 doesn't package boto3 by default" { - cd tests/base - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/boto3 -} - -@test "py2.7 doesn't package bottle with noDeploy option" { - cd tests/base - npm i $(npm pack ../..) - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n noDeploy: [bottle]/' serverless.yml - sls --runtime=python2.7 package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/bottle.py -} - -@test "py2.7 can package flask with zip & dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --runtime=python2.7 --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "py2.7 can package flask with zip & slim & dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --runtime=python2.7 --zip=true --slim=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "py2.7 can package flask with dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --runtime=python2.7 package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "py2.7 can package flask with slim & dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --slim=true --runtime=python2.7 package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - test $(find puck -name "*.pyc" | wc -l) -eq 0 -} - -@test "py2.7 can package flask with slim & dockerizePip & slimPatterns options" { - cd tests/base - cat _slimPatterns.yml > slimPatterns.yml - npm i $(npm pack ../..) - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --slim=true --runtime=python2.7 package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - test $(find puck -name "*.pyc" | wc -l) -eq 0 - test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 -} - -@test "pipenv py3.6 can package flask with default options" { - cd tests/pipenv - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "pipenv py3.6 can package flask with slim option" { - cd tests/pipenv - npm i $(npm pack ../..) 
- sls --slim=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - test $(find puck -name "*.pyc" | wc -l) -eq 0 -} - -@test "pipenv py3.6 can package flask with slim & slimPatterns option" { - cd tests/pipenv - npm i $(npm pack ../..) - cat _slimPatterns.yml > slimPatterns.yml - sls --slim=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - test $(find puck -name "*.pyc" | wc -l) -eq 0 - test $(find puck -type d -name "*.egg-info*" | wc -l) -eq 0 -} - -@test "pipenv py3.6 can package flask with zip option" { - cd tests/pipenv - npm i $(npm pack ../..) - sls --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "pipenv py3.6 doesn't package boto3 by default" { - cd tests/pipenv - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/boto3 -} - -@test "pipenv py3.6 doesn't package bottle with noDeploy option" { - cd tests/pipenv - npm i $(npm pack ../..) - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n noDeploy: [bottle]/' serverless.yml - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/bottle.py -} - -@test "py3.6 can package flask with zip option and no explicit include" { - cd tests/base - npm i $(npm pack ../..) - sed -i'.bak' -e 's/include://' -e 's/^.*handler.py//' serverless.yml - sls --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "py3.6 can package flask with package individually option" { - cd tests/base - npm i $(npm pack ../..) - sls --individually=true package - unzip .serverless/hello.zip -d puck - unzip .serverless/hello2.zip -d puck2 - unzip .serverless/hello3.zip -d puck3 - ls puck/flask - ls puck2/flask - ! ls puck3/flask -} - -@test "py3.6 can package flask with package individually & slim option" { - cd tests/base - npm i $(npm pack ../..) - sls --individually=true --slim=true package - unzip .serverless/hello.zip -d puck - unzip .serverless/hello2.zip -d puck2 - unzip .serverless/hello3.zip -d puck3 - ls puck/flask - ls puck2/flask - ! ls puck3/flask - test $(find "puck*" -name "*.pyc" | wc -l) -eq 0 -} - -@test "py2.7 can package flask with package individually option" { - cd tests/base - npm i $(npm pack ../..) - sls --individually=true --runtime=python2.7 package - unzip .serverless/hello.zip -d puck - unzip .serverless/hello2.zip -d puck2 - unzip .serverless/hello3.zip -d puck3 - ls puck/flask - ls puck2/flask - ! ls puck3/flask -} - -@test "py2.7 can package flask with package individually & slim option" { - cd tests/base - npm i $(npm pack ../..) - sls --individually=true --slim=true --runtime=python2.7 package - unzip .serverless/hello.zip -d puck - unzip .serverless/hello2.zip -d puck2 - unzip .serverless/hello3.zip -d puck3 - ls puck/flask - ls puck2/flask - ! ls puck3/flask - test $(find puck* -name "*.pyc" | wc -l) -eq 0 -} - -@test "py3.6 can package only requirements of module" { - cd tests/individually - npm i $(npm pack ../..) - sls package - unzip .serverless/module1-sls-py-req-test-indiv-dev-hello1.zip -d puck - unzip .serverless/module2-sls-py-req-test-indiv-dev-hello2.zip -d puck2 - ls puck/handler1.py - ls puck2/handler2.py - ls puck/pyaml - ls puck2/flask - ! ls puck/handler2.py - ! ls puck2/handler1.py - ! ls puck/flask - ! ls puck2/pyaml -} - -@test "py3.6 can package lambda-decorators using vendor option" { - cd tests/base - npm i $(npm pack ../..) 
- sls --vendor=./vendor package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - ls puck/lambda_decorators.py -} - -@test "py3.6 can package lambda-decorators using vendor and invidiually option" { - cd tests/base - npm i $(npm pack ../..) - sls --individually=true --vendor=./vendor package - unzip .serverless/hello.zip -d puck - unzip .serverless/hello2.zip -d puck2 - unzip .serverless/hello3.zip -d puck3 - ls puck/flask - ls puck2/flask - ! ls puck3/flask - ls puck/lambda_decorators.py - ls puck2/lambda_decorators.py - ! ls puck3/lambda_decorators.py -} - -@test "Don't nuke execute perms" { - cd tests/base - npm i $(npm pack ../..) - touch foobar - chmod +x foobar - perl -p -i'.bak' -e 's/(handler.py$)/\1\n - foobar/' serverless.yml - sls --vendor=./vendor package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - ls puck/lambda_decorators.py - ./puck/foobar -} - -@test "Don't nuke execute perms when using individually" { - cd tests/individually - npm i $(npm pack ../..) - touch module1/foobar - chmod +x module1/foobar - perl -p -i'.bak' -e 's/(handler.py$)/\1\n - foobar/' serverless.yml - sls package - unzip .serverless/hello1.zip -d puck - ./puck/module1/foobar -} - -@test "Don't nuke execute perms when using individually w/docker" { - cd tests/individually - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - npm i $(npm pack ../..) - touch module1/foobar - chmod +x module1/foobar - perl -p -i'.bak' -e 's/(handler.py$)/\1\n - foobar/' serverless.yml - sls package --dockerizePip=true - unzip .serverless/hello1.zip -d puck - ./puck/module1/foobar -} - -@test "py3.6 can package flask in a project with a space in it" { - cp -a tests/base "tests/base with a space" - cd "tests/base with a space" - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "py3.6 can package flask in a project with a space in it with docker" { - docker &> /dev/null || skip "docker not present" - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - cp -a tests/base "tests/base with a space" - cd "tests/base with a space" - npm i $(npm pack ../..) - sls --dockerizePip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "py3.6 supports custom file name with fileName option" { - cd tests/base - npm i $(npm pack ../..) - echo "requests" > puck - sls --fileName puck package - ls .serverless/requirements/requests - ! 
ls .serverless/requirements/flask -} diff --git a/test.js b/test.js index 557b0c5b..b310e672 100644 --- a/test.js +++ b/test.js @@ -3,11 +3,17 @@ const deasync = require('deasync-promise'); const glob = require('glob-all'); const JSZip = require('jszip'); const tape = require('tape'); +const { + removeSync, + readFileSync, + copySync, + writeFileSync, + pathExistsSync +} = require('fs-extra'); const { quote } = require('shell-quote'); -const { removeSync, readFileSync, copySync } = require('fs-extra'); const { sep } = require('path'); -const { getUserCachePath } = require('./lib/shared'); +const { getUserCachePath, md5Path } = require('./lib/shared'); const initialWorkingDir = process.cwd(); @@ -61,10 +67,13 @@ const teardown = () => { 'package-lock.json', 'slimPatterns.yml', 'serverless.yml.bak', + 'module1/foobar', getUserCachePath(), ...glob.sync('serverless-python-requirements-*.tgz') ].map(path => removeSync(path)); - git(['checkout', 'serverless.yml']); + if (!process.cwd().endsWith('base with a space')) { + git(['checkout', 'serverless.yml']); + } process.chdir(initialWorkingDir); removeSync('tests/base with a space'); }; @@ -74,6 +83,9 @@ const test = (desc, func, opts = {}) => setup(); try { func(t); + } catch (err) { + t.fail(err); + t.end(); } finally { teardown(); } @@ -88,6 +100,8 @@ const getPythonBin = (version = 3) => { const listZipFiles = filename => Object.keys(deasync(new JSZip().loadAsync(readFileSync(filename))).files); +const listZipFilesWithMetaData = filename => + Object(deasync(new JSZip().loadAsync(readFileSync(filename))).files); const listRequirementsZipFiles = filename => { const zip = deasync(new JSZip().loadAsync(readFileSync(filename))); const reqsBuffer = deasync(zip.file('.requirements.zip').async('nodebuffer')); @@ -112,6 +126,7 @@ test('default pythonBin can package flask with default options', t => { sls(['package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); t.end(); }); @@ -122,6 +137,21 @@ test('py3.6 can package flask with default options', t => { sls([`--pythonBin=${getPythonBin(3)}`, 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test('py3.6 can package flask with hashes', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + `--pythonBin=${getPythonBin(3)}`, + '--fileName=requirements-w-hashes.txt', + 'package' + ]); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.end(); }); @@ -155,163 +185,701 @@ test('py3.6 can package flask with slim option', t => { [], 'no pyc files packaged' ); + t.true( + zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); t.end(); }); /* - * News tests not in test.bats + * News tests NOT in test.bats */ -test("py3.6 doesn't package bottle with zip option", t => { +test('py3.6 can package flask with slim & slimPatterns options', t => { + process.chdir('tests/base'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', 'package']); + const zipfiles = 
listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test("py3.6 doesn't package bottle with noDeploy option", t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); perl([ '-p', - "-i'.bak'", + '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml' ]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + sls([`--pythonBin=${getPythonBin(3)}`, 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test( + 'py3.6 can package flask with dockerizePip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--dockerizePip=true', 'package']); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false( + zipfiles.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test( + 'py3.6 can package flask with slim & dockerizePip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--dockerizePip=true', '--slim=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + '*.pyc files are NOT packaged' + ); + t.true( + zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test( + 'py3.6 can package flask with slim & dockerizePip & slimPatterns options', + t => { + process.chdir('tests/base'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--dockerizePip=true', '--slim=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + '*.pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test( + 'py3.6 can package flask with zip & dockerizePip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--dockerizePip=true', '--zip=true', 'package']); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const 
zippedReqs = listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test( + 'py3.6 can package flask with zip & slim & dockerizePip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--dockerizePip=true', '--zip=true', '--slim=true', 'package']); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test('py2.7 can package flask with default options', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(2)}`, 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test('py2.7 can package flask with slim option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(2)}`, '--slim=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' ); t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip file' - ); - t.false( - zippedReqs.includes(`bottle.py`), - 'bottle is not packaged in the .requirements.zip file' + zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' ); t.end(); }); -test('py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', t => { +test('py2.7 can package flask with zip option', t => { process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); - + sls([`--pythonBin=${getPythonBin(2)}`, '--zip=true', 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' ); - t.deepEqual( - zipfiles.filter(filename => filename.includes('.egg-info')), - [], - '.egg-info folders are not packaged' + 
t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" ); t.end(); }); test( - 'py3.6 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', t => { process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ + `--pythonBin=${getPythonBin(2)}`, '--dockerizePip=true', '--slim=true', - '--slimPatternsAppendDefaults=false', 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + '*.pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.includes('.egg-infooo')), + zipfiles.filter(filename => filename.endsWith('__main__.py')), [], - '.egg-info folders are not packaged' + '__main__.py files are NOT packaged' ); t.end(); }, { skip: !canUseDocker() } ); -test('py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', t => { +test("py2.7 doesn't package bottle with noDeploy option", t => { process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--runtime=python2.7', - '--slim=true', - '--slimPatternsAppendDefaults=false', - 'package' + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml' ]); - + sls([`--pythonBin=${getPythonBin(2)}`, 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.includes('.egg-info')), - [], - '.egg-info folders are not packaged' - ); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); t.end(); }); test( - 'py2.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + 'py2.7 can package flask with zip & dockerizePip option', t => { process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ + `--pythonBin=${getPythonBin(2)}`, '--dockerizePip=true', - '--runtime=python2.7', + '--zip=true', + 'package' + ]); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test( + 'py2.7 can package flask with zip & slim & dockerizePip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', 
path]); + sls([ + `--pythonBin=${getPythonBin(2)}`, + '--dockerizePip=true', + '--zip=true', '--slim=true', - '--slimPatternsAppendDefaults=false', 'package' ]); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + const zippedReqs = listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' ); - t.deepEqual( - zipfiles.filter(filename => filename.includes('.egg-info')), - [], - '.egg-info folders are not packaged' + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' ); t.end(); }, { skip: !canUseDocker() } ); -test('pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', t => { - process.chdir('tests/pipenv'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); +test( + 'py2.7 can package flask with dockerizePip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(2)}`, '--dockerizePip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false( + zipfiles.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test( + 'py2.7 can package flask with slim & dockerizePip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + `--pythonBin=${getPythonBin(2)}`, + '--dockerizePip=true', + '--slim=true', + 'package' + ]); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + '*.pyc files are NOT packaged' + ); + t.true( + zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test( + 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', + t => { + process.chdir('tests/base'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + `--pythonBin=${getPythonBin(2)}`, + '--dockerizePip=true', + '--slim=true', + 'package' + ]); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + '*.pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test('pipenv py3.6 can package flask with default options', t => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); 
+ t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test('pipenv py3.6 can package flask with slim option', t => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('pipenv py3.6 can package flask with slim & slimPatterns options', t => { + process.chdir('tests/pipenv'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('pipenv py3.6 can package flask with zip option', t => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test("pipenv py3.6 doesn't package bottle with noDeploy option", t => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml' + ]); + sls(['package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('py3.6 can package flask with zip option and no explicit include', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); + perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); + sls(['--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test('py3.6 can package lambda-decorators using vendor option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--vendor=./vendor`, 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test( + "Don't nuke execute perms", + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + const perm = '775'; + + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(handler.py.*$)/$1\n - foobar/', + 'serverless.yml' + ]); + writeFileSync(`foobar`, '', { mode: perm }); + sls(['--vendor=./vendor', 'package']); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); + t.false( + zipfiles.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT packaged' + ); + + const zipfiles_with_metadata = listZipFilesWithMetaData( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles_with_metadata['foobar'].unixPermissions + .toString(8) + .slice(3, 6) === perm, + 'foobar has retained its executable file permissions' + ); + + t.end(); + }, + { skip: process.platform === 'win32' } +); + +test('py3.6 can package flask in a project with a space in it', t => { + copySync('tests/base', 'tests/base with a space'); + process.chdir('tests/base with a space'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test( + 'py3.6 can package flask in a project with a space in it with docker', + t => { + copySync('tests/base', 'tests/base with a space'); + process.chdir('tests/base with a space'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--dockerizePip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false( + zipfiles.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test('py3.6 supports custom file name with fileName option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + writeFileSync('puck', 'requests'); + npm(['i', path]); + sls(['--fileName=puck', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes(`requests${sep}__init__.py`), + 'requests is packaged' + ); + t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test("py3.6 doesn't package bottle with zip option", t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml' + ]); + sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are 
packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.false( + zippedReqs.includes(`bottle.py`), + 'bottle is NOT packaged in the .requirements.zip file' + ); + t.end(); +}); + +test('py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -319,9 +887,702 @@ test('pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppe 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.includes('.egg-info')), + zipfiles.filter(filename => filename.endsWith('__main__.py')), [], - '.egg-info folders are not packaged' + '__main__.py files are NOT packaged' ); t.end(); }); + +test( + 'py3.6 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--dockerizePip=true', + '--slim=true', + '--slimPatternsAppendDefaults=false', + 'package' + ]); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test('py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--runtime=python2.7', + '--slim=true', + '--slimPatternsAppendDefaults=false', + 'package' + ]); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test( + 'py2.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--dockerizePip=true', + '--runtime=python2.7', + '--slim=true', + '--slimPatternsAppendDefaults=false', + 'package' + ]); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + 
zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test('pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', t => { + process.chdir('tests/pipenv'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('py3.6 can package flask with package individually option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--individually=true', 'package']); + + const zipfiles_hello = listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + + const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + + t.end(); +}); + +test('py3.6 can package flask with package individually & slim option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--individually=true', '--slim=true', 'package']); + + const zipfiles_hello = listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.deepEqual( + zipfiles_hello.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + + const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.deepEqual( + zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.deepEqual( + zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + + t.end(); +}); + +test('py2.7 can package flask with package individually option', t => { + 
process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--individually=true', '--runtime=python2.7', 'package']); + + const zipfiles_hello = listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + + const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + + t.end(); +}); + +test('py2.7 can package flask with package individually & slim option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--individually=true', '--runtime=python2.7', '--slim=true', 'package']); + + const zipfiles_hello = listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.deepEqual( + zipfiles_hello.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + + const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.deepEqual( + zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.deepEqual( + zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + + t.end(); +}); + +test('py3.6 can package only requirements of module', t => { + process.chdir('tests/individually'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + + const zipfiles_hello = listZipFiles( + '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' + ); + t.true( + zipfiles_hello.includes('handler1.py'), + 'handler1.py is packaged at root level in function hello1' + ); + t.false( + zipfiles_hello.includes('handler2.py'), + 'handler2.py is NOT packaged at root level in function hello1' + ); + t.true( + zipfiles_hello.includes(`pyaml${sep}__init__.py`), + 'pyaml is packaged in function hello1' + ); + t.false( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello1' + ); + + const zipfiles_hello2 = listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.false( + 
zipfiles_hello2.includes('handler1.py'), + 'handler1.py is NOT packaged at root level in function hello2' + ); + t.false( + zipfiles_hello2.includes(`pyaml${sep}__init__.py`), + 'pyaml is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + t.end(); +}); + +test('py3.6 can package lambda-decorators using vendor and invidiually option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--individually=true', '--vendor=./vendor', 'package']); + + const zipfiles_hello = listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged at root level in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged in function hello' + ); + + const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged at root level in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged in function hello2' + ); + + const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged at root level in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`lambda_decorators.py`), + 'lambda_decorators.py is NOT packaged in function hello3' + ); + + t.end(); +}); + +test( + "Don't nuke execute perms when using individually", + t => { + process.chdir('tests/individually'); + const path = npm(['pack', '../..']); + const perm = '775'; + writeFileSync(`module1${sep}foobar`, '', { mode: perm }); + + npm(['i', path]); + sls(['package']); + + const zipfiles_hello = listZipFilesWithMetaData('.serverless/hello1.zip'); + + t.true( + zipfiles_hello['module1/foobar'].unixPermissions + .toString(8) + .slice(3, 6) === perm, + 'foobar has retained its executable file permissions' + ); + + t.end(); + }, + { skip: process.platform === 'win32' } +); + +test( + "Don't nuke execute perms when using individually w/docker", + t => { + process.chdir('tests/individually'); + const path = npm(['pack', '../..']); + const perm = '775'; + writeFileSync(`module1${sep}foobar`, '', { mode: perm }); + + npm(['i', path]); + sls(['--dockerizePip=true', 'package']); + + const zipfiles_hello = listZipFilesWithMetaData('.serverless/hello1.zip'); + + t.true( + zipfiles_hello['module1/foobar'].unixPermissions + .toString(8) + .slice(3, 6) === perm, + 'foobar has retained its executable file permissions' + ); + + t.end(); + }, + { skip: !canUseDocker() || process.platform === 'win32' } +); + +test('py3.6 uses download cache with useDownloadCache option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--useDownloadCache=true', 'package']); + const cachepath = getUserCachePath(); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'cache directoy exists' + ); + t.end(); +}); + +test('py3.6 uses download cache with cacheLocation option', t => { + 
process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--useDownloadCache=true', + '--cacheLocation=.requirements-cache', + 'package' + ]); + t.true( + pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), + 'cache directoy exists' + ); + t.end(); +}); + +test( + 'py3.6 uses download cache with dockerizePip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--useDownloadCache=true', '--dockerizePip=true', 'package']); + const cachepath = getUserCachePath(); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'cache directoy exists' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test( + 'py3.6 uses download cache with dockerizePip + cacheLocation option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--useDownloadCache=true', + '--dockerizePip=true', + '--cacheLocation=.requirements-cache', + 'package' + ]); + t.true( + pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), + 'cache directoy exists' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test('py3.6 uses static and download cache', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--useDownloadCache=true', '--useStaticCache=true', 'package']); + const cachepath = getUserCachePath(); + const cacheFolderHash = md5Path('.serverless/requirements.txt'); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + t.true( + pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + 'flask exists in static-cache' + ); + t.end(); +}); + +test( + 'py3.6 uses static and download cache with dockerizePip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--useDownloadCache=true', + '--useStaticCache=true', + '--dockerizePip=true', + 'package' + ]); + const cachepath = getUserCachePath(); + const cacheFolderHash = md5Path('.serverless/requirements.txt'); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + t.true( + pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + 'flask exists in static-cache' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test('py3.6 uses static cache', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--useStaticCache=true', 'package']); + const cachepath = getUserCachePath(); + const cacheFolderHash = md5Path('.serverless/requirements.txt'); + t.true( + pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + + // py3.6 checking that static cache actually pulls from cache (by poisoning it) + writeFileSync( + `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, + 'injected new file into static cache folder' + ); + sls(['--useStaticCache=true', 'package']); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('injected_file_is_bad_form'), + "static cache is really used when running 'sls package' again" + ); + + t.end(); +}); + 
+test('py3.6 uses static cache with cacheLocation option', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + const cachepath = '.requirements-cache'; + sls(['--useStaticCache=true', `--cacheLocation=${cachepath}`, 'package']); + const cacheFolderHash = md5Path('.serverless/requirements.txt'); + t.true( + pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + t.end(); +}); + +test( + 'py3.6 uses static cache with dockerizePip & slim option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--useStaticCache=true', + '--dockerizePip=true', + '--slim=true', + 'package' + ]); + const cachepath = getUserCachePath(); + const cacheFolderHash = md5Path('.serverless/requirements.txt'); + t.true( + pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + + // py3.6 checking that static cache actually pulls from cache (by poisoning it) + writeFileSync( + `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, + 'injected new file into static cache folder' + ); + sls([ + '--useStaticCache=true', + '--dockerizePip=true', + '--slim=true', + 'package' + ]); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('injected_file_is_bad_form'), + "static cache is really used when running 'sls package' again" + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files are packaged' + ); + + t.end(); + }, + { skip: !canUseDocker() } +); + +test( + 'py3.6 uses download cache with dockerizePip & slim option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--useDownloadCache=true', + '--dockerizePip=true', + '--slim=true', + 'package' + ]); + const cachepath = getUserCachePath(); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files are packaged' + ); + + t.end(); + }, + { skip: !canUseDocker() } +); diff --git a/tests/base/_slimPatterns.yml b/tests/base/_slimPatterns.yml index 70f863cc..02c631b4 100644 --- a/tests/base/_slimPatterns.yml +++ b/tests/base/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/*.egg-info*" + - "**/__main__.py" diff --git a/tests/base/package.json b/tests/base/package.json index d37ade00..d13fd651 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.2.4.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.2.5.tgz" } } diff --git a/tests/base/requirements-w-hashes.txt b/tests/base/requirements-w-hashes.txt new file mode 100644 index 00000000..018a6fb9 --- /dev/null +++ 
b/tests/base/requirements-w-hashes.txt @@ -0,0 +1,89 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes --output-file requirements-w-hashes.txt requirements.txt +# +boto3==1.9.50 \ + --hash=sha256:177e9dd53db5028bb43050da20cc7956287889fc172e5e6275a634e42a10beeb \ + --hash=sha256:8c63e616b91907037ab19236afbcf0057efb31411faf38b46f4590e634dc17ea +botocore==1.12.50 \ + --hash=sha256:07fae5a2b8cfb5a92c1dbee3f2feb4da7c471bcead7e18ce735babe5f39e270f \ + --hash=sha256:eeaa190f50ee05a56225ee78c64cb8bf0c3bf090ec605ca6c2f325aa3826a347 \ + # via boto3, s3transfer +bottle==0.12.13 \ + --hash=sha256:39b751aee0b167be8dffb63ca81b735bbf1dd0905b3bc42761efedee8f123355 +click==7.0 \ + --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ + --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ + # via flask +docutils==0.14 \ + --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \ + --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \ + --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 \ + # via botocore +flask==1.0.2 \ + --hash=sha256:2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48 \ + --hash=sha256:a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05 +itsdangerous==1.1.0 \ + --hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19 \ + --hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 \ + # via flask +jinja2==2.10 \ + --hash=sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd \ + --hash=sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4 \ + # via flask +jmespath==0.9.3 \ + --hash=sha256:6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64 \ + --hash=sha256:f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63 \ + # via boto3, botocore +markupsafe==1.1.0 \ + --hash=sha256:048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432 \ + --hash=sha256:130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b \ + --hash=sha256:19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9 \ + --hash=sha256:1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af \ + --hash=sha256:1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834 \ + --hash=sha256:1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd \ + --hash=sha256:1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d \ + --hash=sha256:31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7 \ + --hash=sha256:3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b \ + --hash=sha256:4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3 \ + --hash=sha256:525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c \ + --hash=sha256:52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2 \ + --hash=sha256:52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7 \ + --hash=sha256:5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36 \ + --hash=sha256:5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1 \ + --hash=sha256:5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e \ + --hash=sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1 \ + --hash=sha256:83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c \ + 
--hash=sha256:857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856 \ + --hash=sha256:98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550 \ + --hash=sha256:bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492 \ + --hash=sha256:d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672 \ + --hash=sha256:e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401 \ + --hash=sha256:edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6 \ + --hash=sha256:efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6 \ + --hash=sha256:f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c \ + --hash=sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd \ + --hash=sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1 \ + # via jinja2 +python-dateutil==2.7.5 \ + --hash=sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93 \ + --hash=sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02 \ + # via botocore +s3transfer==0.1.13 \ + --hash=sha256:90dc18e028989c609146e241ea153250be451e05ecc0c2832565231dacdf59c1 \ + --hash=sha256:c7a9ec356982d5e9ab2d4b46391a7d6a950e2b04c472419f5fdec70cc0ada72f \ + # via boto3 +six==1.11.0 \ + --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ + --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb \ + # via python-dateutil +urllib3==1.24.1 \ + --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \ + --hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 \ + # via botocore +werkzeug==0.14.1 \ + --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \ + --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b \ + # via flask diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 23b3f793..a62dbc01 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -15,6 +15,9 @@ custom: slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} vendor: ${opt:vendor, ''} fileName: ${opt:fileName, 'requirements.txt'} + useStaticCache: ${opt:useStaticCache, self:custom.defaults.useStaticCache} + useDownloadCache: ${opt:useDownloadCache, self:custom.defaults.useDownloadCache} + cacheLocation: ${opt:cacheLocation, ''} defaults: slim: false slimPatterns: false @@ -22,13 +25,15 @@ custom: zip: false dockerizePip: false individually: false + useStaticCache: false + useDownloadCache: false package: individually: ${opt:individually, self:custom.defaults.individually} exclude: - '**/*' - include: - - handler.py + include: + - 'handler.py' functions: hello: diff --git a/tests/individually/package.json b/tests/individually/package.json index f75ba960..d13fd651 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.2.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.2.5.tgz" } } diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index 9ae79d6e..c7cf3802 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -6,6 +6,7 @@ provider: package: individually: true + exclude: 'node_modules/**' custom: pythonRequirements: dockerizePip: ${opt:dockerizePip, 
self:custom.defaults.dockerizePip} diff --git a/tests/pipenv/_slimPatterns.yml b/tests/pipenv/_slimPatterns.yml index 70f863cc..02c631b4 100644 --- a/tests/pipenv/_slimPatterns.yml +++ b/tests/pipenv/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/*.egg-info*" + - "**/__main__.py" diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index f75ba960..d13fd651 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.2.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-4.2.5.tgz" } } From 6255b1636bd11166fd0d5222f13763ea61da78f9 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 3 Dec 2018 14:06:09 -0500 Subject: [PATCH 073/328] @bweigel is a test porting hero!! --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0bfd4010..d8e5568a 100644 --- a/README.md +++ b/README.md @@ -404,4 +404,4 @@ zipinfo .serverless/xxx.zip * [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option * [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) * [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching - * [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements + * [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! 
From 5016ca8b54bbaa20d995fbe09658d9eecb45ad92 Mon Sep 17 00:00:00 2001 From: bweigel Date: Fri, 4 Jan 2019 16:11:41 +0100 Subject: [PATCH 074/328] fix weird bug when using root & module level requirements --- lib/pip.js | 44 +++---- lib/shared.js | 8 +- test.js | 158 ++++++++++++++++++++++++- tests/base/fn2/__init__.py | 0 tests/base/fn2/fn2_handler.py | 0 tests/base/fn2/requirements.txt | 1 + tests/base/serverless.yml | 10 +- tests/individually/module1/handler1.py | 9 +- 8 files changed, 198 insertions(+), 32 deletions(-) create mode 100644 tests/base/fn2/__init__.py create mode 100644 tests/base/fn2/fn2_handler.py create mode 100644 tests/base/fn2/requirements.txt diff --git a/lib/pip.js b/lib/pip.js index 29e21cfc..538ad55a 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -53,7 +53,7 @@ function mergeCommands(commands) { * @param {Object} options * @return {undefined} */ -function installRequirementsFile( +function generateRequirementsFile( requirementsPath, targetFile, serverless, @@ -61,7 +61,7 @@ function installRequirementsFile( options ) { if (options.usePipenv && fse.existsSync(path.join(servicePath, 'Pipfile'))) { - generateRequirementsFile( + filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), targetFile, options @@ -70,7 +70,7 @@ function installRequirementsFile( `Parsed requirements.txt from Pipfile in ${targetFile}...` ); } else { - generateRequirementsFile(requirementsPath, targetFile, options); + filterRequirementsFile(requirementsPath, targetFile, options); serverless.cli.log( `Generated requirements from ${requirementsPath} in ${targetFile}...` ); @@ -306,7 +306,6 @@ function dockerPathForWin(path) { return path; } } - /** create a filtered requirements.txt without anything from noDeploy * then remove all comments and empty lines, and sort the list which * assist with matching the static cache. 
The sorting will skip any @@ -318,7 +317,7 @@ function dockerPathForWin(path) { * @param {string} target requirements where results are written * @param {Object} options */ -function generateRequirementsFile(source, target, options) { +function filterRequirementsFile(source, target, options) { const noDeploy = new Set(options.noDeploy || []); const requirements = fse .readFileSync(source, { encoding: 'utf-8' }) @@ -413,11 +412,21 @@ function installRequirementsIfNeeded( } } - // First, generate the requirements file to our local .serverless folder - fse.ensureDirSync(path.join(servicePath, '.serverless')); - const slsReqsTxt = path.join(servicePath, '.serverless', 'requirements.txt'); + let requirementsTxtDirectory; + // Copy our requirements to another path in .serverless (incase of individually packaged) + if (modulePath && modulePath !== '.') { + requirementsTxtDirectory = path.join( + servicePath, + '.serverless', + modulePath + ); + } else { + requirementsTxtDirectory = path.join(servicePath, '.serverless'); + } + fse.ensureDirSync(requirementsTxtDirectory); + const slsReqsTxt = path.join(requirementsTxtDirectory, 'requirements.txt'); - installRequirementsFile( + generateRequirementsFile( fileName, slsReqsTxt, serverless, @@ -433,28 +442,13 @@ function installRequirementsIfNeeded( return false; } - // Copy our requirements to another filename in .serverless (incase of individually packaged) - if (modulePath && modulePath != '.') { - fse.existsSync(path.join(servicePath, '.serverless', modulePath)); - const destinationFile = path.join( - servicePath, - '.serverless', - modulePath, - 'requirements.txt' - ); - serverless.cli.log( - `Copying from ${slsReqsTxt} into ${destinationFile} ...` - ); - fse.copySync(slsReqsTxt, destinationFile); - } - // Then generate our MD5 Sum of this requirements file to determine where it should "go" to and/or pull cache from const reqChecksum = md5Path(slsReqsTxt); // Then figure out where this cache should be, if we're caching, if we're in a module, etc const workingReqsFolder = getRequirementsWorkingPath( reqChecksum, - servicePath, + requirementsTxtDirectory, options ); diff --git a/lib/shared.js b/lib/shared.js index b3a1ffaa..fe48e0e0 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -57,7 +57,11 @@ function checkForAndDeleteMaxCacheVersions(options, serverless) { * @param {Object} options * @return {string} */ -function getRequirementsWorkingPath(subfolder, servicePath, options) { +function getRequirementsWorkingPath( + subfolder, + requirementsTxtDirectory, + options +) { // If we want to use the static cache if (options && options.useStaticCache) { if (subfolder) { @@ -69,7 +73,7 @@ function getRequirementsWorkingPath(subfolder, servicePath, options) { } // If we don't want to use the static cache, then fallback to the way things used to work - return path.join(servicePath, '.serverless', 'requirements'); + return path.join(requirementsTxtDirectory, 'requirements'); } /** diff --git a/test.js b/test.js index b310e672..64d15221 100644 --- a/test.js +++ b/test.js @@ -1008,35 +1008,79 @@ test('py3.6 can package flask with package individually option', t => { sls(['--individually=true', 'package']); const zipfiles_hello = listZipFiles('.serverless/hello.zip'); + t.false( + zipfiles_hello.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello' + ); t.true( zipfiles_hello.includes('handler.py'), 'handler.py is packaged in function hello' ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in 
function hello' + ); t.true( zipfiles_hello.includes(`flask${sep}__init__.py`), 'flask is packaged in function hello' ); const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); + t.false( + zipfiles_hello2.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello2' + ); t.true( zipfiles_hello2.includes('handler.py'), 'handler.py is packaged in function hello2' ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); t.true( zipfiles_hello2.includes(`flask${sep}__init__.py`), 'flask is packaged in function hello2' ); const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); + t.false( + zipfiles_hello3.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello3' + ); t.true( zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello2' + 'handler.py is packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' ); t.false( zipfiles_hello3.includes(`flask${sep}__init__.py`), 'flask is NOT packaged in function hello3' ); + const zipfiles_hello4 = listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.false( + zipfiles_hello4.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello4' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.end(); }); @@ -1060,6 +1104,10 @@ test('py3.6 can package flask with package individually & slim option', t => { zipfiles_hello.includes(`flask${sep}__init__.py`), 'flask is packaged in function hello' ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); t.true( @@ -1075,6 +1123,10 @@ test('py3.6 can package flask with package individually & slim option', t => { zipfiles_hello2.includes(`flask${sep}__init__.py`), 'flask is packaged in function hello2' ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); t.true( @@ -1091,6 +1143,27 @@ test('py3.6 can package flask with package individually & slim option', t => { 'flask is NOT packaged in function hello3' ); + const zipfiles_hello4 = listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.deepEqual( + zipfiles_hello4.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello4' + ); + t.end(); }); @@ -1109,6 +1182,10 @@ test('py2.7 can package flask with package individually option', t => { zipfiles_hello.includes(`flask${sep}__init__.py`), 'flask is packaged in function hello' ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); const zipfiles_hello2 = 
listZipFiles('.serverless/hello2.zip'); t.true( @@ -1119,6 +1196,10 @@ test('py2.7 can package flask with package individually option', t => { zipfiles_hello2.includes(`flask${sep}__init__.py`), 'flask is packaged in function hello2' ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); t.true( @@ -1129,6 +1210,26 @@ test('py2.7 can package flask with package individually option', t => { zipfiles_hello3.includes(`flask${sep}__init__.py`), 'flask is NOT packaged in function hello3' ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); t.end(); }); @@ -1153,6 +1254,10 @@ test('py2.7 can package flask with package individually & slim option', t => { zipfiles_hello.includes(`flask${sep}__init__.py`), 'flask is packaged in function hello' ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); t.true( @@ -1168,6 +1273,10 @@ test('py2.7 can package flask with package individually & slim option', t => { zipfiles_hello2.includes(`flask${sep}__init__.py`), 'flask is packaged in function hello2' ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); t.true( @@ -1183,6 +1292,26 @@ test('py2.7 can package flask with package individually & slim option', t => { zipfiles_hello3.includes(`flask${sep}__init__.py`), 'flask is NOT packaged in function hello3' ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); t.end(); }); @@ -1255,6 +1384,10 @@ test('py3.6 can package lambda-decorators using vendor and invidiually option', zipfiles_hello.includes(`lambda_decorators.py`), 'lambda_decorators.py is packaged in function hello' ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); t.true( @@ -1269,6 +1402,10 @@ test('py3.6 can package lambda-decorators using vendor and invidiually option', zipfiles_hello2.includes(`lambda_decorators.py`), 'lambda_decorators.py is packaged in function hello2' ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); t.true( @@ -1283,7 +1420,26 @@ test('py3.6 can package 
lambda-decorators using vendor and invidiually option', zipfiles_hello3.includes(`lambda_decorators.py`), 'lambda_decorators.py is NOT packaged in function hello3' ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + const zipfiles_hello4 = listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); t.end(); }); diff --git a/tests/base/fn2/__init__.py b/tests/base/fn2/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/base/fn2/fn2_handler.py b/tests/base/fn2/fn2_handler.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/base/fn2/requirements.txt b/tests/base/fn2/requirements.txt new file mode 100644 index 00000000..eea18113 --- /dev/null +++ b/tests/base/fn2/requirements.txt @@ -0,0 +1 @@ +dataclasses \ No newline at end of file diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index a62dbc01..1684ab2f 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -32,7 +32,7 @@ package: individually: ${opt:individually, self:custom.defaults.individually} exclude: - '**/*' - include: + include: - 'handler.py' functions: @@ -43,3 +43,11 @@ functions: hello3: handler: handler.hello runtime: nodejs6.10 + hello4: + handler: fn2_handler.hello + module: fn2 + package: + include: + - 'fn2/**' + + diff --git a/tests/individually/module1/handler1.py b/tests/individually/module1/handler1.py index 970b0c01..369835cd 100644 --- a/tests/individually/module1/handler1.py +++ b/tests/individually/module1/handler1.py @@ -1,6 +1,9 @@ import boto3 + def hello(event, context): - return { - 'status': 200, - } + return {"status": 200} + + +def hello2(event, context): + return {"status": 200} From e1452233a01364aba7446a123c64832309f5fa89 Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Tue, 22 Jan 2019 11:23:57 -0800 Subject: [PATCH 075/328] add options to build requirements as a lambda layer --- README.md | 32 ++++++++++++++++++++++++++++++++ index.js | 12 ++++++++++++ lib/inject.js | 4 ++++ lib/layer.js | 49 +++++++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 97 insertions(+) create mode 100644 lib/layer.js diff --git a/README.md b/README.md index d8e5568a..7bb7e0e3 100644 --- a/README.md +++ b/README.md @@ -146,6 +146,38 @@ custom: ``` This will remove all folders within the installed requirements that match the names in `slimPatterns` + +### Lamba Layer +Another method for dealing with large dependencies is to put them into a +[Lambda Layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html). +Simply add the `layer` option to the configuration. +```yaml +custom: + pythonRequirements: + layer: true +``` +The requirements will be zipped up and a layer will be created automatically. +Now just add the reference to the functions that will use the layer. +```yaml +functions: + hello: + handler: handler.hello + layers: + - {Ref: PythonRequirementsLambdaLayer} +``` +If the layer requires additional or custom configuration, add them onto the `layer` option. 
+```yaml +custom: + pythonRequirements: + layer: + name: ${self:provider.stage}-layerName + description: Python requirements lamba layer + compatibleRuntimes: + - python3.7 + licenseInfo: GPLv3 + allowedAccounts: + - '*' +``` ## Omitting Packages You can omit a package from deployment with the `noDeploy` option. Note that dependencies of omitted packages must explicitly be omitted too. By default, diff --git a/index.js b/index.js index 470fcd67..a0164d83 100644 --- a/index.js +++ b/index.js @@ -10,6 +10,7 @@ const { packRequirements } = require('./lib/zip'); const { injectAllRequirements } = require('./lib/inject'); +const { layerRequirements } = require('./lib/layer'); const { installAllRequirements } = require('./lib/pip'); const { pipfileToRequirements } = require('./lib/pipenv'); const { cleanup, cleanupCache } = require('./lib/clean'); @@ -31,6 +32,8 @@ class ServerlessPythonRequirements { slimPatterns: false, slimPatternsAppendDefaults: true, zip: false, + inject: true, + layer: false, cleanupZipHelper: true, invalidateCaches: false, fileName: 'requirements.txt', @@ -94,6 +97,14 @@ class ServerlessPythonRequirements { }`; options.dockerImage = options.dockerImage || defaultImage; } + if (options.layer) { + // If layers are being used, dependencies should not be injected. + options.inject = false; + // If layer was set as a boolean, set it to an empty object to use the layer defaults. + if (options.layer === true) { + options.layer = {}; + } + } return options; } @@ -167,6 +178,7 @@ class ServerlessPythonRequirements { } return BbPromise.bind(this) .then(removeVendorHelper) + .then(layerRequirements) .then(() => injectAllRequirements.bind(this)( arguments[1].functionObj && diff --git a/lib/inject.js b/lib/inject.js index 973ba99b..64f7b613 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -72,6 +72,10 @@ function moveModuleUp(source, target, module) { * @return {Promise} the combined promise for requirements injection. */ function injectAllRequirements(funcArtifact) { + if (!this.options.inject) { + return BbPromise.resolve(); + } + this.serverless.cli.log('Injecting required Python packages to package...'); if (this.serverless.service.package.individually) { diff --git a/lib/layer.js b/lib/layer.js new file mode 100644 index 00000000..24ea2b72 --- /dev/null +++ b/lib/layer.js @@ -0,0 +1,49 @@ +const BbPromise = require('bluebird'); +const fse = require('fs-extra'); +const path = require('path'); +const JSZip = require('jszip'); +const { writeZip, addTree } = require('./zipTree'); + +BbPromise.promisifyAll(fse); + +/** + * Zip up requirements to be used as layer package. + * @return {Promise} the JSZip object constructed. + */ +function zipRequirements() { + return addTree(new JSZip(), path.join('.serverless', 'requirements')).then(zip => + writeZip(zip, path.join('.serverless', 'pythonRequirements.zip')) + ); +} + +/** + * Creates a layer on the serverless service for the requirements zip. + * @return {Promise} + */ +function createLayers() { + this.serverless.service.layers['pythonRequirements'] = Object.assign({ + artifact: path.join('.serverless', 'pythonRequirements.zip'), + name: `${this.serverless.service.stage}-python-requirements`, + description: 'Python requirements generated by serverless-python-requirements.', + }, this.options.layer); + + return BbPromise.resolve(); +} + +/** + * Creates a layer from the installed requirements. + * @return {Promise} the combined promise for requirements layer. 
+ */ +function layerRequirements() { + if (!this.options.layer) { + return BbPromise.resolve() + } + + this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); + + return BbPromise.bind(this) + .then(zipRequirements) + .then(createLayers) +} + +module.exports = { layerRequirements }; From 81d68649ef62f23ff6cffc85434f39a5ff4a7321 Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Tue, 22 Jan 2019 12:03:25 -0800 Subject: [PATCH 076/328] fix formatting issues from prettier --- lib/layer.js | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/lib/layer.js b/lib/layer.js index 24ea2b72..8c18d4db 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -11,9 +11,9 @@ BbPromise.promisifyAll(fse); * @return {Promise} the JSZip object constructed. */ function zipRequirements() { - return addTree(new JSZip(), path.join('.serverless', 'requirements')).then(zip => - writeZip(zip, path.join('.serverless', 'pythonRequirements.zip')) - ); + return addTree(new JSZip(), path.join('.serverless', 'requirements')).then( + zip => writeZip(zip, path.join('.serverless', 'pythonRequirements.zip')) + ); } /** @@ -21,11 +21,15 @@ function zipRequirements() { * @return {Promise} */ function createLayers() { - this.serverless.service.layers['pythonRequirements'] = Object.assign({ + this.serverless.service.layers['pythonRequirements'] = Object.assign( + { artifact: path.join('.serverless', 'pythonRequirements.zip'), name: `${this.serverless.service.stage}-python-requirements`, - description: 'Python requirements generated by serverless-python-requirements.', - }, this.options.layer); + description: + 'Python requirements generated by serverless-python-requirements.' + }, + this.options.layer + ); return BbPromise.resolve(); } @@ -36,14 +40,14 @@ function createLayers() { */ function layerRequirements() { if (!this.options.layer) { - return BbPromise.resolve() + return BbPromise.resolve(); } this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); return BbPromise.bind(this) .then(zipRequirements) - .then(createLayers) + .then(createLayers); } module.exports = { layerRequirements }; From 60a3895455367768efef7eb29ba0d05268ce357b Mon Sep 17 00:00:00 2001 From: Daniel Paul Searles Date: Fri, 18 Jan 2019 22:06:09 -0800 Subject: [PATCH 077/328] Add support for poetry. Why: * [poetry](https://github.com/sdispater/poetry) is a Python dependency management and packaging tool. This change addresses the need by: * Taking the same approach as pipenv. * Install poetry on CircleCI and AppVeyor for tests. * Use chmodSync in test.js as it works better cross platform for setting exact permissions on files. See: https://github.com/nodejs/node/issues/1104 Side effects: * There may be conflicts or oddities if there is a Pipfile and a pyproject.toml. 
--- .tool-versions | 2 + README.md | 18 ++++ appveyor.yml | 1 + circle.yml | 11 +- index.js | 3 + lib/pip.js | 55 +++++++--- lib/poetry.js | 43 ++++++++ test.js | 117 +++++++++++++++++++- tests/poetry/.gitignore | 22 ++++ tests/poetry/_slimPatterns.yml | 2 + tests/poetry/handler.py | 5 + tests/poetry/package.json | 14 +++ tests/poetry/poetry.lock | 192 +++++++++++++++++++++++++++++++++ tests/poetry/pyproject.toml | 17 +++ tests/poetry/serverless.yml | 31 ++++++ 15 files changed, 518 insertions(+), 15 deletions(-) create mode 100644 .tool-versions create mode 100644 lib/poetry.js create mode 100644 tests/poetry/.gitignore create mode 100644 tests/poetry/_slimPatterns.yml create mode 100644 tests/poetry/handler.py create mode 100644 tests/poetry/package.json create mode 100644 tests/poetry/poetry.lock create mode 100644 tests/poetry/pyproject.toml create mode 100644 tests/poetry/serverless.yml diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 00000000..f9e0b286 --- /dev/null +++ b/.tool-versions @@ -0,0 +1,2 @@ +nodejs 6.16.0 +python 3.6.8 2.7.15 diff --git a/README.md b/README.md index d8e5568a..f396dcb6 100644 --- a/README.md +++ b/README.md @@ -94,6 +94,23 @@ custom: ``` +## Poetry support :sparkles::pencil::sparkles: +NOTE: Only poetry version 1 supports the required `export` command for this +feature. As of the point this feature was added, poetry 1.0.0 was in preview +and requires that poetry is installed with the --preview flag. + +TL;DR Install poetry with the `--preview` flag. + +If you include a `pyproject.toml` and have `poetry` installed instead of a `requirements.txt` this will use +`poetry export --without-hashes -f requirements.txt` to generate them. It is fully compatible with all options such as `zip` and +`dockerizePip`. If you don't want this plugin to generate it for you, set the following option: +```yaml +custom: + pythonRequirements: + usePoetry: false +``` + + ## Dealing with Lambda's size limitations To help deal with potentially large dependencies (for example: `numpy`, `scipy` and `scikit-learn`) there is support for compressing the libraries. This does @@ -405,3 +422,4 @@ zipinfo .serverless/xxx.zip * [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) * [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching * [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! 
+ * [@squaresurf](https://github.com/squaresurf) - adding usePoetry option diff --git a/appveyor.yml b/appveyor.yml index a8301d47..f938aeb2 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,6 +1,7 @@ version: '{build}' init: - cmd: pip install pipenv + - cmd: pip install poetry==1.0.0a2 - ps: npm i -g serverless build: off test_script: diff --git a/circle.yml b/circle.yml index f86e51af..87341cbe 100644 --- a/circle.yml +++ b/circle.yml @@ -25,6 +25,11 @@ jobs: - run: sudo apt -y update && sudo apt -y install python-pip python2.7 curl unzip # instal pipenv - run: sudo python3.6 -m pip install pipenv pip-tools + # install poetry + - run: | + curl https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py -o get-poetry.py + python get-poetry.py --preview --yes + rm get-poetry.py # install nodejs - run: curl -sL https://deb.nodesource.com/setup_6.x | sudo bash - && sudo apt -y install nodejs # install serverless & depcheck @@ -36,4 +41,8 @@ jobs: # lint: - run: npm run lint # test! - - run: npm run test + - run: | + export PATH="$HOME/.poetry/bin:$PATH" + export LC_ALL=C.UTF-8 + export LANG=C.UTF-8 + npm run test diff --git a/index.js b/index.js index 470fcd67..dd20f7d7 100644 --- a/index.js +++ b/index.js @@ -12,6 +12,7 @@ const { const { injectAllRequirements } = require('./lib/inject'); const { installAllRequirements } = require('./lib/pip'); const { pipfileToRequirements } = require('./lib/pipenv'); +const { pyprojectTomlToRequirements } = require('./lib/poetry'); const { cleanup, cleanupCache } = require('./lib/clean'); BbPromise.promisifyAll(fse); @@ -35,6 +36,7 @@ class ServerlessPythonRequirements { invalidateCaches: false, fileName: 'requirements.txt', usePipenv: true, + usePoetry: true, pythonBin: process.platform === 'win32' ? 'python.exe' @@ -156,6 +158,7 @@ class ServerlessPythonRequirements { } return BbPromise.bind(this) .then(pipfileToRequirements) + .then(pyprojectTomlToRequirements) .then(addVendorHelper) .then(installAllRequirements) .then(packRequirements); diff --git a/lib/pip.js b/lib/pip.js index 538ad55a..df107e23 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -60,7 +60,22 @@ function generateRequirementsFile( servicePath, options ) { - if (options.usePipenv && fse.existsSync(path.join(servicePath, 'Pipfile'))) { + if ( + options.usePoetry && + fse.existsSync(path.join(servicePath, 'pyproject.toml')) + ) { + filterRequirementsFile( + path.join(servicePath, '.serverless/requirements.txt'), + targetFile, + options + ); + serverless.cli.log( + `Parsed requirements.txt from pyproject.toml in ${targetFile}...` + ); + } else if ( + options.usePipenv && + fse.existsSync(path.join(servicePath, 'Pipfile')) + ) { filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), targetFile, @@ -377,6 +392,31 @@ function copyVendors(vendorFolder, targetFolder, serverless) { }); } +/** + * This checks if requirements file exists. + * @param {string} servicePath + * @param {Object} options + * @param {string} fileName + */ +function requirementsFileExists(servicePath, options, fileName) { + if ( + options.usePoetry && + fse.existsSync(path.join(servicePath, 'pyproject.toml')) + ) { + return true; + } + + if (options.usePipenv && fse.existsSync(path.join(servicePath, 'Pipfile'))) { + return true; + } + + if (fse.existsSync(fileName)) { + return true; + } + + return false; +} + /** * This evaluates if requirements are actually needed to be installed, but fails * gracefully if no req file is found intentionally. 
It also assists with code @@ -399,17 +439,8 @@ function installRequirementsIfNeeded( const fileName = path.join(servicePath, modulePath, options.fileName); // Skip requirements generation, if requirements file doesn't exist - if (options.usePipenv) { - if ( - !fse.existsSync(path.join(servicePath, 'Pipfile')) && - !fse.existsSync(fileName) - ) { - return false; - } - } else { - if (!fse.existsSync(fileName)) { - return false; - } + if (!requirementsFileExists(servicePath, options, fileName)) { + return false; } let requirementsTxtDirectory; diff --git a/lib/poetry.js b/lib/poetry.js new file mode 100644 index 00000000..0ddf8da7 --- /dev/null +++ b/lib/poetry.js @@ -0,0 +1,43 @@ +const fse = require('fs-extra'); +const path = require('path'); +const { spawnSync } = require('child_process'); + +/** + * poetry install + */ +function pyprojectTomlToRequirements() { + if ( + !this.options.usePoetry || + !fse.existsSync(path.join(this.servicePath, 'pyproject.toml')) + ) { + return; + } + + this.serverless.cli.log('Generating requirements.txt from pyproject.toml...'); + + const res = spawnSync( + 'poetry', + ['export', '--without-hashes', '-f', 'requirements.txt'], + { + cwd: this.servicePath + } + ); + if (res.error) { + if (res.error.code === 'ENOENT') { + throw new Error( + `poetry not found! Install it according to the poetry docs.` + ); + } + throw new Error(res.error); + } + if (res.status !== 0) { + throw new Error(res.stderr); + } + fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + fse.moveSync( + path.join(this.servicePath, 'requirements.txt'), + path.join(this.servicePath, '.serverless', 'requirements.txt') + ); +} + +module.exports = { pyprojectTomlToRequirements }; diff --git a/test.js b/test.js index 64d15221..08dd3f68 100644 --- a/test.js +++ b/test.js @@ -4,6 +4,7 @@ const glob = require('glob-all'); const JSZip = require('jszip'); const tape = require('tape'); const { + chmodSync, removeSync, readFileSync, copySync, @@ -710,6 +711,94 @@ test("pipenv py3.6 doesn't package bottle with noDeploy option", t => { t.end(); }); +test('poetry py3.6 can package flask with default options', t => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test('poetry py3.6 can package flask with slim option', t => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('poetry py3.6 can package flask with slim & slimPatterns options', t => { + process.chdir('tests/poetry'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => 
filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.6 can package flask with zip option', t => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test("poetry py3.6 doesn't package bottle with noDeploy option", t => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml' + ]); + sls(['package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + test('py3.6 can package flask with zip option and no explicit include', t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -760,7 +849,8 @@ test( 's/(handler.py.*$)/$1\n - foobar/', 'serverless.yml' ]); - writeFileSync(`foobar`, '', { mode: perm }); + writeFileSync(`foobar`, ''); + chmodSync(`foobar`, perm); sls(['--vendor=./vendor', 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); @@ -1001,6 +1091,27 @@ test('pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppe t.end(); }); +test('poetry py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', t => { + process.chdir('tests/poetry'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + test('py3.6 can package flask with package individually option', t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1449,7 +1560,8 @@ test( process.chdir('tests/individually'); const path = npm(['pack', '../..']); const perm = '775'; - writeFileSync(`module1${sep}foobar`, '', { mode: perm }); + writeFileSync(`module1${sep}foobar`, ''); + chmodSync(`module1${sep}foobar`, perm); npm(['i', path]); sls(['package']); @@ -1475,6 +1587,7 @@ test( const path = npm(['pack', '../..']); const perm = '775'; writeFileSync(`module1${sep}foobar`, '', { mode: perm }); + chmodSync(`module1${sep}foobar`, perm); npm(['i', path]); sls(['--dockerizePip=true', 'package']); diff --git a/tests/poetry/.gitignore b/tests/poetry/.gitignore new file mode 100644 index 00000000..3c2369dc --- /dev/null +++ b/tests/poetry/.gitignore @@ -0,0 +1,22 @@ +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ 
+downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Serverless +.serverless +.requirements +unzip_requirements.py diff --git a/tests/poetry/_slimPatterns.yml b/tests/poetry/_slimPatterns.yml new file mode 100644 index 00000000..02c631b4 --- /dev/null +++ b/tests/poetry/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - "**/__main__.py" diff --git a/tests/poetry/handler.py b/tests/poetry/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/poetry/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/poetry/package.json b/tests/poetry/package.json new file mode 100644 index 00000000..d13fd651 --- /dev/null +++ b/tests/poetry/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-4.2.5.tgz" + } +} diff --git a/tests/poetry/poetry.lock b/tests/poetry/poetry.lock new file mode 100644 index 00000000..2cc3a756 --- /dev/null +++ b/tests/poetry/poetry.lock @@ -0,0 +1,192 @@ +[[package]] +category = "main" +description = "The AWS SDK for Python" +name = "boto3" +optional = false +python-versions = "*" +version = "1.9.80" + +[package.dependencies] +botocore = ">=1.12.80,<1.13.0" +jmespath = ">=0.7.1,<1.0.0" +s3transfer = ">=0.1.10,<0.2.0" + +[[package]] +category = "main" +description = "Low-level, data-driven core of boto 3." +name = "botocore" +optional = false +python-versions = "*" +version = "1.12.80" + +[package.dependencies] +docutils = ">=0.10" +jmespath = ">=0.7.1,<1.0.0" + +[package.dependencies.python-dateutil] +python = ">=2.7" +version = ">=2.1,<3.0.0" + +[package.dependencies.urllib3] +python = ">=3.4" +version = ">=1.20,<1.25" + +[[package]] +category = "main" +description = "Fast and simple WSGI-framework for small web-applications." +name = "bottle" +optional = false +python-versions = "*" +version = "0.12.16" + +[[package]] +category = "main" +description = "Composable command line interface toolkit" +name = "click" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "7.0" + +[[package]] +category = "main" +description = "Docutils -- Python Documentation Utilities" +name = "docutils" +optional = false +python-versions = "*" +version = "0.14" + +[[package]] +category = "main" +description = "A simple framework for building complex web applications." +name = "flask" +optional = false +python-versions = "*" +version = "1.0.2" + +[package.dependencies] +Jinja2 = ">=2.10" +Werkzeug = ">=0.14" +click = ">=5.1" +itsdangerous = ">=0.24" + +[package.extras] +dev = ["pytest (>=3)", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet"] +docs = ["sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet"] +dotenv = ["python-dotenv"] + +[[package]] +category = "main" +description = "Various helpers to pass data to untrusted environments and back." +name = "itsdangerous" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.1.0" + +[[package]] +category = "main" +description = "A small but fast and easy to use stand-alone template engine written in pure python." 
+name = "jinja2" +optional = false +python-versions = "*" +version = "2.10" + +[package.dependencies] +MarkupSafe = ">=0.23" + +[package.extras] +i18n = ["Babel (>=0.8)"] + +[[package]] +category = "main" +description = "JSON Matching Expressions" +name = "jmespath" +optional = false +python-versions = "*" +version = "0.9.3" + +[[package]] +category = "main" +description = "Safely add untrusted strings to HTML/XML markup." +name = "markupsafe" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +version = "1.1.0" + +[[package]] +category = "main" +description = "Extensions to the standard Python datetime module" +marker = "python_version >= \"2.7\"" +name = "python-dateutil" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.7.5" + +[package.dependencies] +six = ">=1.5" + +[[package]] +category = "main" +description = "An Amazon S3 Transfer Manager" +name = "s3transfer" +optional = false +python-versions = "*" +version = "0.1.13" + +[package.dependencies] +botocore = ">=1.3.0,<2.0.0" + +[[package]] +category = "main" +description = "Python 2 and 3 compatibility utilities" +marker = "python_version >= \"2.7\"" +name = "six" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" +version = "1.12.0" + +[[package]] +category = "main" +description = "HTTP library with thread-safe connection pooling, file post, and more." +marker = "python_version >= \"3.4\"" +name = "urllib3" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" +version = "1.24.1" + +[package.extras] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] + +[[package]] +category = "main" +description = "The comprehensive WSGI web application library." 
+name = "werkzeug" +optional = false +python-versions = "*" +version = "0.14.1" + +[package.extras] +dev = ["coverage", "pytest", "sphinx", "tox"] +termcolor = ["termcolor"] +watchdog = ["watchdog"] + +[metadata] +content-hash = "fa5a641a2c19871b5899fbc700d6375250a5d2e327832a012296af6a31c8093a" +python-versions = "^3.6" + +[metadata.hashes] +boto3 = ["122603b00f8c458236d1bd09850bdea56fc45f271e75ca38e66dbce37f72cada", "99ec19dc4f0aa8a8354db7baebe1ff57bd18aeb6a539b28693b2e8ca8dc3d85b"] +botocore = ["76a2969278250e010253ddf514f4b54eaa7d2b1430f682874c3c2ab92f25a96d", "8c579bac9abeaff1270a7a25964b01d3db1367f42fa5f826e1303ec8a4b13cef"] +bottle = ["9c310da61e7df2b6ac257d8a90811899ccb3a9743e77e947101072a2e3186726", "ca43beafbdccabbe31b758a4f34d1e44985a9b9539516775208b2b0f903eafa0"] +click = ["2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"] +docutils = ["02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", "51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274", "7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6"] +flask = ["2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48", "a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05"] +itsdangerous = ["321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"] +jinja2 = ["74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", "f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"] +jmespath = ["6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64", "f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63"] +markupsafe = ["048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432", "130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b", "19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9", "1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af", "1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834", "1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd", "1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d", "31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7", "3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b", "4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3", "525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c", "52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2", "52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7", "5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36", "5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1", "5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e", "7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1", "83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c", "857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856", "98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550", "bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492", "d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672", "e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401", "edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6", "efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6", "f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c", 
"f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd", "fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1"] +python-dateutil = ["063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", "88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"] +s3transfer = ["90dc18e028989c609146e241ea153250be451e05ecc0c2832565231dacdf59c1", "c7a9ec356982d5e9ab2d4b46391a7d6a950e2b04c472419f5fdec70cc0ada72f"] +six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] +urllib3 = ["61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", "de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"] +werkzeug = ["c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c", "d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b"] diff --git a/tests/poetry/pyproject.toml b/tests/poetry/pyproject.toml new file mode 100644 index 00000000..20e85d92 --- /dev/null +++ b/tests/poetry/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "poetry" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.6" +Flask = "^1.0" +bottle = "^0.12.16" +boto3 = "^1.9" + +[tool.poetry.dev-dependencies] + +[build-system] +requires = ["poetry>=0.12"] +build-backend = "poetry.masonry.api" diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml new file mode 100644 index 00000000..6df76a55 --- /dev/null +++ b/tests/poetry/serverless.yml @@ -0,0 +1,31 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.6 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + zip: ${opt:zip, self:custom.defaults.zip} + slim: ${opt:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + exclude: + - '**/*' + include: + - handler.py + +functions: + hello: + handler: handler.hello From 32dd65940111b3316707afef96e035ecec308844 Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Tue, 5 Feb 2019 21:54:52 -0800 Subject: [PATCH 078/328] remove inject option. layer option can be used to infer what to do --- index.js | 3 --- lib/inject.js | 3 ++- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/index.js b/index.js index a0164d83..db8dbdc3 100644 --- a/index.js +++ b/index.js @@ -32,7 +32,6 @@ class ServerlessPythonRequirements { slimPatterns: false, slimPatternsAppendDefaults: true, zip: false, - inject: true, layer: false, cleanupZipHelper: true, invalidateCaches: false, @@ -98,8 +97,6 @@ class ServerlessPythonRequirements { options.dockerImage = options.dockerImage || defaultImage; } if (options.layer) { - // If layers are being used, dependencies should not be injected. - options.inject = false; // If layer was set as a boolean, set it to an empty object to use the layer defaults. if (options.layer === true) { options.layer = {}; diff --git a/lib/inject.js b/lib/inject.js index 64f7b613..1abbb531 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -72,7 +72,8 @@ function moveModuleUp(source, target, module) { * @return {Promise} the combined promise for requirements injection. 
*/ function injectAllRequirements(funcArtifact) { - if (!this.options.inject) { + if (this.options.layer) { + // The requirements will be placed in a Layer, so just resolve return BbPromise.resolve(); } From 6f32335fb8b93367e7ec43c241c721868d8e61d3 Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Tue, 5 Feb 2019 23:18:54 -0800 Subject: [PATCH 079/328] prepend the path in the zip folder depending on the runtime --- lib/layer.js | 56 +++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 45 insertions(+), 11 deletions(-) diff --git a/lib/layer.js b/lib/layer.js index 8c18d4db..d627fd6e 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -2,31 +2,62 @@ const BbPromise = require('bluebird'); const fse = require('fs-extra'); const path = require('path'); const JSZip = require('jszip'); -const { writeZip, addTree } = require('./zipTree'); +const { + writeZip, + addTree +} = require('./zipTree'); BbPromise.promisifyAll(fse); +/** + * Get the paths for the compatible runtimes of the layer + * @param {string[]} list of runtime paths + */ +function getRunTimeBuildPaths() { + const runtimepaths = { + 'python2.7': 'python', + 'python3.6': path.join('python', 'lib', 'python3.6', 'site-packages'), + 'python3.7': path.join('python', 'lib', 'python3.7', 'site-packages'), + }; + + let runtimes = [] + + // Defer to Layer config first + if (this.options.layer.compatibleRuntimes) { + runtimes = this.options.layer.compatibleRuntimes; + // If none provided, assume the provider runtime + } else if (this.serverless.service.provider.runtime) { + runtimes = [this.serverless.service.provider.runtime]; + // If still no runtime found, just assume latest python + } else { + runtimes = ['python3.7']; + } + + return BbPromise.resolve(runtimes.map(runtime => runtimepaths[runtime])); +} + /** * Zip up requirements to be used as layer package. + * @param {string[]} list of paths where the requirements should be put in the layer * @return {Promise} the JSZip object constructed. */ -function zipRequirements() { - return addTree(new JSZip(), path.join('.serverless', 'requirements')).then( - zip => writeZip(zip, path.join('.serverless', 'pythonRequirements.zip')) - ); +function zipRequirements(runtimepaths) { + const rootZip = new JSZip(); + const src = path.join('.serverless', 'requirements') + + return BbPromise.each(runtimepaths, (runtimepath) => addTree(rootZip.folder(runtimepath), src)) + .then(() => writeZip(rootZip, path.join('.serverless', 'pythonRequirementsLayer.zip'))) } /** * Creates a layer on the serverless service for the requirements zip. - * @return {Promise} + * @return {Promise} empty promise */ function createLayers() { - this.serverless.service.layers['pythonRequirements'] = Object.assign( - { + this.serverless.service.layers['pythonRequirements'] = Object.assign({ artifact: path.join('.serverless', 'pythonRequirements.zip'), name: `${this.serverless.service.stage}-python-requirements`, - description: - 'Python requirements generated by serverless-python-requirements.' + description: 'Python requirements generated by serverless-python-requirements.' 
}, this.options.layer ); @@ -46,8 +77,11 @@ function layerRequirements() { this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); return BbPromise.bind(this) + .then(getRunTimeBuildPaths) .then(zipRequirements) .then(createLayers); } -module.exports = { layerRequirements }; +module.exports = { + layerRequirements +}; From 6f4dc94928731ad83f2d16fa37649882792028e9 Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Tue, 5 Feb 2019 23:30:41 -0800 Subject: [PATCH 080/328] default the compatible runtimes to the provider runtime. (If its undefined, serverless core will remove it when creating the layer.) --- lib/layer.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/layer.js b/lib/layer.js index d627fd6e..33233251 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -57,7 +57,8 @@ function createLayers() { this.serverless.service.layers['pythonRequirements'] = Object.assign({ artifact: path.join('.serverless', 'pythonRequirements.zip'), name: `${this.serverless.service.stage}-python-requirements`, - description: 'Python requirements generated by serverless-python-requirements.' + description: 'Python requirements generated by serverless-python-requirements.', + compatibleRuntimes: [this.serverless.service.provider.runtime], }, this.options.layer ); From 84edb812e8d026b1be7fc0915b18a1351ae2e7e6 Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Tue, 5 Feb 2019 23:35:28 -0800 Subject: [PATCH 081/328] fix formatting with prettier --- lib/layer.js | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/lib/layer.js b/lib/layer.js index 33233251..193856fb 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -2,10 +2,7 @@ const BbPromise = require('bluebird'); const fse = require('fs-extra'); const path = require('path'); const JSZip = require('jszip'); -const { - writeZip, - addTree -} = require('./zipTree'); +const { writeZip, addTree } = require('./zipTree'); BbPromise.promisifyAll(fse); @@ -17,10 +14,10 @@ function getRunTimeBuildPaths() { const runtimepaths = { 'python2.7': 'python', 'python3.6': path.join('python', 'lib', 'python3.6', 'site-packages'), - 'python3.7': path.join('python', 'lib', 'python3.7', 'site-packages'), + 'python3.7': path.join('python', 'lib', 'python3.7', 'site-packages') }; - let runtimes = [] + let runtimes = []; // Defer to Layer config first if (this.options.layer.compatibleRuntimes) { @@ -43,10 +40,13 @@ function getRunTimeBuildPaths() { */ function zipRequirements(runtimepaths) { const rootZip = new JSZip(); - const src = path.join('.serverless', 'requirements') + const src = path.join('.serverless', 'requirements'); - return BbPromise.each(runtimepaths, (runtimepath) => addTree(rootZip.folder(runtimepath), src)) - .then(() => writeZip(rootZip, path.join('.serverless', 'pythonRequirementsLayer.zip'))) + return BbPromise.each(runtimepaths, runtimepath => + addTree(rootZip.folder(runtimepath), src) + ).then(() => + writeZip(rootZip, path.join('.serverless', 'pythonRequirementsLayer.zip')) + ); } /** @@ -54,11 +54,13 @@ function zipRequirements(runtimepaths) { * @return {Promise} empty promise */ function createLayers() { - this.serverless.service.layers['pythonRequirements'] = Object.assign({ + this.serverless.service.layers['pythonRequirements'] = Object.assign( + { artifact: path.join('.serverless', 'pythonRequirements.zip'), name: `${this.serverless.service.stage}-python-requirements`, - description: 'Python requirements generated by serverless-python-requirements.', - compatibleRuntimes: 
[this.serverless.service.provider.runtime], + description: + 'Python requirements generated by serverless-python-requirements.', + compatibleRuntimes: [this.serverless.service.provider.runtime] }, this.options.layer ); From 0cb238d3154b23b0adfab87f993a9ddc1b61d0ea Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Tue, 5 Feb 2019 23:48:39 -0800 Subject: [PATCH 082/328] add name to contributors --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index a046eac2..f0297b66 100644 --- a/README.md +++ b/README.md @@ -455,3 +455,4 @@ zipinfo .serverless/xxx.zip * [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching * [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! * [@squaresurf](https://github.com/squaresurf) - adding usePoetry option + * [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support From 524971e05b36e34f71cd01f83031711e13d589a2 Mon Sep 17 00:00:00 2001 From: Hiroshi Toyama Date: Wed, 6 Feb 2019 19:39:32 +0900 Subject: [PATCH 083/328] fix sls invoke local not works. --- unzip_requirements.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/unzip_requirements.py b/unzip_requirements.py index 4e3b9e51..9ba3ad54 100644 --- a/unzip_requirements.py +++ b/unzip_requirements.py @@ -13,8 +13,9 @@ if os.path.exists(tempdir): shutil.rmtree(tempdir) - zip_requirements = os.path.join( - os.environ.get('LAMBDA_TASK_ROOT', os.getcwd()), '.requirements.zip') + default_lambda_task_root = os.environ.get('LAMBDA_TASK_ROOT', os.getcwd()) + lambda_task_root = os.getcwd() if os.environ.get('IS_LOCAL') == 'true' else default_lambda_task_root + zip_requirements = os.path.join(lambda_task_root, '.requirements.zip') zipfile.ZipFile(zip_requirements, 'r').extractall(tempdir) os.rename(tempdir, pkgdir) # Atomic From cb1631a40a5a6e2a9efe76271216601627ab144b Mon Sep 17 00:00:00 2001 From: Jonathan Petitcolas Date: Wed, 6 Feb 2019 19:47:10 +0100 Subject: [PATCH 084/328] Fix typo Not the biggest PR of my life, but if I can help! :) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f396dcb6..fb5988eb 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ This will automatically add the plugin to your project's `package.json` and the `serverless.yml`. That's all that's needed for basic use! The plugin will now bundle your python dependencies specified in your `requirements.txt` or `Pipfile` when you run `sls deploy`. -For a more in depth introduction on how to user this plugin, check out +For a more in depth introduction on how to use this plugin, check out [this post on the Serverless Blog](https://serverless.com/blog/serverless-python-packaging/) If you're on a mac, check out [these notes](#applebeersnake-mac-brew-installed-python-notes) about using python installed by brew. From 79c078fe72d22611157ef58c774477739fb7eab9 Mon Sep 17 00:00:00 2001 From: David Lawrence Date: Wed, 6 Feb 2019 20:25:07 -0800 Subject: [PATCH 085/328] always use "python" as the path in the layer. 
this works for python2.7, python3.6 and python3.7 --- lib/layer.js | 38 ++++---------------------------------- 1 file changed, 4 insertions(+), 34 deletions(-) diff --git a/lib/layer.js b/lib/layer.js index 193856fb..e0a57358 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -6,46 +6,17 @@ const { writeZip, addTree } = require('./zipTree'); BbPromise.promisifyAll(fse); -/** - * Get the paths for the compatible runtimes of the layer - * @param {string[]} list of runtime paths - */ -function getRunTimeBuildPaths() { - const runtimepaths = { - 'python2.7': 'python', - 'python3.6': path.join('python', 'lib', 'python3.6', 'site-packages'), - 'python3.7': path.join('python', 'lib', 'python3.7', 'site-packages') - }; - - let runtimes = []; - - // Defer to Layer config first - if (this.options.layer.compatibleRuntimes) { - runtimes = this.options.layer.compatibleRuntimes; - // If none provided, assume the provider runtime - } else if (this.serverless.service.provider.runtime) { - runtimes = [this.serverless.service.provider.runtime]; - // If still no runtime found, just assume latest python - } else { - runtimes = ['python3.7']; - } - - return BbPromise.resolve(runtimes.map(runtime => runtimepaths[runtime])); -} - /** * Zip up requirements to be used as layer package. - * @param {string[]} list of paths where the requirements should be put in the layer * @return {Promise} the JSZip object constructed. */ -function zipRequirements(runtimepaths) { +function zipRequirements() { const rootZip = new JSZip(); const src = path.join('.serverless', 'requirements'); + const runtimepath = 'python'; - return BbPromise.each(runtimepaths, runtimepath => - addTree(rootZip.folder(runtimepath), src) - ).then(() => - writeZip(rootZip, path.join('.serverless', 'pythonRequirementsLayer.zip')) + return addTree(rootZip.folder(runtimepath), src).then(() => + writeZip(rootZip, path.join('.serverless', 'pythonRequirements.zip')) ); } @@ -80,7 +51,6 @@ function layerRequirements() { this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); return BbPromise.bind(this) - .then(getRunTimeBuildPaths) .then(zipRequirements) .then(createLayers); } From b99897202d3a69ac596674007f269974768bc5bc Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 7 Feb 2019 16:01:14 -0500 Subject: [PATCH 086/328] service name and actually set stage name in layer name --- lib/layer.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/layer.js b/lib/layer.js index e0a57358..f91ae737 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -28,7 +28,7 @@ function createLayers() { this.serverless.service.layers['pythonRequirements'] = Object.assign( { artifact: path.join('.serverless', 'pythonRequirements.zip'), - name: `${this.serverless.service.stage}-python-requirements`, + name: `${this.serverless.service.service}-${this.serverless.providers.aws.getStage()}-python-requirements`, description: 'Python requirements generated by serverless-python-requirements.', compatibleRuntimes: [this.serverless.service.provider.runtime] From 56904e770a54b8622f68feb8a1e23822cf4f4287 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Thu, 7 Feb 2019 16:38:10 -0500 Subject: [PATCH 087/328] prettier --- lib/layer.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/layer.js b/lib/layer.js index f91ae737..06c70871 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -28,7 +28,9 @@ function createLayers() { this.serverless.service.layers['pythonRequirements'] = Object.assign( { artifact: path.join('.serverless', 
'pythonRequirements.zip'), - name: `${this.serverless.service.service}-${this.serverless.providers.aws.getStage()}-python-requirements`, + name: `${ + this.serverless.service.service + }-${this.serverless.providers.aws.getStage()}-python-requirements`, description: 'Python requirements generated by serverless-python-requirements.', compatibleRuntimes: [this.serverless.service.provider.runtime] From 0ed802171470a16bbf4cce59734feb315a62ee01 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 11 Feb 2019 11:12:32 -0500 Subject: [PATCH 088/328] bump version! --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index b56b3037..eb372906 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.2.5", + "version": "4.3.0", "engines": { "node": ">=6.0" }, @@ -76,4 +76,4 @@ "singleQuote": true, "parser": "babylon" } -} \ No newline at end of file +} From bc50d364149115cf5ab898cdf409bac274aea319 Mon Sep 17 00:00:00 2001 From: tacincorporated Date: Fri, 22 Feb 2019 15:30:35 -0800 Subject: [PATCH 089/328] Support editable packages with Pipfile. Remove editableFlag from .serverless/requirements.txt when using Pipfile. --- lib/pipenv.js | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/lib/pipenv.js b/lib/pipenv.js index f131620c..6718844c 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -1,6 +1,7 @@ const fse = require('fs-extra'); const path = require('path'); const { spawnSync } = require('child_process'); +const { EOL } = require('os'); /** * pipenv install @@ -36,8 +37,24 @@ function pipfileToRequirements() { fse.ensureDirSync(path.join(this.servicePath, '.serverless')); fse.writeFileSync( path.join(this.servicePath, '.serverless/requirements.txt'), - res.stdout + removeEditableFlagFromRequirementsString(res.stdout) ); } +/** + * + * @param requirementBuffer + * @returns Buffer with editable flags remove + */ +function removeEditableFlagFromRequirementsString(requirementBuffer) { + const flagStr = '-e '; + const lines = requirementBuffer.toString('utf8').split(EOL); + for (let i = 0; i < lines.length; i++) { + if (lines[i].startsWith(flagStr)) { + lines[i] = lines[i].substring(flagStr.length); + } + } + return Buffer.from(lines.join(EOL)); +} + module.exports = { pipfileToRequirements }; From 015279e7b4589345c12f8bea76f020ab7907859e Mon Sep 17 00:00:00 2001 From: Sergey Karayev Date: Thu, 28 Feb 2019 15:08:26 -0800 Subject: [PATCH 090/328] Update slim.js --- lib/slim.js | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/slim.js b/lib/slim.js index 965be7d0..ae9155fb 100644 --- a/lib/slim.js +++ b/lib/slim.js @@ -3,7 +3,12 @@ const glob = require('glob-all'); const fse = require('fs-extra'); const getStripMode = options => { - if (options.slim === false || options.slim === 'false') { + if ( + options.strip === false || + options.strip === 'false' || + options.slim === false || + options.slim === 'false' + ) { return 'skip'; } else if (options.dockerizePip) { return 'docker'; From 6960cd93b1370dec41005dabb9ae145532a94ed4 Mon Sep 17 00:00:00 2001 From: Sergey Karayev Date: Thu, 28 Feb 2019 15:12:56 -0800 Subject: [PATCH 091/328] Update README.md --- README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/README.md b/README.md index d1d03daa..e519289e 100644 --- a/README.md +++ b/README.md @@ -164,6 +164,16 @@ custom: This will remove all folders within the installed requirements that match the names 
in `slimPatterns` +#### Option not to strip binaries + +In some cases, stripping binaries leads to problems like "ELF load command address/offset not properly aligned", even when done in the Docker environment. You can still slim down the package without `*.so` files with +```yaml +custom: + pythonRequirements: + slim: true + strip: false +``` + ### Lamba Layer Another method for dealing with large dependencies is to put them into a [Lambda Layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html). From d5a434af833ce91be3e33eaccd8ca7d871b5a79c Mon Sep 17 00:00:00 2001 From: Jose Luis Date: Fri, 1 Mar 2019 10:56:51 +0100 Subject: [PATCH 092/328] Update layer.js Fix the issue -> TypeError: Cannot set property 'pythonRequirements' of undefined at ServerlessPythonRequirements.createLayers (/Users/alcaljos/Projects/GMR/gmr-imu-video-synch/node_modules/serverless-python-requirements/lib/layer.js:28:56) From previous event: Since the layers object seems to not be initialized. --- lib/layer.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/layer.js b/lib/layer.js index 06c70871..dac7aee3 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -25,6 +25,7 @@ function zipRequirements() { * @return {Promise} empty promise */ function createLayers() { + this.serverless.service.layers['pythonRequirements'] = {} this.serverless.service.layers['pythonRequirements'] = Object.assign( { artifact: path.join('.serverless', 'pythonRequirements.zip'), From 113916bc83f516ebef45c5e0fedb83e97a952950 Mon Sep 17 00:00:00 2001 From: Jose Luis Date: Sun, 3 Mar 2019 14:11:34 +0100 Subject: [PATCH 093/328] Update layer.js Check if it's null or undefifned --- lib/layer.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/layer.js b/lib/layer.js index dac7aee3..8ac0b413 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -25,7 +25,9 @@ function zipRequirements() { * @return {Promise} empty promise */ function createLayers() { - this.serverless.service.layers['pythonRequirements'] = {} + if(!this.serverless.service.layers['pythonRequirements']){ + this.serverless.service.layers['pythonRequirements'] = {} + } this.serverless.service.layers['pythonRequirements'] = Object.assign( { artifact: path.join('.serverless', 'pythonRequirements.zip'), From df5638c016b10bc3dabac7aab3afb133f9c6a09c Mon Sep 17 00:00:00 2001 From: Ashton Honnecke Date: Thu, 14 Mar 2019 13:59:01 -0600 Subject: [PATCH 094/328] cache spelling in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e519289e..11e8ae5d 100644 --- a/README.md +++ b/README.md @@ -459,7 +459,7 @@ zipinfo .serverless/xxx.zip `vendor` option * [@kichik](https://github.com/kichik) - Imposed windows & `noDeploy` support, switched to adding files straight to zip instead of creating symlinks, and - improved pip chache support when using docker. + improved pip cache support when using docker. 
* [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option * [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) * [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching From d705e83ac4738dc9b36a1e5da8a5534c94a8e489 Mon Sep 17 00:00:00 2001 From: sjoshi10 Date: Tue, 19 Mar 2019 11:42:26 -0400 Subject: [PATCH 095/328] Update package.json --- package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/package.json b/package.json index eb372906..b7aa016a 100644 --- a/package.json +++ b/package.json @@ -63,6 +63,7 @@ "md5-file": "^4.0.0", "rimraf": "^2.6.2", "shell-quote": "^1.6.1" + "sha256-file": "1.0.0" }, "eslintConfig": { "extends": "eslint:recommended", From 67306440bd3451368528f02b11155bc8e022cb27 Mon Sep 17 00:00:00 2001 From: sjoshi10 Date: Tue, 19 Mar 2019 11:43:30 -0400 Subject: [PATCH 096/328] Update package.json --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index b7aa016a..c9625698 100644 --- a/package.json +++ b/package.json @@ -62,7 +62,7 @@ "lodash.values": "^4.3.0", "md5-file": "^4.0.0", "rimraf": "^2.6.2", - "shell-quote": "^1.6.1" + "shell-quote": "^1.6.1", "sha256-file": "1.0.0" }, "eslintConfig": { From 4a4ee4de772a851854b65df85bdd41be945e8d23 Mon Sep 17 00:00:00 2001 From: sjoshi10 Date: Tue, 19 Mar 2019 11:45:49 -0400 Subject: [PATCH 097/328] Update shared.js --- lib/shared.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/shared.js b/lib/shared.js index fe48e0e0..b36fd7e3 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -1,9 +1,11 @@ const Appdir = require('appdirectory'); const rimraf = require('rimraf'); -const md5File = require('md5-file'); +//const md5File = require('md5-file'); const glob = require('glob-all'); const path = require('path'); const fse = require('fs-extra'); +const sha256File = require('sha256-file'); + /** * This helper will check if we're using static cache and have max @@ -101,7 +103,7 @@ function getUserCachePath(options) { * @return {string} */ function md5Path(fullpath) { - return md5File.sync(fullpath); + return sha256File(fullpath); } module.exports = { From be395cd2a7a36e50994949aaceed747f504efd7b Mon Sep 17 00:00:00 2001 From: sjoshi10 Date: Tue, 19 Mar 2019 12:00:55 -0400 Subject: [PATCH 098/328] Update pip.js use sha256 instead of md5 --- lib/pip.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index df107e23..3da432af 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -9,7 +9,7 @@ const { buildImage, getBindPath, getDockerUid } = require('./docker'); const { getStripCommand, getStripMode, deleteFiles } = require('./slim'); const { checkForAndDeleteMaxCacheVersions, - md5Path, + sha256Path, getRequirementsWorkingPath, getUserCachePath } = require('./shared'); @@ -474,7 +474,7 @@ function installRequirementsIfNeeded( } // Then generate our MD5 Sum of this requirements file to determine where it should "go" to and/or pull cache from - const reqChecksum = md5Path(slsReqsTxt); + const reqChecksum = sha256Path(slsReqsTxt); // Then figure out where this cache should be, if we're caching, if we're in a module, etc const workingReqsFolder = getRequirementsWorkingPath( From ae88c3de5b121b5934f14ee122aa77944ddf7a15 Mon Sep 17 00:00:00 2001 From: sjoshi10 Date: Tue, 19 Mar 2019 12:01:44 -0400 Subject: [PATCH 099/328] use sha256 instead of md5 --- lib/shared.js | 3 
+-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/shared.js b/lib/shared.js index b36fd7e3..1f8db9db 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -1,6 +1,5 @@ const Appdir = require('appdirectory'); const rimraf = require('rimraf'); -//const md5File = require('md5-file'); const glob = require('glob-all'); const path = require('path'); const fse = require('fs-extra'); @@ -102,7 +101,7 @@ function getUserCachePath(options) { * @param {string} fullpath * @return {string} */ -function md5Path(fullpath) { +function sha256Path(fullpath) { return sha256File(fullpath); } From b003462dd4a1d48e6d4bf90ce421f4d0b0912e0b Mon Sep 17 00:00:00 2001 From: sjoshi10 Date: Tue, 19 Mar 2019 12:02:10 -0400 Subject: [PATCH 100/328] Update shared.js --- lib/shared.js | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/shared.js b/lib/shared.js index 1f8db9db..4c3ef558 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -5,7 +5,6 @@ const path = require('path'); const fse = require('fs-extra'); const sha256File = require('sha256-file'); - /** * This helper will check if we're using static cache and have max * versions enabled and will delete older versions in a fifo fashion From 072caa13252db11c94ec81fd087ab40f0125b75c Mon Sep 17 00:00:00 2001 From: sjoshi10 Date: Tue, 19 Mar 2019 12:02:41 -0400 Subject: [PATCH 101/328] remove md5 --- package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/package.json b/package.json index c9625698..1178d436 100644 --- a/package.json +++ b/package.json @@ -60,7 +60,6 @@ "lodash.set": "^4.3.2", "lodash.uniqby": "^4.0.0", "lodash.values": "^4.3.0", - "md5-file": "^4.0.0", "rimraf": "^2.6.2", "shell-quote": "^1.6.1", "sha256-file": "1.0.0" From ae938b7c0947ca408cc6158ea1bd150bf635f67f Mon Sep 17 00:00:00 2001 From: sjoshi10 Date: Tue, 19 Mar 2019 12:18:02 -0400 Subject: [PATCH 102/328] update test to use sh256 --- test.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test.js b/test.js index 08dd3f68..7855ef47 100644 --- a/test.js +++ b/test.js @@ -14,7 +14,7 @@ const { const { quote } = require('shell-quote'); const { sep } = require('path'); -const { getUserCachePath, md5Path } = require('./lib/shared'); +const { getUserCachePath, sha256Path } = require('./lib/shared'); const initialWorkingDir = process.cwd(); @@ -1679,7 +1679,7 @@ test('py3.6 uses static and download cache', t => { npm(['i', path]); sls(['--useDownloadCache=true', '--useStaticCache=true', 'package']); const cachepath = getUserCachePath(); - const cacheFolderHash = md5Path('.serverless/requirements.txt'); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), 'http exists in download-cache' From 8197c759f8a1c3141b05e3bd5c6d40f7a22c4b7c Mon Sep 17 00:00:00 2001 From: sjoshi10 Date: Tue, 19 Mar 2019 12:22:23 -0400 Subject: [PATCH 103/328] remove md5 --- test.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test.js b/test.js index 7855ef47..2ec71976 100644 --- a/test.js +++ b/test.js @@ -1704,7 +1704,7 @@ test( 'package' ]); const cachepath = getUserCachePath(); - const cacheFolderHash = md5Path('.serverless/requirements.txt'); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), 'http exists in download-cache' @@ -1724,7 +1724,7 @@ test('py3.6 uses static cache', t => { npm(['i', path]); sls(['--useStaticCache=true', 'package']); const cachepath = 
getUserCachePath(); - const cacheFolderHash = md5Path('.serverless/requirements.txt'); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), 'flask exists in static-cache' @@ -1758,7 +1758,7 @@ test('py3.6 uses static cache with cacheLocation option', t => { npm(['i', path]); const cachepath = '.requirements-cache'; sls(['--useStaticCache=true', `--cacheLocation=${cachepath}`, 'package']); - const cacheFolderHash = md5Path('.serverless/requirements.txt'); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), 'flask exists in static-cache' @@ -1785,7 +1785,7 @@ test( 'package' ]); const cachepath = getUserCachePath(); - const cacheFolderHash = md5Path('.serverless/requirements.txt'); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), 'flask exists in static-cache' From 51e2e78c7e2f5b390d09766a45940542025d47df Mon Sep 17 00:00:00 2001 From: sjoshi10 Date: Tue, 19 Mar 2019 12:25:02 -0400 Subject: [PATCH 104/328] export sha256Path module --- lib/shared.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/shared.js b/lib/shared.js index 4c3ef558..34f61eb2 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -108,5 +108,5 @@ module.exports = { checkForAndDeleteMaxCacheVersions, getRequirementsWorkingPath, getUserCachePath, - md5Path + sha256Path }; From 68418ae3fafff5915db1a61d002f9d711f45cb0d Mon Sep 17 00:00:00 2001 From: Bradley Spink Date: Tue, 19 Mar 2019 22:52:07 +1000 Subject: [PATCH 105/328] Preserve binary file permissions in zip --- circle.yml | 2 ++ lib/inject.js | 7 ++++++- lib/zipTree.js | 19 ++++++++++++++----- test.js | 27 +++++++++++++++++++++++++-- 4 files changed, 47 insertions(+), 8 deletions(-) diff --git a/circle.yml b/circle.yml index 87341cbe..3957bc4f 100644 --- a/circle.yml +++ b/circle.yml @@ -23,6 +23,8 @@ jobs: sudo ./install.sh /usr/local # other deps - run: sudo apt -y update && sudo apt -y install python-pip python2.7 curl unzip + # upgrade python3.6 pip to latest + - run: sudo python3.6 -m pip install -U pip # instal pipenv - run: sudo python3.6 -m pip install pipenv pip-tools # install poetry diff --git a/lib/inject.js b/lib/inject.js index 1abbb531..dc30be79 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -37,7 +37,12 @@ function injectRequirements(requirementsPath, packagePath, options) { !noDeploy.has(relativeFile.split(/([-\\/]|\.py$|\.pyc$)/, 1)[0]) ) .map(([file, relativeFile]) => - zipFile(zip, relativeFile, fse.readFileAsync(file)) + Promise.all([file, relativeFile, fse.statAsync(file)]) + ) + .map(([file, relativeFile, fileStat]) => + zipFile(zip, relativeFile, fse.readFileAsync(file), { + unixPermissions: fileStat.mode + }) ) .then(() => writeZip(zip, packagePath)) ); diff --git a/lib/zipTree.js b/lib/zipTree.js index ea7a9df0..d45aded0 100644 --- a/lib/zipTree.js +++ b/lib/zipTree.js @@ -60,14 +60,23 @@ function writeZip(zip, targetPath) { * @param {string} zipPath the target path in the zip. * @param {Promise} bufferPromise a promise providing a nodebuffer. * @return {Promise} a promise providing the JSZip object. + * @param {object} fileOpts an object with the opts to save for the file in the zip. 
*/ -function zipFile(zip, zipPath, bufferPromise) { +function zipFile(zip, zipPath, bufferPromise, fileOpts) { return bufferPromise .then(buffer => - zip.file(zipPath, buffer, { - // necessary to get the same hash when zipping the same content - date: new Date(0) - }) + zip.file( + zipPath, + buffer, + Object.assign( + {}, + { + // necessary to get the same hash when zipping the same content + date: new Date(0) + }, + fileOpts + ) + ) ) .then(() => zip); } diff --git a/test.js b/test.js index 2ec71976..78f60aad 100644 --- a/test.js +++ b/test.js @@ -9,6 +9,7 @@ const { readFileSync, copySync, writeFileSync, + statSync, pathExistsSync } = require('fs-extra'); const { quote } = require('shell-quote'); @@ -875,6 +876,12 @@ test( 'foobar has retained its executable file permissions' ); + const flaskPerm = statSync('.serverless/requirements/bin/flask').mode; + t.true( + zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm, + 'bin/flask has retained its executable file permissions' + ); + t.end(); }, { skip: process.platform === 'win32' } @@ -1566,15 +1573,23 @@ test( npm(['i', path]); sls(['package']); - const zipfiles_hello = listZipFilesWithMetaData('.serverless/hello1.zip'); + const zipfiles_hello1 = listZipFilesWithMetaData('.serverless/hello1.zip'); t.true( - zipfiles_hello['module1/foobar'].unixPermissions + zipfiles_hello1['module1/foobar'].unixPermissions .toString(8) .slice(3, 6) === perm, 'foobar has retained its executable file permissions' ); + const zipfiles_hello2 = listZipFilesWithMetaData('.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip'); + const flaskPerm = statSync('.serverless/module2/requirements/bin/flask').mode; + + t.true( + zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, + 'bin/flask has retained its executable file permissions' + ); + t.end(); }, { skip: process.platform === 'win32' } @@ -1601,6 +1616,14 @@ test( 'foobar has retained its executable file permissions' ); + const zipfiles_hello2 = listZipFilesWithMetaData('.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip'); + const flaskPerm = statSync('.serverless/module2/requirements/bin/flask').mode; + + t.true( + zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, + 'bin/flask has retained its executable file permissions' + ); + t.end(); }, { skip: !canUseDocker() || process.platform === 'win32' } From dfe425bda53f6c51314f54bf439be2542466599f Mon Sep 17 00:00:00 2001 From: Daniel Paul Searles Date: Sun, 14 Apr 2019 06:59:02 -0700 Subject: [PATCH 106/328] Parse pyproject.toml to determine poetry use. Why: * The pyproject.toml file is for more than just poetry. This change addresses the need by: * Add test for use case where pyproject.toml is only used for black. * Add toml parser to package.json. * Update poetry.js to base poetry usage on whether or not the build system requires it. 
Fixes: #324 --- lib/poetry.js | 32 +++++++++++++++++++--- package.json | 5 ++-- test.js | 11 ++++++++ tests/non_build_pyproject/.gitignore | 22 +++++++++++++++ tests/non_build_pyproject/handler.py | 5 ++++ tests/non_build_pyproject/package.json | 14 ++++++++++ tests/non_build_pyproject/pyproject.toml | 10 +++++++ tests/non_build_pyproject/requirements.txt | 2 ++ tests/non_build_pyproject/serverless.yml | 21 ++++++++++++++ 9 files changed, 116 insertions(+), 6 deletions(-) create mode 100644 tests/non_build_pyproject/.gitignore create mode 100644 tests/non_build_pyproject/handler.py create mode 100644 tests/non_build_pyproject/package.json create mode 100644 tests/non_build_pyproject/pyproject.toml create mode 100644 tests/non_build_pyproject/requirements.txt create mode 100644 tests/non_build_pyproject/serverless.yml diff --git a/lib/poetry.js b/lib/poetry.js index 0ddf8da7..53b521e0 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -1,15 +1,14 @@ +const fs = require('fs'); const fse = require('fs-extra'); const path = require('path'); const { spawnSync } = require('child_process'); +const tomlParse = require('@iarna/toml/parse-string'); /** * poetry install */ function pyprojectTomlToRequirements() { - if ( - !this.options.usePoetry || - !fse.existsSync(path.join(this.servicePath, 'pyproject.toml')) - ) { + if (!this.options.usePoetry || !isPoetryProject(this.servicePath)) { return; } @@ -40,4 +39,29 @@ function pyprojectTomlToRequirements() { ); } +/** + * Check if pyproject.toml file exists and is a poetry project. + */ +function isPoetryProject(servicePath) { + const pyprojectPath = path.join(servicePath, 'pyproject.toml'); + + if (!fse.existsSync(pyprojectPath)) { + return false; + } + + const pyprojectToml = fs.readFileSync(pyprojectPath); + const pyproject = tomlParse(pyprojectToml); + + const buildSystemReqs = + (pyproject['build-system'] && pyproject['build-system']['requires']) || []; + + for (var i = 0; i < buildSystemReqs.length; i++) { + if (buildSystemReqs[i].startsWith('poetry')) { + return true; + } + } + + return false; +} + module.exports = { pyprojectTomlToRequirements }; diff --git a/package.json b/package.json index 1178d436..e105fead 100644 --- a/package.json +++ b/package.json @@ -50,6 +50,7 @@ "tape": "*" }, "dependencies": { + "@iarna/toml": "^2.2.3", "appdirectory": "^0.1.0", "bluebird": "^3.0.6", "fs-extra": "^7.0.0", @@ -61,8 +62,8 @@ "lodash.uniqby": "^4.0.0", "lodash.values": "^4.3.0", "rimraf": "^2.6.2", - "shell-quote": "^1.6.1", - "sha256-file": "1.0.0" + "sha256-file": "1.0.0", + "shell-quote": "^1.6.1" }, "eslintConfig": { "extends": "eslint:recommended", diff --git a/test.js b/test.js index 78f60aad..55f083c1 100644 --- a/test.js +++ b/test.js @@ -712,6 +712,17 @@ test("pipenv py3.6 doesn't package bottle with noDeploy option", t => { t.end(); }); +test('non build pyproject.toml uses requirements.txt', t => { + process.chdir('tests/non_build_pyproject'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + test('poetry py3.6 can package flask with default options', t => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); diff --git a/tests/non_build_pyproject/.gitignore b/tests/non_build_pyproject/.gitignore new file mode 100644 index 00000000..3c2369dc --- /dev/null +++ 
b/tests/non_build_pyproject/.gitignore @@ -0,0 +1,22 @@ +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Serverless +.serverless +.requirements +unzip_requirements.py diff --git a/tests/non_build_pyproject/handler.py b/tests/non_build_pyproject/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/non_build_pyproject/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json new file mode 100644 index 00000000..d13fd651 --- /dev/null +++ b/tests/non_build_pyproject/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-4.2.5.tgz" + } +} diff --git a/tests/non_build_pyproject/pyproject.toml b/tests/non_build_pyproject/pyproject.toml new file mode 100644 index 00000000..41932632 --- /dev/null +++ b/tests/non_build_pyproject/pyproject.toml @@ -0,0 +1,10 @@ +[tool.black] +line-length = 79 +py36 = true +skip-string-normalization = true +exclude = ''' +/( + \.serverless + | node_modules +)/ +''' diff --git a/tests/non_build_pyproject/requirements.txt b/tests/non_build_pyproject/requirements.txt new file mode 100644 index 00000000..aa55d989 --- /dev/null +++ b/tests/non_build_pyproject/requirements.txt @@ -0,0 +1,2 @@ +flask +boto3 diff --git a/tests/non_build_pyproject/serverless.yml b/tests/non_build_pyproject/serverless.yml new file mode 100644 index 00000000..973132c8 --- /dev/null +++ b/tests/non_build_pyproject/serverless.yml @@ -0,0 +1,21 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.6 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + usePoetry: false + +package: + exclude: + - '**/*' + include: + - handler.py + +functions: + hello: + handler: handler.hello From f3831d19954791567227824fe37ce486eb7e83a8 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 15 Apr 2019 14:11:34 -0400 Subject: [PATCH 107/328] Fix layers being undefined --- lib/layer.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/layer.js b/lib/layer.js index 8ac0b413..3376d1d1 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -25,8 +25,8 @@ function zipRequirements() { * @return {Promise} empty promise */ function createLayers() { - if(!this.serverless.service.layers['pythonRequirements']){ - this.serverless.service.layers['pythonRequirements'] = {} + if(!this.serverless.service.layers){ + this.serverless.service.layers = {} } this.serverless.service.layers['pythonRequirements'] = Object.assign( { From 82fa76d95a333ba55c506ba66b3f45ad4b220231 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 15 Apr 2019 14:31:37 -0400 Subject: [PATCH 108/328] lint --- lib/layer.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/layer.js b/lib/layer.js index 3376d1d1..ccc632d0 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -25,7 +25,7 @@ function zipRequirements() { * @return {Promise} empty promise */ function createLayers() { - if(!this.serverless.service.layers){ + if (!this.serverless.service.layers){ this.serverless.service.layers = {} } 
this.serverless.service.layers['pythonRequirements'] = Object.assign( From 3fee07bf86789b801b3ec12742c8a9f0f4b8d930 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 15 Apr 2019 14:40:03 -0400 Subject: [PATCH 109/328] morelint --- lib/layer.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/layer.js b/lib/layer.js index ccc632d0..54c904db 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -25,7 +25,7 @@ function zipRequirements() { * @return {Promise} empty promise */ function createLayers() { - if (!this.serverless.service.layers){ + if (!this.serverless.service.layers) { this.serverless.service.layers = {} } this.serverless.service.layers['pythonRequirements'] = Object.assign( From cce5227f212281b3c44af71a5de43ba62e14997c Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Tue, 16 Apr 2019 11:35:05 -0400 Subject: [PATCH 110/328] really fix lint --- lib/layer.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/layer.js b/lib/layer.js index 54c904db..f512ded1 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -26,7 +26,7 @@ function zipRequirements() { */ function createLayers() { if (!this.serverless.service.layers) { - this.serverless.service.layers = {} + this.serverless.service.layers = {}; } this.serverless.service.layers['pythonRequirements'] = Object.assign( { From e3775512efdd43e0b8558144d1ed01c994244aad Mon Sep 17 00:00:00 2001 From: Matthew Amos <35695811+blackmamo@users.noreply.github.com> Date: Wed, 17 Apr 2019 16:23:31 +0100 Subject: [PATCH 111/328] Junction don't copy Copying the files on windows is very slow and can be avoided by using junctions --- lib/pip.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index 3da432af..323a92fb 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -610,10 +610,9 @@ function installAllRequirements() { !fse.existsSync(symlinkPath) && reqsInstalledAt != symlinkPath ) { - // Windows can't symlink so we have to copy on Windows, - // it's not as fast, but at least it works + // Windows can't symlink so we have to use junction on Windows if (process.platform == 'win32') { - fse.copySync(reqsInstalledAt, symlinkPath); + fse.symlink(reqsInstalledAt, symlinkPath, 'junction'); } else { fse.symlink(reqsInstalledAt, symlinkPath); } From 62b391179d1df4cc6f38b3e8b76d10aa3694e7b4 Mon Sep 17 00:00:00 2001 From: Matthew Amos <35695811+blackmamo@users.noreply.github.com> Date: Wed, 17 Apr 2019 16:37:03 +0100 Subject: [PATCH 112/328] Advice to reduce build times This advice saved me a lot of time --- README.md | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 11e8ae5d..8ef2d991 100644 --- a/README.md +++ b/README.md @@ -174,7 +174,7 @@ custom: strip: false ``` -### Lamba Layer +### Lambda Layer Another method for dealing with large dependencies is to put them into a [Lambda Layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html). Simply add the `layer` option to the configuration. @@ -439,6 +439,35 @@ zipinfo .serverless/xxx.zip ``` (If you can't see the library, you might need to adjust your package include/exclude configuration in `serverless.yml`.) 
+## Optimising packaging time + +If you wish to exclude most of the files in your project, and only include the source files of your lambdas and their dependencies you may well use an approach like this: + +```yaml +package: + individually: false + include: + - "./src/lambda_one/**" + - "./src/lambda_two/**" + exclude: + - "**" +``` + +This will be very slow. Serverless adds a default '"**"' include. If you are using the 'cacheLocation' parameter to this plugin, this will result in all of the cached files' names being loaded and then subsequently discarded because of the exclude pattern. To avoid this happening you can add a negated include pattern, as is observed in https://github.com/serverless/serverless/pull/5825. + +Use this approach instead: + +```yaml +package: + individually: false + include: + - "!./**" + - "./src/lambda_one/**" + - "./src/lambda_two/**" + exclude: + - "**" +``` + ## Contributors * [@dschep](https://github.com/dschep) - Lead developer & maintainer * [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes From a018f5d8ffb42e1d3da20ecefda4b4519fca19fe Mon Sep 17 00:00:00 2001 From: Matthew Amos <35695811+blackmamo@users.noreply.github.com> Date: Wed, 17 Apr 2019 16:38:00 +0100 Subject: [PATCH 113/328] Wrong quotes --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8ef2d991..37254fd3 100644 --- a/README.md +++ b/README.md @@ -453,7 +453,7 @@ package: - "**" ``` -This will be very slow. Serverless adds a default '"**"' include. If you are using the 'cacheLocation' parameter to this plugin, this will result in all of the cached files' names being loaded and then subsequently discarded because of the exclude pattern. To avoid this happening you can add a negated include pattern, as is observed in https://github.com/serverless/serverless/pull/5825. +This will be very slow. Serverless adds a default `"**"` include. If you are using the `cacheLocation` parameter to this plugin, this will result in all of the cached files' names being loaded and then subsequently discarded because of the exclude pattern. To avoid this happening you can add a negated include pattern, as is observed in https://github.com/serverless/serverless/pull/5825. Use this approach instead: From 3a59c92e65ba705f4ddcc0c06bdc439dfb0e9b31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Guido=20Garc=C3=ADa?= <673420+palmerabollo@users.noreply.github.com> Date: Sun, 21 Apr 2019 12:27:46 +0200 Subject: [PATCH 114/328] docs: fix minor typo in readme --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 11e8ae5d..9d8c62e7 100644 --- a/README.md +++ b/README.md @@ -174,7 +174,7 @@ custom: strip: false ``` -### Lamba Layer +### Lambda Layer Another method for dealing with large dependencies is to put them into a [Lambda Layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html). Simply add the `layer` option to the configuration. 
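For illustration, a minimal sketch of the configuration that sentence refers to (not taken from any patch in this series): `layer: true` falls back to the plugin defaults set up in `lib/layer.js` above, and attaching the generated layer to a function uses a CloudFormation reference — the `PythonRequirementsLambdaLayer` logical ID below assumes the framework's usual `<layerName>LambdaLayer` naming for the `pythonRequirements` layer, so adjust it if your setup differs.

```yaml
# Sketch only: enable the requirements layer and attach it to one function.
custom:
  pythonRequirements:
    layer: true  # or an object overriding name/description/compatibleRuntimes

functions:
  hello:
    handler: handler.hello
    layers:
      # Assumes the <layerName>LambdaLayer logical ID convention
      - Ref: PythonRequirementsLambdaLayer
```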
@@ -198,7 +198,7 @@ custom: pythonRequirements: layer: name: ${self:provider.stage}-layerName - description: Python requirements lamba layer + description: Python requirements lambda layer compatibleRuntimes: - python3.7 licenseInfo: GPLv3 From e6fa10d4272c243bedcf7d6ce00e61fb27602776 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" Date: Tue, 23 Apr 2019 10:14:33 +0000 Subject: [PATCH 115/328] Update is-wsl requirement from ^1.1.0 to ^2.0.0 Updates the requirements on [is-wsl](https://github.com/sindresorhus/is-wsl) to permit the latest version. - [Release notes](https://github.com/sindresorhus/is-wsl/releases) - [Commits](https://github.com/sindresorhus/is-wsl/compare/v1.1.0...v2.0.0) Signed-off-by: dependabot[bot] --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index e105fead..4fc4f0de 100644 --- a/package.json +++ b/package.json @@ -55,7 +55,7 @@ "bluebird": "^3.0.6", "fs-extra": "^7.0.0", "glob-all": "^3.1.0", - "is-wsl": "^1.1.0", + "is-wsl": "^2.0.0", "jszip": "^3.1.0", "lodash.get": "^4.4.2", "lodash.set": "^4.3.2", From d74b1c0dfbb2a90510b643b78684ba24b99845cb Mon Sep 17 00:00:00 2001 From: Chili Johnson Date: Mon, 6 May 2019 16:57:48 -0700 Subject: [PATCH 116/328] Adds a `dockerRunCmdExtraArgs` option for adding arbitrary arguments to the dockerized pip step. --- index.js | 1 + lib/pip.js | 7 +++++++ 2 files changed, 8 insertions(+) diff --git a/index.js b/index.js index b5200285..b4f79199 100644 --- a/index.js +++ b/index.js @@ -48,6 +48,7 @@ class ServerlessPythonRequirements { dockerImage: null, dockerFile: null, dockerEnv: false, + dockerRunCmdExtraArgs: [], useStaticCache: false, useDownloadCache: false, cacheLocation: false, diff --git a/lib/pip.js b/lib/pip.js index 323a92fb..40c32894 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -259,6 +259,13 @@ function installRequirements(targetFolder, serverless, options) { // Use same user so --cache-dir works dockerCmd.push('-u', getDockerUid(bindPath)); } + + if (Array.isArray(options.dockerRunCmdExtraArgs)) { + dockerCmd.push(...options.dockerRunCmdExtraArgs); + } else { + throw new Error('dockerRunCmdExtraArgs option must be an array'); + } + dockerCmd.push(dockerImage); } From e29f4e6eddd681f7a6af68d0c362b8daf7eb1cc8 Mon Sep 17 00:00:00 2001 From: Chili Johnson Date: Mon, 6 May 2019 17:50:54 -0700 Subject: [PATCH 117/328] Adds a `dockerBuildCmdExtraArgs` option for adding arbitrary arguments to the Docker build step. --- index.js | 1 + lib/docker.js | 14 ++++++++++++-- lib/pip.js | 2 +- 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/index.js b/index.js index b4f79199..6f98bcb4 100644 --- a/index.js +++ b/index.js @@ -48,6 +48,7 @@ class ServerlessPythonRequirements { dockerImage: null, dockerFile: null, dockerEnv: false, + dockerBuildCmdExtraArgs: [], dockerRunCmdExtraArgs: [], useStaticCache: false, useDownloadCache: false, diff --git a/lib/docker.js b/lib/docker.js index db2e81b5..46bbe028 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -25,11 +25,21 @@ function dockerCommand(options) { /** * Build the custom Docker image * @param {string} dockerFile + * @param {string[]} extraArgs * @return {string} The name of the built docker image. 
*/ -function buildImage(dockerFile) { +function buildImage(dockerFile, extraArgs) { const imageName = 'sls-py-reqs-custom'; - const options = ['build', '-f', dockerFile, '-t', imageName, '.']; + const options = ['build', '-f', dockerFile, '-t', imageName]; + + if (Array.isArray(extraArgs)) { + options.push(...extraArgs); + } else { + throw new Error('dockerRunCmdExtraArgs option must be an array'); + } + + options.push('.'); + dockerCommand(options); return imageName; } diff --git a/lib/pip.js b/lib/pip.js index 40c32894..e939f7a7 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -180,7 +180,7 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log( `Building custom docker image from ${options.dockerFile}...` ); - dockerImage = buildImage(options.dockerFile); + dockerImage = buildImage(options.dockerFile, options.dockerBuildCmdExtraArgs); } else { dockerImage = options.dockerImage; } From 978c3a6c1dc77527ee56ced2c617da4f0e98372b Mon Sep 17 00:00:00 2001 From: Chili Johnson Date: Tue, 7 May 2019 11:42:36 -0700 Subject: [PATCH 118/328] Updates README with example use of extra Docker args. --- README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/README.md b/README.md index 4f34a38f..690bb46b 100644 --- a/README.md +++ b/README.md @@ -270,6 +270,18 @@ custom: - --compile ``` +### Extra Docker arguments + +You can specify extra arguments to be passed to [docker build](https://docs.docker.com/engine/reference/commandline/build/) during the build step, and [docker run](https://docs.docker.com/engine/reference/run/) during the dockerized pip install step: + +```yaml +custom: + pythonRequirements: + dockerizePip: true + dockerBuildCmdExtraArgs: ["--build-arg", "MY_GREAT_ARG=123"] + dockerRunCmdExtraArgs: ["-v", "${env:PWD}:/my-app"] +``` + ### Customize requirements file name [Some `pip` workflows involve using requirements files not named From e92624b9e4a2c7d6f8f506e0b0c9491a519e1f36 Mon Sep 17 00:00:00 2001 From: Chili Johnson Date: Tue, 7 May 2019 12:20:31 -0700 Subject: [PATCH 119/328] Runs lib/pip.js through formatting. 
--- lib/pip.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/pip.js b/lib/pip.js index e939f7a7..45c72059 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -180,7 +180,10 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log( `Building custom docker image from ${options.dockerFile}...` ); - dockerImage = buildImage(options.dockerFile, options.dockerBuildCmdExtraArgs); + dockerImage = buildImage( + options.dockerFile, + options.dockerBuildCmdExtraArgs + ); } else { dockerImage = options.dockerImage; } From 529dfec03277355d325fde11d6fca19191b436a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Padilla?= Date: Thu, 24 Jan 2019 08:48:15 -0500 Subject: [PATCH 120/328] Default `noDeploy` to an empty list --- appveyor.yml | 1 + index.js | 12 +---------- test.js | 31 ++++++++++------------------ tests/base/requirements-w-hashes.txt | 2 ++ 4 files changed, 15 insertions(+), 31 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index f938aeb2..43b0e373 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,5 +1,6 @@ version: '{build}' init: + - cmd: python -m pip install -U pip - cmd: pip install pipenv - cmd: pip install poetry==1.0.0a2 - ps: npm i -g serverless diff --git a/index.js b/index.js index b5200285..7b29332e 100644 --- a/index.js +++ b/index.js @@ -53,17 +53,7 @@ class ServerlessPythonRequirements { cacheLocation: false, staticCacheMaxVersions: 0, pipCmdExtraArgs: [], - noDeploy: [ - 'boto3', - 'botocore', - 'docutils', - 'jmespath', - 'python-dateutil', - 's3transfer', - 'six', - 'pip', - 'setuptools' - ], + noDeploy: [], vendor: '' }, (this.serverless.service.custom && diff --git a/test.js b/test.js index 55f083c1..3017227a 100644 --- a/test.js +++ b/test.js @@ -128,7 +128,7 @@ test('default pythonBin can package flask with default options', t => { sls(['package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }); @@ -139,7 +139,7 @@ test('py3.6 can package flask with default options', t => { sls([`--pythonBin=${getPythonBin(3)}`, 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }); @@ -248,9 +248,9 @@ test( const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false( + t.true( zipfiles.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged' + 'boto3 is packaged' ); t.end(); }, @@ -379,7 +379,7 @@ test('py2.7 can package flask with default options', t => { sls([`--pythonBin=${getPythonBin(2)}`, 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }); @@ -556,10 +556,7 @@ test( const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false( - 
zipfiles.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged' - ); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, { skip: !canUseDocker() } @@ -631,7 +628,7 @@ test('pipenv py3.6 can package flask with default options', t => { sls(['package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }); @@ -838,11 +835,11 @@ test('py3.6 can package lambda-decorators using vendor option', t => { sls([`--vendor=./vendor`, 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.true( zipfiles.includes(`lambda_decorators.py`), 'lambda_decorators.py is packaged' ); - t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); t.end(); }); @@ -867,15 +864,12 @@ test( const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.true( zipfiles.includes(`lambda_decorators.py`), 'lambda_decorators.py is packaged' ); t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); - t.false( - zipfiles.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged' - ); const zipfiles_with_metadata = listZipFilesWithMetaData( '.serverless/sls-py-req-test.zip' @@ -906,7 +900,7 @@ test('py3.6 can package flask in a project with a space in it', t => { sls(['package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }); @@ -920,10 +914,7 @@ test( sls(['--dockerizePip=true', 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false( - zipfiles.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged' - ); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, { skip: !canUseDocker() } diff --git a/tests/base/requirements-w-hashes.txt b/tests/base/requirements-w-hashes.txt index 018a6fb9..97c3d3a5 100644 --- a/tests/base/requirements-w-hashes.txt +++ b/tests/base/requirements-w-hashes.txt @@ -87,3 +87,5 @@ werkzeug==0.14.1 \ --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \ --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b \ # via flask +futures==3.2.0 \ + --hash=sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1 \ From 86401cd5d202ffec5caee23f1f9a401dfabc18bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Padilla?= Date: Mon, 20 May 2019 16:36:36 -0400 Subject: [PATCH 121/328] Update README.md --- README.md | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/README.md b/README.md index 4f34a38f..dba4f2fa 100644 --- a/README.md +++ b/README.md @@ -207,18 +207,7 @@ custom: ``` ## Omitting Packages You can omit a package from deployment with the `noDeploy` option. 
Note that -dependencies of omitted packages must explicitly be omitted too. By default, -the following packages are omitted as they are already installed on Lambda: - - * boto3 - * botocore - * docutils - * jmespath - * pip - * python-dateutil - * s3transfer - * setuptools - * six +dependencies of omitted packages must explicitly be omitted too. This example makes it instead omit pytest: ```yaml From 10a1ec48eb11f1df633982747a3a7472bc779aa6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Padilla?= Date: Wed, 22 May 2019 18:06:55 -0400 Subject: [PATCH 122/328] Implement suggested changes --- README.md | 8 -------- tests/base/requirements-w-hashes.txt | 2 -- 2 files changed, 10 deletions(-) diff --git a/README.md b/README.md index dba4f2fa..b1f0054e 100644 --- a/README.md +++ b/README.md @@ -217,14 +217,6 @@ custom: - pytest ``` -To include the default omitted packages, set the `noDeploy` option to an empty -list: -```yaml -custom: - pythonRequirements: - noDeploy: [] -``` - ## Extra Config Options ### Caching You can enable two kinds of caching with this plugin which are currently both DISABLED by default. First, a download cache that will cache downloads that pip needs to compile the packages. And second, a what we call "static caching" which caches output of pip after compiling everything for your requirements file. Since generally requirements.txt files rarely change, you will often see large amounts of speed improvements when enabling the static cache feature. These caches will be shared between all your projects if no custom cacheLocation is specified (see below). diff --git a/tests/base/requirements-w-hashes.txt b/tests/base/requirements-w-hashes.txt index 97c3d3a5..018a6fb9 100644 --- a/tests/base/requirements-w-hashes.txt +++ b/tests/base/requirements-w-hashes.txt @@ -87,5 +87,3 @@ werkzeug==0.14.1 \ --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \ --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b \ # via flask -futures==3.2.0 \ - --hash=sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1 \ From 1f8039fe6504de97b20d8b3036f7e795955b10c1 Mon Sep 17 00:00:00 2001 From: Aryik Date: Wed, 22 May 2019 18:20:55 -0500 Subject: [PATCH 123/328] Insert zipped packages at beginning of path #367 --- unzip_requirements.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/unzip_requirements.py b/unzip_requirements.py index 9ba3ad54..68f907fa 100644 --- a/unzip_requirements.py +++ b/unzip_requirements.py @@ -6,7 +6,8 @@ pkgdir = '/tmp/sls-py-req' -sys.path.append(pkgdir) +# We want our path to look like [working_dir, serverless_requirements, ...] 
+sys.path.insert(1, pkgdir) if not os.path.exists(pkgdir): tempdir = '/tmp/_temp-sls-py-req' From d540fa67b569785759cf925ee476b900f035f8a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Padilla?= Date: Wed, 22 May 2019 19:13:26 -0400 Subject: [PATCH 124/328] Fix tests --- test.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test.js b/test.js index 3017227a..e39d8ad5 100644 --- a/test.js +++ b/test.js @@ -716,7 +716,7 @@ test('non build pyproject.toml uses requirements.txt', t => { sls(['package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }); @@ -727,7 +727,7 @@ test('poetry py3.6 can package flask with default options', t => { sls(['package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }); From 2be2e9776a0c2aa8eec7976917ecf99217f889bd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" Date: Thu, 23 May 2019 11:35:45 +0000 Subject: [PATCH 125/328] Bump urllib3 from 1.24.1 to 1.24.2 in /tests/base Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.1 to 1.24.2. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/master/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/1.24.1...1.24.2) --- tests/base/requirements-w-hashes.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/base/requirements-w-hashes.txt b/tests/base/requirements-w-hashes.txt index 018a6fb9..92576e43 100644 --- a/tests/base/requirements-w-hashes.txt +++ b/tests/base/requirements-w-hashes.txt @@ -79,9 +79,9 @@ six==1.11.0 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb \ # via python-dateutil -urllib3==1.24.1 \ - --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \ - --hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 \ +urllib3==1.24.2 \ + --hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \ + --hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3 \ # via botocore werkzeug==0.14.1 \ --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \ From f8161121fdf19396dc5d73e4bc7ba8c822518673 Mon Sep 17 00:00:00 2001 From: khamaileon Date: Mon, 8 Jul 2019 20:57:56 +0200 Subject: [PATCH 126/328] Extends the default image instead of creating a fresh one --- README.md | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 0f6c763a..27f8c4eb 100644 --- a/README.md +++ b/README.md @@ -386,19 +386,10 @@ For usage of `dockerizePip` on Windows do Step 1 only if running serverless on w Some Python packages require extra OS dependencies to build successfully. 
To deal with this, replace the default image (`lambci/lambda:python3.6`) with a `Dockerfile` like: ```dockerfile -# AWS Lambda execution environment is based on Amazon Linux 1 -FROM amazonlinux:1 - -# Install Python 3.6 -RUN yum -y install python36 python36-pip +FROM lambci/lambda:build-python3.6 # Install your dependencies -RUN curl -s https://bootstrap.pypa.io/get-pip.py | python3 -RUN yum -y install python3-devel mysql-devel gcc - -# Set the same WORKDIR as default image -RUN mkdir /var/task -WORKDIR /var/task +RUN yum -y install mysql-devel ``` Then update your `serverless.yml`: From 0ecd2be6b7d5797a9c94126fb211101d7fb812f8 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Tue, 23 Jul 2019 14:30:20 -0400 Subject: [PATCH 127/328] update notice about required sls version. closes #374 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0f6c763a..7f576b64 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ A Serverless v1.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. -**Requires Serverless >= v1.12** +**Requires Serverless >= v1.34** ## Install From 52ae41f90a089333ee31bc871d63c82a84e791b6 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Tue, 23 Jul 2019 20:51:29 -0700 Subject: [PATCH 128/328] Fix lint failure on index.js Run prettier to so lint passes and CI is green. --- index.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/index.js b/index.js index b726ff45..a3759688 100644 --- a/index.js +++ b/index.js @@ -85,9 +85,7 @@ class ServerlessPythonRequirements { ); } else if (!options.dockerFile) { // If no dockerFile is provided, use default image - const defaultImage = `lambci/lambda:build-${ - this.serverless.service.provider.runtime - }`; + const defaultImage = `lambci/lambda:build-${this.serverless.service.provider.runtime}`; options.dockerImage = options.dockerImage || defaultImage; } if (options.layer) { From 9391368c231931181ec162403b27417f860179bd Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Wed, 22 May 2019 10:28:40 -0700 Subject: [PATCH 129/328] Add support to copy extra files from the Docker image to the package This allows native OS libraries (`*.so` files) to be copied to the serverless archive to be available at runtime. This introduces the new option `dockerExtraFiles`, a list of paths on the Docker image to copy. For example, the `mysqlclient` package requires `libmysqlclient.so.1020`. One can now add an item to the `dockerExtraFiles` option in their `serverless.yml`: pythonRequirements: dockerExtraFiles: - /usr/lib64/mysql57/libmysqlclient.so.1020 This file will be available at runtime in the Lambda function. This removes the need to manage these files in a more manual way and avoids the need to commit binary files one's repository. --- README.md | 25 ++++++++++++------------- index.js | 1 + lib/pip.js | 13 ++++++++++--- 3 files changed, 23 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 9ad43898..29ad57af 100644 --- a/README.md +++ b/README.md @@ -402,26 +402,25 @@ custom: ## Native Code Dependencies During Runtime -Some Python packages require extra OS libraries (`*.so` files) at runtime. You need to manually include these files in the root directory of your Serverless package. The simplest way to do this is to commit the files to your repository: +Some Python packages require extra OS libraries (`*.so` files) at runtime. 
You need to manually include these files in the root directory of your Serverless package. The simplest way to do this is to use the `dockerExtraFiles` option. -For instance, the `mysqlclient` package requires `libmysqlclient.so.1020`. If you use the Dockerfile from the previous section, you can extract this file from the builder Dockerfile: +For instance, the `mysqlclient` package requires `libmysqlclient.so.1020`. If you use the Dockerfile from the previous section, add an item to the `dockerExtraFiles` option in your `serverless.yml`: -1. Extract the library: -```bash -docker run --rm -v "$(pwd):/var/task" sls-py-reqs-custom cp -v /usr/lib64/mysql57/libmysqlclient.so.1020 . -``` -(If you get the error `Unable to find image 'sls-py-reqs-custom:latest' locally`, run `sls package` to build the image.) -2. Commit to your repo: -```bash -git add libmysqlclient.so.1020 -git commit -m "Add libmysqlclient.so.1020" +```yaml +custom: + pythonRequirements: + dockerExtraFiles: + - /usr/lib64/mysql57/libmysqlclient.so.1020 ``` -3. Verify the library gets included in your package: + +Then verify the library gets included in your package: + ```bash sls package zipinfo .serverless/xxx.zip ``` -(If you can't see the library, you might need to adjust your package include/exclude configuration in `serverless.yml`.) + +If you can't see the library, you might need to adjust your package include/exclude configuration in `serverless.yml`. ## Optimising packaging time diff --git a/index.js b/index.js index a3759688..ec373387 100644 --- a/index.js +++ b/index.js @@ -50,6 +50,7 @@ class ServerlessPythonRequirements { dockerEnv: false, dockerBuildCmdExtraArgs: [], dockerRunCmdExtraArgs: [], + dockerExtraFiles: [], useStaticCache: false, useDownloadCache: false, cacheLocation: false, diff --git a/lib/pip.js b/lib/pip.js index 45c72059..a6c80449 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -249,6 +249,16 @@ function installRequirements(targetFolder, serverless, options) { `${process.getuid()}:${process.getgid()}`, '/var/task' ]); + } else { + // Use same user so --cache-dir works + dockerCmd.push('-u', getDockerUid(bindPath)); + } + + for (let path of options.dockerExtraFiles) { + pipCmds.push(['cp', path, '/var/task/']); + } + + if (process.platform === 'linux') { if (options.useDownloadCache) { // Set the ownership of the download cache dir back to user pipCmds.push([ @@ -258,9 +268,6 @@ function installRequirements(targetFolder, serverless, options) { dockerDownloadCacheDir ]); } - } else { - // Use same user so --cache-dir works - dockerCmd.push('-u', getDockerUid(bindPath)); } if (Array.isArray(options.dockerRunCmdExtraArgs)) { From c2353af09b08a2fb12294136ccc75909dc513625 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Tue, 23 Jul 2019 20:55:20 -0700 Subject: [PATCH 130/328] Set prettier parser to "babel" to fix lint warning Fixes warning of the form: [warn] { parser: "babylon" } is deprecated; we now treat it as { parser: "babel" }. 
--- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 4fc4f0de..f06e7174 100644 --- a/package.json +++ b/package.json @@ -75,6 +75,6 @@ "prettier": { "semi": true, "singleQuote": true, - "parser": "babylon" + "parser": "babel" } } From 63eca2b10f8f6d23a544d0e0a946de503e9e37eb Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Tue, 23 Jul 2019 21:12:13 -0700 Subject: [PATCH 131/328] Install a version of eslint compatible with node.js 6 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 4fc4f0de..5cc0fd67 100644 --- a/package.json +++ b/package.json @@ -43,7 +43,7 @@ "format": "prettier --write index.js lib/*.js test.js" }, "devDependencies": { - "eslint": "*", + "eslint": "^5.16.0", "prettier": "*", "cross-spawn": "*", "deasync-promise": "*", From 21508d3c3f41d5d741709ed794d4d413041c03ee Mon Sep 17 00:00:00 2001 From: Jayson Reis Date: Fri, 17 May 2019 12:58:04 +0200 Subject: [PATCH 132/328] Make sure that pip errors show stdout and stderr --- lib/pip.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pip.js b/lib/pip.js index a6c80449..f1d3b1fb 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -317,7 +317,7 @@ function installRequirements(targetFolder, serverless, options) { throw res.error; } if (res.status !== 0) { - throw new Error(res.stderr); + throw new Error(`STDOUT: ${res.stdout}\n\nSTDERR: ${res.stderr}`); } }); // If enabled slimming, delete files in slimPatterns From 16d0fa1aa0abe6c8561bd5630c0762d5d16ae842 Mon Sep 17 00:00:00 2001 From: Jayson Reis Date: Fri, 17 May 2019 13:00:30 +0200 Subject: [PATCH 133/328] Make tests break with git dependencies --- test.js | 3 ++- tests/poetry/poetry.lock | 54 ++++++++++++++++++++----------------- tests/poetry/pyproject.toml | 2 +- 3 files changed, 32 insertions(+), 27 deletions(-) diff --git a/test.js b/test.js index e39d8ad5..22ffb4eb 100644 --- a/test.js +++ b/test.js @@ -727,7 +727,8 @@ test('poetry py3.6 can package flask with default options', t => { sls(['package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true(zipfiles.includes(`bottle${sep}__init__.py`), 'bottle is packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); t.end(); }); diff --git a/tests/poetry/poetry.lock b/tests/poetry/poetry.lock index 2cc3a756..61c3b9d0 100644 --- a/tests/poetry/poetry.lock +++ b/tests/poetry/poetry.lock @@ -4,12 +4,12 @@ description = "The AWS SDK for Python" name = "boto3" optional = false python-versions = "*" -version = "1.9.80" +version = "1.9.150" [package.dependencies] -botocore = ">=1.12.80,<1.13.0" +botocore = ">=1.12.150,<1.13.0" jmespath = ">=0.7.1,<1.0.0" -s3transfer = ">=0.1.10,<0.2.0" +s3transfer = ">=0.2.0,<0.3.0" [[package]] category = "main" @@ -17,7 +17,7 @@ description = "Low-level, data-driven core of boto 3." 
name = "botocore" optional = false python-versions = "*" -version = "1.12.80" +version = "1.12.150" [package.dependencies] docutils = ">=0.10" @@ -39,6 +39,10 @@ optional = false python-versions = "*" version = "0.12.16" +[package.source] +reference = "0.12.16" +type = "git" +url = "ssh://git@github.com/bottlepy/bottle.git" [[package]] category = "main" description = "Composable command line interface toolkit" @@ -88,7 +92,7 @@ description = "A small but fast and easy to use stand-alone template engine writ name = "jinja2" optional = false python-versions = "*" -version = "2.10" +version = "2.10.1" [package.dependencies] MarkupSafe = ">=0.23" @@ -102,7 +106,7 @@ description = "JSON Matching Expressions" name = "jmespath" optional = false python-versions = "*" -version = "0.9.3" +version = "0.9.4" [[package]] category = "main" @@ -110,7 +114,7 @@ description = "Safely add untrusted strings to HTML/XML markup." name = "markupsafe" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.1.0" +version = "1.1.1" [[package]] category = "main" @@ -119,7 +123,7 @@ marker = "python_version >= \"2.7\"" name = "python-dateutil" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.7.5" +version = "2.8.0" [package.dependencies] six = ">=1.5" @@ -130,10 +134,10 @@ description = "An Amazon S3 Transfer Manager" name = "s3transfer" optional = false python-versions = "*" -version = "0.1.13" +version = "0.2.0" [package.dependencies] -botocore = ">=1.3.0,<2.0.0" +botocore = ">=1.12.36,<2.0.0" [[package]] category = "main" @@ -151,7 +155,7 @@ marker = "python_version >= \"3.4\"" name = "urllib3" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" -version = "1.24.1" +version = "1.24.3" [package.extras] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] @@ -162,31 +166,31 @@ category = "main" description = "The comprehensive WSGI web application library." 
name = "werkzeug" optional = false -python-versions = "*" -version = "0.14.1" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.15.4" [package.extras] -dev = ["coverage", "pytest", "sphinx", "tox"] +dev = ["pytest", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinx-issues"] termcolor = ["termcolor"] watchdog = ["watchdog"] [metadata] -content-hash = "fa5a641a2c19871b5899fbc700d6375250a5d2e327832a012296af6a31c8093a" +content-hash = "3b275a1ce61e115344002b4303657cbb2e2c65833b1682486ed890529a434654" python-versions = "^3.6" [metadata.hashes] -boto3 = ["122603b00f8c458236d1bd09850bdea56fc45f271e75ca38e66dbce37f72cada", "99ec19dc4f0aa8a8354db7baebe1ff57bd18aeb6a539b28693b2e8ca8dc3d85b"] -botocore = ["76a2969278250e010253ddf514f4b54eaa7d2b1430f682874c3c2ab92f25a96d", "8c579bac9abeaff1270a7a25964b01d3db1367f42fa5f826e1303ec8a4b13cef"] -bottle = ["9c310da61e7df2b6ac257d8a90811899ccb3a9743e77e947101072a2e3186726", "ca43beafbdccabbe31b758a4f34d1e44985a9b9539516775208b2b0f903eafa0"] +boto3 = ["1253809000b3b9020c6dde3e8b0b75e6c02547e2760656d8bccc40fd2a7284a6", "e32a1a324ddfb652dedd550fd288ad85cc8d448ed19315f39fbe7b6171a30dc8"] +botocore = ["946fd5e85378c3c597d6b9f495706576a0fc0000b216e30346ed7ee796ff50c8", "f82e44af499a8f806c363ab4e26df82195b5b190c4169ccfbfbc98e55aa22bf7"] +bottle = [] click = ["2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"] docutils = ["02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", "51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274", "7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6"] flask = ["2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48", "a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05"] itsdangerous = ["321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"] -jinja2 = ["74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", "f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"] -jmespath = ["6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64", "f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63"] -markupsafe = ["048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432", "130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b", "19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9", "1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af", "1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834", "1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd", "1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d", "31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7", "3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b", "4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3", "525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c", "52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2", "52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7", "5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36", "5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1", "5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e", "7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1", "83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c", 
"857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856", "98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550", "bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492", "d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672", "e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401", "edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6", "efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6", "f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c", "f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd", "fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1"] -python-dateutil = ["063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", "88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"] -s3transfer = ["90dc18e028989c609146e241ea153250be451e05ecc0c2832565231dacdf59c1", "c7a9ec356982d5e9ab2d4b46391a7d6a950e2b04c472419f5fdec70cc0ada72f"] +jinja2 = ["065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", "14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b"] +jmespath = ["3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", "bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"] +markupsafe = ["00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", "09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", "09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", "1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", "24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", "29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", "43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", "46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", "500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", "535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", "62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", "6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", "717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", "79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", "7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", "88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", "8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", "98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", "9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", "9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", "ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", "b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", "b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", "b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", "ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", "c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", "cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", "e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"] +python-dateutil = ["7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", "c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"] +s3transfer = ["7b9ad3213bff7d357f888e0fab5101b56fa1a0548ee77d121c3a3dbfbef4cb2e", "f23d5cb7d862b104401d9021fc82e5fa0e0cf57b7660a1331425aab0c691d021"] six = 
["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] -urllib3 = ["61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", "de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"] -werkzeug = ["c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c", "d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b"] +urllib3 = ["2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", "a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"] +werkzeug = ["865856ebb55c4dcd0630cdd8f3331a1847a819dda7e8c750d3db6f2aa6c0209c", "a0b915f0815982fb2a09161cb8f31708052d0951c3ba433ccc5e1aa276507ca6"] diff --git a/tests/poetry/pyproject.toml b/tests/poetry/pyproject.toml index 20e85d92..66c5f76d 100644 --- a/tests/poetry/pyproject.toml +++ b/tests/poetry/pyproject.toml @@ -7,7 +7,7 @@ authors = ["Your Name "] [tool.poetry.dependencies] python = "^3.6" Flask = "^1.0" -bottle = "^0.12.16" +bottle = {git = "ssh://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} boto3 = "^1.9" [tool.poetry.dev-dependencies] From 5dc20c5909123da7039d60d5794de1d6ca42a721 Mon Sep 17 00:00:00 2001 From: Jayson Reis Date: Fri, 17 May 2019 13:10:26 +0200 Subject: [PATCH 134/328] Fix poetry generated requirements.txt for git dependencies --- lib/poetry.js | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index 53b521e0..7caeda8b 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -32,9 +32,19 @@ function pyprojectTomlToRequirements() { if (res.status !== 0) { throw new Error(res.stderr); } + + const editableLine = new RegExp(/^-e /gm); + const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); + const requirementsContents = fse.readFileSync(sourceRequirements, { encoding: 'utf-8' }); + + if (requirementsContents.match(editableLine)) { + this.serverless.cli.log('The generated file contains -e lines, removing them...'); + fse.writeFileSync(sourceRequirements, requirementsContents.replace(editableLine, '')) + } + fse.ensureDirSync(path.join(this.servicePath, '.serverless')); fse.moveSync( - path.join(this.servicePath, 'requirements.txt'), + sourceRequirements, path.join(this.servicePath, '.serverless', 'requirements.txt') ); } From 023033eaa98bec6bd6674286bd2cb57ec28042ea Mon Sep 17 00:00:00 2001 From: Jayson Reis Date: Fri, 17 May 2019 13:10:59 +0200 Subject: [PATCH 135/328] Fix wrongly expected file in test --- test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test.js b/test.js index 22ffb4eb..0b4af9a8 100644 --- a/test.js +++ b/test.js @@ -727,7 +727,7 @@ test('poetry py3.6 can package flask with default options', t => { sls(['package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`bottle${sep}__init__.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); t.end(); }); From 558612149dbe5d964c47754d6206bf846661b2dc Mon Sep 17 00:00:00 2001 From: Jayson Reis Date: Fri, 17 May 2019 13:15:22 +0200 Subject: [PATCH 136/328] Add disclaimer about poetry and git dependencies --- README.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/README.md b/README.md index 29ad57af..6e212ef7 100644 --- a/README.md +++ b/README.md @@ -110,6 +110,21 @@ custom: 
usePoetry: false ``` +### Poetry with git dependencies +Poetry by default generates the exported requirements.txt file with `-e` and that breaks pip with `-t` parameter +(used to install all requirements in a specific folder). In order to fix that we remove all `-e ` from the generated file but, +for that to work you need to add the git dependencies in a specific way. + +Instead of: +```toml +[tool.poetry.dependencies] +bottle = {git = "git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +``` +it has to be: +```toml +[tool.poetry.dependencies] +bottle = {git = "ssh://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +``` ## Dealing with Lambda's size limitations To help deal with potentially large dependencies (for example: `numpy`, `scipy` From 5029548889636cd9817d54eecbada6cbdec5ce7b Mon Sep 17 00:00:00 2001 From: Jayson Reis Date: Fri, 17 May 2019 13:41:45 +0200 Subject: [PATCH 137/328] Run prettier on poetry.js --- lib/poetry.js | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/lib/poetry.js b/lib/poetry.js index 7caeda8b..bc1e82fe 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -35,11 +35,18 @@ function pyprojectTomlToRequirements() { const editableLine = new RegExp(/^-e /gm); const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); - const requirementsContents = fse.readFileSync(sourceRequirements, { encoding: 'utf-8' }); - + const requirementsContents = fse.readFileSync(sourceRequirements, { + encoding: 'utf-8' + }); + if (requirementsContents.match(editableLine)) { - this.serverless.cli.log('The generated file contains -e lines, removing them...'); - fse.writeFileSync(sourceRequirements, requirementsContents.replace(editableLine, '')) + this.serverless.cli.log( + 'The generated file contains -e lines, removing them...' + ); + fse.writeFileSync( + sourceRequirements, + requirementsContents.replace(editableLine, '') + ); } fse.ensureDirSync(path.join(this.servicePath, '.serverless')); From eea44ec5a5169bd13125a5423786b29ad433402a Mon Sep 17 00:00:00 2001 From: Thomas Pansino Date: Tue, 30 Jul 2019 18:38:44 -0700 Subject: [PATCH 138/328] Rename editableLine -> editableFlag --- lib/poetry.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/poetry.js b/lib/poetry.js index bc1e82fe..ed014521 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -33,19 +33,19 @@ function pyprojectTomlToRequirements() { throw new Error(res.stderr); } - const editableLine = new RegExp(/^-e /gm); + const editableFlag = new RegExp(/^-e /gm); const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); const requirementsContents = fse.readFileSync(sourceRequirements, { encoding: 'utf-8' }); - if (requirementsContents.match(editableLine)) { + if (requirementsContents.match(editableFlag)) { this.serverless.cli.log( 'The generated file contains -e lines, removing them...' 
); fse.writeFileSync( sourceRequirements, - requirementsContents.replace(editableLine, '') + requirementsContents.replace(editableFlag, '') ); } From a10d29a565ca06e4852d2b4c7ee47e987676a6ad Mon Sep 17 00:00:00 2001 From: Thomas Pansino Date: Tue, 30 Jul 2019 18:40:56 -0700 Subject: [PATCH 139/328] Switch to https proto for poetry testing and re-lock --- tests/poetry/poetry.lock | 45 +++++++++++++++++++------------------ tests/poetry/pyproject.toml | 2 +- 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/tests/poetry/poetry.lock b/tests/poetry/poetry.lock index 61c3b9d0..5d8eccc7 100644 --- a/tests/poetry/poetry.lock +++ b/tests/poetry/poetry.lock @@ -4,10 +4,10 @@ description = "The AWS SDK for Python" name = "boto3" optional = false python-versions = "*" -version = "1.9.150" +version = "1.9.199" [package.dependencies] -botocore = ">=1.12.150,<1.13.0" +botocore = ">=1.12.199,<1.13.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.2.0,<0.3.0" @@ -17,10 +17,10 @@ description = "Low-level, data-driven core of boto 3." name = "botocore" optional = false python-versions = "*" -version = "1.12.150" +version = "1.12.199" [package.dependencies] -docutils = ">=0.10" +docutils = ">=0.10,<0.15" jmespath = ">=0.7.1,<1.0.0" [package.dependencies.python-dateutil] @@ -29,7 +29,7 @@ version = ">=2.1,<3.0.0" [package.dependencies.urllib3] python = ">=3.4" -version = ">=1.20,<1.25" +version = ">=1.20,<1.26" [[package]] category = "main" @@ -42,7 +42,7 @@ version = "0.12.16" [package.source] reference = "0.12.16" type = "git" -url = "ssh://git@github.com/bottlepy/bottle.git" +url = "https://git@github.com/bottlepy/bottle.git" [[package]] category = "main" description = "Composable command line interface toolkit" @@ -64,18 +64,18 @@ category = "main" description = "A simple framework for building complex web applications." name = "flask" optional = false -python-versions = "*" -version = "1.0.2" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "1.1.1" [package.dependencies] -Jinja2 = ">=2.10" -Werkzeug = ">=0.14" +Jinja2 = ">=2.10.1" +Werkzeug = ">=0.15" click = ">=5.1" itsdangerous = ">=0.24" [package.extras] -dev = ["pytest (>=3)", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet"] -docs = ["sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet"] +dev = ["pytest", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet", "sphinx-issues"] +docs = ["sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet", "sphinx-issues"] dotenv = ["python-dotenv"] [[package]] @@ -134,7 +134,7 @@ description = "An Amazon S3 Transfer Manager" name = "s3transfer" optional = false python-versions = "*" -version = "0.2.0" +version = "0.2.1" [package.dependencies] botocore = ">=1.12.36,<2.0.0" @@ -155,9 +155,10 @@ marker = "python_version >= \"3.4\"" name = "urllib3" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" -version = "1.24.3" +version = "1.25.3" [package.extras] +brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] @@ -167,7 +168,7 @@ description = "The comprehensive WSGI web application library." 
name = "werkzeug" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.15.4" +version = "0.15.5" [package.extras] dev = ["pytest", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinx-issues"] @@ -175,22 +176,22 @@ termcolor = ["termcolor"] watchdog = ["watchdog"] [metadata] -content-hash = "3b275a1ce61e115344002b4303657cbb2e2c65833b1682486ed890529a434654" +content-hash = "76568ab19fae4155c453c30ac4362880dac17b449380f5a2dd017dc8a4ec4a2f" python-versions = "^3.6" [metadata.hashes] -boto3 = ["1253809000b3b9020c6dde3e8b0b75e6c02547e2760656d8bccc40fd2a7284a6", "e32a1a324ddfb652dedd550fd288ad85cc8d448ed19315f39fbe7b6171a30dc8"] -botocore = ["946fd5e85378c3c597d6b9f495706576a0fc0000b216e30346ed7ee796ff50c8", "f82e44af499a8f806c363ab4e26df82195b5b190c4169ccfbfbc98e55aa22bf7"] +boto3 = ["0cd4a3e158f40eedb54b36b3fbe60d135db74a245f0ca8eead1af2eb6d46a649", "68e9eba6f846cf8e01973ec565afdb1adfb9612b531c15bb5c5524394db4df5b"] +botocore = ["25d87047241b7b775443570c0e790ca952f9f7491d4d6472430a4b006383a257", "e4729c1acaa936d4c5c948a18d279f92bbf61fad9b5fb03942c753ec405e427d"] bottle = [] click = ["2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"] docutils = ["02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", "51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274", "7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6"] -flask = ["2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48", "a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05"] +flask = ["13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52", "45eb5a6fd193d6cf7e0cf5d8a5b31f83d5faae0293695626f539a823e93b13f6"] itsdangerous = ["321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"] jinja2 = ["065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", "14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b"] jmespath = ["3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", "bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"] markupsafe = ["00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", "09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", "09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", "1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", "24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", "29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", "43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", "46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", "500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", "535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", "62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", "6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", "717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", "79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", "7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", "88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", "8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", "98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", "9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", 
"9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", "ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", "b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", "b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", "b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", "ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", "c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", "cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", "e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"] python-dateutil = ["7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", "c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"] -s3transfer = ["7b9ad3213bff7d357f888e0fab5101b56fa1a0548ee77d121c3a3dbfbef4cb2e", "f23d5cb7d862b104401d9021fc82e5fa0e0cf57b7660a1331425aab0c691d021"] +s3transfer = ["6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d", "b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba"] six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] -urllib3 = ["2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", "a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"] -werkzeug = ["865856ebb55c4dcd0630cdd8f3331a1847a819dda7e8c750d3db6f2aa6c0209c", "a0b915f0815982fb2a09161cb8f31708052d0951c3ba433ccc5e1aa276507ca6"] +urllib3 = ["b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1", "dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"] +werkzeug = ["87ae4e5b5366da2347eb3116c0e6c681a0e939a33b2805e2c0cbd282664932c4", "a13b74dd3c45f758d4ebdb224be8f1ab8ef58b3c0ffc1783a8c7d9f4f50227e6"] diff --git a/tests/poetry/pyproject.toml b/tests/poetry/pyproject.toml index 66c5f76d..b813968a 100644 --- a/tests/poetry/pyproject.toml +++ b/tests/poetry/pyproject.toml @@ -7,7 +7,7 @@ authors = ["Your Name "] [tool.poetry.dependencies] python = "^3.6" Flask = "^1.0" -bottle = {git = "ssh://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} boto3 = "^1.9" [tool.poetry.dev-dependencies] From da2db0ba160ee6061f160fd5a18a153f8bd51c71 Mon Sep 17 00:00:00 2001 From: Thomas Pansino Date: Tue, 30 Jul 2019 18:47:51 -0700 Subject: [PATCH 140/328] README update --- README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 6e212ef7..f583ec46 100644 --- a/README.md +++ b/README.md @@ -120,7 +120,12 @@ Instead of: [tool.poetry.dependencies] bottle = {git = "git@github.com/bottlepy/bottle.git", tag = "0.12.16"} ``` -it has to be: +Use: +```toml +[tool.poetry.dependencies] +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +``` +Or, if you have an SSH key configured: ```toml [tool.poetry.dependencies] bottle = {git = "ssh://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} From 9981ba2d3d917b3bc072b9404bf6ed8c828e5cec Mon Sep 17 00:00:00 2001 From: Thomas Pansino Date: Tue, 30 Jul 2019 19:28:00 -0700 Subject: [PATCH 141/328] Fix seemingly incorrect test assertion --- test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test.js b/test.js index 0b4af9a8..b30c3c38 100644 --- a/test.js +++ b/test.js @@ -728,7 +728,7 @@ test('poetry py3.6 can package flask with default options', t => { const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); - t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }); From 56b9c1dc0f72c61bca6a6f7123bb2fd6431e092b Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 7 Aug 2019 08:21:55 -0400 Subject: [PATCH 142/328] version bump! --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 50dee31a..9f5ddfcb 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "4.3.0", + "version": "5.0.0", "engines": { "node": ">=6.0" }, From ded33985c3a6ea8a53922c17936ff0d04f3bc3d5 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Wed, 7 Aug 2019 09:14:49 -0400 Subject: [PATCH 143/328] use caches by default! --- index.js | 4 ++-- test.js | 25 +++++++++---------------- tests/base/package.json | 2 +- tests/base/serverless.yml | 4 ++-- tests/individually/package.json | 2 +- tests/non_build_pyproject/package.json | 2 +- tests/pipenv/package.json | 2 +- tests/poetry/package.json | 2 +- 8 files changed, 18 insertions(+), 25 deletions(-) diff --git a/index.js b/index.js index ec373387..b444a9b6 100644 --- a/index.js +++ b/index.js @@ -51,8 +51,8 @@ class ServerlessPythonRequirements { dockerBuildCmdExtraArgs: [], dockerRunCmdExtraArgs: [], dockerExtraFiles: [], - useStaticCache: false, - useDownloadCache: false, + useStaticCache: true, + useDownloadCache: true, cacheLocation: false, staticCacheMaxVersions: 0, pipCmdExtraArgs: [], diff --git a/test.js b/test.js index b30c3c38..d26aef74 100644 --- a/test.js +++ b/test.js @@ -1632,11 +1632,11 @@ test( { skip: !canUseDocker() || process.platform === 'win32' } ); -test('py3.6 uses download cache with useDownloadCache option', t => { +test('py3.6 uses download cache by default option', t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--useDownloadCache=true', 'package']); + sls(['package']); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), @@ -1645,12 +1645,11 @@ test('py3.6 uses download cache with useDownloadCache option', t => { t.end(); }); -test('py3.6 uses download cache with cacheLocation option', t => { +test('py3.6 uses download cache by defaul option', t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ - '--useDownloadCache=true', '--cacheLocation=.requirements-cache', 'package' ]); @@ -1667,7 +1666,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--useDownloadCache=true', '--dockerizePip=true', 'package']); + sls(['--dockerizePip=true', 'package']); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), @@ -1679,13 +1678,12 @@ test( ); test( - 'py3.6 uses download cache with dockerizePip + cacheLocation option', + 'py3.6 uses download cache with dockerizePip by default option', t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ - '--useDownloadCache=true', '--dockerizePip=true', '--cacheLocation=.requirements-cache', 'package' @@ -1703,7 +1701,7 @@ test('py3.6 uses static and download cache', t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - 
sls(['--useDownloadCache=true', '--useStaticCache=true', 'package']); + sls(['package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( @@ -1724,8 +1722,6 @@ test( const path = npm(['pack', '../..']); npm(['i', path]); sls([ - '--useDownloadCache=true', - '--useStaticCache=true', '--dockerizePip=true', 'package' ]); @@ -1748,7 +1744,7 @@ test('py3.6 uses static cache', t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--useStaticCache=true', 'package']); + sls(['package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( @@ -1767,7 +1763,7 @@ test('py3.6 uses static cache', t => { `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); - sls(['--useStaticCache=true', 'package']); + sls(['package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true( @@ -1783,7 +1779,7 @@ test('py3.6 uses static cache with cacheLocation option', t => { const path = npm(['pack', '../..']); npm(['i', path]); const cachepath = '.requirements-cache'; - sls(['--useStaticCache=true', `--cacheLocation=${cachepath}`, 'package']); + sls([`--cacheLocation=${cachepath}`, 'package']); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), @@ -1805,7 +1801,6 @@ test( const path = npm(['pack', '../..']); npm(['i', path]); sls([ - '--useStaticCache=true', '--dockerizePip=true', '--slim=true', 'package' @@ -1829,7 +1824,6 @@ test( 'injected new file into static cache folder' ); sls([ - '--useStaticCache=true', '--dockerizePip=true', '--slim=true', 'package' @@ -1858,7 +1852,6 @@ test( const path = npm(['pack', '../..']); npm(['i', path]); sls([ - '--useDownloadCache=true', '--dockerizePip=true', '--slim=true', 'package' diff --git a/tests/base/package.json b/tests/base/package.json index d13fd651..db241a8c 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.2.5.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.0.0.tgz" } } diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 1684ab2f..b356c514 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -25,8 +25,8 @@ custom: zip: false dockerizePip: false individually: false - useStaticCache: false - useDownloadCache: false + useStaticCache: true + useDownloadCache: true package: individually: ${opt:individually, self:custom.defaults.individually} diff --git a/tests/individually/package.json b/tests/individually/package.json index d13fd651..db241a8c 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.2.5.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.0.0.tgz" } } diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json index d13fd651..db241a8c 100644 --- a/tests/non_build_pyproject/package.json +++ b/tests/non_build_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": 
"file:serverless-python-requirements-4.2.5.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.0.0.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index d13fd651..db241a8c 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.2.5.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.0.0.tgz" } } diff --git a/tests/poetry/package.json b/tests/poetry/package.json index d13fd651..db241a8c 100644 --- a/tests/poetry/package.json +++ b/tests/poetry/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.2.5.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.0.0.tgz" } } From ec15f50fea2112c7c9be47fae286075dbde2b37c Mon Sep 17 00:00:00 2001 From: mokamoto12 Date: Sun, 11 Aug 2019 01:40:47 +0900 Subject: [PATCH 144/328] prevent file hash from changing --- lib/inject.js | 5 +++-- test.js | 12 ++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/lib/inject.js b/lib/inject.js index dc30be79..e17800c4 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -39,9 +39,10 @@ function injectRequirements(requirementsPath, packagePath, options) { .map(([file, relativeFile]) => Promise.all([file, relativeFile, fse.statAsync(file)]) ) - .map(([file, relativeFile, fileStat]) => + .mapSeries(([file, relativeFile, fileStat]) => zipFile(zip, relativeFile, fse.readFileAsync(file), { - unixPermissions: fileStat.mode + unixPermissions: fileStat.mode, + createFolders: false }) ) .then(() => writeZip(zip, packagePath)) diff --git a/test.js b/test.js index d26aef74..97b2d26b 100644 --- a/test.js +++ b/test.js @@ -2,6 +2,7 @@ const crossSpawn = require('cross-spawn'); const deasync = require('deasync-promise'); const glob = require('glob-all'); const JSZip = require('jszip'); +const sha256File = require('sha256-file'); const tape = require('tape'); const { chmodSync, @@ -132,6 +133,17 @@ test('default pythonBin can package flask with default options', t => { t.end(); }); +test('py3.6 packages have the same hash', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const fileHash = sha256File('.serverless/sls-py-req-test.zip'); + sls(['package']); + t.equal(sha256File('.serverless/sls-py-req-test.zip'), fileHash, 'packages have the same hash'); + t.end(); +}); + test('py3.6 can package flask with default options', t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); From 835ef2f45f340c2beb85683eae4d2a48d34b6f9e Mon Sep 17 00:00:00 2001 From: mokamoto12 Date: Sun, 11 Aug 2019 02:53:31 +0900 Subject: [PATCH 145/328] poetry 1.0.0b1 export prints to stdout by default --- lib/poetry.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/poetry.js b/lib/poetry.js index ed014521..d18ac064 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -35,9 +35,7 @@ function pyprojectTomlToRequirements() { const editableFlag = new RegExp(/^-e /gm); const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); - const requirementsContents = fse.readFileSync(sourceRequirements, { - encoding: 'utf-8' - }); + const requirementsContents = res.stdout.toString(); if (requirementsContents.match(editableFlag)) { this.serverless.cli.log( From 
05d1819a90f4fe050ef8c77f20adafda5314620e Mon Sep 17 00:00:00 2001 From: mokamoto12 Date: Sun, 11 Aug 2019 21:50:28 +0900 Subject: [PATCH 146/328] fix resolving requirements recursively --- lib/pip.js | 27 ++++++++++++++++++--- test.js | 23 ++++++++++++++++++ tests/base/requirements-common.txt | 1 + tests/base/requirements-w-nested.txt | 3 +++ tests/individually/module1/requirements.txt | 1 + tests/individually/requirements-common.txt | 1 + 6 files changed, 52 insertions(+), 4 deletions(-) create mode 100644 tests/base/requirements-common.txt create mode 100644 tests/base/requirements-w-nested.txt create mode 100644 tests/individually/requirements-common.txt diff --git a/lib/pip.js b/lib/pip.js index f1d3b1fb..caf48f3e 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -338,6 +338,28 @@ function dockerPathForWin(path) { return path; } } + +/** + * get requirements from requirements.txt + * @param {string} source + * @return {string[]} + */ +function getRequirements(source) { + const requirements = fse + .readFileSync(source, { encoding: 'utf-8' }) + .replace(/\\\n/g, ' ') + .split(/\r?\n/); + + return requirements.reduce((acc, req) => { + req = req.trim(); + if (!req.startsWith('-r')) { + return [...acc, req]; + } + source = path.join(path.dirname(source), req.replace(/^-r\s+/, '')); + return [...acc, ...getRequirements(source)]; + }, []); +} + /** create a filtered requirements.txt without anything from noDeploy * then remove all comments and empty lines, and sort the list which * assist with matching the static cache. The sorting will skip any @@ -351,10 +373,7 @@ function dockerPathForWin(path) { */ function filterRequirementsFile(source, target, options) { const noDeploy = new Set(options.noDeploy || []); - const requirements = fse - .readFileSync(source, { encoding: 'utf-8' }) - .replace(/\\\n/g, ' ') - .split(/\r?\n/); + const requirements = getRequirements(source); var prepend = []; const filteredRequirements = requirements.filter(req => { req = req.trim(); diff --git a/test.js b/test.js index d26aef74..d8af3954 100644 --- a/test.js +++ b/test.js @@ -157,6 +157,21 @@ test('py3.6 can package flask with hashes', t => { t.end(); }); +test('py3.6 can package flask with nested', t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + `--pythonBin=${getPythonBin(3)}`, + '--fileName=requirements-w-nested.txt', + 'package' + ]); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + test('py3.6 can package flask with zip option', t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1458,6 +1473,10 @@ test('py3.6 can package only requirements of module', t => { zipfiles_hello.includes(`pyaml${sep}__init__.py`), 'pyaml is packaged in function hello1' ); + t.true( + zipfiles_hello.includes(`boto3${sep}__init__.py`), + 'boto3 is packaged in function hello1' + ); t.false( zipfiles_hello.includes(`flask${sep}__init__.py`), 'flask is NOT packaged in function hello1' @@ -1478,6 +1497,10 @@ test('py3.6 can package only requirements of module', t => { zipfiles_hello2.includes(`pyaml${sep}__init__.py`), 'pyaml is NOT packaged in function hello2' ); + t.false( + zipfiles_hello2.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT packaged in function hello2' + ); t.true( zipfiles_hello2.includes(`flask${sep}__init__.py`), 'flask is packaged in function 
hello2' diff --git a/tests/base/requirements-common.txt b/tests/base/requirements-common.txt new file mode 100644 index 00000000..30ddf823 --- /dev/null +++ b/tests/base/requirements-common.txt @@ -0,0 +1 @@ +boto3 diff --git a/tests/base/requirements-w-nested.txt b/tests/base/requirements-w-nested.txt new file mode 100644 index 00000000..4d73c837 --- /dev/null +++ b/tests/base/requirements-w-nested.txt @@ -0,0 +1,3 @@ +flask +bottle +-r requirements-common.txt diff --git a/tests/individually/module1/requirements.txt b/tests/individually/module1/requirements.txt index 2e64be1f..9b7a216a 100644 --- a/tests/individually/module1/requirements.txt +++ b/tests/individually/module1/requirements.txt @@ -1 +1,2 @@ +-r ../requirements-common.txt pyaml diff --git a/tests/individually/requirements-common.txt b/tests/individually/requirements-common.txt new file mode 100644 index 00000000..30ddf823 --- /dev/null +++ b/tests/individually/requirements-common.txt @@ -0,0 +1 @@ +boto3 From 9750662191292d309af60ba3529172bd069502ba Mon Sep 17 00:00:00 2001 From: mokamoto12 Date: Tue, 13 Aug 2019 00:04:34 +0900 Subject: [PATCH 147/328] Fix for works with old and new versions --- lib/poetry.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index d18ac064..27ad9476 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -35,7 +35,11 @@ function pyprojectTomlToRequirements() { const editableFlag = new RegExp(/^-e /gm); const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); - const requirementsContents = res.stdout.toString(); + const requirementsContents = + res.stdout.toString() || + fse.readFileSync(sourceRequirements, { + encoding: 'utf-8' + }); if (requirementsContents.match(editableFlag)) { this.serverless.cli.log( From f7048bd66f6d964215bed9f424113a656efbd3ab Mon Sep 17 00:00:00 2001 From: mokamoto12 Date: Tue, 13 Aug 2019 00:30:11 +0900 Subject: [PATCH 148/328] Add comment --- lib/poetry.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index 27ad9476..2e4a697f 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -36,7 +36,7 @@ function pyprojectTomlToRequirements() { const editableFlag = new RegExp(/^-e /gm); const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); const requirementsContents = - res.stdout.toString() || + res.stdout.toString() || // As of poetry 1.0.0b1, requirements.txt is printed to standard output when the -o option is not specified. fse.readFileSync(sourceRequirements, { encoding: 'utf-8' }); From cf22a7c45c7ffeb30a340b19bc1ee4bed6249259 Mon Sep 17 00:00:00 2001 From: mokamoto12 Date: Tue, 13 Aug 2019 03:19:19 +0900 Subject: [PATCH 149/328] Add trim and add test --- lib/poetry.js | 2 +- test.js | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index 2e4a697f..ddf85201 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -36,7 +36,7 @@ function pyprojectTomlToRequirements() { const editableFlag = new RegExp(/^-e /gm); const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); const requirementsContents = - res.stdout.toString() || // As of poetry 1.0.0b1, requirements.txt is printed to standard output when the -o option is not specified. + res.stdout.toString().trim() || // As of poetry 1.0.0b1, requirements.txt is printed to standard output when the -o option is not specified. 
fse.readFileSync(sourceRequirements, { encoding: 'utf-8' }); diff --git a/test.js b/test.js index d26aef74..d28daa6e 100644 --- a/test.js +++ b/test.js @@ -51,6 +51,7 @@ const sls = mkCommand('sls'); const git = mkCommand('git'); const npm = mkCommand('npm'); const perl = mkCommand('perl'); +const poetry = mkCommand('poetry'); const setup = () => { removeSync(getUserCachePath()); @@ -1874,3 +1875,17 @@ test( }, { skip: !canUseDocker() } ); + +// From this point on, the version of the poetry is 1.0.0a0 +test('poetry1.0.0a0 py3.6 can package flask with default options', t => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + poetry(['self', 'update', '--preview', '1.0.0a0']); + sls(['package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); From f0fab0e6ec6be238c1630074048ed981ca4d51e0 Mon Sep 17 00:00:00 2001 From: Thomas Pansino Date: Mon, 26 Aug 2019 18:44:21 -0700 Subject: [PATCH 150/328] Overwrite existing requirements.txt file when generating from poetry --- lib/poetry.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index ddf85201..48589640 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -54,7 +54,8 @@ function pyprojectTomlToRequirements() { fse.ensureDirSync(path.join(this.servicePath, '.serverless')); fse.moveSync( sourceRequirements, - path.join(this.servicePath, '.serverless', 'requirements.txt') + path.join(this.servicePath, '.serverless', 'requirements.txt'), + { "overwrite": true } ); } From 5b38b5c817039a7b89b5987be423506cabc56580 Mon Sep 17 00:00:00 2001 From: Tom Pansino Date: Wed, 28 Aug 2019 17:00:34 -0700 Subject: [PATCH 151/328] Update misleading message wording --- lib/poetry.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index 48589640..8ce00bda 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -43,7 +43,7 @@ function pyprojectTomlToRequirements() { if (requirementsContents.match(editableFlag)) { this.serverless.cli.log( - 'The generated file contains -e lines, removing them...' + 'The generated file contains -e flags, removing them...' ); fse.writeFileSync( sourceRequirements, From 2ab01cf84c8c14f55726da6f3704e3fedc40664e Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 28 Oct 2019 15:33:44 -0400 Subject: [PATCH 152/328] SFE support when using package:individually --- lib/inject.js | 13 +++++++++++-- package.json | 2 +- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/lib/inject.js b/lib/inject.js index e17800c4..aced0fcf 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -62,11 +62,20 @@ function moveModuleUp(source, target, module) { return fse .readFileAsync(source) .then(buffer => JSZip.loadAsync(buffer)) - .then(sourceZip => sourceZip.filter(file => file.startsWith(module + '/'))) + .then(sourceZip => + sourceZip.filter( + file => + file.startsWith(module + '/') || + file.startsWith('serverless_sdk/') || + file.match(/s_.*\.py/) !== null + ) + ) .map(srcZipObj => zipFile( targetZip, - srcZipObj.name.replace(module + '/', ''), + srcZipObj.name.startsWith(module + '/') + ? 
srcZipObj.name.replace(module + '/', '') + : srcZipObj.name, srcZipObj.async('nodebuffer') ) ) diff --git a/package.json b/package.json index 9f5ddfcb..a767eb34 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.0.0", + "version": "5.0.1", "engines": { "node": ">=6.0" }, From 3233c60b2f37d1a974b4cdf31bfa2b828c9a44c4 Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Mon, 4 Nov 2019 15:42:03 -0500 Subject: [PATCH 153/328] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f583ec46..95c18d56 100644 --- a/README.md +++ b/README.md @@ -239,7 +239,7 @@ custom: ## Extra Config Options ### Caching -You can enable two kinds of caching with this plugin which are currently both DISABLED by default. First, a download cache that will cache downloads that pip needs to compile the packages. And second, a what we call "static caching" which caches output of pip after compiling everything for your requirements file. Since generally requirements.txt files rarely change, you will often see large amounts of speed improvements when enabling the static cache feature. These caches will be shared between all your projects if no custom cacheLocation is specified (see below). +You can enable two kinds of caching with this plugin which are currently both ENABLED by default. First, a download cache that will cache downloads that pip needs to compile the packages. And second, a what we call "static caching" which caches output of pip after compiling everything for your requirements file. Since generally requirements.txt files rarely change, you will often see large amounts of speed improvements when enabling the static cache feature. These caches will be shared between all your projects if no custom cacheLocation is specified (see below). _**Please note:** This has replaced the previously recommended usage of "--cache-dir" in the pipCmdExtraArgs_ ```yaml From 5db477fa32070ae5c834f635ea4c55061156f126 Mon Sep 17 00:00:00 2001 From: Justin Plock Date: Wed, 27 Nov 2019 12:28:41 -0500 Subject: [PATCH 154/328] Expand pip commands into separate entries --- lib/pip.js | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index caf48f3e..05fa9392 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -123,9 +123,16 @@ function installRequirements(targetFolder, serverless, options) { options.pythonBin, '-m', 'pip', - 'install', - ...options.pipCmdExtraArgs + 'install' ]; + + if (Array.isArray(options.pipCmdExtraArgs) && options.pipCmdExtraArgs.length > 0) { + options.pipCmdExtraArgs.forEach(cmd => { + const parts = cmd.split(/\s+/, 2); + pipCmd.push(...parts); + }); + } + const pipCmds = [pipCmd]; const postCmds = []; // Check if we're using the legacy --cache-dir command... 
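For context on the hunk above: each `pipCmdExtraArgs` entry is split on whitespace into at most two parts before being appended to the pip command line. A minimal standalone sketch of that behaviour (the index URL is only an illustrative value, not taken from the patch):

```js
// Mirrors the split-then-push loop in the lib/pip.js hunk above.
// An entry written as "--flag value" becomes two argv elements
// instead of one quoted string.
const pipCmd = ['python3', '-m', 'pip', 'install'];
const pipCmdExtraArgs = ['--index-url https://example.com/simple', '--no-deps'];

pipCmdExtraArgs.forEach(cmd => {
  const parts = cmd.split(/\s+/, 2);
  pipCmd.push(...parts);
});

console.log(pipCmd.join(' '));
// python3 -m pip install --index-url https://example.com/simple --no-deps
```

Note that the limit of 2 in `split(/\s+/, 2)` keeps only the first two whitespace-separated tokens of an entry, so a flag that takes several values should be supplied as separate list entries.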
From 94127300367521f4ec798272476392ea69b75693 Mon Sep 17 00:00:00 2001 From: Joe Crawforth Date: Tue, 7 Jan 2020 10:45:29 +0000 Subject: [PATCH 155/328] commiting first attempt at resovling #445 --- lib/poetry.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index ddf85201..00c60562 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -16,7 +16,7 @@ function pyprojectTomlToRequirements() { const res = spawnSync( 'poetry', - ['export', '--without-hashes', '-f', 'requirements.txt'], + ['export', '--without-hashes', '-f', 'requirements.txt', '>', 'requirements.txt'], { cwd: this.servicePath } From bc5933626e25cbf7aa4ed59b02a8fa9fd44b5e78 Mon Sep 17 00:00:00 2001 From: Joe Crawforth <38247715+JoeyC1990@users.noreply.github.com> Date: Tue, 7 Jan 2020 17:16:24 +0000 Subject: [PATCH 156/328] Changing to use the output option --- lib/poetry.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index 00c60562..2fa21440 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -16,7 +16,7 @@ function pyprojectTomlToRequirements() { const res = spawnSync( 'poetry', - ['export', '--without-hashes', '-f', 'requirements.txt', '>', 'requirements.txt'], + ['export', '--without-hashes', '-f', 'requirements.txt', '-o', 'requirements.txt'], { cwd: this.servicePath } From 6f1a8843beb0fd4cdef0002e3ba450f0eb36eb9f Mon Sep 17 00:00:00 2001 From: Jim Pudar Date: Tue, 7 Jan 2020 16:31:26 -0500 Subject: [PATCH 157/328] Correct imprecise regex in moveModuleUp function (fixes #444) The existing regex had the side effect of matching python files from other modules if their filename or path happened to contain the string `s_`. --- lib/inject.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/inject.js b/lib/inject.js index aced0fcf..876e1b75 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -67,7 +67,7 @@ function moveModuleUp(source, target, module) { file => file.startsWith(module + '/') || file.startsWith('serverless_sdk/') || - file.match(/s_.*\.py/) !== null + file.match(/^s_.*\.py/) !== null ) ) .map(srcZipObj => From e9ebf6fac60078223966af853c98cd55d9a12710 Mon Sep 17 00:00:00 2001 From: Joe Crawforth Date: Wed, 29 Jan 2020 09:32:18 +0000 Subject: [PATCH 158/328] updating readme --- README.md | 2 +- lib/poetry.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 95c18d56..e9f30b6c 100644 --- a/README.md +++ b/README.md @@ -102,7 +102,7 @@ and requires that poetry is installed with the --preview flag. TL;DR Install poetry with the `--preview` flag. If you include a `pyproject.toml` and have `poetry` installed instead of a `requirements.txt` this will use -`poetry export --without-hashes -f requirements.txt` to generate them. It is fully compatible with all options such as `zip` and +`poetry export --without-hashes -f requirements.txt -o requirements.txt` to generate them. It is fully compatible with all options such as `zip` and `dockerizePip`. 
If you don't want this plugin to generate it for you, set the following option: ```yaml custom: diff --git a/lib/poetry.js b/lib/poetry.js index 00c60562..2fa21440 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -16,7 +16,7 @@ function pyprojectTomlToRequirements() { const res = spawnSync( 'poetry', - ['export', '--without-hashes', '-f', 'requirements.txt', '>', 'requirements.txt'], + ['export', '--without-hashes', '-f', 'requirements.txt', '-o', 'requirements.txt'], { cwd: this.servicePath } From 86074eddf09d856a5fbd1770f89dd8286e21710b Mon Sep 17 00:00:00 2001 From: Michael Warkentin Date: Fri, 7 Feb 2020 16:21:26 -0500 Subject: [PATCH 159/328] Remove outdated note about poetry 1.0 Poetry has released multiple 1.x versions now: https://github.com/python-poetry/poetry/tree/1.0.3 --- README.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/README.md b/README.md index 95c18d56..a4226632 100644 --- a/README.md +++ b/README.md @@ -95,12 +95,6 @@ custom: ## Poetry support :sparkles::pencil::sparkles: -NOTE: Only poetry version 1 supports the required `export` command for this -feature. As of the point this feature was added, poetry 1.0.0 was in preview -and requires that poetry is installed with the --preview flag. - -TL;DR Install poetry with the `--preview` flag. - If you include a `pyproject.toml` and have `poetry` installed instead of a `requirements.txt` this will use `poetry export --without-hashes -f requirements.txt` to generate them. It is fully compatible with all options such as `zip` and `dockerizePip`. If you don't want this plugin to generate it for you, set the following option: From a2029be2307e877ea2400f7819b1250b4465fe6e Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Mon, 17 Feb 2020 19:37:08 -0500 Subject: [PATCH 160/328] chore(ci): update test environment to node 10 Node 6.x has been EOL since April 2019, and Node 8.x becaome EOL in Dec 2019. This pushes the CI environment to use an Active LTS version of Node. Refs: https://github.com/nodejs/Release Signed-off-by: Mike Fiedler --- circle.yml | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/circle.yml b/circle.yml index 3957bc4f..495b9ca6 100644 --- a/circle.yml +++ b/circle.yml @@ -33,7 +33,7 @@ jobs: python get-poetry.py --preview --yes rm get-poetry.py # install nodejs - - run: curl -sL https://deb.nodesource.com/setup_6.x | sudo bash - && sudo apt -y install nodejs + - run: curl -sL https://deb.nodesource.com/setup_10.x | sudo bash - && sudo apt -y install nodejs # install serverless & depcheck - run: npm install -g serverless depcheck # install deps diff --git a/package.json b/package.json index a767eb34..60cd1ea2 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "serverless-python-requirements", "version": "5.0.1", "engines": { - "node": ">=6.0" + "node": ">=10.0" }, "description": "Serverless Python Requirements Plugin", "author": "United Income ", From e8ded7c8bffe6ce855d380c70910f97da5a796ea Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Mon, 17 Feb 2020 20:16:06 -0500 Subject: [PATCH 161/328] chore(ci): remove depcheck from circle testing Uncertain of why this is currently failing - it's possible the global version of node in the `circleci/classic` image is incompatible - that should be investigated independently. In the meantime, the rest of CI isn't being run. 
Signed-off-by: Mike Fiedler --- circle.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/circle.yml b/circle.yml index 495b9ca6..ef3ec251 100644 --- a/circle.yml +++ b/circle.yml @@ -35,11 +35,9 @@ jobs: # install nodejs - run: curl -sL https://deb.nodesource.com/setup_10.x | sudo bash - && sudo apt -y install nodejs # install serverless & depcheck - - run: npm install -g serverless depcheck + - run: npm install -g serverless # install deps - run: npm i - # depcheck - - run: depcheck . # lint: - run: npm run lint # test! From d4e168c220664b071711ca95bef92765b9dcdb2b Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Mon, 17 Feb 2020 20:28:00 -0500 Subject: [PATCH 162/328] style: apply prettier to existing files These un-linted changes have crept in due to lack of CI making it here. Signed-off-by: Mike Fiedler --- lib/pip.js | 16 +++++++--------- lib/poetry.js | 9 ++++++++- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index 05fa9392..14864794 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -119,20 +119,18 @@ function installRequirements(targetFolder, serverless, options) { ); const dockerCmd = []; - const pipCmd = [ - options.pythonBin, - '-m', - 'pip', - 'install' - ]; - - if (Array.isArray(options.pipCmdExtraArgs) && options.pipCmdExtraArgs.length > 0) { + const pipCmd = [options.pythonBin, '-m', 'pip', 'install']; + + if ( + Array.isArray(options.pipCmdExtraArgs) && + options.pipCmdExtraArgs.length > 0 + ) { options.pipCmdExtraArgs.forEach(cmd => { const parts = cmd.split(/\s+/, 2); pipCmd.push(...parts); }); } - + const pipCmds = [pipCmd]; const postCmds = []; // Check if we're using the legacy --cache-dir command... diff --git a/lib/poetry.js b/lib/poetry.js index 2fa21440..7f041c83 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -16,7 +16,14 @@ function pyprojectTomlToRequirements() { const res = spawnSync( 'poetry', - ['export', '--without-hashes', '-f', 'requirements.txt', '-o', 'requirements.txt'], + [ + 'export', + '--without-hashes', + '-f', + 'requirements.txt', + '-o', + 'requirements.txt' + ], { cwd: this.servicePath } From 0a865eb4308413840196fb372bbd77ed3051f53e Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Mon, 17 Feb 2020 20:58:20 -0500 Subject: [PATCH 163/328] chore(ci): use cmd instead of powershell Appveyor testing is failing due to a warning during installation of the `serverless` package, since the `fsevents` dependency is targeting the `darwin` architecture. Signed-off-by: Mike Fiedler --- appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 43b0e373..d7ecdcb9 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -3,7 +3,7 @@ init: - cmd: python -m pip install -U pip - cmd: pip install pipenv - cmd: pip install poetry==1.0.0a2 - - ps: npm i -g serverless + - cmd: npm i -g serverless build: off test_script: - cmd: >- From ca2bfc1694001dd86ca8fd774c92b8dee826171f Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Mon, 17 Feb 2020 21:16:56 -0500 Subject: [PATCH 164/328] test: remove poetry test for pre-release version The test was introduced to test the behavior of a particular pre-release version of poetry, prior to version 1.0.0 being released. It has since been released on Dec 12, 2019. 
Refs: https://python-poetry.org/blog/announcing-poetry-1-0-0.html Refs: #395 Signed-off-by: Mike Fiedler --- test.js | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/test.js b/test.js index 5ba46f2c..987c862c 100644 --- a/test.js +++ b/test.js @@ -52,7 +52,6 @@ const sls = mkCommand('sls'); const git = mkCommand('git'); const npm = mkCommand('npm'); const perl = mkCommand('perl'); -const poetry = mkCommand('poetry'); const setup = () => { removeSync(getUserCachePath()); @@ -1910,17 +1909,3 @@ test( }, { skip: !canUseDocker() } ); - -// From this point on, the version of the poetry is 1.0.0a0 -test('poetry1.0.0a0 py3.6 can package flask with default options', t => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - poetry(['self', 'update', '--preview', '1.0.0a0']); - sls(['package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); -}); From e559abb5a896756fbf8220dc1e31a379d909104c Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Mon, 17 Feb 2020 21:27:01 -0500 Subject: [PATCH 165/328] chore(ci): update appveyor test environment Update to run against Python 3.x instead of EOL 2.x Signed-off-by: Mike Fiedler --- appveyor.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index d7ecdcb9..920d78a3 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,8 +1,8 @@ version: '{build}' init: - - cmd: python -m pip install -U pip - - cmd: pip install pipenv - - cmd: pip install poetry==1.0.0a2 + - cmd: python3 -m pip install -U pip + - cmd: pip3 install pipenv + - cmd: pip3 install poetry - cmd: npm i -g serverless build: off test_script: From 89b03e1d6b75284568b01d3b6b26ff801dbcfc72 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Tue, 18 Feb 2020 18:03:33 +0000 Subject: [PATCH 166/328] Bump rimraf from 2.7.1 to 3.0.2 Bumps [rimraf](https://github.com/isaacs/rimraf) from 2.7.1 to 3.0.2. - [Release notes](https://github.com/isaacs/rimraf/releases) - [Changelog](https://github.com/isaacs/rimraf/blob/master/CHANGELOG.md) - [Commits](https://github.com/isaacs/rimraf/compare/v2.7.1...v3.0.2) Signed-off-by: dependabot-preview[bot] --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 60cd1ea2..aba8b96f 100644 --- a/package.json +++ b/package.json @@ -61,7 +61,7 @@ "lodash.set": "^4.3.2", "lodash.uniqby": "^4.0.0", "lodash.values": "^4.3.0", - "rimraf": "^2.6.2", + "rimraf": "^3.0.2", "sha256-file": "1.0.0", "shell-quote": "^1.6.1" }, From f0bc3685024daed14cc0fb4422f2bb38e81db841 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Feb 2020 19:38:50 +0000 Subject: [PATCH 167/328] Bump werkzeug from 0.14.1 to 0.15.3 in /tests/base Bumps [werkzeug](https://github.com/pallets/werkzeug) from 0.14.1 to 0.15.3. 
- [Release notes](https://github.com/pallets/werkzeug/releases) - [Changelog](https://github.com/pallets/werkzeug/blob/master/CHANGES.rst) - [Commits](https://github.com/pallets/werkzeug/compare/0.14.1...0.15.3) Signed-off-by: dependabot[bot] --- tests/base/requirements-w-hashes.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/base/requirements-w-hashes.txt b/tests/base/requirements-w-hashes.txt index 92576e43..2ee1ab77 100644 --- a/tests/base/requirements-w-hashes.txt +++ b/tests/base/requirements-w-hashes.txt @@ -83,7 +83,7 @@ urllib3==1.24.2 \ --hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \ --hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3 \ # via botocore -werkzeug==0.14.1 \ - --hash=sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c \ - --hash=sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b \ +werkzeug==0.15.3 \ + --hash=sha256:97660b282aa7e29f94f3fe378e5c7162d7ab9d601a8dbb1cbb2ffc8f0e54607d \ + --hash=sha256:cfd1281b1748288e59762c0e174d64d8bcb2b70e7c57bc4a1203c8825af24ac3 \ # via flask From 369c98128646df8fd4879093ab32e737c0947912 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Feb 2020 19:39:10 +0000 Subject: [PATCH 168/328] Bump werkzeug from 0.14.1 to 0.15.3 in /tests/pipenv Bumps [werkzeug](https://github.com/pallets/werkzeug) from 0.14.1 to 0.15.3. - [Release notes](https://github.com/pallets/werkzeug/releases) - [Changelog](https://github.com/pallets/werkzeug/blob/master/CHANGES.rst) - [Commits](https://github.com/pallets/werkzeug/compare/0.14.1...0.15.3) Signed-off-by: dependabot[bot] --- tests/pipenv/Pipfile.lock | 109 ++++++++++++++++++++++---------------- 1 file changed, 64 insertions(+), 45 deletions(-) diff --git a/tests/pipenv/Pipfile.lock b/tests/pipenv/Pipfile.lock index 84bde3cd..fb840a39 100644 --- a/tests/pipenv/Pipfile.lock +++ b/tests/pipenv/Pipfile.lock @@ -3,19 +3,6 @@ "hash": { "sha256": "ef2bf8ae3e097071390b1bceee7f9b5944c959aea100e9f0ee6a53df3c57275b" }, - "host-environment-markers": { - "implementation_name": "cpython", - "implementation_version": "3.6.3", - "os_name": "posix", - "platform_machine": "x86_64", - "platform_python_implementation": "CPython", - "platform_release": "4.13.0-32-generic", - "platform_system": "Linux", - "platform_version": "#35-Ubuntu SMP Thu Jan 25 09:13:46 UTC 2018", - "python_full_version": "3.6.3", - "python_version": "3.6", - "sys_platform": "linux" - }, "pipfile-spec": 6, "requires": {}, "sources": [ @@ -35,10 +22,10 @@ }, "botocore": { "hashes": [ - "sha256:a91430f0bfbf7c13edc474c3f0d46449108aaebcd6d8e82a5bf9aebe17b42258", - "sha256:b2c9e0fd6d14910f759a33c19f8315dddedbb3c5569472b7be7ceed4f001a675" + "sha256:898f10e68a7a1c2be621caf046d29a8f782c0ea866d644d5be46472c00a3dee9", + "sha256:a80a23e080f4a93d11a1c067a69304dd407d18c358cba1e0df8c96f56c9e98b4" ], - "version": "==1.8.36" + "version": "==1.8.50" }, "bottle": { "hashes": [ @@ -48,18 +35,17 @@ }, "click": { "hashes": [ - "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d", - "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b" + "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", + "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" ], - "version": "==6.7" + "version": "==7.0" }, "docutils": { "hashes": [ - "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6", - 
"sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", - "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274" + "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", + "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" ], - "version": "==0.14" + "version": "==0.16" }, "flask": { "hashes": [ @@ -70,57 +56,90 @@ }, "itsdangerous": { "hashes": [ - "sha256:cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519" + "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", + "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" ], - "version": "==0.24" + "version": "==1.1.0" }, "jinja2": { "hashes": [ - "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", - "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4" + "sha256:93187ffbc7808079673ef52771baa950426fd664d3aad1d0fa3e95644360e250", + "sha256:b0eaf100007721b5c16c1fc1eecb87409464edc10469ddc9a22a27a99123be49" ], - "version": "==2.10" + "version": "==2.11.1" }, "jmespath": { "hashes": [ - "sha256:f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63", - "sha256:6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64" + "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", + "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c" ], - "version": "==0.9.3" + "version": "==0.9.4" }, "markupsafe": { "hashes": [ - "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665" + "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", + "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", + "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", + "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", + "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", + "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", + "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", + "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", + "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", + "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", + "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", + "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", + "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", + "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", + "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", + "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", + "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", + "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", + "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", + "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", + 
"sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", + "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", + "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", + "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" ], - "version": "==1.0" + "version": "==1.1.1" }, "python-dateutil": { "hashes": [ - "sha256:95511bae634d69bc7329ba55e646499a842bc4ec342ad54a8cdb65645a0aad3c", - "sha256:891c38b2a02f5bb1be3e4793866c8df49c7d19baabf9c1bad62547e0b4866aca" + "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", + "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" ], - "version": "==2.6.1" + "version": "==2.8.1" }, "s3transfer": { "hashes": [ - "sha256:23c156ca4d64b022476c92c44bf938bef71af9ce0dcd8fd6585e7bce52f66e47", - "sha256:10891b246296e0049071d56c32953af05cea614dca425a601e4c0be35990121e" + "sha256:90dc18e028989c609146e241ea153250be451e05ecc0c2832565231dacdf59c1", + "sha256:c7a9ec356982d5e9ab2d4b46391a7d6a950e2b04c472419f5fdec70cc0ada72f" ], - "version": "==0.1.12" + "version": "==0.1.13" }, "six": { "hashes": [ - "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb", - "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.11.0" + "version": "==1.14.0" }, "werkzeug": { "hashes": [ - "sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b", - "sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c" + "sha256:97660b282aa7e29f94f3fe378e5c7162d7ab9d601a8dbb1cbb2ffc8f0e54607d", + "sha256:cfd1281b1748288e59762c0e174d64d8bcb2b70e7c57bc4a1203c8825af24ac3" ], - "version": "==0.14.1" + "version": "==0.15.3" } }, "develop": {} From 30de0fee8e0f383ce936400a93592db8f8fa44d8 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Tue, 18 Feb 2020 22:04:01 -0500 Subject: [PATCH 169/328] chore(ci): remove bats testing references In #282 the test suite was ported from BATS to TAPE. 
Signed-off-by: Mike Fiedler --- circle.yml | 5 ----- test.js | 4 ---- 2 files changed, 9 deletions(-) diff --git a/circle.yml b/circle.yml index ef3ec251..5741c743 100644 --- a/circle.yml +++ b/circle.yml @@ -16,11 +16,6 @@ jobs: ./configure make sudo make altinstall - # install bats - - run: | - git clone https://github.com/sstephenson/bats.git - cd bats - sudo ./install.sh /usr/local # other deps - run: sudo apt -y update && sudo apt -y install python-pip python2.7 curl unzip # upgrade python3.6 pip to latest diff --git a/test.js b/test.js index 987c862c..cda0a614 100644 --- a/test.js +++ b/test.js @@ -221,10 +221,6 @@ test('py3.6 can package flask with slim option', t => { t.end(); }); -/* - * News tests NOT in test.bats - */ - test('py3.6 can package flask with slim & slimPatterns options', t => { process.chdir('tests/base'); From 206c1d52270a1c1bd43300d6d93ceeb07ee25cfa Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Tue, 18 Feb 2020 22:42:36 -0500 Subject: [PATCH 170/328] test: store test result artifacts This is an initial implementation in trying to set up CircleCI Test Metadata. I've decided to start with ESLint, so as to have a basis for expanding reporting to other jUnit-style reports, like TAP. Refs: https://circleci.com/docs/2.0/collect-test-data/#eslint Signed-off-by: Mike Fiedler --- circle.yml | 6 +++++- package.json | 1 + 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/circle.yml b/circle.yml index ef3ec251..138b608f 100644 --- a/circle.yml +++ b/circle.yml @@ -39,10 +39,14 @@ jobs: # install deps - run: npm i # lint: - - run: npm run lint + - run: npm run ci:lint # test! - run: | export PATH="$HOME/.poetry/bin:$PATH" export LC_ALL=C.UTF-8 export LANG=C.UTF-8 npm run test + - store_test_results: + path: ~/reports + - store_artifacts: + path: ~/reports diff --git a/package.json b/package.json index aba8b96f..dc887bbd 100644 --- a/package.json +++ b/package.json @@ -38,6 +38,7 @@ "main": "index.js", "bin": {}, "scripts": { + "ci:lint": "eslint *.js lib/*.js --format junit --output-file ~/reports/eslint.xml && prettier -c index.js lib/*.js", "test": "node test.js", "lint": "eslint *.js lib/*.js && prettier -l index.js lib/*.js || (echo need formatting ; exit 1)", "format": "prettier --write index.js lib/*.js test.js" From 375241182af34e29c3e73b9311db6b9c5b0b8ec5 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Tue, 18 Feb 2020 22:51:07 -0500 Subject: [PATCH 171/328] test: ensure dev-packages are not included As raised in #322, add a test for Pipenv to ensure `dev-packages` are not included in the target deployment package. 
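The dev-packages stay out of the bundle because the requirements file is generated from the Pipfile's default section only. A rough sketch of that flow (hypothetical helper; assumes a pipenv release that still supports `lock --requirements`):

```js
// Sketch: turn a Pipfile into .serverless/requirements.txt via
// `pipenv lock --requirements` (the mechanism the README describes as `pipenv lock -r`).
// Because -d/--dev is never passed, [dev-packages] such as pytest are omitted.
const { spawnSync } = require('child_process');
const fse = require('fs-extra');
const path = require('path');

function pipfileToRequirements(servicePath) {
  const res = spawnSync('pipenv', ['lock', '--requirements'], {
    cwd: servicePath
  });
  if (res.error || res.status !== 0) {
    throw new Error(`pipenv lock failed: ${res.error || res.stderr}`);
  }
  fse.ensureDirSync(path.join(servicePath, '.serverless'));
  fse.writeFileSync(
    path.join(servicePath, '.serverless', 'requirements.txt'),
    res.stdout.toString()
  );
}

module.exports = { pipfileToRequirements };
```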
Signed-off-by: Mike Fiedler --- test.js | 1 + tests/pipenv/Pipfile | 3 + tests/pipenv/Pipfile.lock | 133 +++++++++++++++++++++++++++++++------- 3 files changed, 115 insertions(+), 22 deletions(-) diff --git a/test.js b/test.js index 987c862c..f348b595 100644 --- a/test.js +++ b/test.js @@ -656,6 +656,7 @@ test('pipenv py3.6 can package flask with default options', t => { const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.false(zipfiles.includes(`pytest${sep}__init__.py`), 'dev-package pytest is NOT packaged'); t.end(); }); diff --git a/tests/pipenv/Pipfile b/tests/pipenv/Pipfile index 3798645f..0d65eb75 100644 --- a/tests/pipenv/Pipfile +++ b/tests/pipenv/Pipfile @@ -6,3 +6,6 @@ verify_ssl = true Flask = "*" bottle = "*" boto3 = "*" + +[dev-packages] +pytest = "*" diff --git a/tests/pipenv/Pipfile.lock b/tests/pipenv/Pipfile.lock index fb840a39..4b68385d 100644 --- a/tests/pipenv/Pipfile.lock +++ b/tests/pipenv/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "ef2bf8ae3e097071390b1bceee7f9b5944c959aea100e9f0ee6a53df3c57275b" + "sha256": "82666b88a005fce4645bad0e283c9eddf23446745dc6228888183a1fed2f7185" }, "pipfile-spec": 6, "requires": {}, @@ -15,23 +15,24 @@ "default": { "boto3": { "hashes": [ - "sha256:49bda3ac6e69c2d0a34c37fc4ec47efd73e5f5cf86e34524b1918857aa74d797", - "sha256:5430b5cd532fe56ccc9eaf1ed433ac74805811b931ae1e44eb896af98a1297f0" + "sha256:68e32e2d1c911b0e8408278c7603f0f46c31780b46c44d23346ccef71b3f10dc", + "sha256:967c7a5ac484fe627706e241dfc9294a6220c863ceb53a4f34e3fe9e11a71d7a" ], - "version": "==1.5.22" + "version": "==1.12.2" }, "botocore": { "hashes": [ - "sha256:898f10e68a7a1c2be621caf046d29a8f782c0ea866d644d5be46472c00a3dee9", - "sha256:a80a23e080f4a93d11a1c067a69304dd407d18c358cba1e0df8c96f56c9e98b4" + "sha256:00bff61d899c4f12abe020527452e08cf49b3b60400c5d0d9f83c00b7d18c642", + "sha256:5ffdf30746dbfca59d31d2059789168255e96bd98a17a65f8edb3b6de0a96b3e" ], - "version": "==1.8.50" + "version": "==1.15.2" }, "bottle": { "hashes": [ - "sha256:39b751aee0b167be8dffb63ca81b735bbf1dd0905b3bc42761efedee8f123355" + "sha256:0819b74b145a7def225c0e83b16a4d5711fde751cd92bae467a69efce720f69e", + "sha256:43157254e88f32c6be16f8d9eb1f1d1472396a4e174ebd2bf62544854ecf37e7" ], - "version": "==0.12.13" + "version": "==0.12.18" }, "click": { "hashes": [ @@ -42,17 +43,18 @@ }, "docutils": { "hashes": [ - "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", - "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc" + "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", + "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", + "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" ], - "version": "==0.16" + "version": "==0.15.2" }, "flask": { "hashes": [ - "sha256:0749df235e3ff61ac108f69ac178c9770caeaccad2509cb762ce1f65570a8856", - "sha256:49f44461237b69ecd901cc7ce66feea0319b9158743dd27a2899962ab214dac1" + "sha256:13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52", + "sha256:45eb5a6fd193d6cf7e0cf5d8a5b31f83d5faae0293695626f539a823e93b13f6" ], - "version": "==0.12.2" + "version": "==1.1.1" }, "itsdangerous": { "hashes": [ @@ -122,10 +124,10 @@ }, "s3transfer": { "hashes": [ - "sha256:90dc18e028989c609146e241ea153250be451e05ecc0c2832565231dacdf59c1", - 
"sha256:c7a9ec356982d5e9ab2d4b46391a7d6a950e2b04c472419f5fdec70cc0ada72f" + "sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13", + "sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db" ], - "version": "==0.1.13" + "version": "==0.3.3" }, "six": { "hashes": [ @@ -134,13 +136,100 @@ ], "version": "==1.14.0" }, + "urllib3": { + "hashes": [ + "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc", + "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc" + ], + "markers": "python_version != '3.4'", + "version": "==1.25.8" + }, "werkzeug": { "hashes": [ - "sha256:97660b282aa7e29f94f3fe378e5c7162d7ab9d601a8dbb1cbb2ffc8f0e54607d", - "sha256:cfd1281b1748288e59762c0e174d64d8bcb2b70e7c57bc4a1203c8825af24ac3" + "sha256:169ba8a33788476292d04186ab33b01d6add475033dfc07215e6d219cc077096", + "sha256:6dc65cf9091cf750012f56f2cad759fa9e879f511b5ff8685e456b4e3bf90d16" ], - "version": "==0.15.3" + "version": "==1.0.0" } }, - "develop": {} + "develop": { + "attrs": { + "hashes": [ + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" + ], + "version": "==19.3.0" + }, + "importlib-metadata": { + "hashes": [ + "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", + "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" + ], + "markers": "python_version < '3.8'", + "version": "==1.5.0" + }, + "more-itertools": { + "hashes": [ + "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c", + "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507" + ], + "version": "==8.2.0" + }, + "packaging": { + "hashes": [ + "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73", + "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334" + ], + "version": "==20.1" + }, + "pluggy": { + "hashes": [ + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" + ], + "version": "==0.13.1" + }, + "py": { + "hashes": [ + "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", + "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" + ], + "version": "==1.8.1" + }, + "pyparsing": { + "hashes": [ + "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", + "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" + ], + "version": "==2.4.6" + }, + "pytest": { + "hashes": [ + "sha256:0d5fe9189a148acc3c3eb2ac8e1ac0742cb7618c084f3d228baaec0c254b318d", + "sha256:ff615c761e25eb25df19edddc0b970302d2a9091fbce0e7213298d85fb61fef6" + ], + "version": "==5.3.5" + }, + "six": { + "hashes": [ + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" + ], + "version": "==1.14.0" + }, + "wcwidth": { + "hashes": [ + "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", + "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" + ], + "version": "==0.1.8" + }, + "zipp": { + "hashes": [ + "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2", + "sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a" + ], + "version": "==3.0.0" + } + } } From a94002a85ebce73c4051d76476aabaa711206880 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Thu, 20 Feb 
2020 14:31:22 -0500 Subject: [PATCH 172/328] chore(ci): remove appveyor for now The current Appveyor setup still dates back to before the project was placed under UnitedIncome's GitHub org, and no longer works fully. Instead of trying to triage and fix, remove from the testing stack until we make a concerted effort to bring back Windows-based testing. At that time, we may pursue GitHub Actions instead. Refs: #468 Signed-off-by: Mike Fiedler --- README.md | 1 - appveyor.yml | 12 ------------ 2 files changed, 13 deletions(-) delete mode 100644 appveyor.yml diff --git a/README.md b/README.md index 28b1272d..e41d9956 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,6 @@ [![serverless](http://public.serverless.com/badges/v3.svg)](http://www.serverless.com) [![CircleCI](https://circleci.com/gh/UnitedIncome/serverless-python-requirements.svg?style=shield)](https://circleci.com/gh/UnitedIncome/serverless-python-requirements) -[![appveyor](https://ci.appveyor.com/api/projects/status/biel93xc535nxvi2?svg=true)](https://ci.appveyor.com/project/dschep/serverless-python-requirements) [![npm](https://img.shields.io/npm/v/serverless-python-requirements.svg)](https://www.npmjs.com/package/serverless-python-requirements) [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 920d78a3..00000000 --- a/appveyor.yml +++ /dev/null @@ -1,12 +0,0 @@ -version: '{build}' -init: - - cmd: python3 -m pip install -U pip - - cmd: pip3 install pipenv - - cmd: pip3 install poetry - - cmd: npm i -g serverless -build: off -test_script: - - cmd: >- - npm i - - node test.js From 79fdcdb5ff018f0cadbfe6d1f61df45b2ab68089 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2020 23:25:45 +0000 Subject: [PATCH 173/328] Update fs-extra requirement from ^7.0.0 to ^8.1.0 Updates the requirements on [fs-extra](https://github.com/jprichardson/node-fs-extra) to permit the latest version. - [Release notes](https://github.com/jprichardson/node-fs-extra/releases) - [Changelog](https://github.com/jprichardson/node-fs-extra/blob/master/CHANGELOG.md) - [Commits](https://github.com/jprichardson/node-fs-extra/compare/7.0.0...8.1.0) Signed-off-by: dependabot-preview[bot] --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index dc887bbd..d75ac754 100644 --- a/package.json +++ b/package.json @@ -54,7 +54,7 @@ "@iarna/toml": "^2.2.3", "appdirectory": "^0.1.0", "bluebird": "^3.0.6", - "fs-extra": "^7.0.0", + "fs-extra": "^8.1.0", "glob-all": "^3.1.0", "is-wsl": "^2.0.0", "jszip": "^3.1.0", From e9e15a7532fdc003a1cf46ad7ee724b7cb323afd Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Sat, 22 Feb 2020 08:38:50 -0500 Subject: [PATCH 174/328] docs: update readme style Using MarkdownLint in VSCode, this file was showing many linter errors, mostly around whitespace issues, but a few others that made it annoying enough when looking at the file, it was inconsistent and difficult to read. Also handles a few existing notes about caching. 
Refs #412 Resolves #420 Resolves #424 Signed-off-by: Mike Fiedler --- README.md | 150 +++++++++++++++++++++++++++++++++++++----------------- 1 file changed, 104 insertions(+), 46 deletions(-) diff --git a/README.md b/README.md index e41d9956..263ab009 100644 --- a/README.md +++ b/README.md @@ -8,11 +8,11 @@ A Serverless v1.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. -**Requires Serverless >= v1.34** +## Requires Serverless >= v1.34** ## Install -``` +```shell sls plugin install -n serverless-python-requirements ``` @@ -25,45 +25,52 @@ For a more in depth introduction on how to use this plugin, check out If you're on a mac, check out [these notes](#applebeersnake-mac-brew-installed-python-notes) about using python installed by brew. +## Cross compiling -## Cross compiling! Compiling non-pure-Python modules or fetching their manylinux wheels is supported on non-linux OSs via the use of Docker and the [docker-lambda](https://github.com/lambci/docker-lambda) image. To enable docker usage, add the following to your `serverless.yml`: + ```yaml custom: pythonRequirements: dockerizePip: true ``` + The dockerizePip option supports a special case in addition to booleans of `'non-linux'` which makes it dockerize only on non-linux environments. - To utilize your own Docker container instead of the default, add the following to your `serverless.yml`: + ```yaml custom: pythonRequirements: dockerImage: :tag ``` + This must be the full image name and tag to use, including the runtime specific tag if applicable. Alternatively, you can define your Docker image in your own Dockerfile and add the following to your `serverless.yml`: + ```yaml custom: pythonRequirements: dockerFile: ./path/to/Dockerfile ``` + With `Dockerfile` the path to the Dockerfile that must be in the current folder (or a subfolder). Please note the `dockerImage` and the `dockerFile` are mutually exclusive. To install requirements from private git repositories, add the following to your `serverless.yml`: + ```yaml custom: pythonRequirements: dockerizePip: true dockerSsh: true ``` + The `dockerSsh` option will mount your `$HOME/.ssh/id_rsa` and `$HOME/.ssh/known_hosts` as a volume in the docker container. If your SSH key is password protected, you can use `ssh-agent` because `$SSH_AUTH_SOCK` is also mounted & the env var set. @@ -73,6 +80,7 @@ failure. You can also pass environment variables to docker by specifying them in `dockerEnv` option: + ```yaml custom: pythonRequirements: @@ -82,21 +90,24 @@ custom: [:checkered_flag: Windows notes](#checkered_flag-windows-dockerizepip-notes) -## Pipenv support :sparkles::cake::sparkles: +## :sparkles::cake::sparkles: Pipenv support + If you include a `Pipfile` and have `pipenv` installed instead of a `requirements.txt` this will use `pipenv lock -r` to generate them. It is fully compatible with all options such as `zip` and `dockerizePip`. If you don't want this plugin to generate it for you, set the following option: + ```yaml custom: pythonRequirements: usePipenv: false ``` +## :sparkles::pencil::sparkles: Poetry support -## Poetry support :sparkles::pencil::sparkles: If you include a `pyproject.toml` and have `poetry` installed instead of a `requirements.txt` this will use `poetry export --without-hashes -f requirements.txt -o requirements.txt` to generate them. It is fully compatible with all options such as `zip` and `dockerizePip`. 
If you don't want this plugin to generate it for you, set the following option: + ```yaml custom: pythonRequirements: @@ -104,31 +115,39 @@ custom: ``` ### Poetry with git dependencies + Poetry by default generates the exported requirements.txt file with `-e` and that breaks pip with `-t` parameter -(used to install all requirements in a specific folder). In order to fix that we remove all `-e ` from the generated file but, +(used to install all requirements in a specific folder). In order to fix that we remove all `-e` from the generated file but, for that to work you need to add the git dependencies in a specific way. Instead of: + ```toml [tool.poetry.dependencies] bottle = {git = "git@github.com/bottlepy/bottle.git", tag = "0.12.16"} ``` + Use: + ```toml [tool.poetry.dependencies] bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} ``` + Or, if you have an SSH key configured: + ```toml [tool.poetry.dependencies] bottle = {git = "ssh://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} ``` ## Dealing with Lambda's size limitations + To help deal with potentially large dependencies (for example: `numpy`, `scipy` and `scikit-learn`) there is support for compressing the libraries. This does require a minor change to your code to decompress them. To enable this add the following to your `serverless.yml`: + ```yaml custom: pythonRequirements: @@ -136,28 +155,35 @@ custom: ``` and add this to your handler module before any code that imports your deps: + ```python try: import unzip_requirements except ImportError: pass ``` + ### Slim Package + _Works on non 'win32' environments: Docker, WSL are included_ To remove the tests, information and caches from the installed packages, enable the `slim` option. This will: `strip` the `.so` files, remove `__pycache__` and `dist-info` directories as well as `.pyc` and `.pyo` files. + ```yaml custom: pythonRequirements: slim: true ``` + #### Custom Removal Patterns + To specify additional directories to remove from the installed packages, define a list of patterns in the serverless config using the `slimPatterns` option and glob syntax. These paterns will be added to the default ones (`**/*.py[c|o]`, `**/__pycache__*`, `**/*.dist-info*`). Note, the glob syntax matches against whole paths, so to match a file in any directory, start your pattern with `**/`. + ```yaml custom: pythonRequirements: @@ -165,7 +191,9 @@ custom: slimPatterns: - "**/*.egg-info*" ``` + To overwrite the default patterns set the option `slimPatternsAppendDefaults` to `false` (`true` by default). + ```yaml custom: pythonRequirements: @@ -174,12 +202,14 @@ custom: slimPatterns: - "**/*.egg-info*" ``` + This will remove all folders within the installed requirements that match the names in `slimPatterns` #### Option not to strip binaries In some cases, stripping binaries leads to problems like "ELF load command address/offset not properly aligned", even when done in the Docker environment. You can still slim down the package without `*.so` files with + ```yaml custom: pythonRequirements: @@ -188,16 +218,20 @@ custom: ``` ### Lambda Layer + Another method for dealing with large dependencies is to put them into a [Lambda Layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html). Simply add the `layer` option to the configuration. + ```yaml custom: pythonRequirements: layer: true ``` + The requirements will be zipped up and a layer will be created automatically. Now just add the reference to the functions that will use the layer. 
+ ```yaml functions: hello: @@ -205,7 +239,9 @@ functions: layers: - {Ref: PythonRequirementsLambdaLayer} ``` + If the layer requires additional or custom configuration, add them onto the `layer` option. + ```yaml custom: pythonRequirements: @@ -218,11 +254,14 @@ custom: allowedAccounts: - '*' ``` + ## Omitting Packages + You can omit a package from deployment with the `noDeploy` option. Note that dependencies of omitted packages must explicitly be omitted too. This example makes it instead omit pytest: + ```yaml custom: pythonRequirements: @@ -231,20 +270,28 @@ custom: ``` ## Extra Config Options + ### Caching -You can enable two kinds of caching with this plugin which are currently both ENABLED by default. First, a download cache that will cache downloads that pip needs to compile the packages. And second, a what we call "static caching" which caches output of pip after compiling everything for your requirements file. Since generally requirements.txt files rarely change, you will often see large amounts of speed improvements when enabling the static cache feature. These caches will be shared between all your projects if no custom cacheLocation is specified (see below). + +You can enable two kinds of caching with this plugin which are currently both ENABLED by default. +First, a download cache that will cache downloads that pip needs to compile the packages. +And second, a what we call "static caching" which caches output of pip after compiling everything for your requirements file. +Since generally `requirements.txt` files rarely change, you will often see large amounts of speed improvements when enabling the static cache feature. +These caches will be shared between all your projects if no custom `cacheLocation` is specified (see below). _**Please note:** This has replaced the previously recommended usage of "--cache-dir" in the pipCmdExtraArgs_ + ```yaml custom: pythonRequirements: useDownloadCache: true useStaticCache: true ``` -_Additionally, In future versions of this plugin, both caching features will probably be enabled by default_ -### Other caching options... +### Other caching options + There are two additional options related to caching. You can specify where in your system that this plugin caches with the `cacheLocation` option. By default it will figure out automatically where based on your username and your OS to store the cache via the [appdirectory](https://www.npmjs.com/package/appdirectory) module. Additionally, you can specify how many max static caches to store with `staticCacheMaxVersions`, as a simple attempt to limit disk space usage for caching. This is DISABLED (set to 0) by default. Example: + ```yaml custom: pythonRequirements: @@ -256,7 +303,9 @@ custom: ``` ### Extra pip arguments + You can specify extra arguments [supported by pip](https://pip.pypa.io/en/stable/reference/pip_install/#options) to be passed to pip like this: + ```yaml custom: pythonRequirements: @@ -276,8 +325,8 @@ custom: dockerRunCmdExtraArgs: ["-v", "${env:PWD}:/my-app"] ``` - ### Customize requirements file name + [Some `pip` workflows involve using requirements files not named `requirements.txt`](https://www.kennethreitz.org/essays/a-better-pip-workflow). 
To support these, this plugin has the following option: @@ -289,9 +338,11 @@ custom: ``` ### Per-function requirements + If you have different python functions, with different sets of requirements, you can avoid including all the unecessary dependencies of your functions by using the following structure: -``` + +```shell ├── serverless.yml ├── function1 │ ├── requirements.txt @@ -300,7 +351,9 @@ including all the unecessary dependencies of your functions by using the followi ├── requirements.txt └── index.py ``` + With the content of your `serverless.yml` containing: + ```yml package: individually: true @@ -313,19 +366,23 @@ functions: handler: index.handler module: function2 ``` + The result is 2 zip archives, with only the requirements for function1 in the first one, and only the requirements for function2 in the second one. Quick notes on the config file: - * The `module` field must be used to tell the plugin where to find the `requirements.txt` file for + +* The `module` field must be used to tell the plugin where to find the `requirements.txt` file for each function. - * The `handler` field must not be prefixed by the folder name (already known through `module`) as +* The `handler` field must not be prefixed by the folder name (already known through `module`) as the root of the zip artifact is already the path to your function. ### Customize Python executable + Sometimes your Python executable isn't available on your `$PATH` as `python2.7` or `python3.6` (for example, windows or using pyenv). To support this, this plugin has the following option: + ```yaml custom: pythonRequirements: @@ -333,11 +390,13 @@ custom: ``` ### Vendor library directory + For certain libraries, default packaging produces too large an installation, even when zipping. In those cases it may be necessary to tailor make a version of the module. In that case you can store them in a directory and use the `vendor` option, and the plugin will copy them along with all the other dependencies to install: + ```yaml custom: pythonRequirements: @@ -348,9 +407,6 @@ functions: vendor: ./hello-vendor # The option is also available at the function level ``` - - - ## Manual invocations The `.requirements` and `requirements.zip`(if using zip support) files are left @@ -364,15 +420,17 @@ If you are using your own Python library, you have to cleanup `.requirements` on any update. You can use the following option to cleanup `.requirements` everytime you package. -``` +```yaml custom: pythonRequirements: invalidateCaches: true ``` ## :apple::beer::snake: Mac Brew installed Python notes + [Brew wilfully breaks the `--target` option with no seeming intention to fix it](https://github.com/Homebrew/brew/pull/821) which causes issues since this uses that option. There are a few easy workarounds for this: + * Install Python from [python.org](https://www.python.org/downloads/) and specify it with the [`pythonBin` option](#customize-python-executable). @@ -388,12 +446,12 @@ Also, [brew seems to cause issues with pipenv](https://github.com/dschep/lambda- so make sure you install pipenv using pip. ## :checkered_flag: Windows `dockerizePip` notes + For usage of `dockerizePip` on Windows do Step 1 only if running serverless on windows, or do both Step 1 & 2 if running serverless inside WSL. 1. [Enabling shared volume in Windows Docker Taskbar settings](https://forums.docker.com/t/docker-data-volumes-and-windows-mounts/31499/2) 1. 
[Installing the Docker client on Windows Subsystem for Linux (Ubuntu)](https://medium.com/@sebagomez/installing-the-docker-client-on-ubuntus-windows-subsystem-for-linux-612b392a44c4) - ## Native Code Dependencies During Build Some Python packages require extra OS dependencies to build successfully. To deal with this, replace the default image (`lambci/lambda:python3.6`) with a `Dockerfile` like: @@ -449,7 +507,7 @@ package: - "**" ``` -This will be very slow. Serverless adds a default `"**"` include. If you are using the `cacheLocation` parameter to this plugin, this will result in all of the cached files' names being loaded and then subsequently discarded because of the exclude pattern. To avoid this happening you can add a negated include pattern, as is observed in https://github.com/serverless/serverless/pull/5825. +This will be very slow. Serverless adds a default `"**"` include. If you are using the `cacheLocation` parameter to this plugin, this will result in all of the cached files' names being loaded and then subsequently discarded because of the exclude pattern. To avoid this happening you can add a negated include pattern, as is observed in . Use this approach instead: @@ -465,29 +523,29 @@ package: ``` ## Contributors - * [@dschep](https://github.com/dschep) - Lead developer & maintainer - * [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes - * [@abetomo](https://github.com/abetomo) - style & linting - * [@angstwad](https://github.com/angstwad) - `deploy --function` support - * [@mather](https://github.com/mather) - the cache invalidation option - * [@rmax](https://github.com/rmax) - the extra pip args option - * [@bsamuel-ui](https://github.com/bsamuel-ui) - Python 3 support - * [@suxor42](https://github.com/suxor42) - fixing permission issues with Docker on Linux - * [@mbeltran213](https://github.com/mbeltran213) - fixing docker linux -u option bug - * [@Tethik](https://github.com/Tethik) - adding usePipenv option - * [@miketheman](https://github.com/miketheman) - fixing bug with includes when using zip option - * [@wattdave](https://github.com/wattdave) - fixing bug when using `deploymentBucket` - * [@heri16](https://github.com/heri16) - fixing Docker support in Windows - * [@ryansb](https://github.com/ryansb) - package individually support - * [@cgrimal](https://github.com/cgrimal) - Private SSH Repo access in Docker, `dockerFile` option - to build a custom docker image, real per-function requirements, and the - `vendor` option - * [@kichik](https://github.com/kichik) - Imposed windows & `noDeploy` support, - switched to adding files straight to zip instead of creating symlinks, and - improved pip cache support when using docker. - * [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option - * [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) - * [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching - * [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! 
- * [@squaresurf](https://github.com/squaresurf) - adding usePoetry option - * [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support + +* [@dschep](https://github.com/dschep) - Lead developer & maintainer +* [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes +* [@abetomo](https://github.com/abetomo) - style & linting +* [@angstwad](https://github.com/angstwad) - `deploy --function` support +* [@mather](https://github.com/mather) - the cache invalidation option +* [@rmax](https://github.com/rmax) - the extra pip args option +* [@bsamuel-ui](https://github.com/bsamuel-ui) - Python 3 support +* [@suxor42](https://github.com/suxor42) - fixing permission issues with Docker on Linux +* [@mbeltran213](https://github.com/mbeltran213) - fixing docker linux -u option bug +* [@Tethik](https://github.com/Tethik) - adding usePipenv option +* [@miketheman](https://github.com/miketheman) - fixing bug with includes when using zip option +* [@wattdave](https://github.com/wattdave) - fixing bug when using `deploymentBucket` +* [@heri16](https://github.com/heri16) - fixing Docker support in Windows +* [@ryansb](https://github.com/ryansb) - package individually support +* [@cgrimal](https://github.com/cgrimal) - Private SSH Repo access in Docker, `dockerFile` option + to build a custom docker image, real per-function requirements, and the `vendor` option +* [@kichik](https://github.com/kichik) - Imposed windows & `noDeploy` support, + switched to adding files straight to zip instead of creating symlinks, and + improved pip cache support when using docker. +* [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option +* [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) +* [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching +* [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! 
+* [@squaresurf](https://github.com/squaresurf) - adding usePoetry option +* [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support From 320cda202b43180be563f64b1c45fca099fb9651 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Sat, 22 Feb 2020 10:08:19 -0500 Subject: [PATCH 175/328] chore(test): update fixture serverless.yml syntax The fixture tests files were raising a couple of warnings on incorrect YAML structure, based on the Serverless IDE: https://github.com/threadheap/serverless-ide-vscode/blob/054264da94ed17c42e6e3ba0dd662bd00114e05b/packages/serverless-framework-schema/json/aws/common/runtime.json https://github.com/threadheap/serverless-ide-vscode/blob/631165a6b5a928ef8f59ba9361af3c6b79a8c38d/packages/serverless-framework-schema/json/common/package-config.json#L13-L19 Signed-off-by: Mike Fiedler --- tests/base/serverless.yml | 2 +- tests/individually/serverless.yml | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index b356c514..0763da0a 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -42,7 +42,7 @@ functions: handler: handler.hello hello3: handler: handler.hello - runtime: nodejs6.10 + runtime: nodejs8.10 hello4: handler: fn2_handler.hello module: fn2 diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index c7cf3802..0e41cb06 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -6,7 +6,8 @@ provider: package: individually: true - exclude: 'node_modules/**' + exclude: + - 'node_modules/**' custom: pythonRequirements: dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} From fe1d10f0b849051dbd398a1d7ac57ea55c49241f Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Tue, 18 Feb 2020 16:54:05 -0500 Subject: [PATCH 176/328] Version 5.1.0 Changes: - Removes compatibility with pre-release `poetry`. (Minor version bump.) - Bump `werkzeug` version to address security issue. - Correct word splitting on `pip` commands. - circle CI is working again; much thanks to @miketheman Known risks: - Windows CI is still down --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d75ac754..ce5d4e4f 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.0.1", + "version": "5.1.0", "engines": { "node": ">=10.0" }, From f304b52aae4c8cc2079a30abf83cceab84f4da3f Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Sat, 22 Feb 2020 12:22:24 -0500 Subject: [PATCH 177/328] test: remove parser option from prettier The `parser` option exists to force prettier to conform to a particular style. The docs recommend that this only be implemented in an `overrides` section, to prevent parsing one file as another. See https://prettier.io/docs/en/configuration.html#setting-the-parserdocsenoptionshtmlparser-option Since the code is linted correctly without the override, and we want to lint Markdown, remove this configuration. 
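For reference, the `overrides` form the Prettier docs recommend looks roughly like the sketch below. This is only an illustration of the mechanism: the standalone `.prettierrc.yaml` file name and the markdown scope are assumptions, and this project keeps its (now parser-free) Prettier settings in `package.json` instead.

```yaml
# Hypothetical .prettierrc.yaml: keep the global options parser-free and
# scope a parser to a single file type via an overrides entry.
semi: true
singleQuote: true
overrides:
  - files: '*.md'
    options:
      parser: markdown
```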
Signed-off-by: Mike Fiedler --- package.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/package.json b/package.json index d75ac754..ea39efcd 100644 --- a/package.json +++ b/package.json @@ -75,7 +75,6 @@ }, "prettier": { "semi": true, - "singleQuote": true, - "parser": "babel" + "singleQuote": true } } From fab6276e0ae3d44096d0fd49bfcd98a03bfbe13e Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Sat, 22 Feb 2020 12:11:25 -0500 Subject: [PATCH 178/328] test: add markdown files to prettier scope Updates commands to catch when Markdown is malformed. Refs: https://github.com/UnitedIncome/serverless-python-requirements/pull/471#pullrequestreview-363037926 Signed-off-by: Mike Fiedler --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index ea39efcd..43203752 100644 --- a/package.json +++ b/package.json @@ -38,10 +38,10 @@ "main": "index.js", "bin": {}, "scripts": { - "ci:lint": "eslint *.js lib/*.js --format junit --output-file ~/reports/eslint.xml && prettier -c index.js lib/*.js", + "ci:lint": "eslint *.js lib/*.js --format junit --output-file ~/reports/eslint.xml && prettier -c '{.,lib}/*.{js,md}'", "test": "node test.js", - "lint": "eslint *.js lib/*.js && prettier -l index.js lib/*.js || (echo need formatting ; exit 1)", - "format": "prettier --write index.js lib/*.js test.js" + "lint": "eslint *.js lib/*.js && prettier -c '{.,lib}/*.{js,md}'", + "format": "prettier --write '{.,lib}/*.{js,md}'" }, "devDependencies": { "eslint": "^5.16.0", From c23d7a90bd6ff0842c636bcb8995cbea70563877 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Sat, 22 Feb 2020 12:24:59 -0500 Subject: [PATCH 179/328] style(docs): format example markdown via prettier Signed-off-by: Mike Fiedler --- example_native_deps/README.md | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/example_native_deps/README.md b/example_native_deps/README.md index 6d3aae9c..016ee11e 100644 --- a/example_native_deps/README.md +++ b/example_native_deps/README.md @@ -1,10 +1,12 @@ -### native compilation example +# Native compilation example + Uses `dockerizePip` to deploy numpy-scipy-sklearn demo. -### test +## Test + As in other examples, use node version >= 6. -``` +```bash cd example_native_deps npm install --prefix . serverless-python-requirements sls deploy --verbose @@ -13,10 +15,10 @@ sls invoke -f hello --verbose --log ...expected result: -``` +```json { - "numpy": "1.13.3", - "scipy": "1.0.0", - "sklearn": "0.19.1" + "numpy": "1.13.3", + "scipy": "1.0.0", + "sklearn": "0.19.1" } ``` From 4caba998a6455cc8fa39c8c85e0e91868d47b7a5 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Sat, 22 Feb 2020 12:37:05 -0500 Subject: [PATCH 180/328] style(docs): format the README.md via prettier Signed-off-by: Mike Fiedler --- README.md | 104 ++++++++++++++++++++++++++++-------------------------- 1 file changed, 54 insertions(+), 50 deletions(-) diff --git a/README.md b/README.md index 263ab009..3196d8c8 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ A Serverless v1.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. -## Requires Serverless >= v1.34** +## Requires Serverless >= v1.34 ## Install @@ -145,8 +145,8 @@ bottle = {git = "ssh://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} To help deal with potentially large dependencies (for example: `numpy`, `scipy` and `scikit-learn`) there is support for compressing the libraries. 
This does -require a minor change to your code to decompress them. To enable this add the -following to your `serverless.yml`: +require a minor change to your code to decompress them. To enable this add the +following to your `serverless.yml`: ```yaml custom: @@ -189,7 +189,7 @@ custom: pythonRequirements: slim: true slimPatterns: - - "**/*.egg-info*" + - '**/*.egg-info*' ``` To overwrite the default patterns set the option `slimPatternsAppendDefaults` to `false` (`true` by default). @@ -200,7 +200,7 @@ custom: slim: true slimPatternsAppendDefaults: false slimPatterns: - - "**/*.egg-info*" + - '**/*.egg-info*' ``` This will remove all folders within the installed requirements that match @@ -237,7 +237,7 @@ functions: hello: handler: handler.hello layers: - - {Ref: PythonRequirementsLambdaLayer} + - { Ref: PythonRequirementsLambdaLayer } ``` If the layer requires additional or custom configuration, add them onto the `layer` option. @@ -279,7 +279,7 @@ And second, a what we call "static caching" which caches output of pip after com Since generally `requirements.txt` files rarely change, you will often see large amounts of speed improvements when enabling the static cache feature. These caches will be shared between all your projects if no custom `cacheLocation` is specified (see below). - _**Please note:** This has replaced the previously recommended usage of "--cache-dir" in the pipCmdExtraArgs_ +_**Please note:** This has replaced the previously recommended usage of "--cache-dir" in the pipCmdExtraArgs_ ```yaml custom: @@ -290,7 +290,12 @@ custom: ### Other caching options -There are two additional options related to caching. You can specify where in your system that this plugin caches with the `cacheLocation` option. By default it will figure out automatically where based on your username and your OS to store the cache via the [appdirectory](https://www.npmjs.com/package/appdirectory) module. Additionally, you can specify how many max static caches to store with `staticCacheMaxVersions`, as a simple attempt to limit disk space usage for caching. This is DISABLED (set to 0) by default. Example: +There are two additional options related to caching. +You can specify where in your system that this plugin caches with the `cacheLocation` option. +By default it will figure out automatically where based on your username and your OS to store the cache via the [appdirectory](https://www.npmjs.com/package/appdirectory) module. +Additionally, you can specify how many max static caches to store with `staticCacheMaxVersions`, as a simple attempt to limit disk space usage for caching. +This is DISABLED (set to 0) by default. 
+Example: ```yaml custom: @@ -299,7 +304,6 @@ custom: useDownloadCache: true cacheLocation: '/home/user/.my_cache_goes_here' staticCacheMaxVersions: 10 - ``` ### Extra pip arguments @@ -309,8 +313,8 @@ You can specify extra arguments [supported by pip](https://pip.pypa.io/en/stable ```yaml custom: pythonRequirements: - pipCmdExtraArgs: - - --compile + pipCmdExtraArgs: + - --compile ``` ### Extra Docker arguments @@ -321,8 +325,8 @@ You can specify extra arguments to be passed to [docker build](https://docs.dock custom: pythonRequirements: dockerizePip: true - dockerBuildCmdExtraArgs: ["--build-arg", "MY_GREAT_ARG=123"] - dockerRunCmdExtraArgs: ["-v", "${env:PWD}:/my-app"] + dockerBuildCmdExtraArgs: ['--build-arg', 'MY_GREAT_ARG=123'] + dockerRunCmdExtraArgs: ['-v', '${env:PWD}:/my-app'] ``` ### Customize requirements file name @@ -372,10 +376,10 @@ the requirements for function2 in the second one. Quick notes on the config file: -* The `module` field must be used to tell the plugin where to find the `requirements.txt` file for -each function. -* The `handler` field must not be prefixed by the folder name (already known through `module`) as -the root of the zip artifact is already the path to your function. +- The `module` field must be used to tell the plugin where to find the `requirements.txt` file for + each function. +- The `handler` field must not be prefixed by the folder name (already known through `module`) as + the root of the zip artifact is already the path to your function. ### Customize Python executable @@ -431,16 +435,16 @@ custom: [Brew wilfully breaks the `--target` option with no seeming intention to fix it](https://github.com/Homebrew/brew/pull/821) which causes issues since this uses that option. There are a few easy workarounds for this: -* Install Python from [python.org](https://www.python.org/downloads/) and specify it with the -[`pythonBin` option](#customize-python-executable). +- Install Python from [python.org](https://www.python.org/downloads/) and specify it with the + [`pythonBin` option](#customize-python-executable). OR -* Create a virtualenv and activate it while using serverless. +- Create a virtualenv and activate it while using serverless. OR -* [Install Docker](https://docs.docker.com/docker-for-mac/install/) and use the [`dockerizePip` option](#cross-compiling). +- [Install Docker](https://docs.docker.com/docker-for-mac/install/) and use the [`dockerizePip` option](#cross-compiling). Also, [brew seems to cause issues with pipenv](https://github.com/dschep/lambda-decorators/issues/4#event-1418928080), so make sure you install pipenv using pip. @@ -501,10 +505,10 @@ If you wish to exclude most of the files in your project, and only include the s package: individually: false include: - - "./src/lambda_one/**" - - "./src/lambda_two/**" + - './src/lambda_one/**' + - './src/lambda_two/**' exclude: - - "**" + - '**' ``` This will be very slow. Serverless adds a default `"**"` include. If you are using the `cacheLocation` parameter to this plugin, this will result in all of the cached files' names being loaded and then subsequently discarded because of the exclude pattern. To avoid this happening you can add a negated include pattern, as is observed in . 
@@ -515,37 +519,37 @@ Use this approach instead: package: individually: false include: - - "!./**" - - "./src/lambda_one/**" - - "./src/lambda_two/**" + - '!./**' + - './src/lambda_one/**' + - './src/lambda_two/**' exclude: - - "**" + - '**' ``` ## Contributors -* [@dschep](https://github.com/dschep) - Lead developer & maintainer -* [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes -* [@abetomo](https://github.com/abetomo) - style & linting -* [@angstwad](https://github.com/angstwad) - `deploy --function` support -* [@mather](https://github.com/mather) - the cache invalidation option -* [@rmax](https://github.com/rmax) - the extra pip args option -* [@bsamuel-ui](https://github.com/bsamuel-ui) - Python 3 support -* [@suxor42](https://github.com/suxor42) - fixing permission issues with Docker on Linux -* [@mbeltran213](https://github.com/mbeltran213) - fixing docker linux -u option bug -* [@Tethik](https://github.com/Tethik) - adding usePipenv option -* [@miketheman](https://github.com/miketheman) - fixing bug with includes when using zip option -* [@wattdave](https://github.com/wattdave) - fixing bug when using `deploymentBucket` -* [@heri16](https://github.com/heri16) - fixing Docker support in Windows -* [@ryansb](https://github.com/ryansb) - package individually support -* [@cgrimal](https://github.com/cgrimal) - Private SSH Repo access in Docker, `dockerFile` option +- [@dschep](https://github.com/dschep) - Lead developer & maintainer +- [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes +- [@abetomo](https://github.com/abetomo) - style & linting +- [@angstwad](https://github.com/angstwad) - `deploy --function` support +- [@mather](https://github.com/mather) - the cache invalidation option +- [@rmax](https://github.com/rmax) - the extra pip args option +- [@bsamuel-ui](https://github.com/bsamuel-ui) - Python 3 support +- [@suxor42](https://github.com/suxor42) - fixing permission issues with Docker on Linux +- [@mbeltran213](https://github.com/mbeltran213) - fixing docker linux -u option bug +- [@Tethik](https://github.com/Tethik) - adding usePipenv option +- [@miketheman](https://github.com/miketheman) - fixing bug with includes when using zip option +- [@wattdave](https://github.com/wattdave) - fixing bug when using `deploymentBucket` +- [@heri16](https://github.com/heri16) - fixing Docker support in Windows +- [@ryansb](https://github.com/ryansb) - package individually support +- [@cgrimal](https://github.com/cgrimal) - Private SSH Repo access in Docker, `dockerFile` option to build a custom docker image, real per-function requirements, and the `vendor` option -* [@kichik](https://github.com/kichik) - Imposed windows & `noDeploy` support, +- [@kichik](https://github.com/kichik) - Imposed windows & `noDeploy` support, switched to adding files straight to zip instead of creating symlinks, and improved pip cache support when using docker. -* [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option -* [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) -* [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching -* [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! 
-* [@squaresurf](https://github.com/squaresurf) - adding usePoetry option -* [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support +- [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option +- [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) +- [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching +- [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! +- [@squaresurf](https://github.com/squaresurf) - adding usePoetry option +- [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support From fde4edea22ab0bcda860e0fb0b60f86a2b42e5ec Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Sat, 22 Feb 2020 12:40:51 -0500 Subject: [PATCH 181/328] style(tests): format test via prettier Signed-off-by: Mike Fiedler --- test.js | 58 +++++++++++++++++++++++++-------------------------------- 1 file changed, 25 insertions(+), 33 deletions(-) diff --git a/test.js b/test.js index 39745216..59cb6a76 100644 --- a/test.js +++ b/test.js @@ -140,7 +140,11 @@ test('py3.6 packages have the same hash', t => { sls(['package']); const fileHash = sha256File('.serverless/sls-py-req-test.zip'); sls(['package']); - t.equal(sha256File('.serverless/sls-py-req-test.zip'), fileHash, 'packages have the same hash'); + t.equal( + sha256File('.serverless/sls-py-req-test.zip'), + fileHash, + 'packages have the same hash' + ); t.end(); }); @@ -271,10 +275,7 @@ test( const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.includes(`boto3${sep}__init__.py`), - 'boto3 is packaged' - ); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, { skip: !canUseDocker() } @@ -652,7 +653,10 @@ test('pipenv py3.6 can package flask with default options', t => { const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.false(zipfiles.includes(`pytest${sep}__init__.py`), 'dev-package pytest is NOT packaged'); + t.false( + zipfiles.includes(`pytest${sep}__init__.py`), + 'dev-package pytest is NOT packaged' + ); t.end(); }); @@ -1617,8 +1621,11 @@ test( 'foobar has retained its executable file permissions' ); - const zipfiles_hello2 = listZipFilesWithMetaData('.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip'); - const flaskPerm = statSync('.serverless/module2/requirements/bin/flask').mode; + const zipfiles_hello2 = listZipFilesWithMetaData( + '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' + ); + const flaskPerm = statSync('.serverless/module2/requirements/bin/flask') + .mode; t.true( zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, @@ -1651,8 +1658,11 @@ test( 'foobar has retained its executable file permissions' ); - const zipfiles_hello2 = listZipFilesWithMetaData('.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip'); - const flaskPerm = statSync('.serverless/module2/requirements/bin/flask').mode; + const zipfiles_hello2 = listZipFilesWithMetaData( + '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' + ); + const flaskPerm = 
statSync('.serverless/module2/requirements/bin/flask') + .mode; t.true( zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, @@ -1681,10 +1691,7 @@ test('py3.6 uses download cache by defaul option', t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--cacheLocation=.requirements-cache', - 'package' - ]); + sls(['--cacheLocation=.requirements-cache', 'package']); t.true( pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), 'cache directoy exists' @@ -1753,10 +1760,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--dockerizePip=true', - 'package' - ]); + sls(['--dockerizePip=true', 'package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( @@ -1832,11 +1836,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--dockerizePip=true', - '--slim=true', - 'package' - ]); + sls(['--dockerizePip=true', '--slim=true', 'package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( @@ -1855,11 +1855,7 @@ test( `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); - sls([ - '--dockerizePip=true', - '--slim=true', - 'package' - ]); + sls(['--dockerizePip=true', '--slim=true', 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); t.true( @@ -1883,11 +1879,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--dockerizePip=true', - '--slim=true', - 'package' - ]); + sls(['--dockerizePip=true', '--slim=true', 'package']); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), From 00b65cc6592bdd7aa1701e7ad1f0b25ffdac3835 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Sat, 22 Feb 2020 13:53:12 -0500 Subject: [PATCH 182/328] chore(ci): move circle config to modern location At some point since this was introduced, the location of the config file changed. Refs: https://circleci.com/docs/2.0/config-intro/ Signed-off-by: Mike Fiedler --- circle.yml => .circleci/config.yml | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename circle.yml => .circleci/config.yml (100%) diff --git a/circle.yml b/.circleci/config.yml similarity index 100% rename from circle.yml rename to .circleci/config.yml From ebc7d91ae371f606a1fb94a8f65b906b27e9b370 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Sat, 22 Feb 2020 16:48:32 -0500 Subject: [PATCH 183/328] chore(dependencies): update eslint Keep the eslint version fresh, so we don't fall too far behind and have trouble updating later. 
Signed-off-by: Mike Fiedler --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ce5d4e4f..864754f9 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "format": "prettier --write index.js lib/*.js test.js" }, "devDependencies": { - "eslint": "^5.16.0", + "eslint": "^6.8.0", "prettier": "*", "cross-spawn": "*", "deasync-promise": "*", From 17cbf9c8790c9255644b118504edce4b8abfcd98 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Tue, 25 Feb 2020 08:38:18 -0500 Subject: [PATCH 184/328] fix: check if pyproject has poetry earlier During the evaluation in the package phase, we determine whether a `requirements.txt` file exists, or whether we need to generate one. Since the `pyproject.toml` file is used by poetry, but only if a stanza is contained inside the file, use the function `isPoetryProject()` along with the configuration value, thereby reducing the need for a project to have to declare a configuration override. Refs #324 Refs #344 Fixes #400 Signed-off-by: Mike Fiedler --- lib/pip.js | 9 +++++++-- lib/poetry.js | 2 +- test.js | 10 ++++++++++ tests/non_poetry_pyproject/.gitignore | 22 ++++++++++++++++++++++ tests/non_poetry_pyproject/handler.py | 5 +++++ tests/non_poetry_pyproject/package.json | 14 ++++++++++++++ tests/non_poetry_pyproject/pyproject.toml | 10 ++++++++++ tests/non_poetry_pyproject/serverless.yml | 18 ++++++++++++++++++ 8 files changed, 87 insertions(+), 3 deletions(-) create mode 100644 tests/non_poetry_pyproject/.gitignore create mode 100644 tests/non_poetry_pyproject/handler.py create mode 100644 tests/non_poetry_pyproject/package.json create mode 100644 tests/non_poetry_pyproject/pyproject.toml create mode 100644 tests/non_poetry_pyproject/serverless.yml diff --git a/lib/pip.js b/lib/pip.js index 14864794..ea969c4f 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -7,6 +7,7 @@ const { spawnSync } = require('child_process'); const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); const { getStripCommand, getStripMode, deleteFiles } = require('./slim'); +const { isPoetryProject } = require('./poetry'); const { checkForAndDeleteMaxCacheVersions, sha256Path, @@ -62,7 +63,9 @@ function generateRequirementsFile( ) { if ( options.usePoetry && - fse.existsSync(path.join(servicePath, 'pyproject.toml')) + fse.existsSync( + path.join(servicePath, 'pyproject.toml') && isPoetryProject(servicePath) + ) ) { filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), @@ -442,7 +445,9 @@ function copyVendors(vendorFolder, targetFolder, serverless) { function requirementsFileExists(servicePath, options, fileName) { if ( options.usePoetry && - fse.existsSync(path.join(servicePath, 'pyproject.toml')) + fse.existsSync( + path.join(servicePath, 'pyproject.toml') && isPoetryProject(servicePath) + ) ) { return true; } diff --git a/lib/poetry.js b/lib/poetry.js index 7f041c83..9bf0424c 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -90,4 +90,4 @@ function isPoetryProject(servicePath) { return false; } -module.exports = { pyprojectTomlToRequirements }; +module.exports = { pyprojectTomlToRequirements, isPoetryProject }; diff --git a/test.js b/test.js index 39745216..cfa31a38 100644 --- a/test.js +++ b/test.js @@ -744,6 +744,16 @@ test('non build pyproject.toml uses requirements.txt', t => { t.end(); }); +test('non poetry pyproject.toml without requirements.txt packages handler only', t => { + process.chdir('tests/non_poetry_pyproject'); + const path 
= npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); + t.end(); +}); + test('poetry py3.6 can package flask with default options', t => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); diff --git a/tests/non_poetry_pyproject/.gitignore b/tests/non_poetry_pyproject/.gitignore new file mode 100644 index 00000000..3c2369dc --- /dev/null +++ b/tests/non_poetry_pyproject/.gitignore @@ -0,0 +1,22 @@ +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Serverless +.serverless +.requirements +unzip_requirements.py diff --git a/tests/non_poetry_pyproject/handler.py b/tests/non_poetry_pyproject/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/non_poetry_pyproject/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json new file mode 100644 index 00000000..752c49c6 --- /dev/null +++ b/tests/non_poetry_pyproject/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" + } +} diff --git a/tests/non_poetry_pyproject/pyproject.toml b/tests/non_poetry_pyproject/pyproject.toml new file mode 100644 index 00000000..41932632 --- /dev/null +++ b/tests/non_poetry_pyproject/pyproject.toml @@ -0,0 +1,10 @@ +[tool.black] +line-length = 79 +py36 = true +skip-string-normalization = true +exclude = ''' +/( + \.serverless + | node_modules +)/ +''' diff --git a/tests/non_poetry_pyproject/serverless.yml b/tests/non_poetry_pyproject/serverless.yml new file mode 100644 index 00000000..2456a72a --- /dev/null +++ b/tests/non_poetry_pyproject/serverless.yml @@ -0,0 +1,18 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.6 + +plugins: + - serverless-python-requirements + +package: + exclude: + - '**/*' + include: + - handler.py + +functions: + hello: + handler: handler.hello From 2740896b381e0ae426ab4fedb903f66532f2ca9b Mon Sep 17 00:00:00 2001 From: Benjamin Samuel Date: Fri, 28 Feb 2020 10:11:45 -0500 Subject: [PATCH 185/328] Add github actions based on dschep's fork --- .github/workflows/publish.yml | 16 +++++++++++ .github/workflows/test.yml | 54 +++++++++++++++++++++++++++++++++++ 2 files changed, 70 insertions(+) create mode 100644 .github/workflows/publish.yml create mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..baf74a56 --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,16 @@ +name: Publish + +on: [release] + +jobs: + publish-npm: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-node@v1 + with: + version: 12 + registry-url: https://registry.npmjs.org/ + - run: npm publish + env: + NODE_AUTH_TOKEN: ${{secrets.npm_token}} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..454b0fac --- /dev/null +++ b/.github/workflows/test.yml @@ 
-0,0 +1,54 @@ +name: Test + +on: + pull_request: + branches: + - master + push: + branches: + - master + +jobs: + build: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + python-version: [3.6, 3.7, 2.7] + node-version: [8, 10, 12] + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + version: ${{ matrix.python-version }} + - name: Set up Node ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + version: ${{ matrix.node-version }} + - name: Install pipenv + uses: dschep/install-pipenv-action@v1 + - name: Install poetry + uses: dschep/install-poetry-action@v1.1 + with: + preview: false + + - name: Install serverless + run: npm install -g serverless + + - name: Install deps + run: npm install + + - name: Lint + run: npm run ci:lint + + - name: Test + run: | + PATH=$HOME/.poetry/bin:$PATH \ + LC_ALL=C.UTF-8 \ + LANG=C.UTF-8 \ + npm run test + env: + RUNTIME: python${{ matrix.python-version }} From f33c79acc3b54f9f6e4b60b2755ac1faf149c536 Mon Sep 17 00:00:00 2001 From: Benjamin Samuel Date: Thu, 26 Mar 2020 14:41:19 -0400 Subject: [PATCH 186/328] Fix paren issue from PR - Also restrict prettier to 1.x until we've closed out some PRs. --- lib/pip.js | 5 ++--- package.json | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index ea969c4f..ffe59797 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -63,9 +63,8 @@ function generateRequirementsFile( ) { if ( options.usePoetry && - fse.existsSync( - path.join(servicePath, 'pyproject.toml') && isPoetryProject(servicePath) - ) + fse.existsSync(path.join(servicePath, 'pyproject.toml')) && + isPoetryProject(servicePath) ) { filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), diff --git a/package.json b/package.json index 4995497f..f96f5453 100644 --- a/package.json +++ b/package.json @@ -45,7 +45,7 @@ }, "devDependencies": { "eslint": "^5.16.0", - "prettier": "*", + "prettier": "^1", "cross-spawn": "*", "deasync-promise": "*", "tape": "*" From 3cb06b6f3f8b6fa282ac7fbb3219ccee4d09d552 Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Fri, 28 Feb 2020 13:17:11 -0500 Subject: [PATCH 187/328] Making tests conditional - Restrict lint to only run once - Add skip clauses for python versions --- .github/workflows/test.yml | 2 + test.js | 910 +++++++++++++++++++++---------------- tests/base/package.json | 2 +- 3 files changed, 513 insertions(+), 401 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 454b0fac..ce7b67ca 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -43,6 +43,8 @@ jobs: - name: Lint run: npm run ci:lint + # Don't run lint a hundred times, also it breaks on windows. + if: matrix.python-version == 3.7 && matrix.node-version == 12 && matrix.os == 'ubuntu-latest' - name: Test run: | diff --git a/test.js b/test.js index 59cb6a76..984dbceb 100644 --- a/test.js +++ b/test.js @@ -14,7 +14,7 @@ const { pathExistsSync } = require('fs-extra'); const { quote } = require('shell-quote'); -const { sep } = require('path'); +const { sep, delimiter } = require('path'); const { getUserCachePath, sha256Path } = require('./lib/shared'); @@ -94,11 +94,39 @@ const test = (desc, func, opts = {}) => } }); +const executableExtension = process.platform === 'win32' ? 
'.exe' : ''; +const executableSearch = process.env.PATH.split(delimiter); +const whichCache = {}; + +const which = (name, extra) => { + const found = whichCache[name]; + if (found !== undefined) { + return found; + } + const fullName = `${name}${executableExtension}`; + for (const path of (extra || []).concat(executableSearch)) { + const fullPath = `${path}${sep}${fullName}`; + if (pathExistsSync(fullPath)) { + whichCache[name] = fullPath; + return fullPath; + } + } + whichCache[name] = null; + return null; +}; + const getPythonBin = (version = 3) => { if (![2, 3].includes(version)) throw new Error('version must be 2 or 3'); - if (process.platform === 'win32') - return `c:/python${version === 2 ? '27' : '36'}-x64/python.exe`; - else return version === 2 ? 'python2.7' : 'python3.6'; + const extra = []; + /* if (process.platform === 'win32') + * extra.push(...glob.sync(`c:/python${version}*`)); */ + const bin = which(`python${version}`, extra); + if (bin === null) throw new Error(`Can't find python${version} on PATH`); + return bin; +}; + +const hasPython = version => { + return getPythonBin(version) !== null; }; const listZipFiles = filename => @@ -148,86 +176,108 @@ test('py3.6 packages have the same hash', t => { t.end(); }); -test('py3.6 can package flask with default options', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); -}); +test( + 'py3.6 can package flask with default options', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !hasPython(3) } +); -test('py3.6 can package flask with hashes', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-hashes.txt', - 'package' - ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.end(); -}); +test( + 'py3.6 can package flask with hashes', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + `--pythonBin=${getPythonBin(3)}`, + '--fileName=requirements-w-hashes.txt', + 'package' + ]); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.end(); + }, + { skip: !hasPython(3) } +); -test('py3.6 can package flask with nested', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-nested.txt', - 'package' - ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); -}); +test( + 'py3.6 can package flask with nested', + t => { + 
process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + `--pythonBin=${getPythonBin(3)}`, + '--fileName=requirements-w-nested.txt', + 'package' + ]); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !hasPython(3) } +); -test('py3.6 can package flask with zip option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); -}); +test( + 'py3.6 can package flask with zip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); + }, + { skip: !hasPython(3) } +); -test('py3.6 can package flask with slim option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, - '__main__.py files are packaged' - ); - t.end(); -}); +test( + 'py3.6 can package flask with slim option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, '--slim=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); + }, + { skip: !hasPython(3) } +); test('py3.6 can package flask with slim & slimPatterns options', t => { process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -247,23 +297,27 @@ test('py3.6 can package flask with slim & slimPatterns options', t => { t.end(); }); -test("py3.6 doesn't package bottle with noDeploy option", t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' - ]); - sls([`--pythonBin=${getPythonBin(3)}`, 'package']); - const 
zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); -}); +test( + "py3.6 doesn't package bottle with noDeploy option", + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml' + ]); + sls([`--pythonBin=${getPythonBin(3)}`, 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); + }, + { skip: !hasPython(3) } +); test( 'py3.6 can package flask with dockerizePip option', @@ -308,7 +362,6 @@ test( 'py3.6 can package flask with slim & dockerizePip & slimPatterns options', t => { process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -336,7 +389,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--zip=true', 'package']); + // sls(['--dockerizePip=true', '--zip=true', 'package']); const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = listRequirementsZipFiles( @@ -396,53 +449,68 @@ test( { skip: !canUseDocker() } ); -test('py2.7 can package flask with default options', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(2)}`, 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); -}); +test( + 'py2.7 can package flask with default options', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(2)}`, 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !hasPython(2) } +); -test('py2.7 can package flask with slim option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(2)}`, '--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, - '__main__.py files are packaged' - ); - t.end(); -}); +test( + 'py2.7 can package flask with slim option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(2)}`, '--slim=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter(filename 
=> filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); + }, + { skip: !hasPython(2) } +); -test('py2.7 can package flask with zip option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(2)}`, '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); -}); +test( + 'py2.7 can package flask with zip option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(2)}`, '--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); + }, + { skip: !hasPython(2) } +); test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', @@ -472,26 +540,30 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(2) } ); -test("py2.7 doesn't package bottle with noDeploy option", t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' - ]); - sls([`--pythonBin=${getPythonBin(2)}`, 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); -}); +test( + "py2.7 doesn't package bottle with noDeploy option", + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml' + ]); + sls([`--pythonBin=${getPythonBin(2)}`, 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); + }, + { skip: !hasPython(2) } +); test( 'py2.7 can package flask with zip & dockerizePip option', @@ -528,7 +600,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(2) } ); test( @@ -567,7 +639,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(2) } ); test( @@ -583,7 +655,7 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(2) } ); test( @@ -611,7 +683,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(2) } ); test( @@ -642,7 +714,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(2) } ); test('pipenv py3.6 can package flask with default options', t => { @@ -701,23 +773,30 @@ test('pipenv py3.6 can package flask with slim & 
slimPatterns options', t => { t.end(); }); -test('pipenv py3.6 can package flask with zip option', t => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); -}); +test( + 'pipenv py3.6 can package flask with zip option', + t => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); + }, + { skip: !hasPython(3) } +); test("pipenv py3.6 doesn't package bottle with noDeploy option", t => { process.chdir('tests/pipenv'); @@ -801,23 +880,30 @@ test('poetry py3.6 can package flask with slim & slimPatterns options', t => { t.end(); }); -test('poetry py3.6 can package flask with zip option', t => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); -}); +test( + 'poetry py3.6 can package flask with zip option', + t => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); + }, + { skip: !hasPython(3) } +); test("poetry py3.6 doesn't package bottle with noDeploy option", t => { process.chdir('tests/poetry'); @@ -965,41 +1051,48 @@ test('py3.6 supports custom file name with fileName option', t => { t.end(); }); -test("py3.6 doesn't package bottle with zip option", t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' - ]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); - t.false( - 
zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip file' - ); - t.false( - zippedReqs.includes(`bottle.py`), - 'bottle is NOT packaged in the .requirements.zip file' - ); - t.end(); -}); +test( + "py3.6 doesn't package bottle with zip option", + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml' + ]); + sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.false( + zippedReqs.includes(`bottle.py`), + 'bottle is NOT packaged in the .requirements.zip file' + ); + t.end(); + }, + { skip: !hasPython(3) } +); test('py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', t => { process.chdir('tests/base'); @@ -1023,14 +1116,44 @@ test('py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaul }); test( - 'py3.6 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + 'py3.6 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--dockerizePip=true', + '--slim=true', + '--slimPatternsAppendDefaults=false', + 'package' + ]); + + const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !canUseDocker() } +); + +test( + 'py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', t => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ - '--dockerizePip=true', + '--runtime=python2.7', '--slim=true', '--slimPatternsAppendDefaults=false', 'package' @@ -1049,35 +1172,9 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !hasPython(2) } ); -test('py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', t => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - '--runtime=python2.7', - '--slim=true', - '--slimPatternsAppendDefaults=false', - 'package' - ]); - - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => 
filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); -}); - test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', t => { @@ -1105,7 +1202,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(2) } ); test('pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', t => { @@ -1316,154 +1413,167 @@ test('py3.6 can package flask with package individually & slim option', t => { t.end(); }); -test('py2.7 can package flask with package individually option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--individually=true', '--runtime=python2.7', 'package']); +test( + 'py2.7 can package flask with package individually option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--individually=true', '--runtime=python2.7', 'package']); - const zipfiles_hello = listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); + const zipfiles_hello = listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); - const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); + const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); - const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); + const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); - const zipfiles_hello4 = listZipFiles( - 
'.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); + const zipfiles_hello4 = listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); - t.end(); -}); + t.end(); + }, + { skip: !hasPython(2) } +); -test('py2.7 can package flask with package individually & slim option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--individually=true', '--runtime=python2.7', '--slim=true', 'package']); +test( + 'py2.7 can package flask with package individually & slim option', + t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--individually=true', + '--runtime=python2.7', + '--slim=true', + 'package' + ]); - const zipfiles_hello = listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.deepEqual( - zipfiles_hello.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); + const zipfiles_hello = listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.deepEqual( + zipfiles_hello.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); - const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.deepEqual( - zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); + const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.deepEqual( + zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); - const zipfiles_hello3 = 
listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.deepEqual( - zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); + const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.deepEqual( + zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); - const zipfiles_hello4 = listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); + const zipfiles_hello4 = listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); - t.end(); -}); + t.end(); + }, + { skip: !hasPython(2) } +); test('py3.6 can package only requirements of module', t => { process.chdir('tests/individually'); diff --git a/tests/base/package.json b/tests/base/package.json index db241a8c..752c49c6 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" } } From 4363d566f1173d1e8ca6acb4089e8537b38ceb88 Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Fri, 28 Feb 2020 14:31:34 -0500 Subject: [PATCH 188/328] Control the python versions fetched more carefully. Check that the mapping isn't empty and the python command didn't return null stdout. 
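A minimal sketch of the probing approach this patch adopts (illustrative note only, not part of the diff below; `probePython` is a hypothetical helper name): each candidate interpreter is run with a tiny version-printing snippet, and its answer is only trusted when the process exits cleanly and stdout is non-empty.

    // Sketch, assuming cross-spawn is available as in the test suite.
    const crossSpawn = require('cross-spawn');

    const probePython = bin => {
      const { stdout, status } = crossSpawn.sync(bin, [
        '-c',
        'import sys; sys.stdout.write(".".join(map(str, sys.version_info[:2])))'
      ]);
      const ver = stdout && stdout.toString().trim();
      // Accept the binary only when the command succeeded AND printed a version.
      return !status && ver ? ver : null;
    };

    // e.g. probePython('python3') -> '3.8', or null if the binary is missing or broken.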
--- .github/workflows/test.yml | 18 +++---- package.json | 3 +- test.js | 65 ++++++++++++++++---------- tests/individually/package.json | 2 +- tests/non_build_pyproject/package.json | 2 +- tests/pipenv/package.json | 2 +- tests/poetry/package.json | 2 +- 7 files changed, 53 insertions(+), 41 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ce7b67ca..678407b3 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -28,12 +28,8 @@ jobs: uses: actions/setup-node@v1 with: version: ${{ matrix.node-version }} - - name: Install pipenv - uses: dschep/install-pipenv-action@v1 - - name: Install poetry - uses: dschep/install-poetry-action@v1.1 - with: - preview: false + - name: Install pipenv, poetry + run: python${{ matrix.python-version}} -m pip install pipenv poetry setuptools - name: Install serverless run: npm install -g serverless @@ -47,10 +43,8 @@ jobs: if: matrix.python-version == 3.7 && matrix.node-version == 12 && matrix.os == 'ubuntu-latest' - name: Test - run: | - PATH=$HOME/.poetry/bin:$PATH \ - LC_ALL=C.UTF-8 \ - LANG=C.UTF-8 \ - npm run test + run: npm run test env: - RUNTIME: python${{ matrix.python-version }} + USE_PYTHON: ${{ matrix.python-version }} + LC_ALL: C.UTF-8 + LANG: C.UTF-8 diff --git a/package.json b/package.json index 4995497f..48aa4bd0 100644 --- a/package.json +++ b/package.json @@ -48,7 +48,8 @@ "prettier": "*", "cross-spawn": "*", "deasync-promise": "*", - "tape": "*" + "tape": "*", + "lodash": "^4.16.15" }, "dependencies": { "@iarna/toml": "^2.2.3", diff --git a/test.js b/test.js index 984dbceb..96eeb056 100644 --- a/test.js +++ b/test.js @@ -15,6 +15,7 @@ const { } = require('fs-extra'); const { quote } = require('shell-quote'); const { sep, delimiter } = require('path'); +const { _ } = require('lodash'); const { getUserCachePath, sha256Path } = require('./lib/shared'); @@ -94,39 +95,55 @@ const test = (desc, func, opts = {}) => } }); -const executableExtension = process.platform === 'win32' ? '.exe' : ''; -const executableSearch = process.env.PATH.split(delimiter); -const whichCache = {}; - -const which = (name, extra) => { - const found = whichCache[name]; - if (found !== undefined) { - return found; +const availablePythons = (() => { + const versions = []; + const mapping = {}; + if (process.env.USE_PYTHON) { + versions.push( + ...process.env.USE_PYTHON.split(',').map(v => v.toString().trim()) + ); + } else { + versions.push('3.8', '3.7', '3.6', '2.7'); } - const fullName = `${name}${executableExtension}`; - for (const path of (extra || []).concat(executableSearch)) { - const fullPath = `${path}${sep}${fullName}`; - if (pathExistsSync(fullPath)) { - whichCache[name] = fullPath; - return fullPath; + const exe = process.platform === 'win32' ? 
'.exe' : ''; + for (const ver of _.uniq( + _.concat( + versions, + versions.map(v => v[0]), + [''] + ) + )) { + const python = `python${ver}${exe}`; + const { stdout, stderr, status } = crossSpawn.sync(python, [ + '-c', + 'import sys; sys.stdout.write(".".join(map(str, sys.version_info[:2])))' + ]); + const realVer = stdout && stdout.toString().trim(); + if (!status && realVer && _.includes(versions, realVer)) { + for (const recommend of [realVer, realVer[0]]) { + if (!mapping[recommend]) { + mapping[recommend] = python; + } + } } } - whichCache[name] = null; - return null; -}; + if (_.isEmpty(mapping)) { + throw new Error(`No pythons available meeting ${versions}`); + } + return mapping; +})(); const getPythonBin = (version = 3) => { - if (![2, 3].includes(version)) throw new Error('version must be 2 or 3'); - const extra = []; - /* if (process.platform === 'win32') - * extra.push(...glob.sync(`c:/python${version}*`)); */ - const bin = which(`python${version}`, extra); - if (bin === null) throw new Error(`Can't find python${version} on PATH`); + const bin = availablePythons[String(version)]; + if (!bin) + throw new Error( + `No python version ${version} available, only ${availablePythons}` + ); return bin; }; const hasPython = version => { - return getPythonBin(version) !== null; + return Boolean(availablePythons[String(version)]); }; const listZipFiles = filename => diff --git a/tests/individually/package.json b/tests/individually/package.json index db241a8c..752c49c6 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" } } diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json index db241a8c..752c49c6 100644 --- a/tests/non_build_pyproject/package.json +++ b/tests/non_build_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index db241a8c..752c49c6 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" } } diff --git a/tests/poetry/package.json b/tests/poetry/package.json index db241a8c..752c49c6 100644 --- a/tests/poetry/package.json +++ b/tests/poetry/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" } } From 92a16799006e5598b3b06f583153e4eb113224a6 Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Fri, 28 Feb 2020 15:18:40 -0500 Subject: [PATCH 189/328] Check what versions are on the path --- .github/workflows/test.yml | 25 +++++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 678407b3..008fcffe 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -23,13 +23,30 
@@ jobs: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 with: - version: ${{ matrix.python-version }} + python-version: ${{ matrix.python-version }} + - name: Set up Node ${{ matrix.node-version }} uses: actions/setup-node@v1 with: - version: ${{ matrix.node-version }} - - name: Install pipenv, poetry - run: python${{ matrix.python-version}} -m pip install pipenv poetry setuptools + node-version: ${{ matrix.node-version }} + + - name: Check python is available + run: | + echo "PYTHON" + python --version + echo "PYTHONX.X" + python${{ matrix.python-version}} --version + echo "PIP" + pip --version + echo "PIPX.X" + pip${{matrix.python-version}} --version + echo "ok" + + - name: Install setuptools + run: python${{ matrix.python-version }} -m pip install setuptools wheel + + - name: Install pipenv / poetry + run: python${{ matrix.python-version }} -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless From efa04390edc802260920ce2cebc6c23eae3ad8ab Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Fri, 28 Feb 2020 15:32:47 -0500 Subject: [PATCH 190/328] Platform specific steps; deasync is dumping core - Use tape-promise and async tests. --- .github/workflows/test.yml | 16 +- package.json | 2 +- test.js | 1946 +++++++++++++++++++----------------- 3 files changed, 1043 insertions(+), 921 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 008fcffe..9ac4274b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -30,23 +30,15 @@ jobs: with: node-version: ${{ matrix.node-version }} - - name: Check python is available + - name: Check python version run: | - echo "PYTHON" python --version - echo "PYTHONX.X" - python${{ matrix.python-version}} --version - echo "PIP" - pip --version - echo "PIPX.X" - pip${{matrix.python-version}} --version - echo "ok" - name: Install setuptools - run: python${{ matrix.python-version }} -m pip install setuptools wheel + run: python -m pip install setuptools wheel - name: Install pipenv / poetry - run: python${{ matrix.python-version }} -m pip install pipenv poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless @@ -62,6 +54,6 @@ jobs: - name: Test run: npm run test env: - USE_PYTHON: ${{ matrix.python-version }} + USE_PYTHON: python LC_ALL: C.UTF-8 LANG: C.UTF-8 diff --git a/package.json b/package.json index 48aa4bd0..66d538e5 100644 --- a/package.json +++ b/package.json @@ -47,8 +47,8 @@ "eslint": "^5.16.0", "prettier": "*", "cross-spawn": "*", - "deasync-promise": "*", "tape": "*", + "tape-promise": "*", "lodash": "^4.16.15" }, "dependencies": { diff --git a/test.js b/test.js index 96eeb056..a27c05ee 100644 --- a/test.js +++ b/test.js @@ -1,20 +1,20 @@ const crossSpawn = require('cross-spawn'); -const deasync = require('deasync-promise'); const glob = require('glob-all'); const JSZip = require('jszip'); const sha256File = require('sha256-file'); -const tape = require('tape'); +const tape = require('tape-promise/tape'); const { chmodSync, removeSync, readFileSync, + readFile, copySync, writeFileSync, statSync, pathExistsSync } = require('fs-extra'); const { quote } = require('shell-quote'); -const { sep, delimiter } = require('path'); +const { sep } = require('path'); const { _ } = require('lodash'); const { getUserCachePath, sha256Path } = require('./lib/shared'); @@ -56,6 +56,7 @@ const perl = mkCommand('perl'); const setup = () => { removeSync(getUserCachePath()); + 
process.chdir(initialWorkingDir); }; const teardown = () => { @@ -83,10 +84,10 @@ const teardown = () => { }; const test = (desc, func, opts = {}) => - tape.test(desc, opts, t => { + tape.test(desc, opts, async t => { setup(); try { - func(t); + await func(t); } catch (err) { t.fail(err); t.end(); @@ -96,31 +97,37 @@ const test = (desc, func, opts = {}) => }); const availablePythons = (() => { - const versions = []; + const binaries = []; const mapping = {}; if (process.env.USE_PYTHON) { - versions.push( + binaries.push( ...process.env.USE_PYTHON.split(',').map(v => v.toString().trim()) ); } else { - versions.push('3.8', '3.7', '3.6', '2.7'); + binaries.push( + 'python', + 'python3', + 'python3.6', + 'python36', + 'python3.7', + 'python37', + 'python3.8', + 'python38', + 'python2', + 'python2.7', + 'python27' + ); } const exe = process.platform === 'win32' ? '.exe' : ''; - for (const ver of _.uniq( - _.concat( - versions, - versions.map(v => v[0]), - [''] - ) - )) { - const python = `python${ver}${exe}`; - const { stdout, stderr, status } = crossSpawn.sync(python, [ + for (const bin of binaries) { + const python = `${bin}${exe}`; + const { stdout, status } = crossSpawn.sync(python, [ '-c', 'import sys; sys.stdout.write(".".join(map(str, sys.version_info[:2])))' ]); - const realVer = stdout && stdout.toString().trim(); - if (!status && realVer && _.includes(versions, realVer)) { - for (const recommend of [realVer, realVer[0]]) { + const ver = stdout && stdout.toString().trim(); + if (!status && ver) { + for (const recommend of [ver, ver.split('.')[0]]) { if (!mapping[recommend]) { mapping[recommend] = python; } @@ -128,17 +135,14 @@ const availablePythons = (() => { } } if (_.isEmpty(mapping)) { - throw new Error(`No pythons available meeting ${versions}`); + throw new Error('No pythons found'); } return mapping; })(); -const getPythonBin = (version = 3) => { +const getPythonBin = version => { const bin = availablePythons[String(version)]; - if (!bin) - throw new Error( - `No python version ${version} available, only ${availablePythons}` - ); + if (!bin) throw new Error(`No python version ${version} available`); return bin; }; @@ -146,14 +150,23 @@ const hasPython = version => { return Boolean(availablePythons[String(version)]); }; -const listZipFiles = filename => - Object.keys(deasync(new JSZip().loadAsync(readFileSync(filename))).files); -const listZipFilesWithMetaData = filename => - Object(deasync(new JSZip().loadAsync(readFileSync(filename))).files); -const listRequirementsZipFiles = filename => { - const zip = deasync(new JSZip().loadAsync(readFileSync(filename))); - const reqsBuffer = deasync(zip.file('.requirements.zip').async('nodebuffer')); - const reqsZip = deasync(new JSZip().loadAsync(reqsBuffer)); +const listZipFiles = async function(filename) { + const file = await readFile(filename); + const zip = await new JSZip().loadAsync(file); + return Object.keys(zip.files); +}; + +const listZipFilesWithMetaData = async function(filename) { + const file = await readFile(filename); + const zip = await new JSZip().loadAsync(file); + return Object(zip.files); +}; + +const listRequirementsZipFiles = async function(filename) { + const file = await readFile(filename); + const zip = await new JSZip().loadAsync(file); + const reqsBuffer = await zip.file('.requirements.zip').async('nodebuffer'); + const reqsZip = await new JSZip().loadAsync(reqsBuffer); return Object.keys(reqsZip.files); }; @@ -167,40 +180,48 @@ const canUseDocker = () => { return result.status === 0; }; -test('default 
pythonBin can package flask with default options', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); -}); +test( + 'default pythonBin can package flask with default options', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !hasPython(3.6) } +); -test('py3.6 packages have the same hash', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const fileHash = sha256File('.serverless/sls-py-req-test.zip'); - sls(['package']); - t.equal( - sha256File('.serverless/sls-py-req-test.zip'), - fileHash, - 'packages have the same hash' - ); - t.end(); -}); +test( + 'py3.6 packages have the same hash', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const fileHash = sha256File('.serverless/sls-py-req-test.zip'); + sls(['package']); + t.equal( + sha256File('.serverless/sls-py-req-test.zip'), + fileHash, + 'packages have the same hash' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); test( 'py3.6 can package flask with default options', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([`--pythonBin=${getPythonBin(3)}`, 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); @@ -210,7 +231,7 @@ test( test( 'py3.6 can package flask with hashes', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -219,7 +240,7 @@ test( '--fileName=requirements-w-hashes.txt', 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.end(); }, @@ -228,7 +249,7 @@ test( test( 'py3.6 can package flask with nested', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -237,7 +258,7 @@ test( '--fileName=requirements-w-nested.txt', 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); @@ -247,12 +268,12 @@ test( test( 'py3.6 can package flask with zip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = 
await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), 'zipped requirements are packaged' @@ -272,12 +293,12 @@ test( test( 'py3.6 can package flask with slim option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([`--pythonBin=${getPythonBin(3)}`, '--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( zipfiles.filter(filename => filename.endsWith('.pyc')), @@ -293,30 +314,34 @@ test( { skip: !hasPython(3) } ); -test('py3.6 can package flask with slim & slimPatterns options', t => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); -}); +test( + 'py3.6 can package flask with slim & slimPatterns options', + async t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); test( "py3.6 doesn't package bottle with noDeploy option", - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -328,7 +353,7 @@ test( 'serverless.yml' ]); sls([`--pythonBin=${getPythonBin(3)}`, 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); t.end(); @@ -338,28 +363,28 @@ test( test( 'py3.6 can package flask with dockerizePip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls(['--dockerizePip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); test( 'py3.6 can package flask with slim & dockerizePip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls(['--dockerizePip=true', '--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + 
const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( zipfiles.filter(filename => filename.endsWith('.pyc')), @@ -372,18 +397,18 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); test( 'py3.6 can package flask with slim & dockerizePip & slimPatterns options', - t => { + async t => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); sls(['--dockerizePip=true', '--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( zipfiles.filter(filename => filename.endsWith('.pyc')), @@ -397,19 +422,19 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); test( 'py3.6 can package flask with zip & dockerizePip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - // sls(['--dockerizePip=true', '--zip=true', 'package']); + sls(['--dockerizePip=true', '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = listRequirementsZipFiles( + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' ); t.true( @@ -430,19 +455,19 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); test( 'py3.6 can package flask with zip & slim & dockerizePip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls(['--dockerizePip=true', '--zip=true', '--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = listRequirementsZipFiles( + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' ); t.true( @@ -463,17 +488,17 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); test( 'py2.7 can package flask with default options', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([`--pythonBin=${getPythonBin(2)}`, 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); @@ -483,12 +508,12 @@ test( test( 'py2.7 can package flask with slim option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([`--pythonBin=${getPythonBin(2)}`, '--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( zipfiles.filter(filename => filename.endsWith('.pyc')), @@ -506,12 +531,12 @@ test( test( 'py2.7 can package flask with zip option', - t => { + async t => { 
process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([`--pythonBin=${getPythonBin(2)}`, '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), 'zipped requirements are packaged' @@ -531,7 +556,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', - t => { + async t => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -543,7 +568,7 @@ test( '--slim=true', 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( zipfiles.filter(filename => filename.endsWith('.pyc')), @@ -562,7 +587,7 @@ test( test( "py2.7 doesn't package bottle with noDeploy option", - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -574,7 +599,7 @@ test( 'serverless.yml' ]); sls([`--pythonBin=${getPythonBin(2)}`, 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); t.end(); @@ -584,7 +609,7 @@ test( test( 'py2.7 can package flask with zip & dockerizePip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -595,8 +620,8 @@ test( 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = listRequirementsZipFiles( + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' ); t.true( @@ -622,7 +647,7 @@ test( test( 'py2.7 can package flask with zip & slim & dockerizePip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -634,8 +659,8 @@ test( 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = listRequirementsZipFiles( + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' ); t.true( @@ -661,13 +686,13 @@ test( test( 'py2.7 can package flask with dockerizePip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([`--pythonBin=${getPythonBin(2)}`, '--dockerizePip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); @@ -677,7 +702,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -687,7 +712,7 @@ test( '--slim=true', 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( zipfiles.filter(filename => filename.endsWith('.pyc')), @@ -705,7 +730,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', - t => { + async t => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -717,7 +742,7 @@ test( '--slim=true', 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( zipfiles.filter(filename => filename.endsWith('.pyc')), @@ -734,12 +759,12 @@ test( { skip: !canUseDocker() || !hasPython(2) } ); -test('pipenv py3.6 can package flask with default options', t => { +test('pipenv py3.6 can package flask with default options', async t => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); sls(['package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.false( @@ -749,162 +774,63 @@ test('pipenv py3.6 can package flask with default options', t => { t.end(); }); -test('pipenv py3.6 can package flask with slim option', t => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, - '__main__.py files are packaged' - ); - t.end(); -}); - -test('pipenv py3.6 can package flask with slim & slimPatterns options', t => { - process.chdir('tests/pipenv'); - - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); -}); - test( - 'pipenv py3.6 can package flask with zip option', - t => { + 'pipenv py3.6 can package flask with slim option', + async t => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' + sls(['--slim=true', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' ); t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - 
"flask isn't packaged on its own" + zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' ); t.end(); }, - { skip: !hasPython(3) } + { skip: !hasPython(3.6) } ); -test("pipenv py3.6 doesn't package bottle with noDeploy option", t => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' - ]); - sls(['package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); -}); - -test('non build pyproject.toml uses requirements.txt', t => { - process.chdir('tests/non_build_pyproject'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); -}); - -test('poetry py3.6 can package flask with default options', t => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); -}); - -test('poetry py3.6 can package flask with slim option', t => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, - '__main__.py files are packaged' - ); - t.end(); -}); - -test('poetry py3.6 can package flask with slim & slimPatterns options', t => { - process.chdir('tests/poetry'); +test( + 'pipenv py3.6 can package flask with slim & slimPatterns options', + async t => { + process.chdir('tests/pipenv'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); -}); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT 
packaged' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); test( - 'poetry py3.6 can package flask with zip option', - t => { - process.chdir('tests/poetry'); + 'pipenv py3.6 can package flask with zip option', + async t => { + process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), 'zipped requirements are packaged' @@ -919,171 +845,327 @@ test( ); t.end(); }, - { skip: !hasPython(3) } + { skip: !hasPython(3.6) } ); -test("poetry py3.6 doesn't package bottle with noDeploy option", t => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' - ]); - sls(['package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); -}); - -test('py3.6 can package flask with zip option and no explicit include', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); - perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); - sls(['--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); -}); - -test('py3.6 can package lambda-decorators using vendor option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--vendor=./vendor`, 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged' - ); - t.end(); -}); - test( - "Don't nuke execute perms", - t => { - process.chdir('tests/base'); + "pipenv py3.6 doesn't package bottle with noDeploy option", + async t => { + process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); - const perm = '775'; - npm(['i', path]); perl([ '-p', '-i.bak', '-e', - 's/(handler.py.*$)/$1\n - foobar/', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml' ]); - writeFileSync(`foobar`, ''); - chmodSync(`foobar`, perm); - sls(['--vendor=./vendor', 'package']); - - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + sls(['package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged' - ); - t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); - - const zipfiles_with_metadata = listZipFilesWithMetaData( - 
'.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles_with_metadata['foobar'].unixPermissions - .toString(8) - .slice(3, 6) === perm, - 'foobar has retained its executable file permissions' - ); - - const flaskPerm = statSync('.serverless/requirements/bin/flask').mode; - t.true( - zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm, - 'bin/flask has retained its executable file permissions' - ); - + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); t.end(); }, - { skip: process.platform === 'win32' } + { skip: !hasPython(3.6) } ); -test('py3.6 can package flask in a project with a space in it', t => { - copySync('tests/base', 'tests/base with a space'); - process.chdir('tests/base with a space'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); -}); - test( - 'py3.6 can package flask in a project with a space in it with docker', - t => { - copySync('tests/base', 'tests/base with a space'); - process.chdir('tests/base with a space'); + 'non build pyproject.toml uses requirements.txt', + async t => { + process.chdir('tests/non_build_pyproject'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + sls(['package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() } + { skip: !hasPython(3.6) } ); -test('py3.6 supports custom file name with fileName option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - writeFileSync('puck', 'requests'); - npm(['i', path]); - sls(['--fileName=puck', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes(`requests${sep}__init__.py`), - 'requests is packaged' - ); - t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged'); - t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); - t.end(); -}); - test( - "py3.6 doesn't package bottle with zip option", - t => { - process.chdir('tests/base'); + 'poetry py3.6 can package flask with default options', + async t => { + process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + sls(['package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !hasPython(3.6) } +); + +test( + 'poetry py3.6 can package flask with slim option', + async t => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => 
filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); + +test( + 'poetry py3.6 can package flask with slim & slimPatterns options', + async t => { + process.chdir('tests/poetry'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); + +test( + 'poetry py3.6 can package flask with zip option', + async t => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); + }, + { skip: !hasPython(3) } +); + +test( + "poetry py3.6 doesn't package bottle with noDeploy option", + async t => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml' + ]); + sls(['package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); + }, + { skip: !hasPython(3.6) } +); + +test( + 'py3.6 can package flask with zip option and no explicit include', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); + perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); + sls(['--zip=true', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); + +test( + 'py3.6 can package lambda-decorators using vendor option', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([`--vendor=./vendor`, 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); + +test( + "Don't nuke execute 
perms", + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + const perm = '775'; + + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(handler.py.*$)/$1\n - foobar/', + 'serverless.yml' + ]); + writeFileSync(`foobar`, ''); + chmodSync(`foobar`, perm); + sls(['--vendor=./vendor', 'package']); + + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); + + const zipfiles_with_metadata = await listZipFilesWithMetaData( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles_with_metadata['foobar'].unixPermissions + .toString(8) + .slice(3, 6) === perm, + 'foobar has retained its executable file permissions' + ); + + const flaskPerm = statSync('.serverless/requirements/bin/flask').mode; + t.true( + zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm, + 'bin/flask has retained its executable file permissions' + ); + + t.end(); + }, + { skip: process.platform === 'win32' || !hasPython(3.6) } +); + +test( + 'py3.6 can package flask in a project with a space in it', + async t => { + copySync('tests/base', 'tests/base with a space'); + process.chdir('tests/base with a space'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !hasPython(3.6) } +); + +test( + 'py3.6 can package flask in a project with a space in it with docker', + async t => { + copySync('tests/base', 'tests/base with a space'); + process.chdir('tests/base with a space'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--dockerizePip=true', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !canUseDocker() || !hasPython(3.6) } +); + +test( + 'py3.6 supports custom file name with fileName option', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + writeFileSync('puck', 'requests'); + npm(['i', path]); + sls(['--fileName=puck', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes(`requests${sep}__init__.py`), + 'requests is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged' + ); + t.false( + zipfiles.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT packaged' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); + +test( + "py3.6 doesn't package bottle with zip option", + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml' ]); sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = listRequirementsZipFiles( + const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' ); t.true( @@ -1111,30 +1193,34 @@ test( { skip: !hasPython(3) } ); -test('py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', t => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); +test( + 'py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', + async t => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); -}); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); test( 'py3.6 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - t => { + async t => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1146,7 +1232,7 @@ test( 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, @@ -1159,12 +1245,12 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); test( 'py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', - t => { + async t => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1176,7 +1262,7 @@ test( 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, @@ -1189,12 +1275,12 @@ test( ); t.end(); }, - { skip: !hasPython(2) } + { skip: !hasPython(2.7) } ); test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - t => { + async t => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1206,7 +1292,7 @@ test( '--slimPatternsAppendDefaults=false', 'package' ]); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, @@ -1219,226 +1305,242 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(2) } + { skip: !canUseDocker() || !hasPython(2.7) } ); -test('pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', t => { - process.chdir('tests/pipenv'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); +test( + 'pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', + async t => { + process.chdir('tests/pipenv'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); -}); + sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); -test('poetry py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', t => { - process.chdir('tests/poetry'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); +test( + 'poetry py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', + async t => { + process.chdir('tests/poetry'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); -}); + sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter(filename => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); -test('py3.6 can package flask with package individually option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - 
sls(['--individually=true', 'package']); +test( + 'py3.6 can package flask with package individually option', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--individually=true', 'package']); - const zipfiles_hello = listZipFiles('.serverless/hello.zip'); - t.false( - zipfiles_hello.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello' - ); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.false( + zipfiles_hello.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello' + ); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); - const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); - t.false( - zipfiles_hello2.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.false( + zipfiles_hello2.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); - const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); - t.false( - zipfiles_hello3.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello3' - ); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.false( + zipfiles_hello3.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello3' + ); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); - const zipfiles_hello4 = listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.false( - zipfiles_hello4.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello4' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 
'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.false( + zipfiles_hello4.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello4' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); - t.end(); -}); + t.end(); + }, + { skip: !hasPython(3.6) } +); -test('py3.6 can package flask with package individually & slim option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--individually=true', '--slim=true', 'package']); +test( + 'py3.6 can package flask with package individually & slim option', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--individually=true', '--slim=true', 'package']); - const zipfiles_hello = listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.deepEqual( - zipfiles_hello.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.deepEqual( + zipfiles_hello.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); - const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.deepEqual( - zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.deepEqual( + zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); - const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); - 
t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.deepEqual( - zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.deepEqual( + zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); - const zipfiles_hello4 = listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - t.deepEqual( - zipfiles_hello4.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello4' - ); + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.deepEqual( + zipfiles_hello4.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello4' + ); - t.end(); -}); + t.end(); + }, + { skip: !hasPython(3.6) } +); test( 'py2.7 can package flask with package individually option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls(['--individually=true', '--runtime=python2.7', 'package']); - const zipfiles_hello = listZipFiles('.serverless/hello.zip'); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.true( zipfiles_hello.includes('handler.py'), 'handler.py is packaged in function hello' @@ -1452,7 +1554,7 @@ test( 'dataclasses is NOT packaged in function hello' ); - const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); t.true( zipfiles_hello2.includes('handler.py'), 'handler.py is packaged in function hello2' @@ -1466,7 +1568,7 @@ test( 'dataclasses is NOT packaged in function hello2' ); - const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); t.true( zipfiles_hello3.includes('handler.py'), 'handler.py is packaged in function hello3' @@ -1480,7 +1582,98 @@ test( 'dataclasses is NOT packaged in function hello3' ); - const zipfiles_hello4 = listZipFiles( + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + 
zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + + t.end(); + }, + { skip: !hasPython(2.7) } +); + +test( + 'py2.7 can package flask with package individually & slim option', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + '--individually=true', + '--runtime=python2.7', + '--slim=true', + 'package' + ]); + + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.deepEqual( + zipfiles_hello.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.deepEqual( + zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.deepEqual( + zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( '.serverless/fn2-sls-py-req-test-dev-hello4.zip' ); t.true( @@ -1498,80 +1691,133 @@ test( t.end(); }, - { skip: !hasPython(2) } + { skip: !hasPython(2.7) } ); test( - 'py2.7 can package flask with package individually & slim option', - t => { + 'py3.6 can package only requirements of module', + async t => { + process.chdir('tests/individually'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + + const zipfiles_hello = await listZipFiles( + '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' + ); + t.true( + zipfiles_hello.includes('handler1.py'), + 'handler1.py is packaged at root level in function hello1' + ); + t.false( + zipfiles_hello.includes('handler2.py'), + 'handler2.py is NOT packaged at root level in function hello1' + ); + t.true( + zipfiles_hello.includes(`pyaml${sep}__init__.py`), + 'pyaml is packaged in function hello1' + ); + t.true( + zipfiles_hello.includes(`boto3${sep}__init__.py`), + 'boto3 is packaged in function hello1' + ); + t.false( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello1' + ); + + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.false( + zipfiles_hello2.includes('handler1.py'), + 'handler1.py is NOT packaged at root level in function hello2' + ); + 
t.false( + zipfiles_hello2.includes(`pyaml${sep}__init__.py`), + 'pyaml is NOT packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + t.end(); + }, + { skip: !hasPython(3.6) } +); + +test( + 'py3.6 can package lambda-decorators using vendor and invidiually option', + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--individually=true', - '--runtime=python2.7', - '--slim=true', - 'package' - ]); + sls(['--individually=true', '--vendor=./vendor', 'package']); - const zipfiles_hello = listZipFiles('.serverless/hello.zip'); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.true( zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.deepEqual( - zipfiles_hello.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello' + 'handler.py is packaged at root level in function hello' ); t.true( zipfiles_hello.includes(`flask${sep}__init__.py`), 'flask is packaged in function hello' ); + t.true( + zipfiles_hello.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged in function hello' + ); t.false( zipfiles_hello.includes(`dataclasses.py`), 'dataclasses is NOT packaged in function hello' ); - const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); t.true( zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.deepEqual( - zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello2' + 'handler.py is packaged at root level in function hello2' ); t.true( zipfiles_hello2.includes(`flask${sep}__init__.py`), 'flask is packaged in function hello2' ); + t.true( + zipfiles_hello2.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged in function hello2' + ); t.false( zipfiles_hello2.includes(`dataclasses.py`), 'dataclasses is NOT packaged in function hello2' ); - const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); t.true( zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.deepEqual( - zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello3' + 'handler.py is packaged at root level in function hello3' ); t.false( zipfiles_hello3.includes(`flask${sep}__init__.py`), 'flask is NOT packaged in function hello3' ); + t.false( + zipfiles_hello3.includes(`lambda_decorators.py`), + 'lambda_decorators.py is NOT packaged in function hello3' + ); t.false( zipfiles_hello3.includes(`dataclasses.py`), 'dataclasses is NOT packaged in function hello3' ); - const zipfiles_hello4 = listZipFiles( + const zipfiles_hello4 = await listZipFiles( '.serverless/fn2-sls-py-req-test-dev-hello4.zip' ); t.true( @@ -1586,150 +1832,14 @@ test( zipfiles_hello4.includes(`flask${sep}__init__.py`), 'flask is NOT packaged in function hello4' ); - t.end(); }, - { skip: !hasPython(2) } + { skip: !hasPython(3.6) } ); -test('py3.6 can package only requirements of module', t => { - process.chdir('tests/individually'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - - const 
zipfiles_hello = listZipFiles( - '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' - ); - t.true( - zipfiles_hello.includes('handler1.py'), - 'handler1.py is packaged at root level in function hello1' - ); - t.false( - zipfiles_hello.includes('handler2.py'), - 'handler2.py is NOT packaged at root level in function hello1' - ); - t.true( - zipfiles_hello.includes(`pyaml${sep}__init__.py`), - 'pyaml is packaged in function hello1' - ); - t.true( - zipfiles_hello.includes(`boto3${sep}__init__.py`), - 'boto3 is packaged in function hello1' - ); - t.false( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello1' - ); - - const zipfiles_hello2 = listZipFiles( - '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' - ); - t.true( - zipfiles_hello2.includes('handler2.py'), - 'handler2.py is packaged at root level in function hello2' - ); - t.false( - zipfiles_hello2.includes('handler1.py'), - 'handler1.py is NOT packaged at root level in function hello2' - ); - t.false( - zipfiles_hello2.includes(`pyaml${sep}__init__.py`), - 'pyaml is NOT packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - - t.end(); -}); - -test('py3.6 can package lambda-decorators using vendor and invidiually option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--individually=true', '--vendor=./vendor', 'package']); - - const zipfiles_hello = listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged at root level in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const zipfiles_hello2 = listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged at root level in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged at root level in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`lambda_decorators.py`), - 'lambda_decorators.py is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - 
t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - t.end(); -}); - test( "Don't nuke execute perms when using individually", - t => { + async t => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); const perm = '775'; @@ -1739,7 +1849,7 @@ test( npm(['i', path]); sls(['package']); - const zipfiles_hello1 = listZipFilesWithMetaData('.serverless/hello1.zip'); + const zipfiles_hello1 = await listZipFilesWithMetaData('.serverless/hello1.zip'); t.true( zipfiles_hello1['module1/foobar'].unixPermissions @@ -1748,7 +1858,7 @@ test( 'foobar has retained its executable file permissions' ); - const zipfiles_hello2 = listZipFilesWithMetaData( + const zipfiles_hello2 = await listZipFilesWithMetaData( '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' ); const flaskPerm = statSync('.serverless/module2/requirements/bin/flask') @@ -1761,12 +1871,12 @@ test( t.end(); }, - { skip: process.platform === 'win32' } + { skip: process.platform === 'win32' || !hasPython(3.6) } ); test( "Don't nuke execute perms when using individually w/docker", - t => { + async t => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); const perm = '775'; @@ -1776,7 +1886,7 @@ test( npm(['i', path]); sls(['--dockerizePip=true', 'package']); - const zipfiles_hello = listZipFilesWithMetaData('.serverless/hello1.zip'); + const zipfiles_hello = await listZipFilesWithMetaData('.serverless/hello1.zip'); t.true( zipfiles_hello['module1/foobar'].unixPermissions @@ -1785,7 +1895,7 @@ test( 'foobar has retained its executable file permissions' ); - const zipfiles_hello2 = listZipFilesWithMetaData( + const zipfiles_hello2 = await listZipFilesWithMetaData( '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' ); const flaskPerm = statSync('.serverless/module2/requirements/bin/flask') @@ -1798,37 +1908,45 @@ test( t.end(); }, - { skip: !canUseDocker() || process.platform === 'win32' } + { skip: !canUseDocker() || process.platform === 'win32' || !hasPython(3.6) } ); -test('py3.6 uses download cache by default option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const cachepath = getUserCachePath(); - t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'cache directoy exists' - ); - t.end(); -}); +test( + 'py3.6 uses download cache by default option', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const cachepath = getUserCachePath(); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'cache directoy exists' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); -test('py3.6 uses download cache by defaul option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--cacheLocation=.requirements-cache', 'package']); - t.true( - pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), - 'cache directoy exists' - ); - t.end(); -}); +test( + 'py3.6 uses download cache by defaul option', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--cacheLocation=.requirements-cache', 'package']); + t.true( + pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), + 'cache directoy exists' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); test( 'py3.6 uses download cache with 
dockerizePip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1840,12 +1958,12 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); test( 'py3.6 uses download cache with dockerizePip by default option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1860,30 +1978,34 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); -test('py3.6 uses static and download cache', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const cachepath = getUserCachePath(); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'http exists in download-cache' - ); - t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), - 'flask exists in static-cache' - ); - t.end(); -}); +test( + 'py3.6 uses static and download cache', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + t.true( + pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + 'flask exists in static-cache' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); test( 'py3.6 uses static and download cache with dockerizePip option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1900,66 +2022,74 @@ test( ); t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); -test('py3.6 uses static cache', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const cachepath = getUserCachePath(); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), - 'flask exists in static-cache' - ); - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` - ), - '.completed_requirements exists in static-cache' - ); +test( + 'py3.6 uses static cache', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + t.true( + pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); - // py3.6 checking that static cache actually pulls from cache (by poisoning it) - writeFileSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, - 'injected new file into static cache folder' - ); - sls(['package']); + // py3.6 checking that static cache actually pulls from cache (by poisoning it) + writeFileSync( + `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, + 'injected new file into static cache folder' + ); 
+ sls(['package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('injected_file_is_bad_form'), - "static cache is really used when running 'sls package' again" - ); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('injected_file_is_bad_form'), + "static cache is really used when running 'sls package' again" + ); - t.end(); -}); + t.end(); + }, + { skip: !hasPython(3.6) } +); -test('py3.6 uses static cache with cacheLocation option', t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - const cachepath = '.requirements-cache'; - sls([`--cacheLocation=${cachepath}`, 'package']); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), - 'flask exists in static-cache' - ); - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` - ), - '.completed_requirements exists in static-cache' - ); - t.end(); -}); +test( + 'py3.6 uses static cache with cacheLocation option', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + const cachepath = '.requirements-cache'; + sls([`--cacheLocation=${cachepath}`, 'package']); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + t.true( + pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); test( 'py3.6 uses static cache with dockerizePip & slim option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1984,7 +2114,7 @@ test( ); sls(['--dockerizePip=true', '--slim=true', 'package']); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('injected_file_is_bad_form'), "static cache is really used when running 'sls package' again" @@ -1997,12 +2127,12 @@ test( t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); test( 'py3.6 uses download cache with dockerizePip & slim option', - t => { + async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2013,7 +2143,7 @@ test( 'http exists in download-cache' ); - const zipfiles = listZipFiles('.serverless/sls-py-req-test.zip'); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( zipfiles.filter(filename => filename.endsWith('.pyc')), @@ -2023,5 +2153,5 @@ test( t.end(); }, - { skip: !canUseDocker() } + { skip: !canUseDocker() || !hasPython(3.6) } ); From d0a3d6d3ebb37361959784ec60a14e6bfe1c3568 Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Thu, 26 Mar 2020 16:53:36 -0400 Subject: [PATCH 191/328] Get some more output when things are failing. 
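The extra output comes from gating diagnostics on `SLS_DEBUG` in lib/docker.js
and from hardening the test teardown. A minimal sketch of that gating pattern,
assuming the plugin's usual `serverless.cli.log` logger; the `logDebug` helper
name is illustrative only, the hunk below inlines the same check:

    // Emit diagnostics only when SLS_DEBUG is set, so normal packaging runs
    // stay quiet while a failing CI run can be retried with SLS_DEBUG=t.
    function logDebug(serverless, message) {
      if (process.env.SLS_DEBUG) {
        serverless.cli.log(message);
      }
    }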
--- .eslintrc.js | 18 ++++ .github/workflows/test.yml | 31 ++++--- lib/docker.js | 8 +- package.json | 2 +- test.js | 164 ++++++++++++++++++++++++------------- 5 files changed, 148 insertions(+), 75 deletions(-) create mode 100644 .eslintrc.js diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 00000000..3a92d73e --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,18 @@ +module.exports = { + env: { + commonjs: true, + es6: true, + node: true + }, + extends: 'eslint:recommended', + globals: { + Atomics: 'readonly', + SharedArrayBuffer: 'readonly' + }, + parserOptions: { + ecmaVersion: 2018 + }, + rules: { + 'no-console': 0 + } +}; diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9ac4274b..764da3f4 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,12 +1,12 @@ name: Test -on: - pull_request: - branches: - - master - push: - branches: - - master +on: [push] + # pull_request: + # branches: + # - master + # push: + # branches: + # - master jobs: build: @@ -15,8 +15,8 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macOS-latest] - python-version: [3.6, 3.7, 2.7] - node-version: [8, 10, 12] + python-version: [3.6, 2.7] # 3.7 + node-version: [12] # 8, 10 steps: - uses: actions/checkout@v2 @@ -35,7 +35,7 @@ jobs: python --version - name: Install setuptools - run: python -m pip install setuptools wheel + run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry run: python -m pip install pipenv poetry @@ -49,7 +49,7 @@ jobs: - name: Lint run: npm run ci:lint # Don't run lint a hundred times, also it breaks on windows. - if: matrix.python-version == 3.7 && matrix.node-version == 12 && matrix.os == 'ubuntu-latest' + if: matrix.python-version == 3.6 && matrix.node-version == 12 && matrix.os == 'ubuntu-latest' - name: Test run: npm run test @@ -57,3 +57,12 @@ jobs: USE_PYTHON: python LC_ALL: C.UTF-8 LANG: C.UTF-8 + if: matrix.os != 'macOS-latest' + + - name: Test (Mac) + run: npm run test + env: + USE_PYTHON: python + LC_ALL: en_US.UTF-8 + LANG: en_US.UTF-8 + if: matrix.os == 'macOS-latest' diff --git a/lib/docker.js b/lib/docker.js index 46bbe028..1132f1de 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -73,6 +73,7 @@ function findTestFile(servicePath) { * @return {boolean} */ function tryBindPath(serverless, bindPath, testFile) { + const debug = process.env.SLS_DEBUG; const options = [ 'run', '--rm', @@ -83,13 +84,12 @@ function tryBindPath(serverless, bindPath, testFile) { `/test/${testFile}` ]; try { + if (debug) serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); const ps = dockerCommand(options); - if (process.env.SLS_DEBUG) { - serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); - serverless.cli.log(ps.stdout.trim()); - } + if (debug) serverless.cli.log(ps.stdout.trim()); return ps.stdout.trim() === `/test/${testFile}`; } catch (err) { + if (debug) serverless.cli.log(`Finding bindPath failed with ${err}`); return false; } } diff --git a/package.json b/package.json index 66d538e5..7c3f407a 100644 --- a/package.json +++ b/package.json @@ -45,7 +45,7 @@ }, "devDependencies": { "eslint": "^5.16.0", - "prettier": "*", + "prettier": "^1", "cross-spawn": "*", "tape": "*", "tape-promise": "*", diff --git a/test.js b/test.js index a27c05ee..6375ec7a 100644 --- a/test.js +++ b/test.js @@ -6,7 +6,6 @@ const tape = require('tape-promise/tape'); const { chmodSync, removeSync, - readFileSync, readFile, copySync, writeFileSync, @@ -27,11 +26,7 @@ const mkCommand = cmd => (args, 
options = {}) => { args, Object.assign( { - env: Object.assign( - process.env, - { SLS_DEBUG: 't' }, - process.env.CI ? { LC_ALL: 'C.UTF-8', LANG: 'C.UTF-8' } : {} - ) + env: Object.assign({}, process.env, { SLS_DEBUG: 't' }) }, options ) @@ -60,39 +55,58 @@ const setup = () => { }; const teardown = () => { - [ - 'puck', - 'puck2', - 'puck3', - 'node_modules', - '.serverless', - '.requirements.zip', - '.requirements-cache', - 'foobar', - 'package-lock.json', - 'slimPatterns.yml', - 'serverless.yml.bak', - 'module1/foobar', - getUserCachePath(), - ...glob.sync('serverless-python-requirements-*.tgz') - ].map(path => removeSync(path)); - if (!process.cwd().endsWith('base with a space')) { - git(['checkout', 'serverless.yml']); + const cwd = process.cwd(); + if (!cwd.startsWith(initialWorkingDir)) { + throw new Error(`Somehow cd'd into ${cwd}`); + } + if (cwd != initialWorkingDir) { + [ + 'puck', + 'puck2', + 'puck3', + 'node_modules', + '.serverless', + '.requirements.zip', + '.requirements-cache', + 'foobar', + 'package-lock.json', + 'slimPatterns.yml', + 'serverless.yml.bak', + 'module1/foobar', + getUserCachePath(), + ...glob.sync('serverless-python-requirements-*.tgz') + ].map(path => removeSync(path)); + if (!cwd.endsWith('base with a space')) { + try { + git(['checkout', 'serverless.yml']); + } catch (err) { + console.error( + `At ${cwd} failed to checkout 'serverless.yml' with ${err}.` + ); + throw err; + } + } + process.chdir(initialWorkingDir); } - process.chdir(initialWorkingDir); removeSync('tests/base with a space'); }; const test = (desc, func, opts = {}) => tape.test(desc, opts, async t => { setup(); + let ended = false; try { await func(t); + ended = true; } catch (err) { t.fail(err); - t.end(); } finally { - teardown(); + try { + teardown(); + } catch (err) { + t.fail(err); + } + if (!ended) t.end(); } }); @@ -104,6 +118,7 @@ const availablePythons = (() => { ...process.env.USE_PYTHON.split(',').map(v => v.toString().trim()) ); } else { + // For running outside of CI binaries.push( 'python', 'python3', @@ -180,6 +195,9 @@ const canUseDocker = () => { return result.status === 0; }; +// Skip if running on these platforms. 
+const brokenOn = (...platforms) => platforms.indexOf(process.platform) != -1; + test( 'default pythonBin can package flask with default options', async t => { @@ -497,7 +515,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(2)}`, 'package']); + sls([`--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', 'package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -512,7 +530,12 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(2)}`, '--slim=true', 'package']); + sls([ + `--pythonBin=${getPythonBin(2)}`, + '--runtime=python2.7', + '--slim=true', + 'package' + ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -535,7 +558,12 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(2)}`, '--zip=true', 'package']); + sls([ + `--pythonBin=${getPythonBin(2)}`, + '--runtime=python2.7', + '--zip=true', + 'package' + ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -564,6 +592,7 @@ test( npm(['i', path]); sls([ `--pythonBin=${getPythonBin(2)}`, + '--runtime=python2.7', '--dockerizePip=true', '--slim=true', 'package' @@ -582,7 +611,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(2) } + { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } ); test( @@ -598,7 +627,7 @@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml' ]); - sls([`--pythonBin=${getPythonBin(2)}`, 'package']); + sls([`--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', 'package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); @@ -615,6 +644,7 @@ test( npm(['i', path]); sls([ `--pythonBin=${getPythonBin(2)}`, + '--runtime=python2.7', '--dockerizePip=true', '--zip=true', 'package' @@ -642,7 +672,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(2) } + { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } ); test( @@ -653,6 +683,7 @@ test( npm(['i', path]); sls([ `--pythonBin=${getPythonBin(2)}`, + '--runtime=python2.7', '--dockerizePip=true', '--zip=true', '--slim=true', @@ -681,7 +712,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(2) } + { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } ); test( @@ -690,14 +721,19 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(2)}`, '--dockerizePip=true', 'package']); + sls([ + `--pythonBin=${getPythonBin(2)}`, + '--runtime=python2.7', + '--dockerizePip=true', + 'package' + ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() || !hasPython(2) } + { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } ); test( @@ -708,6 +744,7 @@ test( 
npm(['i', path]); sls([ `--pythonBin=${getPythonBin(2)}`, + '--runtime=python2.7', '--dockerizePip=true', '--slim=true', 'package' @@ -725,7 +762,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(2) } + { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } ); test( @@ -738,6 +775,7 @@ test( npm(['i', path]); sls([ `--pythonBin=${getPythonBin(2)}`, + '--runtime=python2.7', '--dockerizePip=true', '--slim=true', 'package' @@ -756,23 +794,27 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(2) } + { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } ); -test('pipenv py3.6 can package flask with default options', async t => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.false( - zipfiles.includes(`pytest${sep}__init__.py`), - 'dev-package pytest is NOT packaged' - ); - t.end(); -}); +test( + 'pipenv py3.6 can package flask with default options', + async t => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.false( + zipfiles.includes(`pytest${sep}__init__.py`), + 'dev-package pytest is NOT packaged' + ); + t.end(); + }, + { skip: !hasPython(3.6) } +); test( 'pipenv py3.6 can package flask with slim option', @@ -1275,7 +1317,7 @@ test( ); t.end(); }, - { skip: !hasPython(2.7) } + { skip: !hasPython(2.7) || brokenOn('win32') } ); test( @@ -1849,7 +1891,9 @@ test( npm(['i', path]); sls(['package']); - const zipfiles_hello1 = await listZipFilesWithMetaData('.serverless/hello1.zip'); + const zipfiles_hello1 = await listZipFilesWithMetaData( + '.serverless/hello1.zip' + ); t.true( zipfiles_hello1['module1/foobar'].unixPermissions @@ -1886,7 +1930,9 @@ test( npm(['i', path]); sls(['--dockerizePip=true', 'package']); - const zipfiles_hello = await listZipFilesWithMetaData('.serverless/hello1.zip'); + const zipfiles_hello = await listZipFilesWithMetaData( + '.serverless/hello1.zip' + ); t.true( zipfiles_hello['module1/foobar'].unixPermissions @@ -1921,7 +1967,7 @@ test( const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'cache directoy exists' + 'cache directory exists' ); t.end(); }, @@ -1937,7 +1983,7 @@ test( sls(['--cacheLocation=.requirements-cache', 'package']); t.true( pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), - 'cache directoy exists' + 'cache directory exists' ); t.end(); }, @@ -1954,7 +2000,7 @@ test( const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'cache directoy exists' + 'cache directory exists' ); t.end(); }, @@ -1974,7 +2020,7 @@ test( ]); t.true( pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), - 'cache directoy exists' + 'cache directory exists' ); t.end(); }, From 5d956e938bdd7d5429ed482e2c7216062a9c8513 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Sat, 22 Feb 2020 16:48:32 -0500 Subject: [PATCH 192/328] chore(dependencies): update eslint Keep the eslint version 
fresh, so we don't fall too far behind and have trouble updating later. Signed-off-by: Mike Fiedler --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 7c3f407a..567f7e30 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "format": "prettier --write '{.,lib}/*.{js,md}'" }, "devDependencies": { - "eslint": "^5.16.0", + "eslint": "^6.8.0", "prettier": "^1", "cross-spawn": "*", "tape": "*", From 5048c6d4072b8c6e6b83be1d6f0f11b92d0e0882 Mon Sep 17 00:00:00 2001 From: Mike Fiedler Date: Tue, 25 Feb 2020 08:38:18 -0500 Subject: [PATCH 193/328] fix: check if pyproject has poetry earlier During the evaluation in the package phase, we determine whether a `requirements.txt` file exists, or whether we need to generate one. Since the `pyproject.toml` file is used by poetry, but only if a stanza is contained inside the file, use the function `isPoetryProject()` along with the configuration value, thereby reducing the need for a project to have to declare a configuration override. Refs #324 Refs #344 Fixes #400 Signed-off-by: Mike Fiedler --- lib/pip.js | 9 +++++++-- lib/poetry.js | 2 +- tests/non_poetry_pyproject/.gitignore | 22 ++++++++++++++++++++++ tests/non_poetry_pyproject/handler.py | 5 +++++ tests/non_poetry_pyproject/package.json | 14 ++++++++++++++ tests/non_poetry_pyproject/pyproject.toml | 10 ++++++++++ tests/non_poetry_pyproject/serverless.yml | 18 ++++++++++++++++++ 7 files changed, 77 insertions(+), 3 deletions(-) create mode 100644 tests/non_poetry_pyproject/.gitignore create mode 100644 tests/non_poetry_pyproject/handler.py create mode 100644 tests/non_poetry_pyproject/package.json create mode 100644 tests/non_poetry_pyproject/pyproject.toml create mode 100644 tests/non_poetry_pyproject/serverless.yml diff --git a/lib/pip.js b/lib/pip.js index 14864794..ea969c4f 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -7,6 +7,7 @@ const { spawnSync } = require('child_process'); const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); const { getStripCommand, getStripMode, deleteFiles } = require('./slim'); +const { isPoetryProject } = require('./poetry'); const { checkForAndDeleteMaxCacheVersions, sha256Path, @@ -62,7 +63,9 @@ function generateRequirementsFile( ) { if ( options.usePoetry && - fse.existsSync(path.join(servicePath, 'pyproject.toml')) + fse.existsSync( + path.join(servicePath, 'pyproject.toml') && isPoetryProject(servicePath) + ) ) { filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), @@ -442,7 +445,9 @@ function copyVendors(vendorFolder, targetFolder, serverless) { function requirementsFileExists(servicePath, options, fileName) { if ( options.usePoetry && - fse.existsSync(path.join(servicePath, 'pyproject.toml')) + fse.existsSync( + path.join(servicePath, 'pyproject.toml') && isPoetryProject(servicePath) + ) ) { return true; } diff --git a/lib/poetry.js b/lib/poetry.js index 7f041c83..9bf0424c 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -90,4 +90,4 @@ function isPoetryProject(servicePath) { return false; } -module.exports = { pyprojectTomlToRequirements }; +module.exports = { pyprojectTomlToRequirements, isPoetryProject }; diff --git a/tests/non_poetry_pyproject/.gitignore b/tests/non_poetry_pyproject/.gitignore new file mode 100644 index 00000000..3c2369dc --- /dev/null +++ b/tests/non_poetry_pyproject/.gitignore @@ -0,0 +1,22 @@ +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ 
+.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Serverless +.serverless +.requirements +unzip_requirements.py diff --git a/tests/non_poetry_pyproject/handler.py b/tests/non_poetry_pyproject/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/non_poetry_pyproject/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json new file mode 100644 index 00000000..752c49c6 --- /dev/null +++ b/tests/non_poetry_pyproject/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" + } +} diff --git a/tests/non_poetry_pyproject/pyproject.toml b/tests/non_poetry_pyproject/pyproject.toml new file mode 100644 index 00000000..41932632 --- /dev/null +++ b/tests/non_poetry_pyproject/pyproject.toml @@ -0,0 +1,10 @@ +[tool.black] +line-length = 79 +py36 = true +skip-string-normalization = true +exclude = ''' +/( + \.serverless + | node_modules +)/ +''' diff --git a/tests/non_poetry_pyproject/serverless.yml b/tests/non_poetry_pyproject/serverless.yml new file mode 100644 index 00000000..2456a72a --- /dev/null +++ b/tests/non_poetry_pyproject/serverless.yml @@ -0,0 +1,18 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.6 + +plugins: + - serverless-python-requirements + +package: + exclude: + - '**/*' + include: + - handler.py + +functions: + hello: + handler: handler.hello From 91eca4974a800cfed2dad7a2c967efcca3f6dac5 Mon Sep 17 00:00:00 2001 From: Benjamin Samuel Date: Thu, 26 Mar 2020 14:41:19 -0400 Subject: [PATCH 194/328] Fix paren issue from PR - Also restrict prettier to 1.x until we've closed out some PRs. --- lib/pip.js | 5 ++--- package.json | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index ea969c4f..ffe59797 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -63,9 +63,8 @@ function generateRequirementsFile( ) { if ( options.usePoetry && - fse.existsSync( - path.join(servicePath, 'pyproject.toml') && isPoetryProject(servicePath) - ) + fse.existsSync(path.join(servicePath, 'pyproject.toml')) && + isPoetryProject(servicePath) ) { filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), diff --git a/package.json b/package.json index 567f7e30..7c3f407a 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "format": "prettier --write '{.,lib}/*.{js,md}'" }, "devDependencies": { - "eslint": "^6.8.0", + "eslint": "^5.16.0", "prettier": "^1", "cross-spawn": "*", "tape": "*", From 5986055b99ad8eb43d090ba93c15be5d60663c79 Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Thu, 26 Mar 2020 20:02:42 -0400 Subject: [PATCH 195/328] Missed a bindPath failure. Remove circle config. Skip DockerizePip tests on windows. 
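
[Editor's note] The Windows skips referenced above rely on the `brokenOn` helper added to test.js earlier in this series, combined with the test wrapper's skip option. A minimal sketch of that pattern, assuming the `test()`, `canUseDocker()` and `hasPython()` helpers already present in test.js (the test name here is illustrative only):

```js
// Sketch of the platform-skip pattern used by the dockerizePip tests.
// brokenOn() checks the current process.platform against a list of
// platforms on which the test is known to break.
const brokenOn = (...platforms) => platforms.indexOf(process.platform) != -1;

test(
  'some dockerizePip test',
  async (t) => {
    // ... package the service and inspect the resulting zip ...
    t.end();
  },
  // Skipped when Docker or python2 is unavailable, and always on Windows.
  { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') }
);
```
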
--- .circleci/config.yml | 47 -------------------------------- .github/workflows/lint.yml | 25 +++++++++++++++++ .github/workflows/test.yml | 5 ---- README.md | 4 +-- lib/pip.js | 5 ++-- test.js | 56 +++++++++++++++++++++++++++----------- 6 files changed, 69 insertions(+), 73 deletions(-) delete mode 100644 .circleci/config.yml create mode 100644 .github/workflows/lint.yml diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 25f419c4..00000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,47 +0,0 @@ -version: 2 -jobs: - build: - working_directory: ~/sls-py-req - machine: - image: circleci/classic - steps: - - checkout - # Build python3.6. gross. (based on my gist here: https://git.io/vxMFG ) - - run: | - sudo apt-get update - sudo apt-get install build-essential tk-dev libncurses5-dev libncursesw5-dev libreadline6-dev libdb5.3-dev libgdbm-dev libsqlite3-dev libssl-dev libbz2-dev libexpat1-dev liblzma-dev zlib1g-dev - wget https://www.python.org/ftp/python/3.6.5/Python-3.6.5.tar.xz - tar xf Python-3.6.5.tar.xz - cd Python-3.6.5 - ./configure - make - sudo make altinstall - # other deps - - run: sudo apt -y update && sudo apt -y install python-pip python2.7 curl unzip - # upgrade python3.6 pip to latest - - run: sudo python3.6 -m pip install -U pip - # instal pipenv - - run: sudo python3.6 -m pip install pipenv pip-tools - # install poetry - - run: | - curl https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py -o get-poetry.py - python get-poetry.py --preview --yes - rm get-poetry.py - # install nodejs - - run: curl -sL https://deb.nodesource.com/setup_10.x | sudo bash - && sudo apt -y install nodejs - # install serverless & depcheck - - run: npm install -g serverless - # install deps - - run: npm i - # lint: - - run: npm run ci:lint - # test! - - run: | - export PATH="$HOME/.poetry/bin:$PATH" - export LC_ALL=C.UTF-8 - export LANG=C.UTF-8 - npm run test - - store_test_results: - path: ~/reports - - store_artifacts: - path: ~/reports diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..cb9c708b --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +name: Lint + +on: [push] + +jobs: + build: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + node-version: [12] + steps: + - uses: actions/checkout@v2 + + - name: Set up Node ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install deps + run: npm install + + - name: Lint + run: npm run ci:lint diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 764da3f4..e5613000 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -46,11 +46,6 @@ jobs: - name: Install deps run: npm install - - name: Lint - run: npm run ci:lint - # Don't run lint a hundred times, also it breaks on windows. 
- if: matrix.python-version == 3.6 && matrix.node-version == 12 && matrix.os == 'ubuntu-latest' - - name: Test run: npm run test env: diff --git a/README.md b/README.md index 3196d8c8..1c90881c 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,11 @@ # Serverless Python Requirements [![serverless](http://public.serverless.com/badges/v3.svg)](http://www.serverless.com) -[![CircleCI](https://circleci.com/gh/UnitedIncome/serverless-python-requirements.svg?style=shield)](https://circleci.com/gh/UnitedIncome/serverless-python-requirements) +![Github Actions](https://github.com/UnitedIncome/serverless-python-requirements/workflows/Test/badge.svg) [![npm](https://img.shields.io/npm/v/serverless-python-requirements.svg)](https://www.npmjs.com/package/serverless-python-requirements) [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) -A Serverless v1.x plugin to automatically bundle dependencies from +A Serverless v5.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. ## Requires Serverless >= v1.34 diff --git a/lib/pip.js b/lib/pip.js index ffe59797..7d1777a4 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -444,9 +444,8 @@ function copyVendors(vendorFolder, targetFolder, serverless) { function requirementsFileExists(servicePath, options, fileName) { if ( options.usePoetry && - fse.existsSync( - path.join(servicePath, 'pyproject.toml') && isPoetryProject(servicePath) - ) + fse.existsSync(path.join(servicePath, 'pyproject.toml')) && + isPoetryProject(servicePath) ) { return true; } diff --git a/test.js b/test.js index 6375ec7a..f5fe9560 100644 --- a/test.js +++ b/test.js @@ -91,8 +91,18 @@ const teardown = () => { removeSync('tests/base with a space'); }; +const testFilter = (() => { + const elems = process.argv.slice(2); // skip ['node', 'test.js'] + if (elems.length) { + return desc => + elems.some(text => desc.search(text) != -1) ? 
tape.test : tape.test.skip; + } else { + return () => tape.test; + } +})(); + const test = (desc, func, opts = {}) => - tape.test(desc, opts, async t => { + testFilter(desc)(desc, opts, async t => { setup(); let ended = false; try { @@ -262,7 +272,7 @@ test( t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.end(); }, - { skip: !hasPython(3) } + { skip: !hasPython(3) || brokenOn('win32') } ); test( @@ -392,7 +402,7 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -415,7 +425,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -440,7 +450,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -473,7 +483,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -506,7 +516,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -927,6 +937,20 @@ test( { skip: !hasPython(3.6) } ); +test( + 'non poetry pyproject.toml without requirements.txt packages handler only', + async t => { + process.chdir('tests/non_poetry_pyproject'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package']); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); + t.end(); + }, + { skip: !hasPython(3.6) } +); + test( 'poetry py3.6 can package flask with default options', async t => { @@ -1163,7 +1187,7 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -1287,7 +1311,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -1347,7 +1371,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(2.7) } + { skip: !canUseDocker() || !hasPython(2.7) || brokenOn('win32') } ); test( @@ -1975,7 +1999,7 @@ test( ); test( - 'py3.6 uses download cache by defaul option', + 'py3.6 uses download cache by default', async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2004,7 +2028,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -2024,7 +2048,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -2068,7 +2092,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -2173,7 +2197,7 @@ test( t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); test( @@ -2199,5 +2223,5 @@ test( t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) } + { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); From c04effa13724ef4e4259d135a37192cb1660cf8a Mon Sep 17 00:00:00 2001 From: 
Benjamin Samuel Date: Mon, 4 May 2020 15:02:31 -0400 Subject: [PATCH 196/328] PR comments: - Drop .eslintrc.js in favor of package.json - Remove aspirational comments. - Correct README. - Don't increase usage of lodash. --- .eslintrc.js | 18 ------------------ .github/workflows/test.yml | 10 ++-------- README.md | 2 +- package.json | 7 +++++++ test.js | 3 +-- 5 files changed, 11 insertions(+), 29 deletions(-) delete mode 100644 .eslintrc.js diff --git a/.eslintrc.js b/.eslintrc.js deleted file mode 100644 index 3a92d73e..00000000 --- a/.eslintrc.js +++ /dev/null @@ -1,18 +0,0 @@ -module.exports = { - env: { - commonjs: true, - es6: true, - node: true - }, - extends: 'eslint:recommended', - globals: { - Atomics: 'readonly', - SharedArrayBuffer: 'readonly' - }, - parserOptions: { - ecmaVersion: 2018 - }, - rules: { - 'no-console': 0 - } -}; diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e5613000..f25cddd0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,12 +1,6 @@ name: Test on: [push] - # pull_request: - # branches: - # - master - # push: - # branches: - # - master jobs: build: @@ -15,8 +9,8 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macOS-latest] - python-version: [3.6, 2.7] # 3.7 - node-version: [12] # 8, 10 + python-version: [3.6, 2.7] + node-version: [12] steps: - uses: actions/checkout@v2 diff --git a/README.md b/README.md index 1c90881c..87acbbe3 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ [![npm](https://img.shields.io/npm/v/serverless-python-requirements.svg)](https://www.npmjs.com/package/serverless-python-requirements) [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) -A Serverless v5.x plugin to automatically bundle dependencies from +A Serverless v1.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. 
## Requires Serverless >= v1.34 diff --git a/package.json b/package.json index 7c3f407a..b5b3c394 100644 --- a/package.json +++ b/package.json @@ -70,8 +70,15 @@ "eslintConfig": { "extends": "eslint:recommended", "env": { + "commonjs": true, "node": true, "es6": true + }, + "parserOptions": { + "ecmaVersion": 2018 + }, + "rules": { + "no-console": "off" } }, "prettier": { diff --git a/test.js b/test.js index f5fe9560..a623067d 100644 --- a/test.js +++ b/test.js @@ -14,7 +14,6 @@ const { } = require('fs-extra'); const { quote } = require('shell-quote'); const { sep } = require('path'); -const { _ } = require('lodash'); const { getUserCachePath, sha256Path } = require('./lib/shared'); @@ -159,7 +158,7 @@ const availablePythons = (() => { } } } - if (_.isEmpty(mapping)) { + if (!Object.entries(mapping).length) { throw new Error('No pythons found'); } return mapping; From 4e7b815f2ec98764220dd49b8fc7612fe2406441 Mon Sep 17 00:00:00 2001 From: Daniel Rice Date: Thu, 7 May 2020 22:31:01 -0600 Subject: [PATCH 197/328] Remove line associated with old poetry version --- lib/poetry.js | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/lib/poetry.js b/lib/poetry.js index 108f90dd..0607b9d2 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -42,11 +42,7 @@ function pyprojectTomlToRequirements() { const editableFlag = new RegExp(/^-e /gm); const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); - const requirementsContents = - res.stdout.toString().trim() || // As of poetry 1.0.0b1, requirements.txt is printed to standard output when the -o option is not specified. - fse.readFileSync(sourceRequirements, { - encoding: 'utf-8' - }); + const requirementsContents = fse.readFileSync(sourceRequirements, { encoding: 'utf-8' }); if (requirementsContents.match(editableFlag)) { this.serverless.cli.log( From 0c33a6f180175a905ef0c549cf34c1f488b77583 Mon Sep 17 00:00:00 2001 From: Daniel Rice Date: Fri, 8 May 2020 08:27:54 -0600 Subject: [PATCH 198/328] prettier --- lib/poetry.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index 0607b9d2..345224d3 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -42,7 +42,9 @@ function pyprojectTomlToRequirements() { const editableFlag = new RegExp(/^-e /gm); const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); - const requirementsContents = fse.readFileSync(sourceRequirements, { encoding: 'utf-8' }); + const requirementsContents = fse.readFileSync(sourceRequirements, { + encoding: "utf-8" + }); if (requirementsContents.match(editableFlag)) { this.serverless.cli.log( From 8bd26240916a7431e3822a65e8a4f6b7d8c39526 Mon Sep 17 00:00:00 2001 From: April King Date: Mon, 11 May 2020 14:34:41 -0500 Subject: [PATCH 199/328] Fix typo in README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3196d8c8..935d6a34 100644 --- a/README.md +++ b/README.md @@ -180,7 +180,7 @@ custom: To specify additional directories to remove from the installed packages, define a list of patterns in the serverless config using the `slimPatterns` -option and glob syntax. These paterns will be added to the default ones (`**/*.py[c|o]`, `**/__pycache__*`, `**/*.dist-info*`). +option and glob syntax. These patterns will be added to the default ones (`**/*.py[c|o]`, `**/__pycache__*`, `**/*.dist-info*`). Note, the glob syntax matches against whole paths, so to match a file in any directory, start your pattern with `**/`. 
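
[Editor's note] Since slimming comes up repeatedly in this series, a rough sketch of what the pattern matching amounts to may help. This is not the plugin's exact code (the real logic lives in lib/slim.js, reformatted later in this series); `deleteSlimmedFiles` and its arguments are illustrative only:

```js
// Rough sketch: user-supplied slimPatterns are appended to the defaults and
// every match under the installed requirements folder is deleted.
const glob = require('glob-all');
const fse = require('fs-extra');

const defaultPatterns = ['**/*.py[c|o]', '**/__pycache__*', '**/*.dist-info*'];

function deleteSlimmedFiles(requirementsFolder, slimPatterns = []) {
  const patterns = defaultPatterns
    .concat(slimPatterns)
    .map((p) => `${requirementsFolder}/${p}`);
  // glob-all accepts an array of patterns and matches whole paths,
  // which is why custom patterns usually start with '**/'.
  glob.sync(patterns, { dot: true }).forEach((file) => fse.removeSync(file));
}
```
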
From f7936fe1388660a512553d31c6b599ccc05a37b2 Mon Sep 17 00:00:00 2001 From: Benjamin Samuel Date: Fri, 29 May 2020 15:01:20 -0400 Subject: [PATCH 200/328] Per PR comments: Don't use USE_PYTHON in CI, prefer that for developer environments.. --- .github/workflows/test.yml | 2 -- test.js | 10 ---------- 2 files changed, 12 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f25cddd0..662c731b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -43,7 +43,6 @@ jobs: - name: Test run: npm run test env: - USE_PYTHON: python LC_ALL: C.UTF-8 LANG: C.UTF-8 if: matrix.os != 'macOS-latest' @@ -51,7 +50,6 @@ jobs: - name: Test (Mac) run: npm run test env: - USE_PYTHON: python LC_ALL: en_US.UTF-8 LANG: en_US.UTF-8 if: matrix.os == 'macOS-latest' diff --git a/test.js b/test.js index a623067d..c04da5de 100644 --- a/test.js +++ b/test.js @@ -130,16 +130,6 @@ const availablePythons = (() => { // For running outside of CI binaries.push( 'python', - 'python3', - 'python3.6', - 'python36', - 'python3.7', - 'python37', - 'python3.8', - 'python38', - 'python2', - 'python2.7', - 'python27' ); } const exe = process.platform === 'win32' ? '.exe' : ''; From d36a29ff0f30f2ca2d12d23a99e24c5cd0bb0cfa Mon Sep 17 00:00:00 2001 From: Benjamin Samuel Date: Fri, 29 May 2020 15:14:33 -0400 Subject: [PATCH 201/328] Run prettier. --- lib/poetry.js | 2 +- test.js | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/poetry.js b/lib/poetry.js index 108f90dd..320c1d85 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -62,7 +62,7 @@ function pyprojectTomlToRequirements() { fse.moveSync( sourceRequirements, path.join(this.servicePath, '.serverless', 'requirements.txt'), - { "overwrite": true } + { overwrite: true } ); } diff --git a/test.js b/test.js index c04da5de..0b589b99 100644 --- a/test.js +++ b/test.js @@ -128,9 +128,7 @@ const availablePythons = (() => { ); } else { // For running outside of CI - binaries.push( - 'python', - ); + binaries.push('python'); } const exe = process.platform === 'win32' ? '.exe' : ''; for (const bin of binaries) { From 39b1b2f1e654e670451fd08878f0de9469c4611b Mon Sep 17 00:00:00 2001 From: ofercaspi <53738748+ofercaspi@users.noreply.github.com> Date: Tue, 1 Sep 2020 17:14:22 +0300 Subject: [PATCH 202/328] Update poetry.js --- lib/poetry.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index 320c1d85..984baf52 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -22,7 +22,8 @@ function pyprojectTomlToRequirements() { '-f', 'requirements.txt', '-o', - 'requirements.txt' + 'requirements.txt', + '--with-credentials' ], { cwd: this.servicePath From 6d3127de18db1ad911a1b3e25d899997bbe88cde Mon Sep 17 00:00:00 2001 From: ofercaspi <53738748+ofercaspi@users.noreply.github.com> Date: Tue, 1 Sep 2020 17:15:17 +0300 Subject: [PATCH 203/328] update README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 87acbbe3..ecafd913 100644 --- a/README.md +++ b/README.md @@ -105,7 +105,7 @@ custom: ## :sparkles::pencil::sparkles: Poetry support If you include a `pyproject.toml` and have `poetry` installed instead of a `requirements.txt` this will use -`poetry export --without-hashes -f requirements.txt -o requirements.txt` to generate them. It is fully compatible with all options such as `zip` and +`poetry export --without-hashes -f requirements.txt -o requirements.txt --with-credentials` to generate them. 
It is fully compatible with all options such as `zip` and `dockerizePip`. If you don't want this plugin to generate it for you, set the following option: ```yaml From c35dc060f62b158f079f30c488a6ee6896bfb660 Mon Sep 17 00:00:00 2001 From: Le Phuong Date: Thu, 12 Mar 2020 14:55:53 +0900 Subject: [PATCH 204/328] fix single function deployment --- index.js | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index b444a9b6..a3bd3256 100644 --- a/index.js +++ b/index.js @@ -151,6 +151,19 @@ class ServerlessPythonRequirements { .then(cleanup) .then(removeVendorHelper); + const setupArtifactPathCapturing = () => { + // Reference: + // https://github.com/serverless/serverless/blob/9591d5a232c641155613d23b0f88ca05ea51b436/lib/plugins/package/lib/packageService.js#L139 + // The packageService#packageFunction does set artifact path back to the function config. + // As long as the function config's "package" attribute wasn't undefined, we can still use it + // later to access the artifact path. + for (const functionName in this.serverless.service.functions) { + if (!serverless.service.functions[functionName].package) { + serverless.service.functions[functionName].package = {} + } + } + } + const before = () => { if (!isFunctionRuntimePython(arguments)) { return; @@ -160,7 +173,8 @@ class ServerlessPythonRequirements { .then(pyprojectTomlToRequirements) .then(addVendorHelper) .then(installAllRequirements) - .then(packRequirements); + .then(packRequirements) + .then(setupArtifactPathCapturing); }; const after = () => { From 41a49cc94976016a27682149a3b4427978e778ee Mon Sep 17 00:00:00 2001 From: Le Phuong Date: Thu, 12 Mar 2020 15:04:09 +0900 Subject: [PATCH 205/328] prettier --- index.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/index.js b/index.js index a3bd3256..a61ca8fb 100644 --- a/index.js +++ b/index.js @@ -159,10 +159,10 @@ class ServerlessPythonRequirements { // later to access the artifact path. 
for (const functionName in this.serverless.service.functions) { if (!serverless.service.functions[functionName].package) { - serverless.service.functions[functionName].package = {} + serverless.service.functions[functionName].package = {}; } } - } + }; const before = () => { if (!isFunctionRuntimePython(arguments)) { From a25dd2458c9518323f4c5b4a392a239ed5078025 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C3=AA=20Ho=C3=A0ng=20Ph=C6=B0=C6=A1ng?= Date: Fri, 18 Sep 2020 15:51:08 +0900 Subject: [PATCH 206/328] Update test.yml --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 662c731b..5efe904c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,6 +1,6 @@ name: Test -on: [push] +on: [push, pull-request] jobs: build: From a655d3ea5f3b833f70f10a69f7ca35bc3c46d6fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C3=AA=20Ho=C3=A0ng=20Ph=C6=B0=C6=A1ng?= Date: Fri, 18 Sep 2020 16:15:32 +0900 Subject: [PATCH 207/328] Update test.yml --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 5efe904c..77b64aa6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,6 +1,6 @@ name: Test -on: [push, pull-request] +on: [push, pull_request] jobs: build: From 599bea5126eae787e61e7b91f4697ce269848564 Mon Sep 17 00:00:00 2001 From: Benjamin Samuel Date: Sat, 19 Sep 2020 10:15:38 -0400 Subject: [PATCH 208/328] Make actions run on PRs. --- .github/workflows/lint.yml | 2 +- .github/workflows/test.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index cb9c708b..1c2f5215 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,6 +1,6 @@ name: Lint -on: [push] +on: [push, pull-request] jobs: build: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 662c731b..5efe904c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,6 +1,6 @@ name: Test -on: [push] +on: [push, pull-request] jobs: build: From 60592808b7f78b81c02f36ad66acfb05f8e6c461 Mon Sep 17 00:00:00 2001 From: Benjamin Samuel Date: Sat, 19 Sep 2020 10:21:26 -0400 Subject: [PATCH 209/328] Of course they use underscores. --- .github/workflows/lint.yml | 2 +- .github/workflows/test.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 1c2f5215..285917dd 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,6 +1,6 @@ name: Lint -on: [push, pull-request] +on: [push, pull_request] jobs: build: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 5efe904c..77b64aa6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,6 +1,6 @@ name: Test -on: [push, pull-request] +on: [push, pull_request] jobs: build: From d86b7fbcd61da9ae54a658c3a8c879be3f8b74f5 Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Tue, 22 Sep 2020 16:02:43 -0400 Subject: [PATCH 210/328] Correct tests failing in master (#552) * Executable permissions should be 755 not 775 to be saved properly in zip file. 
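
[Editor's note] For context on why the permission bits matter: the plugin writes its archives through JSZip, and a file's mode only survives when it is passed as `unixPermissions` and the archive is generated for the UNIX platform. A minimal sketch, assuming JSZip 3.x and fs-extra; the helper names are illustrative, not the plugin's own:

```js
// Minimal sketch of how a file mode survives the round trip into the zip.
const JSZip = require('jszip');
const fse = require('fs-extra');

async function addFileWithMode(zip, nameInZip, srcPath) {
  const stat = await fse.stat(srcPath);
  const data = await fse.readFile(srcPath);
  // unixPermissions only takes effect when the archive is generated as UNIX.
  zip.file(nameInZip, data, { date: stat.mtime, unixPermissions: stat.mode });
}

async function writeArchive(zip, targetPath) {
  const buffer = await zip.generateAsync({
    type: 'nodebuffer',
    platform: 'UNIX',
    compression: 'DEFLATE',
  });
  await fse.writeFile(targetPath, buffer);
}
```
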
--- test.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test.js b/test.js index 0b589b99..e5af10c6 100644 --- a/test.js +++ b/test.js @@ -1101,7 +1101,7 @@ test( async t => { process.chdir('tests/base'); const path = npm(['pack', '../..']); - const perm = '775'; + const perm = '755'; npm(['i', path]); perl([ @@ -1895,7 +1895,7 @@ test( async t => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); - const perm = '775'; + const perm = '755'; writeFileSync(`module1${sep}foobar`, ''); chmodSync(`module1${sep}foobar`, perm); @@ -1934,7 +1934,7 @@ test( async t => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); - const perm = '775'; + const perm = '755'; writeFileSync(`module1${sep}foobar`, '', { mode: perm }); chmodSync(`module1${sep}foobar`, perm); From 632f389b70b46384bd52f9ffb1d99eccbfd35147 Mon Sep 17 00:00:00 2001 From: Jack Henschel Date: Fri, 17 Apr 2020 17:17:42 +0300 Subject: [PATCH 211/328] Handle all pip install flags properly while generating requirements.txt With this patch, the plugin will respect all pip install short flags, not just -f and -i. https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format --- lib/pip.js | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index 7d1777a4..4cacfcaa 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -372,7 +372,7 @@ function getRequirements(source) { * assist with matching the static cache. The sorting will skip any * lines starting with -- as those are typically ordered at the * start of a file ( eg: --index-url / --extra-index-url ) or any - * lines that start with -f or -i, Please see: + * lines that start with -c, -e, -f, -i or -r, Please see: * https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format * @param {string} source requirements * @param {string} target requirements where results are written @@ -389,8 +389,11 @@ function filterRequirementsFile(source, target, options) { return false; } else if ( req.startsWith('--') || + req.startsWith('-c') || + req.startsWith('-e') || req.startsWith('-f') || - req.startsWith('-i') + req.startsWith('-i') || + req.startsWith('-r') ) { // If we have options (prefixed with --) keep them for later prepend.push(req); From 91a6fc602793d680690197fe8e9a23dea82f2751 Mon Sep 17 00:00:00 2001 From: Jack Henschel Date: Sun, 19 Apr 2020 13:26:24 +0300 Subject: [PATCH 212/328] Strip out all -e flags when processing requirements file Previously, there were various with installing editable packages in different python / pip versions. https://github.com/UnitedIncome/serverless-python-requirements/issues/240 This adds logic to strip out all '-e' editable flags from the requirements.txt file and issues a warning to the CLI. 
fixes #240 --- lib/pip.js | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index 4cacfcaa..32cb9a83 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -69,7 +69,8 @@ function generateRequirementsFile( filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), targetFile, - options + options, + serverless ); serverless.cli.log( `Parsed requirements.txt from pyproject.toml in ${targetFile}...` @@ -81,13 +82,14 @@ function generateRequirementsFile( filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), targetFile, - options + options, + serverless ); serverless.cli.log( `Parsed requirements.txt from Pipfile in ${targetFile}...` ); } else { - filterRequirementsFile(requirementsPath, targetFile, options); + filterRequirementsFile(requirementsPath, targetFile, options, serverless); serverless.cli.log( `Generated requirements from ${requirementsPath} in ${targetFile}...` ); @@ -378,7 +380,7 @@ function getRequirements(source) { * @param {string} target requirements where results are written * @param {Object} options */ -function filterRequirementsFile(source, target, options) { +function filterRequirementsFile(source, target, options, serverless) { const noDeploy = new Set(options.noDeploy || []); const requirements = getRequirements(source); var prepend = []; @@ -395,7 +397,17 @@ function filterRequirementsFile(source, target, options) { req.startsWith('-i') || req.startsWith('-r') ) { - // If we have options (prefixed with --) keep them for later + if (req.startsWith('-e')) { + // strip out editable flags + // not required inside final archive and avoids pip bugs + // see https://github.com/UnitedIncome/serverless-python-requirements/issues/240 + req = req.split('-e')[1].trim(); + serverless.cli.log( + `Warning: Stripping -e flag from requirement ${req}` + ); + } + + // Keep options for later prepend.push(req); return false; } else if (req === '') { From 7726b2f8d8becef45b9f9689baeaf006d815c0c0 Mon Sep 17 00:00:00 2001 From: Jack Henschel Date: Sun, 19 Apr 2020 14:04:32 +0300 Subject: [PATCH 213/328] Add test to check whether -e requirements are correctly packaged. Previously, requirements with the -e flag where completely ignored https://github.com/UnitedIncome/serverless-python-requirements/issues/36 With the latest patch, the -e flag are stripped out, but the requirement itself is preserved. This patch adds a check to test this desired behavior. 
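
[Editor's note] The behaviour exercised by this test reduces to a small transformation inside filterRequirementsFile (see the lib/pip.js hunk above). Condensed here as a stand-alone sketch; `stripEditableFlag` is a hypothetical helper name, not part of the plugin:

```js
// The '-e' prefix is dropped, a warning is logged, and the requirement
// itself is kept so pip still installs the package.
function stripEditableFlag(req, serverless) {
  if (req.startsWith('-e')) {
    req = req.split('-e')[1].trim();
    serverless.cli.log(`Warning: Stripping -e flag from requirement ${req}`);
  }
  return req;
}

// '-e git+https://github.com/boto/boto3.git#egg=boto3'
//   -> 'git+https://github.com/boto/boto3.git#egg=boto3'
```
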
--- test.js | 22 ++++++++++++++++++++++ tests/base/requirements-w-editable.txt | 1 + 2 files changed, 23 insertions(+) create mode 100644 tests/base/requirements-w-editable.txt diff --git a/test.js b/test.js index e5af10c6..e35b521c 100644 --- a/test.js +++ b/test.js @@ -376,6 +376,28 @@ test( { skip: !hasPython(3) } ); +test( + 'py3.6 can package boto3 with editable', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls([ + `--pythonBin=${getPythonBin(3)}`, + '--fileName=requirements-w-editable.txt', + 'package' + ]); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`botocore${sep}__init__.py`), + 'botocore is packaged' + ); + t.end(); + }, + { skip: !hasPython(3) } +); + test( 'py3.6 can package flask with dockerizePip option', async t => { diff --git a/tests/base/requirements-w-editable.txt b/tests/base/requirements-w-editable.txt new file mode 100644 index 00000000..a7c63986 --- /dev/null +++ b/tests/base/requirements-w-editable.txt @@ -0,0 +1 @@ +-e git+https://github.com/boto/boto3.git#egg=boto3 From ef98c34d9568006f471af0197efef67b8b0b164c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Anders=20=C3=85hsman?= Date: Mon, 12 Oct 2020 20:56:03 +0200 Subject: [PATCH 214/328] Remove unnecessary curly braces from Lambda Layer example. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 94594c55..0beeeaf9 100644 --- a/README.md +++ b/README.md @@ -237,7 +237,7 @@ functions: hello: handler: handler.hello layers: - - { Ref: PythonRequirementsLambdaLayer } + - Ref: PythonRequirementsLambdaLayer ``` If the layer requires additional or custom configuration, add them onto the `layer` option. 
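
[Editor's note] For readers wondering how those extra `layer` options are consumed: lib/layer.js (reformatted later in this series) merges them over the plugin's defaults before creating the layer. A hedged sketch of that merge; the concrete values below are placeholders, not exact plugin output:

```js
// Sketch of the defaults/override merge for the generated Lambda layer.
// serviceName, stage, runtime and userLayerOptions stand in for values the
// plugin reads from the Serverless service and from the `layer` option.
const serviceName = 'sls-py-req-test';
const stage = 'dev';
const runtime = 'python3.6';
const userLayerOptions = { licenseInfo: 'MIT' }; // custom.pythonRequirements.layer

const layerOptions = Object.assign(
  {
    name: `${serviceName}-${stage}-python-requirements`,
    description:
      'Python requirements generated by serverless-python-requirements.',
    compatibleRuntimes: [runtime],
  },
  userLayerOptions
);

console.log(layerOptions);
```
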
From 9ec2a2a10d8f5f51ee31cde4402abd07d5089b93 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 25 Nov 2020 00:13:18 +0000 Subject: [PATCH 215/328] Create Dependabot config file --- .github/dependabot.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..aeb65346 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,12 @@ +version: 2 +updates: +- package-ecosystem: npm + directory: "/" + schedule: + interval: daily + time: "10:00" + open-pull-requests-limit: 10 + ignore: + - dependency-name: eslint + versions: + - "> 5.16.0" From 7f2178f7725d8bfdfbbab774fbf0eb0f58d538f6 Mon Sep 17 00:00:00 2001 From: Bryant Biggs Date: Fri, 22 Jan 2021 21:52:46 -0500 Subject: [PATCH 216/328] chore: update dependencies to latest --- package.json | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/package.json b/package.json index b5b3c394..a52565c7 100644 --- a/package.json +++ b/package.json @@ -44,28 +44,28 @@ "format": "prettier --write '{.,lib}/*.{js,md}'" }, "devDependencies": { - "eslint": "^5.16.0", - "prettier": "^1", + "eslint": "^7.18.0", + "prettier": "^2", "cross-spawn": "*", "tape": "*", "tape-promise": "*", - "lodash": "^4.16.15" + "lodash": "^4.17.20" }, "dependencies": { - "@iarna/toml": "^2.2.3", + "@iarna/toml": "^2.2.5", "appdirectory": "^0.1.0", - "bluebird": "^3.0.6", - "fs-extra": "^8.1.0", - "glob-all": "^3.1.0", - "is-wsl": "^2.0.0", - "jszip": "^3.1.0", + "bluebird": "^3.7.2", + "fs-extra": "^9.1.0", + "glob-all": "^3.2.1", + "is-wsl": "^2.2.0", + "jszip": "^3.5.0", "lodash.get": "^4.4.2", "lodash.set": "^4.3.2", - "lodash.uniqby": "^4.0.0", + "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", "sha256-file": "1.0.0", - "shell-quote": "^1.6.1" + "shell-quote": "^1.7.2" }, "eslintConfig": { "extends": "eslint:recommended", From fed16ed6765c3a3b8d65b1e0230f8c39e0de1070 Mon Sep 17 00:00:00 2001 From: Bryant Biggs Date: Fri, 22 Jan 2021 21:53:06 -0500 Subject: [PATCH 217/328] chore: run formatting on codebase with updated libs --- lib/clean.js | 6 +++--- lib/docker.js | 4 ++-- lib/inject.js | 26 +++++++++++++------------- lib/layer.js | 8 +++----- lib/pip.js | 26 ++++++++++++++------------ lib/pipenv.js | 2 +- lib/poetry.js | 6 +++--- lib/shared.js | 6 +++--- lib/slim.js | 6 +++--- lib/zip.js | 22 +++++++++++----------- lib/zipTree.js | 16 ++++++++-------- 11 files changed, 64 insertions(+), 64 deletions(-) diff --git a/lib/clean.js b/lib/clean.js index 119ab586..e0bff238 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -14,7 +14,7 @@ function cleanup() { const artifacts = ['.requirements']; if (this.options.zip) { if (this.serverless.service.package.individually) { - this.targetFuncs.forEach(f => { + this.targetFuncs.forEach((f) => { artifacts.push(path.join(f.module, '.requirements.zip')); artifacts.push(path.join(f.module, 'unzip_requirements.py')); }); @@ -25,7 +25,7 @@ function cleanup() { } return BbPromise.all( - artifacts.map(artifact => + artifacts.map((artifact) => fse.removeAsync(path.join(this.servicePath, artifact)) ) ); @@ -47,7 +47,7 @@ function cleanupCache() { const promises = []; glob .sync([path.join(cacheLocation, '*slspyc/')], { mark: true, dot: false }) - .forEach(file => { + .forEach((file) => { promises.push(fse.removeAsync(file)); }); return BbPromise.all(promises); diff --git 
a/lib/docker.js b/lib/docker.js index 1132f1de..328e3088 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -81,7 +81,7 @@ function tryBindPath(serverless, bindPath, testFile) { `${bindPath}:/test`, 'alpine', 'ls', - `/test/${testFile}` + `/test/${testFile}`, ]; try { if (debug) serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); @@ -167,7 +167,7 @@ function getDockerUid(bindPath) { 'stat', '-c', '%u', - '/bin/sh' + '/bin/sh', ]; const ps = dockerCommand(options); return ps.stdout.trim(); diff --git a/lib/inject.js b/lib/inject.js index 876e1b75..3cad758d 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -21,15 +21,15 @@ function injectRequirements(requirementsPath, packagePath, options) { return fse .readFileAsync(packagePath) - .then(buffer => JSZip.loadAsync(buffer)) - .then(zip => + .then((buffer) => JSZip.loadAsync(buffer)) + .then((zip) => BbPromise.resolve( glob.sync([path.join(requirementsPath, '**')], { mark: true, - dot: true + dot: true, }) ) - .map(file => [file, path.relative(requirementsPath, file)]) + .map((file) => [file, path.relative(requirementsPath, file)]) .filter( ([file, relativeFile]) => !file.endsWith('/') && @@ -42,7 +42,7 @@ function injectRequirements(requirementsPath, packagePath, options) { .mapSeries(([file, relativeFile, fileStat]) => zipFile(zip, relativeFile, fse.readFileAsync(file), { unixPermissions: fileStat.mode, - createFolders: false + createFolders: false, }) ) .then(() => writeZip(zip, packagePath)) @@ -61,16 +61,16 @@ function moveModuleUp(source, target, module) { return fse .readFileAsync(source) - .then(buffer => JSZip.loadAsync(buffer)) - .then(sourceZip => + .then((buffer) => JSZip.loadAsync(buffer)) + .then((sourceZip) => sourceZip.filter( - file => + (file) => file.startsWith(module + '/') || file.startsWith('serverless_sdk/') || file.match(/^s_.*\.py/) !== null ) ) - .map(srcZipObj => + .map((srcZipObj) => zipFile( targetZip, srcZipObj.name.startsWith(module + '/') @@ -96,18 +96,18 @@ function injectAllRequirements(funcArtifact) { if (this.serverless.service.package.individually) { return BbPromise.resolve(this.targetFuncs) - .filter(func => + .filter((func) => (func.runtime || this.serverless.service.provider.runtime).match( /^python.*/ ) ) - .map(func => { + .map((func) => { if (!get(func, 'module')) { set(func, ['module'], '.'); } return func; }) - .map(func => { + .map((func) => { if (func.module !== '.') { const artifact = func.package ? func.package.artifact : funcArtifact; const newArtifact = path.join( @@ -122,7 +122,7 @@ function injectAllRequirements(funcArtifact) { return func; } }) - .map(func => { + .map((func) => { return this.options.zip ? 
func : injectRequirements( diff --git a/lib/layer.js b/lib/layer.js index f512ded1..12d338ec 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -36,7 +36,7 @@ function createLayers() { }-${this.serverless.providers.aws.getStage()}-python-requirements`, description: 'Python requirements generated by serverless-python-requirements.', - compatibleRuntimes: [this.serverless.service.provider.runtime] + compatibleRuntimes: [this.serverless.service.provider.runtime], }, this.options.layer ); @@ -55,11 +55,9 @@ function layerRequirements() { this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); - return BbPromise.bind(this) - .then(zipRequirements) - .then(createLayers); + return BbPromise.bind(this).then(zipRequirements).then(createLayers); } module.exports = { - layerRequirements + layerRequirements, }; diff --git a/lib/pip.js b/lib/pip.js index 32cb9a83..244010c8 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -12,7 +12,7 @@ const { checkForAndDeleteMaxCacheVersions, sha256Path, getRequirementsWorkingPath, - getUserCachePath + getUserCachePath, } = require('./shared'); /** @@ -22,7 +22,7 @@ const { * @return {string[][]} a list of valid commands. */ function filterCommands(commands) { - return commands.filter(cmd => Boolean(cmd) && cmd.length > 0); + return commands.filter((cmd) => Boolean(cmd) && cmd.length > 0); } /** @@ -129,7 +129,7 @@ function installRequirements(targetFolder, serverless, options) { Array.isArray(options.pipCmdExtraArgs) && options.pipCmdExtraArgs.length > 0 ) { - options.pipCmdExtraArgs.forEach(cmd => { + options.pipCmdExtraArgs.forEach((cmd) => { const parts = cmd.split(/\s+/, 2); pipCmd.push(...parts); }); @@ -140,9 +140,11 @@ function installRequirements(targetFolder, serverless, options) { // Check if we're using the legacy --cache-dir command... if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { if (options.dockerizePip) { - throw 'Error: You can not use --cache-dir with Docker any more, please\n' + + throw ( + 'Error: You can not use --cache-dir with Docker any more, please\n' + ' use the new option useDownloadCache instead. 
Please see:\n' + - ' https://github.com/UnitedIncome/serverless-python-requirements#caching'; + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); } else { serverless.cli.log('=================================================='); serverless.cli.log( @@ -239,7 +241,7 @@ function installRequirements(targetFolder, serverless, options) { if (options.dockerEnv) { // Add environment variables to docker run cmd - options.dockerEnv.forEach(function(item) { + options.dockerEnv.forEach(function (item) { dockerCmd.push('-e', item); }); } @@ -256,7 +258,7 @@ function installRequirements(targetFolder, serverless, options) { 'chown', '-R', `${process.getuid()}:${process.getgid()}`, - '/var/task' + '/var/task', ]); } else { // Use same user so --cache-dir works @@ -274,7 +276,7 @@ function installRequirements(targetFolder, serverless, options) { 'chown', '-R', `${process.getuid()}:${process.getgid()}`, - dockerDownloadCacheDir + dockerDownloadCacheDir, ]); } } @@ -384,7 +386,7 @@ function filterRequirementsFile(source, target, options, serverless) { const noDeploy = new Set(options.noDeploy || []); const requirements = getRequirements(source); var prepend = []; - const filteredRequirements = requirements.filter(req => { + const filteredRequirements = requirements.filter((req) => { req = req.trim(); if (req.startsWith('#')) { // Skip comments @@ -440,7 +442,7 @@ function copyVendors(vendorFolder, targetFolder, serverless) { `Copying vendor libraries from ${vendorFolder} to ${targetFolder}...` ); - fse.readdirSync(vendorFolder).map(file => { + fse.readdirSync(vendorFolder).map((file) => { let source = path.join(vendorFolder, file); let dest = path.join(targetFolder, file); if (fse.existsSync(dest)) { @@ -603,12 +605,12 @@ function installAllRequirements() { if (this.serverless.service.package.individually) { let doneModules = []; this.targetFuncs - .filter(func => + .filter((func) => (func.runtime || this.serverless.service.provider.runtime).match( /^python.*/ ) ) - .map(f => { + .map((f) => { if (!get(f, 'module')) { set(f, ['module'], '.'); } diff --git a/lib/pipenv.js b/lib/pipenv.js index 6718844c..063fb5d8 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -20,7 +20,7 @@ function pipfileToRequirements() { 'pipenv', ['lock', '--requirements', '--keep-outdated'], { - cwd: this.servicePath + cwd: this.servicePath, } ); if (res.error) { diff --git a/lib/poetry.js b/lib/poetry.js index 984baf52..c9062df2 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -23,10 +23,10 @@ function pyprojectTomlToRequirements() { 'requirements.txt', '-o', 'requirements.txt', - '--with-credentials' + '--with-credentials', ], { - cwd: this.servicePath + cwd: this.servicePath, } ); if (res.error) { @@ -46,7 +46,7 @@ function pyprojectTomlToRequirements() { const requirementsContents = res.stdout.toString().trim() || // As of poetry 1.0.0b1, requirements.txt is printed to standard output when the -o option is not specified. 
fse.readFileSync(sourceRequirements, { - encoding: 'utf-8' + encoding: 'utf-8', }); if (requirementsContents.match(editableFlag)) { diff --git a/lib/shared.js b/lib/shared.js index 34f61eb2..79b60cef 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -27,7 +27,7 @@ function checkForAndDeleteMaxCacheVersions(options, serverless) { // Check if we have too many if (files.length >= options.staticCacheMaxVersions) { // Sort by modified time - files.sort(function(a, b) { + files.sort(function (a, b) { return ( fse.statSync(a).mtime.getTime() - fse.statSync(b).mtime.getTime() ); @@ -90,7 +90,7 @@ function getUserCachePath(options) { // Otherwise, find/use the python-ey appdirs cache location const dirs = new Appdir({ appName: 'serverless-python-requirements', - appAuthor: 'UnitedIncome' + appAuthor: 'UnitedIncome', }); return dirs.userCache(); } @@ -108,5 +108,5 @@ module.exports = { checkForAndDeleteMaxCacheVersions, getRequirementsWorkingPath, getUserCachePath, - sha256Path + sha256Path, }; diff --git a/lib/slim.js b/lib/slim.js index ae9155fb..8ead7fcc 100644 --- a/lib/slim.js +++ b/lib/slim.js @@ -2,7 +2,7 @@ const isWsl = require('is-wsl'); const glob = require('glob-all'); const fse = require('fs-extra'); -const getStripMode = options => { +const getStripMode = (options) => { if ( options.strip === false || options.strip === 'false' || @@ -30,7 +30,7 @@ const getStripCommand = (options, folderPath) => [ '-exec', 'strip', '{}', - ';' + ';', ]; const deleteFiles = (options, folderPath) => { @@ -55,5 +55,5 @@ const deleteFiles = (options, folderPath) => { module.exports = { getStripMode, getStripCommand, - deleteFiles + deleteFiles, }; diff --git a/lib/zip.js b/lib/zip.js index 1139d0d9..9076fb81 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -17,7 +17,7 @@ function addVendorHelper() { if (this.options.zip) { if (this.serverless.service.package.individually) { return BbPromise.resolve(this.targetFuncs) - .map(f => { + .map((f) => { if (!get(f, 'package.include')) { set(f, ['package', 'include'], []); } @@ -27,8 +27,8 @@ function addVendorHelper() { f.package.include.push('unzip_requirements.py'); return f; }) - .then(functions => uniqBy(functions, func => func.module)) - .map(f => { + .then((functions) => uniqBy(functions, (func) => func.module)) + .map((f) => { this.serverless.cli.log( `Adding Python requirements helper to ${f.module}...` ); @@ -63,14 +63,14 @@ function removeVendorHelper() { if (this.options.zip && this.options.cleanupZipHelper) { if (this.serverless.service.package.individually) { return BbPromise.resolve(this.targetFuncs) - .map(f => { + .map((f) => { if (!get(f, 'module')) { set(f, ['module'], '.'); } return f; }) - .then(funcs => uniqBy(funcs, f => f.module)) - .map(f => { + .then((funcs) => uniqBy(funcs, (f) => f.module)) + .map((f) => { this.serverless.cli.log( `Removing Python requirements helper from ${f.module}...` ); @@ -95,14 +95,14 @@ function packRequirements() { if (this.options.zip) { if (this.serverless.service.package.individually) { return BbPromise.resolve(this.targetFuncs) - .map(f => { + .map((f) => { if (!get(f, 'module')) { set(f, ['module'], '.'); } return f; }) - .then(funcs => uniqBy(funcs, f => f.module)) - .map(f => { + .then((funcs) => uniqBy(funcs, (f) => f.module)) + .map((f) => { this.serverless.cli.log( `Zipping required Python packages for ${f.module}...` ); @@ -110,12 +110,12 @@ function packRequirements() { return addTree( new JSZip(), `.serverless/${f.module}/requirements` - ).then(zip => writeZip(zip, `${f.module}/.requirements.zip`)); + 
).then((zip) => writeZip(zip, `${f.module}/.requirements.zip`)); }); } else { this.serverless.cli.log('Zipping required Python packages...'); this.serverless.service.package.include.push('.requirements.zip'); - return addTree(new JSZip(), '.serverless/requirements').then(zip => + return addTree(new JSZip(), '.serverless/requirements').then((zip) => writeZip(zip, path.join(this.servicePath, '.requirements.zip')) ); } diff --git a/lib/zipTree.js b/lib/zipTree.js index d45aded0..1654f665 100644 --- a/lib/zipTree.js +++ b/lib/zipTree.js @@ -15,17 +15,17 @@ function addTree(zip, src) { return fse .readdirAsync(srcN) - .map(name => { + .map((name) => { const srcPath = path.join(srcN, name); - return fse.statAsync(srcPath).then(stat => { + return fse.statAsync(srcPath).then((stat) => { if (stat.isDirectory()) { return addTree(zip.folder(name), srcPath); } else { const opts = { date: stat.mtime, unixPermissions: stat.mode }; return fse .readFileAsync(srcPath) - .then(data => zip.file(name, data, opts)); + .then((data) => zip.file(name, data, opts)); } }); }) @@ -43,10 +43,10 @@ function writeZip(zip, targetPath) { platform: process.platform == 'win32' ? 'DOS' : 'UNIX', compression: 'DEFLATE', compressionOptions: { - level: 9 - } + level: 9, + }, }; - return new BbPromise(resolve => + return new BbPromise((resolve) => zip .generateNodeStream(opts) .pipe(fse.createWriteStream(targetPath)) @@ -64,7 +64,7 @@ function writeZip(zip, targetPath) { */ function zipFile(zip, zipPath, bufferPromise, fileOpts) { return bufferPromise - .then(buffer => + .then((buffer) => zip.file( zipPath, buffer, @@ -72,7 +72,7 @@ function zipFile(zip, zipPath, bufferPromise, fileOpts) { {}, { // necessary to get the same hash when zipping the same content - date: new Date(0) + date: new Date(0), }, fileOpts ) From 238df7b7640b86d18e582bbf51b0a4dcf4b31e45 Mon Sep 17 00:00:00 2001 From: Bryant Biggs Date: Mon, 15 Mar 2021 22:23:13 -0400 Subject: [PATCH 218/328] chore: re-update --- .tool-versions | 2 - package.json | 10 +- yarn.lock | 1483 ++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 1488 insertions(+), 7 deletions(-) delete mode 100644 .tool-versions create mode 100644 yarn.lock diff --git a/.tool-versions b/.tool-versions deleted file mode 100644 index f9e0b286..00000000 --- a/.tool-versions +++ /dev/null @@ -1,2 +0,0 @@ -nodejs 6.16.0 -python 3.6.8 2.7.15 diff --git a/package.json b/package.json index a52565c7..945141e6 100644 --- a/package.json +++ b/package.json @@ -44,12 +44,12 @@ "format": "prettier --write '{.,lib}/*.{js,md}'" }, "devDependencies": { - "eslint": "^7.18.0", - "prettier": "^2", "cross-spawn": "*", + "eslint": "^7.22.0", + "lodash": "^4.17.21", + "prettier": "^2", "tape": "*", - "tape-promise": "*", - "lodash": "^4.17.20" + "tape-promise": "*" }, "dependencies": { "@iarna/toml": "^2.2.5", @@ -58,7 +58,7 @@ "fs-extra": "^9.1.0", "glob-all": "^3.2.1", "is-wsl": "^2.2.0", - "jszip": "^3.5.0", + "jszip": "^3.6.0", "lodash.get": "^4.4.2", "lodash.set": "^4.3.2", "lodash.uniqby": "^4.7.0", diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 00000000..e1d476d3 --- /dev/null +++ b/yarn.lock @@ -0,0 +1,1483 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@babel/code-frame@7.12.11": + version "7.12.11" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f" + integrity sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw== + dependencies: + "@babel/highlight" "^7.10.4" + +"@babel/helper-validator-identifier@^7.12.11": + version "7.12.11" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" + integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== + +"@babel/highlight@^7.10.4": + version "7.13.10" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.13.10.tgz#a8b2a66148f5b27d666b15d81774347a731d52d1" + integrity sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg== + dependencies: + "@babel/helper-validator-identifier" "^7.12.11" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@eslint/eslintrc@^0.4.0": + version "0.4.0" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.0.tgz#99cc0a0584d72f1df38b900fb062ba995f395547" + integrity sha512-2ZPCc+uNbjV5ERJr+aKSPRwZgKd2z11x0EgLvb1PURmUrn9QNRXFqje0Ldq454PfAVyaJYyrDvvIKSFP4NnBog== + dependencies: + ajv "^6.12.4" + debug "^4.1.1" + espree "^7.3.0" + globals "^12.1.0" + ignore "^4.0.6" + import-fresh "^3.2.1" + js-yaml "^3.13.1" + minimatch "^3.0.4" + strip-json-comments "^3.1.1" + +"@iarna/toml@^2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" + integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== + +acorn-jsx@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.1.tgz#fc8661e11b7ac1539c47dbfea2e72b3af34d267b" + integrity sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng== + +acorn@^7.4.0: + version "7.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +ajv@^6.10.0, ajv@^6.12.4: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^7.0.2: + version "7.2.1" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-7.2.1.tgz#a5ac226171912447683524fa2f1248fcf8bac83d" + integrity sha512-+nu0HDv7kNSOua9apAVc979qd932rrZeb3WOvoiD31A/p1mIE5/9bN2027pE2rOPYEdS3UHzsvof4hY+lM9/WQ== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ansi-colors@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" + integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== + +ansi-regex@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75" + integrity 
sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +appdirectory@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/appdirectory/-/appdirectory-0.1.0.tgz#eb6c816320e7b2ab16f5ed997f28d8205df56375" + integrity sha1-62yBYyDnsqsW9e2ZfyjYIF31Y3U= + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +array-filter@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/array-filter/-/array-filter-1.0.0.tgz#baf79e62e6ef4c2a4c0b831232daffec251f9d83" + integrity sha1-uveeYubvTCpMC4MSMtr/7CUfnYM= + +astral-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== + +at-least-node@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + +available-typed-arrays@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.2.tgz#6b098ca9d8039079ee3f77f7b783c4480ba513f5" + integrity sha512-XWX3OX8Onv97LMk/ftVyBibpGwY5a8SmuxZPzeOxqmuEqUCOM9ZE+uIaD1VNJ5QnvU2UQusvmKbuM1FR8QWGfQ== + dependencies: + array-filter "^1.0.0" + +balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= + +bluebird@^3.7.2: + version "3.7.2" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved 
"https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camelcase@^5.0.0: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +chalk@^2.0.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a" + integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +cliui@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" + integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^6.2.0" + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= + +cross-spawn@*, cross-spawn@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +debug@^4.0.1, debug@^4.1.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" + integrity 
sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== + dependencies: + ms "2.1.2" + +decamelize@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= + +deep-equal@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-2.0.5.tgz#55cd2fe326d83f9cbf7261ef0e060b3f724c5cb9" + integrity sha512-nPiRgmbAtm1a3JsnLCf6/SLfXcjyN5v8L1TXzdCmHrXJ4hx+gW/w1YCcn7z8gJtSiDArZCgYtbao3QqLm/N1Sw== + dependencies: + call-bind "^1.0.0" + es-get-iterator "^1.1.1" + get-intrinsic "^1.0.1" + is-arguments "^1.0.4" + is-date-object "^1.0.2" + is-regex "^1.1.1" + isarray "^2.0.5" + object-is "^1.1.4" + object-keys "^1.1.1" + object.assign "^4.1.2" + regexp.prototype.flags "^1.3.0" + side-channel "^1.0.3" + which-boxed-primitive "^1.0.1" + which-collection "^1.0.1" + which-typed-array "^1.1.2" + +deep-is@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" + integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= + +define-properties@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + +defined@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" + integrity sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM= + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dotignore@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/dotignore/-/dotignore-0.1.2.tgz#f942f2200d28c3a76fbdd6f0ee9f3257c8a2e905" + integrity sha512-UGGGWfSauusaVJC+8fgV+NVvBXkCTmVv7sk6nojDZZvuOUNGUy0Zk4UpHQD6EDjS0jpBwcACvH4eofvyzBcRDw== + dependencies: + minimatch "^3.0.4" + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +enquirer@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" + integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== + dependencies: + ansi-colors "^4.1.1" + +es-abstract@^1.18.0-next.1, es-abstract@^1.18.0-next.2: + version "1.18.0" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.0.tgz#ab80b359eecb7ede4c298000390bc5ac3ec7b5a4" + integrity sha512-LJzK7MrQa8TS0ja2w3YNLzUgJCGPdPOV1yVvezjNnS89D+VR08+Szt2mz3YB2Dck/+w5tfIq/RoUAFqJJGM2yw== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + get-intrinsic "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.2" + is-callable "^1.2.3" + is-negative-zero "^2.0.1" + is-regex "^1.1.2" + is-string "^1.0.5" + object-inspect "^1.9.0" + object-keys "^1.1.1" + object.assign "^4.1.2" + string.prototype.trimend "^1.0.4" + string.prototype.trimstart 
"^1.0.4" + unbox-primitive "^1.0.0" + +es-get-iterator@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/es-get-iterator/-/es-get-iterator-1.1.2.tgz#9234c54aba713486d7ebde0220864af5e2b283f7" + integrity sha512-+DTO8GYwbMCwbywjimwZMHp8AuYXOS2JZFWoi2AlPOS3ebnII9w/NLpNZtA7A0YLaVDw+O7KFCeoIV7OPvM7hQ== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.0" + has-symbols "^1.0.1" + is-arguments "^1.1.0" + is-map "^2.0.2" + is-set "^2.0.2" + is-string "^1.0.5" + isarray "^2.0.5" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + +eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-utils@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" + integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== + dependencies: + eslint-visitor-keys "^1.1.0" + +eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" + integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== + +eslint-visitor-keys@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz#21fdc8fbcd9c795cc0321f0563702095751511a8" + integrity sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ== + +eslint@^7.22.0: + version "7.22.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.22.0.tgz#07ecc61052fec63661a2cab6bd507127c07adc6f" + integrity sha512-3VawOtjSJUQiiqac8MQc+w457iGLfuNGLFn8JmF051tTKbh5/x/0vlcEj8OgDCaw7Ysa2Jn8paGshV7x2abKXg== + dependencies: + "@babel/code-frame" "7.12.11" + "@eslint/eslintrc" "^0.4.0" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.0.1" + doctrine "^3.0.0" + enquirer "^2.3.5" + eslint-scope "^5.1.1" + eslint-utils "^2.1.0" + eslint-visitor-keys "^2.0.0" + espree "^7.3.1" + esquery "^1.4.0" + esutils "^2.0.2" + file-entry-cache "^6.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^5.0.0" + globals "^13.6.0" + ignore "^4.0.6" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^3.13.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash "^4.17.21" + minimatch "^3.0.4" + natural-compare "^1.4.0" + optionator "^0.9.1" + progress "^2.0.0" + regexpp "^3.1.0" + semver "^7.2.1" + strip-ansi "^6.0.0" + strip-json-comments "^3.1.0" + table "^6.0.4" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^7.3.0, espree@^7.3.1: + version "7.3.1" + resolved 
"https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" + integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== + dependencies: + acorn "^7.4.0" + acorn-jsx "^5.3.1" + eslint-visitor-keys "^1.3.0" + +esprima@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880" + integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +fast-deep-equal@^3.1.1: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved 
"https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.1.1.tgz#c4b489e80096d9df1dfc97c79871aea7c617c469" + integrity sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA== + +for-each@^0.3.3: + version "0.3.3" + resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" + integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== + dependencies: + is-callable "^1.1.3" + +foreach@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" + integrity sha1-C+4AUBiusmDQo6865ljdATbsG5k= + +fs-extra@^9.1.0: + version "9.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= + +get-caller-file@^2.0.1: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-intrinsic@^1.0.1, get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" + integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + +glob-all@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/glob-all/-/glob-all-3.2.1.tgz#082ca81afd2247cbd3ed2149bb2630f4dc877d95" + integrity sha512-x877rVkzB3ipid577QOp+eQCR6M5ZyiwrtaYgrX/z3EThaSPFtLDwBXFHc3sH1cG0R0vFYI5SRYeWMMSEyXkUw== + dependencies: + glob "^7.1.2" + yargs "^15.3.1" + +glob-parent@^5.0.0: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob@^7.1.2, glob@^7.1.3, glob@^7.1.6: + version "7.1.6" + resolved 
"https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^12.1.0: + version "12.4.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8" + integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg== + dependencies: + type-fest "^0.8.1" + +globals@^13.6.0: + version "13.6.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.6.0.tgz#d77138e53738567bb96a3916ff6f6b487af20ef7" + integrity sha512-YFKCX0SiPg7l5oKYCJ2zZGxcXprVXHcSnVuvzrT3oSENQonVLqM5pf9fN5dLGZGyCjhw8TN8Btwe/jKnZ0pjvQ== + dependencies: + type-fest "^0.20.2" + +graceful-fs@^4.1.6, graceful-fs@^4.2.0: + version "4.2.6" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" + integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== + +has-bigints@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" + integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-symbols@^1.0.0, has-symbols@^1.0.1, has-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" + integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +ignore@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" + integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== + +immediate@~3.0.5: + version "3.0.6" + resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b" + integrity sha1-nbHb0Pr43m++D13V5Wu2BigN5ps= + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= 
+ +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@^2.0.4, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +is-arguments@^1.0.4, is-arguments@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.0.tgz#62353031dfbee07ceb34656a6bde59efecae8dd9" + integrity sha512-1Ij4lOMPl/xB5kBDn7I+b2ttPMKa8szhEIrXDuXQD/oe3HJLTLhqhgGspwgyGd6MOywBUqVvYicF72lkgDnIHg== + dependencies: + call-bind "^1.0.0" + +is-bigint@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.1.tgz#6923051dfcbc764278540b9ce0e6b3213aa5ebc2" + integrity sha512-J0ELF4yHFxHy0cmSxZuheDOz2luOdVvqjwmEcj8H/L1JHeuEDSDbeRP+Dk9kFVk5RTFzbucJ2Kb9F7ixY2QaCg== + +is-boolean-object@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.0.tgz#e2aaad3a3a8fca34c28f6eee135b156ed2587ff0" + integrity sha512-a7Uprx8UtD+HWdyYwnD1+ExtTgqQtD2k/1yJgtXP6wnMm8byhkoTZRl+95LLThpzNZJ5aEvi46cdH+ayMFRwmA== + dependencies: + call-bind "^1.0.0" + +is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.3.tgz#8b1e0500b73a1d76c70487636f368e519de8db8e" + integrity sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ== + +is-core-module@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" + integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1, is-date-object@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" + integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g== + +is-docker@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.1.1.tgz#4125a88e44e450d384e09047ede71adc2d144156" + integrity sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw== + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-glob@^4.0.0, is-glob@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" + integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== + dependencies: + is-extglob "^2.1.1" + +is-map@^2.0.1, is-map@^2.0.2: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/is-map/-/is-map-2.0.2.tgz#00922db8c9bf73e81b7a335827bc2a43f2b91127" + integrity sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg== + +is-negative-zero@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24" + integrity sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w== + +is-number-object@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.4.tgz#36ac95e741cf18b283fc1ddf5e83da798e3ec197" + integrity sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw== + +is-promise@^2.1.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" + integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== + +is-regex@^1.1.1, is-regex@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.2.tgz#81c8ebde4db142f2cf1c53fc86d6a45788266251" + integrity sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg== + dependencies: + call-bind "^1.0.2" + has-symbols "^1.0.1" + +is-set@^2.0.1, is-set@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-set/-/is-set-2.0.2.tgz#90755fa4c2562dc1c5d4024760d6119b94ca18ec" + integrity sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g== + +is-string@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.5.tgz#40493ed198ef3ff477b8c7f92f644ec82a5cd3a6" + integrity sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ== + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" + integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== + dependencies: + has-symbols "^1.0.1" + +is-typed-array@^1.1.3: + version "1.1.5" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.5.tgz#f32e6e096455e329eb7b423862456aa213f0eb4e" + integrity sha512-S+GRDgJlR3PyEbsX/Fobd9cqpZBuvUS+8asRqYDMLCb2qMzt1oz5m5oxQCxOgUDxiWsOVNi4yaF+/uvdlHlYug== + dependencies: + available-typed-arrays "^1.0.2" + call-bind "^1.0.2" + es-abstract "^1.18.0-next.2" + foreach "^2.0.5" + has-symbols "^1.0.1" + +is-weakmap@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.1.tgz#5008b59bdc43b698201d18f62b37b2ca243e8cf2" + integrity sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA== + +is-weakset@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-weakset/-/is-weakset-2.0.1.tgz#e9a0af88dbd751589f5e50d80f4c98b780884f83" + integrity sha512-pi4vhbhVHGLxohUw7PhGsueT4vRGFoXhP7+RGN0jKIv9+8PWYCQTqtADngrxOm2g46hoH0+g8uZZBzMrvVGDmw== + +is-wsl@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +isarray@^2.0.5: + version "2.0.5" + resolved 
"https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" + integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= + +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +jszip@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/jszip/-/jszip-3.6.0.tgz#839b72812e3f97819cc13ac4134ffced95dd6af9" + integrity sha512-jgnQoG9LKnWO3mnVNBnfhkh0QknICd1FGSrXcgrl67zioyJ4wgx25o9ZqwNtrROSflGBCGYnJfjrIyRIby1OoQ== + dependencies: + lie "~3.3.0" + pako "~1.0.2" + readable-stream "~2.3.6" + set-immediate-shim "~1.0.1" + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +lie@~3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/lie/-/lie-3.3.0.tgz#dcf82dee545f46074daf200c7c1c5a08e0f40f6a" + integrity sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ== + dependencies: + immediate "~3.0.5" + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity 
sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +lodash.get@^4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" + integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk= + +lodash.set@^4.3.2: + version "4.3.2" + resolved "https://registry.yarnpkg.com/lodash.set/-/lodash.set-4.3.2.tgz#d8757b1da807dde24816b0d6a84bea1a76230b23" + integrity sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM= + +lodash.uniqby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.uniqby/-/lodash.uniqby-4.7.0.tgz#d99c07a669e9e6d24e1362dfe266c67616af1302" + integrity sha1-2ZwHpmnp5tJOE2Lf4mbGdhavEwI= + +lodash.values@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.values/-/lodash.values-4.3.0.tgz#a3a6c2b0ebecc5c2cba1c17e6e620fe81b53d347" + integrity sha1-o6bCsOvsxcLLocF+bmIP6BtT00c= + +lodash@^4.17.20, lodash@^4.17.21: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +mimic-fn@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== + +minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" + integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= + +object-inspect@^1.9.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.9.0.tgz#c90521d74e1127b67266ded3394ad6116986533a" + integrity sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw== + +object-is@^1.1.4, object-is@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" + integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +object-keys@^1.0.12, object-keys@^1.1.1: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" + integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + has-symbols "^1.0.1" + object-keys "^1.1.1" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= + dependencies: + mimic-fn "^1.0.0" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +pako@~1.0.2: + version "1.0.11" + resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" + integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity 
sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" + integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prettier@^2: + version "2.2.1" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.2.1.tgz#795a1a78dd52f073da0cd42b21f9c91381923ff5" + integrity sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q== + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +progress@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + +punycode@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +readable-stream@~2.3.6: + version "2.3.7" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +regexp.prototype.flags@^1.3.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.3.1.tgz#7ef352ae8d159e758c0eadca6f8fcb4eef07be26" + integrity sha512-JiBdRBq91WlY7uRJ0ds7R+dU02i6LKi8r3BuQhNXn+kmeLN+EfHhfjqMRis1zJxnlu88hq/4dx0P2OP3APRTOA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +regexpp@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.1.0.tgz#206d0ad0a5648cffbdb8ae46438f3dc51c9f78e2" + integrity sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q== + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +require-main-filename@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" + integrity 
sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve@^2.0.0-next.3: + version "2.0.0-next.3" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.3.tgz#d41016293d4a8586a39ca5d9b5f15cbea1f55e46" + integrity sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q== + dependencies: + is-core-module "^2.2.0" + path-parse "^1.0.6" + +resumer@^0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/resumer/-/resumer-0.0.0.tgz#f1e8f461e4064ba39e82af3cdc2a8c893d076759" + integrity sha1-8ej0YeQGS6Oegq883CqMiT0HZ1k= + dependencies: + through "~2.3.4" + +rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +semver@^7.2.1: + version "7.3.4" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.4.tgz#27aaa7d2e4ca76452f98d3add093a72c943edc97" + integrity sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw== + dependencies: + lru-cache "^6.0.0" + +set-blocking@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= + +set-immediate-shim@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" + integrity sha1-SysbJ+uAip+NzEgaWOXlb1mfP2E= + +sha256-file@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/sha256-file/-/sha256-file-1.0.0.tgz#02cade5e658da3fbc167c3270bdcdfd5409f1b65" + integrity sha512-nqf+g0veqgQAkDx0U2y2Tn2KWyADuuludZTw9A7J3D+61rKlIIl9V5TS4mfnwKuXZOH9B7fQyjYJ9pKRHIsAyg== + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shell-quote@^1.7.2: + version "1.7.2" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.2.tgz#67a7d02c76c9da24f99d20808fcaded0e0e04be2" + integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== + +side-channel@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity 
sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +slice-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" + integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + +string-width@^4.1.0, string-width@^4.2.0: + version "4.2.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" + integrity sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.0" + +string.prototype.trim@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.4.tgz#6014689baf5efaf106ad031a5fa45157666ed1bd" + integrity sha512-hWCk/iqf7lp0/AgTF7/ddO1IWtSNPASjlzCicV5irAVdE1grjsneK26YG6xACMBEdCvO8fUST0UzDMh/2Qy+9Q== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.2" + +string.prototype.trimend@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz#e75ae90c2942c63504686c18b287b4a0b1a45f80" + integrity sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +string.prototype.trimstart@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz#b36399af4ab2999b4c9c648bd7a3fb2bb26feeed" + integrity sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" + integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== + dependencies: + ansi-regex "^5.0.0" + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + 
+supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +table@^6.0.4: + version "6.0.7" + resolved "https://registry.yarnpkg.com/table/-/table-6.0.7.tgz#e45897ffbcc1bcf9e8a87bf420f2c9e5a7a52a34" + integrity sha512-rxZevLGTUzWna/qBLObOe16kB2RTnnbhciwgPbMMlazz1yZGVEgnZK762xyVdVznhqxrfCeBMmMkgOOaPwjH7g== + dependencies: + ajv "^7.0.2" + lodash "^4.17.20" + slice-ansi "^4.0.0" + string-width "^4.2.0" + +tape-promise@*: + version "4.0.0" + resolved "https://registry.yarnpkg.com/tape-promise/-/tape-promise-4.0.0.tgz#c1f3553959b2e9d64b1546e7276b8a017c616897" + integrity sha512-mNi5yhWAKDuNgZCfFKeZbsXvraVOf+I8UZG+lf+aoRrzX4+jd4mpNBjYh16/VcpEMUtS0iFndBgnfxxZbtyLFw== + dependencies: + is-promise "^2.1.0" + onetime "^2.0.0" + +tape@*: + version "5.2.2" + resolved "https://registry.yarnpkg.com/tape/-/tape-5.2.2.tgz#a98475ecf30aa0ed2a89c36439bb9438d24d2184" + integrity sha512-grXrzPC1ly2kyTMKdqxh5GiLpb0BpNctCuecTB0psHX4Gu0nc+uxWR4xKjTh/4CfQlH4zhvTM2/EXmHXp6v/uA== + dependencies: + call-bind "^1.0.2" + deep-equal "^2.0.5" + defined "^1.0.0" + dotignore "^0.1.2" + for-each "^0.3.3" + glob "^7.1.6" + has "^1.0.3" + inherits "^2.0.4" + is-regex "^1.1.2" + minimist "^1.2.5" + object-inspect "^1.9.0" + object-is "^1.1.5" + object.assign "^4.1.2" + resolve "^2.0.0-next.3" + resumer "^0.0.0" + string.prototype.trim "^1.2.4" + through "^2.3.8" + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + +through@^2.3.8, through@~2.3.4: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" + integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== + +unbox-primitive@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.0.tgz#eeacbc4affa28e9b3d36b5eaeccc50b3251b1d3f" + integrity sha512-P/51NX+JXyxK/aigg1/ZgyccdAxm5K1+n8+tvqSntjOivPt19gvm1VC49RWYetsiub8WViUchdxl/KWHHB0kzA== + dependencies: + function-bind "^1.1.1" + has-bigints "^1.0.0" + has-symbols "^1.0.0" + which-boxed-primitive "^1.0.1" + +universalify@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +uri-js@^4.2.2: + 
version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +v8-compile-cache@^2.0.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + +which-boxed-primitive@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which-collection@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/which-collection/-/which-collection-1.0.1.tgz#70eab71ebbbd2aefaf32f917082fc62cdcb70906" + integrity sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A== + dependencies: + is-map "^2.0.1" + is-set "^2.0.1" + is-weakmap "^2.0.1" + is-weakset "^2.0.1" + +which-module@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= + +which-typed-array@^1.1.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.4.tgz#8fcb7d3ee5adf2d771066fba7cf37e32fe8711ff" + integrity sha512-49E0SpUe90cjpoc7BOJwyPHRqSAd12c10Qm2amdEZrJPCY2NDxaW01zHITrem+rnETY3dwrbH3UUrUwagfCYDA== + dependencies: + available-typed-arrays "^1.0.2" + call-bind "^1.0.0" + es-abstract "^1.18.0-next.1" + foreach "^2.0.5" + function-bind "^1.1.1" + has-symbols "^1.0.1" + is-typed-array "^1.1.3" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wrap-ansi@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" + integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +y18n@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4" + integrity 
sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yargs-parser@^18.1.2: + version "18.1.3" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" + integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs@^15.3.1: + version "15.4.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" + integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== + dependencies: + cliui "^6.0.0" + decamelize "^1.2.0" + find-up "^4.1.0" + get-caller-file "^2.0.1" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^4.2.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^18.1.2" From aabccdffa82e590cf1e5d610bc2a15e66750b695 Mon Sep 17 00:00:00 2001 From: Bryant Biggs Date: Mon, 15 Mar 2021 22:24:52 -0400 Subject: [PATCH 219/328] chore: ignore yarn.lock file --- .gitignore | 1 + yarn.lock | 1483 ---------------------------------------------------- 2 files changed, 1 insertion(+), 1483 deletions(-) delete mode 100644 yarn.lock diff --git a/.gitignore b/.gitignore index 85e60616..db119782 100644 --- a/.gitignore +++ b/.gitignore @@ -45,3 +45,4 @@ __pycache__ #NODE STUFF package-lock.json +yarn.lock diff --git a/yarn.lock b/yarn.lock deleted file mode 100644 index e1d476d3..00000000 --- a/yarn.lock +++ /dev/null @@ -1,1483 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@babel/code-frame@7.12.11": - version "7.12.11" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f" - integrity sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw== - dependencies: - "@babel/highlight" "^7.10.4" - -"@babel/helper-validator-identifier@^7.12.11": - version "7.12.11" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" - integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== - -"@babel/highlight@^7.10.4": - version "7.13.10" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.13.10.tgz#a8b2a66148f5b27d666b15d81774347a731d52d1" - integrity sha512-5aPpe5XQPzflQrFwL1/QoeHkP2MsA4JCntcXHRhEsdsfPVkvPi2w7Qix4iV7t5S/oC9OodGrggd8aco1g3SZFg== - dependencies: - "@babel/helper-validator-identifier" "^7.12.11" - chalk "^2.0.0" - js-tokens "^4.0.0" - -"@eslint/eslintrc@^0.4.0": - version "0.4.0" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.0.tgz#99cc0a0584d72f1df38b900fb062ba995f395547" - integrity sha512-2ZPCc+uNbjV5ERJr+aKSPRwZgKd2z11x0EgLvb1PURmUrn9QNRXFqje0Ldq454PfAVyaJYyrDvvIKSFP4NnBog== - dependencies: - ajv "^6.12.4" - debug "^4.1.1" - espree "^7.3.0" - globals "^12.1.0" - ignore "^4.0.6" - import-fresh "^3.2.1" - js-yaml "^3.13.1" - minimatch "^3.0.4" - strip-json-comments "^3.1.1" - -"@iarna/toml@^2.2.5": - version "2.2.5" - resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" - integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== - -acorn-jsx@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.1.tgz#fc8661e11b7ac1539c47dbfea2e72b3af34d267b" - integrity sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng== - -acorn@^7.4.0: - version "7.4.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" - integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== - -ajv@^6.10.0, ajv@^6.12.4: - version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" - integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ajv@^7.0.2: - version "7.2.1" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-7.2.1.tgz#a5ac226171912447683524fa2f1248fcf8bac83d" - integrity sha512-+nu0HDv7kNSOua9apAVc979qd932rrZeb3WOvoiD31A/p1mIE5/9bN2027pE2rOPYEdS3UHzsvof4hY+lM9/WQ== - dependencies: - fast-deep-equal "^3.1.1" - json-schema-traverse "^1.0.0" - require-from-string "^2.0.2" - uri-js "^4.2.2" - -ansi-colors@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" - integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== - -ansi-regex@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75" - integrity 
sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg== - -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - -ansi-styles@^4.0.0, ansi-styles@^4.1.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - -appdirectory@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/appdirectory/-/appdirectory-0.1.0.tgz#eb6c816320e7b2ab16f5ed997f28d8205df56375" - integrity sha1-62yBYyDnsqsW9e2ZfyjYIF31Y3U= - -argparse@^1.0.7: - version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - -array-filter@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-filter/-/array-filter-1.0.0.tgz#baf79e62e6ef4c2a4c0b831232daffec251f9d83" - integrity sha1-uveeYubvTCpMC4MSMtr/7CUfnYM= - -astral-regex@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" - integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== - -at-least-node@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" - integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== - -available-typed-arrays@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.2.tgz#6b098ca9d8039079ee3f77f7b783c4480ba513f5" - integrity sha512-XWX3OX8Onv97LMk/ftVyBibpGwY5a8SmuxZPzeOxqmuEqUCOM9ZE+uIaD1VNJ5QnvU2UQusvmKbuM1FR8QWGfQ== - dependencies: - array-filter "^1.0.0" - -balanced-match@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" - integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= - -bluebird@^3.7.2: - version "3.7.2" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" - integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -call-bind@^1.0.0, call-bind@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" - integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== - dependencies: - function-bind "^1.1.1" - get-intrinsic "^1.0.2" - -callsites@^3.0.0: - version "3.1.0" - resolved 
"https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" - integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== - -camelcase@^5.0.0: - version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - -chalk@^2.0.0: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chalk@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a" - integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -cliui@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" - integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.0" - wrap-ansi "^6.2.0" - -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-convert@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= - -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= - -core-util-is@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" - integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= - -cross-spawn@*, cross-spawn@^7.0.2: - version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -debug@^4.0.1, debug@^4.1.1: - version "4.3.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" - integrity 
sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== - dependencies: - ms "2.1.2" - -decamelize@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" - integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= - -deep-equal@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-2.0.5.tgz#55cd2fe326d83f9cbf7261ef0e060b3f724c5cb9" - integrity sha512-nPiRgmbAtm1a3JsnLCf6/SLfXcjyN5v8L1TXzdCmHrXJ4hx+gW/w1YCcn7z8gJtSiDArZCgYtbao3QqLm/N1Sw== - dependencies: - call-bind "^1.0.0" - es-get-iterator "^1.1.1" - get-intrinsic "^1.0.1" - is-arguments "^1.0.4" - is-date-object "^1.0.2" - is-regex "^1.1.1" - isarray "^2.0.5" - object-is "^1.1.4" - object-keys "^1.1.1" - object.assign "^4.1.2" - regexp.prototype.flags "^1.3.0" - side-channel "^1.0.3" - which-boxed-primitive "^1.0.1" - which-collection "^1.0.1" - which-typed-array "^1.1.2" - -deep-is@^0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" - integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= - -define-properties@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" - integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== - dependencies: - object-keys "^1.0.12" - -defined@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" - integrity sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM= - -doctrine@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" - integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== - dependencies: - esutils "^2.0.2" - -dotignore@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/dotignore/-/dotignore-0.1.2.tgz#f942f2200d28c3a76fbdd6f0ee9f3257c8a2e905" - integrity sha512-UGGGWfSauusaVJC+8fgV+NVvBXkCTmVv7sk6nojDZZvuOUNGUy0Zk4UpHQD6EDjS0jpBwcACvH4eofvyzBcRDw== - dependencies: - minimatch "^3.0.4" - -emoji-regex@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" - integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== - -enquirer@^2.3.5: - version "2.3.6" - resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" - integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== - dependencies: - ansi-colors "^4.1.1" - -es-abstract@^1.18.0-next.1, es-abstract@^1.18.0-next.2: - version "1.18.0" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.0.tgz#ab80b359eecb7ede4c298000390bc5ac3ec7b5a4" - integrity sha512-LJzK7MrQa8TS0ja2w3YNLzUgJCGPdPOV1yVvezjNnS89D+VR08+Szt2mz3YB2Dck/+w5tfIq/RoUAFqJJGM2yw== - dependencies: - call-bind "^1.0.2" - es-to-primitive "^1.2.1" - function-bind "^1.1.1" - get-intrinsic "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.2" - is-callable "^1.2.3" - is-negative-zero "^2.0.1" - is-regex "^1.1.2" - is-string "^1.0.5" - object-inspect "^1.9.0" - object-keys "^1.1.1" - object.assign "^4.1.2" - string.prototype.trimend "^1.0.4" - string.prototype.trimstart 
"^1.0.4" - unbox-primitive "^1.0.0" - -es-get-iterator@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/es-get-iterator/-/es-get-iterator-1.1.2.tgz#9234c54aba713486d7ebde0220864af5e2b283f7" - integrity sha512-+DTO8GYwbMCwbywjimwZMHp8AuYXOS2JZFWoi2AlPOS3ebnII9w/NLpNZtA7A0YLaVDw+O7KFCeoIV7OPvM7hQ== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.0" - has-symbols "^1.0.1" - is-arguments "^1.1.0" - is-map "^2.0.2" - is-set "^2.0.2" - is-string "^1.0.5" - isarray "^2.0.5" - -es-to-primitive@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" - integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== - dependencies: - is-callable "^1.1.4" - is-date-object "^1.0.1" - is-symbol "^1.0.2" - -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= - -eslint-scope@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" - integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== - dependencies: - esrecurse "^4.3.0" - estraverse "^4.1.1" - -eslint-utils@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" - integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== - dependencies: - eslint-visitor-keys "^1.1.0" - -eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" - integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== - -eslint-visitor-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz#21fdc8fbcd9c795cc0321f0563702095751511a8" - integrity sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ== - -eslint@^7.22.0: - version "7.22.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.22.0.tgz#07ecc61052fec63661a2cab6bd507127c07adc6f" - integrity sha512-3VawOtjSJUQiiqac8MQc+w457iGLfuNGLFn8JmF051tTKbh5/x/0vlcEj8OgDCaw7Ysa2Jn8paGshV7x2abKXg== - dependencies: - "@babel/code-frame" "7.12.11" - "@eslint/eslintrc" "^0.4.0" - ajv "^6.10.0" - chalk "^4.0.0" - cross-spawn "^7.0.2" - debug "^4.0.1" - doctrine "^3.0.0" - enquirer "^2.3.5" - eslint-scope "^5.1.1" - eslint-utils "^2.1.0" - eslint-visitor-keys "^2.0.0" - espree "^7.3.1" - esquery "^1.4.0" - esutils "^2.0.2" - file-entry-cache "^6.0.1" - functional-red-black-tree "^1.0.1" - glob-parent "^5.0.0" - globals "^13.6.0" - ignore "^4.0.6" - import-fresh "^3.0.0" - imurmurhash "^0.1.4" - is-glob "^4.0.0" - js-yaml "^3.13.1" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.4.1" - lodash "^4.17.21" - minimatch "^3.0.4" - natural-compare "^1.4.0" - optionator "^0.9.1" - progress "^2.0.0" - regexpp "^3.1.0" - semver "^7.2.1" - strip-ansi "^6.0.0" - strip-json-comments "^3.1.0" - table "^6.0.4" - text-table "^0.2.0" - v8-compile-cache "^2.0.3" - -espree@^7.3.0, espree@^7.3.1: - version "7.3.1" - resolved 
"https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" - integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== - dependencies: - acorn "^7.4.0" - acorn-jsx "^5.3.1" - eslint-visitor-keys "^1.3.0" - -esprima@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esquery@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" - integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^4.1.1: - version "4.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" - integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== - -estraverse@^5.1.0, estraverse@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880" - integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== - -esutils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -fast-deep-equal@^3.1.1: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-json-stable-stringify@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - -fast-levenshtein@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= - -file-entry-cache@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" - integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== - dependencies: - flat-cache "^3.0.4" - -find-up@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -flat-cache@^3.0.4: - version "3.0.4" - resolved 
"https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" - integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== - dependencies: - flatted "^3.1.0" - rimraf "^3.0.2" - -flatted@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.1.1.tgz#c4b489e80096d9df1dfc97c79871aea7c617c469" - integrity sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA== - -for-each@^0.3.3: - version "0.3.3" - resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" - integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== - dependencies: - is-callable "^1.1.3" - -foreach@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" - integrity sha1-C+4AUBiusmDQo6865ljdATbsG5k= - -fs-extra@^9.1.0: - version "9.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" - integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== - dependencies: - at-least-node "^1.0.0" - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= - -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -functional-red-black-tree@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" - integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= - -get-caller-file@^2.0.1: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-intrinsic@^1.0.1, get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" - integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== - dependencies: - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.1" - -glob-all@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/glob-all/-/glob-all-3.2.1.tgz#082ca81afd2247cbd3ed2149bb2630f4dc877d95" - integrity sha512-x877rVkzB3ipid577QOp+eQCR6M5ZyiwrtaYgrX/z3EThaSPFtLDwBXFHc3sH1cG0R0vFYI5SRYeWMMSEyXkUw== - dependencies: - glob "^7.1.2" - yargs "^15.3.1" - -glob-parent@^5.0.0: - version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - -glob@^7.1.2, glob@^7.1.3, glob@^7.1.6: - version "7.1.6" - resolved 
"https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" - integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -globals@^12.1.0: - version "12.4.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8" - integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg== - dependencies: - type-fest "^0.8.1" - -globals@^13.6.0: - version "13.6.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.6.0.tgz#d77138e53738567bb96a3916ff6f6b487af20ef7" - integrity sha512-YFKCX0SiPg7l5oKYCJ2zZGxcXprVXHcSnVuvzrT3oSENQonVLqM5pf9fN5dLGZGyCjhw8TN8Btwe/jKnZ0pjvQ== - dependencies: - type-fest "^0.20.2" - -graceful-fs@^4.1.6, graceful-fs@^4.2.0: - version "4.2.6" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" - integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== - -has-bigints@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" - integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has-symbols@^1.0.0, has-symbols@^1.0.1, has-symbols@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" - integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== - -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" - -ignore@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" - integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== - -immediate@~3.0.5: - version "3.0.6" - resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b" - integrity sha1-nbHb0Pr43m++D13V5Wu2BigN5ps= - -import-fresh@^3.0.0, import-fresh@^3.2.1: - version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" - integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= 
- -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@^2.0.4, inherits@~2.0.3: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -is-arguments@^1.0.4, is-arguments@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.0.tgz#62353031dfbee07ceb34656a6bde59efecae8dd9" - integrity sha512-1Ij4lOMPl/xB5kBDn7I+b2ttPMKa8szhEIrXDuXQD/oe3HJLTLhqhgGspwgyGd6MOywBUqVvYicF72lkgDnIHg== - dependencies: - call-bind "^1.0.0" - -is-bigint@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.1.tgz#6923051dfcbc764278540b9ce0e6b3213aa5ebc2" - integrity sha512-J0ELF4yHFxHy0cmSxZuheDOz2luOdVvqjwmEcj8H/L1JHeuEDSDbeRP+Dk9kFVk5RTFzbucJ2Kb9F7ixY2QaCg== - -is-boolean-object@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.0.tgz#e2aaad3a3a8fca34c28f6eee135b156ed2587ff0" - integrity sha512-a7Uprx8UtD+HWdyYwnD1+ExtTgqQtD2k/1yJgtXP6wnMm8byhkoTZRl+95LLThpzNZJ5aEvi46cdH+ayMFRwmA== - dependencies: - call-bind "^1.0.0" - -is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.3.tgz#8b1e0500b73a1d76c70487636f368e519de8db8e" - integrity sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ== - -is-core-module@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" - integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== - dependencies: - has "^1.0.3" - -is-date-object@^1.0.1, is-date-object@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" - integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g== - -is-docker@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.1.1.tgz#4125a88e44e450d384e09047ede71adc2d144156" - integrity sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw== - -is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-glob@^4.0.0, is-glob@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" - integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== - dependencies: - is-extglob "^2.1.1" - -is-map@^2.0.1, is-map@^2.0.2: - version "2.0.2" - resolved 
"https://registry.yarnpkg.com/is-map/-/is-map-2.0.2.tgz#00922db8c9bf73e81b7a335827bc2a43f2b91127" - integrity sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg== - -is-negative-zero@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24" - integrity sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w== - -is-number-object@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.4.tgz#36ac95e741cf18b283fc1ddf5e83da798e3ec197" - integrity sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw== - -is-promise@^2.1.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" - integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== - -is-regex@^1.1.1, is-regex@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.2.tgz#81c8ebde4db142f2cf1c53fc86d6a45788266251" - integrity sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg== - dependencies: - call-bind "^1.0.2" - has-symbols "^1.0.1" - -is-set@^2.0.1, is-set@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-set/-/is-set-2.0.2.tgz#90755fa4c2562dc1c5d4024760d6119b94ca18ec" - integrity sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g== - -is-string@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.5.tgz#40493ed198ef3ff477b8c7f92f644ec82a5cd3a6" - integrity sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ== - -is-symbol@^1.0.2, is-symbol@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" - integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== - dependencies: - has-symbols "^1.0.1" - -is-typed-array@^1.1.3: - version "1.1.5" - resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.5.tgz#f32e6e096455e329eb7b423862456aa213f0eb4e" - integrity sha512-S+GRDgJlR3PyEbsX/Fobd9cqpZBuvUS+8asRqYDMLCb2qMzt1oz5m5oxQCxOgUDxiWsOVNi4yaF+/uvdlHlYug== - dependencies: - available-typed-arrays "^1.0.2" - call-bind "^1.0.2" - es-abstract "^1.18.0-next.2" - foreach "^2.0.5" - has-symbols "^1.0.1" - -is-weakmap@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.1.tgz#5008b59bdc43b698201d18f62b37b2ca243e8cf2" - integrity sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA== - -is-weakset@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-weakset/-/is-weakset-2.0.1.tgz#e9a0af88dbd751589f5e50d80f4c98b780884f83" - integrity sha512-pi4vhbhVHGLxohUw7PhGsueT4vRGFoXhP7+RGN0jKIv9+8PWYCQTqtADngrxOm2g46hoH0+g8uZZBzMrvVGDmw== - -is-wsl@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" - integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== - dependencies: - is-docker "^2.0.0" - -isarray@^2.0.5: - version "2.0.5" - resolved 
"https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" - integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== - -isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= - -js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.13.1: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-schema-traverse@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" - integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= - -jsonfile@^6.0.1: - version "6.1.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" - integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== - dependencies: - universalify "^2.0.0" - optionalDependencies: - graceful-fs "^4.1.6" - -jszip@^3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/jszip/-/jszip-3.6.0.tgz#839b72812e3f97819cc13ac4134ffced95dd6af9" - integrity sha512-jgnQoG9LKnWO3mnVNBnfhkh0QknICd1FGSrXcgrl67zioyJ4wgx25o9ZqwNtrROSflGBCGYnJfjrIyRIby1OoQ== - dependencies: - lie "~3.3.0" - pako "~1.0.2" - readable-stream "~2.3.6" - set-immediate-shim "~1.0.1" - -levn@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" - integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== - dependencies: - prelude-ls "^1.2.1" - type-check "~0.4.0" - -lie@~3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/lie/-/lie-3.3.0.tgz#dcf82dee545f46074daf200c7c1c5a08e0f40f6a" - integrity sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ== - dependencies: - immediate "~3.0.5" - -locate-path@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity 
sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - -lodash.get@^4.4.2: - version "4.4.2" - resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" - integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk= - -lodash.set@^4.3.2: - version "4.3.2" - resolved "https://registry.yarnpkg.com/lodash.set/-/lodash.set-4.3.2.tgz#d8757b1da807dde24816b0d6a84bea1a76230b23" - integrity sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM= - -lodash.uniqby@^4.7.0: - version "4.7.0" - resolved "https://registry.yarnpkg.com/lodash.uniqby/-/lodash.uniqby-4.7.0.tgz#d99c07a669e9e6d24e1362dfe266c67616af1302" - integrity sha1-2ZwHpmnp5tJOE2Lf4mbGdhavEwI= - -lodash.values@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/lodash.values/-/lodash.values-4.3.0.tgz#a3a6c2b0ebecc5c2cba1c17e6e620fe81b53d347" - integrity sha1-o6bCsOvsxcLLocF+bmIP6BtT00c= - -lodash@^4.17.20, lodash@^4.17.21: - version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -mimic-fn@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" - integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== - -minimatch@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimist@^1.2.5: - version "1.2.5" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" - integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= - -object-inspect@^1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.9.0.tgz#c90521d74e1127b67266ded3394ad6116986533a" - integrity sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw== - -object-is@^1.1.4, object-is@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" - integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - -object-keys@^1.0.12, object-keys@^1.1.1: - version "1.1.1" - resolved 
"https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" - integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== - -object.assign@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" - integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== - dependencies: - call-bind "^1.0.0" - define-properties "^1.1.3" - has-symbols "^1.0.1" - object-keys "^1.1.1" - -once@^1.3.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= - dependencies: - wrappy "1" - -onetime@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" - integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= - dependencies: - mimic-fn "^1.0.0" - -optionator@^0.9.1: - version "0.9.1" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" - integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== - dependencies: - deep-is "^0.1.3" - fast-levenshtein "^2.0.6" - levn "^0.4.1" - prelude-ls "^1.2.1" - type-check "^0.4.0" - word-wrap "^1.2.3" - -p-limit@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" - integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== - dependencies: - p-try "^2.0.0" - -p-locate@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - -p-try@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -pako@~1.0.2: - version "1.0.11" - resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" - integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== - -parent-module@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -path-exists@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= - -path-key@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" - integrity 
sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-parse@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" - integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== - -prelude-ls@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" - integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== - -prettier@^2: - version "2.2.1" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.2.1.tgz#795a1a78dd52f073da0cd42b21f9c91381923ff5" - integrity sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q== - -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - -progress@^2.0.0: - version "2.0.3" - resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" - integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== - -punycode@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== - -readable-stream@~2.3.6: - version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -regexp.prototype.flags@^1.3.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.3.1.tgz#7ef352ae8d159e758c0eadca6f8fcb4eef07be26" - integrity sha512-JiBdRBq91WlY7uRJ0ds7R+dU02i6LKi8r3BuQhNXn+kmeLN+EfHhfjqMRis1zJxnlu88hq/4dx0P2OP3APRTOA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - -regexpp@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.1.0.tgz#206d0ad0a5648cffbdb8ae46438f3dc51c9f78e2" - integrity sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q== - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= - -require-from-string@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" - integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== - -require-main-filename@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" - integrity 
sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== - -resolve-from@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== - -resolve@^2.0.0-next.3: - version "2.0.0-next.3" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.3.tgz#d41016293d4a8586a39ca5d9b5f15cbea1f55e46" - integrity sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q== - dependencies: - is-core-module "^2.2.0" - path-parse "^1.0.6" - -resumer@^0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/resumer/-/resumer-0.0.0.tgz#f1e8f461e4064ba39e82af3cdc2a8c893d076759" - integrity sha1-8ej0YeQGS6Oegq883CqMiT0HZ1k= - dependencies: - through "~2.3.4" - -rimraf@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - -safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -semver@^7.2.1: - version "7.3.4" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.4.tgz#27aaa7d2e4ca76452f98d3add093a72c943edc97" - integrity sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw== - dependencies: - lru-cache "^6.0.0" - -set-blocking@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" - integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= - -set-immediate-shim@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" - integrity sha1-SysbJ+uAip+NzEgaWOXlb1mfP2E= - -sha256-file@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/sha256-file/-/sha256-file-1.0.0.tgz#02cade5e658da3fbc167c3270bdcdfd5409f1b65" - integrity sha512-nqf+g0veqgQAkDx0U2y2Tn2KWyADuuludZTw9A7J3D+61rKlIIl9V5TS4mfnwKuXZOH9B7fQyjYJ9pKRHIsAyg== - -shebang-command@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - -shebang-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - -shell-quote@^1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.2.tgz#67a7d02c76c9da24f99d20808fcaded0e0e04be2" - integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== - -side-channel@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity 
sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== - dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" - -slice-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" - integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== - dependencies: - ansi-styles "^4.0.0" - astral-regex "^2.0.0" - is-fullwidth-code-point "^3.0.0" - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= - -string-width@^4.1.0, string-width@^4.2.0: - version "4.2.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" - integrity sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.0" - -string.prototype.trim@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.4.tgz#6014689baf5efaf106ad031a5fa45157666ed1bd" - integrity sha512-hWCk/iqf7lp0/AgTF7/ddO1IWtSNPASjlzCicV5irAVdE1grjsneK26YG6xACMBEdCvO8fUST0UzDMh/2Qy+9Q== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.18.0-next.2" - -string.prototype.trimend@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz#e75ae90c2942c63504686c18b287b4a0b1a45f80" - integrity sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - -string.prototype.trimstart@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz#b36399af4ab2999b4c9c648bd7a3fb2bb26feeed" - integrity sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -strip-ansi@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" - integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== - dependencies: - ansi-regex "^5.0.0" - -strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" - integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== - -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - 
-supports-color@^7.1.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -table@^6.0.4: - version "6.0.7" - resolved "https://registry.yarnpkg.com/table/-/table-6.0.7.tgz#e45897ffbcc1bcf9e8a87bf420f2c9e5a7a52a34" - integrity sha512-rxZevLGTUzWna/qBLObOe16kB2RTnnbhciwgPbMMlazz1yZGVEgnZK762xyVdVznhqxrfCeBMmMkgOOaPwjH7g== - dependencies: - ajv "^7.0.2" - lodash "^4.17.20" - slice-ansi "^4.0.0" - string-width "^4.2.0" - -tape-promise@*: - version "4.0.0" - resolved "https://registry.yarnpkg.com/tape-promise/-/tape-promise-4.0.0.tgz#c1f3553959b2e9d64b1546e7276b8a017c616897" - integrity sha512-mNi5yhWAKDuNgZCfFKeZbsXvraVOf+I8UZG+lf+aoRrzX4+jd4mpNBjYh16/VcpEMUtS0iFndBgnfxxZbtyLFw== - dependencies: - is-promise "^2.1.0" - onetime "^2.0.0" - -tape@*: - version "5.2.2" - resolved "https://registry.yarnpkg.com/tape/-/tape-5.2.2.tgz#a98475ecf30aa0ed2a89c36439bb9438d24d2184" - integrity sha512-grXrzPC1ly2kyTMKdqxh5GiLpb0BpNctCuecTB0psHX4Gu0nc+uxWR4xKjTh/4CfQlH4zhvTM2/EXmHXp6v/uA== - dependencies: - call-bind "^1.0.2" - deep-equal "^2.0.5" - defined "^1.0.0" - dotignore "^0.1.2" - for-each "^0.3.3" - glob "^7.1.6" - has "^1.0.3" - inherits "^2.0.4" - is-regex "^1.1.2" - minimist "^1.2.5" - object-inspect "^1.9.0" - object-is "^1.1.5" - object.assign "^4.1.2" - resolve "^2.0.0-next.3" - resumer "^0.0.0" - string.prototype.trim "^1.2.4" - through "^2.3.8" - -text-table@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= - -through@^2.3.8, through@~2.3.4: - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= - -type-check@^0.4.0, type-check@~0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" - integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== - dependencies: - prelude-ls "^1.2.1" - -type-fest@^0.20.2: - version "0.20.2" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" - integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== - -type-fest@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" - integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== - -unbox-primitive@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.0.tgz#eeacbc4affa28e9b3d36b5eaeccc50b3251b1d3f" - integrity sha512-P/51NX+JXyxK/aigg1/ZgyccdAxm5K1+n8+tvqSntjOivPt19gvm1VC49RWYetsiub8WViUchdxl/KWHHB0kzA== - dependencies: - function-bind "^1.1.1" - has-bigints "^1.0.0" - has-symbols "^1.0.0" - which-boxed-primitive "^1.0.1" - -universalify@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" - integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== - -uri-js@^4.2.2: - 
version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== - dependencies: - punycode "^2.1.0" - -util-deprecate@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= - -v8-compile-cache@^2.0.3: - version "2.3.0" - resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" - integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== - -which-boxed-primitive@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" - integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== - dependencies: - is-bigint "^1.0.1" - is-boolean-object "^1.1.0" - is-number-object "^1.0.4" - is-string "^1.0.5" - is-symbol "^1.0.3" - -which-collection@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/which-collection/-/which-collection-1.0.1.tgz#70eab71ebbbd2aefaf32f917082fc62cdcb70906" - integrity sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A== - dependencies: - is-map "^2.0.1" - is-set "^2.0.1" - is-weakmap "^2.0.1" - is-weakset "^2.0.1" - -which-module@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" - integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= - -which-typed-array@^1.1.2: - version "1.1.4" - resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.4.tgz#8fcb7d3ee5adf2d771066fba7cf37e32fe8711ff" - integrity sha512-49E0SpUe90cjpoc7BOJwyPHRqSAd12c10Qm2amdEZrJPCY2NDxaW01zHITrem+rnETY3dwrbH3UUrUwagfCYDA== - dependencies: - available-typed-arrays "^1.0.2" - call-bind "^1.0.0" - es-abstract "^1.18.0-next.1" - foreach "^2.0.5" - function-bind "^1.1.1" - has-symbols "^1.0.1" - is-typed-array "^1.1.3" - -which@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" - integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -word-wrap@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" - integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== - -wrap-ansi@^6.2.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" - integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= - -y18n@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4" - integrity 
sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ== - -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yargs-parser@^18.1.2: - version "18.1.3" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" - integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== - dependencies: - camelcase "^5.0.0" - decamelize "^1.2.0" - -yargs@^15.3.1: - version "15.4.1" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" - integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== - dependencies: - cliui "^6.0.0" - decamelize "^1.2.0" - find-up "^4.1.0" - get-caller-file "^2.0.1" - require-directory "^2.1.1" - require-main-filename "^2.0.0" - set-blocking "^2.0.0" - string-width "^4.2.0" - which-module "^2.0.0" - y18n "^4.0.0" - yargs-parser "^18.1.2" From d5cd5d8b14745d36a92c5931caa9d9fea9a97054 Mon Sep 17 00:00:00 2001 From: Bryant Biggs Date: Mon, 15 Mar 2021 22:32:09 -0400 Subject: [PATCH 220/328] chore: update ci-cd workflows and versions used --- .github/dependabot.yml | 2 +- .github/workflows/lint.yml | 9 ++------- .github/workflows/publish.yml | 8 +++++--- .github/workflows/test.yml | 7 +++---- package.json | 2 +- 5 files changed, 12 insertions(+), 16 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index aeb65346..ac29398e 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -9,4 +9,4 @@ updates: ignore: - dependency-name: eslint versions: - - "> 5.16.0" + - "> 7.22.0" diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 285917dd..1e6b9ee8 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -4,19 +4,14 @@ on: [push, pull_request] jobs: build: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - node-version: [12] + runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Set up Node ${{ matrix.node-version }} uses: actions/setup-node@v1 with: - node-version: ${{ matrix.node-version }} + node-version: 14 - name: Install deps run: npm install diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index baf74a56..6a1e7d26 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -6,11 +6,13 @@ jobs: publish-npm: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 - - uses: actions/setup-node@v1 + - uses: actions/checkout@v2 + + - uses: actions/setup-node@v2 with: - version: 12 + version: 14 registry-url: https://registry.npmjs.org/ + - run: npm publish env: NODE_AUTH_TOKEN: ${{secrets.npm_token}} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 77b64aa6..2a1155a3 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -9,8 +9,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macOS-latest] - python-version: [3.6, 2.7] - node-version: [12] + python-version: [2.7, 3.6, 3.7, 3.8] steps: - uses: actions/checkout@v2 @@ -19,10 +18,10 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Set up Node ${{ matrix.node-version }} + - name: Set up Node uses: actions/setup-node@v1 with: - 
node-version: ${{ matrix.node-version }} + node-version: 14 - name: Check python version run: | diff --git a/package.json b/package.json index 945141e6..0ffc0c50 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "serverless-python-requirements", "version": "5.1.0", "engines": { - "node": ">=10.0" + "node": ">=12.0" }, "description": "Serverless Python Requirements Plugin", "author": "United Income ", From 72a3684a3dcb4ec1050ea5dc24a77173ecc7027a Mon Sep 17 00:00:00 2001 From: Bryant Biggs Date: Mon, 15 Mar 2021 22:40:41 -0400 Subject: [PATCH 221/328] chore: rollback python version change for now --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2a1155a3..aa94f4b0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -9,7 +9,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macOS-latest] - python-version: [2.7, 3.6, 3.7, 3.8] + python-version: [2.7, 3.6] steps: - uses: actions/checkout@v2 From 2078e4a52fca8d55ad834eeb5298fa1edea57c3c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 20 Mar 2021 03:00:17 +0000 Subject: [PATCH 222/328] chore(deps): bump jinja2 from 2.11.1 to 2.11.3 in /tests/pipenv Bumps [jinja2](https://github.com/pallets/jinja) from 2.11.1 to 2.11.3. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/master/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/2.11.1...2.11.3) Signed-off-by: dependabot[bot] --- tests/pipenv/Pipfile.lock | 127 +++++++++++++++++++------------------- 1 file changed, 62 insertions(+), 65 deletions(-) diff --git a/tests/pipenv/Pipfile.lock b/tests/pipenv/Pipfile.lock index 4b68385d..63a3e92b 100644 --- a/tests/pipenv/Pipfile.lock +++ b/tests/pipenv/Pipfile.lock @@ -22,10 +22,10 @@ }, "botocore": { "hashes": [ - "sha256:00bff61d899c4f12abe020527452e08cf49b3b60400c5d0d9f83c00b7d18c642", - "sha256:5ffdf30746dbfca59d31d2059789168255e96bd98a17a65f8edb3b6de0a96b3e" + "sha256:a474131ba7a7d700b91696a27e8cdcf1b473084addf92f90b269ebd8f5c3d3e0", + "sha256:b805691b4dedcb2a252f52347479ff351429624a873f001b6a1c81aca03dccee" ], - "version": "==1.15.2" + "version": "==1.15.49" }, "bottle": { "hashes": [ @@ -36,10 +36,10 @@ }, "click": { "hashes": [ - "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", - "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" ], - "version": "==7.0" + "version": "==7.1.2" }, "docutils": { "hashes": [ @@ -65,17 +65,17 @@ }, "jinja2": { "hashes": [ - "sha256:93187ffbc7808079673ef52771baa950426fd664d3aad1d0fa3e95644360e250", - "sha256:b0eaf100007721b5c16c1fc1eecb87409464edc10469ddc9a22a27a99123be49" + "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", + "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" ], - "version": "==2.11.1" + "version": "==2.11.3" }, "jmespath": { "hashes": [ - "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", - "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c" + "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", + "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f" ], - 
"version": "==0.9.4" + "version": "==0.10.0" }, "markupsafe": { "hashes": [ @@ -84,8 +84,12 @@ "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", + "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f", + "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39", "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014", + "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f", "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", @@ -94,24 +98,39 @@ "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85", + "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1", "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850", + "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0", "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb", "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1", + "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2", "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7", "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8", "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193", "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b", "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", + "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5", + "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c", + "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032", 
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", - "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" + "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", + "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" ], "version": "==1.1.1" }, @@ -124,63 +143,55 @@ }, "s3transfer": { "hashes": [ - "sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13", - "sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db" + "sha256:5d48b1fd2232141a9d5fb279709117aaba506cacea7f86f11bc392f06bfa8fc2", + "sha256:c5dadf598762899d8cfaecf68eba649cd25b0ce93b6c954b156aaa3eed160547" ], - "version": "==0.3.3" + "version": "==0.3.6" }, "six": { "hashes": [ - "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", - "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "version": "==1.14.0" + "version": "==1.15.0" }, "urllib3": { "hashes": [ - "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc", - "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc" + "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2", + "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e" ], "markers": "python_version != '3.4'", - "version": "==1.25.8" + "version": "==1.25.11" }, "werkzeug": { "hashes": [ - "sha256:169ba8a33788476292d04186ab33b01d6add475033dfc07215e6d219cc077096", - "sha256:6dc65cf9091cf750012f56f2cad759fa9e879f511b5ff8685e456b4e3bf90d16" + "sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43", + "sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c" ], - "version": "==1.0.0" + "version": "==1.0.1" } }, "develop": { "attrs": { "hashes": [ - "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", - "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "version": "==19.3.0" - }, - "importlib-metadata": { - "hashes": [ - "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", - "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" - ], - "markers": "python_version < '3.8'", - "version": "==1.5.0" + "version": "==20.3.0" }, "more-itertools": { "hashes": [ - "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c", - "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507" + "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced", + "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713" ], - "version": "==8.2.0" + "version": "==8.7.0" }, "packaging": { "hashes": [ - "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73", - "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334" + "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", + "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" ], - "version": "==20.1" + "version": "==20.9" }, "pluggy": { "hashes": [ @@ -191,17 +202,17 @@ }, "py": { "hashes": [ - "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", - 
"sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" + "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", + "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" ], - "version": "==1.8.1" + "version": "==1.10.0" }, "pyparsing": { "hashes": [ - "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", - "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "version": "==2.4.6" + "version": "==2.4.7" }, "pytest": { "hashes": [ @@ -210,26 +221,12 @@ ], "version": "==5.3.5" }, - "six": { - "hashes": [ - "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", - "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" - ], - "version": "==1.14.0" - }, "wcwidth": { "hashes": [ - "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", - "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" - ], - "version": "==0.1.8" - }, - "zipp": { - "hashes": [ - "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2", - "sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a" + "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", + "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" ], - "version": "==3.0.0" + "version": "==0.2.5" } } } From 32c0fff1375f1772c71145260bc611fc25363f99 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Apr 2021 17:14:04 +0000 Subject: [PATCH 223/328] chore(deps): bump jinja2 from 2.10 to 2.11.3 in /tests/base Bumps [jinja2](https://github.com/pallets/jinja) from 2.10 to 2.11.3. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/master/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/2.10...2.11.3) Signed-off-by: dependabot[bot] --- tests/base/requirements-w-hashes.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/base/requirements-w-hashes.txt b/tests/base/requirements-w-hashes.txt index 2ee1ab77..47443a7b 100644 --- a/tests/base/requirements-w-hashes.txt +++ b/tests/base/requirements-w-hashes.txt @@ -29,9 +29,9 @@ itsdangerous==1.1.0 \ --hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19 \ --hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 \ # via flask -jinja2==2.10 \ - --hash=sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd \ - --hash=sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4 \ +jinja2==2.11.3 \ + --hash=sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419 \ + --hash=sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6 \ # via flask jmespath==0.9.3 \ --hash=sha256:6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64 \ From 6586a250ef3210bb91ea79c51c262b60fe9d0ed3 Mon Sep 17 00:00:00 2001 From: pbq443 Date: Thu, 1 Apr 2021 15:47:58 -0400 Subject: [PATCH 224/328] Prettify merged commit. 
--- lib/poetry.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/poetry.js b/lib/poetry.js index 106bb14c..553a1392 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -44,7 +44,7 @@ function pyprojectTomlToRequirements() { const editableFlag = new RegExp(/^-e /gm); const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); const requirementsContents = fse.readFileSync(sourceRequirements, { - encoding: "utf-8" + encoding: 'utf-8', }); if (requirementsContents.match(editableFlag)) { From fbbc4a772caff50a013848557673aea23534263f Mon Sep 17 00:00:00 2001 From: pbq443 Date: Thu, 1 Apr 2021 16:37:16 -0400 Subject: [PATCH 225/328] Update minor version and note recent contributors. --- README.md | 15 +++++++++++---- package.json | 2 +- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 94594c55..fbcab967 100644 --- a/README.md +++ b/README.md @@ -528,17 +528,17 @@ package: ## Contributors -- [@dschep](https://github.com/dschep) - Lead developer & maintainer +- [@dschep](https://github.com/dschep) - Lead developer & original maintainer - [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes - [@abetomo](https://github.com/abetomo) - style & linting - [@angstwad](https://github.com/angstwad) - `deploy --function` support - [@mather](https://github.com/mather) - the cache invalidation option - [@rmax](https://github.com/rmax) - the extra pip args option -- [@bsamuel-ui](https://github.com/bsamuel-ui) - Python 3 support +- [@bsamuel-ui](https://github.com/bsamuel-ui) - Python 3 support, current maintainer - [@suxor42](https://github.com/suxor42) - fixing permission issues with Docker on Linux - [@mbeltran213](https://github.com/mbeltran213) - fixing docker linux -u option bug - [@Tethik](https://github.com/Tethik) - adding usePipenv option -- [@miketheman](https://github.com/miketheman) - fixing bug with includes when using zip option +- [@miketheman](https://github.com/miketheman) - fixing bug with includes when using zip option, update eslint, - [@wattdave](https://github.com/wattdave) - fixing bug when using `deploymentBucket` - [@heri16](https://github.com/heri16) - fixing Docker support in Windows - [@ryansb](https://github.com/ryansb) - package individually support @@ -551,5 +551,12 @@ package: - [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) - [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching - [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! 
-- [@squaresurf](https://github.com/squaresurf) - adding usePoetry option +- Poetry support + - [@squaresurf](https://github.com/squaresurf) + - [@drice](https://github.com/drice) + - [@ofercaspi](https://github.com/ofercaspi) + - [@tpansino](https://github.com/tpansino) - [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support +- [@bryantbriggs](https://github.com/bryantbiggs) - Fixing CI/CD +- [@jacksgt](https://github.com/jacksgt) - Fixing pip issues +- [@lephuongbg](https://github.com/lephuongbg) - Fixing single function deployment diff --git a/package.json b/package.json index 0ffc0c50..e09dadd5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.1.0", + "version": "5.1.1", "engines": { "node": ">=12.0" }, From 492176fc593dfac2311fa323b358e366f7a6af9c Mon Sep 17 00:00:00 2001 From: Ben Samuel Date: Thu, 1 Apr 2021 17:33:57 -0400 Subject: [PATCH 226/328] Corrections before point release. - Fold in fixes to gitignore from #596 - Correct some version numbers in test files. --- .gitignore | 31 ++++ example/.gitignore | 25 --- example_native_deps/.gitignore | 7 - tests/base/.gitignore | 25 --- tests/base/package.json | 2 +- tests/individually/package.json | 2 +- tests/non_build_pyproject/.gitignore | 22 --- tests/non_build_pyproject/package.json | 2 +- tests/non_poetry_pyproject/.gitignore | 22 --- tests/non_poetry_pyproject/package.json | 2 +- tests/pipenv/.gitignore | 21 --- tests/pipenv/Pipfile.lock | 232 ------------------------ tests/pipenv/package.json | 2 +- tests/poetry/.gitignore | 22 --- tests/poetry/package.json | 2 +- tests/poetry/poetry.lock | 197 -------------------- 16 files changed, 37 insertions(+), 579 deletions(-) delete mode 100644 example/.gitignore delete mode 100644 example_native_deps/.gitignore delete mode 100644 tests/base/.gitignore delete mode 100644 tests/non_build_pyproject/.gitignore delete mode 100644 tests/non_poetry_pyproject/.gitignore delete mode 100644 tests/pipenv/.gitignore delete mode 100644 tests/pipenv/Pipfile.lock delete mode 100644 tests/poetry/.gitignore delete mode 100644 tests/poetry/poetry.lock diff --git a/.gitignore b/.gitignore index db119782..ab0317f3 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,34 @@ __pycache__ #NODE STUFF package-lock.json yarn.lock + +# Lockfiles +*.lock + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Serverless directories +.serverless +.requirements +.requirements.zip +unzip_requirements.py + +# Project ignores +puck/ +serverless.yml.bak diff --git a/example/.gitignore b/example/.gitignore deleted file mode 100644 index 213a542c..00000000 --- a/example/.gitignore +++ /dev/null @@ -1,25 +0,0 @@ -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Serverless directories -.serverless -.requirements - -# Project ignores -puck/ -serverless.yml.bak diff --git a/example_native_deps/.gitignore b/example_native_deps/.gitignore deleted file mode 100644 index abe9b1af..00000000 --- a/example_native_deps/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -# npm install -node_modules -package-lock.json - -# serverless -.serverless -.requirements.zip \ No newline at end of file diff --git a/tests/base/.gitignore b/tests/base/.gitignore deleted file mode 100644 index 
213a542c..00000000 --- a/tests/base/.gitignore +++ /dev/null @@ -1,25 +0,0 @@ -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Serverless directories -.serverless -.requirements - -# Project ignores -puck/ -serverless.yml.bak diff --git a/tests/base/package.json b/tests/base/package.json index 752c49c6..43ce4eee 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" } } diff --git a/tests/individually/package.json b/tests/individually/package.json index 752c49c6..43ce4eee 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" } } diff --git a/tests/non_build_pyproject/.gitignore b/tests/non_build_pyproject/.gitignore deleted file mode 100644 index 3c2369dc..00000000 --- a/tests/non_build_pyproject/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Serverless -.serverless -.requirements -unzip_requirements.py diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json index 752c49c6..43ce4eee 100644 --- a/tests/non_build_pyproject/package.json +++ b/tests/non_build_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" } } diff --git a/tests/non_poetry_pyproject/.gitignore b/tests/non_poetry_pyproject/.gitignore deleted file mode 100644 index 3c2369dc..00000000 --- a/tests/non_poetry_pyproject/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Serverless -.serverless -.requirements -unzip_requirements.py diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json index 752c49c6..43ce4eee 100644 --- a/tests/non_poetry_pyproject/package.json +++ b/tests/non_poetry_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" } } diff --git a/tests/pipenv/.gitignore b/tests/pipenv/.gitignore deleted file mode 100644 index cf9dab3c..00000000 --- a/tests/pipenv/.gitignore +++ /dev/null @@ -1,21 +0,0 @@ -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Serverless directories -.serverless -.requirements diff --git a/tests/pipenv/Pipfile.lock b/tests/pipenv/Pipfile.lock deleted file mode 100644 index 63a3e92b..00000000 --- a/tests/pipenv/Pipfile.lock +++ /dev/null 
@@ -1,232 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "82666b88a005fce4645bad0e283c9eddf23446745dc6228888183a1fed2f7185" - }, - "pipfile-spec": 6, - "requires": {}, - "sources": [ - { - "url": "https://pypi.python.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "boto3": { - "hashes": [ - "sha256:68e32e2d1c911b0e8408278c7603f0f46c31780b46c44d23346ccef71b3f10dc", - "sha256:967c7a5ac484fe627706e241dfc9294a6220c863ceb53a4f34e3fe9e11a71d7a" - ], - "version": "==1.12.2" - }, - "botocore": { - "hashes": [ - "sha256:a474131ba7a7d700b91696a27e8cdcf1b473084addf92f90b269ebd8f5c3d3e0", - "sha256:b805691b4dedcb2a252f52347479ff351429624a873f001b6a1c81aca03dccee" - ], - "version": "==1.15.49" - }, - "bottle": { - "hashes": [ - "sha256:0819b74b145a7def225c0e83b16a4d5711fde751cd92bae467a69efce720f69e", - "sha256:43157254e88f32c6be16f8d9eb1f1d1472396a4e174ebd2bf62544854ecf37e7" - ], - "version": "==0.12.18" - }, - "click": { - "hashes": [ - "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", - "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" - ], - "version": "==7.1.2" - }, - "docutils": { - "hashes": [ - "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", - "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", - "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" - ], - "version": "==0.15.2" - }, - "flask": { - "hashes": [ - "sha256:13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52", - "sha256:45eb5a6fd193d6cf7e0cf5d8a5b31f83d5faae0293695626f539a823e93b13f6" - ], - "version": "==1.1.1" - }, - "itsdangerous": { - "hashes": [ - "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", - "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" - ], - "version": "==1.1.0" - }, - "jinja2": { - "hashes": [ - "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", - "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" - ], - "version": "==2.11.3" - }, - "jmespath": { - "hashes": [ - "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", - "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f" - ], - "version": "==0.10.0" - }, - "markupsafe": { - "hashes": [ - "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", - "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f", - "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014", - "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - 
"sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85", - "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850", - "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1", - "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", - "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5", - "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c", - "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", - "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", - "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" - ], - "version": "==1.1.1" - }, - "python-dateutil": { - "hashes": [ - "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", - "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" - ], - "version": "==2.8.1" - }, - "s3transfer": { - "hashes": [ - "sha256:5d48b1fd2232141a9d5fb279709117aaba506cacea7f86f11bc392f06bfa8fc2", - "sha256:c5dadf598762899d8cfaecf68eba649cd25b0ce93b6c954b156aaa3eed160547" - ], - "version": "==0.3.6" - }, - "six": { - "hashes": [ - "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", - "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" - ], - "version": "==1.15.0" - }, - "urllib3": { - 
"hashes": [ - "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2", - "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e" - ], - "markers": "python_version != '3.4'", - "version": "==1.25.11" - }, - "werkzeug": { - "hashes": [ - "sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43", - "sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c" - ], - "version": "==1.0.1" - } - }, - "develop": { - "attrs": { - "hashes": [ - "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", - "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" - ], - "version": "==20.3.0" - }, - "more-itertools": { - "hashes": [ - "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced", - "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713" - ], - "version": "==8.7.0" - }, - "packaging": { - "hashes": [ - "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", - "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" - ], - "version": "==20.9" - }, - "pluggy": { - "hashes": [ - "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", - "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" - ], - "version": "==0.13.1" - }, - "py": { - "hashes": [ - "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", - "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" - ], - "version": "==1.10.0" - }, - "pyparsing": { - "hashes": [ - "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", - "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" - ], - "version": "==2.4.7" - }, - "pytest": { - "hashes": [ - "sha256:0d5fe9189a148acc3c3eb2ac8e1ac0742cb7618c084f3d228baaec0c254b318d", - "sha256:ff615c761e25eb25df19edddc0b970302d2a9091fbce0e7213298d85fb61fef6" - ], - "version": "==5.3.5" - }, - "wcwidth": { - "hashes": [ - "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", - "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" - ], - "version": "==0.2.5" - } - } -} diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index 752c49c6..43ce4eee 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" } } diff --git a/tests/poetry/.gitignore b/tests/poetry/.gitignore deleted file mode 100644 index 3c2369dc..00000000 --- a/tests/poetry/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Serverless -.serverless -.requirements -unzip_requirements.py diff --git a/tests/poetry/package.json b/tests/poetry/package.json index 752c49c6..43ce4eee 100644 --- a/tests/poetry/package.json +++ b/tests/poetry/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" } } diff --git a/tests/poetry/poetry.lock b/tests/poetry/poetry.lock deleted file mode 100644 index 5d8eccc7..00000000 --- 
a/tests/poetry/poetry.lock +++ /dev/null @@ -1,197 +0,0 @@ -[[package]] -category = "main" -description = "The AWS SDK for Python" -name = "boto3" -optional = false -python-versions = "*" -version = "1.9.199" - -[package.dependencies] -botocore = ">=1.12.199,<1.13.0" -jmespath = ">=0.7.1,<1.0.0" -s3transfer = ">=0.2.0,<0.3.0" - -[[package]] -category = "main" -description = "Low-level, data-driven core of boto 3." -name = "botocore" -optional = false -python-versions = "*" -version = "1.12.199" - -[package.dependencies] -docutils = ">=0.10,<0.15" -jmespath = ">=0.7.1,<1.0.0" - -[package.dependencies.python-dateutil] -python = ">=2.7" -version = ">=2.1,<3.0.0" - -[package.dependencies.urllib3] -python = ">=3.4" -version = ">=1.20,<1.26" - -[[package]] -category = "main" -description = "Fast and simple WSGI-framework for small web-applications." -name = "bottle" -optional = false -python-versions = "*" -version = "0.12.16" - -[package.source] -reference = "0.12.16" -type = "git" -url = "https://git@github.com/bottlepy/bottle.git" -[[package]] -category = "main" -description = "Composable command line interface toolkit" -name = "click" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "7.0" - -[[package]] -category = "main" -description = "Docutils -- Python Documentation Utilities" -name = "docutils" -optional = false -python-versions = "*" -version = "0.14" - -[[package]] -category = "main" -description = "A simple framework for building complex web applications." -name = "flask" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "1.1.1" - -[package.dependencies] -Jinja2 = ">=2.10.1" -Werkzeug = ">=0.15" -click = ">=5.1" -itsdangerous = ">=0.24" - -[package.extras] -dev = ["pytest", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet", "sphinx-issues"] -docs = ["sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet", "sphinx-issues"] -dotenv = ["python-dotenv"] - -[[package]] -category = "main" -description = "Various helpers to pass data to untrusted environments and back." -name = "itsdangerous" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.1.0" - -[[package]] -category = "main" -description = "A small but fast and easy to use stand-alone template engine written in pure python." -name = "jinja2" -optional = false -python-versions = "*" -version = "2.10.1" - -[package.dependencies] -MarkupSafe = ">=0.23" - -[package.extras] -i18n = ["Babel (>=0.8)"] - -[[package]] -category = "main" -description = "JSON Matching Expressions" -name = "jmespath" -optional = false -python-versions = "*" -version = "0.9.4" - -[[package]] -category = "main" -description = "Safely add untrusted strings to HTML/XML markup." 
-name = "markupsafe" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.1.1" - -[[package]] -category = "main" -description = "Extensions to the standard Python datetime module" -marker = "python_version >= \"2.7\"" -name = "python-dateutil" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.8.0" - -[package.dependencies] -six = ">=1.5" - -[[package]] -category = "main" -description = "An Amazon S3 Transfer Manager" -name = "s3transfer" -optional = false -python-versions = "*" -version = "0.2.1" - -[package.dependencies] -botocore = ">=1.12.36,<2.0.0" - -[[package]] -category = "main" -description = "Python 2 and 3 compatibility utilities" -marker = "python_version >= \"2.7\"" -name = "six" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*" -version = "1.12.0" - -[[package]] -category = "main" -description = "HTTP library with thread-safe connection pooling, file post, and more." -marker = "python_version >= \"3.4\"" -name = "urllib3" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" -version = "1.25.3" - -[package.extras] -brotli = ["brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] - -[[package]] -category = "main" -description = "The comprehensive WSGI web application library." -name = "werkzeug" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.15.5" - -[package.extras] -dev = ["pytest", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinx-issues"] -termcolor = ["termcolor"] -watchdog = ["watchdog"] - -[metadata] -content-hash = "76568ab19fae4155c453c30ac4362880dac17b449380f5a2dd017dc8a4ec4a2f" -python-versions = "^3.6" - -[metadata.hashes] -boto3 = ["0cd4a3e158f40eedb54b36b3fbe60d135db74a245f0ca8eead1af2eb6d46a649", "68e9eba6f846cf8e01973ec565afdb1adfb9612b531c15bb5c5524394db4df5b"] -botocore = ["25d87047241b7b775443570c0e790ca952f9f7491d4d6472430a4b006383a257", "e4729c1acaa936d4c5c948a18d279f92bbf61fad9b5fb03942c753ec405e427d"] -bottle = [] -click = ["2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"] -docutils = ["02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", "51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274", "7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6"] -flask = ["13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52", "45eb5a6fd193d6cf7e0cf5d8a5b31f83d5faae0293695626f539a823e93b13f6"] -itsdangerous = ["321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"] -jinja2 = ["065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", "14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b"] -jmespath = ["3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", "bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"] -markupsafe = ["00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", "09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", "09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", "1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", "24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", 
"29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", "43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", "46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", "500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", "535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", "62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", "6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", "717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", "79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", "7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", "88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", "8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", "98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", "9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", "9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", "ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", "b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", "b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", "b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", "ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", "c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", "cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", "e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"] -python-dateutil = ["7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", "c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"] -s3transfer = ["6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d", "b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba"] -six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] -urllib3 = ["b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1", "dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"] -werkzeug = ["87ae4e5b5366da2347eb3116c0e6c681a0e939a33b2805e2c0cbd282664932c4", "a13b74dd3c45f758d4ebdb224be8f1ab8ef58b3c0ffc1783a8c7d9f4f50227e6"] From 92e36149f863d1eeeade7e381b19b0619a2d9b83 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Apr 2021 21:31:39 +0000 Subject: [PATCH 227/328] chore(deps): bump bottle from 0.12.13 to 0.12.19 in /tests/base Bumps [bottle](https://github.com/bottlepy/bottle) from 0.12.13 to 0.12.19. 
- [Release notes](https://github.com/bottlepy/bottle/releases) - [Changelog](https://github.com/bottlepy/bottle/blob/master/docs/changelog.rst) - [Commits](https://github.com/bottlepy/bottle/compare/0.12.13...0.12.19) Signed-off-by: dependabot[bot] --- tests/base/requirements-w-hashes.txt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/base/requirements-w-hashes.txt b/tests/base/requirements-w-hashes.txt index 47443a7b..428e8528 100644 --- a/tests/base/requirements-w-hashes.txt +++ b/tests/base/requirements-w-hashes.txt @@ -11,8 +11,9 @@ botocore==1.12.50 \ --hash=sha256:07fae5a2b8cfb5a92c1dbee3f2feb4da7c471bcead7e18ce735babe5f39e270f \ --hash=sha256:eeaa190f50ee05a56225ee78c64cb8bf0c3bf090ec605ca6c2f325aa3826a347 \ # via boto3, s3transfer -bottle==0.12.13 \ - --hash=sha256:39b751aee0b167be8dffb63ca81b735bbf1dd0905b3bc42761efedee8f123355 +bottle==0.12.19 \ + --hash=sha256:f6b8a34fe9aa406f9813c02990db72ca69ce6a158b5b156d2c41f345016a723d \ + --hash=sha256:a9d73ffcbc6a1345ca2d7949638db46349f5b2b77dac65d6494d45c23628da2c click==7.0 \ --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ From da36249f53df2313ac113caaecbf5db94286bdce Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Wed, 11 Aug 2021 13:28:55 +0200 Subject: [PATCH 228/328] test: Pin dependencies to avoid unexpected upgrades --- tests/base/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/base/requirements.txt b/tests/base/requirements.txt index 24a42e66..23bfb7a6 100644 --- a/tests/base/requirements.txt +++ b/tests/base/requirements.txt @@ -1,3 +1,3 @@ -flask +flask==0.12.5 bottle boto3 From e7e9a6a0c501dc1a20c458c9f74acec86115b7f0 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Wed, 11 Aug 2021 11:23:50 +0200 Subject: [PATCH 229/328] fix: Ignore functions defined with `image` --- index.js | 2 +- test.js | 71 +++++++++++++++++++++++++++++++++++++++ tests/base/serverless.yml | 3 ++ 3 files changed, 75 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index a61ca8fb..d7d4ecb0 100644 --- a/index.js +++ b/index.js @@ -102,7 +102,7 @@ class ServerlessPythonRequirements { let inputOpt = this.serverless.processedInput.options; return inputOpt.function ? 
[inputOpt.functionObj] - : values(this.serverless.service.functions); + : values(this.serverless.service.functions).filter((func) => !func.image); } /** diff --git a/test.js b/test.js index e35b521c..0322ab91 100644 --- a/test.js +++ b/test.js @@ -1520,6 +1520,8 @@ test( { skip: !hasPython(3.6) } ); + + test( 'py3.6 can package flask with package individually & slim option', async t => { @@ -1769,6 +1771,40 @@ test( { skip: !hasPython(2.7) } ); +test( + 'py2.7 can ignore functions defined with `image`', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--individually=true', '--runtime=python2.7', 'package']); + + t.true( + pathExistsSync('.serverless/hello.zip'), + 'function hello is packaged' + ); + t.true( + pathExistsSync('.serverless/hello2.zip'), + 'function hello2 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello3.zip'), + 'function hello3 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello4.zip'), + 'function hello4 is packaged' + ); + t.false( + pathExistsSync('.serverless/hello5.zip'), + 'function hello5 is not packaged' + ); + + t.end(); + }, + { skip: !hasPython(2.7) } +); + test( 'py3.6 can package only requirements of module', async t => { @@ -2234,3 +2270,38 @@ test( }, { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } ); + +test( + 'py3.6 can ignore functions defined with `image`', + async t => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['--individually=true', 'package']); + + + t.true( + pathExistsSync('.serverless/hello.zip'), + 'function hello is packaged' + ); + t.true( + pathExistsSync('.serverless/hello2.zip'), + 'function hello2 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello3.zip'), + 'function hello3 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello4.zip'), + 'function hello4 is packaged' + ); + t.false( + pathExistsSync('.serverless/hello5.zip'), + 'function hello5 is not packaged' + ); + + t.end(); + }, + { skip: !hasPython(3.6) } +); diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 0763da0a..7ca64ab4 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -49,5 +49,8 @@ functions: package: include: - 'fn2/**' + hello5: + image: 000000000000.dkr.ecr.sa-east-1.amazonaws.com/test-lambda-docker@sha256:6bb600b4d6e1d7cf521097177dd0c4e9ea373edb91984a505333be8ac9455d38 + From cccb7c3ed71876329507c028a24d0a9b1907f894 Mon Sep 17 00:00:00 2001 From: Mariusz Nowak Date: Thu, 12 Aug 2021 11:03:28 +0200 Subject: [PATCH 230/328] fix: Fix help output for `requirements` container command --- index.js | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/index.js b/index.js index d7d4ecb0..09289193 100644 --- a/index.js +++ b/index.js @@ -118,8 +118,6 @@ class ServerlessPythonRequirements { this.commands = { requirements: { - usage: 'Serverless plugin to bundle Python packages', - lifecycleEvents: ['requirements'], commands: { clean: { usage: 'Remove .requirements and requirements.zip', @@ -138,6 +136,15 @@ class ServerlessPythonRequirements { } }; + if (this.serverless.cli.generateCommandsHelp) { + Object.assign(this.commands.requirements, { + usage: 'Serverless plugin to bundle Python packages', + lifecycleEvents: ['requirements'] + }); + } else { + this.commands.requirements.type = 'container'; + } + const isFunctionRuntimePython = args => { // If functionObj.runtime is undefined, python. 
if (!args[1].functionObj || !args[1].functionObj.runtime) { From 01337625a59ee5c60d5a0da74f49ff55bb202268 Mon Sep 17 00:00:00 2001 From: Mariusz Nowak Date: Thu, 1 Jul 2021 16:27:00 +0200 Subject: [PATCH 231/328] chore: Register `serverless` as peer dependency It's to ensure only compatible versions or Framework are used together with a plugin --- package.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/package.json b/package.json index e09dadd5..51e5ea06 100644 --- a/package.json +++ b/package.json @@ -67,6 +67,9 @@ "sha256-file": "1.0.0", "shell-quote": "^1.7.2" }, + "peerDependencies": { + "serverless": "^1.34 || 2" + }, "eslintConfig": { "extends": "eslint:recommended", "env": { From 51535a678180293beb2423375b05c9b0d94fa9fc Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 16 Aug 2021 15:18:36 +0200 Subject: [PATCH 232/328] fix(Packaging): Ensure support for `package.patterns` BREAKING CHANGE: Plugin now relies on `package.patterns` which was introduced with Serverless Framework `v2.32.0` --- lib/zip.js | 17 +++++++++-------- package.json | 2 +- tests/base/serverless.yml | 7 +++---- tests/individually/serverless.yml | 4 ++-- tests/non_build_pyproject/serverless.yml | 7 +++---- tests/non_poetry_pyproject/serverless.yml | 7 +++---- tests/pipenv/serverless.yml | 7 +++---- tests/poetry/serverless.yml | 7 +++---- 8 files changed, 27 insertions(+), 31 deletions(-) diff --git a/lib/zip.js b/lib/zip.js index 9076fb81..2e872aa9 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -18,13 +18,14 @@ function addVendorHelper() { if (this.serverless.service.package.individually) { return BbPromise.resolve(this.targetFuncs) .map((f) => { - if (!get(f, 'package.include')) { - set(f, ['package', 'include'], []); + if (!get(f, 'package.patterns')) { + set(f, ['package', 'patterns'], []); } if (!get(f, 'module')) { set(f, ['module'], '.'); } - f.package.include.push('unzip_requirements.py'); + + f.package.patterns.push('unzip_requirements.py'); return f; }) .then((functions) => uniqBy(functions, (func) => func.module)) @@ -41,11 +42,11 @@ function addVendorHelper() { } else { this.serverless.cli.log('Adding Python requirements helper...'); - if (!get(this.serverless.service, 'package.include')) { - set(this.serverless.service, ['package', 'include'], []); + if (!get(this.serverless.service, 'package.patterns')) { + set(this.serverless.service, ['package', 'patterns'], []); } - this.serverless.service.package.include.push('unzip_requirements.py'); + this.serverless.service.package.patterns.push('unzip_requirements.py'); return fse.copyAsync( path.resolve(__dirname, '../unzip_requirements.py'), @@ -106,7 +107,7 @@ function packRequirements() { this.serverless.cli.log( `Zipping required Python packages for ${f.module}...` ); - f.package.include.push(`${f.module}/.requirements.zip`); + f.package.patterns.push(`${f.module}/.requirements.zip`); return addTree( new JSZip(), `.serverless/${f.module}/requirements` @@ -114,7 +115,7 @@ function packRequirements() { }); } else { this.serverless.cli.log('Zipping required Python packages...'); - this.serverless.service.package.include.push('.requirements.zip'); + this.serverless.service.package.patterns.push('.requirements.zip'); return addTree(new JSZip(), '.serverless/requirements').then((zip) => writeZip(zip, path.join(this.servicePath, '.requirements.zip')) ); diff --git a/package.json b/package.json index 51e5ea06..4d40da5e 100644 --- a/package.json +++ b/package.json @@ -68,7 +68,7 @@ "shell-quote": "^1.7.2" }, "peerDependencies": { - "serverless": "^1.34 
|| 2" + "serverless": "^2.32" }, "eslintConfig": { "extends": "eslint:recommended", diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 7ca64ab4..6bb1f322 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -30,9 +30,8 @@ custom: package: individually: ${opt:individually, self:custom.defaults.individually} - exclude: - - '**/*' - include: + patterns: + - '!**/*' - 'handler.py' functions: @@ -47,7 +46,7 @@ functions: handler: fn2_handler.hello module: fn2 package: - include: + patterns: - 'fn2/**' hello5: image: 000000000000.dkr.ecr.sa-east-1.amazonaws.com/test-lambda-docker@sha256:6bb600b4d6e1d7cf521097177dd0c4e9ea373edb91984a505333be8ac9455d38 diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index 0e41cb06..121bd89d 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -6,8 +6,8 @@ provider: package: individually: true - exclude: - - 'node_modules/**' + patterns: + - '!node_modules/**' custom: pythonRequirements: dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} diff --git a/tests/non_build_pyproject/serverless.yml b/tests/non_build_pyproject/serverless.yml index 973132c8..02e5a1f3 100644 --- a/tests/non_build_pyproject/serverless.yml +++ b/tests/non_build_pyproject/serverless.yml @@ -11,10 +11,9 @@ custom: usePoetry: false package: - exclude: - - '**/*' - include: - - handler.py + patterns: + - '!**/*' + - 'handler.py' functions: hello: diff --git a/tests/non_poetry_pyproject/serverless.yml b/tests/non_poetry_pyproject/serverless.yml index 2456a72a..3d872a87 100644 --- a/tests/non_poetry_pyproject/serverless.yml +++ b/tests/non_poetry_pyproject/serverless.yml @@ -8,10 +8,9 @@ plugins: - serverless-python-requirements package: - exclude: - - '**/*' - include: - - handler.py + patterns: + - '!**/*' + - 'handler.py' functions: hello: diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index 6df76a55..dd93e290 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -21,10 +21,9 @@ custom: dockerizePip: false package: - exclude: - - '**/*' - include: - - handler.py + patterns: + - '!**/*' + - 'handler.py' functions: hello: diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml index 6df76a55..dd93e290 100644 --- a/tests/poetry/serverless.yml +++ b/tests/poetry/serverless.yml @@ -21,10 +21,9 @@ custom: dockerizePip: false package: - exclude: - - '**/*' - include: - - handler.py + patterns: + - '!**/*' + - 'handler.py' functions: hello: From 891a53847e0053ce69f959e27830cd4859f932a0 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 16 Aug 2021 15:19:20 +0200 Subject: [PATCH 233/328] ci: Ensure to use latest major during tests --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index aa94f4b0..f89486bf 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -34,7 +34,7 @@ jobs: run: python -m pip install pipenv poetry - name: Install serverless - run: npm install -g serverless + run: npm install -g serverless@2 - name: Install deps run: npm install From 3f627637e04b0f77ea526b3d21dfceacf48d1dfe Mon Sep 17 00:00:00 2001 From: Mariusz Nowak Date: Thu, 28 Oct 2021 10:16:44 +0200 Subject: [PATCH 234/328] docs: Reference Capital One --- README.md | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 0b3bad55..d52a712f 100644 --- a/README.md 
+++ b/README.md @@ -5,10 +5,15 @@ [![npm](https://img.shields.io/npm/v/serverless-python-requirements.svg)](https://www.npmjs.com/package/serverless-python-requirements) [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) -A Serverless v1.x plugin to automatically bundle dependencies from -`requirements.txt` and make them available in your `PYTHONPATH`. +A Serverless v1.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. -## Requires Serverless >= v1.34 +--- + +_Originally developed by [**Capital One**](https://github.com/UnitedIncome), now maintained in scope of Serverless, Inc_ + +_Capital One considers itself the bank a technology company would build. It's delivering best-in-class innovation so that its millions of customers can manage their finances with ease. Capital One is all-in on the cloud and is a leader in the adoption of open source, RESTful APIs, microservices and containers. We build our own products and release them with a speed and agility that allows us to get new customer experiences to market quickly. Our engineers use artificial intelligence and machine learning to transform real-time data, software and algorithms into the future of finance, reimagined._ + +--- ## Install From 4c576345588bcf85d2d85ab5c2a6e7eea9ebb8fa Mon Sep 17 00:00:00 2001 From: Mariusz Nowak Date: Wed, 3 Nov 2021 21:42:17 +0100 Subject: [PATCH 235/328] docs: Improve introduction --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d52a712f..d1746ca5 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ A Serverless v1.x plugin to automatically bundle dependencies from `requirements --- -_Originally developed by [**Capital One**](https://github.com/UnitedIncome), now maintained in scope of Serverless, Inc_ +_Originally developed by [**Capital One**](https://www.capitalone.com/tech/open-source/), now maintained in scope of Serverless, Inc_ _Capital One considers itself the bank a technology company would build. It's delivering best-in-class innovation so that its millions of customers can manage their finances with ease. Capital One is all-in on the cloud and is a leader in the adoption of open source, RESTful APIs, microservices and containers. We build our own products and release them with a speed and agility that allows us to get new customer experiences to market quickly. 
Our engineers use artificial intelligence and machine learning to transform real-time data, software and algorithms into the future of finance, reimagined._ From 36a469b58544fe6c855e67374a0a2b9af66a3ef1 Mon Sep 17 00:00:00 2001 From: Riley <8637253+rileypriddle@users.noreply.github.com> Date: Thu, 18 Nov 2021 15:28:58 +0200 Subject: [PATCH 236/328] feat: Introduce schema validation for `module` property (#641) --- index.js | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index 09289193..2072bbc1 100644 --- a/index.js +++ b/index.js @@ -115,7 +115,18 @@ class ServerlessPythonRequirements { this.serverless = serverless; this.servicePath = this.serverless.config.servicePath; this.warningLogged = false; - + if ( + this.serverless.configSchemaHandler && + this.serverless.configSchemaHandler.defineFunctionProperties + ) { + this.serverless.configSchemaHandler.defineFunctionProperties('aws', { + properties: { + module: { + type: 'string', + }, + }, + }); + } this.commands = { requirements: { commands: { From a7723bdb76fe4f62690d3926728aa0ad35bb395d Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 18 Nov 2021 15:03:29 +0100 Subject: [PATCH 237/328] chore: Bump dependencies --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 4d40da5e..ed2e279c 100644 --- a/package.json +++ b/package.json @@ -45,7 +45,7 @@ }, "devDependencies": { "cross-spawn": "*", - "eslint": "^7.22.0", + "eslint": "^7.32.0", "lodash": "^4.17.21", "prettier": "^2", "tape": "*", @@ -58,14 +58,14 @@ "fs-extra": "^9.1.0", "glob-all": "^3.2.1", "is-wsl": "^2.2.0", - "jszip": "^3.6.0", + "jszip": "^3.7.1", "lodash.get": "^4.4.2", "lodash.set": "^4.3.2", "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", "sha256-file": "1.0.0", - "shell-quote": "^1.7.2" + "shell-quote": "^1.7.3" }, "peerDependencies": { "serverless": "^2.32" From ea382341529e3057f849ae7d85f18d8b57fb3577 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 18 Nov 2021 15:06:23 +0100 Subject: [PATCH 238/328] chore: Release v5.2.0 --- README.md | 1 + package.json | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index d1746ca5..518d5ce1 100644 --- a/README.md +++ b/README.md @@ -565,3 +565,4 @@ package: - [@bryantbriggs](https://github.com/bryantbiggs) - Fixing CI/CD - [@jacksgt](https://github.com/jacksgt) - Fixing pip issues - [@lephuongbg](https://github.com/lephuongbg) - Fixing single function deployment +- [@rileypriddle](https://github.com/rileypriddle) - Introducing schema validation for `module` property diff --git a/package.json b/package.json index ed2e279c..1fed4c39 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.1.1", + "version": "5.2.0", "engines": { "node": ">=12.0" }, From 50c2850874ded795fd50ae377f1db817a0212e7d Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Wed, 24 Nov 2021 11:42:35 +0100 Subject: [PATCH 239/328] refactor: Adapt to `async` version of `spawn` --- lib/docker.js | 37 ++++++------- lib/pip.js | 147 +++++++++++++++++++++++++++----------------------- lib/pipenv.js | 28 +++++----- lib/poetry.js | 47 ++++++++-------- package.json | 1 + 5 files changed, 136 insertions(+), 124 deletions(-) diff --git a/lib/docker.js b/lib/docker.js index 328e3088..94229b21 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -1,4 +1,4 @@ -const { spawnSync } = require('child_process'); +const spawn = 
require('child-process-ext/spawn'); const isWsl = require('is-wsl'); const fse = require('fs-extra'); const path = require('path'); @@ -8,18 +8,19 @@ const path = require('path'); * @param {string[]} options * @return {Object} */ -function dockerCommand(options) { +async function dockerCommand(options) { const cmd = 'docker'; - const ps = spawnSync(cmd, options, { encoding: 'utf-8' }); - if (ps.error) { - if (ps.error.code === 'ENOENT') { + try { + return await spawn(cmd, options, { encoding: 'utf-8' }); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { throw new Error('docker not found! Please install it.'); } - throw new Error(ps.error); - } else if (ps.status !== 0) { - throw new Error(ps.stderr); + throw e; } - return ps; } /** @@ -28,7 +29,7 @@ function dockerCommand(options) { * @param {string[]} extraArgs * @return {string} The name of the built docker image. */ -function buildImage(dockerFile, extraArgs) { +async function buildImage(dockerFile, extraArgs) { const imageName = 'sls-py-reqs-custom'; const options = ['build', '-f', dockerFile, '-t', imageName]; @@ -40,7 +41,7 @@ function buildImage(dockerFile, extraArgs) { options.push('.'); - dockerCommand(options); + await dockerCommand(options); return imageName; } @@ -72,7 +73,7 @@ function findTestFile(servicePath) { * @param {string} bindPath * @return {boolean} */ -function tryBindPath(serverless, bindPath, testFile) { +async function tryBindPath(serverless, bindPath, testFile) { const debug = process.env.SLS_DEBUG; const options = [ 'run', @@ -85,7 +86,7 @@ function tryBindPath(serverless, bindPath, testFile) { ]; try { if (debug) serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); - const ps = dockerCommand(options); + const ps = await dockerCommand(options); if (debug) serverless.cli.log(ps.stdout.trim()); return ps.stdout.trim() === `/test/${testFile}`; } catch (err) { @@ -100,14 +101,14 @@ function tryBindPath(serverless, bindPath, testFile) { * @param {string} servicePath * @return {string} The bind path. 
*/ -function getBindPath(serverless, servicePath) { +async function getBindPath(serverless, servicePath) { // Determine bind path if (process.platform !== 'win32' && !isWsl) { return servicePath; } // test docker is available - dockerCommand(['version']); + await dockerCommand(['version']); // find good bind path for Windows let bindPaths = []; @@ -144,7 +145,7 @@ function getBindPath(serverless, servicePath) { for (let i = 0; i < bindPaths.length; i++) { const bindPath = bindPaths[i]; - if (tryBindPath(serverless, bindPath, testFile)) { + if (await tryBindPath(serverless, bindPath, testFile)) { return bindPath; } } @@ -157,7 +158,7 @@ function getBindPath(serverless, servicePath) { * @param {string} bindPath * @return {boolean} */ -function getDockerUid(bindPath) { +async function getDockerUid(bindPath) { const options = [ 'run', '--rm', @@ -169,7 +170,7 @@ function getDockerUid(bindPath) { '%u', '/bin/sh', ]; - const ps = dockerCommand(options); + const ps = await dockerCommand(options); return ps.stdout.trim(); } diff --git a/lib/pip.js b/lib/pip.js index 244010c8..78af2e20 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -3,7 +3,7 @@ const rimraf = require('rimraf'); const path = require('path'); const get = require('lodash.get'); const set = require('lodash.set'); -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); const { getStripCommand, getStripMode, deleteFiles } = require('./slim'); @@ -96,16 +96,23 @@ function generateRequirementsFile( } } -function pipAcceptsSystem(pythonBin) { +async function pipAcceptsSystem(pythonBin) { // Check if pip has Debian's --system option and set it if so - const pipTestRes = spawnSync(pythonBin, ['-m', 'pip', 'help', 'install']); - if (pipTestRes.error) { - if (pipTestRes.error.code === 'ENOENT') { + try { + const pipTestRes = await spawn(pythonBin, ['-m', 'pip', 'help', 'install']); + return ( + pipTestRes.stdoutBuffer && + pipTestRes.stdoutBuffer.toString().indexOf('--system') >= 0 + ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { throw new Error(`${pythonBin} not found! 
Try the pythonBin option.`); } - throw pipTestRes.error; + throw e; } - return pipTestRes.stdout.toString().indexOf('--system') >= 0; } /** @@ -115,7 +122,7 @@ function pipAcceptsSystem(pythonBin) { * @param {Object} options * @return {undefined} */ -function installRequirements(targetFolder, serverless, options) { +async function installRequirements(targetFolder, serverless, options) { const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); serverless.cli.log( @@ -176,7 +183,7 @@ function installRequirements(targetFolder, serverless, options) { pipCmd.push('--cache-dir', downloadCacheDir); } - if (pipAcceptsSystem(options.pythonBin)) { + if (await pipAcceptsSystem(options.pythonBin)) { pipCmd.push('--system'); } } @@ -191,7 +198,7 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log( `Building custom docker image from ${options.dockerFile}...` ); - dockerImage = buildImage( + dockerImage = await buildImage( options.dockerFile, options.dockerBuildCmdExtraArgs ); @@ -201,7 +208,9 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log(`Docker Image: ${dockerImage}`); // Prepare bind path depending on os platform - const bindPath = dockerPathForWin(getBindPath(serverless, targetFolder)); + const bindPath = dockerPathForWin( + await getBindPath(serverless, targetFolder) + ); dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); if (options.dockerSsh) { @@ -233,7 +242,7 @@ function installRequirements(targetFolder, serverless, options) { fse.closeSync( fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') ); - const windowsized = getBindPath(serverless, downloadCacheDir); + const windowsized = await getBindPath(serverless, downloadCacheDir); // And now push it to a volume mount and to pip... dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); pipCmd.push('--cache-dir', dockerDownloadCacheDir); @@ -262,7 +271,7 @@ function installRequirements(targetFolder, serverless, options) { ]); } else { // Use same user so --cache-dir works - dockerCmd.push('-u', getDockerUid(bindPath)); + dockerCmd.push('-u', await getDockerUid(bindPath)); } for (let path of options.dockerExtraFiles) { @@ -315,22 +324,23 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log(`Running ${quote(dockerCmd)}...`); - filterCommands(mainCmds).forEach(([cmd, ...args]) => { - const res = spawnSync(cmd, args); - if (res.error) { - if (res.error.code === 'ENOENT') { + for (const [cmd, ...args] of mainCmds) { + try { + await spawn(cmd, args); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { const advice = cmd.indexOf('python') > -1 ? 'Try the pythonBin option' : 'Please install it'; throw new Error(`${cmd} not found! 
${advice}`); } - throw res.error; - } - if (res.status !== 0) { - throw new Error(`STDOUT: ${res.stdout}\n\nSTDERR: ${res.stderr}`); + throw e; } - }); + } // If enabled slimming, delete files in slimPatterns if (options.slim === true || options.slim === 'true') { deleteFiles(options, targetFolder); @@ -489,7 +499,7 @@ function requirementsFileExists(servicePath, options, fileName) { * @param {Object} serverless * @return {string} */ -function installRequirementsIfNeeded( +async function installRequirementsIfNeeded( servicePath, modulePath, options, @@ -573,7 +583,7 @@ function installRequirementsIfNeeded( fse.copySync(slsReqsTxt, path.join(workingReqsFolder, 'requirements.txt')); // Then install our requirements from this folder - installRequirements(workingReqsFolder, serverless, options); + await installRequirements(workingReqsFolder, serverless, options); // Copy vendor libraries to requirements folder if (options.vendor) { @@ -596,7 +606,7 @@ function installRequirementsIfNeeded( * pip install the requirements to the requirements directory * @return {undefined} */ -function installAllRequirements() { +async function installAllRequirements() { // fse.ensureDirSync(path.join(this.servicePath, '.serverless')); // First, check and delete cache versions, if enabled checkForAndDeleteMaxCacheVersions(this.options, this.serverless); @@ -604,55 +614,56 @@ function installAllRequirements() { // Then if we're going to package functions individually... if (this.serverless.service.package.individually) { let doneModules = []; - this.targetFuncs - .filter((func) => - (func.runtime || this.serverless.service.provider.runtime).match( - /^python.*/ - ) + const filteredFuncs = this.targetFuncs.filter((func) => + (func.runtime || this.serverless.service.provider.runtime).match( + /^python.*/ ) - .map((f) => { - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - // If we didn't already process a module (functions can re-use modules) - if (!doneModules.includes(f.module)) { - const reqsInstalledAt = installRequirementsIfNeeded( - this.servicePath, - f.module, - this.options, - f, - this.serverless - ); - // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are - let modulePath = path.join( - this.servicePath, - '.serverless', - `${f.module}`, - 'requirements' - ); - // Only do if we didn't already do it - if ( - reqsInstalledAt && - !fse.existsSync(modulePath) && - reqsInstalledAt != modulePath - ) { - if (this.options.useStaticCache) { - // Windows can't symlink so we have to copy on Windows, - // it's not as fast, but at least it works - if (process.platform == 'win32') { - fse.copySync(reqsInstalledAt, modulePath); - } else { - fse.symlink(reqsInstalledAt, modulePath); - } + ); + + for (const f of filteredFuncs) { + if (!get(f, 'module')) { + set(f, ['module'], '.'); + } + + // If we didn't already process a module (functions can re-use modules) + if (!doneModules.includes(f.module)) { + const reqsInstalledAt = await installRequirementsIfNeeded( + this.servicePath, + f.module, + this.options, + f, + this.serverless + ); + // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are + let modulePath = path.join( + this.servicePath, + '.serverless', + `${f.module}`, + 'requirements' + ); + // Only do if we didn't already do it + if ( + reqsInstalledAt && + !fse.existsSync(modulePath) && + reqsInstalledAt != modulePath + ) { + if (this.options.useStaticCache) { + // Windows can't 
symlink so we have to copy on Windows, + // it's not as fast, but at least it works + if (process.platform == 'win32') { + fse.copySync(reqsInstalledAt, modulePath); } else { - fse.rename(reqsInstalledAt, modulePath); + fse.symlink(reqsInstalledAt, modulePath); } + } else { + fse.rename(reqsInstalledAt, modulePath); } - doneModules.push(f.module); } - }); + doneModules.push(f.module); + } + } } else { - const reqsInstalledAt = installRequirementsIfNeeded( + const reqsInstalledAt = await installRequirementsIfNeeded( this.servicePath, '', this.options, diff --git a/lib/pipenv.js b/lib/pipenv.js index 063fb5d8..e5731aaf 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -1,12 +1,12 @@ const fse = require('fs-extra'); const path = require('path'); -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); const { EOL } = require('os'); /** * pipenv install */ -function pipfileToRequirements() { +async function pipfileToRequirements() { if ( !this.options.usePipenv || !fse.existsSync(path.join(this.servicePath, 'Pipfile')) @@ -16,28 +16,26 @@ function pipfileToRequirements() { this.serverless.cli.log('Generating requirements.txt from Pipfile...'); - const res = spawnSync( - 'pipenv', - ['lock', '--requirements', '--keep-outdated'], - { + let res; + try { + res = await spawn('pipenv', ['lock', '--requirements', '--keep-outdated'], { cwd: this.servicePath, - } - ); - if (res.error) { - if (res.error.code === 'ENOENT') { + }); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { throw new Error( `pipenv not found! Install it with 'pip install pipenv'.` ); } - throw new Error(res.error); - } - if (res.status !== 0) { - throw new Error(res.stderr); + throw e; } fse.ensureDirSync(path.join(this.servicePath, '.serverless')); fse.writeFileSync( path.join(this.servicePath, '.serverless/requirements.txt'), - removeEditableFlagFromRequirementsString(res.stdout) + removeEditableFlagFromRequirementsString(res.stdoutBuffer) ); } diff --git a/lib/poetry.js b/lib/poetry.js index 553a1392..55f83289 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -1,44 +1,45 @@ const fs = require('fs'); const fse = require('fs-extra'); const path = require('path'); -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); const tomlParse = require('@iarna/toml/parse-string'); /** * poetry install */ -function pyprojectTomlToRequirements() { +async function pyprojectTomlToRequirements() { if (!this.options.usePoetry || !isPoetryProject(this.servicePath)) { return; } this.serverless.cli.log('Generating requirements.txt from pyproject.toml...'); - const res = spawnSync( - 'poetry', - [ - 'export', - '--without-hashes', - '-f', - 'requirements.txt', - '-o', - 'requirements.txt', - '--with-credentials', - ], - { - cwd: this.servicePath, - } - ); - if (res.error) { - if (res.error.code === 'ENOENT') { + try { + await spawn( + 'poetry', + [ + 'export', + '--without-hashes', + '-f', + 'requirements.txt', + '-o', + 'requirements.txt', + '--with-credentials', + ], + { + cwd: this.servicePath, + } + ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { throw new Error( `poetry not found! 
Install it according to the poetry docs.` ); } - throw new Error(res.error); - } - if (res.status !== 0) { - throw new Error(res.stderr); + throw e; } const editableFlag = new RegExp(/^-e /gm); diff --git a/package.json b/package.json index 1fed4c39..c9d247a3 100644 --- a/package.json +++ b/package.json @@ -55,6 +55,7 @@ "@iarna/toml": "^2.2.5", "appdirectory": "^0.1.0", "bluebird": "^3.7.2", + "child-process-ext": "^2.1.1", "fs-extra": "^9.1.0", "glob-all": "^3.2.1", "is-wsl": "^2.2.0", From a79899ae5f6f66aa0c65e7fda8e0186d38ff446e Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 10:57:15 +0100 Subject: [PATCH 240/328] refactor: Adapt v3 log writing interfaces --- index.js | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index 2072bbc1..975ff750 100644 --- a/index.js +++ b/index.js @@ -109,9 +109,10 @@ class ServerlessPythonRequirements { * The plugin constructor * @param {Object} serverless * @param {Object} options + * @param {Object} v3Utils * @return {undefined} */ - constructor(serverless) { + constructor(serverless, cliOptions, v3Utils) { this.serverless = serverless; this.servicePath = this.serverless.config.servicePath; this.warningLogged = false; @@ -127,6 +128,13 @@ class ServerlessPythonRequirements { }, }); } + + if (v3Utils) { + this.log = v3Utils.log; + this.progress = v3Utils.progress; + this.writeText = v3Utils.writeText; + }; + this.commands = { requirements: { commands: { From 9e952df5e91abb98679ce9ea700a0c5409198205 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 12:09:29 +0100 Subject: [PATCH 241/328] refactor: Adapt `poetry` for modern logs --- lib/poetry.js | 104 +++++++++++++++++++++++++++++--------------------- 1 file changed, 61 insertions(+), 43 deletions(-) diff --git a/lib/poetry.js b/lib/poetry.js index 55f83289..65970cc4 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -1,6 +1,7 @@ const fs = require('fs'); const fse = require('fs-extra'); const path = require('path'); + const spawn = require('child-process-ext/spawn'); const tomlParse = require('@iarna/toml/parse-string'); @@ -12,58 +13,75 @@ async function pyprojectTomlToRequirements() { return; } - this.serverless.cli.log('Generating requirements.txt from pyproject.toml...'); + let generateRequirementsProgress; + if (this.progress) { + generateRequirementsProgress = this.progress.get( + 'python-generate-requirements-toml' + ); + } else { + this.serverless.cli.log( + 'Generating requirements.txt from pyproject.toml...' + ); + } try { - await spawn( - 'poetry', - [ - 'export', - '--without-hashes', - '-f', - 'requirements.txt', - '-o', - 'requirements.txt', - '--with-credentials', - ], - { - cwd: this.servicePath, - } - ); - } catch (e) { - if ( - e.stderrBuffer && - e.stderrBuffer.toString().includes('command not found') - ) { - throw new Error( - `poetry not found! Install it according to the poetry docs.` + try { + await spawn( + 'poetry', + [ + 'export', + '--without-hashes', + '-f', + 'requirements.txt', + '-o', + 'requirements.txt', + '--with-credentials', + ], + { + cwd: this.servicePath, + } ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + throw new Error( + `poetry not found! 
Install it according to the poetry docs.` + ); + } + throw e; } - throw e; - } - const editableFlag = new RegExp(/^-e /gm); - const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); - const requirementsContents = fse.readFileSync(sourceRequirements, { - encoding: 'utf-8', - }); + const editableFlag = new RegExp(/^-e /gm); + const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); + const requirementsContents = fse.readFileSync(sourceRequirements, { + encoding: 'utf-8', + }); - if (requirementsContents.match(editableFlag)) { - this.serverless.cli.log( - 'The generated file contains -e flags, removing them...' - ); - fse.writeFileSync( + if (requirementsContents.match(editableFlag)) { + if (this.log) { + this.log.info('The generated file contains -e flags, removing them'); + } else { + this.serverless.cli.log( + 'The generated file contains -e flags, removing them...' + ); + } + fse.writeFileSync( + sourceRequirements, + requirementsContents.replace(editableFlag, '') + ); + } + + fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + fse.moveSync( sourceRequirements, - requirementsContents.replace(editableFlag, '') + path.join(this.servicePath, '.serverless', 'requirements.txt'), + { overwrite: true } ); + } finally { + generateRequirementsProgress && generateRequirementsProgress.remove(); } - - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); - fse.moveSync( - sourceRequirements, - path.join(this.servicePath, '.serverless', 'requirements.txt'), - { overwrite: true } - ); } /** From e3afe7bf4162b0be77764874641a5e024be5a91a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 12:09:46 +0100 Subject: [PATCH 242/328] refactor: Adapt `pipenv` to modern logs --- lib/pipenv.js | 55 ++++++++++++++++++++++++++++++++++----------------- lib/poetry.js | 4 ++++ 2 files changed, 41 insertions(+), 18 deletions(-) diff --git a/lib/pipenv.js b/lib/pipenv.js index e5731aaf..5100a810 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -14,29 +14,48 @@ async function pipfileToRequirements() { return; } - this.serverless.cli.log('Generating requirements.txt from Pipfile...'); + let generateRequirementsProgress; + if (this.progress) { + generateRequirementsProgress = this.progress.get( + 'python-generate-requirements-pipfile' + ); + generateRequirementsProgress.update( + 'Generating requirements.txt from Pipfile', + { isMainEvent: true } + ); + } else { + this.serverless.cli.log('Generating requirements.txt from Pipfile...'); + } - let res; try { - res = await spawn('pipenv', ['lock', '--requirements', '--keep-outdated'], { - cwd: this.servicePath, - }); - } catch (e) { - if ( - e.stderrBuffer && - e.stderrBuffer.toString().includes('command not found') - ) { - throw new Error( - `pipenv not found! Install it with 'pip install pipenv'.` + let res; + try { + res = await spawn( + 'pipenv', + ['lock', '--requirements', '--keep-outdated'], + { + cwd: this.servicePath, + } ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + throw new Error( + `pipenv not found! 
Install it with 'pip install pipenv'.` + ); + } + throw e; } - throw e; + fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + fse.writeFileSync( + path.join(this.servicePath, '.serverless/requirements.txt'), + removeEditableFlagFromRequirementsString(res.stdoutBuffer) + ); + } finally { + generateRequirementsProgress && generateRequirementsProgress.remove(); } - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); - fse.writeFileSync( - path.join(this.servicePath, '.serverless/requirements.txt'), - removeEditableFlagFromRequirementsString(res.stdoutBuffer) - ); } /** diff --git a/lib/poetry.js b/lib/poetry.js index 65970cc4..81988742 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -18,6 +18,10 @@ async function pyprojectTomlToRequirements() { generateRequirementsProgress = this.progress.get( 'python-generate-requirements-toml' ); + generateRequirementsProgress.update( + 'Generating requirements.txt from "pyproject.toml"', + { isMainEvent: true } + ); } else { this.serverless.cli.log( 'Generating requirements.txt from pyproject.toml...' From 1c8f911ef57c91b3efd5fd3c030df9548aa6ceae Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 12:12:29 +0100 Subject: [PATCH 243/328] refactor: Adapt `clean` to modern logs --- lib/clean.js | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/lib/clean.js b/lib/clean.js index e0bff238..88d7d03e 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -38,8 +38,16 @@ function cleanup() { function cleanupCache() { const cacheLocation = getUserCachePath(this.options); if (fse.existsSync(cacheLocation)) { + let cleanupProgress; if (this.serverless) { - this.serverless.cli.log(`Removing static caches at: ${cacheLocation}`); + if (this.progress) { + cleanupProgress = this.progress.get('python-cleanup-cache'); + cleanupProgress.notice(`Removing static caches at: ${cacheLocation}`, { + isMainEvent: true, + }); + } else { + this.serverless.cli.log(`Removing static caches at: ${cacheLocation}`); + } } // Only remove cache folders that we added, just incase someone accidentally puts a weird @@ -50,10 +58,19 @@ function cleanupCache() { .forEach((file) => { promises.push(fse.removeAsync(file)); }); - return BbPromise.all(promises); + return BbPromise.all(promises) + .then(() => cleanupProgress && cleanupProgress.remove()) + .catch((e) => { + cleanupProgress && cleanupProgress.remove(); + throw e; + }); } else { if (this.serverless) { - this.serverless.cli.log(`No static cache found`); + if (this.log) { + this.log.info(`No static cache found`); + } else { + this.serverless.cli.log(`No static cache found`); + } } return BbPromise.resolve(); } From 8ff97e6b7c279334e417dbdb65e64d0de2656986 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 12:12:44 +0100 Subject: [PATCH 244/328] refactor: Adapt `shared` to modern logs --- lib/shared.js | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/lib/shared.js b/lib/shared.js index 79b60cef..7baee58b 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -12,7 +12,7 @@ const sha256File = require('sha256-file'); * @param {Object} serverless * @return {undefined} */ -function checkForAndDeleteMaxCacheVersions(options, serverless) { +function checkForAndDeleteMaxCacheVersions({ serverless, options, log }) { // If we're using the static cache, and we have static cache max versions enabled if ( options.useStaticCache && @@ -42,10 +42,17 @@ function checkForAndDeleteMaxCacheVersions(options, serverless) { 
rimraf.sync(files[i]); items++; } + // Log the number of cache files flushed - serverless.cli.log( - `Removed ${items} items from cache because of staticCacheMaxVersions` - ); + if (log) { + log.info( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } else { + serverless.cli.log( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } } } } From 1162275d6eb95a756d174f87b40b9cfecd892bc7 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 12:13:23 +0100 Subject: [PATCH 245/328] refactor: Adapt `zip` to modern logs --- lib/zip.js | 81 +++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 62 insertions(+), 19 deletions(-) diff --git a/lib/zip.js b/lib/zip.js index 2e872aa9..de61ce0f 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -30,9 +30,13 @@ function addVendorHelper() { }) .then((functions) => uniqBy(functions, (func) => func.module)) .map((f) => { - this.serverless.cli.log( - `Adding Python requirements helper to ${f.module}...` - ); + if (this.log) { + this.log.info(`Adding Python requirements helper to ${f.module}`); + } else { + this.serverless.cli.log( + `Adding Python requirements helper to ${f.module}...` + ); + } return fse.copyAsync( path.resolve(__dirname, '../unzip_requirements.py'), @@ -40,7 +44,11 @@ function addVendorHelper() { ); }); } else { - this.serverless.cli.log('Adding Python requirements helper...'); + if (this.log) { + this.log.info('Adding Python requirements helper'); + } else { + this.serverless.cli.log('Adding Python requirements helper...'); + } if (!get(this.serverless.service, 'package.patterns')) { set(this.serverless.service, ['package', 'patterns'], []); @@ -72,15 +80,25 @@ function removeVendorHelper() { }) .then((funcs) => uniqBy(funcs, (f) => f.module)) .map((f) => { - this.serverless.cli.log( - `Removing Python requirements helper from ${f.module}...` - ); + if (this.log) { + this.log.info( + `Removing Python requirements helper from ${f.module}` + ); + } else { + this.serverless.cli.log( + `Removing Python requirements helper from ${f.module}...` + ); + } return fse.removeAsync( path.join(this.servicePath, f.module, 'unzip_requirements.py') ); }); } else { - this.serverless.cli.log('Removing Python requirements helper...'); + if (this.log) { + this.log.info('Removing Python requirements helper'); + } else { + this.serverless.cli.log('Removing Python requirements helper...'); + } return fse.removeAsync( path.join(this.servicePath, 'unzip_requirements.py') ); @@ -104,21 +122,46 @@ function packRequirements() { }) .then((funcs) => uniqBy(funcs, (f) => f.module)) .map((f) => { - this.serverless.cli.log( - `Zipping required Python packages for ${f.module}...` - ); + let packProgress; + if (this.progress) { + packProgress = this.progress.get( + `python-pack-requirements-${f.module}` + ); + packProgress.update( + `Zipping required Python packages for ${f.module}`, + { isMainEvent: true } + ); + } else { + this.serverless.cli.log( + `Zipping required Python packages for ${f.module}...` + ); + } f.package.patterns.push(`${f.module}/.requirements.zip`); - return addTree( - new JSZip(), - `.serverless/${f.module}/requirements` - ).then((zip) => writeZip(zip, `${f.module}/.requirements.zip`)); + return addTree(new JSZip(), `.serverless/${f.module}/requirements`) + .then((zip) => writeZip(zip, `${f.module}/.requirements.zip`)) + .then(() => packProgress && packProgress.remove()) + .catch((e) => { + packProgress && packProgress.remove(); + throw e; + }); }); } else { - 
this.serverless.cli.log('Zipping required Python packages...'); + let packProgress; + if (this.progress) { + packProgress = this.progress.get(`python-pack-requirements`); + } else { + this.serverless.cli.log('Zipping required Python packages...'); + } this.serverless.service.package.patterns.push('.requirements.zip'); - return addTree(new JSZip(), '.serverless/requirements').then((zip) => - writeZip(zip, path.join(this.servicePath, '.requirements.zip')) - ); + return addTree(new JSZip(), '.serverless/requirements') + .then((zip) => + writeZip(zip, path.join(this.servicePath, '.requirements.zip')) + ) + .then(() => packProgress && packProgress.remove()) + .catch((e) => { + packProgress && packProgress.remove(); + throw e; + }); } } } From b7902aa047bd91359e41d65b42efca324ec15997 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 13:14:35 +0100 Subject: [PATCH 246/328] refactor: Adapt `pip` to modern logs --- index.js | 10 +- lib/pip.js | 566 ++++++++++++++++++++++++++++++----------------------- 2 files changed, 331 insertions(+), 245 deletions(-) diff --git a/index.js b/index.js index 975ff750..26616295 100644 --- a/index.js +++ b/index.js @@ -74,9 +74,13 @@ class ServerlessPythonRequirements { (options.dockerSsh || options.dockerImage || options.dockerFile) ) { if (!this.warningLogged) { - this.serverless.cli.log( - 'WARNING: You provided a docker related option but dockerizePip is set to false.' - ); + if (this.log) { + this.log.warning('You provided a docker related option but dockerizePip is set to false.'); + } else { + this.serverless.cli.log( + 'WARNING: You provided a docker related option but dockerizePip is set to false.' + ); + } this.warningLogged = true; } } diff --git a/lib/pip.js b/lib/pip.js index 78af2e20..89e8e74a 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -57,10 +57,9 @@ function mergeCommands(commands) { function generateRequirementsFile( requirementsPath, targetFile, - serverless, - servicePath, - options + pluginInstance ) { + const { serverless, servicePath, options, log } = pluginInstance; if ( options.usePoetry && fse.existsSync(path.join(servicePath, 'pyproject.toml')) && @@ -69,12 +68,15 @@ function generateRequirementsFile( filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), targetFile, - options, - serverless - ); - serverless.cli.log( - `Parsed requirements.txt from pyproject.toml in ${targetFile}...` + pluginInstance ); + if (log) { + log.info(`Parsed requirements.txt from pyproject.toml in ${targetFile}`); + } else { + serverless.cli.log( + `Parsed requirements.txt from pyproject.toml in ${targetFile}...` + ); + } } else if ( options.usePipenv && fse.existsSync(path.join(servicePath, 'Pipfile')) @@ -82,17 +84,26 @@ function generateRequirementsFile( filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), targetFile, - options, - serverless - ); - serverless.cli.log( - `Parsed requirements.txt from Pipfile in ${targetFile}...` + pluginInstance ); + if (log) { + log.info(`Parsed requirements.txt from Pipfile in ${targetFile}`); + } else { + serverless.cli.log( + `Parsed requirements.txt from Pipfile in ${targetFile}...` + ); + } } else { - filterRequirementsFile(requirementsPath, targetFile, options, serverless); - serverless.cli.log( - `Generated requirements from ${requirementsPath} in ${targetFile}...` - ); + filterRequirementsFile(requirementsPath, targetFile, pluginInstance); + if (log) { + log.info( + `Generated requirements from ${requirementsPath} in ${targetFile}` + ); + } else { + 
serverless.cli.log( + `Generated requirements from ${requirementsPath} in ${targetFile}...` + ); + } } } @@ -122,228 +133,290 @@ async function pipAcceptsSystem(pythonBin) { * @param {Object} options * @return {undefined} */ -async function installRequirements(targetFolder, serverless, options) { +async function installRequirements( + targetFolder, + { options, serverless, log, progress } +) { const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); - serverless.cli.log( - `Installing requirements from ${targetRequirementsTxt} ...` - ); - - const dockerCmd = []; - const pipCmd = [options.pythonBin, '-m', 'pip', 'install']; - - if ( - Array.isArray(options.pipCmdExtraArgs) && - options.pipCmdExtraArgs.length > 0 - ) { - options.pipCmdExtraArgs.forEach((cmd) => { - const parts = cmd.split(/\s+/, 2); - pipCmd.push(...parts); - }); + let installProgress; + if (progress) { + installProgress = progress.get('python-install'); + installProgress.update( + `Installing requirements from "${targetRequirementsTxt}"`, + { isMainEvent: true } + ); + } else { + serverless.cli.log( + `Installing requirements from ${targetRequirementsTxt} ...` + ); } - const pipCmds = [pipCmd]; - const postCmds = []; - // Check if we're using the legacy --cache-dir command... - if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { - if (options.dockerizePip) { - throw ( - 'Error: You can not use --cache-dir with Docker any more, please\n' + - ' use the new option useDownloadCache instead. Please see:\n' + - ' https://github.com/UnitedIncome/serverless-python-requirements#caching' - ); - } else { - serverless.cli.log('=================================================='); - serverless.cli.log( - 'Warning: You are using a deprecated --cache-dir inside\n' + - ' your pipCmdExtraArgs which may not work properly, please use the\n' + - ' useDownloadCache option instead. Please see: \n' + - ' https://github.com/UnitedIncome/serverless-python-requirements#caching' - ); - serverless.cli.log('=================================================='); - } - } + try { + const dockerCmd = []; + const pipCmd = [options.pythonBin, '-m', 'pip', 'install']; - if (!options.dockerizePip) { - // Push our local OS-specific paths for requirements and target directory - pipCmd.push( - '-t', - dockerPathForWin(targetFolder), - '-r', - dockerPathForWin(targetRequirementsTxt) - ); - // If we want a download cache... 
- if (options.useDownloadCache) { - const downloadCacheDir = path.join( - getUserCachePath(options), - 'downloadCacheslspyc' - ); - serverless.cli.log(`Using download cache directory ${downloadCacheDir}`); - fse.ensureDirSync(downloadCacheDir); - pipCmd.push('--cache-dir', downloadCacheDir); + if ( + Array.isArray(options.pipCmdExtraArgs) && + options.pipCmdExtraArgs.length > 0 + ) { + options.pipCmdExtraArgs.forEach((cmd) => { + const parts = cmd.split(/\s+/, 2); + pipCmd.push(...parts); + }); } - if (await pipAcceptsSystem(options.pythonBin)) { - pipCmd.push('--system'); - } - } - // If we are dockerizing pip - if (options.dockerizePip) { - // Push docker-specific paths for requirements and target directory - pipCmd.push('-t', '/var/task/', '-r', '/var/task/requirements.txt'); - - // Build docker image if required - let dockerImage; - if (options.dockerFile) { - serverless.cli.log( - `Building custom docker image from ${options.dockerFile}...` - ); - dockerImage = await buildImage( - options.dockerFile, - options.dockerBuildCmdExtraArgs - ); - } else { - dockerImage = options.dockerImage; + const pipCmds = [pipCmd]; + const postCmds = []; + // Check if we're using the legacy --cache-dir command... + if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { + if (options.dockerizePip) { + throw ( + 'Error: You can not use --cache-dir with Docker any more, please\n' + + ' use the new option useDownloadCache instead. Please see:\n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + } else { + if (log) { + log.warning( + 'You are using a deprecated --cache-dir inside\n' + + ' your pipCmdExtraArgs which may not work properly, please use the\n' + + ' useDownloadCache option instead. Please see: \n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + } else { + serverless.cli.log( + '==================================================' + ); + serverless.cli.log( + 'Warning: You are using a deprecated --cache-dir inside\n' + + ' your pipCmdExtraArgs which may not work properly, please use the\n' + + ' useDownloadCache option instead. Please see: \n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + serverless.cli.log( + '==================================================' + ); + } + } } - serverless.cli.log(`Docker Image: ${dockerImage}`); - - // Prepare bind path depending on os platform - const bindPath = dockerPathForWin( - await getBindPath(serverless, targetFolder) - ); - dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); - if (options.dockerSsh) { - // Mount necessary ssh files to work with private repos - dockerCmd.push( - '-v', - `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`, - '-v', - `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, - '-v', - `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, - '-e', - 'SSH_AUTH_SOCK=/tmp/ssh_sock' + if (!options.dockerizePip) { + // Push our local OS-specific paths for requirements and target directory + pipCmd.push( + '-t', + dockerPathForWin(targetFolder), + '-r', + dockerPathForWin(targetRequirementsTxt) ); - } + // If we want a download cache... 
+ if (options.useDownloadCache) { + const downloadCacheDir = path.join( + getUserCachePath(options), + 'downloadCacheslspyc' + ); + if (log) { + log.info(`Using download cache directory ${downloadCacheDir}`); + } else { + serverless.cli.log( + `Using download cache directory ${downloadCacheDir}` + ); + } + fse.ensureDirSync(downloadCacheDir); + pipCmd.push('--cache-dir', downloadCacheDir); + } - // If we want a download cache... - const dockerDownloadCacheDir = '/var/useDownloadCache'; - if (options.useDownloadCache) { - const downloadCacheDir = path.join( - getUserCachePath(options), - 'downloadCacheslspyc' - ); - serverless.cli.log(`Using download cache directory ${downloadCacheDir}`); - fse.ensureDirSync(downloadCacheDir); - // This little hack is necessary because getBindPath requires something inside of it to test... - // Ugh, this is so ugly, but someone has to fix getBindPath in some other way (eg: make it use - // its own temp file) - fse.closeSync( - fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') - ); - const windowsized = await getBindPath(serverless, downloadCacheDir); - // And now push it to a volume mount and to pip... - dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); - pipCmd.push('--cache-dir', dockerDownloadCacheDir); + if (pipAcceptsSystem(options.pythonBin)) { + pipCmd.push('--system'); + } } - if (options.dockerEnv) { - // Add environment variables to docker run cmd - options.dockerEnv.forEach(function (item) { - dockerCmd.push('-e', item); - }); - } + // If we are dockerizing pip + if (options.dockerizePip) { + // Push docker-specific paths for requirements and target directory + pipCmd.push('-t', '/var/task/', '-r', '/var/task/requirements.txt'); + + // Build docker image if required + let dockerImage; + if (options.dockerFile) { + let buildDockerImageProgress; + if (progress) { + buildDockerImageProgress = progress.get( + 'python-install-build-docker' + ); + buildDockerImageProgress.update( + `Building custom docker image from ${options.dockerFile}` + ); + } else { + serverless.cli.log( + `Building custom docker image from ${options.dockerFile}...` + ); + } + try { + dockerImage = buildImage( + options.dockerFile, + options.dockerBuildCmdExtraArgs + ); + } finally { + buildDockerImageProgress && buildDockerImageProgress.remove(); + } + } else { + dockerImage = options.dockerImage; + } + if (log) { + log.info(`Docker Image: ${dockerImage}`); + } else { + serverless.cli.log(`Docker Image: ${dockerImage}`); + } - if (process.platform === 'linux') { - // Use same user so requirements folder is not root and so --cache-dir works + // Prepare bind path depending on os platform + const bindPath = dockerPathForWin(getBindPath(serverless, targetFolder)); + + dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); + if (options.dockerSsh) { + // Mount necessary ssh files to work with private repos + dockerCmd.push( + '-v', + `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`, + '-v', + `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, + '-v', + `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, + '-e', + 'SSH_AUTH_SOCK=/tmp/ssh_sock' + ); + } + + // If we want a download cache... 
+ const dockerDownloadCacheDir = '/var/useDownloadCache'; if (options.useDownloadCache) { - // Set the ownership of the download cache dir to root - pipCmds.unshift(['chown', '-R', '0:0', dockerDownloadCacheDir]); + const downloadCacheDir = path.join( + getUserCachePath(options), + 'downloadCacheslspyc' + ); + if (log) { + log.info(`Using download cache directory ${downloadCacheDir}`); + } else { + serverless.cli.log( + `Using download cache directory ${downloadCacheDir}` + ); + } + fse.ensureDirSync(downloadCacheDir); + // This little hack is necessary because getBindPath requires something inside of it to test... + // Ugh, this is so ugly, but someone has to fix getBindPath in some other way (eg: make it use + // its own temp file) + fse.closeSync( + fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') + ); + const windowsized = getBindPath(serverless, downloadCacheDir); + // And now push it to a volume mount and to pip... + dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); + pipCmd.push('--cache-dir', dockerDownloadCacheDir); } - // Install requirements with pip - // Set the ownership of the current folder to user - pipCmds.push([ - 'chown', - '-R', - `${process.getuid()}:${process.getgid()}`, - '/var/task', - ]); - } else { - // Use same user so --cache-dir works - dockerCmd.push('-u', await getDockerUid(bindPath)); - } - for (let path of options.dockerExtraFiles) { - pipCmds.push(['cp', path, '/var/task/']); - } + if (options.dockerEnv) { + // Add environment variables to docker run cmd + options.dockerEnv.forEach(function (item) { + dockerCmd.push('-e', item); + }); + } - if (process.platform === 'linux') { - if (options.useDownloadCache) { - // Set the ownership of the download cache dir back to user + if (process.platform === 'linux') { + // Use same user so requirements folder is not root and so --cache-dir works + if (options.useDownloadCache) { + // Set the ownership of the download cache dir to root + pipCmds.unshift(['chown', '-R', '0:0', dockerDownloadCacheDir]); + } + // Install requirements with pip + // Set the ownership of the current folder to user pipCmds.push([ 'chown', '-R', `${process.getuid()}:${process.getgid()}`, - dockerDownloadCacheDir, + '/var/task', ]); + } else { + // Use same user so --cache-dir works + dockerCmd.push('-u', getDockerUid(bindPath)); + } + + for (let path of options.dockerExtraFiles) { + pipCmds.push(['cp', path, '/var/task/']); } + + if (process.platform === 'linux') { + if (options.useDownloadCache) { + // Set the ownership of the download cache dir back to user + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + dockerDownloadCacheDir, + ]); + } + } + + if (Array.isArray(options.dockerRunCmdExtraArgs)) { + dockerCmd.push(...options.dockerRunCmdExtraArgs); + } else { + throw new Error('dockerRunCmdExtraArgs option must be an array'); + } + + dockerCmd.push(dockerImage); } - if (Array.isArray(options.dockerRunCmdExtraArgs)) { - dockerCmd.push(...options.dockerRunCmdExtraArgs); - } else { - throw new Error('dockerRunCmdExtraArgs option must be an array'); + // If enabled slimming, strip so files + switch (getStripMode(options)) { + case 'docker': + pipCmds.push(getStripCommand(options, '/var/task')); + break; + case 'direct': + postCmds.push(getStripCommand(options, dockerPathForWin(targetFolder))); + break; } - dockerCmd.push(dockerImage); - } + let spawnArgs = { shell: true }; + if (process.env.SLS_DEBUG) { + spawnArgs.stdio = 'inherit'; + } + let mainCmds = []; + if 
(dockerCmd.length) { + dockerCmd.push(...mergeCommands(pipCmds)); + mainCmds = [dockerCmd]; + } else { + mainCmds = pipCmds; + } + mainCmds.push(...postCmds); - // If enabled slimming, strip so files - switch (getStripMode(options)) { - case 'docker': - pipCmds.push(getStripCommand(options, '/var/task')); - break; - case 'direct': - postCmds.push(getStripCommand(options, dockerPathForWin(targetFolder))); - break; - } + if (log) { + log.info(`Running ${quote(dockerCmd)}...`); + } else { + serverless.cli.log(`Running ${quote(dockerCmd)}...`); + } - let spawnArgs = { shell: true }; - if (process.env.SLS_DEBUG) { - spawnArgs.stdio = 'inherit'; - } - let mainCmds = []; - if (dockerCmd.length) { - dockerCmd.push(...mergeCommands(pipCmds)); - mainCmds = [dockerCmd]; - } else { - mainCmds = pipCmds; - } - mainCmds.push(...postCmds); - - serverless.cli.log(`Running ${quote(dockerCmd)}...`); - - for (const [cmd, ...args] of mainCmds) { - try { - await spawn(cmd, args); - } catch (e) { - if ( - e.stderrBuffer && - e.stderrBuffer.toString().includes('command not found') - ) { - const advice = - cmd.indexOf('python') > -1 - ? 'Try the pythonBin option' - : 'Please install it'; - throw new Error(`${cmd} not found! ${advice}`); + for (const [cmd, ...args] of mainCmds) { + try { + await spawn(cmd, args); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + const advice = + cmd.indexOf('python') > -1 + ? 'Try the pythonBin option' + : 'Please install it'; + throw new Error(`${cmd} not found! ${advice}`); + } + throw e; } - throw e; } - } - // If enabled slimming, delete files in slimPatterns - if (options.slim === true || options.slim === 'true') { - deleteFiles(options, targetFolder); + // If enabled slimming, delete files in slimPatterns + if (options.slim === true || options.slim === 'true') { + deleteFiles(options, targetFolder); + } + } finally { + installProgress && installProgress.remove(); } } @@ -392,7 +465,7 @@ function getRequirements(source) { * @param {string} target requirements where results are written * @param {Object} options */ -function filterRequirementsFile(source, target, options, serverless) { +function filterRequirementsFile(source, target, { options, serverless, log }) { const noDeploy = new Set(options.noDeploy || []); const requirements = getRequirements(source); var prepend = []; @@ -414,9 +487,13 @@ function filterRequirementsFile(source, target, options, serverless) { // not required inside final archive and avoids pip bugs // see https://github.com/UnitedIncome/serverless-python-requirements/issues/240 req = req.split('-e')[1].trim(); - serverless.cli.log( - `Warning: Stripping -e flag from requirement ${req}` - ); + if (log) { + log.warning(`Stripping -e flag from requirement ${req}`); + } else { + serverless.cli.log( + `Warning: Stripping -e flag from requirement ${req}` + ); + } } // Keep options for later @@ -444,13 +521,19 @@ function filterRequirementsFile(source, target, options, serverless) { * @param {Object} serverless * @return {undefined} */ -function copyVendors(vendorFolder, targetFolder, serverless) { +function copyVendors(vendorFolder, targetFolder, { serverless, log }) { // Create target folder if it does not exist fse.ensureDirSync(targetFolder); - serverless.cli.log( - `Copying vendor libraries from ${vendorFolder} to ${targetFolder}...` - ); + if (log) { + log.info( + `Copying vendor libraries from ${vendorFolder} to ${targetFolder}` + ); + } else { + serverless.cli.log( + `Copying vendor libraries 
from ${vendorFolder} to ${targetFolder}...` + ); + } fse.readdirSync(vendorFolder).map((file) => { let source = path.join(vendorFolder, file); @@ -500,12 +583,11 @@ function requirementsFileExists(servicePath, options, fileName) { * @return {string} */ async function installRequirementsIfNeeded( - servicePath, modulePath, - options, funcOptions, - serverless + pluginInstance ) { + const { servicePath, options, serverless } = pluginInstance; // Our source requirements, under our service path, and our module path (if specified) const fileName = path.join(servicePath, modulePath, options.fileName); @@ -528,19 +610,19 @@ async function installRequirementsIfNeeded( fse.ensureDirSync(requirementsTxtDirectory); const slsReqsTxt = path.join(requirementsTxtDirectory, 'requirements.txt'); - generateRequirementsFile( - fileName, - slsReqsTxt, - serverless, - servicePath, - options - ); + generateRequirementsFile(fileName, slsReqsTxt, pluginInstance); // If no requirements file or an empty requirements file, then do nothing if (!fse.existsSync(slsReqsTxt) || fse.statSync(slsReqsTxt).size == 0) { - serverless.cli.log( - `Skipping empty output requirements.txt file from ${slsReqsTxt}` - ); + if (pluginInstance.log) { + pluginInstance.log.info( + `Skipping empty output requirements.txt file from ${slsReqsTxt}` + ); + } else { + serverless.cli.log( + `Skipping empty output requirements.txt file from ${slsReqsTxt}` + ); + } return false; } @@ -560,9 +642,15 @@ async function installRequirementsIfNeeded( fse.existsSync(path.join(workingReqsFolder, '.completed_requirements')) && workingReqsFolder.endsWith('_slspyc') ) { - serverless.cli.log( - `Using static cache of requirements found at ${workingReqsFolder} ...` - ); + if (pluginInstance.log) { + pluginInstance.log.info( + `Using static cache of requirements found at ${workingReqsFolder}` + ); + } else { + serverless.cli.log( + `Using static cache of requirements found at ${workingReqsFolder} ...` + ); + } // We'll "touch" the folder, as to bring it to the start of the FIFO cache fse.utimesSync(workingReqsFolder, new Date(), new Date()); return workingReqsFolder; @@ -583,14 +671,14 @@ async function installRequirementsIfNeeded( fse.copySync(slsReqsTxt, path.join(workingReqsFolder, 'requirements.txt')); // Then install our requirements from this folder - await installRequirements(workingReqsFolder, serverless, options); + await installRequirements(workingReqsFolder, pluginInstance); // Copy vendor libraries to requirements folder if (options.vendor) { - copyVendors(options.vendor, workingReqsFolder, serverless); + copyVendors(options.vendor, workingReqsFolder, pluginInstance); } if (funcOptions.vendor) { - copyVendors(funcOptions.vendor, workingReqsFolder, serverless); + copyVendors(funcOptions.vendor, workingReqsFolder, pluginInstance); } // Then touch our ".completed_requirements" file so we know we can use this for static cache @@ -609,7 +697,7 @@ async function installRequirementsIfNeeded( async function installAllRequirements() { // fse.ensureDirSync(path.join(this.servicePath, '.serverless')); // First, check and delete cache versions, if enabled - checkForAndDeleteMaxCacheVersions(this.options, this.serverless); + checkForAndDeleteMaxCacheVersions(this); // Then if we're going to package functions individually... 
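  // (Hypothetical example, not taken from this repository's fixtures: a service
  // layout that exercises the per-module branch below.)
  //
  //   # serverless.yml
  //   package:
  //     individually: true
  //   functions:
  //     hello:
  //       handler: hello_module/handler.hello
  //       module: hello_module
  //
  // Each declared `module` then gets its own requirements install and artifact,
  // while functions without a module are treated as living at the service root.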
if (this.serverless.service.package.individually) { @@ -663,13 +751,7 @@ async function installAllRequirements() { } } } else { - const reqsInstalledAt = await installRequirementsIfNeeded( - this.servicePath, - '', - this.options, - {}, - this.serverless - ); + const reqsInstalledAt = await installRequirementsIfNeeded('', {}, this); // Add symlinks into .serverless for so it's easier for injecting and for users to see where reqs are let symlinkPath = path.join( this.servicePath, From f43acea2a5ee207874f4b5120a95a6b0164bd405 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 13:23:49 +0100 Subject: [PATCH 247/328] refactor: Adapt `layer` to modern logs --- lib/layer.js | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/lib/layer.js b/lib/layer.js index 12d338ec..ddc90f6a 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -53,9 +53,24 @@ function layerRequirements() { return BbPromise.resolve(); } - this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); + let layerProgress; + if (this.progress) { + layerProgress = this.progress.get('python-layer-requirements'); + layerProgress.update('Packaging Python Requirements Lambda Layer', { + isMainEvent: true, + }); + } else { + this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); + } - return BbPromise.bind(this).then(zipRequirements).then(createLayers); + return BbPromise.bind(this) + .then(zipRequirements) + .then(createLayers) + .then(() => layerProgress && layerProgress.remove()) + .catch((e) => { + layerProgress && layerProgress.remove(); + throw e; + }); } module.exports = { From cbd7e9c4ecb335457ed59b6d4942636b0639a53a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 13:34:52 +0100 Subject: [PATCH 248/328] refactor: Adapt `inject` to modern logs --- lib/inject.js | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/lib/inject.js b/lib/inject.js index 3cad758d..2d1bdc2b 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -92,10 +92,19 @@ function injectAllRequirements(funcArtifact) { return BbPromise.resolve(); } - this.serverless.cli.log('Injecting required Python packages to package...'); + let injectProgress; + if (this.progress) { + injectProgress = this.progress.get('python-inject-requirements'); + injectProgress.update('Injecting required Python packages to package', { + isMainEvent: true, + }); + } else { + this.serverless.cli.log('Injecting required Python packages to package...'); + } + let returnPromise; if (this.serverless.service.package.individually) { - return BbPromise.resolve(this.targetFuncs) + returnPromise = BbPromise.resolve(this.targetFuncs) .filter((func) => (func.runtime || this.serverless.service.provider.runtime).match( /^python.*/ @@ -132,12 +141,19 @@ function injectAllRequirements(funcArtifact) { ); }); } else if (!this.options.zip) { - return injectRequirements( + returnPromise = injectRequirements( path.join('.serverless', 'requirements'), this.serverless.service.package.artifact || funcArtifact, this.options ); } + + return returnPromise + .then(() => injectProgress && injectProgress.remove()) + .catch((e) => { + injectProgress && injectProgress.remove(); + throw e; + }); } module.exports = { injectAllRequirements }; From d70ca215eb8d0644697aed3d9515755c89c701e1 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 13:43:51 +0100 Subject: [PATCH 249/328] refactor: Adapt `docker` for modern logs --- index.js | 2 +- lib/docker.js | 34 
++++++++++++++++++++++++++-------- lib/inject.js | 15 +++++++++------ lib/pip.js | 26 +++++++++++--------------- 4 files changed, 47 insertions(+), 30 deletions(-) diff --git a/index.js b/index.js index 26616295..cf2af38e 100644 --- a/index.js +++ b/index.js @@ -137,7 +137,7 @@ class ServerlessPythonRequirements { this.log = v3Utils.log; this.progress = v3Utils.progress; this.writeText = v3Utils.writeText; - }; + } this.commands = { requirements: { diff --git a/lib/docker.js b/lib/docker.js index 94229b21..9da6da6a 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -73,7 +73,7 @@ function findTestFile(servicePath) { * @param {string} bindPath * @return {boolean} */ -async function tryBindPath(serverless, bindPath, testFile) { +async function tryBindPath(bindPath, testFile, { serverless, log }) { const debug = process.env.SLS_DEBUG; const options = [ 'run', @@ -85,12 +85,30 @@ async function tryBindPath(serverless, bindPath, testFile) { `/test/${testFile}`, ]; try { - if (debug) serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); + if (debug) { + if (log) { + log.debug(`Trying bindPath ${bindPath} (${options})`); + } else { + serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); + } + } const ps = await dockerCommand(options); - if (debug) serverless.cli.log(ps.stdout.trim()); - return ps.stdout.trim() === `/test/${testFile}`; + if (debug) { + if (log) { + log.debug(ps.stdoutBuffer.trim()); + } else { + serverless.cli.log(ps.stdoutBuffer.trim()); + } + } + return ps.stdoutBuffer.trim() === `/test/${testFile}`; } catch (err) { - if (debug) serverless.cli.log(`Finding bindPath failed with ${err}`); + if (debug) { + if (log) { + log.debug(`Finding bindPath failed with ${err}`); + } else { + serverless.cli.log(`Finding bindPath failed with ${err}`); + } + } return false; } } @@ -101,7 +119,7 @@ async function tryBindPath(serverless, bindPath, testFile) { * @param {string} servicePath * @return {string} The bind path. 
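 *
 * On Linux and macOS this is simply the service path. On Windows/WSL the code
 * below builds several candidate mount spellings of the same folder (for
 * example `/mnt/C/...` and `C:/...`) and probes each one with a throwaway
 * `docker run --rm -v <candidate>:/test alpine ls` against a known file,
 * returning the first spelling that Docker can actually see.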
*/ -async function getBindPath(serverless, servicePath) { +async function getBindPath(servicePath, pluginInstance) { // Determine bind path if (process.platform !== 'win32' && !isWsl) { return servicePath; @@ -145,7 +163,7 @@ async function getBindPath(serverless, servicePath) { for (let i = 0; i < bindPaths.length; i++) { const bindPath = bindPaths[i]; - if (await tryBindPath(serverless, bindPath, testFile)) { + if (await tryBindPath(bindPath, testFile, pluginInstance)) { return bindPath; } } @@ -171,7 +189,7 @@ async function getDockerUid(bindPath) { '/bin/sh', ]; const ps = await dockerCommand(options); - return ps.stdout.trim(); + return ps.stdoutBuffer.trim(); } module.exports = { buildImage, getBindPath, getDockerUid }; diff --git a/lib/inject.js b/lib/inject.js index 2d1bdc2b..85bdf597 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -148,12 +148,15 @@ function injectAllRequirements(funcArtifact) { ); } - return returnPromise - .then(() => injectProgress && injectProgress.remove()) - .catch((e) => { - injectProgress && injectProgress.remove(); - throw e; - }); + return ( + returnPromise && + returnPromise + .then(() => injectProgress && injectProgress.remove()) + .catch((e) => { + injectProgress && injectProgress.remove(); + throw e; + }) + ); } module.exports = { injectAllRequirements }; diff --git a/lib/pip.js b/lib/pip.js index 89e8e74a..d88f5c7d 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -133,19 +133,15 @@ async function pipAcceptsSystem(pythonBin) { * @param {Object} options * @return {undefined} */ -async function installRequirements( - targetFolder, - { options, serverless, log, progress } -) { +async function installRequirements(targetFolder, pluginInstance) { + const { options, serverless, log, progress } = pluginInstance; const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); let installProgress; if (progress) { + log.info(`Installing requirements from "${targetRequirementsTxt}"`); installProgress = progress.get('python-install'); - installProgress.update( - `Installing requirements from "${targetRequirementsTxt}"`, - { isMainEvent: true } - ); + installProgress.update('Installing requirements'); } else { serverless.cli.log( `Installing requirements from ${targetRequirementsTxt} ...` @@ -226,7 +222,7 @@ async function installRequirements( pipCmd.push('--cache-dir', downloadCacheDir); } - if (pipAcceptsSystem(options.pythonBin)) { + if (await pipAcceptsSystem(options.pythonBin)) { pipCmd.push('--system'); } } @@ -253,7 +249,7 @@ async function installRequirements( ); } try { - dockerImage = buildImage( + dockerImage = await buildImage( options.dockerFile, options.dockerBuildCmdExtraArgs ); @@ -270,7 +266,9 @@ async function installRequirements( } // Prepare bind path depending on os platform - const bindPath = dockerPathForWin(getBindPath(serverless, targetFolder)); + const bindPath = dockerPathForWin( + await getBindPath(targetFolder, pluginInstance) + ); dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); if (options.dockerSsh) { @@ -308,7 +306,7 @@ async function installRequirements( fse.closeSync( fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') ); - const windowsized = getBindPath(serverless, downloadCacheDir); + const windowsized = await getBindPath(downloadCacheDir, pluginInstance); // And now push it to a volume mount and to pip... 
dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); pipCmd.push('--cache-dir', dockerDownloadCacheDir); @@ -337,7 +335,7 @@ async function installRequirements( ]); } else { // Use same user so --cache-dir works - dockerCmd.push('-u', getDockerUid(bindPath)); + dockerCmd.push('-u', await getDockerUid(bindPath)); } for (let path of options.dockerExtraFiles) { @@ -716,9 +714,7 @@ async function installAllRequirements() { // If we didn't already process a module (functions can re-use modules) if (!doneModules.includes(f.module)) { const reqsInstalledAt = await installRequirementsIfNeeded( - this.servicePath, f.module, - this.options, f, this.serverless ); From 44b9591f01157a1811e3ca8b43e21265a155a976 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Wed, 24 Nov 2021 21:44:40 +0100 Subject: [PATCH 250/328] refactor: Ensure proper verbose progress logs --- lib/clean.js | 7 +++---- lib/inject.js | 7 +++---- lib/layer.js | 7 +++---- lib/pip.js | 2 +- lib/pipenv.js | 6 +++--- lib/poetry.js | 6 +++--- lib/zip.js | 6 +++--- 7 files changed, 19 insertions(+), 22 deletions(-) diff --git a/lib/clean.js b/lib/clean.js index 88d7d03e..e972f567 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -40,11 +40,10 @@ function cleanupCache() { if (fse.existsSync(cacheLocation)) { let cleanupProgress; if (this.serverless) { - if (this.progress) { + if (this.progress && this.log) { cleanupProgress = this.progress.get('python-cleanup-cache'); - cleanupProgress.notice(`Removing static caches at: ${cacheLocation}`, { - isMainEvent: true, - }); + cleanupProgress.notice('Removing static caches'); + this.log.info(`Removing static caches at: ${cacheLocation}`); } else { this.serverless.cli.log(`Removing static caches at: ${cacheLocation}`); } diff --git a/lib/inject.js b/lib/inject.js index 85bdf597..9f3ad77a 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -93,11 +93,10 @@ function injectAllRequirements(funcArtifact) { } let injectProgress; - if (this.progress) { + if (this.progress && this.log) { injectProgress = this.progress.get('python-inject-requirements'); - injectProgress.update('Injecting required Python packages to package', { - isMainEvent: true, - }); + injectProgress.update('Injecting required Python packages to package'); + this.log.info('Injecting required Python packages to package'); } else { this.serverless.cli.log('Injecting required Python packages to package...'); } diff --git a/lib/layer.js b/lib/layer.js index ddc90f6a..141d1fd7 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -54,11 +54,10 @@ function layerRequirements() { } let layerProgress; - if (this.progress) { + if (this.progress && this.log) { layerProgress = this.progress.get('python-layer-requirements'); - layerProgress.update('Packaging Python Requirements Lambda Layer', { - isMainEvent: true, - }); + layerProgress.update('Packaging Python Requirements Lambda Layer'); + this.log.info('Packaging Python Requirements Lambda Layer'); } else { this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); } diff --git a/lib/pip.js b/lib/pip.js index d88f5c7d..2f6d4571 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -716,7 +716,7 @@ async function installAllRequirements() { const reqsInstalledAt = await installRequirementsIfNeeded( f.module, f, - this.serverless + this ); // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are let modulePath = path.join( diff --git a/lib/pipenv.js b/lib/pipenv.js index 5100a810..4949e924 100644 --- a/lib/pipenv.js +++ 
b/lib/pipenv.js @@ -15,14 +15,14 @@ async function pipfileToRequirements() { } let generateRequirementsProgress; - if (this.progress) { + if (this.progress && this.log) { generateRequirementsProgress = this.progress.get( 'python-generate-requirements-pipfile' ); generateRequirementsProgress.update( - 'Generating requirements.txt from Pipfile', - { isMainEvent: true } + 'Generating requirements.txt from Pipfile' ); + this.log.info('Generating requirements.txt from Pipfile'); } else { this.serverless.cli.log('Generating requirements.txt from Pipfile...'); } diff --git a/lib/poetry.js b/lib/poetry.js index 81988742..12904fd9 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -14,14 +14,14 @@ async function pyprojectTomlToRequirements() { } let generateRequirementsProgress; - if (this.progress) { + if (this.progress && this.log) { generateRequirementsProgress = this.progress.get( 'python-generate-requirements-toml' ); generateRequirementsProgress.update( - 'Generating requirements.txt from "pyproject.toml"', - { isMainEvent: true } + 'Generating requirements.txt from "pyproject.toml"' ); + this.log.info('Generating requirements.txt from "pyproject.toml"'); } else { this.serverless.cli.log( 'Generating requirements.txt from pyproject.toml...' diff --git a/lib/zip.js b/lib/zip.js index de61ce0f..aabb2333 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -123,14 +123,14 @@ function packRequirements() { .then((funcs) => uniqBy(funcs, (f) => f.module)) .map((f) => { let packProgress; - if (this.progress) { + if (this.progress && this.log) { packProgress = this.progress.get( `python-pack-requirements-${f.module}` ); packProgress.update( - `Zipping required Python packages for ${f.module}`, - { isMainEvent: true } + `Zipping required Python packages for ${f.module}` ); + this.log.info(`Zipping required Python packages for ${f.module}`); } else { this.serverless.cli.log( `Zipping required Python packages for ${f.module}...` From 9479a90b1d262f55a6808a9d12c478f220258da9 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 25 Nov 2021 11:49:11 +0100 Subject: [PATCH 251/328] refactor: Cleanup and use `finally` for code simplification --- lib/clean.js | 11 +++--- lib/inject.js | 99 ++++++++++++++++++++++++--------------------------- lib/layer.js | 6 +--- lib/zip.js | 12 ++----- 4 files changed, 54 insertions(+), 74 deletions(-) diff --git a/lib/clean.js b/lib/clean.js index e972f567..8aaf331e 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -40,7 +40,7 @@ function cleanupCache() { if (fse.existsSync(cacheLocation)) { let cleanupProgress; if (this.serverless) { - if (this.progress && this.log) { + if (this.log) { cleanupProgress = this.progress.get('python-cleanup-cache'); cleanupProgress.notice('Removing static caches'); this.log.info(`Removing static caches at: ${cacheLocation}`); @@ -57,12 +57,9 @@ function cleanupCache() { .forEach((file) => { promises.push(fse.removeAsync(file)); }); - return BbPromise.all(promises) - .then(() => cleanupProgress && cleanupProgress.remove()) - .catch((e) => { - cleanupProgress && cleanupProgress.remove(); - throw e; - }); + return BbPromise.all(promises).finally( + () => cleanupProgress && cleanupProgress.remove() + ); } else { if (this.serverless) { if (this.log) { diff --git a/lib/inject.js b/lib/inject.js index 9f3ad77a..f32c9d46 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -86,7 +86,7 @@ function moveModuleUp(source, target, module) { * Inject requirements into packaged application. * @return {Promise} the combined promise for requirements injection. 
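 *
 * When the `layer` option is set the requirements ship as a Lambda layer and
 * injection is skipped; with the `zip` option no injection happens here either
 * (requirements remain in the separate `.requirements.zip`). Under
 * `package.individually`, each function's artifact is first rewritten to
 * `.serverless/<module>-<name>.zip` by moveModuleUp and then patched with its
 * module's requirements; otherwise the single service-level artifact is
 * patched in place.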
*/ -function injectAllRequirements(funcArtifact) { +async function injectAllRequirements(funcArtifact) { if (this.options.layer) { // The requirements will be placed in a Layer, so just resolve return BbPromise.resolve(); @@ -101,61 +101,56 @@ function injectAllRequirements(funcArtifact) { this.serverless.cli.log('Injecting required Python packages to package...'); } - let returnPromise; - if (this.serverless.service.package.individually) { - returnPromise = BbPromise.resolve(this.targetFuncs) - .filter((func) => - (func.runtime || this.serverless.service.provider.runtime).match( - /^python.*/ + try { + if (this.serverless.service.package.individually) { + await BbPromise.resolve(this.targetFuncs) + .filter((func) => + (func.runtime || this.serverless.service.provider.runtime).match( + /^python.*/ + ) ) - ) - .map((func) => { - if (!get(func, 'module')) { - set(func, ['module'], '.'); - } - return func; - }) - .map((func) => { - if (func.module !== '.') { - const artifact = func.package ? func.package.artifact : funcArtifact; - const newArtifact = path.join( - '.serverless', - `${func.module}-${func.name}.zip` - ); - func.package.artifact = newArtifact; - return moveModuleUp(artifact, newArtifact, func.module).then( - () => func - ); - } else { + .map((func) => { + if (!get(func, 'module')) { + set(func, ['module'], '.'); + } return func; - } - }) - .map((func) => { - return this.options.zip - ? func - : injectRequirements( - path.join('.serverless', func.module, 'requirements'), - func.package.artifact, - this.options + }) + .map((func) => { + if (func.module !== '.') { + const artifact = func.package + ? func.package.artifact + : funcArtifact; + const newArtifact = path.join( + '.serverless', + `${func.module}-${func.name}.zip` ); - }); - } else if (!this.options.zip) { - returnPromise = injectRequirements( - path.join('.serverless', 'requirements'), - this.serverless.service.package.artifact || funcArtifact, - this.options - ); + func.package.artifact = newArtifact; + return moveModuleUp(artifact, newArtifact, func.module).then( + () => func + ); + } else { + return func; + } + }) + .map((func) => { + return this.options.zip + ? 
func + : injectRequirements( + path.join('.serverless', func.module, 'requirements'), + func.package.artifact, + this.options + ); + }); + } else if (!this.options.zip) { + await injectRequirements( + path.join('.serverless', 'requirements'), + this.serverless.service.package.artifact || funcArtifact, + this.options + ); + } + } finally { + injectProgress && injectProgress.remove(); } - - return ( - returnPromise && - returnPromise - .then(() => injectProgress && injectProgress.remove()) - .catch((e) => { - injectProgress && injectProgress.remove(); - throw e; - }) - ); } module.exports = { injectAllRequirements }; diff --git a/lib/layer.js b/lib/layer.js index 141d1fd7..fe2a4a00 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -65,11 +65,7 @@ function layerRequirements() { return BbPromise.bind(this) .then(zipRequirements) .then(createLayers) - .then(() => layerProgress && layerProgress.remove()) - .catch((e) => { - layerProgress && layerProgress.remove(); - throw e; - }); + .finally(() => layerProgress && layerProgress.remove()); } module.exports = { diff --git a/lib/zip.js b/lib/zip.js index aabb2333..cba29450 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -139,11 +139,7 @@ function packRequirements() { f.package.patterns.push(`${f.module}/.requirements.zip`); return addTree(new JSZip(), `.serverless/${f.module}/requirements`) .then((zip) => writeZip(zip, `${f.module}/.requirements.zip`)) - .then(() => packProgress && packProgress.remove()) - .catch((e) => { - packProgress && packProgress.remove(); - throw e; - }); + .finally(() => packProgress && packProgress.remove()); }); } else { let packProgress; @@ -157,11 +153,7 @@ function packRequirements() { .then((zip) => writeZip(zip, path.join(this.servicePath, '.requirements.zip')) ) - .then(() => packProgress && packProgress.remove()) - .catch((e) => { - packProgress && packProgress.remove(); - throw e; - }); + .finally(() => packProgress && packProgress.remove()); } } } From cdb71110bc9c69b5087b6e18fb353d65962afe4a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 12:16:27 +0100 Subject: [PATCH 252/328] refactor: Use `ServerlessError` in `docker` --- lib/docker.js | 36 ++++++++++++++++++++++-------------- lib/pip.js | 5 +++-- 2 files changed, 25 insertions(+), 16 deletions(-) diff --git a/lib/docker.js b/lib/docker.js index 9da6da6a..5157803f 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -8,7 +8,7 @@ const path = require('path'); * @param {string[]} options * @return {Object} */ -async function dockerCommand(options) { +async function dockerCommand(options, pluginInstance) { const cmd = 'docker'; try { return await spawn(cmd, options, { encoding: 'utf-8' }); @@ -17,7 +17,10 @@ async function dockerCommand(options) { e.stderrBuffer && e.stderrBuffer.toString().includes('command not found') ) { - throw new Error('docker not found! Please install it.'); + throw new pluginInstance.serverless.classes.Error( + 'docker not found! Please install it.', + 'PYTHON_REQUIREMENTS_DOCKER_NOT_FOUND' + ); } throw e; } @@ -29,19 +32,22 @@ async function dockerCommand(options) { * @param {string[]} extraArgs * @return {string} The name of the built docker image. 
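 *
 * The image is built from the user-supplied Dockerfile and tagged
 * `sls-py-reqs-custom`; `extraArgs` (populated from the
 * `dockerBuildCmdExtraArgs` option) must be an array and is appended verbatim
 * to the `docker build` invocation.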
*/ -async function buildImage(dockerFile, extraArgs) { +async function buildImage(dockerFile, extraArgs, pluginInstance) { const imageName = 'sls-py-reqs-custom'; const options = ['build', '-f', dockerFile, '-t', imageName]; if (Array.isArray(extraArgs)) { options.push(...extraArgs); } else { - throw new Error('dockerRunCmdExtraArgs option must be an array'); + throw new pluginInstance.serverless.classes.Error( + 'dockerRunCmdExtraArgs option must be an array', + 'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS' + ); } options.push('.'); - await dockerCommand(options); + await dockerCommand(options, pluginInstance); return imageName; } @@ -50,7 +56,7 @@ async function buildImage(dockerFile, extraArgs) { * @param {string} servicePath * @return {string} file name */ -function findTestFile(servicePath) { +function findTestFile(servicePath, pluginInstance) { if (fse.pathExistsSync(path.join(servicePath, 'serverless.yml'))) { return 'serverless.yml'; } @@ -63,8 +69,9 @@ function findTestFile(servicePath) { if (fse.pathExistsSync(path.join(servicePath, 'requirements.txt'))) { return 'requirements.txt'; } - throw new Error( - 'Unable to find serverless.{yml|yaml|json} or requirements.txt for getBindPath()' + throw new pluginInstance.serverless.classes.Error( + 'Unable to find serverless.{yml|yaml|json} or requirements.txt for getBindPath()', + 'PYTHON_REQUIREMENTS_MISSING_GET_BIND_PATH_FILE' ); } @@ -73,7 +80,8 @@ function findTestFile(servicePath) { * @param {string} bindPath * @return {boolean} */ -async function tryBindPath(bindPath, testFile, { serverless, log }) { +async function tryBindPath(bindPath, testFile, pluginInstance) { + const { serverless, log } = pluginInstance; const debug = process.env.SLS_DEBUG; const options = [ 'run', @@ -92,7 +100,7 @@ async function tryBindPath(bindPath, testFile, { serverless, log }) { serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); } } - const ps = await dockerCommand(options); + const ps = await dockerCommand(options, pluginInstance); if (debug) { if (log) { log.debug(ps.stdoutBuffer.trim()); @@ -126,7 +134,7 @@ async function getBindPath(servicePath, pluginInstance) { } // test docker is available - await dockerCommand(['version']); + await dockerCommand(['version'], pluginInstance); // find good bind path for Windows let bindPaths = []; @@ -159,7 +167,7 @@ async function getBindPath(servicePath, pluginInstance) { bindPaths.push(`/mnt/${drive.toUpperCase()}/${path}`); bindPaths.push(`${drive.toUpperCase()}:/${path}`); - const testFile = findTestFile(servicePath); + const testFile = findTestFile(servicePath, pluginInstance); for (let i = 0; i < bindPaths.length; i++) { const bindPath = bindPaths[i]; @@ -176,7 +184,7 @@ async function getBindPath(servicePath, pluginInstance) { * @param {string} bindPath * @return {boolean} */ -async function getDockerUid(bindPath) { +async function getDockerUid(bindPath, pluginInstance) { const options = [ 'run', '--rm', @@ -188,7 +196,7 @@ async function getDockerUid(bindPath) { '%u', '/bin/sh', ]; - const ps = await dockerCommand(options); + const ps = await dockerCommand(options, pluginInstance); return ps.stdoutBuffer.trim(); } diff --git a/lib/pip.js b/lib/pip.js index 2f6d4571..24d4c55f 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -251,7 +251,8 @@ async function installRequirements(targetFolder, pluginInstance) { try { dockerImage = await buildImage( options.dockerFile, - options.dockerBuildCmdExtraArgs + options.dockerBuildCmdExtraArgs, + pluginInstance ); } finally { buildDockerImageProgress && 
buildDockerImageProgress.remove(); @@ -335,7 +336,7 @@ async function installRequirements(targetFolder, pluginInstance) { ]); } else { // Use same user so --cache-dir works - dockerCmd.push('-u', await getDockerUid(bindPath)); + dockerCmd.push('-u', await getDockerUid(bindPath, pluginInstance)); } for (let path of options.dockerExtraFiles) { From 395082761ae574c2664f1c272ea4970cfa3fd1f7 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 12:22:22 +0100 Subject: [PATCH 253/328] refactor: Use `ServerlessError` in `poetry` --- lib/poetry.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/poetry.js b/lib/poetry.js index 12904fd9..23f43dc0 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -50,8 +50,9 @@ async function pyprojectTomlToRequirements() { e.stderrBuffer && e.stderrBuffer.toString().includes('command not found') ) { - throw new Error( - `poetry not found! Install it according to the poetry docs.` + throw new this.serverless.classes.Error( + `poetry not found! Install it according to the poetry docs.`, + 'PYTHON_REQUIREMENTS_POETRY_NOT_FOUND' ); } throw e; From 618ef76c830349f34f5f8414dc6e381b167cfa43 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 12:23:04 +0100 Subject: [PATCH 254/328] refactor: Use `ServerlessError` in `pipenv` --- lib/pipenv.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/pipenv.js b/lib/pipenv.js index 4949e924..5856d47b 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -42,8 +42,9 @@ async function pipfileToRequirements() { e.stderrBuffer && e.stderrBuffer.toString().includes('command not found') ) { - throw new Error( - `pipenv not found! Install it with 'pip install pipenv'.` + throw new this.serverless.classes.Error( + `pipenv not found! Install it according to the poetry docs.`, + 'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND' ); } throw e; From 8a4bc83025b41acd7f6e88982cd72af24f8d9967 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 12:28:02 +0100 Subject: [PATCH 255/328] refactor: Use `ServerlessError` in `pip` --- lib/pip.js | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index 24d4c55f..ce348532 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -107,7 +107,7 @@ function generateRequirementsFile( } } -async function pipAcceptsSystem(pythonBin) { +async function pipAcceptsSystem(pythonBin, pluginInstance) { // Check if pip has Debian's --system option and set it if so try { const pipTestRes = await spawn(pythonBin, ['-m', 'pip', 'help', 'install']); @@ -120,7 +120,10 @@ async function pipAcceptsSystem(pythonBin) { e.stderrBuffer && e.stderrBuffer.toString().includes('command not found') ) { - throw new Error(`${pythonBin} not found! Try the pythonBin option.`); + throw new pluginInstance.serverless.classes.Error( + `${pythonBin} not found! Install it according to the poetry docs.`, + 'PYTHON_REQUIREMENTS_PYTHON_NOT_FOUND' + ); } throw e; } @@ -167,10 +170,9 @@ async function installRequirements(targetFolder, pluginInstance) { // Check if we're using the legacy --cache-dir command... if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { if (options.dockerizePip) { - throw ( - 'Error: You can not use --cache-dir with Docker any more, please\n' + - ' use the new option useDownloadCache instead. 
Please see:\n' + - ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + throw new pluginInstance.serverless.classes.Error( + 'You cannot use --cache-dir with Docker any more, please use the new option useDownloadCache instead. Please see: https://github.com/UnitedIncome/serverless-python-requirements#caching for more details.', + 'PYTHON_REQUIREMENTS_CACHE_DIR_DOCKER_INVALID' ); } else { if (log) { @@ -222,7 +224,7 @@ async function installRequirements(targetFolder, pluginInstance) { pipCmd.push('--cache-dir', downloadCacheDir); } - if (await pipAcceptsSystem(options.pythonBin)) { + if (await pipAcceptsSystem(options.pythonBin, pluginInstance)) { pipCmd.push('--system'); } } @@ -358,7 +360,10 @@ async function installRequirements(targetFolder, pluginInstance) { if (Array.isArray(options.dockerRunCmdExtraArgs)) { dockerCmd.push(...options.dockerRunCmdExtraArgs); } else { - throw new Error('dockerRunCmdExtraArgs option must be an array'); + throw new pluginInstance.serverless.classes.Error( + 'dockerRunCmdExtraArgs option must be an array', + 'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS' + ); } dockerCmd.push(dockerImage); @@ -405,7 +410,10 @@ async function installRequirements(targetFolder, pluginInstance) { cmd.indexOf('python') > -1 ? 'Try the pythonBin option' : 'Please install it'; - throw new Error(`${cmd} not found! ${advice}`); + throw new pluginInstance.serverless.classes.Error( + `${cmd} not found! ${advice}`, + 'PYTHON_REQUIREMENTS_COMMAND_NOT_FOUND' + ); } throw e; } From 1f0804cfa95e85375b5a075188b1fee1fcdcb42e Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 13:05:20 +0100 Subject: [PATCH 256/328] ci: Update validate CI workflow --- .github/workflows/lint.yml | 20 --- .github/workflows/test.yml | 54 -------- .github/workflows/validate.yml | 240 +++++++++++++++++++++++++++++++++ package.json | 11 +- 4 files changed, 248 insertions(+), 77 deletions(-) delete mode 100644 .github/workflows/lint.yml delete mode 100644 .github/workflows/test.yml create mode 100644 .github/workflows/validate.yml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index 1e6b9ee8..00000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Lint - -on: [push, pull_request] - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - - name: Set up Node ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: 14 - - - name: Install deps - run: npm install - - - name: Lint - run: npm run ci:lint diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index f89486bf..00000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: Test - -on: [push, pull_request] - -jobs: - build: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, windows-latest, macOS-latest] - python-version: [2.7, 3.6] - steps: - - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Set up Node - uses: actions/setup-node@v1 - with: - node-version: 14 - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv poetry - - - name: Install serverless - run: npm install -g serverless@2 - - - name: Install deps - run: 
npm install - - - name: Test - run: npm run test - env: - LC_ALL: C.UTF-8 - LANG: C.UTF-8 - if: matrix.os != 'macOS-latest' - - - name: Test (Mac) - run: npm run test - env: - LC_ALL: en_US.UTF-8 - LANG: en_US.UTF-8 - if: matrix.os == 'macOS-latest' diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml new file mode 100644 index 00000000..3a9af5c4 --- /dev/null +++ b/.github/workflows/validate.yml @@ -0,0 +1,240 @@ +# PR's only + +name: Validate + +on: + pull_request: + branches: [master] + +env: + FORCE_COLOR: 1 + +jobs: + linuxNode16: + name: '[Linux] Node.js v16: Lint, Formatting & Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: + # For commitlint purpose ensure to have complete list of PR commits + # It's loose and imperfect assumption that PR has no more than 30 commits + fetch-depth: 30 + + - name: Retrieve last master commit (for `git diff` purposes) + run: | + git checkout -b pr + git fetch --prune --depth=30 origin +refs/heads/master:refs/remotes/origin/master + git checkout master + git checkout pr + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: | + npm-v16-${{ runner.os }}-${{ github.ref }}- + npm-v16-${{ runner.os }}-refs/heads/master- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 16.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Validate Prettier formatting + run: npm run prettier-check:updated + - name: Validate ESLint rules + run: npm run lint:updated + - name: Unit tests + run: script -e -c "npm test" + + windowsNode16: + name: '[Windows] Node.js v16: Unit tests' + runs-on: windows-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: | + npm-v16-${{ runner.os }}-${{ github.ref }}- + npm-v16-${{ runner.os }}-refs/heads/master- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 16.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - 
name: Unit tests + run: npm test + + linuxNode14: + name: '[Linux] Node.js 14: Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: | + npm-v14-${{ runner.os }}-${{ github.ref }}- + npm-v14-${{ runner.os }}-refs/heads/master- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 14.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + # Some tests depend on TTY support, which is missing in GA runner + # Workaround taken from https://github.com/actions/runner/issues/241#issuecomment-577360161 + run: script -e -c "npm test" + + linuxNode12: + name: '[Linux] Node.js v12: Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v12-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: | + npm-v12-${{ runner.os }}-${{ github.ref }}- + npm-v12-${{ runner.os }}-refs/heads/master- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 12.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + run: script -e -c "npm test" diff --git a/package.json b/package.json index c9d247a3..9d1f5852 100644 --- a/package.json +++ b/package.json @@ -38,14 +38,19 @@ "main": "index.js", "bin": {}, "scripts": { - "ci:lint": "eslint *.js lib/*.js --format junit --output-file ~/reports/eslint.xml && prettier -c '{.,lib}/*.{js,md}'", - "test": "node test.js", + "format": "prettier --write '{.,lib}/*.{js,md}'", "lint": "eslint *.js lib/*.js && prettier -c '{.,lib}/*.{js,md}'", - "format": "prettier --write '{.,lib}/*.{js,md}'" + "lint:updated": "pipe-git-updated --ext=js -- eslint", + "prettier-check": "prettier -c --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", + "prettier-check:updated": "pipe-git-updated --ext=css --ext=html --ext=js --ext=json --ext=md --ext=yaml --ext=yml -- prettier -c", + "prettify": "prettier --write --ignore-path .gitignore 
\"**/*.{css,html,js,json,md,yaml,yml}\"", + "prettify:updated": "pipe-git-updated --ext=css --ext=html --ext=js --ext=json --ext=md --ext=yaml --ext=yml -- prettier --write", + "test": "node test.js" }, "devDependencies": { "cross-spawn": "*", "eslint": "^7.32.0", + "git-list-updated": "^1.2.1", "lodash": "^4.17.21", "prettier": "^2", "tape": "*", From 080b0ba4e834e4ba1d60db243d5dc9962fa9c9f0 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 13:57:45 +0100 Subject: [PATCH 257/328] ci: Introduce integrate CI workflow --- .github/workflows/integrate.yml | 239 ++++++++++++++++++++++++++++++++ 1 file changed, 239 insertions(+) create mode 100644 .github/workflows/integrate.yml diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml new file mode 100644 index 00000000..d241971c --- /dev/null +++ b/.github/workflows/integrate.yml @@ -0,0 +1,239 @@ +# master only + +name: Integrate + +on: + push: + branches: [master] + +env: + FORCE_COLOR: 1 + +jobs: + linuxNode16: + name: '[Linux] Node.js v16: Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: npm-v16-${{ runner.os }}-${{ github.ref }}- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 16.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + run: script -e -c "npm test" + + windowsNode16: + name: '[Windows] Node.js v16: Unit tests' + runs-on: windows-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: npm-v16-${{ runner.os }}-${{ github.ref }}- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 16.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + run: npm test + + linuxNode14: + name: '[Linux] Node.js 14: Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout 
repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 14.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + run: script -e -c "npm test" + + linuxNode12: + name: '[Linux] Node.js v12: Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v12-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: npm-v12-${{ runner.os }}-${{ github.ref }}- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 12.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + run: script -e -c "npm test" + + tagIfNewVersion: + name: Tag if new version + runs-on: ubuntu-latest + needs: [linuxNode14, windowsNode14, linuxNode16, linuxNode12] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: + # Ensure to have complete history of commits pushed with given push operation + # It's loose and imperfect assumption that no more than 30 commits will be pushed at once + fetch-depth: 30 + # Tag needs to be pushed with real user token, otherwise pushed tag won't trigger the actions workflow + # Hence we're passing 'serverless-ci' user authentication token + token: ${{ secrets.USER_GITHUB_TOKEN }} + + - name: Tag if new version + run: | + NEW_VERSION=`git diff -U0 ${{ github.event.before }} package.json | grep '"version": "' | tail -n 1 | grep -oE "[0-9]+\.[0-9]+\.[0-9]+"` || : + if [ -n "$NEW_VERSION" ]; + then + git tag v$NEW_VERSION + git push --tags + fi From f4d87b459a82422347758f647774c77542525774 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 14:05:10 +0100 Subject: [PATCH 258/328] ci: Introduce new CI publish workflow --- .github/workflows/publish.yml | 50 +++++++++++++++++++++++++++++------ CHANGELOG.md | 4 +++ package.json | 2 ++ 3 files changed, 48 insertions(+), 8 deletions(-) create mode 100644 CHANGELOG.md diff --git a/.github/workflows/publish.yml 
b/.github/workflows/publish.yml index 6a1e7d26..6eee5b45 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,18 +1,52 @@ +# Version tags only + name: Publish -on: [release] +on: + push: + tags: + - v[0-9]+.[0-9]+.[0-9]+ jobs: - publish-npm: + publish: + name: Publish runs-on: ubuntu-latest + env: + # It'll work with secrets.GITHUB_TOKEN (which is provided by GitHub unconditionally) + # Still then release author would be "github-actions". It's better if it's dedicated repo bot + GITHUB_TOKEN: ${{ secrets.USER_GITHUB_TOKEN }} steps: - - uses: actions/checkout@v2 + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve node_modules from cache + id: cacheNodeModules + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v14-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} - - uses: actions/setup-node@v2 + - name: Install Node.js and npm + uses: actions/setup-node@v1 with: - version: 14 - registry-url: https://registry.npmjs.org/ + node-version: 14.x + registry-url: https://registry.npmjs.org - - run: npm publish + - name: Publish new version env: - NODE_AUTH_TOKEN: ${{secrets.npm_token}} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + run: npm publish + + # Note: No need to install dependencies as: + # 1. We have retrieved cached `node_modules` for very same `package.json` + # as stored with recent `master `build + # 2. If for some reason cache retrieval fails `npx` will download and install + # `github-release-from-cc-changelog` + + - name: Publish release notes + run: | + TEMP_ARRAY=($(echo $GITHUB_REF | tr "/" "\n")) + TAG=${TEMP_ARRAY[@]: -1} + npx github-release-from-cc-changelog $TAG diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..cda73dee --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,4 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ diff --git a/package.json b/package.json index 9d1f5852..ec73734b 100644 --- a/package.json +++ b/package.json @@ -41,6 +41,7 @@ "format": "prettier --write '{.,lib}/*.{js,md}'", "lint": "eslint *.js lib/*.js && prettier -c '{.,lib}/*.{js,md}'", "lint:updated": "pipe-git-updated --ext=js -- eslint", + "prepare-release": "standard-version && prettier --write CHANGELOG.md", "prettier-check": "prettier -c --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", "prettier-check:updated": "pipe-git-updated --ext=css --ext=html --ext=js --ext=json --ext=md --ext=yaml --ext=yml -- prettier -c", "prettify": "prettier --write --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", @@ -51,6 +52,7 @@ "cross-spawn": "*", "eslint": "^7.32.0", "git-list-updated": "^1.2.1", + "github-release-from-cc-changelog": "^2.2.0", "lodash": "^4.17.21", "prettier": "^2", "tape": "*", From 274b8e52d9499612afbf096c023d73e75e0dd71a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 14:07:57 +0100 Subject: [PATCH 259/328] ci: Add commitlint job to CI --- .github/workflows/validate.yml | 12 +++++++++++- CHANGELOG.md | 1 - commitlint.config.js | 31 +++++++++++++++++++++++++++++++ package.json | 1 + 4 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 commitlint.config.js diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 3a9af5c4..9f546619 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -11,7 +11,7 @@ env: jobs: linuxNode16: - name: '[Linux] Node.js v16: Lint, Formatting & Unit tests' + name: '[Linux] Node.js v16: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' runs-on: ubuntu-latest strategy: matrix: @@ -75,6 +75,16 @@ jobs: run: npm run prettier-check:updated - name: Validate ESLint rules run: npm run lint:updated + - name: Validate commit messages + if: github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id + run: npx commitlint -f master + - name: Validate changelog (if new version) + run: | + NEW_VERSION=`git diff -U0 master package.json | grep '"version": "' | tail -n 1 | grep -oE "[0-9]+\.[0-9]+\.[0-9]+"` || : + if [ -n "$NEW_VERSION" ]; + then + npx dump-release-notes-from-cc-changelog $NEW_VERSION + fi - name: Unit tests run: script -e -c "npm test" diff --git a/CHANGELOG.md b/CHANGELOG.md index cda73dee..5c79a3c3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,3 @@ # Changelog All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
- diff --git a/commitlint.config.js b/commitlint.config.js new file mode 100644 index 00000000..d23a0d6b --- /dev/null +++ b/commitlint.config.js @@ -0,0 +1,31 @@ +'use strict'; + +module.exports = { + rules: { + 'body-leading-blank': [2, 'always'], + 'footer-leading-blank': [2, 'always'], + 'header-max-length': [2, 'always', 72], + 'scope-enum': [2, 'always', ['', 'Config', 'Log']], + 'subject-case': [2, 'always', 'sentence-case'], + 'subject-empty': [2, 'never'], + 'subject-full-stop': [2, 'never', '.'], + 'type-case': [2, 'always', 'lower-case'], + 'type-empty': [2, 'never'], + 'type-enum': [ + 2, + 'always', + [ + 'build', + 'chore', + 'ci', + 'docs', + 'feat', + 'fix', + 'perf', + 'refactor', + 'style', + 'test', + ], + ], + }, +}; diff --git a/package.json b/package.json index ec73734b..be7bc48d 100644 --- a/package.json +++ b/package.json @@ -38,6 +38,7 @@ "main": "index.js", "bin": {}, "scripts": { + "commitlint": "commitlint -f HEAD@{15}", "format": "prettier --write '{.,lib}/*.{js,md}'", "lint": "eslint *.js lib/*.js && prettier -c '{.,lib}/*.{js,md}'", "lint:updated": "pipe-git-updated --ext=js -- eslint", From 29f957dd04505174e7f23f20945a2fb8307a7942 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 14:09:09 +0100 Subject: [PATCH 260/328] chore: Reformat with eslint & prettier --- .github/dependabot.yml | 20 +- .github/workflows/integrate.yml | 8 +- .github/workflows/publish.yml | 4 +- .github/workflows/validate.yml | 8 +- README.md | 12 +- index.js | 44 ++-- package.json | 11 +- test.js | 371 ++++++++++++++++---------------- tests/base/_slimPatterns.yml | 2 +- tests/base/serverless.yml | 3 - tests/pipenv/_slimPatterns.yml | 2 +- tests/poetry/_slimPatterns.yml | 2 +- 12 files changed, 249 insertions(+), 238 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index ac29398e..ab487438 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,12 +1,12 @@ version: 2 updates: -- package-ecosystem: npm - directory: "/" - schedule: - interval: daily - time: "10:00" - open-pull-requests-limit: 10 - ignore: - - dependency-name: eslint - versions: - - "> 7.22.0" + - package-ecosystem: npm + directory: '/' + schedule: + interval: daily + time: '10:00' + open-pull-requests-limit: 10 + ignore: + - dependency-name: eslint + versions: + - '> 7.22.0' diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index d241971c..64396542 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -59,7 +59,7 @@ jobs: npm update --no-save npm update --save-dev --no-save - name: Unit tests - run: script -e -c "npm test" + run: npm test windowsNode16: name: '[Windows] Node.js v16: Unit tests' @@ -161,7 +161,7 @@ jobs: npm update --no-save npm update --save-dev --no-save - name: Unit tests - run: script -e -c "npm test" + run: npm test linuxNode12: name: '[Linux] Node.js v12: Unit tests' @@ -212,12 +212,12 @@ jobs: npm update --no-save npm update --save-dev --no-save - name: Unit tests - run: script -e -c "npm test" + run: npm test tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - needs: [linuxNode14, windowsNode14, linuxNode16, linuxNode12] + needs: [linuxNode16, windowsNode16, linuxNode14, linuxNode12] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 6eee5b45..b44da770 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -26,12 +26,12 @@ jobs: path: | ~/.npm 
node_modules - key: npm-v14-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} + key: npm-v16-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} - name: Install Node.js and npm uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 16.x registry-url: https://registry.npmjs.org - name: Publish new version diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 9f546619..d541cec7 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -86,7 +86,7 @@ jobs: npx dump-release-notes-from-cc-changelog $NEW_VERSION fi - name: Unit tests - run: script -e -c "npm test" + run: npm test windowsNode16: name: '[Windows] Node.js v16: Unit tests' @@ -192,9 +192,7 @@ jobs: npm update --no-save npm update --save-dev --no-save - name: Unit tests - # Some tests depend on TTY support, which is missing in GA runner - # Workaround taken from https://github.com/actions/runner/issues/241#issuecomment-577360161 - run: script -e -c "npm test" + run: npm test linuxNode12: name: '[Linux] Node.js v12: Unit tests' @@ -247,4 +245,4 @@ jobs: npm update --no-save npm update --save-dev --no-save - name: Unit tests - run: script -e -c "npm test" + run: npm test diff --git a/README.md b/README.md index 518d5ce1..abe6a175 100644 --- a/README.md +++ b/README.md @@ -7,13 +7,13 @@ A Serverless v1.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. ---- +--- _Originally developed by [**Capital One**](https://www.capitalone.com/tech/open-source/), now maintained in scope of Serverless, Inc_ _Capital One considers itself the bank a technology company would build. It's delivering best-in-class innovation so that its millions of customers can manage their finances with ease. Capital One is all-in on the cloud and is a leader in the adoption of open source, RESTful APIs, microservices and containers. We build our own products and release them with a speed and agility that allows us to get new customer experiences to market quickly. Our engineers use artificial intelligence and machine learning to transform real-time data, software and algorithms into the future of finance, reimagined._ ---- +--- ## Install @@ -557,10 +557,10 @@ package: - [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching - [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! 
- Poetry support - - [@squaresurf](https://github.com/squaresurf) - - [@drice](https://github.com/drice) - - [@ofercaspi](https://github.com/ofercaspi) - - [@tpansino](https://github.com/tpansino) + - [@squaresurf](https://github.com/squaresurf) + - [@drice](https://github.com/drice) + - [@ofercaspi](https://github.com/ofercaspi) + - [@tpansino](https://github.com/tpansino) - [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support - [@bryantbriggs](https://github.com/bryantbiggs) - Fixing CI/CD - [@jacksgt](https://github.com/jacksgt) - Fixing pip issues diff --git a/index.js b/index.js index cf2af38e..7741a7f8 100644 --- a/index.js +++ b/index.js @@ -7,7 +7,7 @@ const values = require('lodash.values'); const { addVendorHelper, removeVendorHelper, - packRequirements + packRequirements, } = require('./lib/zip'); const { injectAllRequirements } = require('./lib/inject'); const { layerRequirements } = require('./lib/layer'); @@ -57,7 +57,7 @@ class ServerlessPythonRequirements { staticCacheMaxVersions: 0, pipCmdExtraArgs: [], noDeploy: [], - vendor: '' + vendor: '', }, (this.serverless.service.custom && this.serverless.service.custom.pythonRequirements) || @@ -75,7 +75,9 @@ class ServerlessPythonRequirements { ) { if (!this.warningLogged) { if (this.log) { - this.log.warning('You provided a docker related option but dockerizePip is set to false.'); + this.log.warning( + 'You provided a docker related option but dockerizePip is set to false.' + ); } else { this.serverless.cli.log( 'WARNING: You provided a docker related option but dockerizePip is set to false.' @@ -144,31 +146,31 @@ class ServerlessPythonRequirements { commands: { clean: { usage: 'Remove .requirements and requirements.zip', - lifecycleEvents: ['clean'] + lifecycleEvents: ['clean'], }, install: { usage: 'install requirements manually', - lifecycleEvents: ['install'] + lifecycleEvents: ['install'], }, cleanCache: { usage: 'Removes all items in the pip download/static cache (if present)', - lifecycleEvents: ['cleanCache'] - } - } - } + lifecycleEvents: ['cleanCache'], + }, + }, + }, }; - if (this.serverless.cli.generateCommandsHelp) { - Object.assign(this.commands.requirements, { - usage: 'Serverless plugin to bundle Python packages', - lifecycleEvents: ['requirements'] - }); - } else { - this.commands.requirements.type = 'container'; - } + if (this.serverless.cli.generateCommandsHelp) { + Object.assign(this.commands.requirements, { + usage: 'Serverless plugin to bundle Python packages', + lifecycleEvents: ['requirements'], + }); + } else { + this.commands.requirements.type = 'container'; + } - const isFunctionRuntimePython = args => { + const isFunctionRuntimePython = (args) => { // If functionObj.runtime is undefined, python. 
if (!args[1].functionObj || !args[1].functionObj.runtime) { return true; @@ -177,9 +179,7 @@ class ServerlessPythonRequirements { }; const clean = () => - BbPromise.bind(this) - .then(cleanup) - .then(removeVendorHelper); + BbPromise.bind(this).then(cleanup).then(removeVendorHelper); const setupArtifactPathCapturing = () => { // Reference: @@ -243,7 +243,7 @@ class ServerlessPythonRequirements { }, 'requirements:install:install': before, 'requirements:clean:clean': clean, - 'requirements:cleanCache:cleanCache': cleanCache + 'requirements:cleanCache:cleanCache': cleanCache, }; } } diff --git a/package.json b/package.json index be7bc48d..4db9497a 100644 --- a/package.json +++ b/package.json @@ -39,8 +39,7 @@ "bin": {}, "scripts": { "commitlint": "commitlint -f HEAD@{15}", - "format": "prettier --write '{.,lib}/*.{js,md}'", - "lint": "eslint *.js lib/*.js && prettier -c '{.,lib}/*.{js,md}'", + "lint": "eslint .", "lint:updated": "pipe-git-updated --ext=js -- eslint", "prepare-release": "standard-version && prettier --write CHANGELOG.md", "prettier-check": "prettier -c --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", @@ -79,6 +78,14 @@ "peerDependencies": { "serverless": "^2.32" }, + "lint-staged": { + "*.js": [ + "eslint" + ], + "*.{css,html,js,json,md,yaml,yml}": [ + "prettier -c" + ] + }, "eslintConfig": { "extends": "eslint:recommended", "env": { diff --git a/test.js b/test.js index 0322ab91..f234229a 100644 --- a/test.js +++ b/test.js @@ -10,7 +10,7 @@ const { copySync, writeFileSync, statSync, - pathExistsSync + pathExistsSync, } = require('fs-extra'); const { quote } = require('shell-quote'); const { sep } = require('path'); @@ -19,30 +19,32 @@ const { getUserCachePath, sha256Path } = require('./lib/shared'); const initialWorkingDir = process.cwd(); -const mkCommand = cmd => (args, options = {}) => { - const { error, stdout, stderr, status } = crossSpawn.sync( - cmd, - args, - Object.assign( - { - env: Object.assign({}, process.env, { SLS_DEBUG: 't' }) - }, - options - ) - ); - if (error) { - console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console - throw error; - } - if (status) { - console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console - console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console - throw new Error( - `${quote([cmd, ...args])} failed with status code ${status}` - ); - } - return stdout && stdout.toString().trim(); -}; +const mkCommand = + (cmd) => + (args, options = {}) => { + const { error, stdout, stderr, status } = crossSpawn.sync( + cmd, + args, + Object.assign( + { + env: Object.assign({}, process.env, { SLS_DEBUG: 't' }), + }, + options + ) + ); + if (error) { + console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console + throw error; + } + if (status) { + console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console + console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console + throw new Error( + `${quote([cmd, ...args])} failed with status code ${status}` + ); + } + return stdout && stdout.toString().trim(); + }; const sls = mkCommand('sls'); const git = mkCommand('git'); const npm = mkCommand('npm'); @@ -73,8 +75,8 @@ const teardown = () => { 'serverless.yml.bak', 'module1/foobar', getUserCachePath(), - ...glob.sync('serverless-python-requirements-*.tgz') - ].map(path => removeSync(path)); + ...glob.sync('serverless-python-requirements-*.tgz'), + ].map((path) => removeSync(path)); if 
(!cwd.endsWith('base with a space')) { try { git(['checkout', 'serverless.yml']); @@ -93,15 +95,17 @@ const teardown = () => { const testFilter = (() => { const elems = process.argv.slice(2); // skip ['node', 'test.js'] if (elems.length) { - return desc => - elems.some(text => desc.search(text) != -1) ? tape.test : tape.test.skip; + return (desc) => + elems.some((text) => desc.search(text) != -1) + ? tape.test + : tape.test.skip; } else { return () => tape.test; } })(); const test = (desc, func, opts = {}) => - testFilter(desc)(desc, opts, async t => { + testFilter(desc)(desc, opts, async (t) => { setup(); let ended = false; try { @@ -124,7 +128,7 @@ const availablePythons = (() => { const mapping = {}; if (process.env.USE_PYTHON) { binaries.push( - ...process.env.USE_PYTHON.split(',').map(v => v.toString().trim()) + ...process.env.USE_PYTHON.split(',').map((v) => v.toString().trim()) ); } else { // For running outside of CI @@ -135,7 +139,7 @@ const availablePythons = (() => { const python = `${bin}${exe}`; const { stdout, status } = crossSpawn.sync(python, [ '-c', - 'import sys; sys.stdout.write(".".join(map(str, sys.version_info[:2])))' + 'import sys; sys.stdout.write(".".join(map(str, sys.version_info[:2])))', ]); const ver = stdout && stdout.toString().trim(); if (!status && ver) { @@ -152,29 +156,29 @@ const availablePythons = (() => { return mapping; })(); -const getPythonBin = version => { +const getPythonBin = (version) => { const bin = availablePythons[String(version)]; if (!bin) throw new Error(`No python version ${version} available`); return bin; }; -const hasPython = version => { +const hasPython = (version) => { return Boolean(availablePythons[String(version)]); }; -const listZipFiles = async function(filename) { +const listZipFiles = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); return Object.keys(zip.files); }; -const listZipFilesWithMetaData = async function(filename) { +const listZipFilesWithMetaData = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); return Object(zip.files); }; -const listRequirementsZipFiles = async function(filename) { +const listRequirementsZipFiles = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); const reqsBuffer = await zip.file('.requirements.zip').async('nodebuffer'); @@ -197,7 +201,7 @@ const brokenOn = (...platforms) => platforms.indexOf(process.platform) != -1; test( 'default pythonBin can package flask with default options', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -212,7 +216,7 @@ test( test( 'py3.6 packages have the same hash', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -231,7 +235,7 @@ test( test( 'py3.6 can package flask with default options', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -246,14 +250,14 @@ test( test( 'py3.6 can package flask with hashes', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ `--pythonBin=${getPythonBin(3)}`, '--fileName=requirements-w-hashes.txt', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is 
packaged'); @@ -264,14 +268,14 @@ test( test( 'py3.6 can package flask with nested', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ `--pythonBin=${getPythonBin(3)}`, '--fileName=requirements-w-nested.txt', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); @@ -283,7 +287,7 @@ test( test( 'py3.6 can package flask with zip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -308,7 +312,7 @@ test( test( 'py3.6 can package flask with slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -316,12 +320,13 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -331,7 +336,7 @@ test( test( 'py3.6 can package flask with slim & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -340,12 +345,12 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -356,7 +361,7 @@ test( test( "py3.6 doesn't package bottle with noDeploy option", - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -365,7 +370,7 @@ test( '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + 'serverless.yml', ]); sls([`--pythonBin=${getPythonBin(3)}`, 'package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -378,14 +383,14 @@ test( test( 'py3.6 can package boto3 with editable', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ `--pythonBin=${getPythonBin(3)}`, '--fileName=requirements-w-editable.txt', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -400,7 +405,7 @@ test( test( 'py3.6 can package flask with dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -416,7 +421,7 @@ test( test( 'py3.6 can package flask with slim & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -424,12 +429,13 @@ test( const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], '*.pyc files are NOT packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -439,7 +445,7 @@ test( test( 'py3.6 can package flask with slim & dockerizePip & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -448,12 +454,12 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], '*.pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -464,7 +470,7 @@ test( test( 'py3.6 can package flask with zip & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -497,7 +503,7 @@ test( test( 'py3.6 can package flask with zip & slim & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -530,7 +536,7 @@ test( test( 'py2.7 can package flask with default options', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -545,7 +551,7 @@ test( test( 'py2.7 can package flask with slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -553,17 +559,18 @@ test( `--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', '--slim=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -573,7 +580,7 @@ test( test( 'py2.7 can package flask with zip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -581,7 +588,7 @@ test( `--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', '--zip=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( @@ -603,7 +610,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -614,17 +621,17 @@ test( '--runtime=python2.7', '--dockerizePip=true', '--slim=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], '*.pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -635,7 +642,7 @@ test( test( "py2.7 doesn't package bottle with noDeploy option", - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -644,7 +651,7 @@ test( '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + 'serverless.yml', ]); sls([`--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', 'package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -657,7 +664,7 @@ test( test( 'py2.7 can package flask with zip & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -666,7 +673,7 @@ test( '--runtime=python2.7', '--dockerizePip=true', '--zip=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -696,7 +703,7 @@ test( test( 'py2.7 can package flask with zip & slim & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -706,7 +713,7 @@ test( '--dockerizePip=true', '--zip=true', '--slim=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -736,7 +743,7 @@ test( test( 'py2.7 can package flask with dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -744,7 +751,7 @@ test( `--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', '--dockerizePip=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -757,7 +764,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -766,17 +773,18 @@ test( '--runtime=python2.7', '--dockerizePip=true', '--slim=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], '*.pyc files are NOT packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -786,7 +794,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -797,17 +805,17 @@ test( '--runtime=python2.7', '--dockerizePip=true', '--slim=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => 
filename.endsWith('.pyc')), [], '*.pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -818,7 +826,7 @@ test( test( 'pipenv py3.6 can package flask with default options', - async t => { + async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -837,7 +845,7 @@ test( test( 'pipenv py3.6 can package flask with slim option', - async t => { + async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -845,12 +853,13 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -860,7 +869,7 @@ test( test( 'pipenv py3.6 can package flask with slim & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -870,12 +879,12 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -886,7 +895,7 @@ test( test( 'pipenv py3.6 can package flask with zip option', - async t => { + async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -911,7 +920,7 @@ test( test( "pipenv py3.6 doesn't package bottle with noDeploy option", - async t => { + async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -920,7 +929,7 @@ test( '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + 'serverless.yml', ]); sls(['package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -933,7 +942,7 @@ test( test( 'non build pyproject.toml uses requirements.txt', - async t => { + async (t) => { process.chdir('tests/non_build_pyproject'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -948,7 +957,7 @@ test( test( 'non poetry pyproject.toml without requirements.txt packages handler only', - async t => { + async (t) => { process.chdir('tests/non_poetry_pyproject'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -962,7 +971,7 @@ test( test( 'poetry py3.6 can package flask with default options', - async t => { + async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -978,7 +987,7 @@ test( test( 'poetry py3.6 can package flask with slim option', - async t => { + async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -986,12 +995,13 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -1001,7 +1011,7 @@ test( test( 'poetry py3.6 can package flask with slim & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -1011,12 +1021,12 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1027,7 +1037,7 @@ test( test( 'poetry py3.6 can package flask with zip option', - async t => { + async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1052,7 +1062,7 @@ test( test( "poetry py3.6 doesn't package bottle with noDeploy option", - async t => { + async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1061,7 +1071,7 @@ test( '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + 'serverless.yml', ]); sls(['package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -1074,7 +1084,7 @@ test( test( 'py3.6 can package flask with zip option and no explicit include', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1101,7 +1111,7 @@ test( test( 'py3.6 can package lambda-decorators using vendor option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1120,7 +1130,7 @@ test( test( "Don't nuke execute perms", - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); const perm = '755'; @@ -1131,7 +1141,7 @@ test( '-i.bak', '-e', 's/(handler.py.*$)/$1\n - foobar/', - 'serverless.yml' + 'serverless.yml', ]); writeFileSync(`foobar`, ''); chmodSync(`foobar`, perm); @@ -1169,7 +1179,7 @@ test( test( 'py3.6 can package flask in a project with a space in it', - async t => { + async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); @@ -1185,7 +1195,7 @@ test( test( 'py3.6 can package flask in a project with a space in it with docker', - async t => { + async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); @@ -1201,7 +1211,7 @@ test( test( 'py3.6 supports custom file name with fileName option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); writeFileSync('puck', 'requests'); @@ -1227,7 +1237,7 @@ test( test( "py3.6 doesn't package bottle with zip option", - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', 
path]); @@ -1236,7 +1246,7 @@ test( '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + 'serverless.yml', ]); sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -1270,7 +1280,7 @@ test( test( 'py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1280,11 +1290,11 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1295,7 +1305,7 @@ test( test( 'py3.6 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1304,17 +1314,17 @@ test( '--dockerizePip=true', '--slim=true', '--slimPatternsAppendDefaults=false', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1325,7 +1335,7 @@ test( test( 'py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1334,17 +1344,17 @@ test( '--runtime=python2.7', '--slim=true', '--slimPatternsAppendDefaults=false', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1355,7 +1365,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1365,16 +1375,16 @@ test( '--runtime=python2.7', '--slim=true', '--slimPatternsAppendDefaults=false', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); 
t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1385,7 +1395,7 @@ test( test( 'pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', - async t => { + async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1395,11 +1405,11 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1410,7 +1420,7 @@ test( test( 'poetry py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', - async t => { + async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1420,11 +1430,11 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1435,7 +1445,7 @@ test( test( 'py3.6 can package flask with package individually option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1520,11 +1530,9 @@ test( { skip: !hasPython(3.6) } ); - - test( 'py3.6 can package flask with package individually & slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1536,7 +1544,7 @@ test( 'handler.py is packaged in function hello' ); t.deepEqual( - zipfiles_hello.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello' ); @@ -1555,7 +1563,7 @@ test( 'handler.py is packaged in function hello2' ); t.deepEqual( - zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello2' ); @@ -1574,7 +1582,7 @@ test( 'handler.py is packaged in function hello3' ); t.deepEqual( - zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello3' ); @@ -1599,7 +1607,7 @@ test( 'flask is NOT packaged in function hello4' ); t.deepEqual( - zipfiles_hello4.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello4.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function 
hello4' ); @@ -1611,7 +1619,7 @@ test( test( 'py2.7 can package flask with package individually option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1682,7 +1690,7 @@ test( test( 'py2.7 can package flask with package individually & slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1690,7 +1698,7 @@ test( '--individually=true', '--runtime=python2.7', '--slim=true', - 'package' + 'package', ]); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); @@ -1699,7 +1707,7 @@ test( 'handler.py is packaged in function hello' ); t.deepEqual( - zipfiles_hello.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello' ); @@ -1718,7 +1726,7 @@ test( 'handler.py is packaged in function hello2' ); t.deepEqual( - zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello2' ); @@ -1737,7 +1745,7 @@ test( 'handler.py is packaged in function hello3' ); t.deepEqual( - zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello3' ); @@ -1773,7 +1781,7 @@ test( test( 'py2.7 can ignore functions defined with `image`', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1807,7 +1815,7 @@ test( test( 'py3.6 can package only requirements of module', - async t => { + async (t) => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1868,7 +1876,7 @@ test( test( 'py3.6 can package lambda-decorators using vendor and invidiually option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1950,7 +1958,7 @@ test( test( "Don't nuke execute perms when using individually", - async t => { + async (t) => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); const perm = '755'; @@ -1974,8 +1982,9 @@ test( const zipfiles_hello2 = await listZipFilesWithMetaData( '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' ); - const flaskPerm = statSync('.serverless/module2/requirements/bin/flask') - .mode; + const flaskPerm = statSync( + '.serverless/module2/requirements/bin/flask' + ).mode; t.true( zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, @@ -1989,7 +1998,7 @@ test( test( "Don't nuke execute perms when using individually w/docker", - async t => { + async (t) => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); const perm = '755'; @@ -2013,8 +2022,9 @@ test( const zipfiles_hello2 = await listZipFilesWithMetaData( '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' ); - const flaskPerm = statSync('.serverless/module2/requirements/bin/flask') - .mode; + const flaskPerm = statSync( + '.serverless/module2/requirements/bin/flask' + ).mode; t.true( zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, @@ -2028,7 +2038,7 @@ test( test( 'py3.6 uses download cache by default option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2045,7 +2055,7 @@ test( test( 'py3.6 uses download cache by default', - async t => { + 
async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2061,7 +2071,7 @@ test( test( 'py3.6 uses download cache with dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2078,14 +2088,14 @@ test( test( 'py3.6 uses download cache with dockerizePip by default option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ '--dockerizePip=true', '--cacheLocation=.requirements-cache', - 'package' + 'package', ]); t.true( pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), @@ -2098,7 +2108,7 @@ test( test( 'py3.6 uses static and download cache', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2120,7 +2130,7 @@ test( test( 'py3.6 uses static and download cache with dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2142,7 +2152,7 @@ test( test( 'py3.6 uses static cache', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2180,7 +2190,7 @@ test( test( 'py3.6 uses static cache with cacheLocation option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2204,7 +2214,7 @@ test( test( 'py3.6 uses static cache with dockerizePip & slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2235,7 +2245,7 @@ test( "static cache is really used when running 'sls package' again" ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files are packaged' ); @@ -2247,7 +2257,7 @@ test( test( 'py3.6 uses download cache with dockerizePip & slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2261,7 +2271,7 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files are packaged' ); @@ -2273,13 +2283,12 @@ test( test( 'py3.6 can ignore functions defined with `image`', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls(['--individually=true', 'package']); - t.true( pathExistsSync('.serverless/hello.zip'), 'function hello is packaged' diff --git a/tests/base/_slimPatterns.yml b/tests/base/_slimPatterns.yml index 02c631b4..443af9a0 100644 --- a/tests/base/_slimPatterns.yml +++ b/tests/base/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/__main__.py" + - '**/__main__.py' diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 6bb1f322..0b360e9b 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -50,6 +50,3 @@ functions: - 'fn2/**' hello5: image: 000000000000.dkr.ecr.sa-east-1.amazonaws.com/test-lambda-docker@sha256:6bb600b4d6e1d7cf521097177dd0c4e9ea373edb91984a505333be8ac9455d38 - - - diff --git a/tests/pipenv/_slimPatterns.yml b/tests/pipenv/_slimPatterns.yml index 02c631b4..443af9a0 100644 --- 
a/tests/pipenv/_slimPatterns.yml +++ b/tests/pipenv/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/__main__.py" + - '**/__main__.py' diff --git a/tests/poetry/_slimPatterns.yml b/tests/poetry/_slimPatterns.yml index 02c631b4..443af9a0 100644 --- a/tests/poetry/_slimPatterns.yml +++ b/tests/poetry/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/__main__.py" + - '**/__main__.py' From 9b84abf826d95a5ad152d6a70de7c722770b467b Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 17:04:17 +0100 Subject: [PATCH 261/328] chore: Remove dependabot --- .github/dependabot.yml | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index ab487438..00000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,12 +0,0 @@ -version: 2 -updates: - - package-ecosystem: npm - directory: '/' - schedule: - interval: daily - time: '10:00' - open-pull-requests-limit: 10 - ignore: - - dependency-name: eslint - versions: - - '> 7.22.0' From 89b3bababd48d496e159c9694546b8ab18b2955b Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 29 Nov 2021 15:03:46 +0100 Subject: [PATCH 262/328] chore: Remove Node16 tests --- .github/workflows/integrate.yml | 63 +++--------------------------- .github/workflows/publish.yml | 4 +- .github/workflows/validate.yml | 69 ++++----------------------------- 3 files changed, 16 insertions(+), 120 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 64396542..953951df 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -10,59 +10,8 @@ env: FORCE_COLOR: 1 jobs: - linuxNode16: - name: '[Linux] Node.js v16: Unit tests' - runs-on: ubuntu-latest - strategy: - matrix: - python-version: [2.7, 3.6] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: npm-v16-${{ runner.os }}-${{ github.ref }}- - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 16.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv poetry - - - name: Install serverless - run: npm install -g serverless@2 - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test - - windowsNode16: - name: '[Windows] Node.js v16: Unit tests' + windowsNode14: + name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest strategy: matrix: @@ -78,8 +27,8 @@ jobs: path: | ~/.npm node_modules - key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: npm-v16-${{ runner.os }}-${{ github.ref }}- + key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -89,7 +38,7 @@ jobs: - name: Install Node.js and npm uses: 
actions/setup-node@v1 with: - node-version: 16.x + node-version: 14.x - name: Check python version run: | @@ -217,7 +166,7 @@ jobs: tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - needs: [linuxNode16, windowsNode16, linuxNode14, linuxNode12] + needs: [windowsNode14, linuxNode14, linuxNode12] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index b44da770..6eee5b45 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -26,12 +26,12 @@ jobs: path: | ~/.npm node_modules - key: npm-v16-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} + key: npm-v14-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} - name: Install Node.js and npm uses: actions/setup-node@v1 with: - node-version: 16.x + node-version: 14.x registry-url: https://registry.npmjs.org - name: Publish new version diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index d541cec7..9215eee1 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -10,8 +10,8 @@ env: FORCE_COLOR: 1 jobs: - linuxNode16: - name: '[Linux] Node.js v16: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' + linuxNode14: + name: '[Linux] Node.js v14: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' runs-on: ubuntu-latest strategy: matrix: @@ -38,10 +38,10 @@ jobs: path: | ~/.npm node_modules - key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} restore-keys: | - npm-v16-${{ runner.os }}-${{ github.ref }}- - npm-v16-${{ runner.os }}-refs/heads/master- + npm-v14-${{ runner.os }}-${{ github.ref }}- + npm-v14-${{ runner.os }}-refs/heads/master- - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -51,7 +51,7 @@ jobs: - name: Install Node.js and npm uses: actions/setup-node@v1 with: - node-version: 16.x + node-version: 14.x - name: Check python version run: | @@ -88,62 +88,9 @@ jobs: - name: Unit tests run: npm test - windowsNode16: - name: '[Windows] Node.js v16: Unit tests' + windowsNode14: + name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest - strategy: - matrix: - python-version: [2.7, 3.6] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: | - npm-v16-${{ runner.os }}-${{ github.ref }}- - npm-v16-${{ runner.os }}-refs/heads/master- - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 16.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv poetry - - - name: Install serverless - run: npm install -g serverless@2 - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test - - linuxNode14: - name: '[Linux] Node.js 14: Unit tests' - runs-on: 
ubuntu-latest strategy: matrix: python-version: [2.7, 3.6] From 328cb016e58231d3c72399918553c4b10d4aa6d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Wilczy=C5=84ski?= Date: Wed, 1 Dec 2021 20:13:59 +0100 Subject: [PATCH 263/328] feat: Add architecture to requirements cache directory name (#645) --- lib/pip.js | 3 ++- lib/shared.js | 7 +++++-- test.js | 35 +++++++++++++++++++++++++---------- 3 files changed, 32 insertions(+), 13 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index ce348532..7a0a0ceb 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -640,7 +640,8 @@ async function installRequirementsIfNeeded( const workingReqsFolder = getRequirementsWorkingPath( reqChecksum, requirementsTxtDirectory, - options + options, + serverless ); // Check if our static cache is present and is valid diff --git a/lib/shared.js b/lib/shared.js index 7baee58b..426d6c50 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -62,17 +62,20 @@ function checkForAndDeleteMaxCacheVersions({ serverless, options, log }) { * @param {string} subfolder * @param {string} servicePath * @param {Object} options + * @param {Object} serverless * @return {string} */ function getRequirementsWorkingPath( subfolder, requirementsTxtDirectory, - options + options, + serverless ) { // If we want to use the static cache if (options && options.useStaticCache) { if (subfolder) { - subfolder = subfolder + '_slspyc'; + const architecture = serverless.service.provider.architecture || 'x86_64'; + subfolder = `${subfolder}_${architecture}_slspyc`; } // If we have max number of cache items... diff --git a/test.js b/test.js index f234229a..ccd1920c 100644 --- a/test.js +++ b/test.js @@ -2115,12 +2115,15 @@ test( sls(['package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), 'http exists in download-cache' ); t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.end(); @@ -2137,12 +2140,15 @@ test( sls(['--dockerizePip=true', 'package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), 'http exists in download-cache' ); t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.end(); @@ -2159,20 +2165,23 @@ test( sls(['package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.true( pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` ), '.completed_requirements exists in static-cache' ); // py3.6 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, + 
`${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); sls(['package']); @@ -2197,13 +2206,16 @@ test( const cachepath = '.requirements-cache'; sls([`--cacheLocation=${cachepath}`, 'package']); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.true( pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` ), '.completed_requirements exists in static-cache' ); @@ -2221,20 +2233,23 @@ test( sls(['--dockerizePip=true', '--slim=true', 'package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.true( pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` ), '.completed_requirements exists in static-cache' ); // py3.6 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); sls(['--dockerizePip=true', '--slim=true', 'package']); From 347245cc5a7d97c56dc32f0da4285e0be59cf535 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 30 Nov 2021 16:57:55 +0100 Subject: [PATCH 264/328] chore: Add `standard-version` config --- package.json | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/package.json b/package.json index 4db9497a..6a9923c4 100644 --- a/package.json +++ b/package.json @@ -55,6 +55,7 @@ "github-release-from-cc-changelog": "^2.2.0", "lodash": "^4.17.21", "prettier": "^2", + "standard-version": "^9.3.2", "tape": "*", "tape-promise": "*" }, @@ -100,6 +101,30 @@ "no-console": "off" } }, + "standard-version": { + "skip": { + "commit": true, + "tag": true + }, + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "perf", + "section": "Performance Improvements" + }, + { + "type": "refactor", + "section": "Maintenance Improvements" + } + ] + }, "prettier": { "semi": true, "singleQuote": true From e3d9ebcdd3ec72cc2e3301d33e10dd10ef6a594d Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 30 Nov 2021 17:01:53 +0100 Subject: [PATCH 265/328] chore: Release v5.2.1 --- CHANGELOG.md | 14 ++++++++++++++ package.json | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5c79a3c3..bd87ce72 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,17 @@ # Changelog All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +### [5.2.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.0...v5.2.1) (2021-11-30) + +### Maintenance Improvements + +- Adapt plugin to modern logs ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([8ff97e6](https://github.com/UnitedIncome/serverless-python-requirements/commit/8ff97e6b7c279334e417dbdb65e64d0de2656986)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Adapt to `async` version of `spawn` ([#648](https://github.com/serverless/serverless-python-requirements/pull/648)) ([50c2850](https://github.com/UnitedIncome/serverless-python-requirements/commit/50c2850874ded795fd50ae377f1db817a0212e7d)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Adapt v3 log writing interfaces ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([a79899a](https://github.com/UnitedIncome/serverless-python-requirements/commit/a79899ae5f6f66aa0c65e7fda8e0186d38ff446e)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Ensure proper verbose progress logs ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([44b9591](https://github.com/UnitedIncome/serverless-python-requirements/commit/44b9591f01157a1811e3ca8b43e21265a155a976)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Use `ServerlessError` ([#649](https://github.com/serverless/serverless-python-requirements/pull/649)) ([cdb7111](https://github.com/UnitedIncome/serverless-python-requirements/commit/cdb71110bc9c69b5087b6e18fb353d65962afe4a)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. diff --git a/package.json b/package.json index 6a9923c4..c6a1f5e6 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.2.0", + "version": "5.2.1", "engines": { "node": ">=12.0" }, From f60eed1225f091c090f9c253771a12b33fafcab0 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 2 Dec 2021 20:57:51 +0100 Subject: [PATCH 266/328] fix: Ensure cast `toString` before `trim` on buffer --- lib/docker.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/docker.js b/lib/docker.js index 5157803f..a3079ff7 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -103,12 +103,12 @@ async function tryBindPath(bindPath, testFile, pluginInstance) { const ps = await dockerCommand(options, pluginInstance); if (debug) { if (log) { - log.debug(ps.stdoutBuffer.trim()); + log.debug(ps.stdoutBuffer.toString().trim()); } else { - serverless.cli.log(ps.stdoutBuffer.trim()); + serverless.cli.log(ps.stdoutBuffer.toString().trim()); } } - return ps.stdoutBuffer.trim() === `/test/${testFile}`; + return ps.stdoutBuffer.toString().trim() === `/test/${testFile}`; } catch (err) { if (debug) { if (log) { @@ -197,7 +197,7 @@ async function getDockerUid(bindPath, pluginInstance) { '/bin/sh', ]; const ps = await dockerCommand(options, pluginInstance); - return ps.stdoutBuffer.trim(); + return ps.stdoutBuffer.toString().trim(); } module.exports = { buildImage, getBindPath, getDockerUid }; From c4808770713f451dcaa907fb40fcd34414bf7de7 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 3 Dec 2021 13:49:37 +0100 Subject: [PATCH 267/328] chore: Release v5.2.2 --- CHANGELOG.md | 6 ++++++ package.json | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
bd87ce72..47570012 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +### [5.2.2](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.2.2) (2021-12-03) + +### Bug Fixes + +- Ensure cast `toString` before `trim` on buffer ([#656](https://github.com/serverless/serverless-python-requirements/pull/656)) ([f60eed1](https://github.com/UnitedIncome/serverless-python-requirements/commit/f60eed1225f091c090f9c253771a12b33fafcab0)) ([Piotr Grzesik](https://github.com/pgrzesik)) + ### [5.2.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.0...v5.2.1) (2021-11-30) ### Maintenance Improvements diff --git a/package.json b/package.json index c6a1f5e6..44554659 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.2.1", + "version": "5.2.2", "engines": { "node": ">=12.0" }, From 406f6bac1ca934a34387048b5c00242aff3f581b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Wilczy=C5=84ski?= Date: Mon, 20 Dec 2021 18:06:23 +0100 Subject: [PATCH 268/328] feat: Support requirements layer caching (#644) --- lib/layer.js | 47 ++++++++++++++++++++++++++++++++++++++++++----- lib/shared.js | 21 +++++++++++++++++++++ 2 files changed, 63 insertions(+), 5 deletions(-) diff --git a/lib/layer.js b/lib/layer.js index fe2a4a00..6fe9ca4c 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -3,6 +3,7 @@ const fse = require('fs-extra'); const path = require('path'); const JSZip = require('jszip'); const { writeZip, addTree } = require('./zipTree'); +const { sha256Path, getRequirementsLayerPath } = require('./shared'); BbPromise.promisifyAll(fse); @@ -11,13 +12,49 @@ BbPromise.promisifyAll(fse); * @return {Promise} the JSZip object constructed. 
*/ function zipRequirements() { - const rootZip = new JSZip(); const src = path.join('.serverless', 'requirements'); - const runtimepath = 'python'; - - return addTree(rootZip.folder(runtimepath), src).then(() => - writeZip(rootZip, path.join('.serverless', 'pythonRequirements.zip')) + const reqChecksum = sha256Path(path.join('.serverless', 'requirements.txt')); + const targetZipPath = path.join('.serverless', 'pythonRequirements.zip'); + const zipCachePath = getRequirementsLayerPath( + reqChecksum, + targetZipPath, + this.options, + this.serverless ); + + const promises = []; + if (fse.existsSync(zipCachePath)) { + let layerProgress; + if (this.progress && this.log) { + layerProgress = this.progress.get('python-layer-requirements'); + layerProgress.update( + 'Using cached Python Requirements Lambda Layer file' + ); + this.log.info('Found cached Python Requirements Lambda Layer file'); + } else { + this.serverless.cli.log( + 'Found cached Python Requirements Lambda Layer file' + ); + } + } else { + const rootZip = new JSZip(); + const runtimepath = 'python'; + + promises.push( + addTree(rootZip.folder(runtimepath), src).then(() => + writeZip(rootZip, zipCachePath) + ) + ); + } + return BbPromise.all(promises).then(() => { + if (zipCachePath !== targetZipPath) { + if (process.platform === 'win32') { + fse.copySync(zipCachePath, targetZipPath); + } else { + fse.symlink(zipCachePath, targetZipPath, 'file'); + } + } + }); } /** diff --git a/lib/shared.js b/lib/shared.js index 426d6c50..bebb3f09 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -86,6 +86,26 @@ function getRequirementsWorkingPath( return path.join(requirementsTxtDirectory, 'requirements'); } +/** + * Path of a cached requirements layer archive file + * @param {string} subfolder + * @param {string} fallback + * @param {Object} options + * @param {Object} serverless + * @return {string} + */ +function getRequirementsLayerPath(hash, fallback, options, serverless) { + // If we want to use the static cache + if (hash && options && options.useStaticCache) { + const architecture = serverless.service.provider.architecture || 'x86_64'; + hash = `${hash}_${architecture}_slspyc.zip`; + return path.join(getUserCachePath(options), hash); + } + + // If we don't want to use the static cache, then fallback to requirements file in .serverless directory + return fallback; +} + /** * The static cache path that will be used for this system + options, used if static cache is enabled * @param {Object} options @@ -117,6 +137,7 @@ function sha256Path(fullpath) { module.exports = { checkForAndDeleteMaxCacheVersions, getRequirementsWorkingPath, + getRequirementsLayerPath, getUserCachePath, sha256Path, }; From 6730d7e383ab2d0a2bb55b42c4713fdf12877690 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 21 Dec 2021 14:37:27 +0100 Subject: [PATCH 269/328] chore: Update `serverless` peer dependency --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 44554659..39afe62e 100644 --- a/package.json +++ b/package.json @@ -77,7 +77,7 @@ "shell-quote": "^1.7.3" }, "peerDependencies": { - "serverless": "^2.32" + "serverless": "^2.32 || 3" }, "lint-staged": { "*.js": [ From 95c694f9e64dfb4f59ac6e6da4dec83c1d572c5c Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 21 Dec 2021 14:38:59 +0100 Subject: [PATCH 270/328] chore: Release v5.3.0 --- CHANGELOG.md | 10 ++++++++++ package.json | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
47570012..805a42e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,16 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [5.3.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.3.0) (2021-12-21) + +### Features + +- Support requirements layer caching ([#644](https://github.com/UnitedIncome/serverless-python-requirements/issues/644)) ([406f6ba](https://github.com/UnitedIncome/serverless-python-requirements/commit/406f6bac1ca934a34387048b5c00242aff3f581b)) ([Maciej Wilczyński](https://github.com/mLupine)) + +### Bug Fixes + +- Ensure cast `toString` before `trim` on buffer ([f60eed1](https://github.com/UnitedIncome/serverless-python-requirements/commit/f60eed1225f091c090f9c253771a12b33fafcab0)) + ### [5.2.2](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.2.2) (2021-12-03) ### Bug Fixes diff --git a/package.json b/package.json index 39afe62e..a7e2b2a1 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.2.2", + "version": "5.3.0", "engines": { "node": ">=12.0" }, From b16c82dbdd31ca7f61093bb6b8ed50be31908a24 Mon Sep 17 00:00:00 2001 From: Shinichi Makino Date: Wed, 26 Jan 2022 19:13:04 +0900 Subject: [PATCH 271/328] fix: Address unknown path format error in `wsl2` (#667) --- lib/docker.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/docker.js b/lib/docker.js index a3079ff7..68cf935b 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -2,6 +2,7 @@ const spawn = require('child-process-ext/spawn'); const isWsl = require('is-wsl'); const fse = require('fs-extra'); const path = require('path'); +const os = require('os'); /** * Helper function to run a docker command @@ -129,7 +130,8 @@ async function tryBindPath(bindPath, testFile, pluginInstance) { */ async function getBindPath(servicePath, pluginInstance) { // Determine bind path - if (process.platform !== 'win32' && !isWsl) { + let isWsl1 = isWsl && !os.release().includes('microsoft-standard'); + if (process.platform !== 'win32' && !isWsl1) { return servicePath; } From 1668cbc2a91f9e4b84024a9609877631aa3d71aa Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 28 Jan 2022 15:12:40 +0100 Subject: [PATCH 272/328] chore: Release v5.3.1 --- CHANGELOG.md | 6 ++++++ package.json | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 805a42e5..bbe979bf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+### [5.3.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.0...v5.3.1) (2022-01-28) + +### Bug Fixes + +- Address unknown path format error in `wsl2` ([#667](https://github.com/UnitedIncome/serverless-python-requirements/issues/667)) ([b16c82d](https://github.com/UnitedIncome/serverless-python-requirements/commit/b16c82dbdd31ca7f61093bb6b8ed50be31908a24)) ([Shinichi Makino](https://github.com/snicmakino)) + ## [5.3.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.3.0) (2021-12-21) ### Features diff --git a/package.json b/package.json index a7e2b2a1..50ef2246 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.3.0", + "version": "5.3.1", "engines": { "node": ">=12.0" }, From ec84948747a2fe578f7f112b443a3710e6202b42 Mon Sep 17 00:00:00 2001 From: Marcin Szleszynski <64603095+martinezpl@users.noreply.github.com> Date: Sun, 13 Feb 2022 15:30:22 +0100 Subject: [PATCH 273/328] test: Refactor tests to use `env` instead of `opt` vars (#672) --- test.js | 360 +++++++++++++++--------------- tests/base/serverless.yml | 22 +- tests/individually/serverless.yml | 2 +- tests/pipenv/serverless.yml | 8 +- tests/poetry/serverless.yml | 8 +- 5 files changed, 196 insertions(+), 204 deletions(-) diff --git a/test.js b/test.js index ccd1920c..11a7cce5 100644 --- a/test.js +++ b/test.js @@ -22,15 +22,15 @@ const initialWorkingDir = process.cwd(); const mkCommand = (cmd) => (args, options = {}) => { + options['env'] = Object.assign( + { SLS_DEBUG: 't' }, + process.env, + options['env'] + ); const { error, stdout, stderr, status } = crossSpawn.sync( cmd, args, - Object.assign( - { - env: Object.assign({}, process.env, { SLS_DEBUG: 't' }), - }, - options - ) + options ); if (error) { console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console @@ -45,6 +45,7 @@ const mkCommand = } return stdout && stdout.toString().trim(); }; + const sls = mkCommand('sls'); const git = mkCommand('git'); const npm = mkCommand('npm'); @@ -205,7 +206,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -220,9 +221,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const fileHash = sha256File('.serverless/sls-py-req-test.zip'); - sls(['package']); + sls(['package'], { env: {} }); t.equal( sha256File('.serverless/sls-py-req-test.zip'), fileHash, @@ -239,7 +240,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, 'package']); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -254,11 +255,12 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-hashes.txt', - 'package', - ]); + sls(['package'], { + env: { + fileName: 'requirements-w-hashes.txt', + 
pythonBin: getPythonBin(3), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.end(); @@ -272,11 +274,12 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-nested.txt', - 'package', - ]); + sls(['package'], { + env: { + fileName: 'requirements-w-nested.txt', + pythonBin: getPythonBin(3), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -291,7 +294,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -316,7 +319,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--slim=true', 'package']); + sls(['package'], { env: { slim: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -341,7 +344,7 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', 'package']); + sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -372,7 +375,7 @@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml', ]); - sls([`--pythonBin=${getPythonBin(3)}`, 'package']); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); @@ -387,11 +390,12 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-editable.txt', - 'package', - ]); + sls(['package'], { + env: { + fileName: 'requirements-w-editable.txt', + pythonBin: getPythonBin(3), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.true( @@ -409,8 +413,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -425,7 +428,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -450,7 +453,7 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -474,8 +477,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--zip=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -507,8 +509,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--zip=true', '--slim=true', 'package']); - + sls(['package'], { + env: { dockerizePip: 'true', zip: 'true', slim: 'true' }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -540,7 +543,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', 'package']); + sls(['package'], { + env: { runtime: 'python2.7', pythonBin: getPythonBin(2) }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -555,12 +560,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--slim=true', - 'package', - ]); + sls(['package'], { + env: { runtime: 'python2.7', slim: 'true', pythonBin: getPythonBin(2) }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -584,12 +586,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--zip=true', - 'package', - ]); + sls(['package'], { + env: { runtime: 'python2.7', zip: 'true', pythonBin: getPythonBin(2) }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -616,13 +615,14 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--slim=true', - 'package', - ]); + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + slim: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -653,7 +653,9 @@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml', ]); - sls([`--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', 'package']); + sls(['package'], { + env: { 
runtime: 'python2.7', pythonBin: getPythonBin(2) }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); @@ -668,14 +670,14 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--zip=true', - 'package', - ]); - + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + zip: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -707,15 +709,15 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--zip=true', - '--slim=true', - 'package', - ]); - + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + zip: 'true', + slim: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -747,13 +749,13 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - 'package', - ]); - + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -768,13 +770,14 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--slim=true', - 'package', - ]); + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + slim: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -800,13 +803,14 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--slim=true', - 'package', - ]); + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + slim: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -830,7 +834,7 @@ test( process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -849,7 +853,7 @@ test( process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', 
'package']); + sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -875,7 +879,7 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', 'package']); + sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -899,7 +903,7 @@ test( process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -931,7 +935,7 @@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml', ]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); @@ -946,7 +950,7 @@ test( process.chdir('tests/non_build_pyproject'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -961,7 +965,7 @@ test( process.chdir('tests/non_poetry_pyproject'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); t.end(); @@ -975,7 +979,7 @@ test( process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); @@ -991,7 +995,7 @@ test( process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', 'package']); + sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -1017,7 +1021,7 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', 'package']); + sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -1041,7 +1045,7 @@ test( process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -1073,7 +1077,7 
@@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml', ]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); @@ -1090,7 +1094,7 @@ test( npm(['i', path]); perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); - sls(['--zip=true', 'package']); + sls(['package'], { env: { zip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -1115,7 +1119,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--vendor=./vendor`, 'package']); + sls(['package'], { env: { vendor: './vendor' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -1145,8 +1149,7 @@ test( ]); writeFileSync(`foobar`, ''); chmodSync(`foobar`, perm); - sls(['--vendor=./vendor', 'package']); - + sls(['package'], { env: { vendor: './vendor' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -1184,7 +1187,7 @@ test( process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -1200,7 +1203,7 @@ test( process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -1216,7 +1219,7 @@ test( const path = npm(['pack', '../..']); writeFileSync('puck', 'requests'); npm(['i', path]); - sls(['--fileName=puck', 'package']); + sls(['package'], { env: { fileName: 'puck' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes(`requests${sep}__init__.py`), @@ -1248,7 +1251,7 @@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml', ]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -1285,8 +1288,9 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); - + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1310,13 +1314,13 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--dockerizePip=true', - '--slim=true', - '--slimPatternsAppendDefaults=false', - 'package', - ]); - + sls(['package'], { + env: { + dockerizePip: 'true', + slim: 'true', + slimPatternsAppendDefaults: 'false', + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1340,13 +1344,13 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--runtime=python2.7', - '--slim=true', - '--slimPatternsAppendDefaults=false', - 'package', - ]); - + sls(['package'], { + env: { + runtime: 'python2.7', + slim: 'true', + slimPatternsAppendDefaults: 'false', + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1370,13 +1374,14 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--dockerizePip=true', - '--runtime=python2.7', - '--slim=true', - '--slimPatternsAppendDefaults=false', - 'package', - ]); + sls(['package'], { + env: { + dockerizePip: 'true', + runtime: 'python2.7', + slim: 'true', + slimPatternsAppendDefaults: 'false', + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1401,7 +1406,9 @@ test( const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1426,7 +1433,9 @@ test( const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1449,8 +1458,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', 'package']); - + sls(['package'], { env: { individually: 'true' } }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.false( zipfiles_hello.includes(`fn2${sep}__init__.py`), @@ -1536,8 +1544,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', '--slim=true', 'package']); - + sls(['package'], { env: { individually: 'true', slim: 'true' } }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.true( zipfiles_hello.includes('handler.py'), @@ -1623,8 +1630,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', '--runtime=python2.7', 'package']); - + sls(['package'], { env: { individually: 'true', runtime: 'python2.7' } }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.true( 
zipfiles_hello.includes('handler.py'), @@ -1694,13 +1700,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--individually=true', - '--runtime=python2.7', - '--slim=true', - 'package', - ]); - + sls(['package'], { + env: { individually: 'true', runtime: 'python2.7', slim: 'true' }, + }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.true( zipfiles_hello.includes('handler.py'), @@ -1785,8 +1787,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', '--runtime=python2.7', 'package']); - + sls(['package'], { env: { individually: 'true', runtime: 'python2.7' } }); t.true( pathExistsSync('.serverless/hello.zip'), 'function hello is packaged' @@ -1819,8 +1820,7 @@ test( process.chdir('tests/individually'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); - + sls(['package'], { env: {} }); const zipfiles_hello = await listZipFiles( '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' ); @@ -1880,8 +1880,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', '--vendor=./vendor', 'package']); - + sls(['package'], { env: { individually: 'true', vendor: './vendor' } }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.true( zipfiles_hello.includes('handler.py'), @@ -1966,8 +1965,7 @@ test( chmodSync(`module1${sep}foobar`, perm); npm(['i', path]); - sls(['package']); - + sls(['package'], { env: {} }); const zipfiles_hello1 = await listZipFilesWithMetaData( '.serverless/hello1.zip' ); @@ -2006,8 +2004,7 @@ test( chmodSync(`module1${sep}foobar`, perm); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles_hello = await listZipFilesWithMetaData( '.serverless/hello1.zip' ); @@ -2042,7 +2039,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), @@ -2059,7 +2056,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--cacheLocation=.requirements-cache', 'package']); + sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); t.true( pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), 'cache directory exists' @@ -2075,7 +2072,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true' } }); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), @@ -2092,11 +2089,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--dockerizePip=true', - '--cacheLocation=.requirements-cache', - 'package', - ]); + sls(['package'], { + env: { dockerizePip: 'true', cacheLocation: '.requirements-cache' }, + }); t.true( pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), 'cache directory exists' @@ -2112,7 +2107,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const cachepath = getUserCachePath(); const cacheFolderHash = 
sha256Path('.serverless/requirements.txt'); const arch = 'x86_64'; @@ -2137,7 +2132,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true' } }); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); const arch = 'x86_64'; @@ -2162,7 +2157,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); const arch = 'x86_64'; @@ -2184,8 +2179,7 @@ test( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); - sls(['package']); - + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('injected_file_is_bad_form'), @@ -2204,7 +2198,7 @@ test( const path = npm(['pack', '../..']); npm(['i', path]); const cachepath = '.requirements-cache'; - sls([`--cacheLocation=${cachepath}`, 'package']); + sls(['package'], { env: { cacheLocation: cachepath } }); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); const arch = 'x86_64'; t.true( @@ -2230,7 +2224,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); const arch = 'x86_64'; @@ -2252,8 +2246,7 @@ test( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); - sls(['--dockerizePip=true', '--slim=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('injected_file_is_bad_form'), @@ -2276,7 +2269,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), @@ -2302,8 +2295,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', 'package']); - + sls(['package'], { env: { individually: 'true' } }); t.true( pathExistsSync('.serverless/hello.zip'), 'function hello is packaged' diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 0b360e9b..37238158 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -2,22 +2,22 @@ service: sls-py-req-test provider: name: aws - runtime: ${opt:runtime, 'python3.6'} + runtime: ${env:runtime, 'python3.6'} plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} - slim: ${opt:slim, self:custom.defaults.slim} + zip: ${env:zip, self:custom.defaults.zip} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + slim: ${env:slim, self:custom.defaults.slim} slimPatterns: 
${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} - slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} - vendor: ${opt:vendor, ''} - fileName: ${opt:fileName, 'requirements.txt'} - useStaticCache: ${opt:useStaticCache, self:custom.defaults.useStaticCache} - useDownloadCache: ${opt:useDownloadCache, self:custom.defaults.useDownloadCache} - cacheLocation: ${opt:cacheLocation, ''} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + vendor: ${env:vendor, ''} + fileName: ${env:fileName, 'requirements.txt'} + useStaticCache: ${env:useStaticCache, self:custom.defaults.useStaticCache} + useDownloadCache: ${env:useDownloadCache, self:custom.defaults.useDownloadCache} + cacheLocation: ${env:cacheLocation, ''} defaults: slim: false slimPatterns: false @@ -29,7 +29,7 @@ custom: useDownloadCache: true package: - individually: ${opt:individually, self:custom.defaults.individually} + individually: ${env:individually, self:custom.defaults.individually} patterns: - '!**/*' - 'handler.py' diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index 121bd89d..a83ac7e0 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -10,7 +10,7 @@ package: - '!node_modules/**' custom: pythonRequirements: - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} defaults: dockerizePip: false diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index dd93e290..4b343bfc 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -8,11 +8,11 @@ plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - slim: ${opt:slim, self:custom.defaults.slim} + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} - slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} defaults: zip: false slimPatterns: false diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml index dd93e290..4b343bfc 100644 --- a/tests/poetry/serverless.yml +++ b/tests/poetry/serverless.yml @@ -8,11 +8,11 @@ plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - slim: ${opt:slim, self:custom.defaults.slim} + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} - slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} defaults: zip: false slimPatterns: false From 769bc820eed1f65c8ae41ccfa74749c650560e6a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: 
Sun, 27 Feb 2022 22:26:21 +0100 Subject: [PATCH 274/328] ci: Upgrade `setup-python` github action --- .github/workflows/integrate.yml | 6 +++--- .github/workflows/validate.yml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 953951df..0d77acfc 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -31,7 +31,7 @@ jobs: restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} @@ -82,7 +82,7 @@ jobs: restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} @@ -133,7 +133,7 @@ jobs: restore-keys: npm-v12-${{ runner.os }}-${{ github.ref }}- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 9215eee1..b4c245f5 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -44,7 +44,7 @@ jobs: npm-v14-${{ runner.os }}-refs/heads/master- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} @@ -111,7 +111,7 @@ jobs: npm-v14-${{ runner.os }}-refs/heads/master- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} @@ -164,7 +164,7 @@ jobs: npm-v12-${{ runner.os }}-refs/heads/master- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} From ff11497cbcf42fe7f7d73fb2e8e2642c542dd8d7 Mon Sep 17 00:00:00 2001 From: Andrei Zhemaituk Date: Mon, 28 Feb 2022 07:10:15 -0500 Subject: [PATCH 275/328] refactor: Log child process command output on error (#679) --- lib/pip.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lib/pip.js b/lib/pip.js index 7a0a0ceb..79dec42a 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -415,6 +415,13 @@ async function installRequirements(targetFolder, pluginInstance) { 'PYTHON_REQUIREMENTS_COMMAND_NOT_FOUND' ); } + if (log) { + log.info(`Stdout: ${e.stdoutBuffer}`); + log.info(`Stderr: ${e.stderrBuffer}`); + } else { + serverless.cli.log(`Stdout: ${e.stdoutBuffer}`); + serverless.cli.log(`Stderr: ${e.stderrBuffer}`); + } throw e; } } From 3edf0e0cabeeb11ffadd9dcac6f198f22aee4a16 Mon Sep 17 00:00:00 2001 From: Marc Hassan Date: Wed, 2 Mar 2022 07:11:36 -0500 Subject: [PATCH 276/328] refactor: Replace `lodash.set` with `set-value` (#676) --- lib/inject.js | 2 +- lib/pip.js | 2 +- lib/zip.js | 2 +- package.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/inject.js b/lib/inject.js index f32c9d46..ea20e58d 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -2,7 +2,7 @@ const BbPromise = require('bluebird'); const fse = require('fs-extra'); const glob = require('glob-all'); const get = require('lodash.get'); -const set = require('lodash.set'); +const set = require('set-value'); const path = require('path'); const JSZip = require('jszip'); const { writeZip, 
zipFile } = require('./zipTree'); diff --git a/lib/pip.js b/lib/pip.js index 79dec42a..9e7c592e 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -2,7 +2,7 @@ const fse = require('fs-extra'); const rimraf = require('rimraf'); const path = require('path'); const get = require('lodash.get'); -const set = require('lodash.set'); +const set = require('set-value'); const spawn = require('child-process-ext/spawn'); const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); diff --git a/lib/zip.js b/lib/zip.js index cba29450..4b652f98 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -1,7 +1,7 @@ const fse = require('fs-extra'); const path = require('path'); const get = require('lodash.get'); -const set = require('lodash.set'); +const set = require('set-value'); const uniqBy = require('lodash.uniqby'); const BbPromise = require('bluebird'); const JSZip = require('jszip'); diff --git a/package.json b/package.json index 50ef2246..d9422de5 100644 --- a/package.json +++ b/package.json @@ -69,10 +69,10 @@ "is-wsl": "^2.2.0", "jszip": "^3.7.1", "lodash.get": "^4.4.2", - "lodash.set": "^4.3.2", "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", + "set-value": "^4.1.0", "sha256-file": "1.0.0", "shell-quote": "^1.7.3" }, From 915bcadad2f8a3be5434d6e42771bc835271baf8 Mon Sep 17 00:00:00 2001 From: Marcin Szleszynski <64603095+martinezpl@users.noreply.github.com> Date: Tue, 8 Mar 2022 13:00:56 +0100 Subject: [PATCH 277/328] feat: Support `dockerPrivateKey` to specify path to SSH key (#674) --- README.md | 19 ++++++++++++++++--- index.js | 7 +++++-- lib/pip.js | 8 ++++++-- test.js | 33 ++++++++++++++++++++++++++++++--- tests/base/custom_ssh | 1 + tests/base/serverless.yml | 6 ++++++ 6 files changed, 64 insertions(+), 10 deletions(-) create mode 100644 tests/base/custom_ssh diff --git a/README.md b/README.md index abe6a175..63b1a32a 100644 --- a/README.md +++ b/README.md @@ -77,8 +77,20 @@ custom: ``` The `dockerSsh` option will mount your `$HOME/.ssh/id_rsa` and `$HOME/.ssh/known_hosts` as a -volume in the docker container. If your SSH key is password protected, you can use `ssh-agent` -because `$SSH_AUTH_SOCK` is also mounted & the env var set. +volume in the docker container. + +In case you want to use a different key, you can specify the path (absolute) to it through `dockerPrivateKey` option: + +```yaml +custom: + pythonRequirements: + dockerizePip: true + dockerSsh: true + dockerPrivateKey: /home/.ssh/id_ed25519 +``` + +If your SSH key is password protected, you can use `ssh-agent` +because `$SSH_AUTH_SOCK` is also mounted & the env var is set. It is important that the host of your private repositories has already been added in your `$HOME/.ssh/known_hosts` file, as the install process will fail otherwise due to host authenticity failure. @@ -213,7 +225,7 @@ the names in `slimPatterns` #### Option not to strip binaries -In some cases, stripping binaries leads to problems like "ELF load command address/offset not properly aligned", even when done in the Docker environment. You can still slim down the package without `*.so` files with +In some cases, stripping binaries leads to problems like "ELF load command address/offset not properly aligned", even when done in the Docker environment. 
You can still slim down the package without `*.so` files with: ```yaml custom: @@ -566,3 +578,4 @@ package: - [@jacksgt](https://github.com/jacksgt) - Fixing pip issues - [@lephuongbg](https://github.com/lephuongbg) - Fixing single function deployment - [@rileypriddle](https://github.com/rileypriddle) - Introducing schema validation for `module` property +- [@martinezpl](https://github.com/martinezpl) - Fixing test issues, adding `dockerPrivateKey` option diff --git a/index.js b/index.js index 7741a7f8..c6577fe0 100644 --- a/index.js +++ b/index.js @@ -15,7 +15,6 @@ const { installAllRequirements } = require('./lib/pip'); const { pipfileToRequirements } = require('./lib/pipenv'); const { pyprojectTomlToRequirements } = require('./lib/poetry'); const { cleanup, cleanupCache } = require('./lib/clean'); - BbPromise.promisifyAll(fse); /** @@ -45,6 +44,7 @@ class ServerlessPythonRequirements { : this.serverless.service.provider.runtime || 'python', dockerizePip: false, dockerSsh: false, + dockerPrivateKey: null, dockerImage: null, dockerFile: null, dockerEnv: false, @@ -71,7 +71,10 @@ class ServerlessPythonRequirements { } if ( !options.dockerizePip && - (options.dockerSsh || options.dockerImage || options.dockerFile) + (options.dockerSsh || + options.dockerImage || + options.dockerFile || + options.dockerPrivateKey) ) { if (!this.warningLogged) { if (this.log) { diff --git a/lib/pip.js b/lib/pip.js index 9e7c592e..9f950664 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -275,12 +275,16 @@ async function installRequirements(targetFolder, pluginInstance) { dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); if (options.dockerSsh) { + const homePath = require('os').homedir(); + const sshKeyPath = + options.dockerPrivateKey || `${homePath}/.ssh/id_rsa`; + // Mount necessary ssh files to work with private repos dockerCmd.push( '-v', - `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`, + `${sshKeyPath}:/root/.ssh/${sshKeyPath.split('/').splice(-1)[0]}:z`, '-v', - `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, + `${homePath}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, '-v', `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, '-e', diff --git a/test.js b/test.js index 11a7cce5..b228805e 100644 --- a/test.js +++ b/test.js @@ -3,6 +3,7 @@ const glob = require('glob-all'); const JSZip = require('jszip'); const sha256File = require('sha256-file'); const tape = require('tape-promise/tape'); + const { chmodSync, removeSync, @@ -23,7 +24,7 @@ const mkCommand = (cmd) => (args, options = {}) => { options['env'] = Object.assign( - { SLS_DEBUG: 't' }, + { SLS_DEBUG: 'true' }, process.env, options['env'] ); @@ -32,11 +33,11 @@ const mkCommand = args, options ); - if (error) { + if (error && !options['noThrow']) { console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console throw error; } - if (status) { + if (status && !options['noThrow']) { console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console throw new Error( @@ -200,6 +201,32 @@ const canUseDocker = () => { // Skip if running on these platforms. 
const brokenOn = (...platforms) => platforms.indexOf(process.platform) != -1; +test( + 'dockerPrivateKey option correctly resolves docker command', + async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + const stdout = sls(['package'], { + noThrow: true, + env: { + dockerizePip: true, + dockerSsh: true, + dockerPrivateKey: `${__dirname}${sep}tests${sep}base${sep}custom_ssh`, + dockerImage: 'break the build to log the command', + }, + }); + t.true( + stdout.includes( + `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` + ), + 'docker command properly resolved' + ); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + test( 'default pythonBin can package flask with default options', async (t) => { diff --git a/tests/base/custom_ssh b/tests/base/custom_ssh new file mode 100644 index 00000000..8a7c4203 --- /dev/null +++ b/tests/base/custom_ssh @@ -0,0 +1 @@ +SOME KEY diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 37238158..6526246c 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -10,6 +10,9 @@ custom: pythonRequirements: zip: ${env:zip, self:custom.defaults.zip} dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + dockerSsh: ${env:dockerSsh, self:custom.defaults.dockerSsh} + dockerPrivateKey: ${env:dockerPrivateKey, self:custom.defaults.dockerPrivateKey} + dockerImage: ${env:dockerImage, self:custom.defaults.dockerImage} slim: ${env:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} @@ -24,6 +27,9 @@ custom: slimPatternsAppendDefaults: true zip: false dockerizePip: false + dockerSsh: false + dockerPrivateKey: '' + dockerImage: '' individually: false useStaticCache: true useDownloadCache: true From ebd12cb14ea352fb08c0957f213bda7dcce800df Mon Sep 17 00:00:00 2001 From: Brandon White Date: Mon, 14 Mar 2022 11:50:12 -0500 Subject: [PATCH 278/328] feat: Support individual packaging with `poetry` (#682) --- .gitignore | 1 - index.js | 2 -- lib/pip.js | 23 ++++--------- lib/poetry.js | 33 ++++++++++--------- test.js | 19 +++++++++++ tests/base/package.json | 2 +- tests/non_build_pyproject/package.json | 2 +- tests/non_poetry_pyproject/package.json | 2 +- tests/pipenv/package.json | 2 +- tests/poetry/package.json | 2 +- tests/poetry_individually/module1/handler.py | 5 +++ .../module1/pyproject.toml | 17 ++++++++++ tests/poetry_individually/package.json | 14 ++++++++ tests/poetry_individually/serverless.yml | 32 ++++++++++++++++++ 14 files changed, 116 insertions(+), 40 deletions(-) create mode 100644 tests/poetry_individually/module1/handler.py create mode 100644 tests/poetry_individually/module1/pyproject.toml create mode 100644 tests/poetry_individually/package.json create mode 100644 tests/poetry_individually/serverless.yml diff --git a/.gitignore b/.gitignore index ab0317f3..3707ff1e 100644 --- a/.gitignore +++ b/.gitignore @@ -59,7 +59,6 @@ dist/ downloads/ eggs/ .eggs/ -lib/ lib64/ parts/ sdist/ diff --git a/index.js b/index.js index c6577fe0..ebfc4017 100644 --- a/index.js +++ b/index.js @@ -13,7 +13,6 @@ const { injectAllRequirements } = require('./lib/inject'); const { layerRequirements } = require('./lib/layer'); const { installAllRequirements } = require('./lib/pip'); const { pipfileToRequirements } = require('./lib/pipenv'); -const { 
pyprojectTomlToRequirements } = require('./lib/poetry'); const { cleanup, cleanupCache } = require('./lib/clean'); BbPromise.promisifyAll(fse); @@ -203,7 +202,6 @@ class ServerlessPythonRequirements { } return BbPromise.bind(this) .then(pipfileToRequirements) - .then(pyprojectTomlToRequirements) .then(addVendorHelper) .then(installAllRequirements) .then(packRequirements) diff --git a/lib/pip.js b/lib/pip.js index 9f950664..ccb809c3 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -7,7 +7,7 @@ const spawn = require('child-process-ext/spawn'); const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); const { getStripCommand, getStripMode, deleteFiles } = require('./slim'); -const { isPoetryProject } = require('./poetry'); +const { isPoetryProject, pyprojectTomlToRequirements } = require('./poetry'); const { checkForAndDeleteMaxCacheVersions, sha256Path, @@ -60,16 +60,9 @@ function generateRequirementsFile( pluginInstance ) { const { serverless, servicePath, options, log } = pluginInstance; - if ( - options.usePoetry && - fse.existsSync(path.join(servicePath, 'pyproject.toml')) && - isPoetryProject(servicePath) - ) { - filterRequirementsFile( - path.join(servicePath, '.serverless/requirements.txt'), - targetFile, - pluginInstance - ); + const modulePath = path.dirname(requirementsPath); + if (options.usePoetry && isPoetryProject(modulePath)) { + filterRequirementsFile(targetFile, targetFile, pluginInstance); if (log) { log.info(`Parsed requirements.txt from pyproject.toml in ${targetFile}`); } else { @@ -570,11 +563,7 @@ function copyVendors(vendorFolder, targetFolder, { serverless, log }) { * @param {string} fileName */ function requirementsFileExists(servicePath, options, fileName) { - if ( - options.usePoetry && - fse.existsSync(path.join(servicePath, 'pyproject.toml')) && - isPoetryProject(servicePath) - ) { + if (options.usePoetry && isPoetryProject(path.dirname(fileName))) { return true; } @@ -609,6 +598,8 @@ async function installRequirementsIfNeeded( // Our source requirements, under our service path, and our module path (if specified) const fileName = path.join(servicePath, modulePath, options.fileName); + await pyprojectTomlToRequirements(modulePath, pluginInstance); + // Skip requirements generation, if requirements file doesn't exist if (!requirementsFileExists(servicePath, options, fileName)) { return false; diff --git a/lib/poetry.js b/lib/poetry.js index 23f43dc0..4003c1df 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -8,24 +8,25 @@ const tomlParse = require('@iarna/toml/parse-string'); /** * poetry install */ -async function pyprojectTomlToRequirements() { - if (!this.options.usePoetry || !isPoetryProject(this.servicePath)) { +async function pyprojectTomlToRequirements(modulePath, pluginInstance) { + const { serverless, servicePath, options, log, progress } = pluginInstance; + + const moduleProjectPath = path.join(servicePath, modulePath); + if (!options.usePoetry || !isPoetryProject(moduleProjectPath)) { return; } let generateRequirementsProgress; - if (this.progress && this.log) { - generateRequirementsProgress = this.progress.get( + if (progress && log) { + generateRequirementsProgress = progress.get( 'python-generate-requirements-toml' ); generateRequirementsProgress.update( 'Generating requirements.txt from "pyproject.toml"' ); - this.log.info('Generating requirements.txt from "pyproject.toml"'); + log.info('Generating requirements.txt from "pyproject.toml"'); } else { - this.serverless.cli.log( - 'Generating 
requirements.txt from pyproject.toml...' - ); + serverless.cli.log('Generating requirements.txt from pyproject.toml...'); } try { @@ -42,7 +43,7 @@ async function pyprojectTomlToRequirements() { '--with-credentials', ], { - cwd: this.servicePath, + cwd: moduleProjectPath, } ); } catch (e) { @@ -50,7 +51,7 @@ async function pyprojectTomlToRequirements() { e.stderrBuffer && e.stderrBuffer.toString().includes('command not found') ) { - throw new this.serverless.classes.Error( + throw new serverless.classes.Error( `poetry not found! Install it according to the poetry docs.`, 'PYTHON_REQUIREMENTS_POETRY_NOT_FOUND' ); @@ -59,16 +60,16 @@ async function pyprojectTomlToRequirements() { } const editableFlag = new RegExp(/^-e /gm); - const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); + const sourceRequirements = path.join(moduleProjectPath, 'requirements.txt'); const requirementsContents = fse.readFileSync(sourceRequirements, { encoding: 'utf-8', }); if (requirementsContents.match(editableFlag)) { - if (this.log) { - this.log.info('The generated file contains -e flags, removing them'); + if (log) { + log.info('The generated file contains -e flags, removing them'); } else { - this.serverless.cli.log( + serverless.cli.log( 'The generated file contains -e flags, removing them...' ); } @@ -78,10 +79,10 @@ async function pyprojectTomlToRequirements() { ); } - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + fse.ensureDirSync(path.join(servicePath, '.serverless')); fse.moveSync( sourceRequirements, - path.join(this.servicePath, '.serverless', 'requirements.txt'), + path.join(servicePath, '.serverless', modulePath, 'requirements.txt'), { overwrite: true } ); } finally { diff --git a/test.js b/test.js index b228805e..e2bbdc2c 100644 --- a/test.js +++ b/test.js @@ -1479,6 +1479,25 @@ test( { skip: !hasPython(3.6) } ); +test( + 'poetry py3.6 can package flask with package individually option', + async (t) => { + process.chdir('tests/poetry_individually'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles( + '.serverless/module1-sls-py-req-test-dev-hello.zip' + ); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !hasPython(3.6) } +); + test( 'py3.6 can package flask with package individually option', async (t) => { diff --git a/tests/base/package.json b/tests/base/package.json index 43ce4eee..38630491 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" } } diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json index 43ce4eee..38630491 100644 --- a/tests/non_build_pyproject/package.json +++ b/tests/non_build_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" } } diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json index 43ce4eee..38630491 100644 --- 
a/tests/non_poetry_pyproject/package.json +++ b/tests/non_poetry_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index 43ce4eee..38630491 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" } } diff --git a/tests/poetry/package.json b/tests/poetry/package.json index 43ce4eee..38630491 100644 --- a/tests/poetry/package.json +++ b/tests/poetry/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" } } diff --git a/tests/poetry_individually/module1/handler.py b/tests/poetry_individually/module1/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/poetry_individually/module1/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/poetry_individually/module1/pyproject.toml b/tests/poetry_individually/module1/pyproject.toml new file mode 100644 index 00000000..b813968a --- /dev/null +++ b/tests/poetry_individually/module1/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "poetry" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.6" +Flask = "^1.0" +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +boto3 = "^1.9" + +[tool.poetry.dev-dependencies] + +[build-system] +requires = ["poetry>=0.12"] +build-backend = "poetry.masonry.api" diff --git a/tests/poetry_individually/package.json b/tests/poetry_individually/package.json new file mode 100644 index 00000000..38630491 --- /dev/null +++ b/tests/poetry_individually/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + } +} diff --git a/tests/poetry_individually/serverless.yml b/tests/poetry_individually/serverless.yml new file mode 100644 index 00000000..2cb2d160 --- /dev/null +++ b/tests/poetry_individually/serverless.yml @@ -0,0 +1,32 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.6 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + individually: true + +functions: + hello: + handler: handler.hello + 
module: module1 + package: + patterns: + - 'module1/**' From 33f5d5a0dc5fd166086b9d548615e1dfdb0cbd12 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 14 Mar 2022 18:01:56 +0100 Subject: [PATCH 279/328] chore: Bump dependencies --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index d9422de5..c40b9cbf 100644 --- a/package.json +++ b/package.json @@ -52,7 +52,7 @@ "cross-spawn": "*", "eslint": "^7.32.0", "git-list-updated": "^1.2.1", - "github-release-from-cc-changelog": "^2.2.0", + "github-release-from-cc-changelog": "^2.2.1", "lodash": "^4.17.21", "prettier": "^2", "standard-version": "^9.3.2", @@ -65,7 +65,7 @@ "bluebird": "^3.7.2", "child-process-ext": "^2.1.1", "fs-extra": "^9.1.0", - "glob-all": "^3.2.1", + "glob-all": "^3.3.0", "is-wsl": "^2.2.0", "jszip": "^3.7.1", "lodash.get": "^4.4.2", From 3a898e5e707658c76f6063f44938366935b41812 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 14 Mar 2022 18:04:22 +0100 Subject: [PATCH 280/328] chore: Release v5.4.0 --- CHANGELOG.md | 12 ++++++++++++ package.json | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bbe979bf..41041fd3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,18 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [5.4.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.1...v5.4.0) (2022-03-14) + +### Features + +- Support `dockerPrivateKey` to specify path to SSH key ([#674](https://github.com/UnitedIncome/serverless-python-requirements/issues/674)) ([915bcad](https://github.com/UnitedIncome/serverless-python-requirements/commit/915bcadad2f8a3be5434d6e42771bc835271baf8)) ([Marcin Szleszynski](https://github.com/martinezpl)) +- Support individual packaging with `poetry` ([#682](https://github.com/UnitedIncome/serverless-python-requirements/issues/682)) ([ebd12cb](https://github.com/UnitedIncome/serverless-python-requirements/commit/ebd12cb14ea352fb08c0957f213bda7dcce800df)) ([Brandon White](https://github.com/BrandonLWhite)) + +### Maintenance Improvements + +- Log child process command output on error ([#679](https://github.com/UnitedIncome/serverless-python-requirements/issues/679)) ([ff11497](https://github.com/UnitedIncome/serverless-python-requirements/commit/ff11497cbcf42fe7f7d73fb2e8e2642c542dd8d7)) ([Andrei Zhemaituk](https://github.com/zhemaituk)) +- Replace `lodash.set` with `set-value` ([#676](https://github.com/UnitedIncome/serverless-python-requirements/issues/676)) ([3edf0e0](https://github.com/UnitedIncome/serverless-python-requirements/commit/3edf0e0cabeeb11ffadd9dcac6f198f22aee4a16)) ([Marc Hassan](https://github.com/mhassan1)) + ### [5.3.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.0...v5.3.1) (2022-01-28) ### Bug Fixes diff --git a/package.json b/package.json index c40b9cbf..7985cb60 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.3.1", + "version": "5.4.0", "engines": { "node": ">=12.0" }, From 48234a79068c0ff51b9e2944e8dedb8ef88a191d Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 11 Apr 2022 14:41:58 +0200 Subject: [PATCH 281/328] test: Pin Flask to ensure support for Python 3.6 --- tests/base/requirements-w-nested.txt | 2 +- tests/individually/module2/requirements.txt | 2 +- 
tests/non_build_pyproject/requirements.txt | 2 +- tests/pipenv/Pipfile | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/base/requirements-w-nested.txt b/tests/base/requirements-w-nested.txt index 4d73c837..b09aa52a 100644 --- a/tests/base/requirements-w-nested.txt +++ b/tests/base/requirements-w-nested.txt @@ -1,3 +1,3 @@ -flask +flask==2.0.3 bottle -r requirements-common.txt diff --git a/tests/individually/module2/requirements.txt b/tests/individually/module2/requirements.txt index 7e106024..c09d0264 100644 --- a/tests/individually/module2/requirements.txt +++ b/tests/individually/module2/requirements.txt @@ -1 +1 @@ -flask +flask==2.0.3 diff --git a/tests/non_build_pyproject/requirements.txt b/tests/non_build_pyproject/requirements.txt index aa55d989..09764fc3 100644 --- a/tests/non_build_pyproject/requirements.txt +++ b/tests/non_build_pyproject/requirements.txt @@ -1,2 +1,2 @@ -flask +flask==2.0.3 boto3 diff --git a/tests/pipenv/Pipfile b/tests/pipenv/Pipfile index 0d65eb75..6770a12a 100644 --- a/tests/pipenv/Pipfile +++ b/tests/pipenv/Pipfile @@ -3,7 +3,7 @@ url = "https://pypi.python.org/simple" verify_ssl = true [packages] -Flask = "*" +Flask = "==2.0.3" bottle = "*" boto3 = "*" From a4cd36b1145b3cb45c44eaaff0653461472e9a3c Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 20 Jan 2022 10:42:16 +0100 Subject: [PATCH 282/328] docs: Remove reference to `v1` from docs --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 63b1a32a..7c09a4de 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ [![npm](https://img.shields.io/npm/v/serverless-python-requirements.svg)](https://www.npmjs.com/package/serverless-python-requirements) [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) -A Serverless v1.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. +A Serverless Framework plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. 
--- From 29a9f5735d941ed8b4b85c0a409b758b02e2d607 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 27 Sep 2022 20:11:02 +0200 Subject: [PATCH 283/328] ci: Run tests against python3.7 instead of python3.6 --- .github/workflows/integrate.yml | 6 +- .github/workflows/validate.yml | 6 +- example/serverless.yml | 2 +- example_native_deps/serverless.yml | 2 +- test.js | 198 +++++++++++----------- tests/base/serverless.yml | 2 +- tests/individually/serverless.yml | 2 +- tests/non_build_pyproject/serverless.yml | 2 +- tests/non_poetry_pyproject/serverless.yml | 2 +- tests/pipenv/serverless.yml | 2 +- tests/poetry/serverless.yml | 2 +- tests/poetry_individually/serverless.yml | 2 +- 12 files changed, 114 insertions(+), 114 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 0d77acfc..cca8b6be 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -15,7 +15,7 @@ jobs: runs-on: windows-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -66,7 +66,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -117,7 +117,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index b4c245f5..905bee55 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -93,7 +93,7 @@ jobs: runs-on: windows-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -146,7 +146,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/example/serverless.yml b/example/serverless.yml index 9b58ead1..349cdcb8 100644 --- a/example/serverless.yml +++ b/example/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/example_native_deps/serverless.yml b/example_native_deps/serverless.yml index 0f4e632a..4deed44a 100644 --- a/example_native_deps/serverless.yml +++ b/example_native_deps/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/test.js b/test.js index e2bbdc2c..0ecd361d 100644 --- a/test.js +++ b/test.js @@ -239,11 +239,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 packages have the same hash', + 'py3.7 packages have the same hash', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -258,11 +258,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask with default options', + 'py3.7 can package flask with default options', async (t) => { process.chdir('tests/base'); const 
path = npm(['pack', '../..']); @@ -277,7 +277,7 @@ test( ); test( - 'py3.6 can package flask with hashes', + 'py3.7 can package flask with hashes', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -296,7 +296,7 @@ test( ); test( - 'py3.6 can package flask with nested', + 'py3.7 can package flask with nested', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -316,7 +316,7 @@ test( ); test( - 'py3.6 can package flask with zip option', + 'py3.7 can package flask with zip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -341,7 +341,7 @@ test( ); test( - 'py3.6 can package flask with slim option', + 'py3.7 can package flask with slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -365,7 +365,7 @@ test( ); test( - 'py3.6 can package flask with slim & slimPatterns options', + 'py3.7 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -386,11 +386,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - "py3.6 doesn't package bottle with noDeploy option", + "py3.7 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -412,7 +412,7 @@ test( ); test( - 'py3.6 can package boto3 with editable', + 'py3.7 can package boto3 with editable', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -435,7 +435,7 @@ test( ); test( - 'py3.6 can package flask with dockerizePip option', + 'py3.7 can package flask with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -446,11 +446,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 can package flask with slim & dockerizePip option', + 'py3.7 can package flask with slim & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -470,11 +470,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 can package flask with slim & dockerizePip & slimPatterns options', + 'py3.7 can package flask with slim & dockerizePip & slimPatterns options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -495,11 +495,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 can package flask with zip & dockerizePip option', + 'py3.7 can package flask with zip & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -527,11 +527,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 can package flask with zip & slim & dockerizePip option', + 'py3.7 can package flask with zip & slim & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -561,7 +561,7 @@ test( ); t.end(); }, - 
{ skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( @@ -856,7 +856,7 @@ test( ); test( - 'pipenv py3.6 can package flask with default options', + 'pipenv py3.7 can package flask with default options', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); @@ -871,11 +871,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'pipenv py3.6 can package flask with slim option', + 'pipenv py3.7 can package flask with slim option', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); @@ -895,11 +895,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'pipenv py3.6 can package flask with slim & slimPatterns options', + 'pipenv py3.7 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/pipenv'); @@ -921,11 +921,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'pipenv py3.6 can package flask with zip option', + 'pipenv py3.7 can package flask with zip option', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); @@ -946,11 +946,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - "pipenv py3.6 doesn't package bottle with noDeploy option", + "pipenv py3.7 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); @@ -968,7 +968,7 @@ test( t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( @@ -983,7 +983,7 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( @@ -997,11 +997,11 @@ test( t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with default options', + 'poetry py3.7 can package flask with default options', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); @@ -1013,11 +1013,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with slim option', + 'poetry py3.7 can package flask with slim option', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); @@ -1037,11 +1037,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with slim & slimPatterns options', + 'poetry py3.7 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/poetry'); @@ -1063,11 +1063,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with zip option', + 'poetry py3.7 can package flask with zip option', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); @@ -1092,7 +1092,7 @@ test( ); test( - "poetry py3.6 doesn't package bottle with noDeploy option", + "poetry py3.7 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); @@ -1110,11 +1110,11 @@ test( 
t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask with zip option and no explicit include', + 'py3.7 can package flask with zip option and no explicit include', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1137,11 +1137,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package lambda-decorators using vendor option', + 'py3.7 can package lambda-decorators using vendor option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1156,7 +1156,7 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( @@ -1204,11 +1204,11 @@ test( t.end(); }, - { skip: process.platform === 'win32' || !hasPython(3.6) } + { skip: process.platform === 'win32' || !hasPython(3.7) } ); test( - 'py3.6 can package flask in a project with a space in it', + 'py3.7 can package flask in a project with a space in it', async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); @@ -1220,11 +1220,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask in a project with a space in it with docker', + 'py3.7 can package flask in a project with a space in it with docker', async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); @@ -1236,11 +1236,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 supports custom file name with fileName option', + 'py3.7 supports custom file name with fileName option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1262,11 +1262,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - "py3.6 doesn't package bottle with zip option", + "py3.7 doesn't package bottle with zip option", async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1309,7 +1309,7 @@ test( ); test( - 'py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', + 'py3.7 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -1331,11 +1331,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + 'py3.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -1361,7 +1361,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( @@ -1426,7 +1426,7 @@ test( ); test( - 'pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', + 'pipenv py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', 
async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -1449,11 +1449,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', + 'poetry py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -1476,11 +1476,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with package individually option', + 'poetry py3.7 can package flask with package individually option', async (t) => { process.chdir('tests/poetry_individually'); const path = npm(['pack', '../..']); @@ -1495,11 +1495,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask with package individually option', + 'py3.7 can package flask with package individually option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1581,11 +1581,11 @@ test( t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask with package individually & slim option', + 'py3.7 can package flask with package individually & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1667,7 +1667,7 @@ test( t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( @@ -1861,7 +1861,7 @@ test( ); test( - 'py3.6 can package only requirements of module', + 'py3.7 can package only requirements of module', async (t) => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); @@ -1917,11 +1917,11 @@ test( t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package lambda-decorators using vendor and invidiually option', + 'py3.7 can package lambda-decorators using vendor and invidiually option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1998,7 +1998,7 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( @@ -2037,7 +2037,7 @@ test( t.end(); }, - { skip: process.platform === 'win32' || !hasPython(3.6) } + { skip: process.platform === 'win32' || !hasPython(3.7) } ); test( @@ -2076,11 +2076,11 @@ test( t.end(); }, - { skip: !canUseDocker() || process.platform === 'win32' || !hasPython(3.6) } + { skip: !canUseDocker() || process.platform === 'win32' || !hasPython(3.7) } ); test( - 'py3.6 uses download cache by default option', + 'py3.7 uses download cache by default option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2093,11 +2093,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 uses download cache by default', + 'py3.7 uses download cache by default', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2109,11 +2109,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 uses download cache with dockerizePip option', + 'py3.7 uses download cache with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2126,11 +2126,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() 
|| !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 uses download cache with dockerizePip by default option', + 'py3.7 uses download cache with dockerizePip by default option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2144,11 +2144,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 uses static and download cache', + 'py3.7 uses static and download cache', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2169,11 +2169,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 uses static and download cache with dockerizePip option', + 'py3.7 uses static and download cache with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2194,11 +2194,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 uses static cache', + 'py3.7 uses static cache', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2220,7 +2220,7 @@ test( '.completed_requirements exists in static-cache' ); - // py3.6 checking that static cache actually pulls from cache (by poisoning it) + // py3.7 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' @@ -2234,11 +2234,11 @@ test( t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 uses static cache with cacheLocation option', + 'py3.7 uses static cache with cacheLocation option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2261,11 +2261,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 uses static cache with dockerizePip & slim option', + 'py3.7 uses static cache with dockerizePip & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2287,7 +2287,7 @@ test( '.completed_requirements exists in static-cache' ); - // py3.6 checking that static cache actually pulls from cache (by poisoning it) + // py3.7 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' @@ -2306,11 +2306,11 @@ test( t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 uses download cache with dockerizePip & slim option', + 'py3.7 uses download cache with dockerizePip & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2332,11 +2332,11 @@ test( t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 can ignore functions defined with `image`', + 'py3.7 can ignore functions defined with `image`', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2365,5 +2365,5 @@ test( t.end(); }, - { skip: 
!hasPython(3.6) } + { skip: !hasPython(3.7) } ); diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 6526246c..ef48e901 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: ${env:runtime, 'python3.6'} + runtime: ${env:runtime, 'python3.7'} plugins: - serverless-python-requirements diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index a83ac7e0..d73d613a 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test-indiv provider: name: aws - runtime: python3.6 + runtime: python3.7 package: individually: true diff --git a/tests/non_build_pyproject/serverless.yml b/tests/non_build_pyproject/serverless.yml index 02e5a1f3..b0436e61 100644 --- a/tests/non_build_pyproject/serverless.yml +++ b/tests/non_build_pyproject/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/tests/non_poetry_pyproject/serverless.yml b/tests/non_poetry_pyproject/serverless.yml index 3d872a87..2b16790c 100644 --- a/tests/non_poetry_pyproject/serverless.yml +++ b/tests/non_poetry_pyproject/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index 4b343bfc..315f6741 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml index 4b343bfc..315f6741 100644 --- a/tests/poetry/serverless.yml +++ b/tests/poetry/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/tests/poetry_individually/serverless.yml b/tests/poetry_individually/serverless.yml index 2cb2d160..527a2846 100644 --- a/tests/poetry_individually/serverless.yml +++ b/tests/poetry_individually/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements From 332096484b0bf554ae60619a34a068d0bb36c5f5 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 27 Sep 2022 20:40:35 +0200 Subject: [PATCH 284/328] test: Freeze `pipenv` version to `2021.11.5` --- .github/workflows/integrate.yml | 6 +++--- .github/workflows/validate.yml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index cca8b6be..211a75cf 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -48,7 +48,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 @@ -99,7 +99,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 @@ -150,7 +150,7 @@ jobs: run: 
python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 905bee55..0efc0ea7 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -61,7 +61,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 @@ -128,7 +128,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 @@ -181,7 +181,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 From 7c6e4855c1afeb9fa55170c8f8df4f62e42867bc Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Wed, 28 Sep 2022 20:59:44 +0200 Subject: [PATCH 285/328] test: Remove tests against python2.7 --- .github/workflows/integrate.yml | 21 +- .github/workflows/validate.yml | 21 +- test.js | 2759 ++++++++++++------------------- 3 files changed, 1025 insertions(+), 1776 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 211a75cf..f4cfe708 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -13,9 +13,6 @@ jobs: windowsNode14: name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -30,10 +27,10 @@ jobs: key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 @@ -64,9 +61,6 @@ jobs: linuxNode14: name: '[Linux] Node.js 14: Unit tests' runs-on: ubuntu-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -81,10 +75,10 @@ jobs: key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 @@ -115,9 +109,6 @@ jobs: linuxNode12: name: '[Linux] Node.js v12: Unit tests' runs-on: ubuntu-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -132,10 +123,10 @@ jobs: key: npm-v12-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} restore-keys: npm-v12-${{ runner.os }}-${{ github.ref }}- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ 
matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 0efc0ea7..dd68b2f6 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -13,9 +13,6 @@ jobs: linuxNode14: name: '[Linux] Node.js v14: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' runs-on: ubuntu-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -43,10 +40,10 @@ jobs: npm-v14-${{ runner.os }}-${{ github.ref }}- npm-v14-${{ runner.os }}-refs/heads/master- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 @@ -91,9 +88,6 @@ jobs: windowsNode14: name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -110,10 +104,10 @@ jobs: npm-v14-${{ runner.os }}-${{ github.ref }}- npm-v14-${{ runner.os }}-refs/heads/master- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 @@ -144,9 +138,6 @@ jobs: linuxNode12: name: '[Linux] Node.js v12: Unit tests' runs-on: ubuntu-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -163,10 +154,10 @@ jobs: npm-v12-${{ runner.os }}-${{ github.ref }}- npm-v12-${{ runner.os }}-refs/heads/master- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 diff --git a/test.js b/test.js index 0ecd361d..c20817a5 100644 --- a/test.js +++ b/test.js @@ -164,10 +164,6 @@ const getPythonBin = (version) => { return bin; }; -const hasPython = (version) => { - return Boolean(availablePythons[String(version)]); -}; - const listZipFiles = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); @@ -227,54 +223,42 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'default pythonBin can package flask with default options', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 packages have the same hash', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const fileHash = sha256File('.serverless/sls-py-req-test.zip'); - sls(['package'], { env: {} }); - t.equal( - sha256File('.serverless/sls-py-req-test.zip'), - fileHash, - 'packages have the same hash' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask with default options', - async (t) => { - 
process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3) } -); +test('default pythonBin can package flask with default options', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.7 packages have the same hash', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const fileHash = sha256File('.serverless/sls-py-req-test.zip'); + sls(['package'], { env: {} }); + t.equal( + sha256File('.serverless/sls-py-req-test.zip'), + fileHash, + 'packages have the same hash' + ); + t.end(); +}); + +test('py3.7 can package flask with default options', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); test( 'py3.7 can package flask with hashes', @@ -292,147 +276,119 @@ test( t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.end(); }, - { skip: !hasPython(3) || brokenOn('win32') } -); - -test( - 'py3.7 can package flask with nested', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - fileName: 'requirements-w-nested.txt', - pythonBin: getPythonBin(3), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.7 can package flask with zip option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.7 can package flask with slim option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { slim: 'true', pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - 
zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > - 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.7 can package flask with slim & slimPatterns options', - async (t) => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { slim: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - "py3.7 doesn't package bottle with noDeploy option", - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml', - ]); - sls(['package'], { env: { pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(3) } + { skip: brokenOn('win32') } ); -test( - 'py3.7 can package boto3 with editable', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - fileName: 'requirements-w-editable.txt', - pythonBin: getPythonBin(3), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`botocore${sep}__init__.py`), - 'botocore is packaged' - ); - t.end(); - }, - { skip: !hasPython(3) } -); +test('py3.7 can package flask with nested', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + fileName: 'requirements-w-nested.txt', + pythonBin: getPythonBin(3), + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.7 can package flask with zip option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test('py3.7 can package flask with slim option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('py3.7 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test("py3.7 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('py3.7 can package boto3 with editable', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + fileName: 'requirements-w-editable.txt', + pythonBin: getPythonBin(3), + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`botocore${sep}__init__.py`), + 'botocore is packaged' + ); + t.end(); +}); test( 'py3.7 can package flask with dockerizePip option', @@ -446,7 +402,7 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ -470,7 +426,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ -495,7 +451,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ -527,7 +483,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ -561,101 +517,415 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); +test('pipenv py3.7 can package flask with default options', async (t) => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is 
packaged'); + t.false( + zipfiles.includes(`pytest${sep}__init__.py`), + 'dev-package pytest is NOT packaged' + ); + t.end(); +}); + +test('pipenv py3.7 can package flask with slim option', async (t) => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('pipenv py3.7 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/pipenv'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('pipenv py3.7 can package flask with zip option', async (t) => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test("pipenv py3.7 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('non build pyproject.toml uses requirements.txt', async (t) => { + process.chdir('tests/non_build_pyproject'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('non poetry pyproject.toml without requirements.txt packages handler only', async (t) => { + process.chdir('tests/non_poetry_pyproject'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); + t.end(); +}); + +test('poetry py3.7 can package flask with default options', async (t) => { + 
process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.7 can package flask with slim option', async (t) => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('poetry py3.7 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/poetry'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.7 can package flask with zip option', async (t) => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test("poetry py3.7 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('py3.7 can package flask with zip option and no explicit include', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); + perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); + sls(['package'], { env: { zip: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + 
zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test('py3.7 can package lambda-decorators using vendor option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { vendor: './vendor' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.end(); +}); + test( - 'py2.7 can package flask with default options', + "Don't nuke execute perms", async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); + const perm = '755'; + npm(['i', path]); - sls(['package'], { - env: { runtime: 'python2.7', pythonBin: getPythonBin(2) }, - }); + perl([ + '-p', + '-i.bak', + '-e', + 's/(handler.py.*$)/$1\n - foobar/', + 'serverless.yml', + ]); + writeFileSync(`foobar`, ''); + chmodSync(`foobar`, perm); + sls(['package'], { env: { vendor: './vendor' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(2) } -); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); -test( - 'py2.7 can package flask with slim option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { runtime: 'python2.7', slim: 'true', pythonBin: getPythonBin(2) }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' + const zipfiles_with_metadata = await listZipFilesWithMetaData( + '.serverless/sls-py-req-test.zip' ); t.true( - zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > - 0, - '__main__.py files are packaged' + zipfiles_with_metadata['foobar'].unixPermissions + .toString(8) + .slice(3, 6) === perm, + 'foobar has retained its executable file permissions' + ); + + const flaskPerm = statSync('.serverless/requirements/bin/flask').mode; + t.true( + zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm, + 'bin/flask has retained its executable file permissions' ); + t.end(); }, - { skip: !hasPython(2) } + { skip: process.platform === 'win32' } ); +test('py3.7 can package flask in a project with a space in it', async (t) => { + copySync('tests/base', 'tests/base with a space'); + process.chdir('tests/base with a space'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + test( - 'py2.7 can package flask with zip option', + 'py3.7 can package flask in a project with a space in it with docker', async (t) => { - process.chdir('tests/base'); + copySync('tests/base', 'tests/base with a space'); + 
process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package'], { - env: { runtime: 'python2.7', zip: 'true', pythonBin: getPythonBin(2) }, - }); + sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(2) } + { skip: !canUseDocker() || brokenOn('win32') } ); +test('py3.7 supports custom file name with fileName option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + writeFileSync('puck', 'requests'); + npm(['i', path]); + sls(['package'], { env: { fileName: 'puck' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes(`requests${sep}__init__.py`), + 'requests is packaged' + ); + t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test("py3.7 doesn't package bottle with zip option", async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = await listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.false( + zippedReqs.includes(`bottle.py`), + 'bottle is NOT packaged in the .requirements.zip file' + ); + t.end(); +}); + +test('py3.7 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + test( - 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', + 'py3.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = 
npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { - runtime: 'python2.7', dockerizePip: 'true', slim: 'true', - pythonBin: getPythonBin(2), + slimPatternsAppendDefaults: 'false', }, }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - '*.pyc files are packaged' + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' ); t.deepEqual( zipfiles.filter((filename) => filename.endsWith('__main__.py')), @@ -664,1347 +934,371 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); +test('pipenv py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { + process.chdir('tests/pipenv'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { + process.chdir('tests/poetry'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.7 can package flask with package individually option', async (t) => { + process.chdir('tests/poetry_individually'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles( + '.serverless/module1-sls-py-req-test-dev-hello.zip' + ); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.7 can package flask with package individually option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.false( + zipfiles_hello.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello' + ); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + t.true( + 
zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.false( + zipfiles_hello2.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.false( + zipfiles_hello3.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello3' + ); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.false( + zipfiles_hello4.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello4' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + + t.end(); +}); + +test('py3.7 can package flask with package individually & slim option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true', slim: 'true' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.deepEqual( + zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.deepEqual( + zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.deepEqual( + zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + 
t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.deepEqual( + zipfiles_hello4.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello4' + ); + + t.end(); +}); + +test('py3.7 can package only requirements of module', async (t) => { + process.chdir('tests/individually'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles_hello = await listZipFiles( + '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' + ); + t.true( + zipfiles_hello.includes('handler1.py'), + 'handler1.py is packaged at root level in function hello1' + ); + t.false( + zipfiles_hello.includes('handler2.py'), + 'handler2.py is NOT packaged at root level in function hello1' + ); + t.true( + zipfiles_hello.includes(`pyaml${sep}__init__.py`), + 'pyaml is packaged in function hello1' + ); + t.true( + zipfiles_hello.includes(`boto3${sep}__init__.py`), + 'boto3 is packaged in function hello1' + ); + t.false( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello1' + ); + + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.false( + zipfiles_hello2.includes('handler1.py'), + 'handler1.py is NOT packaged at root level in function hello2' + ); + t.false( + zipfiles_hello2.includes(`pyaml${sep}__init__.py`), + 'pyaml is NOT packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + t.end(); +}); + +test('py3.7 can package lambda-decorators using vendor and invidiually option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true', vendor: './vendor' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged at root level in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged at root level in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), 
+ 'handler.py is packaged at root level in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`lambda_decorators.py`), + 'lambda_decorators.py is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.end(); +}); + test( - "py2.7 doesn't package bottle with noDeploy option", + "Don't nuke execute perms when using individually", async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml', - ]); - sls(['package'], { - env: { runtime: 'python2.7', pythonBin: getPythonBin(2) }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(2) } -); - -test( - 'py2.7 can package flask with zip & dockerizePip option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - dockerizePip: 'true', - zip: 'true', - pythonBin: getPythonBin(2), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = await listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip file' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with zip & slim & dockerizePip option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - dockerizePip: 'true', - zip: 'true', - slim: 'true', - pythonBin: getPythonBin(2), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = await listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip file' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask 
with dockerizePip option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - dockerizePip: 'true', - pythonBin: getPythonBin(2), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with slim & dockerizePip option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - dockerizePip: 'true', - slim: 'true', - pythonBin: getPythonBin(2), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - '*.pyc files are NOT packaged' - ); - t.true( - zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > - 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', - async (t) => { - process.chdir('tests/base'); - - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - dockerizePip: 'true', - slim: 'true', - pythonBin: getPythonBin(2), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - '*.pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'pipenv py3.7 can package flask with default options', - async (t) => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.false( - zipfiles.includes(`pytest${sep}__init__.py`), - 'dev-package pytest is NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'pipenv py3.7 can package flask with slim option', - async (t) => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { slim: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > - 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'pipenv py3.7 can package flask with slim & slimPatterns options', - 
async (t) => { - process.chdir('tests/pipenv'); - - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { slim: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'pipenv py3.7 can package flask with zip option', - async (t) => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - "pipenv py3.7 doesn't package bottle with noDeploy option", - async (t) => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml', - ]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'non build pyproject.toml uses requirements.txt', - async (t) => { - process.chdir('tests/non_build_pyproject'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'non poetry pyproject.toml without requirements.txt packages handler only', - async (t) => { - process.chdir('tests/non_poetry_pyproject'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with default options', - async (t) => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with slim option', - async (t) => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], 
{ env: { slim: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > - 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with slim & slimPatterns options', - async (t) => { - process.chdir('tests/poetry'); - - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { slim: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with zip option', - async (t) => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - "poetry py3.7 doesn't package bottle with noDeploy option", - async (t) => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml', - ]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask with zip option and no explicit include', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); - perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); - sls(['package'], { env: { zip: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package lambda-decorators using vendor option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { vendor: './vendor' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - "Don't nuke execute perms", - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - const perm = '755'; - - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(handler.py.*$)/$1\n - foobar/', - 'serverless.yml', - ]); - writeFileSync(`foobar`, ''); - chmodSync(`foobar`, perm); - sls(['package'], { env: { vendor: './vendor' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged' - ); - t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); - - const zipfiles_with_metadata = await listZipFilesWithMetaData( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles_with_metadata['foobar'].unixPermissions - .toString(8) - .slice(3, 6) === perm, - 'foobar has retained its executable file permissions' - ); - - const flaskPerm = statSync('.serverless/requirements/bin/flask').mode; - t.true( - zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm, - 'bin/flask has retained its executable file permissions' - ); - - t.end(); - }, - { skip: process.platform === 'win32' || !hasPython(3.7) } -); - -test( - 'py3.7 can package flask in a project with a space in it', - async (t) => { - copySync('tests/base', 'tests/base with a space'); - process.chdir('tests/base with a space'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask in a project with a space in it with docker', - async (t) => { - copySync('tests/base', 'tests/base with a space'); - process.chdir('tests/base with a space'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { dockerizePip: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } -); - -test( - 'py3.7 supports custom file name with fileName option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - writeFileSync('puck', 'requests'); - npm(['i', path]); - sls(['package'], { env: { fileName: 'puck' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes(`requests${sep}__init__.py`), - 'requests is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged' - ); - t.false( - zipfiles.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - "py3.7 doesn't package bottle with zip option", - async (t) => { - 
process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml', - ]); - sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = await listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip file' - ); - t.false( - zippedReqs.includes(`bottle.py`), - 'bottle is NOT packaged in the .requirements.zip file' - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.7 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', - async (t) => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - async (t) => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - dockerizePip: 'true', - slim: 'true', - slimPatternsAppendDefaults: 'false', - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', - async (t) => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - slim: 'true', - slimPatternsAppendDefaults: 'false', - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(2.7) || brokenOn('win32') } -); - -test( - 'py2.7 can 
package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - async (t) => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - dockerizePip: 'true', - runtime: 'python2.7', - slim: 'true', - slimPatternsAppendDefaults: 'false', - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2.7) || brokenOn('win32') } -); - -test( - 'pipenv py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', - async (t) => { - process.chdir('tests/pipenv'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - - sls(['package'], { - env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', - async (t) => { - process.chdir('tests/poetry'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - - sls(['package'], { - env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with package individually option', - async (t) => { - process.chdir('tests/poetry_individually'); - const path = npm(['pack', '../..']); - npm(['i', path]); - - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles( - '.serverless/module1-sls-py-req-test-dev-hello.zip' - ); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask with package individually option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true' } }); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.false( - zipfiles_hello.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello' - ); - t.true( - 
zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.false( - zipfiles_hello2.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.false( - zipfiles_hello3.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello3' - ); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.false( - zipfiles_hello4.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello4' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask with package individually & slim option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true', slim: 'true' } }); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.deepEqual( - zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.deepEqual( - zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.deepEqual( - zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged 
in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - t.deepEqual( - zipfiles_hello4.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py2.7 can package flask with package individually option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true', runtime: 'python2.7' } }); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(2.7) } -); - -test( - 'py2.7 can package flask with package individually & slim option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { individually: 'true', runtime: 'python2.7', slim: 'true' }, - }); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.deepEqual( - zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const 
zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.deepEqual( - zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.deepEqual( - zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(2.7) } -); - -test( - 'py2.7 can ignore functions defined with `image`', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true', runtime: 'python2.7' } }); - t.true( - pathExistsSync('.serverless/hello.zip'), - 'function hello is packaged' - ); - t.true( - pathExistsSync('.serverless/hello2.zip'), - 'function hello2 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello3.zip'), - 'function hello3 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello4.zip'), - 'function hello4 is packaged' - ); - t.false( - pathExistsSync('.serverless/hello5.zip'), - 'function hello5 is not packaged' - ); - - t.end(); - }, - { skip: !hasPython(2.7) } -); - -test( - 'py3.7 can package only requirements of module', - async (t) => { - process.chdir('tests/individually'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles_hello = await listZipFiles( - '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' - ); - t.true( - zipfiles_hello.includes('handler1.py'), - 'handler1.py is packaged at root level in function hello1' - ); - t.false( - zipfiles_hello.includes('handler2.py'), - 'handler2.py is NOT packaged at root level in function hello1' - ); - t.true( - zipfiles_hello.includes(`pyaml${sep}__init__.py`), - 'pyaml is packaged in function hello1' - ); - t.true( - zipfiles_hello.includes(`boto3${sep}__init__.py`), - 'boto3 is packaged in function hello1' - ); - t.false( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello1' - ); - - const zipfiles_hello2 = await listZipFiles( - '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' - ); - t.true( - zipfiles_hello2.includes('handler2.py'), - 'handler2.py is packaged at root level in function hello2' - ); - t.false( - zipfiles_hello2.includes('handler1.py'), - 'handler1.py is NOT packaged at root level in 
function hello2' - ); - t.false( - zipfiles_hello2.includes(`pyaml${sep}__init__.py`), - 'pyaml is NOT packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package lambda-decorators using vendor and invidiually option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true', vendor: './vendor' } }); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged at root level in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged at root level in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged at root level in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`lambda_decorators.py`), - 'lambda_decorators.py is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - "Don't nuke execute perms when using individually", - async (t) => { - process.chdir('tests/individually'); + process.chdir('tests/individually'); const path = npm(['pack', '../..']); const perm = '755'; writeFileSync(`module1${sep}foobar`, ''); @@ -2037,7 +1331,7 @@ test( t.end(); }, - { skip: process.platform === 'win32' || !hasPython(3.7) } + { skip: process.platform === 'win32' } ); test( @@ -2076,41 +1370,33 @@ test( t.end(); }, - { skip: !canUseDocker() || process.platform === 'win32' || !hasPython(3.7) } + { skip: !canUseDocker() || process.platform === 'win32' } ); -test( - 'py3.7 uses download cache by default option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - 
sls(['package'], { env: {} }); - const cachepath = getUserCachePath(); - t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'cache directory exists' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); +test('py3.7 uses download cache by default option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); +}); -test( - 'py3.7 uses download cache by default', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); - t.true( - pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), - 'cache directory exists' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); +test('py3.7 uses download cache by default', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); + t.true( + pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); +}); test( 'py3.7 uses download cache with dockerizePip option', @@ -2126,7 +1412,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ -2144,33 +1430,29 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'py3.7 uses static and download cache', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const cachepath = getUserCachePath(); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - const arch = 'x86_64'; - t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'http exists in download-cache' - ); - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` - ), - 'flask exists in static-cache' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); +test('py3.7 uses static and download cache', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.end(); +}); test( 'py3.7 uses static and download cache with dockerizePip option', @@ -2194,75 +1476,67 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } -); - -test( - 'py3.7 uses static cache', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const cachepath = getUserCachePath(); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - const arch = 'x86_64'; - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` - ), - 
'flask exists in static-cache' - ); - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` - ), - '.completed_requirements exists in static-cache' - ); - - // py3.7 checking that static cache actually pulls from cache (by poisoning it) - writeFileSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, - 'injected new file into static cache folder' - ); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('injected_file_is_bad_form'), - "static cache is really used when running 'sls package' again" - ); - - t.end(); - }, - { skip: !hasPython(3.7) } + { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'py3.7 uses static cache with cacheLocation option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - const cachepath = '.requirements-cache'; - sls(['package'], { env: { cacheLocation: cachepath } }); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - const arch = 'x86_64'; - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` - ), - 'flask exists in static-cache' - ); - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` - ), - '.completed_requirements exists in static-cache' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); +test('py3.7 uses static cache', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + + // py3.7 checking that static cache actually pulls from cache (by poisoning it) + writeFileSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, + 'injected new file into static cache folder' + ); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('injected_file_is_bad_form'), + "static cache is really used when running 'sls package' again" + ); + + t.end(); +}); + +test('py3.7 uses static cache with cacheLocation option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + const cachepath = '.requirements-cache'; + sls(['package'], { env: { cacheLocation: cachepath } }); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + t.end(); +}); test( 'py3.7 uses static cache with dockerizePip & slim option', @@ -2306,7 +1580,7 @@ test( t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ 
-2332,38 +1606,31 @@ test( t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'py3.7 can ignore functions defined with `image`', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true' } }); - t.true( - pathExistsSync('.serverless/hello.zip'), - 'function hello is packaged' - ); - t.true( - pathExistsSync('.serverless/hello2.zip'), - 'function hello2 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello3.zip'), - 'function hello3 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello4.zip'), - 'function hello4 is packaged' - ); - t.false( - pathExistsSync('.serverless/hello5.zip'), - 'function hello5 is not packaged' - ); - - t.end(); - }, - { skip: !hasPython(3.7) } -); +test('py3.7 can ignore functions defined with `image`', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true' } }); + t.true(pathExistsSync('.serverless/hello.zip'), 'function hello is packaged'); + t.true( + pathExistsSync('.serverless/hello2.zip'), + 'function hello2 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello3.zip'), + 'function hello3 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello4.zip'), + 'function hello4 is packaged' + ); + t.false( + pathExistsSync('.serverless/hello5.zip'), + 'function hello5 is not packaged' + ); + + t.end(); +}); From cc146d088d362187641dd5ae3e9d0129a14c60e2 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 25 Sep 2022 21:53:40 +0200 Subject: [PATCH 286/328] refactor: Improve error message for docker failures --- lib/pip.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lib/pip.js b/lib/pip.js index ccb809c3..20340ea2 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -412,6 +412,14 @@ async function installRequirements(targetFolder, pluginInstance) { 'PYTHON_REQUIREMENTS_COMMAND_NOT_FOUND' ); } + + if (cmd === 'docker' && e.stderrBuffer) { + throw new pluginInstance.serverless.classes.Error( + `Running ${cmd} failed with: "${e.stderrBuffer.toString().trim()}"`, + 'PYTHON_REQUIREMENTS_DOCKER_COMMAND_FAILED' + ); + } + if (log) { log.info(`Stdout: ${e.stdoutBuffer}`); log.info(`Stderr: ${e.stderrBuffer}`); From f0c41835df9af6a39d03cbb3fbc492deeca420bf Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 27 Sep 2022 19:02:37 +0200 Subject: [PATCH 287/328] test: Investigate issue on CI --- lib/pip.js | 4 +++- test.js | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/pip.js b/lib/pip.js index 20340ea2..149c0285 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -415,7 +415,9 @@ async function installRequirements(targetFolder, pluginInstance) { if (cmd === 'docker' && e.stderrBuffer) { throw new pluginInstance.serverless.classes.Error( - `Running ${cmd} failed with: "${e.stderrBuffer.toString().trim()}"`, + `Running "${cmd} ${args.join(' ')}" failed with: "${e.stderrBuffer + .toString() + .trim()}"`, 'PYTHON_REQUIREMENTS_DOCKER_COMMAND_FAILED' ); } diff --git a/test.js b/test.js index c20817a5..3b8a242a 100644 --- a/test.js +++ b/test.js @@ -212,6 +212,7 @@ test( dockerImage: 'break the build to log the command', }, }); + console.log('STDOUT', stdout); t.true( stdout.includes( `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` From 2ce9d8e4b70fd34ab3628bfd28d8e7be857e7da2 Mon Sep 17 00:00:00 2001 From: 
Giuseppe Lumia Date: Thu, 29 Sep 2022 23:35:05 +0200 Subject: [PATCH 288/328] docs: Add a warning about individual packaging with Poetry/Pipenv --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 7c09a4de..6b5bffd6 100644 --- a/README.md +++ b/README.md @@ -360,6 +360,9 @@ custom: ### Per-function requirements +**Note: this feature does not work with Pipenv/Poetry, it requires `requirements.txt` +files for your Python modules.** + If you have different python functions, with different sets of requirements, you can avoid including all the unecessary dependencies of your functions by using the following structure: From 78795be24eb08dc78acd7566778b3960c28b263c Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 30 Sep 2022 23:59:38 +0200 Subject: [PATCH 289/328] fix: Properly recognize individual function (#725) --- index.js | 2 +- test.js | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/index.js b/index.js index ebfc4017..4c24bf8b 100644 --- a/index.js +++ b/index.js @@ -109,7 +109,7 @@ class ServerlessPythonRequirements { get targetFuncs() { let inputOpt = this.serverless.processedInput.options; return inputOpt.function - ? [inputOpt.functionObj] + ? [this.serverless.service.functions[inputOpt.function]] : values(this.serverless.service.functions).filter((func) => !func.image); } diff --git a/test.js b/test.js index 3b8a242a..c20817a5 100644 --- a/test.js +++ b/test.js @@ -212,7 +212,6 @@ test( dockerImage: 'break the build to log the command', }, }); - console.log('STDOUT', stdout); t.true( stdout.includes( `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` From 6fbdde1123e82a3ddb7d36aa14d23daa4654be86 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 4 Oct 2022 00:29:27 +0200 Subject: [PATCH 290/328] ci: Run CI tests against Serverless v3 --- .github/workflows/integrate.yml | 15 ++++++++++++--- .github/workflows/validate.yml | 15 ++++++++++++--- 2 files changed, 24 insertions(+), 6 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index f4cfe708..6d5f57ac 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -13,6 +13,9 @@ jobs: windowsNode14: name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest + strategy: + matrix: + sls-version: [2, 3] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -48,7 +51,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' @@ -61,6 +64,9 @@ jobs: linuxNode14: name: '[Linux] Node.js 14: Unit tests' runs-on: ubuntu-latest + strategy: + matrix: + sls-version: [2, 3] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -96,7 +102,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' @@ -144,7 +150,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' @@ -157,6 +163,9 @@ jobs: tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest + strategy: 
+ matrix: + sls-version: [2, 3] needs: [windowsNode14, linuxNode14, linuxNode12] steps: - name: Checkout repository diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index dd68b2f6..801b7194 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -13,6 +13,9 @@ jobs: linuxNode14: name: '[Linux] Node.js v14: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' runs-on: ubuntu-latest + strategy: + matrix: + sls-version: [2, 3] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -61,7 +64,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' @@ -88,6 +91,9 @@ jobs: windowsNode14: name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest + strategy: + matrix: + sls-version: [2, 3] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -125,7 +131,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' @@ -138,6 +144,9 @@ jobs: linuxNode12: name: '[Linux] Node.js v12: Unit tests' runs-on: ubuntu-latest + strategy: + matrix: + sls-version: [2, 3] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -175,7 +184,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' From e81d9e1824c135f110b4deccae2c26b0cbb26778 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois-Michel=20L=27Heureux?= Date: Tue, 18 Oct 2022 17:39:04 -0400 Subject: [PATCH 291/328] feat: Introduce `requirePoetryLockFile` flag --- README.md | 9 +++++++++ index.js | 1 + lib/poetry.js | 26 +++++++++++++++++++++----- test.js | 20 ++++++++++++++++++++ tests/poetry/serverless.yml | 1 + 5 files changed, 52 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 6b5bffd6..9563ff51 100644 --- a/README.md +++ b/README.md @@ -131,6 +131,15 @@ custom: usePoetry: false ``` +Be aware that if no `poetry.lock` file is present, a new one will be generated on the fly. To help having predictable builds, +you can set the `requirePoetryLockFile` flag to true to throw an error when `poetry.lock` is missing. 
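As a minimal illustrative sketch (not part of the recorded patch), the enforcing configuration simply flips the flag to `true`; the patch's own example below shows the default, `false`:

```yaml
custom:
  pythonRequirements:
    # fail packaging instead of generating a fresh poetry.lock on the fly
    requirePoetryLockFile: true
```

With the flag enabled and no `poetry.lock` present, packaging aborts with the `MISSING_REQUIRED_POETRY_LOCK` error introduced in this patch rather than silently regenerating the lock file.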
+ +```yaml +custom: + pythonRequirements: + requirePoetryLockFile: false +``` + ### Poetry with git dependencies Poetry by default generates the exported requirements.txt file with `-e` and that breaks pip with `-t` parameter diff --git a/index.js b/index.js index 4c24bf8b..30803971 100644 --- a/index.js +++ b/index.js @@ -57,6 +57,7 @@ class ServerlessPythonRequirements { pipCmdExtraArgs: [], noDeploy: [], vendor: '', + requirePoetryLockFile: false, }, (this.serverless.service.custom && this.serverless.service.custom.pythonRequirements) || diff --git a/lib/poetry.js b/lib/poetry.js index 4003c1df..d324784b 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -21,12 +21,28 @@ async function pyprojectTomlToRequirements(modulePath, pluginInstance) { generateRequirementsProgress = progress.get( 'python-generate-requirements-toml' ); - generateRequirementsProgress.update( - 'Generating requirements.txt from "pyproject.toml"' - ); - log.info('Generating requirements.txt from "pyproject.toml"'); + } + + const emitMsg = (msg) => { + if (generateRequirementsProgress) { + generateRequirementsProgress.update(msg); + log.info(msg); + } else { + serverless.cli.log(msg); + } + }; + + if (fs.existsSync('poetry.lock')) { + emitMsg('Generating requirements.txt from poetry.lock'); } else { - serverless.cli.log('Generating requirements.txt from pyproject.toml...'); + if (options.requirePoetryLockFile) { + throw new serverless.classes.Error( + 'poetry.lock file not found - set requirePoetryLockFile to false to ' + + 'disable this error', + 'MISSING_REQUIRED_POETRY_LOCK' + ); + } + emitMsg('Generating poetry.lock and requirements.txt from pyproject.toml'); } try { diff --git a/test.js b/test.js index c20817a5..27db6884 100644 --- a/test.js +++ b/test.js @@ -1634,3 +1634,23 @@ test('py3.7 can ignore functions defined with `image`', async (t) => { t.end(); }); + +test('poetry py3.7 fails packaging if poetry.lock is missing and flag requirePoetryLockFile is set to true', async (t) => { + copySync('tests/poetry', 'tests/base with a space'); + process.chdir('tests/base with a space'); + removeSync('poetry.lock'); + + const path = npm(['pack', '../..']); + npm(['i', path]); + const stdout = sls(['package'], { + env: { requirePoetryLockFile: 'true', slim: 'true' }, + noThrow: true, + }); + t.true( + stdout.includes( + 'poetry.lock file not found - set requirePoetryLockFile to false to disable this error' + ), + 'flag works and error is properly reported' + ); + t.end(); +}); diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml index 315f6741..2d032acd 100644 --- a/tests/poetry/serverless.yml +++ b/tests/poetry/serverless.yml @@ -13,6 +13,7 @@ custom: slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + requirePoetryLockFile: ${env:requirePoetryLockFile, false} defaults: zip: false slimPatterns: false From 8969fb2aa403f2be14d8fc4fed21f12e4d8b9b47 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 17:10:05 +0200 Subject: [PATCH 292/328] chore: Add issue templates (#735) --- .github/ISSUE_TEMPLATE/bug-report.yml | 68 ++++++++++++++++++++++ .github/ISSUE_TEMPLATE/config.yml | 5 ++ .github/ISSUE_TEMPLATE/feature-request.yml | 21 +++++++ 3 files changed, 94 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug-report.yml create mode 100644 
.github/ISSUE_TEMPLATE/config.yml create mode 100644 .github/ISSUE_TEMPLATE/feature-request.yml diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml new file mode 100644 index 00000000..bde39a55 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -0,0 +1,68 @@ +name: 🐛 Bug report +description: Create a bug report +body: + - type: checkboxes + attributes: + label: Are you certain it's a bug? + description: If you're uncertain, please report at https://github.com/serverless/serverless-python-requirements/discussions instead + options: + - label: Yes, it looks like a bug + required: true + - type: checkboxes + attributes: + label: Are you using the latest plugin release? + description: Latest version can be checked at https://github.com/serverless/serverless-python-requirements/releases/latest + options: + - label: Yes, I'm using the latest plugin release + required: true + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists + options: + - label: I have searched existing issues, it hasn't been reported yet + required: true + - type: textarea + attributes: + label: Issue description + validations: + required: true + - type: textarea + attributes: + label: Service configuration (serverless.yml) content + description: | + Provide COMPLETE content of serverless.yml, ensuring that: + • It consistently reproduces described issue + • It's as minimal as possible + • Ideally with no other plugins involved + • Has sensitive parts masked out + + If not applicable, fill with "N/A" + render: yaml + validations: + required: true + - type: input + attributes: + label: Command name and used flags + description: | + Full command name with used flags (If not applicable, fill with "N/A") + placeholder: serverless [...flags] + validations: + required: true + - type: textarea + attributes: + label: Command output + description: | + COMPLETE command output. + + If not applicable, fill with "N/A" + render: shell + validations: + required: true + - type: textarea + attributes: + label: Environment information + description: '"serverless --version" output + used version of the plugin' + render: shell + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..a7f83c6b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: Question + url: https://github.com/serverless/serverless-python-requirements/discussions + about: Please ask and answer questions here diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml new file mode 100644 index 00000000..14907ec2 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.yml @@ -0,0 +1,21 @@ +name: 🎉 Feature request +description: Suggest an idea +body: + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists + options: + - label: I have searched existing issues, it hasn't been reported yet + required: true + - type: textarea + attributes: + label: Use case description + description: Describe the use case that needs to be addressed + validations: + required: true + - type: textarea + attributes: + label: Proposed solution (optional) + description: | + e.g. 
propose how the configuration and implementation of the new feature could look From 853da8d39921dc83a23d59fd825b2180814f87ff Mon Sep 17 00:00:00 2001 From: Anders Steiner Date: Sun, 23 Oct 2022 15:49:06 -0500 Subject: [PATCH 293/328] fix: Adapt to support latest `pipenv` version (#718) BREAKING CHANGE: Requires `pipenv` in version `2022-04-08` or higher Co-authored-by: Randy Westergren Co-authored-by: Piotr Grzesik --- .github/workflows/integrate.yml | 6 +++--- .github/workflows/validate.yml | 6 +++--- lib/pipenv.js | 31 ++++++++++++++++++------------- tests/pipenv/Pipfile | 3 ++- 4 files changed, 26 insertions(+), 20 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 6d5f57ac..1f979b0a 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -48,7 +48,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -99,7 +99,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -147,7 +147,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 801b7194..31052279 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -61,7 +61,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -128,7 +128,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -181,7 +181,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} diff --git a/lib/pipenv.js b/lib/pipenv.js index 5856d47b..11331ee3 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -28,27 +28,32 @@ async function pipfileToRequirements() { } try { - let res; try { - res = await spawn( - 'pipenv', - ['lock', '--requirements', '--keep-outdated'], - { - cwd: this.servicePath, - } - ); + await spawn('pipenv', ['lock', '--keep-outdated'], { + cwd: this.servicePath, + }); } catch (e) { - if ( - e.stderrBuffer && - e.stderrBuffer.toString().includes('command not found') - ) { + const stderrBufferContent = + (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + + if (stderrBufferContent.includes('must exist to use')) { + // No previous Pipfile.lock, we will try to generate it here + await spawn('pipenv', ['lock'], { + cwd: this.servicePath, + }); + } else if 
(stderrBufferContent.includes('command not found')) { throw new this.serverless.classes.Error( `pipenv not found! Install it according to the poetry docs.`, 'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND' ); + } else { + throw e; } - throw e; } + const res = await spawn('pipenv', ['requirements'], { + cwd: this.servicePath, + }); + fse.ensureDirSync(path.join(this.servicePath, '.serverless')); fse.writeFileSync( path.join(this.servicePath, '.serverless/requirements.txt'), diff --git a/tests/pipenv/Pipfile b/tests/pipenv/Pipfile index 6770a12a..30e51dda 100644 --- a/tests/pipenv/Pipfile +++ b/tests/pipenv/Pipfile @@ -1,6 +1,7 @@ [[source]] -url = "https://pypi.python.org/simple" +url = "https://pypi.org/simple" verify_ssl = true +name = "pypi" [packages] Flask = "==2.0.3" From 4ba3bbeb9296b4844feb476de695f33ee2a30056 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 22:50:04 +0200 Subject: [PATCH 294/328] feat: Switch to official AWS docker images by default (#724) BREAKING CHANGE: Changes default `dockerImage` used for building dependencies --- index.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index 30803971..5c568cc4 100644 --- a/index.js +++ b/index.js @@ -95,7 +95,9 @@ class ServerlessPythonRequirements { ); } else if (!options.dockerFile) { // If no dockerFile is provided, use default image - const defaultImage = `lambci/lambda:build-${this.serverless.service.provider.runtime}`; + const architecture = + this.serverless.service.provider.architecture || 'x86_64'; + const defaultImage = `public.ecr.aws/sam/build-${this.serverless.service.provider.runtime}:latest-${architecture}`; options.dockerImage = options.dockerImage || defaultImage; } if (options.layer) { From 8f12c58d63c9e2572f48f1441bc8951863c8d8a6 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 22:55:30 +0200 Subject: [PATCH 295/328] chore: Bump dependencies --- package.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index 7985cb60..ae0f87e6 100644 --- a/package.json +++ b/package.json @@ -52,10 +52,10 @@ "cross-spawn": "*", "eslint": "^7.32.0", "git-list-updated": "^1.2.1", - "github-release-from-cc-changelog": "^2.2.1", + "github-release-from-cc-changelog": "^2.3.0", "lodash": "^4.17.21", "prettier": "^2", - "standard-version": "^9.3.2", + "standard-version": "^9.5.0", "tape": "*", "tape-promise": "*" }, @@ -67,14 +67,14 @@ "fs-extra": "^9.1.0", "glob-all": "^3.3.0", "is-wsl": "^2.2.0", - "jszip": "^3.7.1", + "jszip": "^3.10.1", "lodash.get": "^4.4.2", "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", "set-value": "^4.1.0", "sha256-file": "1.0.0", - "shell-quote": "^1.7.3" + "shell-quote": "^1.7.4" }, "peerDependencies": { "serverless": "^2.32 || 3" From a5c6a819884ce54bb587403d52458675e4bb25f5 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 22:56:52 +0200 Subject: [PATCH 296/328] chore: Bump `fs-extra` to v10 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ae0f87e6..f3937b96 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,7 @@ "appdirectory": "^0.1.0", "bluebird": "^3.7.2", "child-process-ext": "^2.1.1", - "fs-extra": "^9.1.0", + "fs-extra": "^10.1.0", "glob-all": "^3.3.0", "is-wsl": "^2.2.0", "jszip": "^3.10.1", From 2b98f89348ebb7d2759e04aa37535d119675e66d Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 22:57:36 +0200 Subject: [PATCH 297/328] chore: 
Bump `eslint` to v8 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index f3937b96..4d9683bf 100644 --- a/package.json +++ b/package.json @@ -50,7 +50,7 @@ }, "devDependencies": { "cross-spawn": "*", - "eslint": "^7.32.0", + "eslint": "^8.26.0", "git-list-updated": "^1.2.1", "github-release-from-cc-changelog": "^2.3.0", "lodash": "^4.17.21", From 8b8fe6668c092b4f5f7b48a476d84441a94ecf99 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 23:03:19 +0200 Subject: [PATCH 298/328] chore: Release v6 --- CHANGELOG.md | 21 +++++++++++++++++++++ README.md | 2 ++ package.json | 2 +- 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 41041fd3..fb11a43a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,27 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [6.0.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.4.0...v6.0.0) (2022-10-23) + +### ⚠ BREAKING CHANGES + +- Changes default `dockerImage` used for building dependencies (now uses images from `public.ecr.aws/sam` repository) +- Requires `pipenv` in version `2022-04-08` or higher + +### Features + +- Introduce `requirePoetryLockFile` flag ([#728](https://github.com/serverless/serverless-python-requirements/pull/728)) ([e81d9e1](https://github.com/UnitedIncome/serverless-python-requirements/commit/e81d9e1824c135f110b4deccae2c26b0cbb26778)) ([François-Michel L'Heureux](https://github.com/FinchPowers)) +- Switch to official AWS docker images by default ([#724](https://github.com/UnitedIncome/serverless-python-requirements/issues/724)) ([4ba3bbe](https://github.com/UnitedIncome/serverless-python-requirements/commit/4ba3bbeb9296b4844feb476de695f33ee2a30056)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +### Bug Fixes + +- Adapt to support latest `pipenv` version ([#718](https://github.com/UnitedIncome/serverless-python-requirements/issues/718)) ([853da8d](https://github.com/UnitedIncome/serverless-python-requirements/commit/853da8d39921dc83a23d59fd825b2180814f87ff)) ([Anders Steiner](https://github.com/andidev) & [Randy Westergren](https://github.com/rwestergren) & [Piotr Grzesik](https://github.com/pgrzesik)) +- Properly recognize individual function ([#725](https://github.com/UnitedIncome/serverless-python-requirements/issues/725)) ([78795be](https://github.com/UnitedIncome/serverless-python-requirements/commit/78795be24eb08dc78acd7566778b3960c28b263c)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +### Maintenance Improvements + +- Improve error message for docker failures ([#723](https://github.com/serverless/serverless-python-requirements/pull/723))([cc146d0](https://github.com/UnitedIncome/serverless-python-requirements/commit/cc146d088d362187641dd5ae3e9d0129a14c60e2)) ([Piotr Grzesik](https://github.com/pgrzesik)) + ## [5.4.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.1...v5.4.0) (2022-03-14) ### Features diff --git a/README.md b/README.md index 9563ff51..e0f27ac3 100644 --- a/README.md +++ b/README.md @@ -109,6 +109,8 @@ custom: ## :sparkles::cake::sparkles: Pipenv support +Requires `pipenv` in version `2022-04-08` or higher. + If you include a `Pipfile` and have `pipenv` installed instead of a `requirements.txt` this will use `pipenv lock -r` to generate them. 
It is fully compatible with all options such as `zip` and `dockerizePip`. If you don't want this plugin to generate it for you, set the following option: diff --git a/package.json b/package.json index 4d9683bf..1aeeb4e5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.4.0", + "version": "6.0.0", "engines": { "node": ">=12.0" }, From 762ca3e4fed9639cb035a4de5a199c29183c411e Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 31 Oct 2022 22:58:31 +0100 Subject: [PATCH 299/328] docs: Update references to default Docker images --- README.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index e0f27ac3..3c1f3339 100644 --- a/README.md +++ b/README.md @@ -33,8 +33,7 @@ If you're on a mac, check out [these notes](#applebeersnake-mac-brew-installed-p ## Cross compiling Compiling non-pure-Python modules or fetching their manylinux wheels is -supported on non-linux OSs via the use of Docker and the -[docker-lambda](https://github.com/lambci/docker-lambda) image. +supported on non-linux OSs via the use of Docker and [official AWS build](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-image-repositories.html) images. To enable docker usage, add the following to your `serverless.yml`: ```yaml @@ -489,10 +488,10 @@ For usage of `dockerizePip` on Windows do Step 1 only if running serverless on w ## Native Code Dependencies During Build -Some Python packages require extra OS dependencies to build successfully. To deal with this, replace the default image (`lambci/lambda:python3.6`) with a `Dockerfile` like: +Some Python packages require extra OS dependencies to build successfully. To deal with this, replace the default image with a `Dockerfile` like: ```dockerfile -FROM lambci/lambda:build-python3.6 +FROM public.ecr.aws/sam/build-python3.9 # Install your dependencies RUN yum -y install mysql-devel From 012b55f402c588381733cbe50d2e94acc55a0517 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Wed, 7 Dec 2022 18:00:30 +0100 Subject: [PATCH 300/328] docs: Add contributing and code of conduct --- CODE_OF_CONDUCT.md | 75 ++++++++++++++++++++++++++++++++++++ CONTRIBUTING.md | 95 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 170 insertions(+) create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..9d7afa9c --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,75 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +- The use of sexualized language or imagery and unwelcome sexual attention or + advances +- Trolling, insulting/derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or electronic + address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting our team at **hello@serverless.com**. As an alternative +feel free to reach out to any of us personally. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..efcf6d1f --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,95 @@ +# Contributing Guidelines + +We are always looking to promote good contributors to be maintainers and provide them a front-row seat to serverless innovation. + +If you would like to be a maintainer for the [Serverless Framework](https://github.com/serverless/serverless) or any of our plugins, please get started with making code contributions and engaging with open issues/PRs. Also, please reach out to any of [Serverless organization](https://github.com/serverless) members to express your interest. 
+ +We'd love to collaborate closely with amazing developers as we drive the development of this open technology into the future. + +Welcome, and thanks in advance for your help! + +# How to contribute to `serverless-python-requirements` + +## Setup + +It is recommended to use Node v14 or v16 for development. + +Then, to begin development fork repository and run `npm install` in its root folder. + +## Getting started + +A good first step is to search for open [issues](https://github.com/serverless/serverless-python-requirements/issues). Issues are labeled, and some good issues to start with are labeled: [good first issue](https://github.com/serverless/serverless-python-requirements/labels/good%20first%20issue) and [help wanted](https://github.com/serverless/serverless-python-requirements/labels/help%20wanted). + +## When you propose a new feature or bug fix + +Please make sure there is an open issue discussing your contribution before jumping into a Pull Request! +There are just a few situations (listed below) in which it is fine to submit PR without a corresponding issue: + +- Documentation update +- Obvious bug fix +- Maintenance improvement + +In all other cases please check if there's an open an issue discussing the given proposal, if there is not, create an issue respecting all its template remarks. + +In non-trivial cases please propose and let us review an implementation spec (in the corresponding issue) before jumping into implementation. + +Do not submit draft PRs. Submit only finalized work which is ready for merge. If you have any doubts related to implementation work please discuss in the corresponding issue. + +Once a PR has been reviewed and some changes are suggested, please ensure to **re-request review** after all new changes are pushed. It's the best and quietest way to inform maintainers that your work is ready to be checked again. + +## When you want to work on an existing issue + +**Note:** Please write a quick comment in the corresponding issue and ask if the feature is still relevant and that you want to jump into the implementation. + +Check out our [help wanted](https://github.com/serverless/serverless-python-requirements/labels/help%20wanted) or [good first issue](https://github.com/serverless/serverless-python-requirements/labels/good%20first%20issue) labels to find issues we want to move forward with your help. + +We will do our best to respond/review/merge your PR according to priority. We hope that you stay engaged with us during this period to ensure QA. Please note that the PR will be closed if there hasn't been any activity for a long time (~ 30 days) to keep us focused and keep the repo clean. + +## Reviewing Pull Requests + +Another really useful way to contribute is to review other people's Pull Requests. Having feedback from multiple people is helpful and reduces the overall time to make a final decision about the Pull Request. + +## Providing support + +The easiest thing you can do to help us move forward and make an impact on our progress is to simply provide support to other people having difficulties with their projects. + +You can do that by replying to [issues on GitHub](https://github.com/serverless/serverless-python-requirements/issues), chatting with other community members in [our Community Slack](https://www.serverless.com/slack), or [GitHub Discussions](https://github.com/serverless/serverless-python-requirements/discussions). + +--- + +# Code Style + +We aim for a clean, consistent code style. 
We're using [Prettier](https://prettier.io/) to confirm one code formatting style and [ESlint](https://eslint.org/) helps us to stay away from obvious issues that can be picked via static analysis. + +Ideally, you should have Prettier and ESlint integrated into your code editor, which will help you not think about specific rules and be sure you submit the code that follows guidelines. + +## Verifying prettier formatting + +``` +npm run prettier-check +``` + +## Verifying linting style + +``` +npm run lint +``` + +## Other guidelines + +- Minimize [lodash](https://lodash.com/) usage - resort to it, only if given part of logic cannot be expressed easily with native language constructs +- When writing asynchronous code, ensure to take advantage of [async functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function) and native `Promise` API. Do not rely on [Bluebird](http://bluebirdjs.com) even though still large parts of old code rely on it. We're looking forward to drop this dependency in the near future. + +# Testing + +When proposing a few feature or fixing a bug, it is recommended to also provide sufficient test coverage. All tests live in `./test.js` module. + +# Our Code of Conduct + +Finally, to make sure you have a pleasant experience while being in our welcoming community, please read our [code of conduct](CODE_OF_CONDUCT.md). It outlines our core values and beliefs and will make working together a happier experience. + +Thanks again for being a contributor to the Serverless Community :tada:! + +Cheers, + +The :zap: [Serverless](http://www.serverless.com) Team From 1436c17829848430ebb0b317cc0f208ce0954b85 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Wilczy=C5=84ski?= Date: Sun, 8 Jan 2023 21:15:11 +0100 Subject: [PATCH 301/328] Add support for specifying custom dependency groups in Poetry (#746) --- README.md | 10 +++++ index.js | 3 ++ lib/poetry.js | 9 +++++ test.js | 49 +++++++++++++++++++++++++ tests/poetry_packages/_poetryGroups.yml | 8 ++++ tests/poetry_packages/_slimPatterns.yml | 2 + tests/poetry_packages/handler.py | 5 +++ tests/poetry_packages/package.json | 14 +++++++ tests/poetry_packages/pyproject.toml | 19 ++++++++++ tests/poetry_packages/serverless.yml | 34 +++++++++++++++++ 10 files changed, 153 insertions(+) create mode 100644 tests/poetry_packages/_poetryGroups.yml create mode 100644 tests/poetry_packages/_slimPatterns.yml create mode 100644 tests/poetry_packages/handler.py create mode 100644 tests/poetry_packages/package.json create mode 100644 tests/poetry_packages/pyproject.toml create mode 100644 tests/poetry_packages/serverless.yml diff --git a/README.md b/README.md index 3c1f3339..cc93b310 100644 --- a/README.md +++ b/README.md @@ -141,6 +141,16 @@ custom: requirePoetryLockFile: false ``` +If your Poetry configuration includes custom dependency groups, they will not be installed automatically. To include them in the deployment package, use the `poetryWithGroups`, `poetryWithoutGroups` and `poetryOnlyGroups` options which wrap `poetry export`'s `--with`, `--without` and `--only` parameters. 
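The `poetryWithGroups` example below adds extra groups to the export; as a minimal sketch (the group name here is illustrative, not defined by the plugin), excluding a group follows the same shape:

```yaml
custom:
  pythonRequirements:
    # skip a dev-only dependency group when exporting requirements.txt
    poetryWithoutGroups:
      - dev_tools
```

`poetryOnlyGroups` works the same way but restricts the export to the listed groups; all three options map directly onto the corresponding `poetry export` flags.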
+ +```yaml +custom: + pythonRequirements: + poetryWithGroups: + - internal_dependencies + - lambda_dependencies +``` + ### Poetry with git dependencies Poetry by default generates the exported requirements.txt file with `-e` and that breaks pip with `-t` parameter diff --git a/index.js b/index.js index 5c568cc4..50a005e1 100644 --- a/index.js +++ b/index.js @@ -58,6 +58,9 @@ class ServerlessPythonRequirements { noDeploy: [], vendor: '', requirePoetryLockFile: false, + poetryWithGroups: [], + poetryWithoutGroups: [], + poetryOnlyGroups: [], }, (this.serverless.service.custom && this.serverless.service.custom.pythonRequirements) || diff --git a/lib/poetry.js b/lib/poetry.js index d324784b..17e3268f 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -57,6 +57,15 @@ async function pyprojectTomlToRequirements(modulePath, pluginInstance) { '-o', 'requirements.txt', '--with-credentials', + ...(options.poetryWithGroups.length + ? [`--with=${options.poetryWithGroups.join(',')}`] + : []), + ...(options.poetryWithoutGroups.length + ? [`--without=${options.poetryWithoutGroups.join(',')}`] + : []), + ...(options.poetryOnlyGroups.length + ? [`--only=${options.poetryOnlyGroups.join(',')}`] + : []), ], { cwd: moduleProjectPath, diff --git a/test.js b/test.js index 27db6884..c7232a6e 100644 --- a/test.js +++ b/test.js @@ -1654,3 +1654,52 @@ test('poetry py3.7 fails packaging if poetry.lock is missing and flag requirePoe ); t.end(); }); + +test('poetry py3.7 packages additional optional packages', async (t) => { + process.chdir('tests/poetry_packages'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryWithGroups: 'poetryWithGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.7 skips additional optional packages specified in withoutGroups', async (t) => { + process.chdir('tests/poetry_packages'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryWithGroups: 'poetryWithGroups', + poetryWithoutGroups: 'poetryWithoutGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.7 only installs optional packages specified in onlyGroups', async (t) => { + process.chdir('tests/poetry_packages'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryOnlyGroups: 'poetryOnlyGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); diff --git a/tests/poetry_packages/_poetryGroups.yml b/tests/poetry_packages/_poetryGroups.yml new file mode 100644 index 00000000..25abd07a --- /dev/null +++ b/tests/poetry_packages/_poetryGroups.yml @@ -0,0 +1,8 @@ +empty: [] +poetryWithGroups: + - custom1 + - custom2 +poetryWithoutGroups: + - custom1 
+poetryOnlyGroups: + - custom2 diff --git a/tests/poetry_packages/_slimPatterns.yml b/tests/poetry_packages/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/poetry_packages/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git a/tests/poetry_packages/handler.py b/tests/poetry_packages/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/poetry_packages/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/poetry_packages/package.json b/tests/poetry_packages/package.json new file mode 100644 index 00000000..781a4259 --- /dev/null +++ b/tests/poetry_packages/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + } +} diff --git a/tests/poetry_packages/pyproject.toml b/tests/poetry_packages/pyproject.toml new file mode 100644 index 00000000..7bbe30bf --- /dev/null +++ b/tests/poetry_packages/pyproject.toml @@ -0,0 +1,19 @@ +[tool.poetry] +name = "poetry" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.6" +Flask = "^1.0" + +[tool.poetry.group.custom1.dependencies] +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} + +[tool.poetry.group.custom2.dependencies] +boto3 = "^1.9" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/tests/poetry_packages/serverless.yml b/tests/poetry_packages/serverless.yml new file mode 100644 index 00000000..03652968 --- /dev/null +++ b/tests/poetry_packages/serverless.yml @@ -0,0 +1,34 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.7 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + requirePoetryLockFile: ${env:requirePoetryLockFile, false} + poetryWithGroups: ${file(./_poetryGroups.yml):${env:poetryWithGroups, "empty"}} + poetryWithoutGroups: ${file(./_poetryGroups.yml):${env:poetryWithoutGroups, "empty"}} + poetryOnlyGroups: ${file(./_poetryGroups.yml):${env:poetryOnlyGroups, "empty"}} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello From 22a1f832ac8051f0963328743f9e768f8e66649e Mon Sep 17 00:00:00 2001 From: Randy Westergren Date: Sun, 8 Jan 2023 15:17:07 -0500 Subject: [PATCH 302/328] fix: Add legacy `pipenv` backward compatability (#742) --- .github/workflows/integrate.yml | 9 ++-- .github/workflows/validate.yml | 9 ++-- README.md | 3 +- lib/pipenv.js | 94 ++++++++++++++++++++++++++------- package.json | 1 + 5 files changed, 88 insertions(+), 28 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 1f979b0a..b0ca0207 100644 --- 
a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -16,6 +16,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -48,7 +49,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -67,6 +68,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -99,7 +101,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -147,7 +149,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -166,6 +168,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] needs: [windowsNode14, linuxNode14, linuxNode12] steps: - name: Checkout repository diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 31052279..e77f6cce 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -16,6 +16,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -61,7 +62,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -94,6 +95,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -128,7 +130,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -147,6 +149,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -181,7 +184,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} diff --git a/README.md b/README.md index cc93b310..6032725a 100644 --- a/README.md +++ b/README.md @@ -110,8 +110,7 @@ custom: Requires `pipenv` in version `2022-04-08` or higher. -If you include a `Pipfile` and have `pipenv` installed instead of a `requirements.txt` this will use -`pipenv lock -r` to generate them. 
It is fully compatible with all options such as `zip` and +If you include a `Pipfile` and have `pipenv` installed, this will use `pipenv` to generate requirements instead of a `requirements.txt`. It is fully compatible with all options such as `zip` and `dockerizePip`. If you don't want this plugin to generate it for you, set the following option: ```yaml diff --git a/lib/pipenv.js b/lib/pipenv.js index 11331ee3..c59fe26a 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -2,6 +2,43 @@ const fse = require('fs-extra'); const path = require('path'); const spawn = require('child-process-ext/spawn'); const { EOL } = require('os'); +const semver = require('semver'); + +const LEGACY_PIPENV_VERSION = '2022.8.5'; + +async function getPipenvVersion() { + try { + const res = await spawn('pipenv', ['--version'], { + cwd: this.servicePath, + }); + + const stdoutBuffer = + (res.stdoutBuffer && res.stdoutBuffer.toString().trim()) || ''; + + const version = stdoutBuffer.split(' ')[2]; + + if (semver.valid(version)) { + return version; + } else { + throw new this.serverless.classes.Error( + `Unable to parse pipenv version!`, + 'PYTHON_REQUIREMENTS_PIPENV_VERSION_ERROR' + ); + } + } catch (e) { + const stderrBufferContent = + (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + + if (stderrBufferContent.includes('command not found')) { + throw new this.serverless.classes.Error( + `pipenv not found! Install it according to the pipenv docs.`, + 'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND' + ); + } else { + throw e; + } + } +} /** * pipenv install @@ -28,31 +65,48 @@ async function pipfileToRequirements() { } try { - try { - await spawn('pipenv', ['lock', '--keep-outdated'], { - cwd: this.servicePath, - }); - } catch (e) { - const stderrBufferContent = - (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + // Get and validate pipenv version + if (this.log) { + this.log.info('Getting pipenv version'); + } else { + this.serverless.cli.log('Getting pipenv version'); + } + + const pipenvVersion = await getPipenvVersion(); + let res; - if (stderrBufferContent.includes('must exist to use')) { - // No previous Pipfile.lock, we will try to generate it here - await spawn('pipenv', ['lock'], { + if (semver.gt(pipenvVersion, LEGACY_PIPENV_VERSION)) { + // Using new pipenv syntax ( >= 2022.8.13) + try { + await spawn('pipenv', ['lock', '--keep-outdated'], { cwd: this.servicePath, }); - } else if (stderrBufferContent.includes('command not found')) { - throw new this.serverless.classes.Error( - `pipenv not found! 
Install it according to the poetry docs.`, - 'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND' - ); - } else { - throw e; + } catch (e) { + const stderrBufferContent = + (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + if (stderrBufferContent.includes('must exist to use')) { + // No previous Pipfile.lock, we will try to generate it here + await spawn('pipenv', ['lock'], { + cwd: this.servicePath, + }); + } else { + throw e; + } } + + res = await spawn('pipenv', ['requirements'], { + cwd: this.servicePath, + }); + } else { + // Falling back to legacy pipenv syntax + res = await spawn( + 'pipenv', + ['lock', '--requirements', '--keep-outdated'], + { + cwd: this.servicePath, + } + ); } - const res = await spawn('pipenv', ['requirements'], { - cwd: this.servicePath, - }); fse.ensureDirSync(path.join(this.servicePath, '.serverless')); fse.writeFileSync( diff --git a/package.json b/package.json index 1aeeb4e5..318eec59 100644 --- a/package.json +++ b/package.json @@ -72,6 +72,7 @@ "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", + "semver": "^7.3.8", "set-value": "^4.1.0", "sha256-file": "1.0.0", "shell-quote": "^1.7.4" From e8b2e51c265792046bacc3946f22f7bd842c60e6 Mon Sep 17 00:00:00 2001 From: Randy Westergren Date: Wed, 11 Jan 2023 17:31:37 -0500 Subject: [PATCH 303/328] fix: Fix integration test matrix configuration (#755) Matrix values appear to mistakenly added to `tagIfNewVersion` instead of `linuxNode12` --- .github/workflows/integrate.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index b0ca0207..b2b6f77d 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -117,6 +117,10 @@ jobs: linuxNode12: name: '[Linux] Node.js v12: Unit tests' runs-on: ubuntu-latest + strategy: + matrix: + sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -165,10 +169,6 @@ jobs: tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - strategy: - matrix: - sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] needs: [windowsNode14, linuxNode14, linuxNode12] steps: - name: Checkout repository From ad40278629c63f4d0971637214b4d9bc20dbd288 Mon Sep 17 00:00:00 2001 From: Jeff Gordon <55799997+jfgordon2@users.noreply.github.com> Date: Thu, 17 Aug 2023 16:31:11 -0500 Subject: [PATCH 304/328] fix: Remove outdated Pipenv requirements flag (#780) --- .github/workflows/validate.yml | 10 +++++++--- lib/pipenv.js | 22 ++++++++++++++++------ tests/base/package.json | 2 +- tests/individually/package.json | 2 +- tests/non_build_pyproject/package.json | 2 +- tests/non_poetry_pyproject/package.json | 2 +- tests/pipenv/package.json | 2 +- tests/poetry/package.json | 2 +- tests/poetry_individually/package.json | 2 +- 9 files changed, 30 insertions(+), 16 deletions(-) diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index e77f6cce..227e6056 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -16,7 +16,11 @@ jobs: strategy: matrix: sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] + pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] + # pipenv 2202.8.13 marks deprecation of pipenv lock --requirements + # https://github.com/pypa/pipenv/blob/30067b458bd7a429f242736b7fde40c9bd4d4f14/CHANGELOG.rst#2022813-2022-08-13 + # pipenv 2023.7.9 marks deprecation of pipenv lock --keep-outdated + # 
https://github.com/pypa/pipenv/blob/30067b458bd7a429f242736b7fde40c9bd4d4f14/CHANGELOG.rst#202379-2023-07-09 steps: - name: Checkout repository uses: actions/checkout@v2 @@ -95,7 +99,7 @@ jobs: strategy: matrix: sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] + pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -149,7 +153,7 @@ jobs: strategy: matrix: sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] + pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/lib/pipenv.js b/lib/pipenv.js index c59fe26a..1099b651 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -77,26 +77,36 @@ async function pipfileToRequirements() { if (semver.gt(pipenvVersion, LEGACY_PIPENV_VERSION)) { // Using new pipenv syntax ( >= 2022.8.13) + // Generate requirements from existing lock file. + // See: https://pipenv.pypa.io/en/latest/advanced/#generating-a-requirements-txt try { - await spawn('pipenv', ['lock', '--keep-outdated'], { + res = await spawn('pipenv', ['requirements'], { cwd: this.servicePath, }); } catch (e) { const stderrBufferContent = (e.stderrBuffer && e.stderrBuffer.toString()) || ''; - if (stderrBufferContent.includes('must exist to use')) { + if (stderrBufferContent.includes('FileNotFoundError')) { // No previous Pipfile.lock, we will try to generate it here + if (this.log) { + this.log.warning( + 'No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.' + ); + } else { + this.serverless.cli.log( + 'WARNING: No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.' + ); + } await spawn('pipenv', ['lock'], { cwd: this.servicePath, }); + res = await spawn('pipenv', ['requirements'], { + cwd: this.servicePath, + }); } else { throw e; } } - - res = await spawn('pipenv', ['requirements'], { - cwd: this.servicePath, - }); } else { // Falling back to legacy pipenv syntax res = await spawn( diff --git a/tests/base/package.json b/tests/base/package.json index 38630491..781a4259 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/individually/package.json b/tests/individually/package.json index 43ce4eee..781a4259 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json index 38630491..781a4259 100644 --- a/tests/non_build_pyproject/package.json +++ b/tests/non_build_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json index 38630491..781a4259 100644 --- a/tests/non_poetry_pyproject/package.json +++ 
b/tests/non_poetry_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index 38630491..781a4259 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/poetry/package.json b/tests/poetry/package.json index 38630491..781a4259 100644 --- a/tests/poetry/package.json +++ b/tests/poetry/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/poetry_individually/package.json b/tests/poetry_individually/package.json index 38630491..781a4259 100644 --- a/tests/poetry_individually/package.json +++ b/tests/poetry_individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } From c1f5ca114de815ca19ad213a79e250b5b81f29b3 Mon Sep 17 00:00:00 2001 From: Jim Kirkbride Date: Thu, 17 Aug 2023 17:36:14 -0400 Subject: [PATCH 305/328] fix: Not crash when runtime is not `python` (#773) Co-authored-by: Marco Kleinlein --- .gitignore | 3 +++ .python-version | 1 + CONTRIBUTING.md | 14 +++++++++++--- index.js | 7 +++++++ test.js | 12 ++++++++++++ 5 files changed, 34 insertions(+), 3 deletions(-) create mode 100644 .python-version diff --git a/.gitignore b/.gitignore index 3707ff1e..64bdbd6a 100644 --- a/.gitignore +++ b/.gitignore @@ -76,3 +76,6 @@ unzip_requirements.py # Project ignores puck/ serverless.yml.bak + +# Generated packaging +*.tgz diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..475ba515 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.7 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index efcf6d1f..4616858b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -12,9 +12,17 @@ Welcome, and thanks in advance for your help! ## Setup -It is recommended to use Node v14 or v16 for development. - -Then, to begin development fork repository and run `npm install` in its root folder. +Pre-Reqs: +* Python 3.7 +* [poetry](https://python-poetry.org/docs/) (if you use multiple versions of Python be sure to install it with python 3.7) +* Perl (used in the tests) +* Node v14 or v16 + +Then, to begin development: +1. fork the repository +2. `npm install -g serverless@` (check the peer dependencies in the root `package.json` file for the version) +3. run `npm install` in its root folder +4. 
run the tests via `npm run test` ## Getting started diff --git a/index.js b/index.js index 50a005e1..246b121e 100644 --- a/index.js +++ b/index.js @@ -66,6 +66,13 @@ class ServerlessPythonRequirements { this.serverless.service.custom.pythonRequirements) || {} ); + if ( + options.pythonBin === this.serverless.service.provider.runtime && + !options.pythonBin.startsWith('python') + ) { + options.pythonBin = 'python'; + } + if (options.dockerizePip === 'non-linux') { options.dockerizePip = process.platform !== 'linux'; } diff --git a/test.js b/test.js index c7232a6e..673bf631 100644 --- a/test.js +++ b/test.js @@ -1655,6 +1655,18 @@ test('poetry py3.7 fails packaging if poetry.lock is missing and flag requirePoe t.end(); }); +test('works with provider.runtime not being python', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { runtime: 'nodejs12.x' } }); + t.true( + pathExistsSync('.serverless/sls-py-req-test.zip'), + 'sls-py-req-test is packaged' + ); + t.end(); +}); + test('poetry py3.7 packages additional optional packages', async (t) => { process.chdir('tests/poetry_packages'); const path = npm(['pack', '../..']); From c1992f2497b86ae12dbf4c9a8b582df4cf658d8a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 29 Oct 2023 22:42:06 +0100 Subject: [PATCH 306/328] ci: Remove node12 from testing matrix (#795) --- .github/workflows/integrate.yml | 54 +-------------------------------- .github/workflows/validate.yml | 54 --------------------------------- 2 files changed, 1 insertion(+), 107 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index b2b6f77d..be4bd2c4 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -114,62 +114,10 @@ jobs: - name: Unit tests run: npm test - linuxNode12: - name: '[Linux] Node.js v12: Unit tests' - runs-on: ubuntu-latest - strategy: - matrix: - sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v12-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: npm-v12-${{ runner.os }}-${{ github.ref }}- - - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 12.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - - - name: Install serverless - run: npm install -g serverless@${{ matrix.sls-version }} - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test - tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - needs: [windowsNode14, linuxNode14, linuxNode12] + needs: [windowsNode14, linuxNode14] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 227e6056..79548057 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -146,57 +146,3 @@ jobs: npm update --save-dev --no-save - name: Unit 
tests run: npm test - - linuxNode12: - name: '[Linux] Node.js v12: Unit tests' - runs-on: ubuntu-latest - strategy: - matrix: - sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v12-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: | - npm-v12-${{ runner.os }}-${{ github.ref }}- - npm-v12-${{ runner.os }}-refs/heads/master- - - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 12.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - - - name: Install serverless - run: npm install -g serverless@${{ matrix.sls-version }} - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test From 4b93513778929ed0a56b20af8f4b58cbc818e4c0 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 30 Oct 2023 07:55:57 +0100 Subject: [PATCH 307/328] ci: Temp skip of cache-related tests (#796) --- test.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test.js b/test.js index 673bf631..596edcce 100644 --- a/test.js +++ b/test.js @@ -1373,7 +1373,7 @@ test( { skip: !canUseDocker() || process.platform === 'win32' } ); -test('py3.7 uses download cache by default option', async (t) => { +test.skip('py3.7 uses download cache by default option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1386,7 +1386,7 @@ test('py3.7 uses download cache by default option', async (t) => { t.end(); }); -test('py3.7 uses download cache by default', async (t) => { +test.skip('py3.7 uses download cache by default', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1415,7 +1415,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test( +test.skip( 'py3.7 uses download cache with dockerizePip by default option', async (t) => { process.chdir('tests/base'); @@ -1433,7 +1433,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('py3.7 uses static and download cache', async (t) => { +test.skip('py3.7 uses static and download cache', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); From 2bd9263ffb22cdf1395619cf16cd836660c7afb6 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 30 Oct 2023 08:07:50 +0100 Subject: [PATCH 308/328] ci: Fix test skips (#797) --- test.js | 103 +++++++++++++++++++++++++++++++------------------------- 1 file changed, 57 insertions(+), 46 deletions(-) diff --git a/test.js b/test.js index 596edcce..a4bb992b 100644 --- a/test.js +++ b/test.js @@ -1373,30 +1373,36 @@ test( { skip: !canUseDocker() || process.platform === 'win32' } ); -test.skip('py3.7 uses download cache by default option', async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const cachepath = getUserCachePath(); - t.true( - 
pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'cache directory exists' - ); - t.end(); -}); +test('py3.7 uses download cache by default option', + async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); + }, + { skip: true } +); -test.skip('py3.7 uses download cache by default', async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); - t.true( - pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), - 'cache directory exists' - ); - t.end(); -}); +test('py3.7 uses download cache by default', + async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); + t.true( + pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); + }, + { skip: true } +); test( 'py3.7 uses download cache with dockerizePip option', @@ -1412,10 +1418,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || brokenOn('win32') } + // { skip: !canUseDocker() || brokenOn('win32') } + { skip: true } ); -test.skip( +test( 'py3.7 uses download cache with dockerizePip by default option', async (t) => { process.chdir('tests/base'); @@ -1430,29 +1437,33 @@ test.skip( ); t.end(); }, - { skip: !canUseDocker() || brokenOn('win32') } + // { skip: !canUseDocker() || brokenOn('win32') } + { skip: true } ); -test.skip('py3.7 uses static and download cache', async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const cachepath = getUserCachePath(); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - const arch = 'x86_64'; - t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'http exists in download-cache' - ); - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` - ), - 'flask exists in static-cache' - ); - t.end(); -}); +test('py3.7 uses static and download cache', + async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.end(); + }, + { skip: true } +); test( 'py3.7 uses static and download cache with dockerizePip option', From c86176b42f11540d52cf0001e3667a8dc3b1f868 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 30 Oct 2023 23:21:19 +0100 Subject: [PATCH 309/328] ci: Temporarily minimize testing matrix (#799) --- .github/workflows/integrate.yml | 57 ++---------------------------- .github/workflows/validate.yml | 61 +-------------------------------- 2 files changed, 3 insertions(+), 115 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index be4bd2c4..414f7d9b 100644 --- 
a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -10,65 +10,12 @@ env: FORCE_COLOR: 1 jobs: - windowsNode14: - name: '[Windows] Node.js v14: Unit tests' - runs-on: windows-latest - strategy: - matrix: - sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 14.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - - - name: Install serverless - run: npm install -g serverless@${{ matrix.sls-version }} - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test - linuxNode14: name: '[Linux] Node.js 14: Unit tests' runs-on: ubuntu-latest strategy: matrix: sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -101,7 +48,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -117,7 +64,7 @@ jobs: tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - needs: [windowsNode14, linuxNode14] + needs: [linuxNode14] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 79548057..8957e7f7 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -16,11 +16,6 @@ jobs: strategy: matrix: sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] - # pipenv 2202.8.13 marks deprecation of pipenv lock --requirements - # https://github.com/pypa/pipenv/blob/30067b458bd7a429f242736b7fde40c9bd4d4f14/CHANGELOG.rst#2022813-2022-08-13 - # pipenv 2023.7.9 marks deprecation of pipenv lock --keep-outdated - # https://github.com/pypa/pipenv/blob/30067b458bd7a429f242736b7fde40c9bd4d4f14/CHANGELOG.rst#202379-2023-07-09 steps: - name: Checkout repository uses: actions/checkout@v2 @@ -66,7 +61,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -92,57 +87,3 @@ jobs: fi - name: Unit tests run: npm test - - windowsNode14: - name: '[Windows] Node.js v14: Unit tests' - runs-on: windows-latest - strategy: - matrix: - sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - 
with: - path: | - ~/.npm - node_modules - key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: | - npm-v14-${{ runner.os }}-${{ github.ref }}- - npm-v14-${{ runner.os }}-refs/heads/master- - - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 14.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - - - name: Install serverless - run: npm install -g serverless@${{ matrix.sls-version }} - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test From bf6a4c34f0c6488b56dbc10427ab98c9cbe208f9 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 30 Oct 2023 23:23:10 +0100 Subject: [PATCH 310/328] ci: Temporarily disable test run on integrate (#800) --- .github/workflows/integrate.yml | 52 --------------------------------- 1 file changed, 52 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 414f7d9b..d2da520e 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -10,61 +10,9 @@ env: FORCE_COLOR: 1 jobs: - linuxNode14: - name: '[Linux] Node.js 14: Unit tests' - runs-on: ubuntu-latest - strategy: - matrix: - sls-version: [2, 3] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 14.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv poetry - - - name: Install serverless - run: npm install -g serverless@${{ matrix.sls-version }} - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test - tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - needs: [linuxNode14] steps: - name: Checkout repository uses: actions/checkout@v2 From 84ee8c5b1cba484f9c856541511d6bcfc5dba299 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 2 Nov 2023 09:49:49 +0100 Subject: [PATCH 311/328] Release v6.0.1 (#793) * chore: Bump dependencies * chore: Release v6.0.1 --- CHANGELOG.md | 12 ++++++++++++ package.json | 10 +++++----- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fb11a43a..f8667134 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,18 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+### [6.0.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.0...v6.0.1) (2023-10-22) + +### Bug Fixes + +- Add legacy `pipenv` backward compatability ([#742](https://github.com/UnitedIncome/serverless-python-requirements/issues/742)) ([22a1f83](https://github.com/UnitedIncome/serverless-python-requirements/commit/22a1f832ac8051f0963328743f9e768f8e66649e)) ([Randy Westergren](https://github.com/rwestergren)) +- Not crash when runtime is not `python` ([#773](https://github.com/UnitedIncome/serverless-python-requirements/issues/773)) ([c1f5ca1](https://github.com/UnitedIncome/serverless-python-requirements/commit/c1f5ca114de815ca19ad213a79e250b5b81f29b3)) ([Jim Kirkbride](https://github.com/jameskbride)) +- Remove outdated Pipenv requirements flag ([#780](https://github.com/UnitedIncome/serverless-python-requirements/issues/780)) ([ad40278](https://github.com/UnitedIncome/serverless-python-requirements/commit/ad40278629c63f4d0971637214b4d9bc20dbd288)) ([Jeff Gordon](https://github.com/jfgordon2)) + +### Maintenance Improvements + +- Fix integration test matrix configuration ([#755](https://github.com/UnitedIncome/serverless-python-requirements/issues/755)) ([e8b2e51](https://github.com/UnitedIncome/serverless-python-requirements/commit/e8b2e51c265792046bacc3946f22f7bd842c60e6)) ([Randy Westergren](https://github.com/rwestergren)) + ## [6.0.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.4.0...v6.0.0) (2022-10-23) ### ⚠ BREAKING CHANGES diff --git a/package.json b/package.json index 318eec59..c420e4d6 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "6.0.0", + "version": "6.0.1", "engines": { "node": ">=12.0" }, @@ -50,7 +50,7 @@ }, "devDependencies": { "cross-spawn": "*", - "eslint": "^8.26.0", + "eslint": "^8.52.0", "git-list-updated": "^1.2.1", "github-release-from-cc-changelog": "^2.3.0", "lodash": "^4.17.21", @@ -65,17 +65,17 @@ "bluebird": "^3.7.2", "child-process-ext": "^2.1.1", "fs-extra": "^10.1.0", - "glob-all": "^3.3.0", + "glob-all": "^3.3.1", "is-wsl": "^2.2.0", "jszip": "^3.10.1", "lodash.get": "^4.4.2", "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", - "semver": "^7.3.8", + "semver": "^7.5.4", "set-value": "^4.1.0", "sha256-file": "1.0.0", - "shell-quote": "^1.7.4" + "shell-quote": "^1.8.1" }, "peerDependencies": { "serverless": "^2.32 || 3" From ea2604ed29483ad04dbcf3a04d58961e88c6b3fb Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Fri, 17 Nov 2023 12:24:22 -0500 Subject: [PATCH 312/328] Update the description of myself (#802) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6032725a..37a9d1df 100644 --- a/README.md +++ b/README.md @@ -567,7 +567,7 @@ package: ## Contributors -- [@dschep](https://github.com/dschep) - Lead developer & original maintainer +- [@dschep](https://github.com/dschep) - Original developer - [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes - [@abetomo](https://github.com/abetomo) - style & linting - [@angstwad](https://github.com/angstwad) - `deploy --function` support From 421e9a6e9a168b741dbd0ce9b6c1d39f7d8f55b8 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 24 Nov 2023 23:48:09 +0100 Subject: [PATCH 313/328] ci: Pin versions to speed up poetry deps resolution (#806) --- tests/poetry/pyproject.toml | 8 ++++---- tests/poetry_individually/module1/pyproject.toml | 8 ++++---- 
tests/poetry_packages/pyproject.toml | 6 +++--- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/poetry/pyproject.toml b/tests/poetry/pyproject.toml index b813968a..896b48e7 100644 --- a/tests/poetry/pyproject.toml +++ b/tests/poetry/pyproject.toml @@ -5,13 +5,13 @@ description = "" authors = ["Your Name "] [tool.poetry.dependencies] -python = "^3.6" -Flask = "^1.0" +python = "^3.7" +Flask = "2.0" bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} -boto3 = "^1.9" +boto3 = "1.29.6" [tool.poetry.dev-dependencies] [build-system] -requires = ["poetry>=0.12"] +requires = ["poetry"] build-backend = "poetry.masonry.api" diff --git a/tests/poetry_individually/module1/pyproject.toml b/tests/poetry_individually/module1/pyproject.toml index b813968a..896b48e7 100644 --- a/tests/poetry_individually/module1/pyproject.toml +++ b/tests/poetry_individually/module1/pyproject.toml @@ -5,13 +5,13 @@ description = "" authors = ["Your Name "] [tool.poetry.dependencies] -python = "^3.6" -Flask = "^1.0" +python = "^3.7" +Flask = "2.0" bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} -boto3 = "^1.9" +boto3 = "1.29.6" [tool.poetry.dev-dependencies] [build-system] -requires = ["poetry>=0.12"] +requires = ["poetry"] build-backend = "poetry.masonry.api" diff --git a/tests/poetry_packages/pyproject.toml b/tests/poetry_packages/pyproject.toml index 7bbe30bf..0f9fc705 100644 --- a/tests/poetry_packages/pyproject.toml +++ b/tests/poetry_packages/pyproject.toml @@ -5,14 +5,14 @@ description = "" authors = ["Your Name "] [tool.poetry.dependencies] -python = "^3.6" -Flask = "^1.0" +python = "^3.7" +Flask = "2.0" [tool.poetry.group.custom1.dependencies] bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} [tool.poetry.group.custom2.dependencies] -boto3 = "^1.9" +boto3 = "1.29.6" [build-system] requires = ["poetry-core"] From e33b02da750acfc40b3d341c35edc71a7bea08aa Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sat, 25 Nov 2023 00:11:54 +0100 Subject: [PATCH 314/328] [ci] Update to Node18 (#803) --- .github/workflows/publish.yml | 4 ++-- .github/workflows/validate.yml | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 6eee5b45..21d7cb71 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -26,12 +26,12 @@ jobs: path: | ~/.npm node_modules - key: npm-v14-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} + key: npm-v18-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} - name: Install Node.js and npm uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 18.x registry-url: https://registry.npmjs.org - name: Publish new version diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 8957e7f7..03eea961 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -10,8 +10,8 @@ env: FORCE_COLOR: 1 jobs: - linuxNode14: - name: '[Linux] Node.js v14: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' + linuxNode18: + name: '[Linux] Node.js v18: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' runs-on: ubuntu-latest strategy: matrix: @@ -38,10 +38,10 @@ jobs: path: | ~/.npm node_modules - key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + key: npm-v18-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} 
restore-keys: | - npm-v14-${{ runner.os }}-${{ github.ref }}- - npm-v14-${{ runner.os }}-refs/heads/master- + npm-v18-${{ runner.os }}-${{ github.ref }}- + npm-v18-${{ runner.os }}-refs/heads/master- - name: Set up Python 3.7 uses: actions/setup-python@v2 @@ -51,7 +51,7 @@ jobs: - name: Install Node.js and npm uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 18.x - name: Check python version run: | From 4bbb80ed0c0150e04696513f37537eb3ab0002a4 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sat, 25 Nov 2023 21:51:04 +0100 Subject: [PATCH 315/328] test: Update to py3.9 (#808) --- .github/workflows/validate.yml | 4 +- example/serverless.yml | 2 +- example_native_deps/serverless.yml | 2 +- test.js | 117 +++++++++++----------- tests/base/serverless.yml | 2 +- tests/individually/serverless.yml | 2 +- tests/non_build_pyproject/serverless.yml | 2 +- tests/non_poetry_pyproject/serverless.yml | 2 +- tests/pipenv/serverless.yml | 2 +- tests/poetry/serverless.yml | 2 +- tests/poetry_individually/serverless.yml | 2 +- tests/poetry_packages/serverless.yml | 2 +- 12 files changed, 72 insertions(+), 69 deletions(-) diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 03eea961..61935c3e 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -43,10 +43,10 @@ jobs: npm-v18-${{ runner.os }}-${{ github.ref }}- npm-v18-${{ runner.os }}-refs/heads/master- - - name: Set up Python 3.7 + - name: Set up Python 3.9 uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: 3.9 - name: Install Node.js and npm uses: actions/setup-node@v1 diff --git a/example/serverless.yml b/example/serverless.yml index 349cdcb8..e5c4c924 100644 --- a/example/serverless.yml +++ b/example/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/example_native_deps/serverless.yml b/example_native_deps/serverless.yml index 4deed44a..cfbd4913 100644 --- a/example_native_deps/serverless.yml +++ b/example_native_deps/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/test.js b/test.js index a4bb992b..f4afca45 100644 --- a/test.js +++ b/test.js @@ -234,7 +234,7 @@ test('default pythonBin can package flask with default options', async (t) => { t.end(); }); -test('py3.7 packages have the same hash', async (t) => { +test('py3.9 packages have the same hash', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -249,7 +249,7 @@ test('py3.7 packages have the same hash', async (t) => { t.end(); }); -test('py3.7 can package flask with default options', async (t) => { +test('py3.9 can package flask with default options', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -261,7 +261,7 @@ test('py3.7 can package flask with default options', async (t) => { }); test( - 'py3.7 can package flask with hashes', + 'py3.9 can package flask with hashes', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -279,7 +279,7 @@ test( { skip: brokenOn('win32') } ); -test('py3.7 can package flask with nested', async (t) => { +test('py3.9 can package flask with nested', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -295,7 +295,7 @@ test('py3.7 can 
package flask with nested', async (t) => { t.end(); }); -test('py3.7 can package flask with zip option', async (t) => { +test('py3.9 can package flask with zip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -313,7 +313,7 @@ test('py3.7 can package flask with zip option', async (t) => { t.end(); }); -test('py3.7 can package flask with slim option', async (t) => { +test('py3.9 can package flask with slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -332,7 +332,7 @@ test('py3.7 can package flask with slim option', async (t) => { t.end(); }); -test('py3.7 can package flask with slim & slimPatterns options', async (t) => { +test('py3.9 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -353,7 +353,7 @@ test('py3.7 can package flask with slim & slimPatterns options', async (t) => { t.end(); }); -test("py3.7 doesn't package bottle with noDeploy option", async (t) => { +test("py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -371,7 +371,7 @@ test("py3.7 doesn't package bottle with noDeploy option", async (t) => { t.end(); }); -test('py3.7 can package boto3 with editable', async (t) => { +test('py3.9 can package boto3 with editable', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -391,7 +391,7 @@ test('py3.7 can package boto3 with editable', async (t) => { }); test( - 'py3.7 can package flask with dockerizePip option', + 'py3.9 can package flask with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -406,7 +406,7 @@ test( ); test( - 'py3.7 can package flask with slim & dockerizePip option', + 'py3.9 can package flask with slim & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -430,7 +430,7 @@ test( ); test( - 'py3.7 can package flask with slim & dockerizePip & slimPatterns options', + 'py3.9 can package flask with slim & dockerizePip & slimPatterns options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -455,7 +455,7 @@ test( ); test( - 'py3.7 can package flask with zip & dockerizePip option', + 'py3.9 can package flask with zip & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -487,7 +487,7 @@ test( ); test( - 'py3.7 can package flask with zip & slim & dockerizePip option', + 'py3.9 can package flask with zip & slim & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -520,7 +520,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('pipenv py3.7 can package flask with default options', async (t) => { +test('pipenv py3.9 can package flask with default options', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -535,7 +535,7 @@ test('pipenv py3.7 can package flask with default options', async (t) => { t.end(); }); -test('pipenv py3.7 can package flask with slim option', async (t) => { +test('pipenv py3.9 can package flask with slim option', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); 
@@ -554,7 +554,7 @@ test('pipenv py3.7 can package flask with slim option', async (t) => { t.end(); }); -test('pipenv py3.7 can package flask with slim & slimPatterns options', async (t) => { +test('pipenv py3.9 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -576,7 +576,7 @@ test('pipenv py3.7 can package flask with slim & slimPatterns options', async (t t.end(); }); -test('pipenv py3.7 can package flask with zip option', async (t) => { +test('pipenv py3.9 can package flask with zip option', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -594,7 +594,7 @@ test('pipenv py3.7 can package flask with zip option', async (t) => { t.end(); }); -test("pipenv py3.7 doesn't package bottle with noDeploy option", async (t) => { +test("pipenv py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -633,7 +633,7 @@ test('non poetry pyproject.toml without requirements.txt packages handler only', t.end(); }); -test('poetry py3.7 can package flask with default options', async (t) => { +test('poetry py3.9 can package flask with default options', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -645,7 +645,7 @@ test('poetry py3.7 can package flask with default options', async (t) => { t.end(); }); -test('poetry py3.7 can package flask with slim option', async (t) => { +test('poetry py3.9 can package flask with slim option', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -664,7 +664,7 @@ test('poetry py3.7 can package flask with slim option', async (t) => { t.end(); }); -test('poetry py3.7 can package flask with slim & slimPatterns options', async (t) => { +test('poetry py3.9 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -686,7 +686,7 @@ test('poetry py3.7 can package flask with slim & slimPatterns options', async (t t.end(); }); -test('poetry py3.7 can package flask with zip option', async (t) => { +test('poetry py3.9 can package flask with zip option', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -704,7 +704,7 @@ test('poetry py3.7 can package flask with zip option', async (t) => { t.end(); }); -test("poetry py3.7 doesn't package bottle with noDeploy option", async (t) => { +test("poetry py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -722,7 +722,7 @@ test("poetry py3.7 doesn't package bottle with noDeploy option", async (t) => { t.end(); }); -test('py3.7 can package flask with zip option and no explicit include', async (t) => { +test('py3.9 can package flask with zip option and no explicit include', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -742,7 +742,7 @@ test('py3.7 can package flask with zip option and no explicit include', async (t t.end(); }); -test('py3.7 can package lambda-decorators using vendor option', async (t) => { +test('py3.9 can package lambda-decorators using vendor option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -805,7 +805,7 @@ 
test( { skip: process.platform === 'win32' } ); -test('py3.7 can package flask in a project with a space in it', async (t) => { +test('py3.9 can package flask in a project with a space in it', async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); @@ -818,7 +818,7 @@ test('py3.7 can package flask in a project with a space in it', async (t) => { }); test( - 'py3.7 can package flask in a project with a space in it with docker', + 'py3.9 can package flask in a project with a space in it with docker', async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); @@ -833,7 +833,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('py3.7 supports custom file name with fileName option', async (t) => { +test('py3.9 supports custom file name with fileName option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); writeFileSync('puck', 'requests'); @@ -849,7 +849,7 @@ test('py3.7 supports custom file name with fileName option', async (t) => { t.end(); }); -test("py3.7 doesn't package bottle with zip option", async (t) => { +test("py3.9 doesn't package bottle with zip option", async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -885,7 +885,7 @@ test("py3.7 doesn't package bottle with zip option", async (t) => { t.end(); }); -test('py3.7 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { +test('py3.9 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -908,7 +908,7 @@ test('py3.7 can package flask with slim, slimPatterns & slimPatternsAppendDefaul }); test( - 'py3.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + 'py3.9 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -937,7 +937,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('pipenv py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { +test('pipenv py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -960,7 +960,7 @@ test('pipenv py3.7 can package flask with slim & slimPatterns & slimPatternsAppe t.end(); }); -test('poetry py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { +test('poetry py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -983,7 +983,7 @@ test('poetry py3.7 can package flask with slim & slimPatterns & slimPatternsAppe t.end(); }); -test('poetry py3.7 can package flask with package individually option', async (t) => { +test('poetry py3.9 can package flask with package individually option', async (t) => { process.chdir('tests/poetry_individually'); const path = npm(['pack', '../..']); npm(['i', path]); @@ 
-998,7 +998,7 @@ test('poetry py3.7 can package flask with package individually option', async (t t.end(); }); -test('py3.7 can package flask with package individually option', async (t) => { +test('py3.9 can package flask with package individually option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1080,7 +1080,7 @@ test('py3.7 can package flask with package individually option', async (t) => { t.end(); }); -test('py3.7 can package flask with package individually & slim option', async (t) => { +test('py3.9 can package flask with package individually & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1162,7 +1162,7 @@ test('py3.7 can package flask with package individually & slim option', async (t t.end(); }); -test('py3.7 can package only requirements of module', async (t) => { +test('py3.9 can package only requirements of module', async (t) => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1218,7 +1218,7 @@ test('py3.7 can package only requirements of module', async (t) => { t.end(); }); -test('py3.7 can package lambda-decorators using vendor and invidiually option', async (t) => { +test('py3.9 can package lambda-decorators using vendor and invidiually option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1373,7 +1373,8 @@ test( { skip: !canUseDocker() || process.platform === 'win32' } ); -test('py3.7 uses download cache by default option', +test( + 'py3.9 uses download cache by default option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1381,7 +1382,7 @@ test('py3.7 uses download cache by default option', sls(['package'], { env: {} }); const cachepath = getUserCachePath(); t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), 'cache directory exists' ); t.end(); @@ -1389,7 +1390,8 @@ test('py3.7 uses download cache by default option', { skip: true } ); -test('py3.7 uses download cache by default', +test( + 'py3.9 uses download cache by default', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1405,7 +1407,7 @@ test('py3.7 uses download cache by default', ); test( - 'py3.7 uses download cache with dockerizePip option', + 'py3.9 uses download cache with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1423,7 +1425,7 @@ test( ); test( - 'py3.7 uses download cache with dockerizePip by default option', + 'py3.9 uses download cache with dockerizePip by default option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1441,7 +1443,8 @@ test( { skip: true } ); -test('py3.7 uses static and download cache', +test( + 'py3.9 uses static and download cache', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1466,7 +1469,7 @@ test('py3.7 uses static and download cache', ); test( - 'py3.7 uses static and download cache with dockerizePip option', + 'py3.9 uses static and download cache with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1490,7 +1493,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('py3.7 uses static cache', async (t) => { +test('py3.9 uses static cache', async (t) => { 
process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1511,7 +1514,7 @@ test('py3.7 uses static cache', async (t) => { '.completed_requirements exists in static-cache' ); - // py3.7 checking that static cache actually pulls from cache (by poisoning it) + // py3.9 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' @@ -1526,7 +1529,7 @@ test('py3.7 uses static cache', async (t) => { t.end(); }); -test('py3.7 uses static cache with cacheLocation option', async (t) => { +test('py3.9 uses static cache with cacheLocation option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1550,7 +1553,7 @@ test('py3.7 uses static cache with cacheLocation option', async (t) => { }); test( - 'py3.7 uses static cache with dockerizePip & slim option', + 'py3.9 uses static cache with dockerizePip & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1572,7 +1575,7 @@ test( '.completed_requirements exists in static-cache' ); - // py3.7 checking that static cache actually pulls from cache (by poisoning it) + // py3.9 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' @@ -1595,7 +1598,7 @@ test( ); test( - 'py3.7 uses download cache with dockerizePip & slim option', + 'py3.9 uses download cache with dockerizePip & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1620,7 +1623,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('py3.7 can ignore functions defined with `image`', async (t) => { +test('py3.9 can ignore functions defined with `image`', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1646,7 +1649,7 @@ test('py3.7 can ignore functions defined with `image`', async (t) => { t.end(); }); -test('poetry py3.7 fails packaging if poetry.lock is missing and flag requirePoetryLockFile is set to true', async (t) => { +test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoetryLockFile is set to true', async (t) => { copySync('tests/poetry', 'tests/base with a space'); process.chdir('tests/base with a space'); removeSync('poetry.lock'); @@ -1678,7 +1681,7 @@ test('works with provider.runtime not being python', async (t) => { t.end(); }); -test('poetry py3.7 packages additional optional packages', async (t) => { +test('poetry py3.9 packages additional optional packages', async (t) => { process.chdir('tests/poetry_packages'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1694,7 +1697,7 @@ test('poetry py3.7 packages additional optional packages', async (t) => { t.end(); }); -test('poetry py3.7 skips additional optional packages specified in withoutGroups', async (t) => { +test('poetry py3.9 skips additional optional packages specified in withoutGroups', async (t) => { process.chdir('tests/poetry_packages'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1711,7 +1714,7 @@ test('poetry py3.7 skips additional optional packages specified in withoutGroups t.end(); }); -test('poetry py3.7 only installs optional packages specified in onlyGroups', async (t) => { +test('poetry py3.9 only installs 
optional packages specified in onlyGroups', async (t) => { process.chdir('tests/poetry_packages'); const path = npm(['pack', '../..']); npm(['i', path]); diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index ef48e901..a82187ff 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: ${env:runtime, 'python3.7'} + runtime: ${env:runtime, 'python3.9'} plugins: - serverless-python-requirements diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index d73d613a..6409532b 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test-indiv provider: name: aws - runtime: python3.7 + runtime: python3.9 package: individually: true diff --git a/tests/non_build_pyproject/serverless.yml b/tests/non_build_pyproject/serverless.yml index b0436e61..d1bbaee6 100644 --- a/tests/non_build_pyproject/serverless.yml +++ b/tests/non_build_pyproject/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/non_poetry_pyproject/serverless.yml b/tests/non_poetry_pyproject/serverless.yml index 2b16790c..7338b10b 100644 --- a/tests/non_poetry_pyproject/serverless.yml +++ b/tests/non_poetry_pyproject/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index 315f6741..2b471526 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml index 2d032acd..d10c4997 100644 --- a/tests/poetry/serverless.yml +++ b/tests/poetry/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/poetry_individually/serverless.yml b/tests/poetry_individually/serverless.yml index 527a2846..86dbb547 100644 --- a/tests/poetry_individually/serverless.yml +++ b/tests/poetry_individually/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/poetry_packages/serverless.yml b/tests/poetry_packages/serverless.yml index 03652968..c6972ede 100644 --- a/tests/poetry_packages/serverless.yml +++ b/tests/poetry_packages/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements From 16c0e68b850d62eb1ce127b9c3886857ca955574 Mon Sep 17 00:00:00 2001 From: Carl Walsh Date: Mon, 1 Jan 2024 10:59:03 -0800 Subject: [PATCH 316/328] docs: Describe `cleanCache` in README (#794) --- README.md | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 37a9d1df..91172bf9 100644 --- a/README.md +++ b/README.md @@ -450,12 +450,27 @@ functions: vendor: ./hello-vendor # The option is also available at the function level ``` -## Manual invocations +## Manual invocation -The `.requirements` and `requirements.zip`(if using zip support) files are left 
-behind to speed things up on subsequent deploys. To clean them up, run -`sls requirements clean`. You can also create them (and `unzip_requirements` if -using zip support) manually with `sls requirements install`. +The `.requirements` and `requirements.zip` (if using zip support) files are left +behind to speed things up on subsequent deploys. To clean them up, run: + +```plaintext +sls requirements clean +``` + +You can also create them (and `unzip_requirements` if +using zip support) manually with: + +```plaintext +sls requirements install +``` + +The pip download/static cache is outside the serverless folder, and should be manually cleaned when i.e. changing python versions: + +```plaintext +sls requirements cleanCache +``` ## Invalidate requirements caches on package From 1b0faaeb6aadd2bc4b1b53526e35298a98d00aca Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 14 Jan 2024 22:46:13 +0100 Subject: [PATCH 317/328] feat: Support Scaleway provider (#812) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andy Méry --- README.md | 25 +++++++++++++++++ index.js | 5 +++- lib/inject.js | 23 +++++++++++++-- test.js | 17 ++++++++++++ tests/scaleway_provider/_slimPatterns.yml | 2 ++ tests/scaleway_provider/handler.py | 5 ++++ tests/scaleway_provider/package.json | 15 ++++++++++ tests/scaleway_provider/requirements.txt | 3 ++ tests/scaleway_provider/serverless.yml | 34 +++++++++++++++++++++++ 9 files changed, 126 insertions(+), 3 deletions(-) create mode 100644 tests/scaleway_provider/_slimPatterns.yml create mode 100644 tests/scaleway_provider/handler.py create mode 100644 tests/scaleway_provider/package.json create mode 100644 tests/scaleway_provider/requirements.txt create mode 100644 tests/scaleway_provider/serverless.yml diff --git a/README.md b/README.md index 91172bf9..d9127adb 100644 --- a/README.md +++ b/README.md @@ -580,6 +580,31 @@ package: - '**' ``` +## Custom Provider Support + +### Scaleway + +This plugin is compatible with the [Scaleway Serverless Framework Plugin](https://github.com/scaleway/serverless-scaleway-functions) to package dependencies for Python functions deployed on [Scaleway](https://www.scaleway.com/en/serverless-functions/). To use it, add the following to your `serverless.yml`: + +```yaml +provider: + name: scaleway + runtime: python311 + +plugins: + - serverless-python-requirements + - serverless-scaleway-functions +``` + +To handle native dependencies, it's recommended to use the Docker builder with the image provided by Scaleway: + +```yaml +custom: + pythonRequirements: + # Can use any Python version supported by Scaleway + dockerImage: rg.fr-par.scw.cloud/scwfunctionsruntimes-public/python-dep:3.11 +``` + ## Contributors - [@dschep](https://github.com/dschep) - Original developer diff --git a/index.js b/index.js index 246b121e..25cc34cd 100644 --- a/index.js +++ b/index.js @@ -72,7 +72,10 @@ class ServerlessPythonRequirements { ) { options.pythonBin = 'python'; } - + if (/python3[0-9]+/.test(options.pythonBin)) { + // "google" and "scaleway" providers' runtimes uses python3XX + options.pythonBin = options.pythonBin.replace(/3([0-9]+)/, '3.$1'); + } if (options.dockerizePip === 'non-linux') { options.dockerizePip = process.platform !== 'linux'; } diff --git a/lib/inject.js b/lib/inject.js index ea20e58d..12267376 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -13,10 +13,16 @@ BbPromise.promisifyAll(fse); * Inject requirements into packaged application. 
* @param {string} requirementsPath requirements folder path * @param {string} packagePath target package path + * @param {string} injectionRelativePath installation directory in target package * @param {Object} options our options object * @return {Promise} the JSZip object constructed. */ -function injectRequirements(requirementsPath, packagePath, options) { +function injectRequirements( + requirementsPath, + packagePath, + injectionRelativePath, + options +) { const noDeploy = new Set(options.noDeploy || []); return fse @@ -29,7 +35,13 @@ function injectRequirements(requirementsPath, packagePath, options) { dot: true, }) ) - .map((file) => [file, path.relative(requirementsPath, file)]) + .map((file) => [ + file, + path.join( + injectionRelativePath, + path.relative(requirementsPath, file) + ), + ]) .filter( ([file, relativeFile]) => !file.endsWith('/') && @@ -101,6 +113,11 @@ async function injectAllRequirements(funcArtifact) { this.serverless.cli.log('Injecting required Python packages to package...'); } + let injectionRelativePath = '.'; + if (this.serverless.service.provider.name == 'scaleway') { + injectionRelativePath = 'package'; + } + try { if (this.serverless.service.package.individually) { await BbPromise.resolve(this.targetFuncs) @@ -138,6 +155,7 @@ async function injectAllRequirements(funcArtifact) { : injectRequirements( path.join('.serverless', func.module, 'requirements'), func.package.artifact, + injectionRelativePath, this.options ); }); @@ -145,6 +163,7 @@ async function injectAllRequirements(funcArtifact) { await injectRequirements( path.join('.serverless', 'requirements'), this.serverless.service.package.artifact || funcArtifact, + injectionRelativePath, this.options ); } diff --git a/test.js b/test.js index f4afca45..fad21273 100644 --- a/test.js +++ b/test.js @@ -1729,3 +1729,20 @@ test('poetry py3.9 only installs optional packages specified in onlyGroups', asy t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }); + +test('py3.7 injects dependencies into `package` folder when using scaleway provider', async (t) => { + process.chdir('tests/scaleway_provider'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes(`package${sep}flask${sep}__init__.py`), + 'flask is packaged' + ); + t.true( + zipfiles.includes(`package${sep}boto3${sep}__init__.py`), + 'boto3 is packaged' + ); + t.end(); +}); diff --git a/tests/scaleway_provider/_slimPatterns.yml b/tests/scaleway_provider/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/scaleway_provider/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git a/tests/scaleway_provider/handler.py b/tests/scaleway_provider/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/scaleway_provider/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/scaleway_provider/package.json b/tests/scaleway_provider/package.json new file mode 100644 index 00000000..d54b88e0 --- /dev/null +++ b/tests/scaleway_provider/package.json @@ -0,0 +1,15 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + 
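An aside for readers following the Scaleway support added in the hunks above: a minimal standalone sketch of the two behaviours it introduces, the `python3XX` runtime normalisation in index.js and the `package/` prefix applied by lib/inject.js. The names `normalizePythonBin` and `injectedPath` are illustrative only and are not identifiers used by the plugin.

```js
const path = require('path');

// Scaleway (and Google) name Python runtimes like "python311"; the new
// index.js branch maps that onto a dotted interpreter name for pip.
function normalizePythonBin(pythonBin) {
  return /python3[0-9]+/.test(pythonBin)
    ? pythonBin.replace(/3([0-9]+)/, '3.$1')
    : pythonBin;
}

// For the Scaleway provider, injected requirements land under "package/"
// instead of the zip root.
function injectedPath(injectionRelativePath, relativeFile) {
  return path.join(injectionRelativePath, relativeFile);
}

console.log(normalizePythonBin('python311')); // python3.11
console.log(injectedPath('package', 'flask/__init__.py')); // package/flask/__init__.py (POSIX separators)
console.log(injectedPath('.', 'flask/__init__.py')); // flask/__init__.py
```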
"serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz", + "serverless-scaleway-functions": "^0.4.8" + } +} diff --git a/tests/scaleway_provider/requirements.txt b/tests/scaleway_provider/requirements.txt new file mode 100644 index 00000000..23bfb7a6 --- /dev/null +++ b/tests/scaleway_provider/requirements.txt @@ -0,0 +1,3 @@ +flask==0.12.5 +bottle +boto3 diff --git a/tests/scaleway_provider/serverless.yml b/tests/scaleway_provider/serverless.yml new file mode 100644 index 00000000..5d827bdf --- /dev/null +++ b/tests/scaleway_provider/serverless.yml @@ -0,0 +1,34 @@ +service: sls-py-req-test + +configValidationMode: off + +provider: + name: scaleway + runtime: python39 + +plugins: + - serverless-python-requirements + - serverless-scaleway-functions + +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello From 787b4791306e9a3ded5f0177c304cfbce081c119 Mon Sep 17 00:00:00 2001 From: Justin Lyons Date: Sat, 10 Feb 2024 04:26:35 -0500 Subject: [PATCH 318/328] feat: Improved pip failure logging (#813) Co-authored-by: Justin Lyons --- lib/pip.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index 149c0285..060ce829 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -423,8 +423,8 @@ async function installRequirements(targetFolder, pluginInstance) { } if (log) { - log.info(`Stdout: ${e.stdoutBuffer}`); - log.info(`Stderr: ${e.stderrBuffer}`); + log.error(`Stdout: ${e.stdoutBuffer}`); + log.error(`Stderr: ${e.stderrBuffer}`); } else { serverless.cli.log(`Stdout: ${e.stdoutBuffer}`); serverless.cli.log(`Stderr: ${e.stderrBuffer}`); From 27b70f4d6a7e43fd0e9711bbb56752fee2762901 Mon Sep 17 00:00:00 2001 From: Stijn IJzermans Date: Sat, 10 Feb 2024 10:27:37 +0100 Subject: [PATCH 319/328] fix: Ensure proper support for mixed runtimes and architectures (#815) * feat: Use function runtime & arch for docker * docs: Update readme for python3.9 * feat: Do not zip req for non-py functions * ci: Bump internal package version / python version * fix: Rename mixed test name to be more descriptive --------- Co-authored-by: Stijn IJzermans --- .python-version | 2 +- CONTRIBUTING.md | 10 ++- index.js | 19 +++-- lib/pip.js | 13 +-- lib/zip.js | 5 ++ test.js | 83 +++++++++++++++++++ tests/base/package.json | 2 +- tests/individually/package.json | 2 +- .../module1/handler1.ts | 3 + .../module2/handler2.py | 6 ++ .../module2/requirements.txt | 1 + tests/individually_mixed_runtime/package.json | 14 ++++ .../requirements-common.txt | 1 + .../individually_mixed_runtime/serverless.yml | 39 +++++++++ tests/non_build_pyproject/package.json | 2 +- tests/non_poetry_pyproject/package.json | 2 +- tests/pipenv/package.json | 2 +- tests/poetry/package.json | 2 +- tests/poetry_individually/package.json | 2 +- tests/poetry_packages/package.json | 2 +- 20 files changed, 187 insertions(+), 25 deletions(-) create mode 100644 tests/individually_mixed_runtime/module1/handler1.ts create mode 100644 
tests/individually_mixed_runtime/module2/handler2.py create mode 100644 tests/individually_mixed_runtime/module2/requirements.txt create mode 100644 tests/individually_mixed_runtime/package.json create mode 100644 tests/individually_mixed_runtime/requirements-common.txt create mode 100644 tests/individually_mixed_runtime/serverless.yml diff --git a/.python-version b/.python-version index 475ba515..bd28b9c5 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.7 +3.9 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4616858b..900a425b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -13,12 +13,14 @@ Welcome, and thanks in advance for your help! ## Setup Pre-Reqs: -* Python 3.7 -* [poetry](https://python-poetry.org/docs/) (if you use multiple versions of Python be sure to install it with python 3.7) -* Perl (used in the tests) -* Node v14 or v16 + +- Python 3.9 +- [poetry](https://python-poetry.org/docs/) (if you use multiple versions of Python be sure to install it with python 3.9) +- Perl (used in the tests) +- Node v14 or v16 Then, to begin development: + 1. fork the repository 2. `npm install -g serverless@` (check the peer dependencies in the root `package.json` file for the version) 3. run `npm install` in its root folder diff --git a/index.js b/index.js index 25cc34cd..81e50981 100644 --- a/index.js +++ b/index.js @@ -106,13 +106,8 @@ class ServerlessPythonRequirements { throw new Error( 'Python Requirements: you can provide a dockerImage or a dockerFile option, not both.' ); - } else if (!options.dockerFile) { - // If no dockerFile is provided, use default image - const architecture = - this.serverless.service.provider.architecture || 'x86_64'; - const defaultImage = `public.ecr.aws/sam/build-${this.serverless.service.provider.runtime}:latest-${architecture}`; - options.dockerImage = options.dockerImage || defaultImage; } + if (options.layer) { // If layer was set as a boolean, set it to an empty object to use the layer defaults. if (options.layer === true) { @@ -188,6 +183,18 @@ class ServerlessPythonRequirements { this.commands.requirements.type = 'container'; } + this.dockerImageForFunction = (funcOptions) => { + const runtime = + funcOptions.runtime || this.serverless.service.provider.runtime; + + const architecture = + funcOptions.architecture || + this.serverless.service.provider.architecture || + 'x86_64'; + const defaultImage = `public.ecr.aws/sam/build-${runtime}:latest-${architecture}`; + return this.options.dockerImage || defaultImage; + }; + const isFunctionRuntimePython = (args) => { // If functionObj.runtime is undefined, python. 
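The `dockerImageForFunction` helper added above is the core of the mixed-runtime fix: the per-function runtime and architecture now drive the default build image instead of the provider-level values alone. A standalone sketch of that resolution follows; `resolveDockerImage` and its parameters are illustrative names, but the fallback order and the `public.ecr.aws/sam/build-...` template mirror the hunk.

```js
// Function-level runtime/architecture win over provider-level values, and an
// explicit dockerImage option wins over the computed default.
function resolveDockerImage(funcOptions, provider, dockerImage) {
  const runtime = funcOptions.runtime || provider.runtime;
  const architecture =
    funcOptions.architecture || provider.architecture || 'x86_64';
  const defaultImage = `public.ecr.aws/sam/build-${runtime}:latest-${architecture}`;
  return dockerImage || defaultImage;
}

// A python3.9/x86_64 function in a nodejs18.x/arm64 service gets its own image:
console.log(
  resolveDockerImage(
    { runtime: 'python3.9', architecture: 'x86_64' },
    { runtime: 'nodejs18.x', architecture: 'arm64' }
  )
); // public.ecr.aws/sam/build-python3.9:latest-x86_64
```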
if (!args[1].functionObj || !args[1].functionObj.runtime) { diff --git a/lib/pip.js b/lib/pip.js index 060ce829..16a802b0 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -125,12 +125,13 @@ async function pipAcceptsSystem(pythonBin, pluginInstance) { /** * Install requirements described from requirements in the targetFolder into that same targetFolder * @param {string} targetFolder - * @param {Object} serverless - * @param {Object} options + * @param {Object} pluginInstance + * @param {Object} funcOptions * @return {undefined} */ -async function installRequirements(targetFolder, pluginInstance) { - const { options, serverless, log, progress } = pluginInstance; +async function installRequirements(targetFolder, pluginInstance, funcOptions) { + const { options, serverless, log, progress, dockerImageForFunction } = + pluginInstance; const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); let installProgress; @@ -253,7 +254,7 @@ async function installRequirements(targetFolder, pluginInstance) { buildDockerImageProgress && buildDockerImageProgress.remove(); } } else { - dockerImage = options.dockerImage; + dockerImage = dockerImageForFunction(funcOptions); } if (log) { log.info(`Docker Image: ${dockerImage}`); @@ -691,7 +692,7 @@ async function installRequirementsIfNeeded( fse.copySync(slsReqsTxt, path.join(workingReqsFolder, 'requirements.txt')); // Then install our requirements from this folder - await installRequirements(workingReqsFolder, pluginInstance); + await installRequirements(workingReqsFolder, pluginInstance, funcOptions); // Copy vendor libraries to requirements folder if (options.vendor) { diff --git a/lib/zip.js b/lib/zip.js index 4b652f98..3c21bbbf 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -114,6 +114,11 @@ function packRequirements() { if (this.options.zip) { if (this.serverless.service.package.individually) { return BbPromise.resolve(this.targetFuncs) + .filter((func) => { + return ( + func.runtime || this.serverless.service.provider.runtime + ).match(/^python.*/); + }) .map((f) => { if (!get(f, 'module')) { set(f, ['module'], '.'); diff --git a/test.js b/test.js index fad21273..b97f3fdc 100644 --- a/test.js +++ b/test.js @@ -1373,6 +1373,89 @@ test( { skip: !canUseDocker() || process.platform === 'win32' } ); +test( + 'py3.9 can package flask running in docker with module runtime & architecture of function', + async (t) => { + process.chdir('tests/individually_mixed_runtime'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { + env: { dockerizePip: 'true' }, + }); + + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + }, + { + skip: !canUseDocker() || process.platform === 'win32', + } +); + +test( + 'py3.9 can package flask succesfully when using mixed architecture, docker and zipping', + async (t) => { + process.chdir('tests/individually_mixed_runtime'); + const path = npm(['pack', '../..']); + + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } }); + + const zipfiles_hello = await listZipFiles('.serverless/hello1.zip'); + t.true( + zipfiles_hello.includes(`module1${sep}handler1.ts`), + 'handler1.ts is packaged in module dir for hello1' + ); + t.false( + zipfiles_hello.includes('handler2.py'), + 
'handler2.py is NOT packaged at root level in function hello1' + ); + t.false( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello1' + ); + + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip' + ); + const zippedReqs = await listRequirementsZipFiles( + '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.false( + zipfiles_hello2.includes(`module1${sep}handler1.ts`), + 'handler1.ts is NOT included at module1 level in hello2' + ); + t.false( + zipfiles_hello2.includes(`pyaml${sep}__init__.py`), + 'pyaml is NOT packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT included in zipfile' + ); + t.true( + zippedReqs.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2 in requirements.zip' + ); + + t.end(); + }, + { skip: !canUseDocker() || process.platform === 'win32' } +); + test( 'py3.9 uses download cache by default option', async (t) => { diff --git a/tests/base/package.json b/tests/base/package.json index 781a4259..b07744c9 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/individually/package.json b/tests/individually/package.json index 781a4259..b07744c9 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/individually_mixed_runtime/module1/handler1.ts b/tests/individually_mixed_runtime/module1/handler1.ts new file mode 100644 index 00000000..b8062f8b --- /dev/null +++ b/tests/individually_mixed_runtime/module1/handler1.ts @@ -0,0 +1,3 @@ +function hello() { + return "hello" +} diff --git a/tests/individually_mixed_runtime/module2/handler2.py b/tests/individually_mixed_runtime/module2/handler2.py new file mode 100644 index 00000000..d9f5c465 --- /dev/null +++ b/tests/individually_mixed_runtime/module2/handler2.py @@ -0,0 +1,6 @@ +import flask + +def hello(event, context): + return { + 'status': 200, + } diff --git a/tests/individually_mixed_runtime/module2/requirements.txt b/tests/individually_mixed_runtime/module2/requirements.txt new file mode 100644 index 00000000..c09d0264 --- /dev/null +++ b/tests/individually_mixed_runtime/module2/requirements.txt @@ -0,0 +1 @@ +flask==2.0.3 diff --git a/tests/individually_mixed_runtime/package.json b/tests/individually_mixed_runtime/package.json new file mode 100644 index 00000000..b07744c9 --- /dev/null +++ b/tests/individually_mixed_runtime/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/individually_mixed_runtime/requirements-common.txt 
b/tests/individually_mixed_runtime/requirements-common.txt new file mode 100644 index 00000000..30ddf823 --- /dev/null +++ b/tests/individually_mixed_runtime/requirements-common.txt @@ -0,0 +1 @@ +boto3 diff --git a/tests/individually_mixed_runtime/serverless.yml b/tests/individually_mixed_runtime/serverless.yml new file mode 100644 index 00000000..7c602239 --- /dev/null +++ b/tests/individually_mixed_runtime/serverless.yml @@ -0,0 +1,39 @@ +service: sls-py-req-test-indiv-mixed-runtime + +provider: + name: aws + runtime: nodejs18.x + architecture: arm64 + +package: + individually: true + +custom: + pythonRequirements: + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + zip: ${env:zip, self:custom.defaults.zip} + defaults: + dockerizePip: false + zip: false + +functions: + hello1: + handler: handler1.hello + architecture: x86_64 + package: + patterns: + - '!**' + - 'module1/**' + + hello2: + handler: handler2.hello + module: module2 + runtime: python3.9 + architecture: x86_64 + package: + patterns: + - '!**' + - 'module2/**' + +plugins: + - serverless-python-requirements diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json index 781a4259..b07744c9 100644 --- a/tests/non_build_pyproject/package.json +++ b/tests/non_build_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json index 781a4259..b07744c9 100644 --- a/tests/non_poetry_pyproject/package.json +++ b/tests/non_poetry_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index 781a4259..b07744c9 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/poetry/package.json b/tests/poetry/package.json index 781a4259..b07744c9 100644 --- a/tests/poetry/package.json +++ b/tests/poetry/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/poetry_individually/package.json b/tests/poetry_individually/package.json index 781a4259..b07744c9 100644 --- a/tests/poetry_individually/package.json +++ b/tests/poetry_individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/poetry_packages/package.json b/tests/poetry_packages/package.json index 781a4259..b07744c9 100644 --- a/tests/poetry_packages/package.json +++ b/tests/poetry_packages/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - 
"serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } From 549aba00ff320e6ddabb6f0be44999e7922fc616 Mon Sep 17 00:00:00 2001 From: Jackson Borneman <11304426+jax-b@users.noreply.github.com> Date: Sat, 24 Feb 2024 18:41:31 -0500 Subject: [PATCH 320/328] test: Bump node version in test configs --- tests/base/serverless.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index a82187ff..87423210 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -47,7 +47,7 @@ functions: handler: handler.hello hello3: handler: handler.hello - runtime: nodejs8.10 + runtime: nodejs14.x hello4: handler: fn2_handler.hello module: fn2 From fa9ac03ea7ffa3b583aaf69bd8e615ec112cabcc Mon Sep 17 00:00:00 2001 From: hayden Date: Mon, 26 Feb 2024 00:42:20 +0900 Subject: [PATCH 321/328] Add docker rootless feature flag and its implementation for supporting docke rootless environment (#818) --- index.js | 1 + lib/pip.js | 33 +++++++++++++++++++++------------ 2 files changed, 22 insertions(+), 12 deletions(-) diff --git a/index.js b/index.js index 81e50981..ca8b191f 100644 --- a/index.js +++ b/index.js @@ -50,6 +50,7 @@ class ServerlessPythonRequirements { dockerBuildCmdExtraArgs: [], dockerRunCmdExtraArgs: [], dockerExtraFiles: [], + dockerRootless: false, useStaticCache: true, useDownloadCache: true, cacheLocation: false, diff --git a/lib/pip.js b/lib/pip.js index 16a802b0..40140d36 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -328,12 +328,17 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) { } // Install requirements with pip // Set the ownership of the current folder to user - pipCmds.push([ - 'chown', - '-R', - `${process.getuid()}:${process.getgid()}`, - '/var/task', - ]); + // If you use docker-rootless, you don't need to set the ownership + if (options.dockerRootless !== true) { + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + '/var/task', + ]); + } else { + pipCmds.push(['chown', '-R', '0:0', '/var/task']); + } } else { // Use same user so --cache-dir works dockerCmd.push('-u', await getDockerUid(bindPath, pluginInstance)); @@ -346,12 +351,16 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) { if (process.platform === 'linux') { if (options.useDownloadCache) { // Set the ownership of the download cache dir back to user - pipCmds.push([ - 'chown', - '-R', - `${process.getuid()}:${process.getgid()}`, - dockerDownloadCacheDir, - ]); + if (options.dockerRootless !== true) { + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + dockerDownloadCacheDir, + ]); + } else { + pipCmds.push(['chown', '-R', '0:0', dockerDownloadCacheDir]); + } } } From ca617bb3bc503db14f68d4e41cf532f798b40704 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 28 Mar 2024 00:05:29 +0100 Subject: [PATCH 322/328] chore: Bump dependencies --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index c420e4d6..6ec63fa3 100644 --- a/package.json +++ b/package.json @@ -50,7 +50,7 @@ }, "devDependencies": { "cross-spawn": "*", - "eslint": "^8.52.0", + "eslint": "^8.57.0", "git-list-updated": "^1.2.1", "github-release-from-cc-changelog": "^2.3.0", "lodash": "^4.17.21", @@ -72,7 +72,7 @@ "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", - 
"semver": "^7.5.4", + "semver": "^7.6.0", "set-value": "^4.1.0", "sha256-file": "1.0.0", "shell-quote": "^1.8.1" From 840d28dd319340f61aa9627cb4ca68af643780eb Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 28 Mar 2024 00:09:48 +0100 Subject: [PATCH 323/328] chore: Release v6.1.0 --- CHANGELOG.md | 11 +++++++++++ package.json | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f8667134..c63f9d06 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,17 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [6.1.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.1...v6.1.0) (2024-03-27) + +### Features + +- Support Scaleway provider ([#812](https://github.com/UnitedIncome/serverless-python-requirements/issues/812)) ([1b0faae](https://github.com/UnitedIncome/serverless-python-requirements/commit/1b0faaeb6aadd2bc4b1b53526e35298a98d00aca)) ([Andy Méry](https://github.com/cyclimse)) +- Improved pip failure logging ([#813](https://github.com/UnitedIncome/serverless-python-requirements/issues/813)) ([787b479](https://github.com/UnitedIncome/serverless-python-requirements/commit/787b4791306e9a3ded5f0177c304cfbce081c119)) ([Justin Lyons](https://github.com/babyhuey)) + +### Bug Fixes + +- Ensure proper support for mixed runtimes and architectures ([#815](https://github.com/UnitedIncome/serverless-python-requirements/issues/815)) ([27b70f4](https://github.com/UnitedIncome/serverless-python-requirements/commit/27b70f4d6a7e43fd0e9711bbb56752fee2762901)) ([Stijn IJzermans](https://github.com/stijzermans)) + ### [6.0.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.0...v6.0.1) (2023-10-22) ### Bug Fixes diff --git a/package.json b/package.json index 6ec63fa3..3612c2cb 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "6.0.1", + "version": "6.1.0", "engines": { "node": ">=12.0" }, From 6e806c09686e57af93904af1d46b3b20aa62a202 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 7 Apr 2024 15:28:52 +0200 Subject: [PATCH 324/328] ci: Update actions to latest versions --- .github/workflows/integrate.yml | 2 +- .github/workflows/publish.yml | 6 +++--- .github/workflows/validate.yml | 8 ++++---- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index d2da520e..01fb27a3 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: # Ensure to have complete history of commits pushed with given push operation # It's loose and imperfect assumption that no more than 30 commits will be pushed at once diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 21d7cb71..0e3dc867 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -17,11 +17,11 @@ jobs: GITHUB_TOKEN: ${{ secrets.USER_GITHUB_TOKEN }} steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Retrieve node_modules from cache id: cacheNodeModules - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: | ~/.npm @@ -29,7 +29,7 @@ jobs: key: npm-v18-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') 
}} - name: Install Node.js and npm - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: 18.x registry-url: https://registry.npmjs.org diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 61935c3e..8ab2b366 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -18,7 +18,7 @@ jobs: sls-version: [2, 3] steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: # For commitlint purpose ensure to have complete list of PR commits # It's loose and imperfect assumption that PR has no more than 30 commits @@ -33,7 +33,7 @@ jobs: - name: Retrieve dependencies from cache id: cacheNpm - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: | ~/.npm @@ -44,12 +44,12 @@ jobs: npm-v18-${{ runner.os }}-refs/heads/master- - name: Set up Python 3.9 - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: 3.9 - name: Install Node.js and npm - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: 18.x From d2e492f9a25b383f5acc6a926a45858c4f23ad5e Mon Sep 17 00:00:00 2001 From: Austen Date: Fri, 31 May 2024 12:00:39 -0700 Subject: [PATCH 325/328] chore: Update serverless dependency --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3612c2cb..7a3b14a5 100644 --- a/package.json +++ b/package.json @@ -78,7 +78,7 @@ "shell-quote": "^1.8.1" }, "peerDependencies": { - "serverless": "^2.32 || 3" + "serverless": ">=2.32" }, "lint-staged": { "*.js": [ From 57f7c6b2189cb32272377117eb22ae6a4a4b4c7a Mon Sep 17 00:00:00 2001 From: Max Marze Date: Tue, 13 Aug 2024 13:28:33 -0400 Subject: [PATCH 326/328] chore: Release 6.1.1 (#840) * chore: Release 6.1.1 * fix: Add license key for tests * test: Update tests to support sls v4 --------- Co-authored-by: Tomasz Czubocha --- .github/workflows/validate.yml | 3 + CHANGELOG.md | 2 + package.json | 2 +- test.js | 179 +++++++++++++++++---------------- 4 files changed, 99 insertions(+), 87 deletions(-) diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 8ab2b366..38f69696 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -87,3 +87,6 @@ jobs: fi - name: Unit tests run: npm test + env: + SERVERLESS_PLATFORM_STAGE: dev + SERVERLESS_LICENSE_KEY: ${{ secrets.SERVERLESS_LICENSE_KEY }} diff --git a/CHANGELOG.md b/CHANGELOG.md index c63f9d06..3771a1e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+### [6.1.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.1.0...v6.1.1) (2024-06-03) + ## [6.1.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.1...v6.1.0) (2024-03-27) ### Features diff --git a/package.json b/package.json index 7a3b14a5..07466e23 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "6.1.0", + "version": "6.1.1", "engines": { "node": ">=12.0" }, diff --git a/test.js b/test.js index b97f3fdc..488b500f 100644 --- a/test.js +++ b/test.js @@ -44,7 +44,10 @@ const mkCommand = `${quote([cmd, ...args])} failed with status code ${status}` ); } - return stdout && stdout.toString().trim(); + return { + stdout: stdout && stdout.toString().trim(), + stderr: stderr && stderr.toString().trim(), + }; }; const sls = mkCommand('sls'); @@ -201,9 +204,9 @@ test( 'dockerPrivateKey option correctly resolves docker command', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - const stdout = sls(['package'], { + const { stderr } = sls(['package'], { noThrow: true, env: { dockerizePip: true, @@ -213,7 +216,7 @@ test( }, }); t.true( - stdout.includes( + stderr.includes( `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` ), 'docker command properly resolved' @@ -225,7 +228,7 @@ test( test('default pythonBin can package flask with default options', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -236,7 +239,7 @@ test('default pythonBin can package flask with default options', async (t) => { test('py3.9 packages have the same hash', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const fileHash = sha256File('.serverless/sls-py-req-test.zip'); @@ -251,7 +254,7 @@ test('py3.9 packages have the same hash', async (t) => { test('py3.9 can package flask with default options', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -264,7 +267,7 @@ test( 'py3.9 can package flask with hashes', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { @@ -281,7 +284,7 @@ test( test('py3.9 can package flask with nested', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { @@ -297,7 +300,7 @@ test('py3.9 can package flask with nested', async (t) => { test('py3.9 can package flask with zip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -315,7 +318,7 @@ test('py3.9 can package flask with zip option', 
async (t) => { test('py3.9 can package flask with slim option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -335,7 +338,7 @@ test('py3.9 can package flask with slim option', async (t) => { test('py3.9 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -355,7 +358,7 @@ test('py3.9 can package flask with slim & slimPatterns options', async (t) => { test("py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); perl([ '-p', @@ -373,7 +376,7 @@ test("py3.9 doesn't package bottle with noDeploy option", async (t) => { test('py3.9 can package boto3 with editable', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { @@ -394,7 +397,7 @@ test( 'py3.9 can package flask with dockerizePip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -409,7 +412,7 @@ test( 'py3.9 can package flask with slim & dockerizePip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -434,7 +437,7 @@ test( async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -458,7 +461,7 @@ test( 'py3.9 can package flask with zip & dockerizePip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -490,7 +493,7 @@ test( 'py3.9 can package flask with zip & slim & dockerizePip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', zip: 'true', slim: 'true' }, @@ -522,7 +525,7 @@ test( test('pipenv py3.9 can package flask with default options', async (t) => { process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = 
await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -537,7 +540,7 @@ test('pipenv py3.9 can package flask with default options', async (t) => { test('pipenv py3.9 can package flask with slim option', async (t) => { process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -558,7 +561,7 @@ test('pipenv py3.9 can package flask with slim & slimPatterns options', async (t process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -578,7 +581,7 @@ test('pipenv py3.9 can package flask with slim & slimPatterns options', async (t test('pipenv py3.9 can package flask with zip option', async (t) => { process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -596,7 +599,7 @@ test('pipenv py3.9 can package flask with zip option', async (t) => { test("pipenv py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); perl([ '-p', @@ -614,7 +617,7 @@ test("pipenv py3.9 doesn't package bottle with noDeploy option", async (t) => { test('non build pyproject.toml uses requirements.txt', async (t) => { process.chdir('tests/non_build_pyproject'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -625,7 +628,7 @@ test('non build pyproject.toml uses requirements.txt', async (t) => { test('non poetry pyproject.toml without requirements.txt packages handler only', async (t) => { process.chdir('tests/non_poetry_pyproject'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -635,7 +638,7 @@ test('non poetry pyproject.toml without requirements.txt packages handler only', test('poetry py3.9 can package flask with default options', async (t) => { process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -647,7 +650,7 @@ test('poetry py3.9 can package flask with default options', async (t) => { test('poetry py3.9 can package flask with slim option', async (t) => { process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -668,7 +671,7 @@ test('poetry py3.9 can package flask with slim & slimPatterns options', async (t process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - 
const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: { slim: 'true' } });
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
@@ -688,7 +691,7 @@ test('poetry py3.9 can package flask with slim & slimPatterns options', async (t
 test('poetry py3.9 can package flask with zip option', async (t) => {
   process.chdir('tests/poetry');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } });
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
@@ -706,7 +709,7 @@ test('poetry py3.9 can package flask with zip option', async (t) => {
 test("poetry py3.9 doesn't package bottle with noDeploy option", async (t) => {
   process.chdir('tests/poetry');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   perl([
     '-p',
@@ -724,7 +727,7 @@ test("poetry py3.9 doesn't package bottle with noDeploy option", async (t) => {
 test('py3.9 can package flask with zip option and no explicit include', async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']);
   perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']);
@@ -744,7 +747,7 @@ test('py3.9 can package flask with zip option and no explicit include', async (t
 test('py3.9 can package lambda-decorators using vendor option', async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: { vendor: './vendor' } });
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
@@ -761,7 +764,7 @@ test(
   "Don't nuke execute perms",
   async (t) => {
     process.chdir('tests/base');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     const perm = '755';
     npm(['i', path]);
@@ -808,7 +811,7 @@ test(
 test('py3.9 can package flask in a project with a space in it', async (t) => {
   copySync('tests/base', 'tests/base with a space');
   process.chdir('tests/base with a space');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: {} });
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
@@ -822,7 +825,7 @@ test(
   async (t) => {
     copySync('tests/base', 'tests/base with a space');
     process.chdir('tests/base with a space');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], { env: { dockerizePip: 'true' } });
     const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
@@ -835,7 +838,7 @@ test(
 test('py3.9 supports custom file name with fileName option', async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   writeFileSync('puck', 'requests');
   npm(['i', path]);
   sls(['package'], { env: { fileName: 'puck' } });
@@ -851,7 +854,7 @@ test('py3.9 supports custom file name with fileName option', async (t) => {
 test("py3.9 doesn't package bottle with zip option", async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   perl([
     '-p',
@@ -888,7 +891,7 @@ test("py3.9 doesn't package bottle with zip option", async (t) => {
 test('py3.9 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => {
   process.chdir('tests/base');
   copySync('_slimPatterns.yml', 'slimPatterns.yml');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], {
     env: { slim: 'true', slimPatternsAppendDefaults: 'false' },
@@ -912,7 +915,7 @@ test(
   async (t) => {
     process.chdir('tests/base');
     copySync('_slimPatterns.yml', 'slimPatterns.yml');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], {
       env: {
@@ -940,7 +943,7 @@ test(
 test('pipenv py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => {
   process.chdir('tests/pipenv');
   copySync('_slimPatterns.yml', 'slimPatterns.yml');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], {
@@ -963,7 +966,7 @@ test('pipenv py3.9 can package flask with slim & slimPatterns & slimPatternsAppe
 test('poetry py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => {
   process.chdir('tests/poetry');
   copySync('_slimPatterns.yml', 'slimPatterns.yml');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], {
@@ -985,7 +988,7 @@ test('poetry py3.9 can package flask with slim & slimPatterns & slimPatternsAppe
 test('poetry py3.9 can package flask with package individually option', async (t) => {
   process.chdir('tests/poetry_individually');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: {} });
@@ -1000,7 +1003,7 @@ test('poetry py3.9 can package flask with package individually option', async (t
 test('py3.9 can package flask with package individually option', async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: { individually: 'true' } });
   const zipfiles_hello = await listZipFiles('.serverless/hello.zip');
@@ -1082,7 +1085,7 @@ test('py3.9 can package flask with package individually option', async (t) => {
 test('py3.9 can package flask with package individually & slim option', async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: { individually: 'true', slim: 'true' } });
   const zipfiles_hello = await listZipFiles('.serverless/hello.zip');
@@ -1164,7 +1167,7 @@ test('py3.9 can package flask with package individually & slim option', async (t
 test('py3.9 can package only requirements of module', async (t) => {
   process.chdir('tests/individually');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: {} });
   const zipfiles_hello = await listZipFiles(
@@ -1220,7 +1223,7 @@ test('py3.9 can package only requirements of module', async (t) => {
 test('py3.9 can package lambda-decorators using vendor and invidiually option', async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: { individually: 'true', vendor: './vendor' } });
   const zipfiles_hello = await listZipFiles('.serverless/hello.zip');
@@ -1299,7 +1302,7 @@ test(
   "Don't nuke execute perms when using individually",
   async (t) => {
     process.chdir('tests/individually');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     const perm = '755';
     writeFileSync(`module1${sep}foobar`, '');
     chmodSync(`module1${sep}foobar`, perm);
@@ -1338,7 +1341,7 @@ test(
   "Don't nuke execute perms when using individually w/docker",
   async (t) => {
     process.chdir('tests/individually');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     const perm = '755';
     writeFileSync(`module1${sep}foobar`, '', { mode: perm });
     chmodSync(`module1${sep}foobar`, perm);
@@ -1377,7 +1380,7 @@ test(
   'py3.9 can package flask running in docker with module runtime & architecture of function',
   async (t) => {
     process.chdir('tests/individually_mixed_runtime');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], {
@@ -1405,7 +1408,7 @@ test(
   'py3.9 can package flask succesfully when using mixed architecture, docker and zipping',
   async (t) => {
     process.chdir('tests/individually_mixed_runtime');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } });
@@ -1460,7 +1463,7 @@ test(
   'py3.9 uses download cache by default option',
   async (t) => {
     process.chdir('tests/base');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], { env: {} });
     const cachepath = getUserCachePath();
@@ -1477,7 +1480,7 @@ test(
   'py3.9 uses download cache by default',
   async (t) => {
     process.chdir('tests/base');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], { env: { cacheLocation: '.requirements-cache' } });
     t.true(
@@ -1493,7 +1496,7 @@ test(
   'py3.9 uses download cache with dockerizePip option',
   async (t) => {
     process.chdir('tests/base');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], { env: { dockerizePip: 'true' } });
     const cachepath = getUserCachePath();
@@ -1511,7 +1514,7 @@ test(
   'py3.9 uses download cache with dockerizePip by default option',
   async (t) => {
     process.chdir('tests/base');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], {
       env: { dockerizePip: 'true', cacheLocation: '.requirements-cache' },
@@ -1530,7 +1533,7 @@ test(
   'py3.9 uses static and download cache',
   async (t) => {
     process.chdir('tests/base');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], { env: {} });
     const cachepath = getUserCachePath();
@@ -1555,7 +1558,7 @@ test(
   'py3.9 uses static and download cache with dockerizePip option',
   async (t) => {
     process.chdir('tests/base');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], { env: { dockerizePip: 'true' } });
     const cachepath = getUserCachePath();
@@ -1578,7 +1581,7 @@ test(
 test('py3.9 uses static cache', async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: {} });
   const cachepath = getUserCachePath();
@@ -1614,7 +1617,7 @@ test('py3.9 uses static cache', async (t) => {
 test('py3.9 uses static cache with cacheLocation option', async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   const cachepath = '.requirements-cache';
   sls(['package'], { env: { cacheLocation: cachepath } });
@@ -1639,7 +1642,7 @@ test(
   'py3.9 uses static cache with dockerizePip & slim option',
   async (t) => {
     process.chdir('tests/base');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } });
     const cachepath = getUserCachePath();
@@ -1684,7 +1687,7 @@ test(
   'py3.9 uses download cache with dockerizePip & slim option',
   async (t) => {
     process.chdir('tests/base');
-    const path = npm(['pack', '../..']);
+    const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
     sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } });
     const cachepath = getUserCachePath();
@@ -1708,7 +1711,7 @@ test(
 test('py3.9 can ignore functions defined with `image`', async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: { individually: 'true' } });
   t.true(pathExistsSync('.serverless/hello.zip'), 'function hello is packaged');
@@ -1737,14 +1740,14 @@ test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoe
   process.chdir('tests/base with a space');
   removeSync('poetry.lock');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
-  const stdout = sls(['package'], {
+  const { stderr } = sls(['package'], {
     env: { requirePoetryLockFile: 'true', slim: 'true' },
     noThrow: true,
   });
   t.true(
-    stdout.includes(
+    stderr.includes(
       'poetry.lock file not found - set requirePoetryLockFile to false to disable this error'
     ),
     'flag works and error is properly reported'
@@ -1754,9 +1757,9 @@ test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoe
 test('works with provider.runtime not being python', async (t) => {
   process.chdir('tests/base');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
-  sls(['package'], { env: { runtime: 'nodejs12.x' } });
+  sls(['package'], { env: { runtime: 'nodejs20.x' } });
   t.true(
     pathExistsSync('.serverless/sls-py-req-test.zip'),
     'sls-py-req-test is packaged'
@@ -1766,7 +1769,7 @@ test('works with provider.runtime not being python', async (t) => {
 test('poetry py3.9 packages additional optional packages', async (t) => {
   process.chdir('tests/poetry_packages');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], {
     env: {
@@ -1782,7 +1785,7 @@ test('poetry py3.9 packages additional optional packages', async (t) => {
 test('poetry py3.9 skips additional optional packages specified in withoutGroups', async (t) => {
   process.chdir('tests/poetry_packages');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], {
     env: {
@@ -1799,7 +1802,7 @@ test('poetry py3.9 skips additional optional packages specified in withoutGroups
 test('poetry py3.9 only installs optional packages specified in onlyGroups', async (t) => {
   process.chdir('tests/poetry_packages');
-  const path = npm(['pack', '../..']);
+  const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], {
     env: {
@@ -1813,19 +1816,23 @@ test('poetry py3.9 only installs optional packages specified in onlyGroups', asy
   t.end();
 });
 
-test('py3.7 injects dependencies into `package` folder when using scaleway provider', async (t) => {
-  process.chdir('tests/scaleway_provider');
-  const path = npm(['pack', '../..']);
-  npm(['i', path]);
-  sls(['package'], { env: {} });
-  const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
-  t.true(
-    zipfiles.includes(`package${sep}flask${sep}__init__.py`),
-    'flask is packaged'
-  );
-  t.true(
-    zipfiles.includes(`package${sep}boto3${sep}__init__.py`),
-    'boto3 is packaged'
-  );
-  t.end();
-});
+test(
+  'py3.7 injects dependencies into `package` folder when using scaleway provider',
+  async (t) => {
+    process.chdir('tests/scaleway_provider');
+    const { stdout: path } = npm(['pack', '../..']);
+    npm(['i', path]);
+    sls(['package'], { env: {} });
+    const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
+    t.true(
+      zipfiles.includes(`package${sep}flask${sep}__init__.py`),
+      'flask is packaged'
+    );
+    t.true(
+      zipfiles.includes(`package${sep}boto3${sep}__init__.py`),
+      'boto3 is packaged'
+    );
+    t.end();
+  },
+  { skip: true } // sls v4 supports aws provider only
+);

From bceb7371dd64d59829377fe6fd16e17f631d0251 Mon Sep 17 00:00:00 2001
From: Tomasz Czubocha
Date: Tue, 11 Feb 2025 18:56:23 +0100
Subject: [PATCH 327/328] fix: Use absolute paths to ensure compatibility with v4 Compose (#854)

---
 .github/workflows/validate.yml | 2 +-
 index.js                       | 2 +-
 lib/inject.js                  | 9 +++++++--
 package.json                   | 3 ---
 test.js                        | 8 ++++----
 5 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml
index 38f69696..23e2d67f 100644
--- a/.github/workflows/validate.yml
+++ b/.github/workflows/validate.yml
@@ -61,7 +61,7 @@ jobs:
         run: python -m pip install --force setuptools wheel
 
       - name: Install pipenv / poetry
-        run: python -m pip install pipenv poetry
+        run: python -m pip install pipenv poetry && poetry self add poetry-plugin-export
 
       - name: Install serverless
         run: npm install -g serverless@${{ matrix.sls-version }}
diff --git a/index.js b/index.js
index ca8b191f..44906956 100644
--- a/index.js
+++ b/index.js
@@ -74,7 +74,7 @@ class ServerlessPythonRequirements {
       options.pythonBin = 'python';
     }
     if (/python3[0-9]+/.test(options.pythonBin)) {
-      // "google" and "scaleway" providers' runtimes uses python3XX
+      // "google" and "scaleway" providers' runtimes use python3XX
       options.pythonBin = options.pythonBin.replace(/3([0-9]+)/, '3.$1');
     }
     if (options.dockerizePip === 'non-linux') {
diff --git a/lib/inject.js b/lib/inject.js
index 12267376..f4acde9d 100644
--- a/lib/inject.js
+++ b/lib/inject.js
@@ -153,7 +153,12 @@ async function injectAllRequirements(funcArtifact) {
       return this.options.zip
         ? func
         : injectRequirements(
-            path.join('.serverless', func.module, 'requirements'),
+            path.join(
+              this.serverless.serviceDir,
+              '.serverless',
+              func.module,
+              'requirements'
+            ),
             func.package.artifact,
             injectionRelativePath,
             this.options
@@ -161,7 +166,7 @@ async function injectAllRequirements(funcArtifact) {
     });
   } else if (!this.options.zip) {
     await injectRequirements(
-      path.join('.serverless', 'requirements'),
+      path.join(this.serverless.serviceDir, '.serverless', 'requirements'),
       this.serverless.service.package.artifact || funcArtifact,
       injectionRelativePath,
       this.options
diff --git a/package.json b/package.json
index 07466e23..f5754f24 100644
--- a/package.json
+++ b/package.json
@@ -77,9 +77,6 @@
     "sha256-file": "1.0.0",
     "shell-quote": "^1.8.1"
   },
-  "peerDependencies": {
-    "serverless": ">=2.32"
-  },
   "lint-staged": {
     "*.js": [
       "eslint"
diff --git a/test.js b/test.js
index 488b500f..1967330b 100644
--- a/test.js
+++ b/test.js
@@ -206,7 +206,7 @@ test(
     process.chdir('tests/base');
     const { stdout: path } = npm(['pack', '../..']);
     npm(['i', path]);
-    const { stderr } = sls(['package'], {
+    const { stdout } = sls(['package'], {
       noThrow: true,
       env: {
         dockerizePip: true,
@@ -216,7 +216,7 @@ test(
       },
     });
     t.true(
-      stderr.includes(
+      stdout.includes(
         `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z`
       ),
       'docker command properly resolved'
@@ -1742,12 +1742,12 @@ test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoe
   const { stdout: path } = npm(['pack', '../..']);
   npm(['i', path]);
-  const { stderr } = sls(['package'], {
+  const { stdout } = sls(['package'], {
     env: { requirePoetryLockFile: 'true', slim: 'true' },
     noThrow: true,
   });
   t.true(
-    stderr.includes(
+    stdout.includes(
       'poetry.lock file not found - set requirePoetryLockFile to false to disable this error'
     ),
     'flag works and error is properly reported'

From b58ce60097717c369eea9f1e807a1374c7e4937f Mon Sep 17 00:00:00 2001
From: Tomasz Czubocha
Date: Tue, 11 Feb 2025 19:40:17 +0100
Subject: [PATCH 328/328] chore: Release v6.1.2 (#855)

---
 CHANGELOG.md | 6 ++++++
 package.json | 2 +-
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3771a1e0..42026cdc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,12 @@
 
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
 
+### [6.1.2](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.1.1...v6.1.2) (2025-02-11)
+
+### Bug Fixes
+
+- Use absolute paths to ensure compatibility with v4 Compose ([#854](https://github.com/UnitedIncome/serverless-python-requirements/issues/854)) ([bceb737](https://github.com/UnitedIncome/serverless-python-requirements/commit/bceb7371dd64d59829377fe6fd16e17f631d0251))
+
 ### [6.1.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.1.0...v6.1.1) (2024-06-03)
 
 ## [6.1.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.1...v6.1.0) (2024-03-27)
diff --git a/package.json b/package.json
index f5754f24..55ab4989 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "serverless-python-requirements",
-  "version": "6.1.1",
+  "version": "6.1.2",
   "engines": {
     "node": ">=12.0"
   },
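
A note on the fix in PATCH 327 above: the plugin previously located its generated requirements folder with a path relative to the current working directory ('.serverless/.../requirements'), which can point at the wrong place when another tool such as Serverless Compose v4 drives packaging from a different directory; the patch therefore resolves the folder against `this.serverless.serviceDir`. The sketch below only illustrates that resolution - `requirementsDir` is a hypothetical helper, not part of the plugin's API.

const path = require('path');

// Hypothetical helper sketching the path resolution introduced in PATCH 327:
// build the requirements folder from the (absolute) service directory rather
// than from process.cwd().
function requirementsDir(serviceDir, moduleName) {
  // moduleName is only set when packaging functions individually.
  return moduleName
    ? path.join(serviceDir, '.serverless', moduleName, 'requirements')
    : path.join(serviceDir, '.serverless', 'requirements');
}

// Example: requirementsDir('/work/my-service', 'module1')
//   -> '/work/my-service/.serverless/module1/requirements'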