diff --git a/.gitignore b/.gitignore
index 3c8fe028e..399d918e9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,7 @@
 node_modules
 dist
-diffs
+test/regression-fixtures
+test/regression-diffs
 coverage
 bin/svgo-profiling
 .DS_Store
diff --git a/package-lock.json b/package-lock.json
index 54b209f7a..774a397bc 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1145,12 +1145,6 @@
       "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=",
       "dev": true
     },
-    "get-stream": {
-      "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.0.tgz",
-      "integrity": "sha512-A1B3Bh1UmL0bidM/YX2NsCOTnGJePL9rO/M+Mw3m9f2gUpfokS0hi5Eah0WSUEWZdZhIZtMjkIYS7mDfOqNHbg==",
-      "dev": true
-    },
     "glob": {
       "version": "7.1.6",
       "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
diff --git a/package.json b/package.json
index 5cc0c940d..11b723a47 100644
--- a/package.json
+++ b/package.json
@@ -54,7 +54,7 @@
     "fix": "eslint --ignore-path .gitignore --fix . && prettier --write \"**/*.js\" --ignore-path .gitignore",
     "typecheck": "tsc",
     "test-browser": "rollup -c && node ./test/browser.js",
-    "test-regression": "NO_DIFF=1 node ./test/regression.js",
+    "test-regression": "node ./test/regression-extract.js && NO_DIFF=1 node ./test/regression.js",
     "prepublishOnly": "rm -rf dist && rollup -c"
   },
   "prettier": {
@@ -109,7 +109,6 @@
     "chai": "^4.3.4",
     "del": "^6.0.0",
     "eslint": "^7.22.0",
-    "get-stream": "^6.0.0",
     "mocha": "^8.3.2",
     "mock-stdin": "^1.0.0",
     "node-fetch": "^2.6.1",
diff --git a/test/regression-extract.js b/test/regression-extract.js
new file mode 100644
index 000000000..efaddc77d
--- /dev/null
+++ b/test/regression-extract.js
@@ -0,0 +1,57 @@
+'use strict';
+
+const fs = require('fs');
+const path = require('path');
+const util = require('util');
+const zlib = require('zlib');
+const stream = require('stream');
+const fetch = require('node-fetch');
+const tarStream = require('tar-stream');
+
+const pipeline = util.promisify(stream.pipeline);
+
+const extractTarGz = async (url, baseDir, include) => {
+  const extract = tarStream.extract();
+  extract.on('entry', async (header, stream, next) => {
+    try {
+      if (include == null || include.test(header.name)) {
+        if (header.name.endsWith('.svg')) {
+          const file = path.join(baseDir, header.name);
+          await fs.promises.mkdir(path.dirname(file), { recursive: true });
+          await pipeline(stream, fs.createWriteStream(file));
+        }
+        if (header.name.endsWith('.svgz')) {
+          // .svgz -> .svg
+          const file = path.join(baseDir, header.name.slice(0, -1));
+          await fs.promises.mkdir(path.dirname(file), { recursive: true });
+          await pipeline(
+            stream,
+            zlib.createGunzip(),
+            fs.createWriteStream(file)
+          );
+        }
+      }
+    } catch (error) {
+      console.error(error);
+      process.exit(1);
+    }
+    stream.resume();
+    next();
+  });
+  const response = await fetch(url);
+  await pipeline(response.body, extract);
+};
+
+(async () => {
+  try {
+    console.info('Download W3C SVG 1.1 Test Suite and extract svg files');
+    await extractTarGz(
+      'https://www.w3.org/Graphics/SVG/Test/20110816/archives/W3C_SVG_11_TestSuite.tar.gz',
+      path.join(__dirname, 'regression-fixtures', 'w3c-svg-11-test-suite'),
+      /^svg\//
+    );
+  } catch (error) {
+    console.error(error);
+    process.exit(1);
+  }
+})();
diff --git a/test/regression.js b/test/regression.js
index f43abcfb3..65fc3ba4e 100644
--- a/test/regression.js
+++ b/test/regression.js
@@ -2,92 +2,12 @@
 
 const fs = require('fs');
 const path = require('path');
-const util = require('util');
-const zlib = require('zlib');
 const http = require('http');
-const stream = require('stream');
-const fetch = require('node-fetch');
-const tarStream = require('tar-stream');
-const getStream = require('get-stream');
 const { chromium } = require('playwright');
 const { PNG } = require('pngjs');
 const pixelmatch = require('pixelmatch');
 const { optimize } = require('../lib/svgo.js');
 
-const pipeline = util.promisify(stream.pipeline);
-
-const readSvgFiles = async () => {
-  const cachedArchiveFile = path.join(
-    process.cwd(),
-    'node_modules/.cache/W3C_SVG_11_TestSuite.tar.gz'
-  );
-  const svgFiles = new Map();
-  let fileStream;
-  try {
-    await fs.promises.access(cachedArchiveFile);
-    fileStream = fs.createReadStream(cachedArchiveFile);
-  } catch {
-    const response = await fetch(
-      'https://www.w3.org/Graphics/SVG/Test/20110816/archives/W3C_SVG_11_TestSuite.tar.gz'
-    );
-    fileStream = response.body;
-    fileStream.pipe(fs.createWriteStream(cachedArchiveFile));
-  }
-  const extract = tarStream.extract();
-  extract.on('entry', async (header, stream, next) => {
-    try {
-      if (header.name.startsWith('svg/')) {
-        if (header.name.endsWith('.svg')) {
-          // strip folder and extension
-          const name = header.name.slice('svg/'.length, -'.svg'.length);
-          const string = await getStream(stream);
-          svgFiles.set(name, string);
-        }
-        if (header.name.endsWith('.svgz')) {
-          // strip folder and extension
-          const name = header.name.slice('svg/'.length, -'.svgz'.length);
-          const string = await getStream(stream.pipe(zlib.createGunzip()));
-          svgFiles.set(name, string);
-        }
-      }
-    } catch (error) {
-      console.error(error);
-      process.exit(1);
-    }
-    stream.resume();
-    next();
-  });
-  await pipeline(fileStream, extract);
-  return svgFiles;
-};
-
-const optimizeSvgFiles = (svgFiles) => {
-  const optimizedFiles = new Map();
-  let failed = 0;
-  for (const [name, string] of svgFiles) {
-    try {
-      const result = optimize(string, { path: name, floatPrecision: 4 });
-      if (result.error) {
-        console.error(result.error);
-        console.error(`File: ${name}`);
-        failed += 1;
-        continue;
-      } else {
-        optimizedFiles.set(name, result.data);
-      }
-    } catch (error) {
-      console.error(error);
-      console.error(`File: ${name}`);
-      failed += 1;
-      continue;
-    }
-  }
-  if (failed !== 0) {
-    throw Error(`Failed to optimize ${failed} cases`);
-  }
-  return optimizedFiles;
-};
-
 const chunkInto = (array, chunksCount) => {
   // take upper bound to include tail
   const chunkSize = Math.ceil(array.length / chunksCount);
@@ -99,7 +19,7 @@ const chunkInto = (array, chunksCount) => {
   return result;
 };
 
-const runTests = async ({ svgFiles }) => {
+const runTests = async ({ list }) => {
   let skipped = 0;
   let mismatched = 0;
   let passed = 0;
@@ -107,29 +27,31 @@ const runTests = async ({ svgFiles }) => {
   const processFile = async (page, name) => {
     if (
       // hard to detect the end of animation
-      name.startsWith('animate-') ||
+      name.startsWith('w3c-svg-11-test-suite/svg/animate-') ||
       // breaks because of optimisation despite of script
-      name === 'interact-pointer-04-f' ||
+      name === 'w3c-svg-11-test-suite/svg/interact-pointer-04-f.svg' ||
       // messed gradients
-      name === 'pservers-grad-18-b' ||
+      name === 'w3c-svg-11-test-suite/svg/pservers-grad-18-b.svg' ||
       // animated filter
-      name === 'filters-light-04-f' ||
+      name === 'w3c-svg-11-test-suite/svg/filters-light-04-f.svg' ||
+      // animated filter
+      name === 'w3c-svg-11-test-suite/svg/filters-composite-05-f.svg' ||
       // removing wrapping breaks :first-child pseudo-class
-      name === 'styling-pres-04-f' ||
+      name === 'w3c-svg-11-test-suite/svg/styling-pres-04-f.svg' ||
       // messed case insensitivity while inlining styles
-      name === 'styling-css-10-f' ||
+      name === 'w3c-svg-11-test-suite/svg/styling-css-10-f.svg' ||
      // rect is converted to path which matches wrong styles
-      name === 'styling-css-08-f' ||
+      name === 'w3c-svg-11-test-suite/svg/styling-css-08-f.svg' ||
       // external image
-      name === 'struct-image-02-b' ||
+      name === 'w3c-svg-11-test-suite/svg/struct-image-02-b.svg' ||
       // complex selectors are messed becase of converting shapes to paths
-      name === 'struct-use-10-f' ||
-      name === 'struct-use-11-f' ||
-      name === 'styling-css-01-b' ||
-      name === 'styling-css-03-b' ||
-      name === 'styling-css-04-f' ||
+      name === 'w3c-svg-11-test-suite/svg/struct-use-10-f.svg' ||
+      name === 'w3c-svg-11-test-suite/svg/struct-use-11-f.svg' ||
+      name === 'w3c-svg-11-test-suite/svg/styling-css-01-b.svg' ||
+      name === 'w3c-svg-11-test-suite/svg/styling-css-03-b.svg' ||
+      name === 'w3c-svg-11-test-suite/svg/styling-css-04-f.svg' ||
       // strange artifact breaks inconsistently breaks regression tests
-      name === 'filters-conv-05-f'
+      name === 'w3c-svg-11-test-suite/svg/filters-conv-05-f.svg'
     ) {
       console.info(`${name} is skipped`);
       skipped += 1;
@@ -166,17 +88,19 @@ const runTests = async ({ svgFiles }) => {
       mismatched += 1;
       console.error(`${name} is mismatched`);
       if (process.env.NO_DIFF == null) {
-        await fs.promises.mkdir('diffs', { recursive: true });
-        await fs.promises.writeFile(
-          `diffs/${name}.diff.png`,
-          PNG.sync.write(diff)
+        const file = path.join(
+          __dirname,
+          'regression-diffs',
+          `${name}.diff.png`
         );
+        await fs.promises.mkdir(path.dirname(file), { recursive: true });
+        await fs.promises.writeFile(file, PNG.sync.write(diff));
       }
     }
   };
   const browser = await chromium.launch();
   const context = await browser.newContext({ javaScriptEnabled: false });
-  const chunks = chunkInto(svgFiles, 8);
+  const chunks = chunkInto(list, 8);
   await Promise.all(
     chunks.map(async (chunk) => {
       const page = await context.newPage();
@@ -193,18 +117,60 @@ const runTests = async ({ svgFiles }) => {
   return mismatched === 0;
 };
 
+const readdirRecursive = async (absolute, relative = '') => {
+  let result = [];
+  const list = await fs.promises.readdir(absolute, { withFileTypes: true });
+  for (const item of list) {
+    const itemAbsolute = path.join(absolute, item.name);
+    const itemRelative = path.join(relative, item.name);
+    if (item.isDirectory()) {
+      const itemList = await readdirRecursive(itemAbsolute, itemRelative);
+      result = [...result, ...itemList];
+    } else if (item.name.endsWith('.svg')) {
+      result = [...result, itemRelative];
+    }
+  }
+  return result;
+};
+
 (async () => {
   try {
     const start = process.hrtime.bigint();
-    console.info('Download W3C SVG 1.1 Test Suite and extract svg files');
-    const svgFiles = await readSvgFiles();
-    const optimizedFiles = optimizeSvgFiles(svgFiles);
+    const fixturesDir = path.join(__dirname, 'regression-fixtures');
+    const list = await readdirRecursive(fixturesDir);
+    const originalFiles = new Map();
+    const optimizedFiles = new Map();
+    // read original and optimize
+    let failed = 0;
+    for (const name of list) {
+      try {
+        const file = path.join(fixturesDir, name);
+        const original = await fs.promises.readFile(file, 'utf-8');
+        const result = optimize(original, { path: name, floatPrecision: 4 });
+        if (result.error) {
+          console.error(result.error);
+          console.error(`File: ${name}`);
+          failed += 1;
+        } else {
+          originalFiles.set(name, original);
+          optimizedFiles.set(name, result.data);
+        }
+      } catch (error) {
+        console.error(error);
+        console.error(`File: ${name}`);
+        failed += 1;
+      }
+    }
+    if (failed !== 0) {
+      throw Error(`Failed to optimize ${failed} cases`);
+    }
+    // setup server
     const server = http.createServer((req, res) => {
       if (req.url.startsWith('/original/')) {
         const name = req.url.slice('/original/'.length);
-        if (svgFiles.has(name)) {
+        if (originalFiles.has(name)) {
           res.setHeader('Content-Type', 'image/svg+xml');
-          res.end(svgFiles.get(name));
+          res.end(originalFiles.get(name));
           return;
         }
       }
@@ -222,8 +188,9 @@ const runTests = async ({ svgFiles }) => {
     await new Promise((resolve) => {
       server.listen(5000, resolve);
     });
-    const passed = await runTests({ svgFiles: Array.from(svgFiles.keys()) });
+    const passed = await runTests({ list });
     server.close();
+    // compute time
     const end = process.hrtime.bigint();
     const diff = (end - start) / BigInt(1e6);
     if (passed) {