main.js
import { runLighthouseForUrls } from './lighthouse-script.js';
import { readPastRunsFile, writePastRunsFile, generateIndexHTML } from './index-utils.mjs';
import { writeFile, readFile, revertFile, writeFileWithBackup } from './file-utils.mjs';
import fs from 'fs/promises';
import express from 'express';
import path, { resolve } from 'path';
import { fileURLToPath } from 'url';
import open from 'open';
import url from 'url';
import archiver from 'archiver';
import crawl from './crawler.js';
import { createWebSocketServer } from './websocket-utils.mjs';
const __dirname = path.dirname(fileURLToPath(import.meta.url));
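// Paths and ports: where past-run data and the generated index live, plus the HTTP and WebSocket ports.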
const pastRunsFile = './results/pastRuns.json';
const indexFile = './results/index.html';
const port = 3005;
const wsPort = 3008;
const reportDirectory = './results';
const app = express();
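// Static asset serving and JSON body parsing for the browser UI.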
app.use('/img', express.static(path.join(__dirname, 'static', 'img')));
app.use(express.json());
app.use('/static', express.static(path.join(__dirname, 'static')));
// Serve the urls-editor.html file from the static folder
app.get('/urls-editor', (req, res) => {
res.sendFile(resolve(__dirname, 'static', 'urls-editor.html'));
});
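// Stream a zipped copy of one results folder, identified by its date and timestamp path segments.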
app.get('/download-zip/:date/:timestamp', (req, res) => {
const { date, timestamp } = req.params;
const folderPath = path.join(__dirname, `./results/${date}/${timestamp}`);
const archive = archiver('zip');
res.attachment(`lighthouse-results-${timestamp}.zip`);
archive.on('error', (err) => {
res.status(500).send({ error: err.message });
});
archive.pipe(res);
archive.directory(folderPath, false);
archive.finalize();
});
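// Crawl the submitted URL and return the discovered URLs as JSON.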
app.post('/crawl', async (req, res) => {
try {
const crawlUrl = req.body.url;
const urls = await crawl(crawlUrl);
console.log(`Crawled URLs for ${crawlUrl}:`, urls); // Log the crawled URLs for debugging
res.json({ urls });
} catch (error) {
res.status(500).json({ error: error.message });
}
});
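// Read, write (with backup), or revert urls.txt on behalf of the URLs editor.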
app.post('/file-operation', async (req, res) => {
try {
const operation = req.body.operation;
if (operation === 'read') {
const content = await readFile('./urls.txt');
res.json({ content });
} else if (operation === 'write') {
const content = req.body.content;
await writeFileWithBackup('./urls.txt', content); // Back up the existing file first so the write can be reverted
res.json({ success: true });
} else if (operation === 'revert') {
await revertFile('./urls.txt');
res.json({ success: true });
} else {
res.status(400).json({ error: 'Invalid operation' });
}
} catch (error) {
res.status(500).json({ error: error.message });
}
});
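// WebSocket server used to push progress updates to the browser while tests are running.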
const { wss, setRunningTests, handleUpgrade, broadcast } = createWebSocketServer(wsPort);
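// Re-run Lighthouse for all configured URLs, update past runs, and rebuild the index page.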
app.post('/rerun-tests', async (req, res) => {
try {
setRunningTests(true);
console.log('Rerunning Lighthouse tests...');
const { results } = await runLighthouseForUrls(broadcast, chromeProfileDir);
console.log('Lighthouse run complete.');
console.log('Updating past runs...');
const pastRuns = (await updatePastRuns(results)).filter(run => run !== null);
console.log('Past runs updated.');
if (pastRuns && pastRuns.length > 0) {
console.log('Writing index HTML...');
await writeIndexHTML(pastRuns);
console.log('Index HTML written.');
} else {
console.warn('No past runs available. Skipping index HTML update.');
}
res.sendStatus(200);
} catch (error) {
console.error('Error rerunning tests:', error);
res.sendStatus(500);
} finally {
setRunningTests(false);
}
});
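// Extract the main domain (last two hostname labels) from a URL.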
function extractMainDomain(domainUrl) {
const { hostname } = new url.URL(domainUrl);
const parts = hostname.split('.');
const mainDomain = parts.slice(-2).join('.');
return mainDomain;
}
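// Prepend the latest run to pastRuns.json (unless it had errors) and regenerate index.html.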
async function updatePastRuns(results) {
try {
console.log('Reading past runs file...');
const pastRuns = await readPastRunsFile(pastRunsFile);
console.log('Past runs file read successfully.');
if (!results || results.length === 0) {
console.warn('No results available. Skipping past runs update.');
return pastRuns;
}
const firstResult = results[0];
const timestamp = firstResult?.reportFilename?.split('/')[2] ?? '';
const reportDir = firstResult?.reportFilename?.split('/').slice(0, 3).join('/') ?? '';
const testsCount = results.length;
const uniqueDomains = Array.from(
new Set(results.map(result => result.error ? null : extractMainDomain(result.url)))
).filter(domain => domain !== null);
const errorCount = results.filter(result => result.error).length;
if (errorCount === 0) {
pastRuns.unshift({ timestamp, reportDir, testsCount, uniqueDomains, errorCount });
console.log('Writing updated past runs to file...');
await writePastRunsFile(pastRunsFile, pastRuns);
console.log('Updated past runs written to file successfully.');
} else {
console.warn('Lighthouse run failed. Skipping past runs update.');
}
console.log('Writing index HTML...');
await writeIndexHTML(pastRuns);
console.log('Index HTML written.');
console.log('Past runs updated successfully.');
return pastRuns;
} catch (error) {
console.error('Error updating past runs:', error);
return [];
}
}
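// Render the past-runs overview and write it to results/index.html.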
async function writeIndexHTML(pastRuns) {
try {
const updatedIndexHTML = generateIndexHTML(pastRuns);
await fs.writeFile(indexFile, updatedIndexHTML);
return updatedIndexHTML;
} catch (error) {
console.error('Error writing index HTML:', error);
return '';
}
}
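// Serve the results directory over HTTP and open it in the default browser.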
async function startLocalServer(reportDirectory) {
app.use(express.static(reportDirectory));
const server = app.listen(port, () => {
console.log(`Server running at http://localhost:${port}`);
});
try {
await open(`http://localhost:${port}`);
} catch (error) {
console.error('Error opening browser:', error);
}
return server;
}
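// Optional Chrome profile directory, set via the --user-data-dir CLI argument.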
let chromeProfileDir = null;
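// Parse CLI arguments, run Lighthouse on first start, then launch the local server.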
async function main() {
let args = process.argv.slice(2);
// Find the index of '--user-data-dir' argument
let userDataDirArgIndex = args.findIndex(arg => arg === '--user-data-dir');
if (userDataDirArgIndex > -1) {
// Check if next argument exists (should be the path)
if (args[userDataDirArgIndex + 1]) {
let userDataDir = args[userDataDirArgIndex + 1];
console.log(`User data directory: ${userDataDir}`);
if (userDataDir && userDataDir.trim().length > 0) {
chromeProfileDir = userDataDir;
console.log(`Chrome profile directory set to: ${chromeProfileDir}`);
} else {
console.error('Invalid --user-data-dir argument');
}
} else {
console.error('No path provided for --user-data-dir argument');
}
} else {
console.warn('No --user-data-dir argument provided');
}
console.log(`Chrome profile directory: ${chromeProfileDir}`);
try {
// Create the results directory if it doesn't exist
await fs.mkdir(reportDirectory, { recursive: true });
// Check if index.html exists before generating an empty one
let pastRuns;
try {
await fs.access(indexFile, fs.constants.F_OK);
console.log('index.html exists.');
// Read past runs from the pastRuns.json file
console.log('Reading past runs file...');
pastRuns = await readPastRunsFile(pastRunsFile);
console.log('Past runs file read successfully.');
} catch (error) {
console.log('index.html does not exist, running Lighthouse for the first time.');
// Check if urls.txt exists before generating one
try {
await fs.access('./urls.txt', fs.constants.F_OK);
} catch (error) {
console.log('urls.txt does not exist, creating a new one with a default URL.');
await fs.writeFile('./urls.txt', 'https://www.fullstackoptimization.com/\n');
}
console.log('Running Lighthouse for URLs...');
console.log(`Using Chrome profile directory: ${chromeProfileDir}`);
const { results } = await runLighthouseForUrls(broadcast, chromeProfileDir);
console.log('Lighthouse run complete.');
console.log('Updating past runs...');
pastRuns = (await updatePastRuns(results)).filter(run => run !== null);
console.log('Past runs updated.');
}
// Write index.html based on pastRuns every time the application starts
console.log('Writing index HTML...');
await writeIndexHTML(pastRuns);
console.log('Index HTML written.');
console.log('Starting local server...');
const server = await startLocalServer(reportDirectory);
// Attach the handleUpgrade listener to the HTTP server instance
server.on('upgrade', handleUpgrade);
} catch (error) {
console.error('Unexpected error:', error);
}
}
main();