Add release script to soft-purge all rendered pages from the Redis cache (github#17164)

* Create a release script to soft-purge all rendered pages from the Redis cache
* Set NODE_ENV
* Pass the Redis database number as an option rather than in the URL
* Change key scanning pattern based on Heroku metadata presence
* Shorten purge TTL to 30 minutes
* Only fail hard on Heroku production releases
* Don't return TOO early or else we forget to resume the scanStream!
* Correct ioredis command casing to all lowercase
* Add unexpectedly necessary exit
* Tweak wording of dry run logging
* Add some polish
* Prevent accidental soft-purging of the current release's keys
* Simplify the key check
* Fix lint error
1 parent cb656ad · commit bca0682
Showing 2 changed files with 124 additions and 0 deletions.
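For orientation, here is a minimal standalone sketch of the soft-purge approach the new script takes: rather than deleting cached pages, each matching key is given a short TTL so Redis expires them gradually. The softPurge helper, its arguments, and the final invocation are hypothetical illustrations; the ioredis usage, the rp:* key pattern, the database number, and the 30-minute window mirror the script added below.

// Hypothetical sketch only; not part of this commit
const Redis = require('ioredis')

function softPurge (redisUrl, pattern, ttlMs) {
  // The rendered-page cache lives in a non-default database (db 1 in the script below)
  const redis = new Redis(redisUrl, { db: 1 })
  const scanStream = redis.scanStream({ match: pattern, count: 250 })

  scanStream.on('data', async (keys) => {
    if (keys.length === 0) return
    // Pause the SCAN cursor while the TTLs are applied in one pipelined batch
    scanStream.pause()
    const pipeline = redis.pipeline()
    keys.forEach(key => pipeline.pexpire(key, ttlMs))
    await pipeline.exec()
    scanStream.resume()
  })

  scanStream.on('end', () => redis.quit())
}

// e.g. expire all cached rendered pages within 30 minutes
softPurge(process.env.REDIS_URL, 'rp:*', 30 * 60 * 1000)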
Procfile
@@ -1 +1,3 @@
web: NODE_ENV=production node server.js

release: NODE_ENV=production node script/purge-redis-pages.js
script/purge-redis-pages.js
@@ -0,0 +1,122 @@
#!/usr/bin/env node

// [start-readme]
//
// Run this script to manually purge the Redis rendered page cache.
// This will typically only be run by Heroku during the deployment process,
// as triggered via our Procfile's "release" phase configuration.
//
// [end-readme]

const program = require('commander')
const Redis = require('ioredis')

const { REDIS_URL, HEROKU_RELEASE_VERSION, HEROKU_PRODUCTION_APP } = process.env
const isHerokuProd = HEROKU_PRODUCTION_APP === 'true'
const pageCacheDatabaseNumber = 1
const keyScanningPattern = HEROKU_RELEASE_VERSION ? '*:rp:*' : 'rp:*'
const scanSetSize = 250

const startTime = Date.now()
const expirationDuration = 30 * 60 * 1000 // 30 minutes
const expirationTimestamp = startTime + expirationDuration // 30 minutes from now

program
  .description('Purge the Redis rendered page cache')
  .option('-d, --dry-run', 'print keys to be purged without actually purging')
  .parse(process.argv)

const dryRun = program.dryRun

// verify environment variables
if (!REDIS_URL) {
  if (isHerokuProd) {
    console.error('Error: you must specify the REDIS_URL environment variable.\n')
    process.exit(1)
  } else {
    console.warn('Warning: you did not specify a REDIS_URL environment variable. Exiting...\n')
    process.exit(0)
  }
}

console.log({
  HEROKU_RELEASE_VERSION,
  HEROKU_PRODUCTION_APP
})

purgeRenderedPageCache()

function purgeRenderedPageCache () {
  const redisClient = new Redis(REDIS_URL, { db: pageCacheDatabaseNumber })
  let totalKeyCount = 0
  let iteration = 0

  // Create a readable stream (object mode) for the SCAN cursor
  const scanStream = redisClient.scanStream({
    match: keyScanningPattern,
    count: scanSetSize
  })

  scanStream.on('end', function () {
    console.log(`Done purging keys; affected total: ${totalKeyCount}`)
    console.log(`Time elapsed: ${Date.now() - startTime} ms`)

    // This seems to be unexpectedly necessary
    process.exit(0)
  })

  scanStream.on('error', function (error) {
    console.error('An unexpected error occurred!\n' + error.stack)
    console.error('\nAborting...')
    process.exit(1)
  })

  scanStream.on('data', async function (keys) {
    console.log(`[Iteration ${iteration++}] Received ${keys.length} keys...`)

    // NOTE: It is possible for a SCAN cursor iteration to return 0 keys when
    // using a MATCH because it is applied after the elements are retrieved
    if (keys.length === 0) return

    if (dryRun) {
      console.log(`DRY RUN! This iteration might have set TTL for up to ${keys.length} keys:\n - ${keys.join('\n - ')}`)
      return
    }

    // Pause the SCAN stream while we set a TTL on these keys
    scanStream.pause()

    // Find existing TTLs to ensure we aren't extending the TTL if it's already set
    // PTTL mykey // only operate on -1 result values (no TTL) or TTLs longer than the new 30-minute expirationDuration
    const pttlPipeline = redisClient.pipeline()
    keys.forEach(key => pttlPipeline.pttl(key))
    const pttlResults = await pttlPipeline.exec()

    // Update pertinent keys to have TTLs set
    let updatingKeyCount = 0
    const pexpireAtPipeline = redisClient.pipeline()
    keys.forEach((key, i) => {
      const [error, pttl] = pttlResults[i]
      const needsShortenedTtl = error == null && (pttl === -1 || pttl > expirationDuration)
      const isOldKey = !HEROKU_RELEASE_VERSION || !key.startsWith(`${HEROKU_RELEASE_VERSION}:`)

      if (needsShortenedTtl && isOldKey) {
        pexpireAtPipeline.pexpireat(key, expirationTimestamp)
        updatingKeyCount += 1
      }
    })

    // Only update TTLs if there are records worth updating
    if (updatingKeyCount > 0) {
      // Set all the TTLs
      const pexpireAtResults = await pexpireAtPipeline.exec()
      const updatedResults = pexpireAtResults.filter(([error, result]) => error == null && result === 1)

      // Count only the entries whose TTLs were successfully updated
      totalKeyCount += updatedResults.length
    }

    // Resume the SCAN stream
    scanStream.resume()
  })
}
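For a local dry run, the script can presumably be invoked directly, e.g. REDIS_URL=redis://localhost:6379 node script/purge-redis-pages.js --dry-run (the URL here is a placeholder); with --dry-run it only prints the keys that would receive a shortened TTL instead of setting their expirations.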