
Commit d8e0b29

Use node-redis for soft-purging cache on deploy (#18474)
* Use [node-]redis as a direct dependency
* Extract Redis client creation to its own module
* Attach extensive logging in the Redis client creation module
* Allow the rate limiter to pass requests when Redis is disconnected
* Update rate-limit-redis
* Default error input to empty object for formatRedisError method
* Provide a name for the rate limiter's Redis client
* Include redis-mock, exclude ioredis/ioredis-mock
* Remove unused RedisAccessor#exists method
* Switch RedisAccessor to use redis/redis-mock
* Provide a name for logging on the Redis page cache
* Remove extraneous trailing space from Redis logging prefix
  (Our updated use of console.* will already be adding a space after the prefix)
* Replace ioredis-mock with redis-mock in tests
* Revert removal of ioredis dependency
* Bind Redis client to async promisified methods
* Extract former RedisAccessor constructor tests to new create-client tests
* Update RedisAccessor tests to work with the callback-based redis client
* Handle formatting Redis errors (or not) with more resiliency
* Remove ioredis as a dependency
* Update Redis soft-purging script to use node-redis
* Do more promisifying for less ugly code
* Add comment explaining TTL of -1
* Make the script executable
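For context on the "promisified methods" bullets: the redis (node-redis) v3 client this commit switches to exposes callback-style commands, so awaiting them means wrapping each command with util.promisify and binding it back to the client, which is exactly what the updated script does further down. A minimal standalone sketch of that pattern (the URL and key names are placeholders, not values from this commit):

// Sketch only: promisify-and-bind pattern for node-redis v3 callback commands.
// The URL and keys below are placeholders, not values from this commit.
const redis = require('redis')
const { promisify } = require('util')

const client = redis.createClient({ url: 'redis://localhost:6379' })

// Each command is callback-based; promisify it and bind it to the client instance
const setAsync = promisify(client.set).bind(client)
const getAsync = promisify(client.get).bind(client)

async function main () {
  await setAsync('greeting', 'hello')
  console.log(await getAsync('greeting')) // -> 'hello'
  client.quit()
}

main()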
1 parent b1ff6c1 · commit d8e0b29

3 files changed: +111 −108 lines

package-lock.json

Lines changed: 6 additions & 50 deletions
Some generated files are not rendered by default.

package.json

Lines changed: 0 additions & 1 deletion
@@ -58,7 +58,6 @@
     "hot-shots": "^8.2.0",
     "html-entities": "^1.2.1",
     "imurmurhash": "^0.1.4",
-    "ioredis": "^4.24.4",
     "is-url": "^1.2.4",
     "js-cookie": "^2.2.1",
     "js-yaml": "^3.14.0",

script/purge-redis-pages.js

File mode changed: 100644 → 100755
Lines changed: 105 additions & 57 deletions
@@ -8,7 +8,10 @@
 //
 // [end-readme]
 
-const Redis = require('ioredis')
+require('dotenv').config()
+
+const { promisify } = require('util')
+const createClient = require('../lib/redis/create-client')
 
 const { REDIS_URL, HEROKU_RELEASE_VERSION, HEROKU_PRODUCTION_APP } = process.env
 const isHerokuProd = HEROKU_PRODUCTION_APP === 'true'
@@ -42,68 +45,109 @@ console.log({
 purgeRenderedPageCache()
 
 function purgeRenderedPageCache () {
-  const redisClient = new Redis(REDIS_URL, {
-    db: pageCacheDatabaseNumber,
-
-    // Only add this configuration for TLS-enabled REDIS_URL values.
-    // Otherwise, it breaks for local Redis instances without TLS enabled.
-    ...REDIS_URL.startsWith('rediss://') && {
-      tls: {
-        // Required for production Heroku Redis
-        rejectUnauthorized: false
-      }
-    }
+  const redisClient = createClient({
+    url: REDIS_URL,
+    db: pageCacheDatabaseNumber
   })
-  let totalKeyCount = 0
+
   let iteration = 0
+  let potentialKeyCount = 0
+  let totalKeyCount = 0
 
-  // Create a readable stream (object mode) for the SCAN cursor
-  const scanStream = redisClient.scanStream({
-    match: keyScanningPattern,
-    count: scanSetSize
-  })
+  // Promise wrappers
+  const scanAsync = promisify(redisClient.scan).bind(redisClient)
+  const quitAsync = promisify(redisClient.quit).bind(redisClient)
 
-  scanStream.on('end', function () {
-    console.log(`Done purging keys; affected total: ${totalKeyCount}`)
-    console.log(`Time elapsed: ${Date.now() - startTime} ms`)
+  // Run it!
+  return scan()
 
-    // This seems to be unexpectedly necessary
-    process.exit(0)
-  })
+  //
+  // Define other subroutines
+  //
 
-  scanStream.on('error', function (error) {
-    console.error('An unexpected error occurred!\n' + error.stack)
-    console.error('\nAborting...')
-    process.exit(1)
-  })
+  async function scan (cursor = '0') {
+    try {
+      // [0]: Update the cursor position for the next scan
+      // [1]: Get the SCAN result for this iteration
+      const [nextCursor, keys] = await scanAsync(
+        cursor,
+        'MATCH', keyScanningPattern,
+        'COUNT', scanSetSize.toString()
+      )
 
-  scanStream.on('data', async function (keys) {
-    console.log(`[Iteration ${iteration++}] Received ${keys.length} keys...`)
+      console.log(`\n[Iteration ${iteration++}] Received ${keys.length} keys...`)
 
-    // NOTE: It is possible for a SCAN cursor iteration to return 0 keys when
-    // using a MATCH because it is applied after the elements are retrieved
-    if (keys.length === 0) return
+      if (dryRun) {
+        console.log(`DRY RUN! This iteration might have set TTL for up to ${keys.length} keys:\n - ${keys.join('\n - ')}`)
+      }
 
-    if (dryRun) {
-      console.log(`DRY RUN! This iteration might have set TTL for up to ${keys.length} keys:\n - ${keys.join('\n - ')}`)
-      return
-    }
+      // NOTE: It is possible for a SCAN cursor iteration to return 0 keys when
+      // using a MATCH because it is applied after the elements are retrieved
+      //
+      // Remember: more or less than COUNT or no keys may be returned
+      // See http://redis.io/commands/scan#the-count-option
+      // Also, SCAN may return the same key multiple times
+      // See http://redis.io/commands/scan#scan-guarantees
+      // Additionally, you should always have the code that uses the keys
+      // before the code checking the cursor.
+      if (keys.length > 0) {
+        if (dryRun) {
+          potentialKeyCount += keys.length
+        } else {
+          totalKeyCount += await updateTtls(keys)
+        }
+      }
 
-    // Pause the SCAN stream while we set a TTL on these keys
-    scanStream.pause()
+      // From <http://redis.io/commands/scan>:
+      // 'An iteration starts when the cursor is set to 0,
+      // and terminates when the cursor returned by the server is 0.'
+      if (nextCursor === '0') {
+        const dryRunTrailer = dryRun ? ` (potentially up to ${potentialKeyCount})` : ''
+        console.log(`\nDone purging keys; affected total: ${totalKeyCount}${dryRunTrailer}`)
+        console.log(`Time elapsed: ${Date.now() - startTime} ms`)
+
+        // Close the connection
+        await quitAsync()
+        return
+      }
 
-    // Find existing TTLs to ensure we aren't extending the TTL if it's already set
-    // PTTL mykey // only operate on -1 result values or those greater than ONE_HOUR_FROM_NOW
-    const pttlPipeline = redisClient.pipeline()
+      // Tail recursion
+      return scan(nextCursor)
+    } catch (error) {
+      console.error('An unexpected error occurred!\n' + error.stack)
+      console.error('\nAborting...')
+      process.exit(1)
+    }
+  }
+
+  // Find existing TTLs to ensure we aren't extending the TTL if it's already set
+  async function getTtls (keys) {
+    const pttlPipeline = redisClient.batch()
     keys.forEach(key => pttlPipeline.pttl(key))
-    const pttlResults = await pttlPipeline.exec()
 
-    // Update pertinent keys to have TTLs set
+    const pttlPipelineExecAsync = promisify(pttlPipeline.exec).bind(pttlPipeline)
+    const pttlResults = await pttlPipelineExecAsync()
+
+    if (pttlResults == null || pttlResults.length === 0) {
+      throw new Error('PTTL results were empty')
+    }
+
+    return pttlResults
+  }
+
+  async function updateTtls (keys) {
+    const pttlResults = await getTtls(keys)
+
+    // Find pertinent keys to have TTLs set
     let updatingKeyCount = 0
-    const pexpireAtPipeline = redisClient.pipeline()
+    const pexpireAtPipeline = redisClient.batch()
+
     keys.forEach((key, i) => {
-      const [error, pttl] = pttlResults[i]
-      const needsShortenedTtl = error == null && (pttl === -1 || pttl > expirationDuration)
+      // Only operate on -1 result values or those greater than ONE_HOUR_FROM_NOW
+      const pttl = pttlResults[i]
+      // A TTL of -1 means the entry was not configured with any TTL (expiration)
+      // currently and will remain as a permanent entry unless a TTL is added
+      const needsShortenedTtl = pttl === -1 || pttl > expirationDuration
      const isOldKey = !HEROKU_RELEASE_VERSION || !key.startsWith(`${HEROKU_RELEASE_VERSION}:`)
 
       if (needsShortenedTtl && isOldKey) {
@@ -112,17 +156,21 @@ function purgeRenderedPageCache () {
      }
     })
 
+    console.log(`Purging ${updatingKeyCount} keys...`)
+
     // Only update TTLs if there are records worth updating
-    if (updatingKeyCount > 0) {
-      // Set all the TTLs
-      const pexpireAtResults = await pexpireAtPipeline.exec()
-      const updatedResults = pexpireAtResults.filter(([error, result]) => error == null && result === 1)
+    if (updatingKeyCount === 0) return
 
-      // Count only the entries whose TTLs were successfully updated
-      totalKeyCount += updatedResults.length
+    // Set all the TTLs
+    const pexpireAtPipelineExecAsync = promisify(pexpireAtPipeline.exec).bind(pexpireAtPipeline)
+    const pexpireAtResults = await pexpireAtPipelineExecAsync()
+
+    if (pttlResults == null || pttlResults.length === 0) {
+      throw new Error('PEXPIREAT results were empty')
    }
 
-    // Resume the SCAN stream
-    scanStream.resume()
-  })
+    // Count only the entries whose TTLs were successfully updated
+    const updatedResults = pexpireAtResults.filter((result) => result === 1)
+    return updatedResults.length
+  }
 }
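A note on the changed success filter near the end of the diff: ioredis' pipeline().exec() resolves to an array of [error, result] pairs (hence the old destructuring of pttlResults[i]), whereas node-redis v3's batch().exec() hands its callback a flat array of replies, so after promisifying, each entry is just the reply value. A small illustrative sketch of counting successful PEXPIREAT replies with node-redis (the URL, key names, and timestamp are placeholders, not values from this script):

// Sketch only: node-redis v3 batch + promisified exec, counting PEXPIREAT
// replies of 1 (meaning the TTL was set). Keys and timestamp are placeholders.
const redis = require('redis')
const { promisify } = require('util')

const client = redis.createClient({ url: 'redis://localhost:6379' })

async function expireKeysAt (keys, expireAtMs) {
  const batch = client.batch()
  keys.forEach(key => batch.pexpireat(key, expireAtMs))

  const execAsync = promisify(batch.exec).bind(batch)
  const replies = await execAsync() // e.g. [1, 1, 0]: flat replies, not [error, result] pairs
  return replies.filter(reply => reply === 1).length
}

expireKeysAt(['rendered-page:/en', 'rendered-page:/ja'], Date.now() + 60 * 60 * 1000)
  .then(count => console.log(`Set TTL on ${count} keys`))
  .finally(() => client.quit())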

0 commit comments
