@@ -16,6 +16,7 @@ const debug = debugModule('cypress:server:browsers:memory')
1616const debugVerbose = debugModule ( 'cypress-verbose:server:browsers:memory' )
1717
1818const MEMORY_THRESHOLD_PERCENTAGE = Number ( process . env . CYPRESS_INTERNAL_MEMORY_THRESHOLD_PERCENTAGE ) || 50
19+ const EMERGENCY_MEMORY_THRESHOLD_PERCENTAGE = Number ( process . env . CYPRESS_INTERNAL_EMERGENCY_MEMORY_THRESHOLD_PERCENTAGE ) || 90
1920const MEMORY_PROFILER_INTERVAL = Number ( process . env . CYPRESS_INTERNAL_MEMORY_PROFILER_INTERVAL ) || 1000
2021const MEMORY_FOLDER = process . env . CYPRESS_INTERNAL_MEMORY_FOLDER_PATH || path . join ( 'cypress' , 'logs' , 'memory' )
2122const SAVE_MEMORY_STATS = [ '1' , 'true' ] . includes ( process . env . CYPRESS_INTERNAL_MEMORY_SAVE_STATS ?. toLowerCase ( ) as string )
@@ -44,17 +45,23 @@ export type MemoryHandler = {
/**
 * Algorithm:
 *
 * When the spec run starts:
 * 1. set total mem limit for the container/host by reading off cgroup memory limits (if available) otherwise use os.totalmem()
 * 2. set js heap size limit by reading off the browser
 * 3. turn on memory profiler
 *
 * On a defined interval (e.g. 1s):
 * 1. set current mem available for the container/host by reading off cgroup memory usage (if available) otherwise use si.mem().available
 * 2. set current renderer mem usage
 * 3. set max avail render mem to minimum of v8 heap size limit and total available mem (current available mem + current renderer mem usage)
 * 4. calc % of memory used, current renderer mem usage / max avail render mem
 * 5. if % of memory used exceeds the emergency memory threshold percentage (e.g. 90%) do a GC
 *
 * Before each test:
 * 1. if any interval exceeded the defined memory threshold (e.g. 50%), do a GC
 *
 * After the spec run ends:
 * 1. turn off memory profiler
 */
5966
6067/**
@@ -203,7 +210,7 @@ export const getAvailableMemory: () => Promise<number> = measure(() => {
203210/**
204211 * Calculates the memory stats used to determine if garbage collection should be run before the next test starts.
205212 */
206- export const calculateMemoryStats : ( ) => Promise < void > = measure ( async ( ) => {
213+ export const calculateMemoryStats : ( automation : Automation ) => Promise < void > = measure ( async ( automation : Automation ) => {
207214 // retrieve the available memory and the renderer process memory usage
208215 const [ currentAvailableMemory , rendererProcessMemRss ] = await Promise . all ( [
209216 getAvailableMemory ( ) ,
@@ -221,12 +228,20 @@ export const calculateMemoryStats: () => Promise<void> = measure(async () => {
221228 const maxAvailableRendererMemory = Math . min ( jsHeapSizeLimit , currentAvailableMemory + rendererProcessMemRss )
222229
223230 const rendererUsagePercentage = ( rendererProcessMemRss / maxAvailableRendererMemory ) * 100
224- // if we're using more than MEMORY_THRESHOLD_PERCENTAGE of the available memory,
231+ // if the renderer's memory is above the MEMORY_THRESHOLD_PERCENTAGE, we should collect garbage on the next test
225232 const shouldCollectGarbage = rendererUsagePercentage >= MEMORY_THRESHOLD_PERCENTAGE && ! SKIP_GC
226233
227234 // if we should collect garbage, set the flag to true so we can collect garbage on the next test
228235 collectGarbageOnNextTest = collectGarbageOnNextTest || shouldCollectGarbage
229236
237+ // if the renderer's memory is above the EMERGENCY_MEMORY_THRESHOLD_PERCENTAGE, we should perform an emergency garbage collection now
238+ const shouldEmergencyCollectGarbage = rendererUsagePercentage >= EMERGENCY_MEMORY_THRESHOLD_PERCENTAGE && ! SKIP_GC
239+
240+ if ( shouldEmergencyCollectGarbage ) {
241+ debug ( 'emergency garbage collection triggered' )
242+ await checkMemoryPressure ( automation , shouldEmergencyCollectGarbage )
243+ }
244+
230245 // set all the memory stats on the stats log
231246 statsLog . jsHeapSizeLimit = jsHeapSizeLimit
232247 statsLog . totalMemoryLimit = totalMemoryLimit
@@ -236,6 +251,8 @@ export const calculateMemoryStats: () => Promise<void> = measure(async () => {
236251 statsLog . currentAvailableMemory = currentAvailableMemory
237252 statsLog . maxAvailableRendererMemory = maxAvailableRendererMemory
238253 statsLog . shouldCollectGarbage = shouldCollectGarbage
254+ statsLog . emergencyGarbageCollected = shouldEmergencyCollectGarbage
255+ statsLog . emergencyRendererMemoryThreshold = maxAvailableRendererMemory * ( EMERGENCY_MEMORY_THRESHOLD_PERCENTAGE / 100 )
239256 statsLog . timestamp = Date . now ( )
240257} , { name : 'calculateMemoryStats' , save : true } )
241258
@@ -264,8 +281,8 @@ const checkMemoryPressureAndLog = async ({ automation, test }: { automation: Aut
264281 * Collects the browser's garbage if it previously exceeded the threshold when it was measured.
265282 * @param automation the automation client used to collect garbage
266283 */
267- const checkMemoryPressure : ( automation : Automation ) => Promise < void > = measure ( async ( automation : Automation ) => {
268- if ( collectGarbageOnNextTest ) {
284+ const checkMemoryPressure : ( automation : Automation , emergencyCollectGarbage ?: boolean ) => Promise < void > = measure ( async ( automation : Automation , emergencyCollectGarbage : boolean = false ) => {
285+ if ( collectGarbageOnNextTest || emergencyCollectGarbage ) {
269286 debug ( 'forcing garbage collection' )
270287 try {
271288 await automation . request ( 'collect:garbage' , null , null )
@@ -292,24 +309,24 @@ const addCumulativeStats = (stats: { [key: string]: any }) => {
292309/**
293310 * Gathers the memory stats and schedules the next check.
294311 */
295- const gatherMemoryStats = async ( ) => {
312+ const gatherMemoryStats = async ( automation : Automation ) => {
296313 try {
297- await calculateMemoryStats ( )
314+ await calculateMemoryStats ( automation )
298315 addCumulativeStats ( statsLog )
299316 statsLog = { }
300317 } catch ( err ) {
301318 debug ( 'error gathering memory stats: %o' , err )
302319 }
303- scheduleMemoryCheck ( )
320+ scheduleMemoryCheck ( automation )
304321}
305322
306323/**
307324 * Schedules the next gathering of memory stats based on the MEMORY_PROFILER_INTERVAL.
308325 */
309- const scheduleMemoryCheck = ( ) => {
326+ const scheduleMemoryCheck = ( automation : Automation ) => {
310327 if ( started ) {
311328 // not setinterval, since gatherMemoryStats is asynchronous
312- timer = setTimeout ( gatherMemoryStats , MEMORY_PROFILER_INTERVAL )
329+ timer = setTimeout ( ( ) => gatherMemoryStats ( automation ) , MEMORY_PROFILER_INTERVAL )
313330 }
314331}
315332
@@ -348,7 +365,7 @@ async function startProfiling (automation: Automation, spec: { fileName: string
348365 totalMemoryLimit = await handler . getTotalMemoryLimit ( ) ,
349366 ] )
350367
351- await gatherMemoryStats ( )
368+ await gatherMemoryStats ( automation )
352369 } catch ( err ) {
353370 debug ( 'error starting memory profiler: %o' , err )
354371 }
0 commit comments