From 8017c35099812d77bce57cf5cce095975acb63ca Mon Sep 17 00:00:00 2001 From: Adam Horodyski Date: Fri, 27 Dec 2024 18:00:05 +0100 Subject: [PATCH 1/8] feat: reimplement the performance module to enable it during runtime --- .../BaseProfilingToolMenu.tsx | 4 + src/libs/Performance.tsx | 450 +++++++++--------- src/setup/platformSetup/index.native.ts | 5 +- tests/e2e/ADDING_TESTS.md | 7 +- 4 files changed, 236 insertions(+), 230 deletions(-) diff --git a/src/components/ProfilingToolMenu/BaseProfilingToolMenu.tsx b/src/components/ProfilingToolMenu/BaseProfilingToolMenu.tsx index a288396ad204..e0f9035af1c1 100644 --- a/src/components/ProfilingToolMenu/BaseProfilingToolMenu.tsx +++ b/src/components/ProfilingToolMenu/BaseProfilingToolMenu.tsx @@ -13,6 +13,7 @@ import toggleProfileTool from '@libs/actions/ProfilingTool'; import getPlatform from '@libs/getPlatform'; import Log from '@libs/Log'; import {Memoize} from '@libs/memoize'; +import Performance from '@libs/Performance'; import CONFIG from '@src/CONFIG'; import CONST from '@src/CONST'; import ONYXKEYS from '@src/ONYXKEYS'; @@ -69,12 +70,14 @@ function BaseProfilingToolMenu({isProfilingInProgress = false, showShareButton = setTotalMemory(amountOfTotalMemory); setUsedMemory(amountOfUsedMemory); setMemoizeStats(Memoize.stopMonitoring()); + Performance.disableMonitoring(); }, []); const onToggleProfiling = useCallback(() => { const shouldProfiling = !isProfilingInProgress; if (shouldProfiling) { Memoize.startMonitoring(); + Performance.enableMonitoring(); startProfiling(); } else { stop(); @@ -94,6 +97,7 @@ function BaseProfilingToolMenu({isProfilingInProgress = false, showShareButton = totalMemory: formatBytes(totalMemory, 2), usedMemory: formatBytes(usedMemory, 2), memoizeStats, + performance: Performance.getPerformanceMeasures(), }), [memoizeStats, totalMemory, usedMemory], ); diff --git a/src/libs/Performance.tsx b/src/libs/Performance.tsx index ef2b08e47229..3289b92410a1 100644 --- a/src/libs/Performance.tsx +++ b/src/libs/Performance.tsx @@ -3,51 +3,14 @@ import isObject from 'lodash/isObject'; import lodashTransform from 'lodash/transform'; import React, {forwardRef, Profiler} from 'react'; import {Alert, InteractionManager} from 'react-native'; -import type {PerformanceEntry, PerformanceMark, PerformanceMeasure, ReactNativePerformance, Performance as RNPerformance} from 'react-native-performance'; +import performance, {PerformanceObserver, setResourceLoggingEnabled} from 'react-native-performance'; +import type {PerformanceEntry, PerformanceMark, PerformanceMeasure} from 'react-native-performance'; import type {PerformanceObserverEntryList} from 'react-native-performance/lib/typescript/performance-observer'; import CONST from '@src/CONST'; import isE2ETestSession from './E2E/isE2ETestSession'; import getComponentDisplayName from './getComponentDisplayName'; import * as Metrics from './Metrics'; -type WrappedComponentConfig = {id: string}; - -type PerformanceEntriesCallback = (entry: PerformanceEntry) => void; - -type Phase = 'mount' | 'update' | 'nested-update'; - -type WithRenderTraceHOC =

<P extends Record<string, unknown>>(WrappedComponent: React.ComponentType<P>) => React.ComponentType<P & React.RefAttributes<unknown>>; - -type BlankHOC = <P extends Record<string, unknown>>(Component: React.ComponentType<P>) => React.ComponentType<P>
; - -type SetupPerformanceObserver = () => void; -type DiffObject = (object: Record, base: Record) => Record; -type GetPerformanceMetrics = () => PerformanceEntry[]; -type PrintPerformanceMetrics = () => void; -type MarkStart = (name: string, detail?: Record) => PerformanceMark | void; -type MarkEnd = (name: string, detail?: Record) => PerformanceMark | void; -type MeasureFailSafe = (measureName: string, startOrMeasureOptions: string, endMark?: string) => void; -type MeasureTTI = (endMark?: string) => void; -type TraceRender = (id: string, phase: Phase, actualDuration: number, baseDuration: number, startTime: number, commitTime: number, interactions: Set) => PerformanceMeasure | void; -type WithRenderTrace = ({id}: WrappedComponentConfig) => WithRenderTraceHOC | BlankHOC; -type SubscribeToMeasurements = (callback: PerformanceEntriesCallback) => void; - -type PerformanceModule = { - diffObject: DiffObject; - setupPerformanceObserver: SetupPerformanceObserver; - getPerformanceMetrics: GetPerformanceMetrics; - printPerformanceMetrics: PrintPerformanceMetrics; - markStart: MarkStart; - markEnd: MarkEnd; - measureFailSafe: MeasureFailSafe; - measureTTI: MeasureTTI; - traceRender: TraceRender; - withRenderTrace: WithRenderTrace; - subscribeToMeasurements: SubscribeToMeasurements; -}; - -let rnPerformance: RNPerformance; - /** * Deep diff between two objects. Useful for figuring out what changed about an object from one render to the next so * that state and props updates can be optimized. @@ -66,204 +29,237 @@ function diffObject(object: Record, base: Record {}, - getPerformanceMetrics: () => [], - printPerformanceMetrics: () => {}, - markStart: () => {}, - markEnd: () => {}, - measureFailSafe: () => {}, - measureTTI: () => {}, - traceRender: () => {}, - withRenderTrace: - () => - // eslint-disable-next-line @typescript-eslint/naming-convention -

<P extends Record<string, unknown>>(Component: React.ComponentType<P>): React.ComponentType<P>
=> - Component, - subscribeToMeasurements: () => {}, -}; +function measureFailSafe(measureName: string, startOrMeasureOptions: string, endMark?: string): void { + try { + performance.measure(measureName, startOrMeasureOptions, endMark); + } catch (error) { + // Sometimes there might be no start mark recorded and the measure will fail with an error + if (error instanceof Error) { + console.debug(error.message); + } + } +} + +/** + * Measures the TTI time. To be called when the app is considered to be interactive. + */ +function measureTTI(endMark?: string): void { + // Make sure TTI is captured when the app is really usable + InteractionManager.runAfterInteractions(() => { + requestAnimationFrame(() => { + measureFailSafe('TTI', 'nativeLaunchStart', endMark); -if (Metrics.canCapturePerformanceMetrics()) { - const perfModule = require('react-native-performance'); - perfModule.setResourceLoggingEnabled(true); - rnPerformance = perfModule.default; - - Performance.measureFailSafe = (measureName: string, startOrMeasureOptions: string, endMark?: string) => { - try { - rnPerformance.measure(measureName, startOrMeasureOptions, endMark); - } catch (error) { - // Sometimes there might be no start mark recorded and the measure will fail with an error - if (error instanceof Error) { - console.debug(error.message); + // we don't want the alert to show on an e2e test session + if (!isE2ETestSession()) { + printPerformanceMetrics(); } + }); + }); +} + +/* + * Monitor native marks that we want to put on the timeline + */ + +const nativeMarksObserver = new PerformanceObserver((list, _observer) => { + list.getEntries().forEach((entry: PerformanceEntry) => { + if (entry.name === 'nativeLaunchEnd') { + measureFailSafe('nativeLaunch', 'nativeLaunchStart', 'nativeLaunchEnd'); + } + if (entry.name === 'downloadEnd') { + measureFailSafe('jsBundleDownload', 'downloadStart', 'downloadEnd'); + } + if (entry.name === 'runJsBundleEnd') { + measureFailSafe('runJsBundle', 'runJsBundleStart', 'runJsBundleEnd'); + } + if (entry.name === 'appCreationEnd') { + measureFailSafe('appCreation', 'appCreationStart', 'appCreationEnd'); + measureFailSafe('nativeLaunchEnd_To_appCreationStart', 'nativeLaunchEnd', 'appCreationStart'); + } + if (entry.name === 'contentAppeared') { + measureFailSafe('appCreationEnd_To_contentAppeared', 'appCreationEnd', 'contentAppeared'); } - }; - /** - * Measures the TTI time. To be called when the app is considered to be interactive. - */ - Performance.measureTTI = (endMark?: string) => { - // Make sure TTI is captured when the app is really usable - InteractionManager.runAfterInteractions(() => { - requestAnimationFrame(() => { - Performance.measureFailSafe('TTI', 'nativeLaunchStart', endMark); - - // we don't want the alert to show on an e2e test session - if (!isE2ETestSession()) { - Performance.printPerformanceMetrics(); - } - }); - }); - }; + // We don't need to keep the observer past this point + if (entry.name === 'runJsBundleEnd' || entry.name === 'downloadEnd') { + _observer.disconnect(); + } + }); +}); - /** - * Sets up an observer to capture events recorded in the native layer before the app fully initializes. 
- */ - Performance.setupPerformanceObserver = () => { - // Monitor some native marks that we want to put on the timeline - new perfModule.PerformanceObserver((list: PerformanceObserverEntryList, observer: PerformanceObserver) => { - list.getEntries().forEach((entry: PerformanceEntry) => { - if (entry.name === 'nativeLaunchEnd') { - Performance.measureFailSafe('nativeLaunch', 'nativeLaunchStart', 'nativeLaunchEnd'); - } - if (entry.name === 'downloadEnd') { - Performance.measureFailSafe('jsBundleDownload', 'downloadStart', 'downloadEnd'); - } - if (entry.name === 'runJsBundleEnd') { - Performance.measureFailSafe('runJsBundle', 'runJsBundleStart', 'runJsBundleEnd'); - } - if (entry.name === 'appCreationEnd') { - Performance.measureFailSafe('appCreation', 'appCreationStart', 'appCreationEnd'); - Performance.measureFailSafe('nativeLaunchEnd_To_appCreationStart', 'nativeLaunchEnd', 'appCreationStart'); - } - if (entry.name === 'contentAppeared') { - Performance.measureFailSafe('appCreationEnd_To_contentAppeared', 'appCreationEnd', 'contentAppeared'); - } - - // We don't need to keep the observer past this point - if (entry.name === 'runJsBundleEnd' || entry.name === 'downloadEnd') { - observer.disconnect(); - } - }); - }).observe({type: 'react-native-mark', buffered: true}); - - // Monitor for "_end" marks and capture "_start" to "_end" measures - new perfModule.PerformanceObserver((list: PerformanceObserverEntryList) => { - list.getEntriesByType('mark').forEach((mark: PerformanceEntry) => { - if (mark.name.endsWith('_end')) { - const end = mark.name; - const name = end.replace(/_end$/, ''); - const start = `${name}_start`; - Performance.measureFailSafe(name, start, end); - } - - // Capture any custom measures or metrics below - if (mark.name === `${CONST.TIMING.SIDEBAR_LOADED}_end`) { - Performance.measureFailSafe('contentAppeared_To_screenTTI', 'contentAppeared', mark.name); - Performance.measureTTI(mark.name); - } - }); - }).observe({type: 'mark', buffered: true}); - }; +function setNativeMarksObserver(enabled = false): void { + if (!enabled) { + nativeMarksObserver.disconnect(); + return; + } - Performance.getPerformanceMetrics = (): PerformanceEntry[] => - [ - ...rnPerformance.getEntriesByName('nativeLaunch'), - ...rnPerformance.getEntriesByName('nativeLaunchEnd_To_appCreationStart'), - ...rnPerformance.getEntriesByName('appCreation'), - ...rnPerformance.getEntriesByName('appCreationEnd_To_contentAppeared'), - ...rnPerformance.getEntriesByName('contentAppeared_To_screenTTI'), - ...rnPerformance.getEntriesByName('runJsBundle'), - ...rnPerformance.getEntriesByName('jsBundleDownload'), - ...rnPerformance.getEntriesByName('TTI'), - ...rnPerformance.getEntriesByName('regularAppStart'), - ...rnPerformance.getEntriesByName('appStartedToReady'), - ].filter((entry) => entry.duration > 0); - - /** - * Outputs performance stats. We alert these so that they are easy to access in release builds. - */ - Performance.printPerformanceMetrics = () => { - const stats = Performance.getPerformanceMetrics(); - const statsAsText = stats.map((entry) => `\u2022 ${entry.name}: ${entry.duration.toFixed(1)}ms`).join('\n'); - - if (stats.length > 0) { - Alert.alert('Performance', statsAsText); + nativeMarksObserver.disconnect(); + nativeMarksObserver.observe({type: 'react-native-mark', buffered: true}); +} + +/** + * Monitor for "_end" marks and capture "_start" to "_end" measures, including events recorded in the native layer before the app fully initializes. 
+ */ +const customMarksObserver = new PerformanceObserver((list: PerformanceObserverEntryList) => { + list.getEntriesByType('mark').forEach((mark: PerformanceEntry) => { + if (mark.name.endsWith('_end')) { + const end = mark.name; + const name = end.replace(/_end$/, ''); + const start = `${name}_start`; + measureFailSafe(name, start, end); } - }; - Performance.subscribeToMeasurements = (callback: PerformanceEntriesCallback) => { - new perfModule.PerformanceObserver((list: PerformanceObserverEntryList) => { - list.getEntriesByType('measure').forEach(callback); - }).observe({type: 'measure', buffered: true}); + // Capture any custom measures or metrics below + if (mark.name === `${CONST.TIMING.SIDEBAR_LOADED}_end`) { + measureFailSafe('contentAppeared_To_screenTTI', 'contentAppeared', mark.name); + measureTTI(mark.name); + } + }); +}); + +function setCustomMarksObserver(enabled = false): void { + if (!enabled) { + customMarksObserver.disconnect(); + return; + } + + customMarksObserver.disconnect(); + customMarksObserver.observe({type: 'mark', buffered: true}); +} + +function getPerformanceMetrics(): PerformanceEntry[] { + return [ + ...performance.getEntriesByName('nativeLaunch'), + ...performance.getEntriesByName('nativeLaunchEnd_To_appCreationStart'), + ...performance.getEntriesByName('appCreation'), + ...performance.getEntriesByName('appCreationEnd_To_contentAppeared'), + ...performance.getEntriesByName('contentAppeared_To_screenTTI'), + ...performance.getEntriesByName('runJsBundle'), + ...performance.getEntriesByName('jsBundleDownload'), + ...performance.getEntriesByName('TTI'), + ...performance.getEntriesByName('regularAppStart'), + ...performance.getEntriesByName('appStartedToReady'), + ].filter((entry) => entry.duration > 0); +} + +function getPerformanceMeasures(): PerformanceEntry[] { + return performance.getEntriesByType('measure'); +} + +/** + * Outputs performance stats. We alert these so that they are easy to access in release builds. 
+ */ +function printPerformanceMetrics(): void { + const stats = getPerformanceMetrics(); + const statsAsText = stats.map((entry) => `\u2022 ${entry.name}: ${entry.duration.toFixed(1)}ms`).join('\n'); + + if (stats.length > 0) { + Alert.alert('Performance', statsAsText); + } +} + +function subscribeToMeasurements(callback: (entry: PerformanceEntry) => void): () => void { + const observer = new PerformanceObserver((list: PerformanceObserverEntryList) => { + list.getEntriesByType('measure').forEach(callback); + }); + + observer.observe({type: 'measure', buffered: true}); + + return () => observer.disconnect(); +} + +/** + * Add a start mark to the performance entries + */ +function markStart(name: string, detail?: Record): PerformanceMark { + return performance.mark(`${name}_start`, {detail}); +} + +/** + * Add an end mark to the performance entries + * A measure between start and end is captured automatically + */ +function markEnd(name: string, detail?: Record): PerformanceMark { + return performance.mark(`${name}_end`, {detail}); +} + +type Phase = 'mount' | 'update' | 'nested-update'; + +/** + * Put data emitted by Profiler components on the timeline + * @param id the "id" prop of the Profiler tree that has just committed + * @param phase either "mount" (if the tree just mounted) or "update" (if it re-rendered) + * @param actualDuration time spent rendering the committed update + * @param baseDuration estimated time to render the entire subtree without memoization + * @param startTime when React began rendering this update + * @param commitTime when React committed this update + * @param interactions the Set of interactions belonging to this update + */ +function traceRender(id: string, phase: Phase, actualDuration: number, baseDuration: number, startTime: number, commitTime: number, interactions: Set): PerformanceMeasure { + return performance.measure(id, { + start: startTime, + duration: actualDuration, + detail: { + phase, + baseDuration, + commitTime, + interactions, + }, + }); +} + +type WrappedComponentConfig = {id: string}; + +/** + * A HOC that captures render timings of the Wrapped component + */ +function withRenderTrace({id}: WrappedComponentConfig) { + if (!Metrics.canCapturePerformanceMetrics()) { + return

<P extends Record<string, unknown>>(WrappedComponent: React.ComponentType<P>): React.ComponentType<P>
=> WrappedComponent; + } + + return

<P extends Record<string, unknown>>(WrappedComponent: React.ComponentType<P>): React.ComponentType<P & React.RefAttributes<unknown>> => { + const WithRenderTrace: React.ComponentType<P & React.RefAttributes<unknown>
> = forwardRef((props: P, ref) => ( + + + + )); + + WithRenderTrace.displayName = `withRenderTrace(${getComponentDisplayName(WrappedComponent as React.ComponentType)})`; + return WithRenderTrace; }; +} - /** - * Add a start mark to the performance entries - */ - Performance.markStart = (name: string, detail?: Record): PerformanceMark => rnPerformance.mark(`${name}_start`, {detail}); - - /** - * Add an end mark to the performance entries - * A measure between start and end is captured automatically - */ - Performance.markEnd = (name: string, detail?: Record): PerformanceMark => rnPerformance.mark(`${name}_end`, {detail}); - - /** - * Put data emitted by Profiler components on the timeline - * @param id the "id" prop of the Profiler tree that has just committed - * @param phase either "mount" (if the tree just mounted) or "update" (if it re-rendered) - * @param actualDuration time spent rendering the committed update - * @param baseDuration estimated time to render the entire subtree without memoization - * @param startTime when React began rendering this update - * @param commitTime when React committed this update - * @param interactions the Set of interactions belonging to this update - */ - Performance.traceRender = ( - id: string, - phase: Phase, - actualDuration: number, - baseDuration: number, - startTime: number, - commitTime: number, - interactions: Set, - ): PerformanceMeasure => - rnPerformance.measure(id, { - start: startTime, - duration: actualDuration, - detail: { - phase, - baseDuration, - commitTime, - interactions, - }, - }); +function enableMonitoring() { + setResourceLoggingEnabled(true); + setNativeMarksObserver(true); + setCustomMarksObserver(true); +} - /** - * A HOC that captures render timings of the Wrapped component - */ - Performance.withRenderTrace = - ({id}: WrappedComponentConfig) => - // eslint-disable-next-line @typescript-eslint/naming-convention -

<P extends Record<string, unknown>>(WrappedComponent: React.ComponentType<P>): React.ComponentType<P & React.RefAttributes<unknown>> => { - const WithRenderTrace: React.ComponentType<P & React.RefAttributes<unknown>
> = forwardRef((props: P, ref) => ( - - - - )); - - WithRenderTrace.displayName = `withRenderTrace(${getComponentDisplayName(WrappedComponent as React.ComponentType)})`; - return WithRenderTrace; - }; +function disableMonitoring() { + setResourceLoggingEnabled(false); + setNativeMarksObserver(false); + setCustomMarksObserver(false); } -export default Performance; +export default { + diffObject, + measureFailSafe, + measureTTI, + enableMonitoring, + disableMonitoring, + getPerformanceMetrics, + getPerformanceMeasures, + printPerformanceMetrics, + subscribeToMeasurements, + markStart, + markEnd, + withRenderTrace, +}; diff --git a/src/setup/platformSetup/index.native.ts b/src/setup/platformSetup/index.native.ts index 31000ad9daa6..ded98285aa3e 100644 --- a/src/setup/platformSetup/index.native.ts +++ b/src/setup/platformSetup/index.native.ts @@ -1,4 +1,5 @@ import crashlytics from '@react-native-firebase/crashlytics'; +import * as Metrics from '@libs/Metrics'; import Performance from '@libs/Performance'; import CONFIG from '@src/CONFIG'; @@ -10,5 +11,7 @@ export default function () { crashlytics().setCrashlyticsCollectionEnabled(false); } - Performance.setupPerformanceObserver(); + if (Metrics.canCapturePerformanceMetrics()) { + Performance.enableMonitoring(); + } } diff --git a/tests/e2e/ADDING_TESTS.md b/tests/e2e/ADDING_TESTS.md index 92f6404203c1..9f2750d0d0b4 100644 --- a/tests/e2e/ADDING_TESTS.md +++ b/tests/e2e/ADDING_TESTS.md @@ -68,7 +68,11 @@ const test = () => { Navigation.navigate(ROUTES.REPORT_WITH_ID.getRoute(firstReportIDInList)); // markEnd will be called in the Screen's implementation - performance.subscribeToMeasurements("navigateToReport", (measurement) => { + Performance.subscribeToMeasurements((measurement) => { + if (!measurement.name !== "navigateToReport") { + return; + } + // ... do something with the measurements E2EClient.submitTestResults({ name: "Navigate to report", @@ -111,4 +115,3 @@ It is recommended to run a debug build of the e2e tests first to iterate quickly You can use regular console statements to debug your test. The output will be visible in logcat. I recommend opening the android studio logcat window and filter for `ReactNativeJS` to see the output you'd otherwise typically see in your metro bundler instance. 
- From 736bedd357e78c17f21fd1c9122f74fea8fc1693 Mon Sep 17 00:00:00 2001 From: Adam Horodyski Date: Fri, 27 Dec 2024 18:13:51 +0100 Subject: [PATCH 2/8] chore: migrate BaseProfilingToolMenu to useOnyx --- .../BaseProfilingToolMenu.tsx | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/src/components/ProfilingToolMenu/BaseProfilingToolMenu.tsx b/src/components/ProfilingToolMenu/BaseProfilingToolMenu.tsx index e0f9035af1c1..5cdbd04798cc 100644 --- a/src/components/ProfilingToolMenu/BaseProfilingToolMenu.tsx +++ b/src/components/ProfilingToolMenu/BaseProfilingToolMenu.tsx @@ -1,7 +1,6 @@ import React, {useCallback, useEffect, useState} from 'react'; import DeviceInfo from 'react-native-device-info'; -import {withOnyx} from 'react-native-onyx'; -import type {OnyxEntry} from 'react-native-onyx'; +import {useOnyx} from 'react-native-onyx'; import {startProfiling, stopProfiling} from 'react-native-release-profiler'; import Button from '@components/Button'; import Switch from '@components/Switch'; @@ -21,10 +20,6 @@ import pkg from '../../../package.json'; import RNFS from './RNFS'; import Share from './Share'; -type BaseProfilingToolMenuOnyxProps = { - isProfilingInProgress: OnyxEntry; -}; - type BaseProfilingToolMenuProps = { /** Path used to save the file */ pathToBeUsed: string; @@ -32,7 +27,7 @@ type BaseProfilingToolMenuProps = { displayPath: string; /** Whether to show the share button */ showShareButton?: boolean; -} & BaseProfilingToolMenuOnyxProps; +}; function formatBytes(bytes: number, decimals = 2) { if (!+bytes) { @@ -51,7 +46,8 @@ function formatBytes(bytes: number, decimals = 2) { // WARNING: When changing this name make sure that the "scripts/symbolicate-profile.ts" script is still working! const newFileName = `Profile_trace_for_${pkg.version}.cpuprofile`; -function BaseProfilingToolMenu({isProfilingInProgress = false, showShareButton = false, pathToBeUsed, displayPath}: BaseProfilingToolMenuProps) { +function BaseProfilingToolMenu({showShareButton = false, pathToBeUsed, displayPath}: BaseProfilingToolMenuProps) { + const [isProfilingInProgress] = useOnyx(ONYXKEYS.APP_PROFILING_IN_PROGRESS); const styles = useThemeStyles(); const [filePath, setFilePath] = useState(''); const [sharePath, setSharePath] = useState(''); @@ -187,8 +183,4 @@ function BaseProfilingToolMenu({isProfilingInProgress = false, showShareButton = BaseProfilingToolMenu.displayName = 'BaseProfilingToolMenu'; -export default withOnyx({ - isProfilingInProgress: { - key: ONYXKEYS.APP_PROFILING_IN_PROGRESS, - }, -})(BaseProfilingToolMenu); +export default BaseProfilingToolMenu; From f2ab5e55ba117a78666925e35894bd213b604abc Mon Sep 17 00:00:00 2001 From: Adam Horodyski Date: Fri, 27 Dec 2024 18:19:28 +0100 Subject: [PATCH 3/8] chore: rename enable funcs for consistency --- src/libs/Performance.tsx | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/libs/Performance.tsx b/src/libs/Performance.tsx index 3289b92410a1..19c0a8e85de5 100644 --- a/src/libs/Performance.tsx +++ b/src/libs/Performance.tsx @@ -87,7 +87,7 @@ const nativeMarksObserver = new PerformanceObserver((list, _observer) => { }); }); -function setNativeMarksObserver(enabled = false): void { +function setNativeMarksObserverEnabled(enabled = false): void { if (!enabled) { nativeMarksObserver.disconnect(); return; @@ -117,7 +117,7 @@ const customMarksObserver = new PerformanceObserver((list: PerformanceObserverEn }); }); -function setCustomMarksObserver(enabled = false): void { +function 
setCustomMarksObserverEnabled(enabled = false): void { if (!enabled) { customMarksObserver.disconnect(); return; @@ -239,14 +239,14 @@ function withRenderTrace({id}: WrappedComponentConfig) { function enableMonitoring() { setResourceLoggingEnabled(true); - setNativeMarksObserver(true); - setCustomMarksObserver(true); + setNativeMarksObserverEnabled(true); + setCustomMarksObserverEnabled(true); } function disableMonitoring() { setResourceLoggingEnabled(false); - setNativeMarksObserver(false); - setCustomMarksObserver(false); + setNativeMarksObserverEnabled(false); + setCustomMarksObserverEnabled(false); } export default { From 48e5d1b05505704980aa9ab2590af984b6307a8b Mon Sep 17 00:00:00 2001 From: Adam Horodyski Date: Mon, 30 Dec 2024 16:52:56 +0100 Subject: [PATCH 4/8] chore: address e2e readme comments --- tests/e2e/ADDING_TESTS.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/e2e/ADDING_TESTS.md b/tests/e2e/ADDING_TESTS.md index 9f2750d0d0b4..5986e21f2fc8 100644 --- a/tests/e2e/ADDING_TESTS.md +++ b/tests/e2e/ADDING_TESTS.md @@ -69,11 +69,10 @@ const test = () => { // markEnd will be called in the Screen's implementation Performance.subscribeToMeasurements((measurement) => { - if (!measurement.name !== "navigateToReport") { + if (measurement.name !== "navigateToReport") { return; } - // ... do something with the measurements E2EClient.submitTestResults({ name: "Navigate to report", metric: measurement.duration, From 265287207a98bd3bfb1b830cc328b61f13edf41f Mon Sep 17 00:00:00 2001 From: Adam Horodyski Date: Mon, 30 Dec 2024 16:57:07 +0100 Subject: [PATCH 5/8] chore: fix comments --- src/libs/Performance.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libs/Performance.tsx b/src/libs/Performance.tsx index 19c0a8e85de5..f6a6ee8d7854 100644 --- a/src/libs/Performance.tsx +++ b/src/libs/Performance.tsx @@ -41,7 +41,8 @@ function measureFailSafe(measureName: string, startOrMeasureOptions: string, end } /** - * Measures the TTI time. To be called when the app is considered to be interactive. + * Measures the TTI (time to interactive) time starting from the `nativeLaunchStart` event. + * To be called when the app is considered to be interactive. 
*/ function measureTTI(endMark?: string): void { // Make sure TTI is captured when the app is really usable @@ -60,7 +61,6 @@ function measureTTI(endMark?: string): void { /* * Monitor native marks that we want to put on the timeline */ - const nativeMarksObserver = new PerformanceObserver((list, _observer) => { list.getEntries().forEach((entry: PerformanceEntry) => { if (entry.name === 'nativeLaunchEnd') { From 722fbb673788c33dc73b249790770e68b7508f34 Mon Sep 17 00:00:00 2001 From: Adam Horodyski Date: Mon, 30 Dec 2024 18:52:23 +0100 Subject: [PATCH 6/8] chore: added a more descriptive comment on the observer early disconnect --- src/libs/Performance.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libs/Performance.tsx b/src/libs/Performance.tsx index f6a6ee8d7854..325e0a73d514 100644 --- a/src/libs/Performance.tsx +++ b/src/libs/Performance.tsx @@ -80,7 +80,8 @@ const nativeMarksObserver = new PerformanceObserver((list, _observer) => { measureFailSafe('appCreationEnd_To_contentAppeared', 'appCreationEnd', 'contentAppeared'); } - // We don't need to keep the observer past this point + // At this point we've captured and processed all the native marks we're interested in + // and are not expecting to have more thus we can safely disconnect the observer if (entry.name === 'runJsBundleEnd' || entry.name === 'downloadEnd') { _observer.disconnect(); } From 4e19a15d2d617473af9475317bd95f82bd37de95 Mon Sep 17 00:00:00 2001 From: Adam Horodyski Date: Mon, 30 Dec 2024 19:44:50 +0100 Subject: [PATCH 7/8] chore: rely on ts infer for observer callbacks --- src/libs/Performance.tsx | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/libs/Performance.tsx b/src/libs/Performance.tsx index 325e0a73d514..90233ef2652e 100644 --- a/src/libs/Performance.tsx +++ b/src/libs/Performance.tsx @@ -5,7 +5,6 @@ import React, {forwardRef, Profiler} from 'react'; import {Alert, InteractionManager} from 'react-native'; import performance, {PerformanceObserver, setResourceLoggingEnabled} from 'react-native-performance'; import type {PerformanceEntry, PerformanceMark, PerformanceMeasure} from 'react-native-performance'; -import type {PerformanceObserverEntryList} from 'react-native-performance/lib/typescript/performance-observer'; import CONST from '@src/CONST'; import isE2ETestSession from './E2E/isE2ETestSession'; import getComponentDisplayName from './getComponentDisplayName'; @@ -62,7 +61,7 @@ function measureTTI(endMark?: string): void { * Monitor native marks that we want to put on the timeline */ const nativeMarksObserver = new PerformanceObserver((list, _observer) => { - list.getEntries().forEach((entry: PerformanceEntry) => { + list.getEntries().forEach((entry) => { if (entry.name === 'nativeLaunchEnd') { measureFailSafe('nativeLaunch', 'nativeLaunchStart', 'nativeLaunchEnd'); } @@ -101,8 +100,8 @@ function setNativeMarksObserverEnabled(enabled = false): void { /** * Monitor for "_end" marks and capture "_start" to "_end" measures, including events recorded in the native layer before the app fully initializes. 
*/ -const customMarksObserver = new PerformanceObserver((list: PerformanceObserverEntryList) => { - list.getEntriesByType('mark').forEach((mark: PerformanceEntry) => { +const customMarksObserver = new PerformanceObserver((list) => { + list.getEntriesByType('mark').forEach((mark) => { if (mark.name.endsWith('_end')) { const end = mark.name; const name = end.replace(/_end$/, ''); @@ -160,7 +159,7 @@ function printPerformanceMetrics(): void { } function subscribeToMeasurements(callback: (entry: PerformanceEntry) => void): () => void { - const observer = new PerformanceObserver((list: PerformanceObserverEntryList) => { + const observer = new PerformanceObserver((list) => { list.getEntriesByType('measure').forEach(callback); }); From d50284e272afba1d8820a17dbc399d0d20e290c4 Mon Sep 17 00:00:00 2001 From: Adam Horodyski Date: Tue, 31 Dec 2024 20:50:30 +0100 Subject: [PATCH 8/8] fix: metrics print alert on regular builds --- src/libs/Performance.tsx | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/libs/Performance.tsx b/src/libs/Performance.tsx index 90233ef2652e..df642e9f0681 100644 --- a/src/libs/Performance.tsx +++ b/src/libs/Performance.tsx @@ -49,10 +49,14 @@ function measureTTI(endMark?: string): void { requestAnimationFrame(() => { measureFailSafe('TTI', 'nativeLaunchStart', endMark); - // we don't want the alert to show on an e2e test session - if (!isE2ETestSession()) { - printPerformanceMetrics(); + // We don't want an alert to show: + // - on builds with performance metrics collection disabled by a feature flag + // - e2e test sessions + if (!Metrics.canCapturePerformanceMetrics() || isE2ETestSession()) { + return; } + + printPerformanceMetrics(); }); }); }
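A minimal consumer-side sketch of the runtime API introduced in this series (`enableMonitoring`, `markStart`/`markEnd`, `subscribeToMeasurements`, which now returns an unsubscribe function, and `disableMonitoring`); the `exampleFlow` measure name is made up for illustration and does not appear in the patches:

```ts
import Performance from '@libs/Performance';

// Start the observers and resource logging at runtime (previously gated at build time)
Performance.enableMonitoring();

// Receive every completed measure; the subscription returns an unsubscribe function
const unsubscribe = Performance.subscribeToMeasurements((measurement) => {
    if (measurement.name !== 'exampleFlow') {
        return;
    }
    console.debug(`exampleFlow took ${measurement.duration.toFixed(1)}ms`);
});

Performance.markStart('exampleFlow'); // records the "exampleFlow_start" mark
// ... do the work being measured ...
Performance.markEnd('exampleFlow'); // records "exampleFlow_end"; a start-to-end measure is captured automatically

unsubscribe();
Performance.disableMonitoring();
```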