Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion lib/Logger.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,4 +38,6 @@ function logHmmm(message: string, parameters?: Parameters) {
logger({message: `[Onyx] ${message}`, level: 'hmmm', parameters});
}

export {registerLogger, logInfo, logAlert, logHmmm};
const Logger = {registerLogger, logInfo, logAlert, logHmmm};

export default Logger;
97 changes: 3 additions & 94 deletions lib/Onyx.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
/* eslint-disable no-continue */
import _ from 'underscore';
import * as Logger from './Logger';
import Logger from './Logger';
import cache, {TASK} from './OnyxCache';
import * as PerformanceUtils from './PerformanceUtils';
import Storage from './storage';
Expand Down Expand Up @@ -33,6 +33,7 @@ import type {Connection} from './OnyxConnectionManager';
import connectionManager from './OnyxConnectionManager';
import * as GlobalSettings from './GlobalSettings';
import decorateWithMetrics from './metrics';
import OnyxMerge from './OnyxMerge';

/** Initialize the store with actions and listening for storage events */
function init({
Expand Down Expand Up @@ -284,99 +285,7 @@ function merge<TKey extends OnyxKey>(key: TKey, changes: OnyxMergeInput<TKey>):
}
}

const mergeQueue = OnyxUtils.getMergeQueue();
const mergeQueuePromise = OnyxUtils.getMergeQueuePromise();

// Top-level undefined values are ignored
// Therefore, we need to prevent adding them to the merge queue
if (changes === undefined) {
return mergeQueue[key] ? mergeQueuePromise[key] : Promise.resolve();
}

// Merge attempts are batched together. The delta should be applied after a single call to get() to prevent a race condition.
// Using the initial value from storage in subsequent merge attempts will lead to an incorrect final merged value.
if (mergeQueue[key]) {
mergeQueue[key].push(changes);
return mergeQueuePromise[key];
}
mergeQueue[key] = [changes];

mergeQueuePromise[key] = OnyxUtils.get(key).then((existingValue) => {
// Calls to Onyx.set after a merge will terminate the current merge process and clear the merge queue
if (mergeQueue[key] == null) {
return Promise.resolve();
}

try {
// We first only merge the changes, so we can provide these to the native implementation (SQLite uses only delta changes in "JSON_PATCH" to merge)
// We don't want to remove null values from the "batchedDeltaChanges", because SQLite uses them to remove keys from storage natively.
const validChanges = mergeQueue[key].filter((change) => {
const {isCompatible, existingValueType, newValueType} = utils.checkCompatibilityWithExistingValue(change, existingValue);
if (!isCompatible) {
Logger.logAlert(logMessages.incompatibleUpdateAlert(key, 'merge', existingValueType, newValueType));
}
return isCompatible;
}) as Array<OnyxInput<TKey>>;

if (!validChanges.length) {
return Promise.resolve();
}
const batchedDeltaChanges = OnyxUtils.applyMerge(undefined, validChanges, false);

// Case (1): When there is no existing value in storage, we want to set the value instead of merge it.
// Case (2): The presence of a top-level `null` in the merge queue instructs us to drop the whole existing value.
// In this case, we can't simply merge the batched changes with the existing value, because then the null in the merge queue would have no effect
const shouldSetValue = !existingValue || mergeQueue[key].includes(null);

// Clean up the write queue, so we don't apply these changes again
delete mergeQueue[key];
delete mergeQueuePromise[key];

const logMergeCall = (hasChanged = true) => {
// Logging properties only since values could be sensitive things we don't want to log
Logger.logInfo(`merge called for key: ${key}${_.isObject(batchedDeltaChanges) ? ` properties: ${_.keys(batchedDeltaChanges).join(',')}` : ''} hasChanged: ${hasChanged}`);
};

// If the batched changes equal null, we want to remove the key from storage, to reduce storage size
const {wasRemoved} = OnyxUtils.removeNullValues(key, batchedDeltaChanges);

// Calling "OnyxUtils.removeNullValues" removes the key from storage and cache and updates the subscriber.
// Therefore, we don't need to further broadcast and update the value so we can return early.
if (wasRemoved) {
logMergeCall();
return Promise.resolve();
}

// For providers that can't handle delta changes, we need to merge the batched changes with the existing value beforehand.
// The "preMergedValue" will be directly "set" in storage instead of being merged
// Therefore we merge the batched changes with the existing value to get the final merged value that will be stored.
// We can remove null values from the "preMergedValue", because "null" implicates that the user wants to remove a value from storage.
const preMergedValue = OnyxUtils.applyMerge(shouldSetValue ? undefined : existingValue, [batchedDeltaChanges], true);

// In cache, we don't want to remove the key if it's null to improve performance and speed up the next merge.
const hasChanged = cache.hasValueChanged(key, preMergedValue);

logMergeCall(hasChanged);

// This approach prioritizes fast UI changes without waiting for data to be stored in device storage.
const updatePromise = OnyxUtils.broadcastUpdate(key, preMergedValue as OnyxValue<TKey>, hasChanged);

// If the value has not changed, calling Storage.setItem() would be redundant and a waste of performance, so return early instead.
if (!hasChanged) {
return updatePromise;
}

return Storage.mergeItem(key, batchedDeltaChanges as OnyxValue<TKey>, preMergedValue as OnyxValue<TKey>, shouldSetValue).then(() => {
OnyxUtils.sendActionToDevTools(OnyxUtils.METHOD.MERGE, key, changes, preMergedValue);
return updatePromise;
});
} catch (error) {
Logger.logAlert(`An error occurred while applying merge for key: ${key}, Error: ${error}`);
return Promise.resolve();
}
});

return mergeQueuePromise[key];
return OnyxMerge.merge(key, changes);
}

/**
Expand Down
1 change: 1 addition & 0 deletions lib/OnyxCache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -188,6 +188,7 @@ class OnyxCache {
this.addKey(key);
this.addToAccessedKeys(key);

// In cache, we don't want to remove the key if it's null to improve performance and speed up the next merge.
if (value === null || value === undefined) {
this.addNullishStorageKey(key);
} else {
Expand Down
2 changes: 1 addition & 1 deletion lib/OnyxConnectionManager.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import bindAll from 'lodash/bindAll';
import * as Logger from './Logger';
import Logger from './Logger';
import type {ConnectOptions} from './Onyx';
import OnyxUtils from './OnyxUtils';
import * as Str from './Str';
Expand Down
56 changes: 56 additions & 0 deletions lib/OnyxMerge/index.native.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import type {OnyxMergeInput, OnyxKey, OnyxValue} from '../types';
import OnyxUtils from '../OnyxUtils';
import Storage from '../storage';
import Logger from '../Logger';
import cache from '../OnyxCache';
import {isValidChange, logMergeCall} from './mergeUtils';

const nativeBroadcastPromise: Record<OnyxKey, Promise<void>> = {};

/**
 * Applies a single merge `change` to the value stored under `key` (native implementation).
 *
 * Unlike the non-native implementation there is no merge queue: the existing value can be
 * read synchronously from storage, so each call is validated and written immediately, and
 * only the subscriber broadcasts are serialized per key.
 *
 * @param key - The Onyx key being merged into.
 * @param change - The delta to merge. `undefined` is a no-op; `null` removes the key.
 * @returns A promise that resolves once subscribers have been notified of this change.
 */
function merge<TKey extends OnyxKey>(key: TKey, change: OnyxMergeInput<TKey>) {
    // A top-level `undefined` is ignored by contract, so bail out before touching storage.
    if (change === undefined) {
        return Promise.resolve();
    }

    // On mobile, we can get the existing value synchronously through NitroSQLite,
    // therefore we don't need to batch merge changes into a merge queue.
    const existingValue = Storage.getItemSync(key);

    // Incompatible changes (e.g. merging an object onto an array) are dropped;
    // isValidChange has already logged an alert for the caller.
    if (!isValidChange(key, change, existingValue)) {
        return Promise.resolve();
    }

    // A top-level `null` means "delete this key": remove it from storage/cache and notify subscribers.
    if (change === null) {
        Logger.logInfo(`merge called for key: ${key} wasRemoved: true`);
        return OnyxUtils.remove(key);
    }

    // Kick off the storage write immediately — the native provider applies the delta itself,
    // so the UI update below does not wait on disk I/O.
    Storage.mergeItem(key, change);

    // Serialize broadcasts per key so subscribers observe updates in call order.
    const previousBroadcast = nativeBroadcastPromise[key] ?? Promise.resolve();
    nativeBroadcastPromise[key] = previousBroadcast.then(() => {
        try {
            // Compute the fully merged value for the cache and subscribers, stripping nested
            // null values (a nested null means "remove this property from storage").
            const mergedValue = OnyxUtils.applyMerge(existingValue, [change], true);
            const hasChanged = cache.hasValueChanged(key, mergedValue);

            logMergeCall(key, mergedValue, hasChanged);

            // This approach prioritizes fast UI changes without waiting for data to be stored in device storage.
            OnyxUtils.sendActionToDevTools(OnyxUtils.METHOD.MERGE, key, change, mergedValue);
            return OnyxUtils.broadcastUpdate(key, mergedValue as OnyxValue<TKey>, hasChanged);
        } catch (error) {
            Logger.logAlert(`An error occurred while applying merge for key: ${key}, Error: ${error}`);
            return Promise.resolve();
        }
    });

    return nativeBroadcastPromise[key];
}

const OnyxMerge = {merge};

export default OnyxMerge;
83 changes: 83 additions & 0 deletions lib/OnyxMerge/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
import type {OnyxMergeInput, OnyxKey, OnyxValue, OnyxInput} from '../types';
import OnyxUtils from '../OnyxUtils';
import Storage from '../storage';
import Logger from '../Logger';
import cache from '../OnyxCache';
import {isValidChange, logMergeCall} from './mergeUtils';

/**
 * Merges `change` into the value stored under `key` (non-native implementation).
 *
 * Merge calls for the same key are batched into a per-key queue so the existing value
 * is read from storage only once per batch; applying each delta against a fresh read
 * would otherwise race and produce an incorrect final value.
 *
 * @param key - The Onyx key being merged into.
 * @param change - The delta to merge. `undefined` is a no-op; `null` removes the key.
 * @returns A promise that resolves once this batch has been applied and broadcast.
 */
function merge<TKey extends OnyxKey>(key: TKey, change: OnyxMergeInput<TKey>) {
    const mergeQueue = OnyxUtils.getMergeQueue();
    const mergeQueuePromise = OnyxUtils.getMergeQueuePromise();

    // Top-level undefined values are ignored
    // Therefore, we need to prevent adding them to the merge queue
    if (change === undefined) {
        return mergeQueue[key] ? mergeQueuePromise[key] : Promise.resolve();
    }

    // Merge attempts are batched together. The delta should be applied after a single call to get() to prevent a race condition.
    // Using the initial value from storage in subsequent merge attempts will lead to an incorrect final merged value.
    if (mergeQueue[key]) {
        mergeQueue[key].push(change);
        return mergeQueuePromise[key];
    }
    mergeQueue[key] = [change];

    mergeQueuePromise[key] = OnyxUtils.get(key).then((existingValue) => {
        // Calls to Onyx.set after a merge will terminate the current merge process and clear the merge queue
        if (mergeQueue[key] == null) {
            return Promise.resolve();
        }

        try {
            // Incompatible changes (e.g. merging an object onto an array) are dropped;
            // isValidChange logs an alert for each incompatible entry.
            const validChanges = mergeQueue[key].filter((c) => isValidChange(key, c, existingValue)) as Array<OnyxInput<TKey>>;

            if (!validChanges.length) {
                return Promise.resolve();
            }

            // Clean up the write queue, so we don't apply these changes again
            delete mergeQueue[key];
            delete mergeQueuePromise[key];

            // If the last change is null, the key is being removed entirely and earlier queued changes are moot.
            // Return the removal promise (mirroring the native implementation) so callers can await completion
            // of the storage/cache removal and subscriber update instead of an immediately-resolved promise.
            if (validChanges.at(-1) === null) {
                Logger.logInfo(`merge called for key: ${key} wasRemoved: true`);
                return OnyxUtils.remove(key);
            }

            // We merge the valid changes with the existing value to get the final merged value that will be stored.
            // We want to remove nested null values from the new value, because null implicates that the user wants to remove a value from storage.
            const newValue = OnyxUtils.applyMerge(existingValue, validChanges, true);
            const hasChanged = cache.hasValueChanged(key, newValue);

            logMergeCall(key, newValue, hasChanged);

            // This approach prioritizes fast UI changes without waiting for data to be stored in device storage.
            const updatePromise = OnyxUtils.broadcastUpdate(key, newValue as OnyxValue<TKey>, hasChanged);

            // If the value has not changed, calling Storage.setItem() would be redundant and a waste of performance, so return early instead.
            if (!hasChanged) {
                return updatePromise;
            }

            return Storage.setItem(key, newValue as OnyxValue<TKey>).then(() => {
                OnyxUtils.sendActionToDevTools(OnyxUtils.METHOD.MERGE, key, change, newValue);
                return updatePromise;
            });
        } catch (error) {
            Logger.logAlert(`An error occurred while applying merge for key: ${key}, Error: ${error}`);
            return Promise.resolve();
        }
    });

    return mergeQueuePromise[key];
}

const OnyxMerge = {merge};

export default OnyxMerge;
19 changes: 19 additions & 0 deletions lib/OnyxMerge/mergeUtils.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import _ from 'underscore';
import type {OnyxInput, OnyxKey} from '../types';
import logMessages from '../logMessages';
import utils from '../utils';
import Logger from '../Logger';

/**
 * Type guard that checks whether `change` can be merged onto `existingValue`.
 * When the two values have incompatible types, an alert is logged and `false` is returned
 * so the caller can drop the change without throwing.
 */
function isValidChange<TKey extends OnyxKey>(key: TKey, change: unknown, existingValue: unknown): change is OnyxInput<TKey> {
    const compatibility = utils.checkCompatibilityWithExistingValue(change, existingValue);

    if (compatibility.isCompatible) {
        return true;
    }

    Logger.logAlert(logMessages.incompatibleUpdateAlert(key, 'merge', compatibility.existingValueType, compatibility.newValueType));
    return false;
}

/**
 * Logs that a merge was applied for `key`. Only property names are logged — never values,
 * since values could contain sensitive data we don't want in logs.
 */
function logMergeCall(key: OnyxKey, changes: unknown, hasChanged = true) {
    const propertiesSuffix = _.isObject(changes) ? ` properties: ${_.keys(changes).join(',')}` : '';
    Logger.logInfo(`merge called for key: ${key}${propertiesSuffix} hasChanged: ${hasChanged}`);
}

export {isValidChange, logMergeCall};
2 changes: 1 addition & 1 deletion lib/OnyxUtils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import lodashClone from 'lodash/clone';
import type {ValueOf} from 'type-fest';
import lodashPick from 'lodash/pick';
import DevTools from './DevTools';
import * as Logger from './Logger';
import Logger from './Logger';
import type Onyx from './Onyx';
import cache, {TASK} from './OnyxCache';
import * as PerformanceUtils from './PerformanceUtils';
Expand Down
62 changes: 34 additions & 28 deletions lib/storage/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import * as Logger from '../Logger';
import Logger from '../Logger';

import PlatformStorage from './platforms';
import InstanceSync from './InstanceSync';
Expand Down Expand Up @@ -28,33 +28,34 @@ function degradePerformance(error: Error) {
}

/**
* Runs a piece of code and degrades performance if certain errors are thrown
* Runs a piece of code synchronously and degrades performance if certain errors are thrown
*/
function tryOrDegradePerformance<T>(fn: () => Promise<T> | T, waitForInitialization = true): Promise<T> {
return new Promise<T>((resolve, reject) => {
const promise = waitForInitialization ? initPromise : Promise.resolve();

promise.then(() => {
try {
resolve(fn());
} catch (error) {
// Test for known critical errors that the storage provider throws, e.g. when storage is full
if (error instanceof Error) {
// IndexedDB error when storage is full (https://github.com/Expensify/App/issues/29403)
if (error.message.includes('Internal error opening backing store for indexedDB.open')) {
degradePerformance(error);
}

// catch the error if DB connection can not be established/DB can not be created
if (error.message.includes('IDBKeyVal store could not be created')) {
degradePerformance(error);
}
}

reject(error);
function tryOrDegradePerformanceSync<T, Callback extends (() => Promise<T>) | (() => T)>(fn: Callback): ReturnType<Callback> {
try {
return fn() as ReturnType<Callback>;
} catch (error) {
// Test for known critical errors that the storage provider throws, e.g. when storage is full
if (error instanceof Error) {
// IndexedDB error when storage is full (https://github.com/Expensify/App/issues/29403)
if (error.message.includes('Internal error opening backing store for indexedDB.open')) {
degradePerformance(error);
}
});
});

// catch the error if DB connection can not be established/DB can not be created
if (error.message.includes('IDBKeyVal store could not be created')) {
degradePerformance(error);
}
}

throw error;
}
}

/**
* Runs a piece of code and degrades performance if certain errors are thrown
*/
function tryOrDegradePerformance<T>(fn: () => Promise<T> | T, waitForInitialization = true) {
return (waitForInitialization ? initPromise : Promise.resolve()).then(() => tryOrDegradePerformanceSync(fn));
}

const storage: Storage = {
Expand All @@ -80,6 +81,11 @@ const storage: Storage = {
*/
getItem: (key) => tryOrDegradePerformance(() => provider.getItem(key)),

/**
* Get the value of a given key synchronously or return `null` if it's not available
*/
getItemSync: (key) => tryOrDegradePerformanceSync(() => provider.getItemSync(key)),

/**
* Get multiple key-value pairs for the give array of keys in a batch
*/
Expand Down Expand Up @@ -116,9 +122,9 @@ const storage: Storage = {
/**
* Merging an existing value with a new one
*/
mergeItem: (key, deltaChanges, preMergedValue, shouldSetValue = false) =>
mergeItem: (key, change) =>
tryOrDegradePerformance(() => {
const promise = provider.mergeItem(key, deltaChanges, preMergedValue, shouldSetValue);
const promise = provider.mergeItem(key, change);

if (shouldKeepInstancesSync) {
return promise.then(() => InstanceSync.mergeItem(key));
Expand Down
Loading
Loading