Migrate attachments on startup (#2193)
- [x] Run initial Backbone migrations 12–17 upon startup.
- [x] ~~Run attachment migration (without Backbone references, to avoid triggering migrations): `MessageDataMigrator.processAll`.~~ Disabled in favor of background processing without index.
- [x] ~~Run new migrations that can cover entire database (18+).~~ Not until we have such migrations.
- [x] Add `runMigrations` module that ensures migrations are only run once.
- [x] Add `settings` (`Signal.Settings`) module to interact with our app settings (`items` store) using a domain API, e.g. `isAttachmentMigrationComplete(…)` vs `storage.get('attachmentMigration_isComplete')`. Required to check attachment migration status without using Backbone.
- [x] Add `database` (`Signal.Database`) CommonJS module to provide a `Promise`-based interface to IndexedDB.
- [x] Add `debug` (`Signal.Debug`) module for generating synthetic data to test migration performance.
- [x] Minor: Add `sleep` module for doing promise-based sleeps.
- [x] Minor: Extract `wrapDeferred` as a CommonJS module named `deferredToPromise`.
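To tie the checklist together, here is a rough sketch of how these modules could be wired at startup. It is an illustration only: the module paths and the `runStartupMigrations` wrapper are assumptions, not part of this diff; the calls it makes (`run`, `getDatabase`, `open`, `isAttachmentMigrationComplete`) come from the modules added in the diffs below.

```js
// Illustration only: paths and the wrapper are assumed; the exported functions
// are defined by the modules added in this PR (see the diffs below).
const migrations = require('./migrations/migrations_0_database_with_attachment_data'); // assumed path
const settings = require('./settings'); // assumed path
const database = require('./database'); // assumed path

async function runStartupMigrations({ Backbone }) {
  // Run the Backbone-backed migrations 12–17 once; runMigrations exits early
  // when the database version already matches the last migration version.
  await migrations.run({ Backbone });

  // Check attachment migration status without going through Backbone storage.
  const { name } = migrations.getDatabase();
  const connection = await database.open(name);
  const isComplete = await settings.isAttachmentMigrationComplete(connection);
  connection.close();

  console.log('Attachment migration status:', { isComplete });
}
```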
commit 887bd83852
@@ -0,0 +1,56 @@
/* global indexedDB */

// Module for interacting with IndexedDB without Backbone IndexedDB adapter
// and using promises. Revisit use of `idb` dependency as it might cover
// this functionality.

const { isObject } = require('lodash');


exports.open = (name, version) => {
  const request = indexedDB.open(name, version);
  return new Promise((resolve, reject) => {
    request.onblocked = () =>
      reject(new Error('Database blocked'));

    request.onupgradeneeded = event =>
      reject(new Error('Unexpected database upgrade required:' +
        `oldVersion: ${event.oldVersion}, newVersion: ${event.newVersion}`));

    request.onerror = event =>
      reject(event.target.error);

    request.onsuccess = (event) => {
      const connection = event.target.result;
      resolve(connection);
    };
  });
};

exports.completeTransaction = transaction =>
  new Promise((resolve, reject) => {
    transaction.addEventListener('abort', event => reject(event.target.error));
    transaction.addEventListener('error', event => reject(event.target.error));
    transaction.addEventListener('complete', () => resolve());
  });

exports.getVersion = async (name) => {
  const connection = await exports.open(name);
  const { version } = connection;
  connection.close();
  return version;
};

exports.getCount = async ({ store } = {}) => {
  if (!isObject(store)) {
    throw new TypeError('"store" is required');
  }

  const request = store.count();
  return new Promise((resolve, reject) => {
    request.onerror = event =>
      reject(event.target.error);
    request.onsuccess = event =>
      resolve(event.target.result);
  });
};
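A minimal usage sketch for the helpers above (not part of the diff): it assumes the `signal` database and `messages` store that the migrations later in this PR create.

```js
// Sketch only: counts messages using the promise-based helpers above.
const db = require('./database'); // assumed path

async function logMessageCount() {
  const connection = await db.open('signal');
  try {
    const transaction = connection.transaction('messages', 'readonly');
    const store = transaction.objectStore('messages');
    // Wait for both the count result and transaction completion.
    const [count] = await Promise.all([
      db.getCount({ store }),
      db.completeTransaction(transaction),
    ]);
    console.log('Total messages:', count);
  } finally {
    connection.close();
  }
}
```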
@@ -0,0 +1,135 @@
const {
  isFunction,
  isNumber,
  isObject,
  isString,
  random,
  range,
  sample,
} = require('lodash');

const Message = require('./types/message');
const { deferredToPromise } = require('./deferred_to_promise');
const { sleep } = require('./sleep');


// See: https://en.wikipedia.org/wiki/Fictitious_telephone_number#North_American_Numbering_Plan
const SENDER_ID = '+12126647665';

exports.createConversation = async ({
  ConversationController,
  numMessages,
  WhisperMessage,
} = {}) => {
  if (!isObject(ConversationController) ||
      !isFunction(ConversationController.getOrCreateAndWait)) {
    throw new TypeError('"ConversationController" is required');
  }

  if (!isNumber(numMessages) || numMessages <= 0) {
    throw new TypeError('"numMessages" must be a positive number');
  }

  if (!isFunction(WhisperMessage)) {
    throw new TypeError('"WhisperMessage" is required');
  }

  const conversation =
    await ConversationController.getOrCreateAndWait(SENDER_ID, 'private');
  conversation.set({
    active_at: Date.now(),
    unread: numMessages,
  });
  await deferredToPromise(conversation.save());

  const conversationId = conversation.get('id');

  await Promise.all(range(0, numMessages).map(async (index) => {
    await sleep(index * 100);
    console.log(`Create message ${index + 1}`);
    const message = new WhisperMessage(createRandomMessage({ conversationId }));
    return deferredToPromise(message.save());
  }));
};

const SAMPLE_MESSAGES = [
  'Lorem ipsum dolor sit amet, consectetur adipiscing elit.',
  'Integer et rutrum leo, eu ultrices ligula.',
  'Nam vel aliquam quam.',
  'Suspendisse posuere nunc vitae pulvinar lobortis.',
  'Nunc et sapien ex.',
  'Duis nec neque eu arcu ultrices ullamcorper in et mauris.',
  'Praesent mi felis, hendrerit a nulla id, mattis consectetur est.',
  'Duis venenatis posuere est sit amet congue.',
  'Vestibulum vitae sapien ultricies, auctor purus vitae, laoreet lacus.',
  'Fusce laoreet nisi dui, a bibendum metus consequat in.',
  'Nulla sed iaculis odio, sed lobortis lacus.',
  'Etiam massa felis, gravida at nibh viverra, tincidunt convallis justo.',
  'Maecenas ut egestas urna.',
  'Pellentesque consectetur mattis imperdiet.',
  'Maecenas pulvinar efficitur justo a cursus.',
];

const ATTACHMENT_SAMPLE_RATE = 0.33;
const createRandomMessage = ({ conversationId } = {}) => {
  if (!isString(conversationId)) {
    throw new TypeError('"conversationId" must be a string');
  }

  const sentAt = Date.now() - random(100 * 24 * 60 * 60 * 1000);
  const receivedAt = sentAt + random(30 * 1000);

  const hasAttachment = Math.random() <= ATTACHMENT_SAMPLE_RATE;
  const attachments = hasAttachment
    ? [createRandomInMemoryAttachment()] : [];
  const type = sample(['incoming', 'outgoing']);
  const commonProperties = {
    attachments,
    body: sample(SAMPLE_MESSAGES),
    conversationId,
    received_at: receivedAt,
    sent_at: sentAt,
    timestamp: receivedAt,
    type,
  };

  const message = _createMessage({ commonProperties, conversationId, type });
  return Message.initializeSchemaVersion(message);
};

const _createMessage = ({ commonProperties, conversationId, type } = {}) => {
  switch (type) {
    case 'incoming':
      return Object.assign({}, commonProperties, {
        flags: 0,
        source: conversationId,
        sourceDevice: 1,
      });
    case 'outgoing':
      return Object.assign({}, commonProperties, {
        delivered: 1,
        delivered_to: [conversationId],
        expireTimer: 0,
        recipients: [conversationId],
        sent_to: [conversationId],
        synced: true,
      });
    default:
      throw new TypeError(`Unknown message type: '${type}'`);
  }
};

const MEGA_BYTE = 1e6;
const createRandomInMemoryAttachment = () => {
  const numBytes = (1 + Math.ceil((Math.random() * 50))) * MEGA_BYTE;
  const array = new Uint32Array(numBytes).fill(1);
  const data = array.buffer;
  const fileName = Math.random().toString().slice(2);

  return {
    contentType: 'application/octet-stream',
    data,
    fileName,
    size: numBytes,
  };
};
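A possible invocation from the developer console for generating synthetic conversations; `Signal.Debug` is the name given in the PR description, while `ConversationController` and `Whisper.Message` are assumptions about globals available in the app's browser context, not anything defined in this diff.

```js
// Illustration only: the globals referenced here are assumptions about the
// running app's context; adjust to whatever the console actually exposes.
Signal.Debug.createConversation({
  ConversationController: window.ConversationController,
  numMessages: 100,
  WhisperMessage: Whisper.Message,
});
```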
@@ -0,0 +1,3 @@
exports.deferredToPromise = deferred =>
  // eslint-disable-next-line more/no-then
  new Promise((resolve, reject) => deferred.then(resolve, reject));
@@ -1,55 +0,0 @@
const Message = require('../../types/message');


exports.run = async (transaction) => {
  const messagesStore = transaction.objectStore('messages');

  console.log('Initialize messages schema version');
  const numUpgradedMessages = await _initializeMessageSchemaVersion(messagesStore);
  console.log('Complete messages schema version initialization', { numUpgradedMessages });

  console.log('Create index from attachment schema version to attachment');
  messagesStore.createIndex('schemaVersion', 'schemaVersion', { unique: false });
};

const _initializeMessageSchemaVersion = messagesStore =>
  new Promise((resolve, reject) => {
    const messagePutOperations = [];

    const cursorRequest = messagesStore.openCursor();
    cursorRequest.onsuccess = async (event) => {
      const cursor = event.target.result;
      const hasMoreData = Boolean(cursor);
      if (!hasMoreData) {
        await Promise.all(messagePutOperations);
        return resolve(messagePutOperations.length);
      }

      const message = cursor.value;
      const messageWithSchemaVersion = Message.initializeSchemaVersion(message);
      messagePutOperations.push(putItem(
        messagesStore,
        messageWithSchemaVersion,
        messageWithSchemaVersion.id
      ));

      return cursor.continue();
    };

    cursorRequest.onerror = event =>
      reject(event.target.error);
  });

// putItem :: IDBObjectStore -> Item -> Key -> Promise Item
const putItem = (store, item, key) =>
  new Promise((resolve, reject) => {
    try {
      const request = store.put(item, key);
      request.onsuccess = event =>
        resolve(event.target.result);
      request.onerror = event =>
        reject(event.target.error);
    } catch (error) {
      reject(error);
    }
  });
@@ -0,0 +1,156 @@
const { last } = require('lodash');

const { runMigrations } = require('./run_migrations');


// IMPORTANT: The migrations below are run on a database that may be very large
// due to attachments being directly stored inside the database. Please avoid
// any expensive operations, e.g. modifying all messages / attachments, etc., as
// it may cause out-of-memory errors for users with long histories:
// https://github.com/signalapp/Signal-Desktop/issues/2163
exports.migrations = [
  {
    version: '12.0',
    migrate(transaction, next) {
      console.log('Migration 12');
      console.log('creating object stores');
      const messages = transaction.db.createObjectStore('messages');
      messages.createIndex('conversation', ['conversationId', 'received_at'], {
        unique: false,
      });
      messages.createIndex('receipt', 'sent_at', { unique: false });
      messages.createIndex('unread', ['conversationId', 'unread'], { unique: false });
      messages.createIndex('expires_at', 'expires_at', { unique: false });

      const conversations = transaction.db.createObjectStore('conversations');
      conversations.createIndex('inbox', 'active_at', { unique: false });
      conversations.createIndex('group', 'members', {
        unique: false,
        multiEntry: true,
      });
      conversations.createIndex('type', 'type', {
        unique: false,
      });
      conversations.createIndex('search', 'tokens', {
        unique: false,
        multiEntry: true,
      });

      transaction.db.createObjectStore('groups');

      transaction.db.createObjectStore('sessions');
      transaction.db.createObjectStore('identityKeys');
      transaction.db.createObjectStore('preKeys');
      transaction.db.createObjectStore('signedPreKeys');
      transaction.db.createObjectStore('items');

      console.log('creating debug log');
      transaction.db.createObjectStore('debug');

      next();
    },
  },
  {
    version: '13.0',
    migrate(transaction, next) {
      console.log('Migration 13');
      console.log('Adding fields to identity keys');
      const identityKeys = transaction.objectStore('identityKeys');
      const request = identityKeys.openCursor();
      const promises = [];
      request.onsuccess = (event) => {
        const cursor = event.target.result;
        if (cursor) {
          const attributes = cursor.value;
          attributes.timestamp = 0;
          attributes.firstUse = false;
          attributes.nonblockingApproval = false;
          attributes.verified = 0;
          promises.push(new Promise(((resolve, reject) => {
            const putRequest = identityKeys.put(attributes, attributes.id);
            putRequest.onsuccess = resolve;
            putRequest.onerror = (e) => {
              console.log(e);
              reject(e);
            };
          })));
          cursor.continue();
        } else {
          // no more results
          // eslint-disable-next-line more/no-then
          Promise.all(promises).then(() => {
            next();
          });
        }
      };
      request.onerror = (event) => {
        console.log(event);
      };
    },
  },
  {
    version: '14.0',
    migrate(transaction, next) {
      console.log('Migration 14');
      console.log('Adding unprocessed message store');
      const unprocessed = transaction.db.createObjectStore('unprocessed');
      unprocessed.createIndex('received', 'timestamp', { unique: false });
      next();
    },
  },
  {
    version: '15.0',
    migrate(transaction, next) {
      console.log('Migration 15');
      console.log('Adding messages index for de-duplication');
      const messages = transaction.objectStore('messages');
      messages.createIndex('unique', ['source', 'sourceDevice', 'sent_at'], {
        unique: true,
      });
      next();
    },
  },
  {
    version: '16.0',
    migrate(transaction, next) {
      console.log('Migration 16');
      console.log('Dropping log table, since we now log to disk');
      transaction.db.deleteObjectStore('debug');
      next();
    },
  },
  {
    version: 17,
    async migrate(transaction, next) {
      console.log('Migration 17');

      const start = Date.now();

      const messagesStore = transaction.objectStore('messages');
      console.log('Create index from attachment schema version to attachment');
      messagesStore.createIndex('schemaVersion', 'schemaVersion', { unique: false });

      const duration = Date.now() - start;

      console.log(
        'Complete migration to database version 17.',
        `Duration: ${duration}ms`
      );
      next();
    },
  },
];

const database = {
  id: 'signal',
  nolog: true,
  migrations: exports.migrations,
};

exports.run = ({ Backbone } = {}) =>
  runMigrations({ Backbone, database });

exports.getDatabase = () => ({
  name: database.id,
  version: last(exports.migrations).version,
});
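Migration 17 only adds a `schemaVersion` index on messages; the expensive per-message rewrite from the deleted file above is deferred. As a hedged illustration (not part of this diff) of what the index enables, a background job could collect messages whose `schemaVersion` is below the current one roughly like this; note that messages lacking a `schemaVersion` property do not appear in the index at all.

```js
/* global IDBKeyRange */

// Sketch only: `currentSchemaVersion` stands in for whatever the message
// schema module exposes; only messages that have `schemaVersion` are indexed.
const collectMessagesNeedingUpgrade = (messagesStore, currentSchemaVersion) =>
  new Promise((resolve, reject) => {
    const messages = [];
    // Keys strictly below the current schema version.
    const belowCurrent = IDBKeyRange.upperBound(currentSchemaVersion, true);
    const request = messagesStore.index('schemaVersion').openCursor(belowCurrent);

    request.onerror = event => reject(event.target.error);
    request.onsuccess = (event) => {
      const cursor = event.target.result;
      if (!cursor) {
        return resolve(messages);
      }
      messages.push(cursor.value);
      return cursor.continue();
    };
  });
```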
@@ -0,0 +1,15 @@
const { runMigrations } = require('./run_migrations');


exports.migrations = [
  // {
  //   version: 18,
  //   async migrate(transaction, next) {
  //     console.log('Migration 18');
  //     console.log('Attachments stored on disk');
  //     next();
  //   },
  // },
];

exports.run = runMigrations;
@@ -0,0 +1,71 @@
/* eslint-env browser */

const {
  head,
  isFunction,
  isObject,
  isString,
  last,
} = require('lodash');


const db = require('../database');
const { deferredToPromise } = require('../deferred_to_promise');


const closeDatabaseConnection = ({ Backbone } = {}) =>
  deferredToPromise(Backbone.sync('closeall'));

exports.runMigrations = async ({ Backbone, database } = {}) => {
  if (!isObject(Backbone) || !isObject(Backbone.Collection) ||
      !isFunction(Backbone.Collection.extend)) {
    throw new TypeError('"Backbone" is required');
  }

  if (!isObject(database) || !isString(database.id) ||
      !Array.isArray(database.migrations)) {
    throw new TypeError('"database" is required');
  }

  const {
    firstVersion: firstMigrationVersion,
    lastVersion: lastMigrationVersion,
  } = getMigrationVersions(database);

  const databaseVersion = await db.getVersion(database.id);
  const isAlreadyUpgraded = databaseVersion >= lastMigrationVersion;

  console.log('Database status', {
    firstMigrationVersion,
    lastMigrationVersion,
    databaseVersion,
    isAlreadyUpgraded,
  });

  if (isAlreadyUpgraded) {
    return;
  }

  const migrationCollection = new (Backbone.Collection.extend({
    database,
    storeName: 'items',
  }))();

  // Fetching through the Backbone IndexedDB adapter opens the database at the
  // latest migration version, which runs any pending migrations.
  await deferredToPromise(migrationCollection.fetch({ limit: 1 }));
  console.log('Close database connection');
  await closeDatabaseConnection({ Backbone });
};

const getMigrationVersions = (database) => {
  if (!isObject(database) || !Array.isArray(database.migrations)) {
    throw new TypeError('"database" is required');
  }

  const firstMigration = head(database.migrations);
  const lastMigration = last(database.migrations);

  const firstVersion = firstMigration ? parseInt(firstMigration.version, 10) : null;
  const lastVersion = lastMigration ? parseInt(lastMigration.version, 10) : null;

  return { firstVersion, lastVersion };
};
@@ -0,0 +1,86 @@
const { isObject, isString } = require('lodash');


const ITEMS_STORE_NAME = 'items';
const LAST_PROCESSED_INDEX_KEY = 'attachmentMigration_lastProcessedIndex';
const IS_MIGRATION_COMPLETE_KEY = 'attachmentMigration_isComplete';

// Public API
exports.getAttachmentMigrationLastProcessedIndex = connection =>
  exports._getItem(connection, LAST_PROCESSED_INDEX_KEY);

exports.setAttachmentMigrationLastProcessedIndex = (connection, value) =>
  exports._setItem(connection, LAST_PROCESSED_INDEX_KEY, value);

exports.deleteAttachmentMigrationLastProcessedIndex = connection =>
  exports._deleteItem(connection, LAST_PROCESSED_INDEX_KEY);

exports.isAttachmentMigrationComplete = async connection =>
  Boolean(await exports._getItem(connection, IS_MIGRATION_COMPLETE_KEY));

exports.markAttachmentMigrationComplete = connection =>
  exports._setItem(connection, IS_MIGRATION_COMPLETE_KEY, true);

// Private API
exports._getItem = (connection, key) => {
  if (!isObject(connection)) {
    throw new TypeError('"connection" is required');
  }

  if (!isString(key)) {
    throw new TypeError('"key" must be a string');
  }

  const transaction = connection.transaction(ITEMS_STORE_NAME, 'readonly');
  const itemsStore = transaction.objectStore(ITEMS_STORE_NAME);
  const request = itemsStore.get(key);
  return new Promise((resolve, reject) => {
    request.onerror = event =>
      reject(event.target.error);

    request.onsuccess = event =>
      resolve(event.target.result ? event.target.result.value : null);
  });
};

exports._setItem = (connection, key, value) => {
  if (!isObject(connection)) {
    throw new TypeError('"connection" is required');
  }

  if (!isString(key)) {
    throw new TypeError('"key" must be a string');
  }

  const transaction = connection.transaction(ITEMS_STORE_NAME, 'readwrite');
  const itemsStore = transaction.objectStore(ITEMS_STORE_NAME);
  const request = itemsStore.put({ id: key, value }, key);
  return new Promise((resolve, reject) => {
    request.onerror = event =>
      reject(event.target.error);

    request.onsuccess = () =>
      resolve();
  });
};

exports._deleteItem = (connection, key) => {
  if (!isObject(connection)) {
    throw new TypeError('"connection" is required');
  }

  if (!isString(key)) {
    throw new TypeError('"key" must be a string');
  }

  const transaction = connection.transaction(ITEMS_STORE_NAME, 'readwrite');
  const itemsStore = transaction.objectStore(ITEMS_STORE_NAME);
  const request = itemsStore.delete(key);
  return new Promise((resolve, reject) => {
    request.onerror = event =>
      reject(event.target.error);

    request.onsuccess = () =>
      resolve();
  });
};
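As noted in the PR description, attachment migration runs as background processing, and these keys track its progress. Below is a hedged sketch (not part of this diff) of how a background job might use them to resume after an interruption; `upgradeNextBatch` is a hypothetical stand-in for the actual per-batch migration work.

```js
// Sketch only: `upgradeNextBatch` is hypothetical; the settings calls match
// the module above.
const settings = require('./settings'); // assumed path

async function migrateAttachmentsInBackground(connection, upgradeNextBatch) {
  if (await settings.isAttachmentMigrationComplete(connection)) {
    return;
  }

  let lastIndex =
    (await settings.getAttachmentMigrationLastProcessedIndex(connection)) || 0;
  let done = false;

  while (!done) {
    // Process one batch, then persist progress so an interrupted run can resume.
    ({ lastIndex, done } = await upgradeNextBatch(connection, lastIndex));
    await settings.setAttachmentMigrationLastProcessedIndex(connection, lastIndex);
  }

  await settings.markAttachmentMigrationComplete(connection);
  await settings.deleteAttachmentMigrationLastProcessedIndex(connection);
}
```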
@@ -0,0 +1,4 @@
/* global setTimeout */

exports.sleep = ms =>
  new Promise(resolve => setTimeout(resolve, ms));
@@ -0,0 +1,18 @@
#!/bin/bash
ROOT=$1

if [[ "$1" == "" ]]; then
  echo "Usage: $(basename "$0") <signal-profile-path>"
  exit 1
fi

while true
do
  echo -n "$(date -u +"%Y-%m-%dT%H:%M:%SZ ")"
  du -sm "$ROOT/attachments.noindex"

  echo -n "$(date -u +"%Y-%m-%dT%H:%M:%SZ ")"
  du -sm "$ROOT/IndexedDB"

  sleep 1
done