implement periodic db cleanup
This commit is contained in:
parent
9758039f5f
commit
92f6dd96c0
|
@ -1,13 +1,13 @@
|
|||
import { ACCOUNTS_STORE, RELATIONSHIPS_STORE } from './constants'
|
||||
import { accountsCache, relationshipsCache } from './cache'
|
||||
import { getGenericEntityWithId, setGenericEntityWithId } from './helpers'
|
||||
import { cloneForStorage, getGenericEntityWithId, setGenericEntityWithId } from './helpers'
|
||||
|
||||
export async function getAccount (instanceName, accountId) {
|
||||
return getGenericEntityWithId(ACCOUNTS_STORE, accountsCache, instanceName, accountId)
|
||||
}
|
||||
|
||||
export async function setAccount (instanceName, account) {
|
||||
return setGenericEntityWithId(ACCOUNTS_STORE, accountsCache, instanceName, account)
|
||||
return setGenericEntityWithId(ACCOUNTS_STORE, accountsCache, instanceName, cloneForStorage(account))
|
||||
}
|
||||
|
||||
export async function getRelationship (instanceName, accountId) {
|
||||
|
@ -15,5 +15,5 @@ export async function getRelationship (instanceName, accountId) {
|
|||
}
|
||||
|
||||
export async function setRelationship (instanceName, relationship) {
|
||||
return setGenericEntityWithId(RELATIONSHIPS_STORE, relationshipsCache, instanceName, relationship)
|
||||
return setGenericEntityWithId(RELATIONSHIPS_STORE, relationshipsCache, instanceName, cloneForStorage(relationship))
|
||||
}
|
||||
|
|
|
@ -0,0 +1,135 @@
|
|||
import { dbPromise, getDatabase } from './databaseLifecycle'
|
||||
import { scheduleIdleTask } from '../_utils/scheduleIdleTask'
|
||||
import {
|
||||
ACCOUNTS_STORE,
|
||||
NOTIFICATION_TIMELINES_STORE,
|
||||
NOTIFICATIONS_STORE,
|
||||
RELATIONSHIPS_STORE,
|
||||
STATUS_TIMELINES_STORE,
|
||||
STATUSES_STORE,
|
||||
TIMESTAMP
|
||||
} from './constants'
|
||||
import debounce from 'lodash/debounce'
|
||||
import { store } from '../_store/store'
|
||||
import { mark, stop } from '../_utils/marks'
|
||||
|
||||
const BATCH_SIZE = 20
|
||||
const TIME_AGO = 14 * 24 * 60 * 60 * 1000 // two weeks ago
|
||||
const DELAY = 5 * 60 * 1000 // five minutes
|
||||
|
||||
// Repeatedly issues a getAll() request (via the caller-supplied factory) and
// feeds each batch of results to `callback`, stopping after the first empty
// batch. The factory is expected to advance through the store on each call
// (e.g. by deleting the rows the callback just processed).
function batchedGetAll (callGetAll, callback) {
  const processNextBatch = () => {
    const request = callGetAll()
    request.onsuccess = (event) => {
      const batch = event.target.result
      callback(batch)
      if (batch.length > 0) {
        processNextBatch()
      }
    }
  }
  processNextBatch()
}
|
||||
|
||||
// Deletes statuses whose TIMESTAMP index value is older than `cutoff`, in
// batches of BATCH_SIZE, along with every status-timeline entry that
// references each deleted status.
function cleanupStatuses (statusesStore, statusTimelinesStore, cutoff) {
  const getStaleStatuses = () =>
    statusesStore.index(TIMESTAMP).getAll(IDBKeyRange.upperBound(cutoff), BATCH_SIZE)
  batchedGetAll(getStaleStatuses, (statuses) => {
    for (const status of statuses) {
      statusesStore.delete(status.id)
      const timelineReq = statusTimelinesStore.index('statusId').getAll(IDBKeyRange.only(status.id))
      timelineReq.onsuccess = (e) => {
        for (const entry of e.target.result) {
          statusTimelinesStore.delete(entry.id)
        }
      }
    }
  })
}
|
||||
|
||||
// Deletes notifications whose TIMESTAMP index value is older than `cutoff`,
// in batches of BATCH_SIZE, along with every notification-timeline entry
// that references each deleted notification.
function cleanupNotifications (notificationsStore, notificationTimelinesStore, cutoff) {
  const getStaleNotifications = () =>
    notificationsStore.index(TIMESTAMP).getAll(IDBKeyRange.upperBound(cutoff), BATCH_SIZE)
  batchedGetAll(getStaleNotifications, (notifications) => {
    for (const notification of notifications) {
      notificationsStore.delete(notification.id)
      const timelineReq = notificationTimelinesStore.index('notificationId').getAll(IDBKeyRange.only(notification.id))
      timelineReq.onsuccess = (e) => {
        for (const entry of e.target.result) {
          notificationTimelinesStore.delete(entry.id)
        }
      }
    }
  })
}
|
||||
|
||||
// Deletes accounts whose TIMESTAMP index value is older than `cutoff`,
// in batches of BATCH_SIZE.
function cleanupAccounts (accountsStore, cutoff) {
  const getStaleAccounts = () =>
    accountsStore.index(TIMESTAMP).getAll(IDBKeyRange.upperBound(cutoff), BATCH_SIZE)
  batchedGetAll(getStaleAccounts, (accounts) => {
    for (const account of accounts) {
      accountsStore.delete(account.id)
    }
  })
}
|
||||
|
||||
// Deletes relationships whose TIMESTAMP index value is older than `cutoff`,
// in batches of BATCH_SIZE.
function cleanupRelationships (relationshipsStore, cutoff) {
  const getStaleRelationships = () =>
    relationshipsStore.index(TIMESTAMP).getAll(IDBKeyRange.upperBound(cutoff), BATCH_SIZE)
  batchedGetAll(getStaleRelationships, (relationships) => {
    for (const relationship of relationships) {
      relationshipsStore.delete(relationship.id)
    }
  })
}
|
||||
|
||||
// Removes stale rows (older than TIME_AGO) from all six stores of the given
// instance's database, inside a single readwrite transaction. Timing is
// traced via mark()/stop(). Fix: dropped the leftover `console.log` debug
// statement that shipped with the initial implementation.
async function cleanup (instanceName) {
  mark(`cleanup:${instanceName}`)
  let db = await getDatabase(instanceName)
  let storeNames = [
    STATUSES_STORE,
    STATUS_TIMELINES_STORE,
    NOTIFICATIONS_STORE,
    NOTIFICATION_TIMELINES_STORE,
    ACCOUNTS_STORE,
    RELATIONSHIPS_STORE
  ]
  await dbPromise(db, storeNames, 'readwrite', (stores) => {
    let [
      statusesStore,
      statusTimelinesStore,
      notificationsStore,
      notificationTimelinesStore,
      accountsStore,
      relationshipsStore
    ] = stores

    // Anything last touched more than TIME_AGO ago is considered stale.
    let cutoff = Date.now() - TIME_AGO

    cleanupStatuses(statusesStore, statusTimelinesStore, cutoff)
    cleanupNotifications(notificationsStore, notificationTimelinesStore, cutoff)
    cleanupAccounts(accountsStore, cutoff)
    cleanupRelationships(relationshipsStore, cutoff)
  })
  stop(`cleanup:${instanceName}`)
}
|
||||
|
||||
// Defers the (potentially expensive) cleanup of a single instance to an
// idle period so it never competes with user-visible work.
function doCleanup (instanceName) {
  scheduleIdleTask(() => cleanup(instanceName))
}
|
||||
|
||||
// Kicks off an idle-time cleanup pass for every logged-in instance.
// Fix: dropped the leftover `console.log` debug statement.
function scheduledCleanup () {
  let instances = store.get('loggedInInstancesInOrder')
  for (let instance of instances) {
    doCleanup(instance)
  }
}
|
||||
|
||||
// Public entry point: debounced so that bursts of inserts schedule at most
// one cleanup pass per DELAY window, and the pass itself runs at idle time.
export const scheduleCleanup = debounce(() => scheduleIdleTask(scheduledCleanup), DELAY)
|
|
@ -6,3 +6,8 @@ export const RELATIONSHIPS_STORE = 'relationships'
|
|||
export const NOTIFICATIONS_STORE = 'notifications'
|
||||
export const NOTIFICATION_TIMELINES_STORE = 'notification_timelines'
|
||||
export const PINNED_STATUSES_STORE = 'pinned_statuses'
|
||||
|
||||
export const TIMESTAMP = '__pinafore_ts'
|
||||
export const ACCOUNT_ID = '__pinafore_acct_id'
|
||||
export const STATUS_ID = '__pinafore_status_id'
|
||||
export const REBLOG_ID = '__pinafore_reblog_id'
|
|
@ -6,13 +6,18 @@ import {
|
|||
RELATIONSHIPS_STORE,
|
||||
NOTIFICATIONS_STORE,
|
||||
NOTIFICATION_TIMELINES_STORE,
|
||||
PINNED_STATUSES_STORE
|
||||
PINNED_STATUSES_STORE,
|
||||
TIMESTAMP
|
||||
} from './constants'
|
||||
|
||||
const openReqs = {}
|
||||
const databaseCache = {}
|
||||
|
||||
const DB_VERSION = 2
|
||||
const DB_VERSION = 1
|
||||
|
||||
// Helper: creates an object store named `name` on `db`, keyed by `keyPath`.
// Fix: the body referenced the undefined name `keypPath` (typo), which
// would throw a ReferenceError the moment this helper was called.
function objectStore (db, name, keyPath) {
  return db.createObjectStore(name, { keyPath: keyPath })
}
|
||||
|
||||
export function getDatabase (instanceName) {
|
||||
if (!instanceName) {
|
||||
|
@ -31,21 +36,21 @@ export function getDatabase (instanceName) {
|
|||
}
|
||||
req.onupgradeneeded = (e) => {
|
||||
let db = req.result
|
||||
if (e.oldVersion < 1) {
|
||||
db.createObjectStore(META_STORE, {keyPath: 'key'})
|
||||
db.createObjectStore(STATUSES_STORE, {keyPath: 'id'})
|
||||
db.createObjectStore(ACCOUNTS_STORE, {keyPath: 'id'})
|
||||
db.createObjectStore(RELATIONSHIPS_STORE, {keyPath: 'id'})
|
||||
db.createObjectStore(NOTIFICATIONS_STORE, {keyPath: 'id'})
|
||||
.createIndex(TIMESTAMP, TIMESTAMP)
|
||||
db.createObjectStore(STATUS_TIMELINES_STORE, {keyPath: 'id'})
|
||||
.createIndex('statusId', 'statusId')
|
||||
db.createObjectStore(NOTIFICATIONS_STORE, {keyPath: 'id'})
|
||||
.createIndex(TIMESTAMP, TIMESTAMP)
|
||||
db.createObjectStore(NOTIFICATION_TIMELINES_STORE, {keyPath: 'id'})
|
||||
.createIndex('notificationId', 'notificationId')
|
||||
}
|
||||
if (e.oldVersion < 2) {
|
||||
db.createObjectStore(ACCOUNTS_STORE, {keyPath: 'id'})
|
||||
.createIndex(TIMESTAMP, TIMESTAMP)
|
||||
db.createObjectStore(RELATIONSHIPS_STORE, {keyPath: 'id'})
|
||||
.createIndex(TIMESTAMP, TIMESTAMP)
|
||||
db.createObjectStore(META_STORE, {keyPath: 'key'})
|
||||
db.createObjectStore(PINNED_STATUSES_STORE, {keyPath: 'id'})
|
||||
}
|
||||
}
|
||||
req.onsuccess = () => resolve(req.result)
|
||||
})
|
||||
return databaseCache[instanceName]
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import { dbPromise, getDatabase } from './databaseLifecycle'
|
||||
import { getInCache, hasInCache, setInCache } from './cache'
|
||||
import { ACCOUNT_ID, REBLOG_ID, STATUS_ID, TIMESTAMP } from './constants'
|
||||
|
||||
export async function getGenericEntityWithId (store, cache, instanceName, id) {
|
||||
if (hasInCache(cache, instanceName, id)) {
|
||||
|
@ -20,3 +21,31 @@ export async function setGenericEntityWithId (store, cache, instanceName, entity
|
|||
store.put(entity)
|
||||
})
|
||||
}
|
||||
|
||||
// Returns a compact copy of `obj` suitable for IndexedDB storage:
// falsy values and empty arrays are dropped, embedded account/status/reblog
// objects are collapsed to their ids under private key names, and a
// storage timestamp is stamped on under TIMESTAMP for later cleanup.
export function cloneForStorage (obj) {
  const idKeys = new Map([
    ['account', ACCOUNT_ID],
    ['status', STATUS_ID],
    ['reblog', REBLOG_ID]
  ])
  const res = {}
  for (const [key, value] of Object.entries(obj)) {
    // save storage space by skipping nulls, 0s, falses, empty strings, and empty arrays
    if (!value || (Array.isArray(value) && value.length === 0)) {
      continue
    }
    if (idKeys.has(key)) {
      res[idKeys.get(key)] = value.id
    } else {
      res[key] = value
    }
  }
  res[TIMESTAMP] = Date.now()
  return res
}
|
|
@ -1,18 +1,19 @@
|
|||
import { toPaddedBigInt, toReversePaddedBigInt } from './utils'
|
||||
import { cloneForStorage } from './helpers'
|
||||
import { dbPromise, getDatabase } from './databaseLifecycle'
|
||||
import { accountsCache, getInCache, hasInCache, notificationsCache, setInCache, statusesCache } from './cache'
|
||||
import { scheduleCleanup } from './cleanup'
|
||||
import {
|
||||
ACCOUNTS_STORE,
|
||||
NOTIFICATION_TIMELINES_STORE,
|
||||
NOTIFICATIONS_STORE, PINNED_STATUSES_STORE,
|
||||
STATUS_TIMELINES_STORE,
|
||||
STATUSES_STORE
|
||||
STATUSES_STORE,
|
||||
ACCOUNT_ID,
|
||||
REBLOG_ID,
|
||||
STATUS_ID
|
||||
} from './constants'
|
||||
|
||||
const TIMESTAMP = '__pinafore_ts'
|
||||
const ACCOUNT_ID = '__pinafore_acct_id'
|
||||
const STATUS_ID = '__pinafore_status_id'
|
||||
const REBLOG_ID = '__pinafore_reblog_id'
|
||||
import { scheduleCleanup } from './cleanup'
|
||||
|
||||
function createTimelineKeyRange (timeline, maxId) {
|
||||
let negBigInt = maxId && toReversePaddedBigInt(maxId)
|
||||
|
@ -29,34 +30,6 @@ function createKeyRangeForStatusThread (timeline) {
|
|||
return IDBKeyRange.bound(start, end, true, true)
|
||||
}
|
||||
|
||||
// Produces a storage-friendly copy of `obj`: drops falsy values and empty
// arrays, replaces embedded account/status/reblog objects with their ids
// under private key names, and records the time of storage under TIMESTAMP.
function cloneForStorage (obj) {
  let copy = {}
  for (let key of Object.keys(obj)) {
    let value = obj[key]
    // save storage space by skipping nulls, 0s, falses, empty strings, and empty arrays
    if (!value || (Array.isArray(value) && value.length === 0)) {
      continue
    }
    if (key === 'account') {
      copy[ACCOUNT_ID] = value.id
    } else if (key === 'status') {
      copy[STATUS_ID] = value.id
    } else if (key === 'reblog') {
      copy[REBLOG_ID] = value.id
    } else {
      copy[key] = value
    }
  }
  copy[TIMESTAMP] = Date.now()
  return copy
}
|
||||
|
||||
function cacheStatus (status, instanceName) {
|
||||
setInCache(statusesCache, instanceName, status.id, status)
|
||||
setInCache(accountsCache, instanceName, status.account.id, status.account)
|
||||
|
@ -202,6 +175,7 @@ function createTimelineId (timeline, id) {
|
|||
}
|
||||
|
||||
async function insertTimelineNotifications (instanceName, timeline, notifications) {
|
||||
/* no await */ scheduleCleanup()
|
||||
for (let notification of notifications) {
|
||||
setInCache(notificationsCache, instanceName, notification.id, notification)
|
||||
setInCache(accountsCache, instanceName, notification.account.id, notification.account)
|
||||
|
@ -221,6 +195,7 @@ async function insertTimelineNotifications (instanceName, timeline, notification
|
|||
}
|
||||
|
||||
async function insertTimelineStatuses (instanceName, timeline, statuses) {
|
||||
/* no await */ scheduleCleanup()
|
||||
for (let status of statuses) {
|
||||
cacheStatus(status, instanceName)
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue