simplify database structure
This commit is contained in:
parent
fb2f16f34a
commit
47c058a19f
|
@ -1,2 +0,0 @@
|
|||
export const openReqs = {}
|
||||
export const databaseCache = {}
|
|
@ -1,57 +0,0 @@
|
|||
import { getKnownDbs } from './knownDbs'
|
||||
import debounce from 'lodash/debounce'
|
||||
import { TIMELINE_STORE, getTimelineDatabase } from './timelines'
|
||||
|
||||
const MAX_NUM_STORED_STATUSES = 1000
|
||||
const CLEANUP_INTERVAL = 60000
|
||||
|
||||
async function cleanup(instanceName, timeline) {
|
||||
const db = await getTimelineDatabase(instanceName, timeline)
|
||||
return await new Promise((resolve, reject) => {
|
||||
const tx = db.transaction(TIMELINE_STORE, 'readwrite')
|
||||
const store = tx.objectStore(TIMELINE_STORE)
|
||||
const index = store.index('pinafore_id_as_negative_big_int')
|
||||
|
||||
store.count().onsuccess = (e) => {
|
||||
let count = e.target.result
|
||||
if (count <= MAX_NUM_STORED_STATUSES) {
|
||||
return
|
||||
}
|
||||
let openKeyCursor = index.openKeyCursor || index.openCursor
|
||||
openKeyCursor.call(index, null, 'prev').onsuccess = (e) => {
|
||||
let cursor = e.target.result
|
||||
if (--count < MAX_NUM_STORED_STATUSES || !cursor) {
|
||||
return
|
||||
}
|
||||
store.delete(cursor.primaryKey).onsuccess = () => {
|
||||
cursor.continue()
|
||||
}
|
||||
}
|
||||
}
|
||||
tx.oncomplete = () => resolve()
|
||||
tx.onerror = () => reject(tx.error.name + ' ' + tx.error.message)
|
||||
})
|
||||
}
|
||||
|
||||
export const cleanupOldStatuses = debounce(async () => {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
console.log('cleanupOldStatuses')
|
||||
}
|
||||
|
||||
let knownDbs = await getKnownDbs()
|
||||
let instanceNames = Object.keys(knownDbs)
|
||||
for (let instanceName of instanceNames) {
|
||||
let knownDbsForInstance = knownDbs[instanceName] || []
|
||||
for (let knownDb of knownDbsForInstance) {
|
||||
let {type, dbName} = knownDb
|
||||
if (type !== 'timeline') {
|
||||
continue
|
||||
}
|
||||
let timeline = dbName.split('_').slice(-1)[0]
|
||||
await cleanup(instanceName, timeline)
|
||||
}
|
||||
}
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
console.log('done cleanupOldStatuses')
|
||||
}
|
||||
}, CLEANUP_INTERVAL)
|
|
@ -0,0 +1,3 @@
|
|||
// Object-store names shared by every per-instance IndexedDB database.
export const STATUSES_STORE = 'statuses'   // full status objects, keyed by id
export const TIMELINE_STORE = 'timelines'  // per-timeline ordering entries pointing at statuses
export const META_STORE = 'meta'           // key/value metadata (e.g. verifyCredentials)
|
|
@ -1,34 +1,59 @@
|
|||
import { META_STORE, getMetaDatabase } from './meta'
|
||||
import { cleanupOldStatuses } from './cleanupTimelines'
|
||||
import { TIMELINE_STORE, getTimelineDatabase } from './timelines'
|
||||
import { toReversePaddedBigInt, transformStatusForStorage, dbPromise, deleteDbPromise } from './utils'
|
||||
import { getKnownDbsForInstance, deleteInstanceFromKnownDbs } from './knownDbs'
|
||||
import {
|
||||
toReversePaddedBigInt
|
||||
} from './utils'
|
||||
import {
|
||||
getDatabase,
|
||||
dbPromise,
|
||||
deleteDatabase,
|
||||
} from './databaseLifecycle'
|
||||
|
||||
import {
|
||||
META_STORE,
|
||||
TIMELINE_STORE,
|
||||
STATUSES_STORE
|
||||
} from './constants'
|
||||
|
||||
/**
 * Fetch up to `limit` statuses for a timeline, newest first.
 *
 * Timeline entries are keyed `timeline + '\u0000' + reversePaddedBigInt(id)`,
 * so a bounded key-range scan over TIMELINE_STORE yields reverse-chronological
 * order; each entry is then resolved to the full status in STATUSES_STORE.
 *
 * @param {string} instanceName
 * @param {string} timeline
 * @param {?string} maxId - when set, start the page at this status id
 *   (inclusive, matching the previous lowerBound(..., false) behavior)
 * @param {number} limit - max number of statuses to return
 * @returns {Promise<Object[]>} statuses, newest first
 */
export async function getTimeline (instanceName, timeline, maxId = null, limit = 20) {
  // Fix: getDatabase() is keyed by instance only — the old per-timeline
  // database is gone, so the extra `timeline` argument was spurious.
  const db = await getDatabase(instanceName)
  return dbPromise(db, [TIMELINE_STORE, STATUSES_STORE], 'readonly', (stores, callback) => {
    const [timelineStore, statusesStore] = stores

    const negBigInt = maxId && toReversePaddedBigInt(maxId)
    const start = negBigInt ? (timeline + '\u0000' + negBigInt) : (timeline + '\u0000')
    const end = timeline + '\u0000\uffff'
    const query = IDBKeyRange.bound(start, end, false, false)

    timelineStore.getAll(query, limit).onsuccess = e => {
      const timelineResults = e.target.result
      const res = new Array(timelineResults.length)
      timelineResults.forEach((timelineResult, i) => {
        statusesStore.get(timelineResult.statusId).onsuccess = e => {
          res[i] = e.target.result
        }
      })
      // `res` is filled in asynchronously by the get() handlers above.
      // Handing the array to `callback` now is safe because dbPromise only
      // resolves on tx.oncomplete, i.e. after every request in this
      // transaction has finished.
      callback(res)
    }
  })
}
|
||||
|
||||
/**
 * Persist statuses and index them into a timeline.
 *
 * Each status is stored once in STATUSES_STORE (keyed by its id); TIMELINE_STORE
 * gets one small pointer entry per (timeline, status) pair whose key sorts the
 * timeline in reverse-chronological order.
 *
 * @param {string} instanceName
 * @param {string} timeline
 * @param {Object[]} statuses - statuses as received from the API
 * @returns {Promise<void>}
 */
export async function insertStatuses (instanceName, timeline, statuses) {
  // Fix: the database is per-instance; `timeline` only participates in the
  // entry keys, so it must not be passed to getDatabase().
  const db = await getDatabase(instanceName)
  await dbPromise(db, [TIMELINE_STORE, STATUSES_STORE], 'readwrite', (stores) => {
    const [timelineStore, statusesStore] = stores
    for (const status of statuses) {
      statusesStore.put(status)
      // reverse chronological order, prefixed by timeline
      const id = timeline + '\u0000' + toReversePaddedBigInt(status.id)
      timelineStore.put({
        id,
        statusId: status.id
      })
    }
  })
  // NOTE(review): the old /* no await */ cleanupOldStatuses() call was dropped
  // because cleanupTimelines.js is deleted in this commit, so stored statuses
  // now accumulate without bound — consider reinstating a cleanup pass.
}
|
||||
|
||||
export async function getInstanceVerifyCredentials(instanceName) {
|
||||
const db = await getMetaDatabase(instanceName)
|
||||
const db = await getDatabase(instanceName)
|
||||
return await dbPromise(db, META_STORE, 'readonly', (store, callback) => {
|
||||
store.get('verifyCredentials').onsuccess = (e) => {
|
||||
callback(e.target.result && e.target.result.value)
|
||||
|
@ -37,7 +62,7 @@ export async function getInstanceVerifyCredentials(instanceName) {
|
|||
}
|
||||
|
||||
export async function setInstanceVerifyCredentials(instanceName, verifyCredentials) {
|
||||
const db = await getMetaDatabase(instanceName)
|
||||
const db = await getDatabase(instanceName)
|
||||
return await dbPromise(db, META_STORE, 'readwrite', (store) => {
|
||||
store.put({
|
||||
key: 'verifyCredentials',
|
||||
|
@ -46,17 +71,6 @@ export async function setInstanceVerifyCredentials(instanceName, verifyCredentia
|
|||
})
|
||||
}
|
||||
|
||||
export async function clearDatabasesForInstance(instanceName) {
|
||||
console.log('clearDatabasesForInstance', instanceName)
|
||||
const knownDbsForInstance = await getKnownDbsForInstance(instanceName)
|
||||
for (let knownDb of knownDbsForInstance) {
|
||||
let { dbName } = knownDb
|
||||
try {
|
||||
await deleteDbPromise(dbName)
|
||||
console.error(`deleted database ${dbName}`)
|
||||
} catch (e) {
|
||||
console.error(`failed to delete database ${dbName}`)
|
||||
}
|
||||
}
|
||||
await deleteInstanceFromKnownDbs(instanceName)
|
||||
/**
 * Remove all locally-stored data for an instance.
 *
 * With the simplified one-database-per-instance layout, dropping that single
 * database wipes everything (meta, statuses, timelines).
 *
 * @param {string} instanceName
 * @returns {Promise<void>}
 */
export async function clearDatabaseForInstance (instanceName) {
  return deleteDatabase(instanceName)
}
|
|
@ -0,0 +1,63 @@
|
|||
// One IDBOpenDBRequest per instance, kept so deleteDatabase() can close the
// live connection (openReq.result.close()) before deleting the database.
const openReqs = {}
// instanceName -> Promise<IDBDatabase>; caches the open so each instance
// shares a single connection.
const databaseCache = {}
|
||||
|
||||
import {
|
||||
META_STORE,
|
||||
TIMELINE_STORE,
|
||||
STATUSES_STORE
|
||||
} from './constants'
|
||||
|
||||
/**
 * Open (or return the cached connection promise for) the per-instance
 * database, creating the meta/statuses/timelines object stores on first use.
 *
 * @param {string} instanceName - instance name, used as the database name
 * @returns {Promise<IDBDatabase>}
 */
export function getDatabase (instanceName) {
  if (databaseCache[instanceName]) {
    // Cached value is already a promise; no need to re-wrap it.
    return databaseCache[instanceName]
  }

  databaseCache[instanceName] = new Promise((resolve, reject) => {
    const req = indexedDB.open(instanceName, 1)
    openReqs[instanceName] = req
    req.onerror = (e) => {
      // Fix: evict the failed entry so a later call can retry, instead of
      // every subsequent call receiving this permanently-rejected promise.
      delete databaseCache[instanceName]
      delete openReqs[instanceName]
      reject(e)
    }
    req.onblocked = () => {
      console.log('idb blocked')
    }
    req.onupgradeneeded = () => {
      const db = req.result
      db.createObjectStore(META_STORE, { keyPath: 'key' })
      db.createObjectStore(STATUSES_STORE, { keyPath: 'id' })
      const timelineStore = db.createObjectStore(TIMELINE_STORE, { keyPath: 'id' })
      timelineStore.createIndex('statusId', 'statusId')
    }
    req.onsuccess = () => resolve(req.result)
  })
  return databaseCache[instanceName]
}
|
||||
|
||||
/**
 * Run a callback inside a single IndexedDB transaction and resolve with the
 * value the callback reported via its `callback(result)` argument once the
 * transaction completes.
 *
 * @param {IDBDatabase} db
 * @param {string|string[]} storeName - one store name, or several; with an
 *   array the callback receives an array of stores in the same order
 * @param {'readonly'|'readwrite'} readOnlyOrReadWrite
 * @param {Function} cb - invoked as cb(storeOrStores, callback)
 * @returns {Promise<*>} the last value passed to `callback`
 */
export async function dbPromise (db, storeName, readOnlyOrReadWrite, cb) {
  return new Promise((resolve, reject) => {
    const tx = db.transaction(storeName, readOnlyOrReadWrite)
    const store = typeof storeName === 'string'
      ? tx.objectStore(storeName)
      : storeName.map(name => tx.objectStore(name))
    let res
    cb(store, (result) => {
      res = result
    })
    // Resolution waits for tx.oncomplete, so any requests issued inside `cb`
    // have finished before `res` is handed back.
    tx.oncomplete = () => resolve(res)
    // Fix: reject with a real Error (not a bare string) so callers get a
    // stack trace and a uniform rejection type.
    tx.onerror = () => reject(new Error(tx.error.name + ' ' + tx.error.message))
  })
}
|
||||
|
||||
/**
 * Delete an instance's database, first closing any connection this module
 * opened so the deleteDatabase request isn't blocked by a live connection.
 *
 * @param {string} instanceName
 * @returns {Promise<void>}
 */
export function deleteDatabase (instanceName) {
  return new Promise((resolve, reject) => {
    // Close any open connection; an open connection would leave the
    // deleteDatabase request pending in the "blocked" state.
    const openReq = openReqs[instanceName]
    if (openReq && openReq.result) {
      openReq.result.close()
    }
    delete openReqs[instanceName]
    delete databaseCache[instanceName]
    const req = indexedDB.deleteDatabase(instanceName)
    req.onsuccess = () => resolve()
    // Fix: reject with a real Error (not a bare string), consistent with
    // dbPromise's rejection handling.
    req.onerror = () => reject(new Error(req.error.name + ' ' + req.error.message))
  })
}
|
|
@ -1,27 +0,0 @@
|
|||
import keyval from "idb-keyval"
|
||||
|
||||
export async function addKnownDb(instanceName, type, dbName) {
|
||||
let knownDbs = (await getKnownDbs())
|
||||
if (!knownDbs[instanceName]) {
|
||||
knownDbs[instanceName] = []
|
||||
}
|
||||
if (!knownDbs[instanceName].some(db => db.type === type && db.dbName === dbName)) {
|
||||
knownDbs[instanceName].push({type, dbName})
|
||||
}
|
||||
await keyval.set('known_dbs', knownDbs)
|
||||
}
|
||||
|
||||
export async function getKnownDbs() {
|
||||
return (await keyval.get('known_dbs')) || {}
|
||||
}
|
||||
|
||||
export async function getKnownDbsForInstance(instanceName) {
|
||||
let knownDbs = await getKnownDbs()
|
||||
return knownDbs[instanceName] || []
|
||||
}
|
||||
|
||||
export async function deleteInstanceFromKnownDbs(instanceName) {
|
||||
let knownDbs = await getKnownDbs()
|
||||
delete knownDbs[instanceName]
|
||||
await keyval.set('known_dbs', knownDbs)
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
import { addKnownDb } from './knownDbs'
|
||||
import { openReqs, databaseCache } from './cache'
|
||||
|
||||
export const META_STORE = 'meta'
|
||||
|
||||
export function getMetaDatabase(instanceName) {
|
||||
const dbName = `${instanceName}_${META_STORE}`
|
||||
if (databaseCache[dbName]) {
|
||||
return Promise.resolve(databaseCache[dbName])
|
||||
}
|
||||
|
||||
addKnownDb(instanceName, 'meta', dbName)
|
||||
|
||||
databaseCache[dbName] = new Promise((resolve, reject) => {
|
||||
let req = indexedDB.open(dbName, 1)
|
||||
openReqs[dbName] = req
|
||||
req.onerror = reject
|
||||
req.onblocked = () => {
|
||||
console.log('idb blocked')
|
||||
}
|
||||
req.onupgradeneeded = () => {
|
||||
let db = req.result;
|
||||
db.createObjectStore(META_STORE, {keyPath: 'key'})
|
||||
}
|
||||
req.onsuccess = () => resolve(req.result)
|
||||
})
|
||||
return databaseCache[dbName]
|
||||
}
|
|
@ -1,36 +0,0 @@
|
|||
import { addKnownDb } from './knownDbs'
|
||||
import { openReqs, databaseCache } from './cache'
|
||||
|
||||
export const TIMELINE_STORE = 'statuses'
|
||||
|
||||
export function createTimelineDbName(instanceName, timeline) {
|
||||
return `${instanceName}_timeline_${timeline}`
|
||||
}
|
||||
|
||||
export function getTimelineDatabase(instanceName, timeline) {
|
||||
let dbName = createTimelineDbName(instanceName, timeline)
|
||||
|
||||
if (databaseCache[dbName]) {
|
||||
return Promise.resolve(databaseCache[dbName])
|
||||
}
|
||||
|
||||
addKnownDb(instanceName, 'timeline', dbName)
|
||||
|
||||
databaseCache[dbName] = new Promise((resolve, reject) => {
|
||||
let req = indexedDB.open(dbName, 1)
|
||||
openReqs[dbName] = req
|
||||
req.onerror = reject
|
||||
req.onblocked = () => {
|
||||
console.log('idb blocked')
|
||||
}
|
||||
req.onupgradeneeded = () => {
|
||||
let db = req.result;
|
||||
let oStore = db.createObjectStore(TIMELINE_STORE, {
|
||||
keyPath: 'id'
|
||||
})
|
||||
oStore.createIndex('pinafore_id_as_negative_big_int', 'pinafore_id_as_negative_big_int')
|
||||
}
|
||||
req.onsuccess = () => resolve(req.result)
|
||||
})
|
||||
return databaseCache[dbName]
|
||||
}
|
|
@ -1,6 +1,4 @@
|
|||
import cloneDeep from 'lodash/cloneDeep'
|
||||
import padStart from 'lodash/padStart'
|
||||
import { databaseCache, openReqs } from './cache'
|
||||
|
||||
export function toPaddedBigInt (id) {
|
||||
return padStart(id, 30, '0')
|
||||
|
@ -13,40 +11,4 @@ export function toReversePaddedBigInt (id) {
|
|||
res += (9 - parseInt(bigInt.charAt(i), 10)).toString(10)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
export function transformStatusForStorage (status) {
|
||||
status = cloneDeep(status)
|
||||
status.pinafore_id_as_negative_big_int = toReversePaddedBigInt(status.id)
|
||||
status.pinafore_stale = true
|
||||
return status
|
||||
}
|
||||
|
||||
export async function dbPromise(db, storeName, readOnlyOrReadWrite, cb) {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const tx = db.transaction(storeName, readOnlyOrReadWrite)
|
||||
const store = tx.objectStore(storeName)
|
||||
let res
|
||||
cb(store, (result) => {
|
||||
res = result
|
||||
})
|
||||
|
||||
tx.oncomplete = () => resolve(res)
|
||||
tx.onerror = () => reject(tx.error.name + ' ' + tx.error.message)
|
||||
})
|
||||
}
|
||||
|
||||
export function deleteDbPromise(dbName) {
|
||||
return new Promise((resolve, reject) => {
|
||||
// close any open requests
|
||||
let openReq = openReqs[dbName];
|
||||
if (openReq && openReq.result) {
|
||||
openReq.result.close()
|
||||
}
|
||||
delete openReqs[dbName]
|
||||
delete databaseCache[dbName]
|
||||
let req = indexedDB.deleteDatabase(dbName)
|
||||
req.onsuccess = () => resolve()
|
||||
req.onerror = () => reject(req.error.name + ' ' + req.error.message)
|
||||
})
|
||||
}
|
|
@ -174,7 +174,7 @@
|
|||
currentInstance: newInstance
|
||||
})
|
||||
this.store.save()
|
||||
database.clearDatabasesForInstance(instanceName)
|
||||
database.clearDatabaseForInstance(instanceName)
|
||||
switchToTheme(instanceThemes[newInstance] || 'default')
|
||||
toast.say(`Logged out of ${instanceName}`)
|
||||
goto('/settings/instances')
|
||||
|
|
Loading…
Reference in New Issue