// @flow
import type {BaseModel} from '../models/index';

import electron from 'electron';
import NeDB from 'nedb';
import fs from 'fs';
import fsPath from 'path';
import {DB_PERSIST_INTERVAL} from './constants';
import {initModel} from '../models';
import * as models from '../models/index';
import AlertModal from '../ui/components/modals/alert-modal';
import {showModal} from '../ui/components/modals/index';
import {trackEvent} from '../analytics/index';

export const CHANGE_INSERT = 'insert';
export const CHANGE_UPDATE = 'update';
export const CHANGE_REMOVE = 'remove';

let db = {};

// ~~~~~~~ //
// HELPERS //
// ~~~~~~~ //

function allTypes () {
  return Object.keys(db);
}

function getDBFilePath (modelType) {
  // NOTE: Do not EVER change this. EVER!
  const basePath = electron.remote.app.getPath('userData');
  return fsPath.join(basePath, `insomnia.${modelType}.db`);
}

/**
 * Initialize the database. Note that this isn't actually async, but might be
 * in the future!
 *
 * @param types
 * @param config
 * @param forceReset
 * @returns {null}
 */
export async function init (
  types: Array<string>,
  config: Object = {},
  forceReset: boolean = false
) {
  if (forceReset) {
    changeListeners = [];
    db = {};
  }

  // Fill in the defaults
  for (const modelType of types) {
    if (db[modelType]) {
      console.warn(`[db] Already initialized DB.${modelType}`);
      continue;
    }

    const filePath = getDBFilePath(modelType);

    // Check to make sure the responses DB file isn't too big to parse. If it is,
    // we should delete it.
    try {
      const MBs = fs.statSync(filePath).size / 1024 / 1024;
      if (modelType === models.response.type && MBs > 256) {
        // NOTE: Node.js can't have a string longer than 256MB. Since the response DB
        // can reach sizes that big, let's not even load it if it's bigger than that.
        // Just start over.
        console.warn(`[db] Response DB too big (${MBs}). Deleting...`);
        fs.unlinkSync(filePath);

        // Can't show alert until the app renders, so delay for a bit first
        setTimeout(() => {
          showModal(AlertModal, {
            title: 'Response DB Too Large',
            message: 'Your combined responses have exceeded 256MB and have been flushed. ' +
            'NOTE: A better solution to this will be implemented in a future release.'
          });
          trackEvent('Alert', 'DB Too Large');
        }, 1000);
      }
    } catch (err) {
      // File probably didn't exist, so no big deal
    }

    const collection = new NeDB(Object.assign({
      autoload: true,
      filename: filePath
    }, config));

    collection.persistence.setAutocompactionInterval(DB_PERSIST_INTERVAL);

    db[modelType] = collection;
  }

  console.log(`[db] Initialized DB at ${getDBFilePath('$TYPE')}`);
}
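
// Illustrative usage (not executed here): a sketch of how a caller might initialize
// the DB at startup by passing the model types it needs. The specific types listed
// below are assumptions for the example; only `models.response` is referenced in
// this file.
//
//   await init([
//     models.workspace.type,  // assumed model type
//     models.request.type,    // assumed model type
//     models.response.type
//   ]);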
// ~~~~~~~~~~~~~~~~ //
// Change Listeners //
// ~~~~~~~~~~~~~~~~ //

let bufferingChanges = false;
let changeBuffer = [];
let changeListeners = [];

export function onChange (callback: Function): void {
  changeListeners.push(callback);
}

export function offChange (callback: Function): void {
  changeListeners = changeListeners.filter(l => l !== callback);
}

export function bufferChanges (millis: number = 1000): void {
  bufferingChanges = true;
  setTimeout(flushChanges, millis);
}

export async function flushChanges (): Promise<void> {
  bufferingChanges = false;
  const changes = [...changeBuffer];
  changeBuffer = [];

  if (changes.length === 0) {
    // No work to do
    return;
  }

  for (const fn of changeListeners) {
    await fn(changes);
  }
}

async function notifyOfChange (event: string, doc: BaseModel, fromSync: boolean): Promise<void> {
  changeBuffer.push([event, doc, fromSync]);

  // Flush right away if we're not buffering
  if (!bufferingChanges) {
    await flushChanges();
  }
}
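
// Illustrative usage (not executed here): a sketch of subscribing to changes and
// batching a burst of writes so listeners receive them in a single flush. `docA`
// and `docB` are hypothetical BaseModel documents.
//
//   onChange(changes => {
//     for (const [event, doc, fromSync] of changes) {
//       console.log(`[db] ${event} ${doc._id} (fromSync=${fromSync})`);
//     }
//   });
//
//   bufferChanges();
//   await update(docA);
//   await update(docB);
//   await flushChanges(); // both updates arrive in one changes array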
// ~~~~~~~ //
// Helpers //
// ~~~~~~~ //

export async function getMostRecentlyModified (
  type: string,
  query: Object = {}
): Promise<BaseModel | null> {
  const docs = await findMostRecentlyModified(type, query, 1);
  return docs.length ? docs[0] : null;
}

export function findMostRecentlyModified (
  type: string,
  query: Object = {},
  limit: number | null = null
): Promise<Array<BaseModel>> {
  return new Promise(resolve => {
    db[type].find(query).sort({modified: -1}).limit(limit).exec(async (err, rawDocs) => {
      if (err) {
        console.warn('[db] Failed to find docs', err);
        resolve([]);
        return;
      }

      const docs = [];
      for (const rawDoc of rawDocs) {
        docs.push(await initModel(type, rawDoc));
      }

      resolve(docs);
    });
  });
}

export function find (
  type: string,
  query: Object = {},
  sort: Object = {created: 1}
): Promise<Array<BaseModel>> {
  return new Promise((resolve, reject) => {
    db[type].find(query).sort(sort).exec(async (err, rawDocs) => {
      if (err) {
        return reject(err);
      }

      const docs = [];
      for (const rawDoc of rawDocs) {
        docs.push(await initModel(type, rawDoc));
      }

      resolve(docs);
    });
  });
}

export function all (type: string): Promise<Array<BaseModel>> {
  return find(type);
}

export async function getWhere (type: string, query: Object): Promise<BaseModel | null> {
  const docs = await find(type, query);
  return docs.length ? docs[0] : null;
}

export async function get (type: string, id: string): Promise<BaseModel | null> {
  // Short circuit IDs used to represent nothing
  if (!id || id === 'n/a') {
    return null;
  } else {
    return getWhere(type, {_id: id});
  }
}

export function count (type: string, query: Object = {}): Promise<number> {
  return new Promise((resolve, reject) => {
    db[type].count(query, (err, count) => {
      if (err) {
        return reject(err);
      }

      resolve(count);
    });
  });
}

export async function upsert (doc: BaseModel, fromSync: boolean = false): Promise<BaseModel> {
  const existingDoc = await get(doc.type, doc._id);
  if (existingDoc) {
    return update(doc, fromSync);
  } else {
    return insert(doc, fromSync);
  }
}

export function insert<T: BaseModel> (doc: T, fromSync: boolean = false): Promise<T> {
  return new Promise(async (resolve, reject) => {
    const docWithDefaults = await initModel(doc.type, doc);
    db[doc.type].insert(docWithDefaults, (err, newDoc) => {
      if (err) {
        return reject(err);
      }

      resolve(newDoc);

      // NOTE: This needs to be after we resolve
      notifyOfChange(CHANGE_INSERT, newDoc, fromSync);
    });
  });
}

export function update<T: BaseModel> (doc: T, fromSync: boolean = false): Promise<T> {
  return new Promise(async (resolve, reject) => {
    const docWithDefaults = await initModel(doc.type, doc);
    db[doc.type].update({_id: docWithDefaults._id}, docWithDefaults, err => {
      if (err) {
        return reject(err);
      }

      resolve(docWithDefaults);

      // NOTE: This needs to be after we resolve
      notifyOfChange(CHANGE_UPDATE, docWithDefaults, fromSync);
    });
  });
}

export async function remove<T: BaseModel> (doc: T, fromSync: boolean = false): Promise<void> {
  bufferChanges();

  const docs = await withDescendants(doc);
  const docIds = docs.map(d => d._id);
  const types = [...new Set(docs.map(d => d.type))];

  // Don't really need to wait for this to be over
  types.map(t => db[t].remove({_id: {$in: docIds}}, {multi: true}));

  docs.map(d => notifyOfChange(CHANGE_REMOVE, d, fromSync));

  flushChanges();
}

export async function removeWhere (type: string, query: Object): Promise<void> {
  bufferChanges();

  for (const doc of await find(type, query)) {
    const docs = await withDescendants(doc);
    const docIds = docs.map(d => d._id);
    const types = [...new Set(docs.map(d => d.type))];

    // Don't really need to wait for this to be over
    types.map(t => db[t].remove({_id: {$in: docIds}}, {multi: true}));
    docs.map(d => notifyOfChange(CHANGE_REMOVE, d, false));
  }

  flushChanges();
}

// ~~~~~~~~~~~~~~~~~~~ //
// DEFAULT MODEL STUFF //
// ~~~~~~~~~~~~~~~~~~~ //

export async function docUpdate<T: BaseModel> (originalDoc: T, patch: Object = {}): Promise<T> {
  const doc = await initModel(
    originalDoc.type,
    originalDoc,

    // NOTE: This is before `patch` because we want `patch.modified` to win if it has it
    {modified: Date.now()},

    patch,
  );

  return update(doc);
}

export async function docCreate<T: BaseModel> (
  type: string,
  ...patches: Array<Object>
): Promise<T> {
  const doc = await initModel(
    type,
    ...patches,

    // Fields that the user can't touch
    {type: type}
  );

  return insert(doc);
}
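
// Illustrative usage (not executed here): a sketch of creating a document and
// patching it later. `models.request.type` is assumed to be a registered model
// type, and 'wrk_123' is a hypothetical parent workspace ID.
//
//   const request = await docCreate(models.request.type, {
//     parentId: 'wrk_123',
//     name: 'My Request'
//   });
//
//   await docUpdate(request, {name: 'Renamed Request'});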
// ~~~~~~~ //
// GENERAL //
// ~~~~~~~ //

export async function withDescendants (
  doc: BaseModel,
  stopType: string | null = null
): Promise<Array<BaseModel>> {
  let docsToReturn = doc ? [doc] : [];

  async function next (docs: Array<BaseModel>): Promise<Array<BaseModel>> {
    let foundDocs = [];

    for (const d of docs) {
      if (stopType && d.type === stopType) {
        continue;
      }

      for (const type of allTypes()) {
        // If the doc is null, we want to search for parentId === null
        const parentId = d ? d._id : null;
        const more = await find(type, {parentId});
        foundDocs = [...foundDocs, ...more];
      }
    }

    if (foundDocs.length === 0) {
      // Didn't find anything. We're done
      return docsToReturn;
    }

    // Continue searching for children
    docsToReturn = [...docsToReturn, ...foundDocs];
    return await next(foundDocs);
  }

  return await next([doc]);
}

export async function withAncestors (
  doc: BaseModel | null,
  types: Array<string> = allTypes()
): Promise<Array<BaseModel>> {
  if (!doc) {
    return [];
  }

  let docsToReturn = doc ? [doc] : [];

  async function next (docs: Array<BaseModel>): Promise<Array<BaseModel>> {
    let foundDocs = [];

    for (const d: BaseModel of docs) {
      for (const type of types) {
        // Try to find the doc's parent under this type
        const another = await get(type, d.parentId);
        another && foundDocs.push(another);
      }
    }

    if (foundDocs.length === 0) {
      // Didn't find anything. We're done
      return docsToReturn;
    }

    // Continue searching for ancestors
    docsToReturn = [...docsToReturn, ...foundDocs];
    return await next(foundDocs);
  }

  return await next([doc]);
}

export async function duplicate<T: BaseModel> (originalDoc: T, patch: Object = {}): Promise<T> {
  bufferChanges();

  async function next (docToCopy: T, patch: Object): Promise<T> {
    // 1. Copy the doc
    const newDoc = Object.assign({}, docToCopy, patch);
    delete newDoc._id;
    delete newDoc.created;
    delete newDoc.modified;

    const createdDoc = await docCreate(newDoc.type, newDoc);

    // 2. Get all the children
    for (const type of allTypes()) {
      // Note: We never want to duplicate a response
      if (!models.canDuplicate(type)) {
        continue;
      }

      const parentId = docToCopy._id;
      const children = await find(type, {parentId});
      for (const doc of children) {
        await next(doc, {parentId: createdDoc._id});
      }
    }

    return createdDoc;
  }

  const createdDoc = await next(originalDoc, patch);

  flushChanges();

  return createdDoc;
}
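
// Illustrative usage (not executed here): duplicating a document also copies its
// duplicatable descendants, then flushes a single batch of change events. `folder`
// is a hypothetical request-group document.
//
//   const copy = await duplicate(folder, {name: `${folder.name} (Copy)`});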