// insomnia/packages/insomnia-app/app/common/database.js
// @flow
import type { BaseModel } from '../models/index';
import * as models from '../models/index';
import electron from 'electron';
import NeDB from 'nedb';
import fsPath from 'path';
import { DB_PERSIST_INTERVAL } from './constants';
import uuid from 'uuid';

export const CHANGE_INSERT = 'insert';
export const CHANGE_UPDATE = 'update';
export const CHANGE_REMOVE = 'remove';

const database = {};
const db = {
  _empty: true
};
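
// NOTE: `database` collects every exported function by name so the main process can look
// them up when a renderer forwards a call over IPC (see `_send()` near the bottom of this
// file). Each exported function checks `db._empty` first: if the local NeDB collections
// were never initialized (i.e. we are running in a renderer window), the call is proxied
// to the main process instead of touching the database files directly.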

// ~~~~~~~ //
// HELPERS //
// ~~~~~~~ //

function allTypes() {
  return Object.keys(db);
}

function getDBFilePath(modelType) {
  // NOTE: Do not EVER change this. EVER!
  const { app } = electron.remote || electron;
  const basePath = app.getPath('userData');
  return fsPath.join(basePath, `insomnia.${modelType}.db`);
}
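
// For example, a modelType of 'Request' (illustrative value) resolves to something like
// <Electron userData>/insomnia.Request.db. The model type strings are defined in
// ../models and, per the note above, this naming scheme must never change.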

export async function initClient() {
  electron.ipcRenderer.on('db.changes', async (e, changes) => {
    for (const fn of changeListeners) {
      await fn(changes);
    }
  });

  console.log('[db] Initialized DB client');
}

export async function init(types: Array<string>, config: Object = {}, forceReset: boolean = false) {
  if (forceReset) {
    changeListeners = [];

    for (const attr of Object.keys(db)) {
      if (attr === '_empty') {
        continue;
      }

      delete db[attr];
    }
  }

  // Fill in the defaults
  for (const modelType of types) {
    if (db[modelType]) {
      console.log(`[db] Already initialized DB.${modelType}`);
      continue;
    }

    const filePath = getDBFilePath(modelType);
    const collection = new NeDB(
      Object.assign(
        {
          autoload: true,
          filename: filePath
        },
        config
      )
    );

    collection.persistence.setAutocompactionInterval(DB_PERSIST_INTERVAL);

    db[modelType] = collection;
  }

  delete db._empty;

  electron.ipcMain.on('db.fn', async (e, fnName, replyChannel, ...args) => {
    const result = await database[fnName](...args);
    e.sender.send(replyChannel, result);
  });

  // NOTE: Only repair the DB if we're not running in memory. Repairing here causes tests to
  // hang indefinitely for some reason.
  // TODO: Figure out why this makes tests hang
  if (!config.inMemoryOnly) {
    await _repairDatabase();
  }

  if (!config.inMemoryOnly) {
    console.log(`[db] Initialized DB at ${getDBFilePath('$TYPE')}`);
  }

  // This isn't the best place for this, but it'll do for now.
  // Listen for response deletions and delete corresponding response body files
  onChange(async changes => {
    for (const [type, doc] of changes) {
      const m: Object | null = models.getModel(doc.type);
      if (!m) {
        continue;
      }

      if (type === CHANGE_REMOVE && typeof m.hookRemove === 'function') {
        try {
          await m.hookRemove(doc);
        } catch (err) {
          console.log(`[db] Delete hook failed for ${type} ${doc._id}: ${err.message}`);
        }
      }

      if (type === CHANGE_INSERT && typeof m.hookInsert === 'function') {
        try {
          await m.hookInsert(doc);
        } catch (err) {
          console.log(`[db] Insert hook failed for ${type} ${doc._id}: ${err.message}`);
        }
      }

      if (type === CHANGE_UPDATE && typeof m.hookUpdate === 'function') {
        try {
          await m.hookUpdate(doc);
        } catch (err) {
          console.log(`[db] Update hook failed for ${type} ${doc._id}: ${err.message}`);
        }
      }
    }
  });
}
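
// Usage sketch (hypothetical call site; given the ipcMain handler registered above, this
// is expected to run once in the main process at startup):
//
//   await init(models.types(), {}, false);
//
// `models.types()` is assumed here to return every model type string. Renderer windows
// call initClient() instead and proxy their DB calls over IPC via _send().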

// ~~~~~~~~~~~~~~~~ //
// Change Listeners //
// ~~~~~~~~~~~~~~~~ //

let bufferingChanges = false;
let changeBuffer = [];
let changeListeners = [];

export function onChange(callback: Function): void {
  changeListeners.push(callback);
}

export function offChange(callback: Function): void {
  changeListeners = changeListeners.filter(l => l !== callback);
}

export const bufferChanges = (database.bufferChanges = async function(
  millis: number = 1000
): Promise<void> {
  if (db._empty) return _send('bufferChanges', ...arguments);
  bufferingChanges = true;
  setTimeout(database.flushChanges, millis);
});

export const flushChangesAsync = (database.flushChangesAsync = async function() {
  process.nextTick(async () => {
    await flushChanges();
  });
});

export const flushChanges = (database.flushChanges = async function() {
  if (db._empty) return _send('flushChanges', ...arguments);
  bufferingChanges = false;
  const changes = [...changeBuffer];
  changeBuffer = [];

  if (changes.length === 0) {
    // No work to do
    return;
  }

  // Notify local listeners too
  for (const fn of changeListeners) {
    await fn(changes);
  }

  // Notify remote listeners
  const windows = electron.BrowserWindow.getAllWindows();
  for (const window of windows) {
    window.webContents.send('db.changes', changes);
  }
});

async function notifyOfChange(event: string, doc: BaseModel, fromSync: boolean): Promise<void> {
  changeBuffer.push([event, doc, fromSync]);

  // Flush right away if we're not buffering
  if (!bufferingChanges) {
    await database.flushChanges();
  }
}
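
// Typical batching pattern (a sketch; remove() and duplicate() below do exactly this):
//
//   await database.bufferChanges();
//   // ...perform many inserts/updates/removes...
//   await database.flushChanges();
//
// While buffering, every change is queued and listeners are notified once, in a single
// batch. bufferChanges() also schedules an automatic flush after `millis` as a safety net.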

// ~~~~~~~ //
// Helpers //
// ~~~~~~~ //

export const getMostRecentlyModified = (database.getMostRecentlyModified = async function(
  type: string,
  query: Object = {}
): Promise<BaseModel | null> {
  if (db._empty) return _send('getMostRecentlyModified', ...arguments);
  const docs = await database.findMostRecentlyModified(type, query, 1);
  return docs.length ? docs[0] : null;
});

export const findMostRecentlyModified = (database.findMostRecentlyModified = async function(
  type: string,
  query: Object = {},
  limit: number | null = null
): Promise<Array<BaseModel>> {
  if (db._empty) return _send('findMostRecentlyModified', ...arguments);

  return new Promise(resolve => {
    db[type]
      .find(query)
      .sort({ modified: -1 })
      .limit(limit)
      .exec(async (err, rawDocs) => {
        if (err) {
          console.warn('[db] Failed to find docs', err);
          resolve([]);
          return;
        }

        const docs = [];
        for (const rawDoc of rawDocs) {
          docs.push(await models.initModel(type, rawDoc));
        }

        resolve(docs);
      });
  });
});

export const find = (database.find = async function<T: BaseModel>(
  type: string,
  query: Object = {},
  sort: Object = { created: 1 }
): Promise<Array<T>> {
  if (db._empty) return _send('find', ...arguments);

  return new Promise((resolve, reject) => {
    db[type]
      .find(query)
      .sort(sort)
      .exec(async (err, rawDocs) => {
        if (err) {
          return reject(err);
        }

        const docs = [];
        for (const rawDoc of rawDocs) {
          docs.push(await models.initModel(type, rawDoc));
        }

        resolve(docs);
      });
  });
});
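
// Usage sketch (hypothetical identifiers): fetch every request inside a folder, oldest first.
//
//   const requests = await database.find(models.request.type, { parentId: folder._id });
//
// `models.request.type` and `folder` are illustrative assumptions; any NeDB query object
// works here, and the default sort is { created: 1 }.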

export const all = (database.all = async function<T: BaseModel>(type: string): Promise<Array<T>> {
  if (db._empty) return _send('all', ...arguments);
  return database.find(type);
});

export const getWhere = (database.getWhere = async function<T: BaseModel>(
  type: string,
  query: Object
): Promise<T | null> {
  if (db._empty) return _send('getWhere', ...arguments);
  const docs = await database.find(type, query);
  return docs.length ? docs[0] : null;
});

export const get = (database.get = async function<T: BaseModel>(
  type: string,
  id: string
): Promise<T | null> {
  if (db._empty) return _send('get', ...arguments);

  // Short circuit IDs used to represent nothing
  if (!id || id === 'n/a') {
    return null;
  } else {
    return database.getWhere(type, { _id: id });
  }
});

export const count = (database.count = async function(
  type: string,
  query: Object = {}
): Promise<number> {
  if (db._empty) return _send('count', ...arguments);
  return new Promise((resolve, reject) => {
    db[type].count(query, (err, count) => {
      if (err) {
        return reject(err);
      }

      resolve(count);
    });
  });
});

export const upsert = (database.upsert = async function(
  doc: BaseModel,
  fromSync: boolean = false
): Promise<BaseModel> {
  if (db._empty) return _send('upsert', ...arguments);
  const existingDoc = await database.get(doc.type, doc._id);
  if (existingDoc) {
    return database.update(doc, fromSync);
  } else {
    return database.insert(doc, fromSync);
  }
});

export const insert = (database.insert = async function<T: BaseModel>(
  doc: T,
  fromSync: boolean = false
): Promise<T> {
  if (db._empty) return _send('insert', ...arguments);

  return new Promise(async (resolve, reject) => {
    const docWithDefaults = await models.initModel(doc.type, doc);
    db[doc.type].insert(docWithDefaults, (err, newDoc) => {
      if (err) {
        return reject(err);
      }

      resolve(newDoc);

      // NOTE: This needs to be after we resolve
      notifyOfChange(CHANGE_INSERT, newDoc, fromSync);
    });
  });
});

export const update = (database.update = async function<T: BaseModel>(
  doc: T,
  fromSync: boolean = false
): Promise<T> {
  if (db._empty) return _send('update', ...arguments);

  return new Promise(async (resolve, reject) => {
    const docWithDefaults = await models.initModel(doc.type, doc);
    db[doc.type].update({ _id: docWithDefaults._id }, docWithDefaults, err => {
      if (err) {
        return reject(err);
      }

      resolve(docWithDefaults);

      // NOTE: This needs to be after we resolve
      notifyOfChange(CHANGE_UPDATE, docWithDefaults, fromSync);
    });
  });
});
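
// Note on ordering: notifyOfChange() is deliberately called after resolve() in both
// insert() and update(). Presumably this keeps change listeners (which may themselves
// read or write the DB via onChange() subscribers) from delaying the original caller.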

export const remove = (database.remove = async function<T: BaseModel>(
  doc: T,
  fromSync: boolean = false
): Promise<void> {
  if (db._empty) return _send('remove', ...arguments);

  await database.bufferChanges();

  const docs = await database.withDescendants(doc);
  const docIds = docs.map(d => d._id);
  const types = [...new Set(docs.map(d => d.type))];

  // Don't really need to wait for this to finish
  types.map(t => db[t].remove({ _id: { $in: docIds } }, { multi: true }));

  docs.map(d => notifyOfChange(CHANGE_REMOVE, d, fromSync));
  await database.flushChanges();
});

export const removeWhere = (database.removeWhere = async function(
  type: string,
  query: Object
): Promise<void> {
  if (db._empty) return _send('removeWhere', ...arguments);

  await database.bufferChanges();

  for (const doc of await database.find(type, query)) {
    const docs = await database.withDescendants(doc);
    const docIds = docs.map(d => d._id);
    const types = [...new Set(docs.map(d => d.type))];

    // Don't really need to wait for this to finish
    types.map(t => db[t].remove({ _id: { $in: docIds } }, { multi: true }));

    docs.map(d => notifyOfChange(CHANGE_REMOVE, d, false));
  }

  await database.flushChanges();
});
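
// Note: remove() and removeWhere() delete the whole subtree: withDescendants() collects
// every child of the document (and their children, recursively), and all of them are
// removed and announced as CHANGE_REMOVE in one buffered batch.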

// ~~~~~~~~~~~~~~~~~~~ //
// DEFAULT MODEL STUFF //
// ~~~~~~~~~~~~~~~~~~~ //

export async function docUpdate<T: BaseModel>(
  originalDoc: T,
  ...patches: Array<Object>
): Promise<T> {
  const doc = await models.initModel(
    originalDoc.type,
    originalDoc,

    // NOTE: This is before `patch` because we want `patch.modified` to win if it has it
    { modified: Date.now() },
    ...patches
  );

  return database.update(doc);
}

export async function docCreate<T: BaseModel>(type: string, ...patches: Array<Object>): Promise<T> {
  const doc = await models.initModel(
    type,
    ...patches,

    // Fields that the user can't touch
    { type: type }
  );

  return database.insert(doc);
}
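
// Usage sketch (hypothetical values and field names, shown for illustration only):
//
//   const req = await docCreate(models.request.type, { parentId: workspace._id, name: 'New Request' });
//   const updated = await docUpdate(req, { method: 'POST' });
//
// Later patches win over earlier ones, except that docCreate() always forces `type` and
// docUpdate() bumps `modified` unless the patch provides its own value.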

// ~~~~~~~ //
// GENERAL //
// ~~~~~~~ //

export const withDescendants = (database.withDescendants = async function(
  doc: BaseModel | null,
  stopType: string | null = null
): Promise<Array<BaseModel>> {
  if (db._empty) return _send('withDescendants', ...arguments);

  let docsToReturn = doc ? [doc] : [];

  async function next(docs: Array<BaseModel | null>): Promise<Array<BaseModel>> {
    let foundDocs = [];

    for (const d of docs) {
      if (stopType && d && d.type === stopType) {
        continue;
      }

      for (const type of allTypes()) {
        // If the doc is null, we want to search for parentId === null
        const parentId = d ? d._id : null;
        const more = await database.find(type, { parentId });
        foundDocs = [...foundDocs, ...more];
      }
    }

    if (foundDocs.length === 0) {
      // Didn't find anything. We're done
      return docsToReturn;
    }

    // Continue searching for children
    docsToReturn = [...docsToReturn, ...foundDocs];
    return next(foundDocs);
  }

  return next([doc]);
});

export const withAncestors = (database.withAncestors = async function(
  doc: BaseModel | null,
  types: Array<string> = allTypes()
): Promise<Array<BaseModel>> {
  if (db._empty) return _send('withAncestors', ...arguments);

  if (!doc) {
    return [];
  }

  let docsToReturn = doc ? [doc] : [];

  async function next(docs: Array<BaseModel>): Promise<Array<BaseModel>> {
    let foundDocs = [];

    for (const d: BaseModel of docs) {
      for (const type of types) {
        // Look up the parent of this doc for each candidate type (if it exists)
        const another = await database.get(type, d.parentId);
        another && foundDocs.push(another);
      }
    }

    if (foundDocs.length === 0) {
      // Didn't find anything. We're done
      return docsToReturn;
    }

    // Continue searching for ancestors
    docsToReturn = [...docsToReturn, ...foundDocs];
    return next(foundDocs);
  }

  return next([doc]);
});
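
// Both traversals walk the parentId tree: withDescendants() searches every collection for
// documents whose parentId points at the current set (optionally stopping at stopType),
// while withAncestors() follows each doc's own parentId upwards, restricted to the given
// types. Both recurse until a pass finds nothing new.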

export const duplicate = (database.duplicate = async function<T: BaseModel>(
  originalDoc: T,
  patch: Object = {}
): Promise<T> {
  if (db._empty) return _send('duplicate', ...arguments);

  await database.bufferChanges();

  async function next<T: BaseModel>(docToCopy: T, patch: Object): Promise<T> {
    // 1. Copy the doc
    const newDoc = Object.assign({}, docToCopy, patch);
    delete newDoc._id;
    delete newDoc.created;
    delete newDoc.modified;

    const createdDoc = await docCreate(newDoc.type, newDoc);

    // 2. Get all the children
    for (const type of allTypes()) {
      // Note: We never want to duplicate a response
      if (!models.canDuplicate(type)) {
        continue;
      }

      const parentId = docToCopy._id;
      const children = await database.find(type, { parentId });
      for (const doc of children) {
        await next(doc, { parentId: createdDoc._id });
      }
    }

    return createdDoc;
  }

  const createdDoc = await next(originalDoc, patch);

  await database.flushChanges();

  return createdDoc;
});
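
// Note: duplicate() rebuilds each copy through docCreate(), so every duplicated document
// gets a fresh _id/created/modified, and children are re-parented onto the new copy as
// the tree is walked. All resulting change events are buffered and flushed as one batch.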

// ~~~~~~~ //
// Helpers //
// ~~~~~~~ //

async function _send<T>(fnName: string, ...args: Array<any>): Promise<T> {
  return new Promise((resolve, reject) => {
    const replyChannel = `db.fn.reply:${uuid.v4()}`;
    electron.ipcRenderer.send('db.fn', fnName, replyChannel, ...args);
    electron.ipcRenderer.once(replyChannel, (e, result) => {
      resolve(result);
    });
  });
}
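
// This is the renderer half of the 'db.fn' IPC bridge registered in init(): each call gets
// a unique reply channel so concurrent calls can't collide. Note that `reject` is never
// used; if the main-process handler throws, the returned promise simply never settles.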

/**
 * Run various database repair scripts
 */
export async function _repairDatabase() {
  console.log(`[fix] Running database repairs`);
  for (const workspace of await find(models.workspace.type)) {
    await _repairBaseEnvironments(workspace);
    await _fixMultipleCookieJars(workspace);
  }
}

/**
 * This function repairs workspaces that have multiple base environments. Since a workspace
 * can only have one, this function walks over all base environments, merges the data, and
 * moves all children as well.
 */
async function _repairBaseEnvironments(workspace) {
  const baseEnvironments = await find(models.environment.type, {
    parentId: workspace._id
  });

  // Nothing to do here
  if (baseEnvironments.length <= 1) {
    return;
  }

  const chosenBase = baseEnvironments[0];
  for (const baseEnvironment of baseEnvironments) {
    if (baseEnvironment._id === chosenBase._id) {
      continue;
    }

    chosenBase.data = Object.assign(baseEnvironment.data, chosenBase.data);
    const subEnvironments = await find(models.environment.type, {
      parentId: baseEnvironment._id
    });

    for (const subEnvironment of subEnvironments) {
      await docUpdate(subEnvironment, { parentId: chosenBase._id });
    }

    // Remove unnecessary base env
    await remove(baseEnvironment);
  }

  // Update remaining base env
  await update(chosenBase);

  console.log(`[fix] Merged ${baseEnvironments.length} base environments under ${workspace.name}`);
}

/**
 * This function repairs workspaces that have multiple cookie jars. Since a workspace
 * can only have one, this function walks over all jars and merges them and their cookies
 * together.
 */
async function _fixMultipleCookieJars(workspace) {
  const cookieJars = await find(models.cookieJar.type, {
    parentId: workspace._id
  });

  // Nothing to do here
  if (cookieJars.length <= 1) {
    return;
  }

  const chosenJar = cookieJars[0];
  for (const cookieJar of cookieJars) {
    if (cookieJar._id === chosenJar._id) {
      continue;
    }

    for (const cookie of cookieJar.cookies) {
      if (chosenJar.cookies.find(c => c.id === cookie.id)) {
        continue;
      }

      chosenJar.cookies.push(cookie);
    }

    // Remove unnecessary jar
    await remove(cookieJar);
  }

  // Update remaining jar
  await update(chosenJar);

  console.log(`[fix] Merged ${cookieJars.length} cookie jars under ${workspace.name}`);
}