Repair envs and jars, and create missing ones (#616)

* Repair envs and jars, and create missing ones

* Hack to make tests work
This commit is contained in:
Gregory Schier 2017-11-22 14:00:12 +00:00 committed by GitHub
parent 1173001e46
commit ba26ec3163
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 1371 additions and 405 deletions

View File

@ -16,6 +16,8 @@ const localStorageMock = (function () {
};
})();
// Don't console log during testing. It's annoying
global.console.log = () => null;
global.localStorage = localStorageMock;
global.requestAnimationFrame = cb => process.nextTick(cb);
global.require = require;

View File

@ -68,7 +68,7 @@ describe('bufferChanges()', () => {
};
db.onChange(callback);
db.bufferChanges();
await db.bufferChanges();
const newDoc = await models.request.create(doc);
const updatedDoc = await models.request.update(newDoc, true);
@ -76,14 +76,14 @@ describe('bufferChanges()', () => {
expect(changesSeen.length).toBe(0);
// Assert changes seen after flush
db.flushChanges();
await db.flushChanges();
expect(changesSeen).toEqual([[
[db.CHANGE_INSERT, newDoc, false],
[db.CHANGE_UPDATE, updatedDoc, false]
]]);
// Assert no more changes seen after flush again
db.flushChanges();
await db.flushChanges();
expect(changesSeen).toEqual([[
[db.CHANGE_INSERT, newDoc, false],
[db.CHANGE_UPDATE, updatedDoc, false]
@ -157,3 +157,130 @@ describe('requestGroupDuplicate()', () => {
expect(newChildRequestGroups.length).toBe(1);
});
});
// Tests for the database repair routines (db._repairDatabase), which merge
// duplicate base environments and duplicate cookie jars under a workspace.
// NOTE: indentation in this span was flattened by the diff rendering.
describe('_fixThings()', () => {
beforeEach(globalBeforeEach);
it('fixes duplicate environments', async () => {
// Create Workspace with no children
const workspace = await models.workspace.create({_id: 'w1'});
expect((await db.withDescendants(workspace)).length).toBe(1);
// Create one set of sub environments
await models.environment.create({_id: 'b1', parentId: 'w1', data: {foo: 'b1', b1: true}});
await models.environment.create({_id: 'b1_sub1', parentId: 'b1', data: {foo: '1'}});
await models.environment.create({_id: 'b1_sub2', parentId: 'b1', data: {foo: '2'}});
// Create second set of sub environments
await models.environment.create({_id: 'b2', parentId: 'w1', data: {foo: 'b2', b2: true}});
await models.environment.create({_id: 'b2_sub1', parentId: 'b2', data: {foo: '3'}});
await models.environment.create({_id: 'b2_sub2', parentId: 'b2', data: {foo: '4'}});
// Make sure we have everything
expect((await db.withDescendants(workspace)).length).toBe(7);
const descendants = (await db.withDescendants(workspace)).map(d => ({
_id: d._id,
parentId: d.parentId,
data: d.data || null
}));
expect(descendants).toEqual([
{_id: 'w1', data: null, parentId: null},
{_id: 'b1', data: {foo: 'b1', b1: true}, parentId: 'w1'},
{_id: 'b2', data: {foo: 'b2', b2: true}, parentId: 'w1'},
{_id: 'b1_sub1', data: {foo: '1'}, parentId: 'b1'},
{_id: 'b1_sub2', data: {foo: '2'}, parentId: 'b1'},
{_id: 'b2_sub1', data: {foo: '3'}, parentId: 'b2'},
{_id: 'b2_sub2', data: {foo: '4'}, parentId: 'b2'}
]);
// Run the fix algorithm
await db._repairDatabase();
// Make sure things get adjusted
const descendants2 = (await db.withDescendants(workspace)).map(
d => ({_id: d._id, parentId: d.parentId, data: d.data || null})
);
// b1 is kept as the surviving base environment; b2's data is merged into it
// (b1's keys win on conflict — note `foo` stays 'b1') and b2's children are
// re-parented under b1.
expect(descendants2).toEqual([
{_id: 'w1', data: null, parentId: null},
{_id: 'b1', data: {foo: 'b1', b1: true, b2: true}, parentId: 'w1'},
// Extra base environments should have been deleted
// {_id: 'b2', data: {foo: 'bar'}, parentId: 'w1'},
// Sub environments should have been moved to new "master" base environment
{_id: 'b1_sub1', data: {foo: '1'}, parentId: 'b1'},
{_id: 'b1_sub2', data: {foo: '2'}, parentId: 'b1'},
{_id: 'b2_sub1', data: {foo: '3'}, parentId: 'b1'},
{_id: 'b2_sub2', data: {foo: '4'}, parentId: 'b1'}
]);
});
it('fixes duplicate cookie jars', async () => {
// Create Workspace with no children
const workspace = await models.workspace.create({_id: 'w1'});
expect((await db.withDescendants(workspace)).length).toBe(1);
// Create one set of sub environments
await models.cookieJar.create({
_id: 'j1',
parentId: 'w1',
cookies: [
{id: '1', key: 'foo', value: '1'},
{id: 'j1_1', key: 'j1', value: '1'}
]
});
await models.cookieJar.create({
_id: 'j2',
parentId: 'w1',
cookies: [
{id: '1', key: 'foo', value: '2'},
{id: 'j2_1', key: 'j2', value: '2'}
]
});
// Make sure we have everything
expect((await db.withDescendants(workspace)).length).toBe(3);
const descendants = (await db.withDescendants(workspace)).map(
d => ({_id: d._id, cookies: d.cookies || null, parentId: d.parentId})
);
expect(descendants).toEqual([
{_id: 'w1', cookies: null, parentId: null},
{
_id: 'j1',
parentId: 'w1',
cookies: [
{id: '1', key: 'foo', value: '1'},
{id: 'j1_1', key: 'j1', value: '1'}
]
},
{
_id: 'j2',
parentId: 'w1',
cookies: [
{id: '1', key: 'foo', value: '2'},
{id: 'j2_1', key: 'j2', value: '2'}
]
}
]);
// Run the fix algorithm
await db._repairDatabase();
// Make sure things get adjusted
const descendants2 = (await db.withDescendants(workspace)).map(
d => ({_id: d._id, cookies: d.cookies || null, parentId: d.parentId})
);
// j1 survives; cookies are merged by id, so j1's cookie with id '1' wins
// over j2's duplicate, while j2's unique cookie (j2_1) is appended.
expect(descendants2).toEqual([
{_id: 'w1', cookies: null, parentId: null},
{
_id: 'j1',
parentId: 'w1',
cookies: [
{id: '1', key: 'foo', value: '1'},
{id: 'j1_1', key: 'j1', value: '1'},
{id: 'j2_1', key: 'j2', value: '2'}
]
}
]);
});
});

View File

@ -1,5 +1,5 @@
import * as models from '../../models';
import * as importUtil from '../../ui/import';
import * as importUtil from '../import';
import {getAppVersion} from '../constants';
import {globalBeforeEach} from '../../__jest__/before-each';

View File

@ -179,13 +179,8 @@ describe('keyedDebounce()', () => {
beforeEach(async () => {
await globalBeforeEach();
jest.useFakeTimers();
// There has to be a better way to reset this...
setTimeout.mock.calls = [];
});
afterEach(() => jest.clearAllTimers());
it('debounces correctly', () => {
const resultsList = [];
const fn = misc.keyedDebounce(results => {
@ -215,13 +210,8 @@ describe('debounce()', () => {
beforeEach(async () => {
await globalBeforeEach();
jest.useFakeTimers();
// There has to be a better way to reset this...
setTimeout.mock.calls = [];
});
afterEach(() => jest.clearAllTimers());
it('debounces correctly', () => {
const resultList = [];
const fn = misc.debounce((...args) => {

View File

@ -59,7 +59,7 @@ export async function init (
// Fill in the defaults
for (const modelType of types) {
if (db[modelType]) {
console.warn(`[db] Already initialized DB.${modelType}`);
console.log(`[db] Already initialized DB.${modelType}`);
continue;
}
@ -81,8 +81,17 @@ export async function init (
e.sender.send(replyChannel, result);
});
// NOTE: Only repair the DB if we're not running in memory. Repairing here causes tests to
// hang indefinitely for some reason.
// TODO: Figure out why this makes tests hang
if (!config.inMemoryOnly) {
await _repairDatabase();
}
if (!config.inMemoryOnly) {
console.log(`[db] Initialized DB at ${getDBFilePath('$TYPE')}`);
}
}
// ~~~~~~~~~~~~~~~~ //
// Change Listeners //
@ -100,16 +109,19 @@ export function offChange (callback: Function): void {
changeListeners = changeListeners.filter(l => l !== callback);
}
export const bufferChanges = database.bufferChanges = function (millis: number = 1000): void {
if (db._empty) {
_send('bufferChanges', ...arguments);
return;
}
/**
 * Start buffering change notifications instead of emitting them immediately.
 * Buffered changes are emitted as a batch when flushChanges() runs; a flush
 * is also scheduled automatically after `millis` milliseconds.
 *
 * @param {number} [millis=1000] - Max time to buffer before auto-flushing.
 * @returns {Promise<void>}
 */
export const bufferChanges = database.bufferChanges = async function (millis: number = 1000): Promise<void> {
// NOTE(review): when db._empty, the call appears to be forwarded elsewhere
// (presumably to the main process over IPC via _send) — confirm against _send.
if (db._empty) return _send('bufferChanges', ...arguments);
bufferingChanges = true;
// Auto-flush so a missing explicit flushChanges() can't buffer forever.
setTimeout(database.flushChanges, millis);
};
/**
 * Schedule a flushChanges() on the next tick of the event loop.
 *
 * The returned promise resolves as soon as the flush is SCHEDULED, not when
 * it completes — callers that must observe the flushed changes should await
 * flushChanges() directly instead.
 *
 * @returns {Promise<void>}
 */
export const flushChangesAsync = database.flushChangesAsync = async function (): Promise<void> {
process.nextTick(async () => {
// NOTE(review): rejection of this inner promise is not handled here.
await flushChanges();
});
};
export const flushChanges = database.flushChanges = async function (): Promise<void> {
if (db._empty) return _send('flushChanges', ...arguments);
@ -314,7 +326,7 @@ export const remove = database.remove = async function <T: BaseModel> (
): Promise<void> {
if (db._empty) return _send('remove', ...arguments);
database.bufferChanges();
await database.bufferChanges();
const docs = await database.withDescendants(doc);
const docIds = docs.map(d => d._id);
@ -325,7 +337,7 @@ export const remove = database.remove = async function <T: BaseModel> (
docs.map(d => notifyOfChange(CHANGE_REMOVE, d, fromSync));
database.flushChanges();
await database.flushChanges();
};
export const removeWhere = database.removeWhere = async function (
@ -334,7 +346,7 @@ export const removeWhere = database.removeWhere = async function (
): Promise<void> {
if (db._empty) return _send('removeWhere', ...arguments);
database.bufferChanges();
await database.bufferChanges();
for (const doc of await database.find(type, query)) {
const docs = await database.withDescendants(doc);
@ -347,7 +359,7 @@ export const removeWhere = database.removeWhere = async function (
docs.map(d => notifyOfChange(CHANGE_REMOVE, d, false));
}
database.flushChanges();
await database.flushChanges();
};
// ~~~~~~~~~~~~~~~~~~~ //
@ -481,7 +493,7 @@ export const duplicate = database.duplicate = async function <T: BaseModel> (
): Promise<T> {
if (db._empty) return _send('duplicate', ...arguments);
database.bufferChanges();
await database.bufferChanges();
async function next<T: BaseModel> (docToCopy: T, patch: Object): Promise<T> {
// 1. Copy the doc
@ -511,7 +523,7 @@ export const duplicate = database.duplicate = async function <T: BaseModel> (
const createdDoc = await next(originalDoc, patch);
database.flushChanges();
await database.flushChanges();
return createdDoc;
};
@ -529,3 +541,87 @@ async function _send<T> (fnName: string, ...args: Array<any>): Promise<T> {
});
});
}
/**
 * Run various database repair scripts.
 *
 * Walks every workspace and applies each known repair in order:
 *  1. merge duplicate base environments into a single one
 *  2. merge duplicate cookie jars into a single one
 *
 * @returns {Promise<void>}
 */
export async function _repairDatabase () {
console.log(`[fix] Running database repairs`);
for (const workspace of await find(models.workspace.type)) {
await _repairBaseEnvironments(workspace);
await _fixMultipleCookieJars(workspace);
}
}
/**
 * This function repairs workspaces that have multiple base environments. Since a workspace
 * can only have one, this function walks over all base environments, merges the data, and
 * moves all children as well.
 *
 * The first base environment found is kept; its data keys take precedence on
 * conflict. All other base environments are deleted after their sub
 * environments are re-parented onto the survivor.
 *
 * @param {Object} workspace - Workspace whose base environments to repair.
 * @returns {Promise<void>}
 */
async function _repairBaseEnvironments (workspace) {
const baseEnvironments = await find(models.environment.type, {parentId: workspace._id});
// Nothing to do here
if (baseEnvironments.length <= 1) {
return;
}
const chosenBase = baseEnvironments[0];
for (const baseEnvironment of baseEnvironments) {
if (baseEnvironment._id === chosenBase._id) {
continue;
}
// Merge data with chosenBase winning on conflicting keys.
// NOTE(review): Object.assign mutates baseEnvironment.data in place —
// presumably harmless since that environment is removed just below.
chosenBase.data = Object.assign(baseEnvironment.data, chosenBase.data);
// Move the duplicate's sub environments under the surviving base.
const subEnvironments = await find(models.environment.type, {parentId: baseEnvironment._id});
for (const subEnvironment of subEnvironments) {
await docUpdate(subEnvironment, {parentId: chosenBase._id});
}
// Remove unnecessary base env
await remove(baseEnvironment);
}
// Update remaining base env
await update(chosenBase);
console.log(`[fix] Merged ${baseEnvironments.length} base environments under ${workspace.name}`);
}
/**
 * This function repairs workspaces that have multiple cookie jars. Since a workspace
 * can only have one, this function walks over all jars and merges them and their cookies
 * together.
 *
 * The first jar found is kept. Cookies are deduplicated by their `id` field:
 * when a duplicate jar has a cookie whose id already exists in the surviving
 * jar, the surviving jar's cookie wins and the duplicate is dropped.
 *
 * @param {Object} workspace - Workspace whose cookie jars to repair.
 * @returns {Promise<void>}
 */
async function _fixMultipleCookieJars (workspace) {
const cookieJars = await find(models.cookieJar.type, {parentId: workspace._id});
// Nothing to do here
if (cookieJars.length <= 1) {
return;
}
const chosenJar = cookieJars[0];
for (const cookieJar of cookieJars) {
if (cookieJar._id === chosenJar._id) {
continue;
}
for (const cookie of cookieJar.cookies) {
// Skip cookies whose id already exists in the surviving jar.
if (chosenJar.cookies.find(c => c.id === cookie.id)) {
continue;
}
chosenJar.cookies.push(cookie);
}
// Remove unnecessary jar
await remove(cookieJar);
}
// Update remaining jar
await update(chosenJar);
console.log(`[fix] Merged ${cookieJars.length} cookie jars under ${workspace.name}`);
}

View File

@ -1,14 +1,14 @@
import {convert} from 'insomnia-importers';
import * as db from '../common/database';
import * as har from '../common/har';
import * as db from './database';
import * as har from './har';
import * as models from '../models/index';
import {getAppVersion} from '../common/constants';
import * as misc from '../common/misc';
import {showModal} from './components/modals/index';
import AlertModal from './components/modals/alert-modal';
import * as fetch from '../common/fetch';
import {getAppVersion} from './constants';
import * as misc from './misc';
import {showModal} from '../ui/components/modals/index';
import AlertModal from '../ui/components/modals/alert-modal';
import * as fetch from './fetch';
import fs from 'fs';
import {trackEvent} from '../common/analytics';
import {trackEvent} from './analytics';
const EXPORT_FORMAT = 3;
@ -107,7 +107,7 @@ export async function importRaw (workspace, rawContent, generateNewIds = false)
for (const resource of data.resources) {
// Buffer DB changes
// NOTE: Doing it inside here so it's more "scalable"
db.bufferChanges(100);
await db.bufferChanges(100);
// Replace null parentIds with current workspace
if (!resource.parentId) {
@ -147,7 +147,7 @@ export async function importRaw (workspace, rawContent, generateNewIds = false)
importedDocs[newDoc.type].push(newDoc);
}
db.flushChanges();
await db.flushChanges();
return {
source: results.type.id,

View File

@ -25,25 +25,27 @@ describe('migrate()', () => {
expect(certs.length).toBe(2);
expect(certs.sort((c1, c2) => c1._id > c2._id ? -1 : 1)).toEqual([{
_id: 'crt_a262d22b5fa8491c9bd958fba03e301e',
cert: null,
disabled: false,
isPrivate: false,
key: 'key',
parentId: 'wrk_cc1dd2ca4275747aa88199e8efd42403',
passphrase: 'mypass',
pfx: null,
type: 'ClientCertificate'
}, {
_id: 'crt_2e7c268809ee44b8900d5cbbaa7d3a19',
_id: 'crt_e3e96e5fdd6842298b66dee1f0940f3d',
cert: 'cert',
disabled: false,
isPrivate: false,
host: '',
key: null,
parentId: 'wrk_cc1dd2ca4275747aa88199e8efd42403',
passphrase: null,
pfx: null,
type: 'ClientCertificate'
}, {
_id: 'crt_dd2ccc1a2745477a881a9e8ef9d42403',
cert: null,
disabled: false,
isPrivate: false,
host: '',
key: 'key',
parentId: 'wrk_cc1dd2ca4275747aa88199e8efd42403',
passphrase: 'mypass',
pfx: null,
type: 'ClientCertificate'
}]);
expect(migratedWorkspace.certificates).toBeUndefined();

View File

@ -30,10 +30,7 @@ export async function migrate (doc: Workspace): Promise<Workspace> {
process.nextTick(() => update(doc, {parentId: null}));
}
await _ensureDependencies(doc);
doc = await _migrateExtractClientCertificates(doc);
return doc;
return _migrateExtractClientCertificates(doc);
}
export function getById (id: string): Promise<Workspace | null> {
@ -41,9 +38,7 @@ export function getById (id: string): Promise<Workspace | null> {
}
export async function create (patch: Object = {}): Promise<Workspace> {
const doc = await db.docCreate(type, patch);
await _ensureDependencies(doc);
return doc;
return db.docCreate(type, patch);
}
export async function all (): Promise<Array<Workspace>> {
@ -69,11 +64,6 @@ export function remove (workspace: Workspace): Promise<void> {
return db.remove(workspace);
}
async function _ensureDependencies (workspace: Workspace) {
await models.cookieJar.getOrCreateForParentId(workspace._id);
await models.environment.getOrCreateForWorkspaceId(workspace._id);
}
async function _migrateExtractClientCertificates (workspace: Workspace): Promise<Workspace> {
const certificates = (workspace: Object).certificates || null;
if (!Array.isArray(certificates)) {
@ -84,7 +74,7 @@ async function _migrateExtractClientCertificates (workspace: Workspace): Promise
for (const cert of certificates) {
await models.clientCertificate.create({
parentId: workspace._id,
host: cert.host,
host: cert.host || '',
passphrase: cert.passphrase || null,
cert: cert.cert || null,
key: cert.key || null,

View File

@ -257,8 +257,8 @@ describe('Integration tests for creating Resources and pushing', () => {
// Assert that all our new models were created
expect((await models.workspace.all()).length).toBe(2);
expect((await models.request.all()).length).toBe(3);
expect((await models.environment.all()).length).toBe(3);
expect((await models.cookieJar.all()).length).toBe(2);
expect((await models.environment.all()).length).toBe(1);
expect((await models.cookieJar.all()).length).toBe(0);
// Assert that initializing sync will create the initial resources
expect((await syncStorage.allConfigs()).length).toBe(0);
@ -267,7 +267,7 @@ describe('Integration tests for creating Resources and pushing', () => {
jest.runOnlyPendingTimers();
await promise;
expect((await syncStorage.allConfigs()).length).toBe(2);
expect((await syncStorage.allResources()).length).toBe(9);
expect((await syncStorage.allResources()).length).toBe(5);
// Mark all configs as auto sync
const configs = await syncStorage.allConfigs();
@ -286,7 +286,7 @@ describe('Integration tests for creating Resources and pushing', () => {
it('Resources created on DB change', async () => {
// Fetch the workspace and create a new request
db.bufferChanges();
await db.bufferChanges();
await models.request.create({
_id: 'req_t',
url: 'https://google.com',
@ -302,19 +302,19 @@ describe('Integration tests for creating Resources and pushing', () => {
// Assert
expect((await syncStorage.allConfigs()).length).toBe(2);
expect((await syncStorage.allResources()).length).toBe(10);
expect((await syncStorage.allResources()).length).toBe(6);
expect(_decryptResource(resource).url).toBe('https://google.com');
expect(resource.removed).toBe(false);
expect(session.syncPush.mock.calls.length).toBe(1);
expect(session.syncPush.mock.calls[0][0].length).toBe(10);
expect(session.syncPush.mock.calls[0][0].length).toBe(6);
expect(session.syncPull.mock.calls).toEqual([]);
});
it('Resources revived on DB change', async () => {
// Fetch the workspace and create a new request
db.bufferChanges();
await db.bufferChanges();
const request = await models.request.create({
_id: 'req_t',
name: 'Original Request',
@ -332,7 +332,7 @@ describe('Integration tests for creating Resources and pushing', () => {
);
// Update it and push it again
db.bufferChanges();
await db.bufferChanges();
await models.request.update(request, {name: 'New Name'});
await db.flushChanges();
await sync.writePendingChanges();
@ -349,7 +349,7 @@ describe('Integration tests for creating Resources and pushing', () => {
// Create, update a request, and fetch it's resource
const request = await models.request.getById('req_1');
const resource = await syncStorage.getResourceByDocId(request._id);
db.bufferChanges();
await db.bufferChanges();
const updatedRequest = await models.request.update(request, {name: 'New Name'});
// Drain and fetch new resource
@ -366,7 +366,7 @@ describe('Integration tests for creating Resources and pushing', () => {
expect(resource.removed).toBe(false);
expect(session.syncPush.mock.calls.length).toBe(1);
expect(session.syncPush.mock.calls[0][0].length).toBe(9);
expect(session.syncPush.mock.calls[0][0].length).toBe(5);
expect(session.syncPull.mock.calls).toEqual([]);
});
@ -375,7 +375,7 @@ describe('Integration tests for creating Resources and pushing', () => {
// Create, update a request, and fetch it's resource
const request = await models.request.getById('req_1');
const resource = await syncStorage.getResourceByDocId(request._id);
db.bufferChanges();
await db.bufferChanges();
await models.request.remove(request);
// Drain and fetch new resource
@ -389,7 +389,7 @@ describe('Integration tests for creating Resources and pushing', () => {
expect(updatedResource.removed).toBe(true);
expect(session.syncPush.mock.calls.length).toBe(1);
expect(session.syncPush.mock.calls[0][0].length).toBe(9);
expect(session.syncPush.mock.calls[0][0].length).toBe(5);
expect(session.syncPull.mock.calls).toEqual([]);
});

View File

@ -248,7 +248,7 @@ export async function push (resourceGroupId = null) {
}
// Resolve conflicts
db.bufferChanges();
await db.bufferChanges();
for (const serverResource of conflicts) {
const localResource = await store.getResourceByDocId(
serverResource.id,
@ -283,7 +283,7 @@ export async function push (resourceGroupId = null) {
}
}
db.flushChanges();
db.flushChangesAsync();
}
export async function pull (resourceGroupId = null, createMissingResources = true) {
@ -347,7 +347,7 @@ export async function pull (resourceGroupId = null, createMissingResources = tru
// Insert all the created docs to the DB //
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
db.bufferChanges();
await db.bufferChanges();
for (const serverResource of createdResources) {
let doc;
@ -390,13 +390,13 @@ export async function pull (resourceGroupId = null, createMissingResources = tru
logger.debug(`Pull created ${createdResources.length} resources`);
}
db.flushChanges();
db.flushChangesAsync();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
// Save all the updated docs to the DB //
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
db.bufferChanges();
await db.bufferChanges();
for (const serverResource of updatedResources) {
try {
const {resourceGroupId, encContent} = serverResource;
@ -416,7 +416,7 @@ export async function pull (resourceGroupId = null, createMissingResources = tru
logger.warn('Failed to decode updated resource', e, serverResource);
}
}
db.flushChanges();
db.flushChangesAsync();
if (updatedResources.length) {
logger.debug(`Pull updated ${updatedResources.length} resources`);
@ -426,7 +426,7 @@ export async function pull (resourceGroupId = null, createMissingResources = tru
// Remove all the docs that need removing //
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
db.bufferChanges();
await db.bufferChanges();
for (const id of idsToRemove) {
const resource = await store.getResourceByDocId(id);
if (!resource) {
@ -444,7 +444,7 @@ export async function pull (resourceGroupId = null, createMissingResources = tru
// Remove from DB
await db.remove(doc, true);
}
db.flushChanges();
db.flushChangesAsync();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
// Push all the docs that need pushing //

View File

@ -154,7 +154,7 @@ export function getPrivateKey () {
}
export function getCurrentSessionId () {
if (window) {
if (global.window) { // NOTE: Must check for window on global
return window.localStorage.getItem('currentSessionId');
} else {
return false;

View File

@ -56,6 +56,8 @@ class App extends PureComponent {
paneHeight: props.paneHeight || DEFAULT_PANE_HEIGHT
};
this._isMigratingChildren = false;
this._getRenderContextPromiseCache = {};
this._savePaneWidth = debounce(paneWidth => this._updateActiveWorkspaceMeta({paneWidth}));
@ -778,7 +780,54 @@ class App extends PureComponent {
document.removeEventListener('mousemove', this._handleMouseMove);
}
/**
 * Ensure the active workspace has its required children (a base environment
 * and a cookie jar), creating any that are missing. Sets
 * this._isMigratingChildren while creating so render() can bail out until
 * the migration finishes.
 *
 * @param {Object} props - Props to inspect (current or incoming).
 * @returns {Promise<void>}
 */
async _ensureWorkspaceChildren (props) {
const {activeWorkspace, activeCookieJar, environments} = props;
const baseEnvironments = environments.filter(e => e.parentId === activeWorkspace._id);
// Nothing to do
if (baseEnvironments.length && activeCookieJar) {
return;
}
// We already started migrating. Let it finish.
if (this._isMigratingChildren) {
return;
}
// Prevent rendering of everything
this._isMigratingChildren = true;
await db.bufferChanges();
if (baseEnvironments.length === 0) {
await models.environment.create({parentId: activeWorkspace._id});
console.log(`[app] Created missing base environment for ${activeWorkspace.name}`);
}
if (!activeCookieJar) {
// NOTE(review): this reads this.props.activeWorkspace rather than the
// `activeWorkspace` destructured from the passed-in props — when invoked
// from componentWillReceiveProps(nextProps), this.props may be stale.
// Confirm the two are always the same workspace.
await models.cookieJar.create({parentId: this.props.activeWorkspace._id});
console.log(`[app] Created missing cookie jar for ${activeWorkspace.name}`);
}
await db.flushChanges();
// Flush "transaction"
this._isMigratingChildren = false;
}
componentWillReceiveProps (nextProps) {
// Fire-and-forget: the returned promise is intentionally not awaited
// (React lifecycle methods can't be async); render() gates on
// this._isMigratingChildren while the migration is in flight.
this._ensureWorkspaceChildren(nextProps);
}
componentWillMount () {
// Fire-and-forget: see componentWillReceiveProps — render() bails out
// while this._isMigratingChildren is true.
this._ensureWorkspaceChildren(this.props);
}
render () {
if (this._isMigratingChildren) {
console.log('[app] Waiting for migration to complete');
return null;
}
return (
<KeydownBinder onKeydown={this._handleKeyDown}>
<div className="app">
@ -1015,7 +1064,7 @@ async function _moveDoc (docToMove, parentId, targetId, targetOffset) {
// anyway
console.log(`[app] Recreating Sort Keys ${beforeKey} ${afterKey}`);
db.bufferChanges(300);
await db.bufferChanges(300);
docs.map((r, i) => __updateDoc(r, {metaSortKey: i * 100, parentId}));
} else {
const metaSortKey = afterKey - ((afterKey - beforeKey) / 2);

View File

@ -6,7 +6,7 @@ import path from 'path';
import AskModal from '../../../ui/components/modals/ask-modal';
import * as moment from 'moment';
import * as importUtils from '../../import';
import * as importUtils from '../../../common/import';
import {trackEvent} from '../../../common/analytics';
import AlertModal from '../../components/modals/alert-modal';
import PaymentNotificationModal from '../../components/modals/payment-notification-modal';

1354
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -141,7 +141,7 @@
"eslint-plugin-standard": "^3.0.1",
"file-loader": "^1.1.5",
"flow-bin": "^0.58.0",
"jest": "^19.0.2",
"jest": "^21.2.1",
"less": "^2.7.2",
"less-loader": "^4.0.5",
"ncp": "^2.0.0",