import * as db from '../common/database';
import * as models from '../models';
import * as crypt from './crypt';
import * as session from './session';
import * as store from './storage';
import * as misc from '../common/misc';
import Logger from './logger';
import {trackEvent} from '../analytics/index';
import * as zlib from 'zlib';
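
// This module implements the client side of sync: it watches the local
// database for changes to whitelisted model types, queues them, and
// periodically pushes dirty (changed) resources to the server and pulls down
// remote changes. Documents are encrypted with a per-ResourceGroup AES key
// before they leave the machine (see encryptDoc/decryptDoc below).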

export const START_DELAY = 1E3;
export const PULL_PERIOD = 15E3;
export const WRITE_PERIOD = 1E3;
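
// Timing (ms): sync waits START_DELAY after init() before the first
// push/pull; the pull loop ticks every PULL_PERIOD / 5 but is throttled to
// roughly one sync per PULL_PERIOD (plus backoff); queued DB changes are
// flushed every WRITE_PERIOD.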

const WHITE_LIST = {
  [models.workspace.type]: true,
  [models.request.type]: true,
  [models.requestGroup.type]: true,
  [models.environment.type]: true,
  [models.cookieJar.type]: true
};
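
// Only the model types listed above are synced; the change handler registered
// in init() skips everything else.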

export const logger = new Logger();

// TODO: Move this stuff somewhere else
const NO_VERSION = '__NO_VERSION__';
const resourceGroupSymmetricKeysCache = {};
let _pullChangesInterval = null;
let _writeChangesInterval = null;
let _pendingDBChanges = {};
let _isInitialized = false;

export async function init () {
  if (_isInitialized) {
    logger.debug('Already enabled');
    return;
  }

  // NOTE: This is at the top to prevent race conditions
  _isInitialized = true;

  db.onChange(async changes => {
    // To help prevent bugs, put Workspaces first
    const sortedChanges = changes.sort(
      ([, docA], [, docB]) =>
        (docA.type === models.workspace.type ? 0 : 1) -
        (docB.type === models.workspace.type ? 0 : 1)
    );

    for (const [event, doc, fromSync] of sortedChanges) {
      const notOnWhitelist = !WHITE_LIST[doc.type];
      const notLoggedIn = !session.isLoggedIn();

      if (doc.isPrivate) {
        logger.debug(`Skip private doc change ${doc._id}`);
        continue;
      }

      if (notLoggedIn || notOnWhitelist || fromSync) {
        continue;
      }

      const key = `${event}:${doc._id}`;
      _pendingDBChanges[key] = [event, doc, Date.now()];
    }
  });
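
  // NOTE: Keying queued changes by `${event}:${doc._id}` collapses rapid
  // changes to the same doc into one entry, so only the latest version of a
  // doc is written per WRITE_PERIOD tick.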

  await misc.delay(START_DELAY);
  await push();
  await pull();

  let nextSyncTime = 0;
  let isSyncing = false;
  _pullChangesInterval = setInterval(async () => {
    if (isSyncing) {
      return;
    }

    if (Date.now() < nextSyncTime) {
      return;
    }

    // Mark that we are currently executing a sync op
    isSyncing = true;

    const syncStartTime = Date.now();

    let extraDelay = 0;
    try {
      await push();
      await pull();
    } catch (err) {
      logger.error('Sync failed with', err);
      extraDelay += PULL_PERIOD;
    }

    // Add sync duration to give the server some room if it's being slow
    extraDelay += (Date.now() - syncStartTime) * 2;

    nextSyncTime = Date.now() + PULL_PERIOD + extraDelay;
    isSyncing = false;
  }, PULL_PERIOD / 5);
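
  // The interval fires at PULL_PERIOD / 5, but nextSyncTime gates the real
  // work so a sync runs at most once per PULL_PERIOD. Failures and slow
  // responses push nextSyncTime out further, giving a crude adaptive backoff.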

  _writeChangesInterval = setInterval(writePendingChanges, WRITE_PERIOD);

  logger.debug('Initialized');
}

// Used only during tests!
export function _testReset () {
  _isInitialized = false;
  clearInterval(_pullChangesInterval);
  clearInterval(_writeChangesInterval);
}

/**
 * Non-blocking function to perform the initial sync for an account. This pulls
 * all remote resources (if they exist) before initializing sync.
 */
export function doInitialSync () {
  process.nextTick(async () => {
    // First, pull down all remote resources without creating new ones. This
    // makes sure the first sync won't create resources locally when they
    // already exist on the server.
    await pull(null, false);

    // Make sure sync is on (start the timers)
    await init();
  });
}

/**
 * Clean up Workspaces that might have had more than one ResourceGroup created
 * for them. This should be called on init (or maybe even periodically) and can
 * be removed once the underlying bug no longer occurs.
 */
export async function fixDuplicateResourceGroups () {
  if (!session.isLoggedIn()) {
    return;
  }

  let duplicateCount = 0;
  const workspaces = await models.workspace.all();
  for (const workspace of workspaces) {
    const resources = await store.findResourcesByDocId(workspace._id);

    // No duplicates found
    if (resources.length <= 1) {
      continue;
    }

    // Fix duplicates
    const ids = resources.map(r => r.resourceGroupId);
    const {deleteResourceGroupIds} = await session.syncFixDupes(ids);

    for (const idToDelete of deleteResourceGroupIds) {
      await store.removeResourceGroup(idToDelete);
    }

    duplicateCount++;
  }

  if (duplicateCount) {
    logger.debug(`Fixed ${duplicateCount}/${workspaces.length} duplicate synced Workspaces`);
    trackEvent('Sync', 'Fixed Duplicate');
  } else {
    logger.debug('No dupes found to fix');
  }
}
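
// NOTE: The server (via session.syncFixDupes) decides which duplicate
// ResourceGroups to delete; the client only removes the ones it is told to.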

export async function writePendingChanges () {
  // First make a copy and clear pending changes
  const changes = Object.assign({}, _pendingDBChanges);
  _pendingDBChanges = {};

  const keys = Object.keys(changes);

  if (keys.length === 0) {
    // No changes, just return
    return;
  }

  for (const key of keys) {
    const [event, doc, timestamp] = changes[key];
    await _handleChangeAndPush(event, doc, timestamp);
  }
}
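
// Copying and clearing _pendingDBChanges up front means changes that arrive
// while the awaited writes are in flight land in the fresh queue and get
// picked up on the next WRITE_PERIOD tick instead of being lost. For
// reference, the queue entries written by db.onChange above look like this
// (a sketch; the `event` strings come from the database module):
//
//   _pendingDBChanges = {
//     [`${event}:${doc._id}`]: [event, doc, timestampMs],
//     ...
//   };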

export async function push (resourceGroupId = null) {
  if (!session.isLoggedIn()) {
    return;
  }

  let dirtyResources = [];
  if (resourceGroupId) {
    dirtyResources = await store.findActiveDirtyResourcesForResourceGroup(resourceGroupId);
  } else {
    dirtyResources = await store.findActiveDirtyResources();
  }

  if (!dirtyResources.length) {
    logger.debug('No changes to push');
    return;
  }

  let responseBody;
  try {
    responseBody = await session.syncPush(dirtyResources);
  } catch (e) {
    logger.error('Failed to push changes', e);
    return;
  }

  const {
    updated,
    created,
    removed,
    conflicts
  } = responseBody;

  // Update all resource versions with the ones that were returned
  for (const {id, version} of updated) {
    const resource = await store.getResourceByDocId(id);
    await store.updateResource(resource, {version, dirty: false});
  }

  if (updated.length) {
    logger.debug(`Push updated ${updated.length} resources`);
  }

  // Update all resource versions with the ones that were returned
  for (const {id, version} of created) {
    const resource = await store.getResourceByDocId(id);
    await store.updateResource(resource, {version, dirty: false});
  }

  if (created.length) {
    logger.debug(`Push created ${created.length} resources`);
  }

  // Update all resource versions with the ones that were returned
  for (const {id, version} of removed) {
    const resource = await store.getResourceByDocId(id);
    await store.updateResource(resource, {version, dirty: false});
  }

  if (removed.length) {
    logger.debug(`Push removed ${removed.length} resources`);
  }

  // Resolve conflicts
  db.bufferChanges();
  for (const serverResource of conflicts) {
    const localResource = await store.getResourceByDocId(
      serverResource.id,
      serverResource.resourceGroupId
    );

    // On conflict, choose last edited one
    const serverIsNewer = serverResource.lastEdited > localResource.lastEdited;
    const winner = serverIsNewer ? serverResource : localResource;

    // Update local resource
    // NOTE: using localResource as the base to make sure we have _id
    await store.updateResource(localResource, winner, {
      version: serverResource.version, // Act as the server resource no matter what
      dirty: !serverIsNewer // It's dirty if we chose the local doc
    });

    // Decrypt the docs from the resources. Don't fetch the local doc from the
    // app database, because it might have been deleted.
    const winnerName = serverIsNewer ? 'Server' : 'Local';
    logger.debug(`Resolved conflict for ${serverResource.id} (${winnerName})`);

    // If the server won, update ourselves. If we won, we already have the
    // latest version, so do nothing.
    if (serverIsNewer) {
      const doc = await decryptDoc(winner.resourceGroupId, winner.encContent);
      if (winner.removed) {
        await db.remove(doc, true);
      } else {
        await db.update(doc, true);
      }
    }
  }

  db.flushChanges();
}
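
// Conflict resolution above is last-write-wins on the lastEdited timestamp.
// Note that when the local doc wins it is left marked dirty, so the next
// push() re-sends it (now carrying the server's version) and overwrites the
// server copy.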

export async function pull (resourceGroupId = null, createMissingResources = true) {
  if (!session.isLoggedIn()) {
    return;
  }

  // Try to fix duplicates first. Don't worry about calling this a lot; if no
  // duplicates are found it doesn't contact the network.
  await fixDuplicateResourceGroups();

  let allResources;
  if (createMissingResources) {
    allResources = await getOrCreateAllActiveResources(resourceGroupId);
  } else {
    allResources = await store.allActiveResources(resourceGroupId);
  }

  let blacklistedConfigs;
  if (resourceGroupId) {
    // When doing a specific sync, blacklist all configs except the one we're trying to sync.
    const allConfigs = await store.allConfigs();
    blacklistedConfigs = allConfigs.filter(c => c.resourceGroupId !== resourceGroupId);
  } else {
    // When doing a full sync, blacklist the inactive configs
    blacklistedConfigs = await store.findInactiveConfigs(resourceGroupId);
  }

  const resources = allResources.map(r => ({
    id: r.id,
    resourceGroupId: r.resourceGroupId,
    version: r.version,
    removed: r.removed
  }));

  const blacklistedResourceGroupIds = blacklistedConfigs.map(c => c.resourceGroupId);

  const body = {
    resources,
    blacklist: blacklistedResourceGroupIds
  };

  logger.debug(`Pulling with ${resources.length} resources`);

  let responseBody;
  try {
    responseBody = await session.syncPull(body);
  } catch (e) {
    logger.error('Failed to sync changes', e, body);
    return;
  }

  const {
    updatedResources,
    createdResources,
    idsToPush,
    idsToRemove
  } = responseBody;
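
  // The server diffs the resource list we sent against its own state and
  // replies with docs for us to create or update locally, ids it wants pushed
  // back to it (presumably missing or stale on the server), and ids to remove
  // locally.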

  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
  // Insert all the created docs to the DB //
  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //

  db.bufferChanges();
  for (const serverResource of createdResources) {
    let doc;

    try {
      const {resourceGroupId, encContent} = serverResource;
      doc = await decryptDoc(resourceGroupId, encContent);
    } catch (e) {
      logger.warn('Failed to decode created resource', e, serverResource);
      continue;
    }

    // Update local Resource
    try {
      await store.insertResource(serverResource, {dirty: false});
    } catch (e) {
      // This probably means we already have it. This should never happen, but
      // might happen due to a rare race condition.
      logger.error('Failed to insert resource', e, serverResource);
      continue;
    }

    // NOTE: If the Resource insert above succeeded, we have safely inserted
    // the document. However, we do a manual upsert here because it's very
    // possible that the client already has the document locally. That can
    // happen, for example, if the user logs out and back in again.
    const existingDoc = await db.get(doc.type, doc._id);
    if (existingDoc) {
      await db.update(doc, true);
    } else {
      // Mark as not seen if we created a new workspace from sync
      if (doc.type === models.workspace.type) {
        const workspaceMeta = await models.workspaceMeta.getOrCreateByParentId(doc._id);
        await models.workspaceMeta.update(workspaceMeta, {hasSeen: false});
      }
      await db.insert(doc, true);
    }
  }

  if (createdResources.length) {
    logger.debug(`Pull created ${createdResources.length} resources`);
  }

  db.flushChanges();

  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
  // Save all the updated docs to the DB //
  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //

  db.bufferChanges();
  for (const serverResource of updatedResources) {
    try {
      const {resourceGroupId, encContent} = serverResource;
      const doc = await decryptDoc(resourceGroupId, encContent);

      // Update app database
      // Needs to be upsert because we could be "undeleting" something
      await db.upsert(doc, true);

      // Update local resource
      const resource = await store.getResourceByDocId(
        serverResource.id,
        serverResource.resourceGroupId
      );
      await store.updateResource(resource, serverResource, {dirty: false});
    } catch (e) {
      logger.warn('Failed to decode updated resource', e, serverResource);
    }
  }
  db.flushChanges();

  if (updatedResources.length) {
    logger.debug(`Pull updated ${updatedResources.length} resources`);
  }

  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
  // Remove all the docs that need removing //
  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //

  db.bufferChanges();
  for (const id of idsToRemove) {
    const resource = await store.getResourceByDocId(id);
    if (!resource) {
      throw new Error(`Could not find Resource to remove for ${id}`);
    }

    const doc = await decryptDoc(resource.resourceGroupId, resource.encContent);
    if (!doc) {
      throw new Error(`Could not find doc to remove ${id}`);
    }

    // Mark resource as deleted
    await store.updateResource(resource, {dirty: false, removed: true});

    // Remove from DB
    await db.remove(doc, true);
  }
  db.flushChanges();

  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
  // Push all the docs that need pushing //
  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //

  for (const id of idsToPush) {
    const resource = await store.getResourceByDocId(id);
    if (!resource) {
      throw new Error(`Could not find Resource to push for id ${id}`);
    }

    // Mark all resources to push as dirty for the next push
    await store.updateResource(resource, {dirty: true});
  }

  return updatedResources.length + createdResources.length;
}
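
// NOTE: pull() doesn't send the docs the server asked for directly; it only
// marks them dirty so the next push() picks them up.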

export async function getOrCreateConfig (resourceGroupId) {
  const config = await store.getConfig(resourceGroupId);

  if (!config) {
    return await store.insertConfig({resourceGroupId});
  } else {
    return config;
  }
}

export async function ensureConfigExists (resourceGroupId, syncMode) {
  const config = await store.getConfig(resourceGroupId);
  if (!config) {
    await store.insertConfig({resourceGroupId, syncMode});
  }
}

export async function createOrUpdateConfig (resourceGroupId, syncMode) {
  const config = await store.getConfig(resourceGroupId);
  const patch = {resourceGroupId, syncMode};

  if (config) {
    return await store.updateConfig(config, patch);
  } else {
    return await store.insertConfig(patch);
  }
}

export async function logout () {
  await session.logout();
  await resetLocalData();
}

export async function cancelTrial () {
  await session.endTrial();
  await session.logout();
  await resetLocalData();
}

export async function resetLocalData () {
  for (const c of await store.allConfigs()) {
    await store.removeConfig(c);
  }

  for (const r of await store.allResources()) {
    await store.removeResource(r);
  }
}

export async function resetRemoteData () {
  await session.syncResetData();
}

// ~~~~~~~ //
// HELPERS //
// ~~~~~~~ //

async function _handleChangeAndPush (event, doc, timestamp) {
  // Update the resource content and set dirty
  // TODO: Remove one of these steps, since it encrypts twice when the
  // resource does not exist yet
  const resource = await getOrCreateResourceForDoc(doc);

  const updatedResource = await store.updateResource(resource, {
    name: doc.name || 'n/a',
    lastEdited: timestamp,
    lastEditedBy: session.getAccountId(),
    encContent: await encryptDoc(resource.resourceGroupId, doc),
    removed: event === db.CHANGE_REMOVE,
    dirty: true
  });

  // Debounce pushing of dirty resources
  logger.debug(`Queue ${event} ${updatedResource.id}`);
}
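
// NOTE: _handleChangeAndPush doesn't actually push; it only marks the
// resource dirty. The periodic push() started in init() sends dirty
// resources on its next run.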

/**
 * Fetch a ResourceGroup. If it has been fetched before, look it up in memory.
 *
 * @param resourceGroupId
 * @param invalidateCache
 * @returns {Promise}
 */
const _fetchResourceGroupPromises = {};
const _resourceGroupCache = {};
export async function fetchResourceGroup (resourceGroupId, invalidateCache = false) {
  if (invalidateCache) {
    delete _resourceGroupCache[resourceGroupId];
    delete _fetchResourceGroupPromises[resourceGroupId];
  }

  // PERF: If we're currently fetching, return the stored promise
  // TODO: Maybe move parallel fetch caching into the fetch helper
  if (_fetchResourceGroupPromises[resourceGroupId]) {
    return _fetchResourceGroupPromises[resourceGroupId];
  }

  const promise = new Promise(async (resolve, reject) => {
    let resourceGroup = _resourceGroupCache[resourceGroupId];

    if (!resourceGroup) {
      try {
        resourceGroup = await session.syncGetResourceGroup(resourceGroupId);
      } catch (e) {
        if (e.statusCode === 404) {
          await store.removeResourceGroup(resourceGroupId);
          logger.debug('ResourceGroup not found. Deleting...');
          reject(new Error('ResourceGroup was not found'));
          return;
        } else {
          logger.error(`Failed to get ResourceGroup ${resourceGroupId}: ${e}`);
          reject(e);
          return;
        }
      }

      if (resourceGroup.isDisabled) {
        await store.removeResourceGroup(resourceGroup.id);
        logger.debug('ResourceGroup was disabled. Deleting...');
        reject(new Error('ResourceGroup was disabled'));
        return;
      }

      // Also make sure a config exists when we first fetch it.
      // (This may not be needed, but we'll do it just in case)
      await ensureConfigExists(resourceGroupId);
    }

    // Bust the cached promise because we're done with it.
    _fetchResourceGroupPromises[resourceGroupId] = null;

    // Cache the ResourceGroup for next time (they never change)
    _resourceGroupCache[resourceGroupId] = resourceGroup;

    // Return the ResourceGroup
    resolve(resourceGroup);
  });

  // Cache the Promise in case we get asked for the same thing before we're done
  _fetchResourceGroupPromises[resourceGroupId] = promise;
  return promise;
}
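
// The promise cache above dedupes concurrent fetches of the same
// ResourceGroup: callers that arrive while a request is in flight all await
// the same Promise instead of hitting the server again.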

/**
 * Get a ResourceGroup's symmetric encryption key
 *
 * @param resourceGroupId
 * @private
 */
async function _getResourceGroupSymmetricKey (resourceGroupId) {
  let key = resourceGroupSymmetricKeysCache[resourceGroupId];

  if (!key) {
    const resourceGroup = await fetchResourceGroup(resourceGroupId);
    const accountPrivateKey = await session.getPrivateKey();

    const symmetricKeyStr = crypt.decryptRSAWithJWK(
      accountPrivateKey,
      resourceGroup.encSymmetricKey
    );

    key = JSON.parse(symmetricKeyStr);

    // Update cache
    resourceGroupSymmetricKeysCache[resourceGroupId] = key;
  }

  return key;
}
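
// Key hierarchy: each ResourceGroup has its own AES key, stored encrypted
// with the account's RSA public key and unwrapped here with the private key.
// Docs themselves are encrypted with the AES key (see encryptDoc below).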

export async function encryptDoc (resourceGroupId, doc) {
  try {
    const symmetricKey = await _getResourceGroupSymmetricKey(resourceGroupId);

    // TODO: Turn on compression once enough users are on version >= 5.7.0
    // const jsonStr = JSON.stringify(doc);
    // const docStr = zlib.gzipSync(jsonStr);

    // Don't use compression for now
    const docStr = JSON.stringify(doc);

    const message = crypt.encryptAES(symmetricKey, docStr);
    return JSON.stringify(message);
  } catch (e) {
    logger.error(`Failed to encrypt for ${resourceGroupId}: ${e}`);
    throw e;
  }
}

export async function decryptDoc (resourceGroupId, messageJSON) {
  let decrypted;
  try {
    const symmetricKey = await _getResourceGroupSymmetricKey(resourceGroupId);
    const message = JSON.parse(messageJSON);
    decrypted = crypt.decryptAES(symmetricKey, message);
  } catch (e) {
    logger.error(`Failed to decrypt from ${resourceGroupId}: ${e}`, messageJSON);
    throw e;
  }

  try {
    decrypted = zlib.gunzipSync(decrypted);
  } catch (err) {
    // It's not compressed (legacy), which is okay for now
  }

  try {
    return JSON.parse(decrypted);
  } catch (e) {
    logger.error(`Failed to parse after decrypt from ${resourceGroupId}: ${e}`, decrypted);
    throw e;
  }
}
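
// NOTE: decryptDoc already tolerates gzipped payloads (it tries gunzip and
// falls back to plain JSON), which is what lets encryptDoc turn compression
// on later (see the TODO above) without breaking previously stored data.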

async function _getWorkspaceForDoc (doc) {
  const ancestors = await db.withAncestors(doc);
  return ancestors.find(d => d.type === models.workspace.type);
}

export async function createResourceGroup (parentId, name) {
  // Generate symmetric key for ResourceGroup
  const rgSymmetricJWK = await crypt.generateAES256Key();
  const rgSymmetricJWKStr = JSON.stringify(rgSymmetricJWK);

  // Encrypt the symmetric key with the Account public key
  const publicJWK = session.getPublicKey();
  const encRGSymmetricJWK = crypt.encryptRSAWithJWK(publicJWK, rgSymmetricJWKStr);

  // Create the new ResourceGroup
  let resourceGroup;
  try {
    resourceGroup = await session.syncCreateResourceGroup(parentId, name, encRGSymmetricJWK);
  } catch (e) {
    logger.error(`Failed to create ResourceGroup: ${e}`);
    throw e;
  }

  // Create a config for it
  await ensureConfigExists(resourceGroup.id, store.SYNC_MODE_UNSET);

  logger.debug(`Created ResourceGroup ${resourceGroup.id}`);
  return resourceGroup;
}

export async function createResource (doc, resourceGroupId) {
  return await store.insertResource({
    id: doc._id,
    name: doc.name || 'n/a', // Set name to the doc name if it has one
    resourceGroupId: resourceGroupId,
    version: NO_VERSION,
    createdBy: session.getAccountId(),
    lastEdited: doc.modified,
    lastEditedBy: session.getAccountId(),
    removed: false,
    type: doc.type,
    encContent: await encryptDoc(resourceGroupId, doc),
    dirty: true
  });
}

export async function createResourceForDoc (doc) {
  // No resource yet, so create one
  const workspace = await _getWorkspaceForDoc(doc);

  if (!workspace) {
    // The Workspace was probably deleted before its children could be synced.
    // TODO: Handle this case better
    throw new Error(`Could not find workspace for doc ${doc._id}`);
  }

  let workspaceResource = await store.getResourceByDocId(workspace._id);

  if (!workspaceResource) {
    const workspaceResourceGroup = await createResourceGroup(workspace._id, workspace.name);
    workspaceResource = await createResource(workspace, workspaceResourceGroup.id);
  }

  if (workspace === doc) {
    // If the current doc IS a Workspace, just return it
    return workspaceResource;
  } else {
    return await createResource(doc, workspaceResource.resourceGroupId);
  }
}
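
// NOTE: ResourceGroups are created per Workspace (above), so every doc in a
// workspace shares that workspace's ResourceGroup and therefore its AES key.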

export async function getOrCreateResourceForDoc (doc) {
  const [resource, ...extras] = await store.findResourcesByDocId(doc._id);

  // Sometimes there may be multiple resources created by accident for
  // the same doc. Let's delete the extras here if there are any.
  for (const extra of extras) {
    await store.removeResource(extra);
  }

  if (resource) {
    return resource;
  } else {
    return await createResourceForDoc(doc);
  }
}

export async function getOrCreateAllActiveResources (resourceGroupId = null) {
  const startTime = Date.now();
  const activeResourceMap = {};

  let activeResources;
  if (resourceGroupId) {
    activeResources = await store.activeResourcesForResourceGroup(resourceGroupId);
  } else {
    activeResources = await store.allActiveResources();
  }

  for (const r of activeResources) {
    activeResourceMap[r.id] = r;
  }

  // Make sure Workspace is first, because the loop below depends on it
  const modelTypes = Object.keys(WHITE_LIST).sort(
    (a, b) =>
      (a === models.workspace.type ? 0 : 1) -
      (b === models.workspace.type ? 0 : 1)
  );

  let created = 0;
  for (const type of modelTypes) {
    for (const doc of await db.all(type)) {
      if (doc.isPrivate) {
        logger.debug(`Skip private doc ${doc._id}`);
        continue;
      }

      const resource = await store.getResourceByDocId(doc._id);
      if (!resource) {
        try {
          activeResourceMap[doc._id] = await createResourceForDoc(doc);
          created++;
        } catch (e) {
          logger.error(`Failed to create resource for ${doc._id} ${e}`, {doc});
        }
      }
    }
  }

  const resources = Object.keys(activeResourceMap).map(k => activeResourceMap[k]);

  const time = (Date.now() - startTime) / 1000;
  if (created > 0) {
    logger.debug(`Created ${created}/${resources.length} Resources (${time.toFixed(2)}s)`);
  }
  return resources;
}