// packages/insomnia-app/app/common/__tests__/database.test.js
import { globalBeforeEach } from '../../__jest__/before-each';
import * as models from '../../models';
import * as db from '../database';
/**
 * Load a fixture module from ../__fixtures__ and insert every document it
 * contains into the database.
 *
 * Fixture modules export `data`, an object keyed by model type where each
 * value is an array of documents; the key is stamped onto each doc as `type`
 * before insertion.
 *
 * @param {string} name - fixture module name under ../__fixtures__/
 * @returns {Promise<Array>} resolves once every insert has completed
 */
function loadFixture(name) {
  const fixtures = require(`../__fixtures__/${name}`).data;
  const promises = [];
  for (const type of Object.keys(fixtures)) {
    for (const doc of fixtures[type]) {
      promises.push(db.insert(Object.assign({}, doc, { type })));
    }
  }
  return Promise.all(promises);
}
describe('init()', () => {
  beforeEach(globalBeforeEach);

  it('handles being initialized twice', async () => {
    // Double-init must be a no-op, not a crash or a data duplication.
    await db.init(models.types(), { inMemoryOnly: true });
    await db.init(models.types(), { inMemoryOnly: true });
    expect((await db.all(models.request.type)).length).toBe(0);
  });
});
describe('onChange()', () => {
  beforeEach(globalBeforeEach);

  it('handles change listeners', async () => {
    const doc = {
      type: models.request.type,
      parentId: 'nothing',
      name: 'foo',
    };

    const changesSeen = [];
    const callback = change => {
      changesSeen.push(change);
    };

    db.onChange(callback);
    const newDoc = await models.request.create(doc);
    const updatedDoc = await models.request.update(newDoc, { name: 'bar' });

    // One callback invocation per operation; each invocation receives an
    // array of [event, doc, fromSync] tuples.
    expect(changesSeen.length).toBe(2);
    expect(changesSeen).toEqual([
      [[db.CHANGE_INSERT, newDoc, false]],
      [[db.CHANGE_UPDATE, updatedDoc, false]],
    ]);

    // After unsubscribing, further changes must not reach the callback.
    db.offChange(callback);
    await models.request.create(doc);
    expect(changesSeen.length).toBe(2);
  });
});
describe('bufferChanges()', () => {
  beforeEach(globalBeforeEach);

  it('properly buffers changes', async () => {
    const doc = {
      type: models.request.type,
      parentId: 'n/a',
      name: 'foo',
    };

    const changesSeen = [];
    const callback = change => {
      changesSeen.push(change);
    };

    db.onChange(callback);
    await db.bufferChanges();
    const newDoc = await models.request.create(doc);
    const updatedDoc = await models.request.update(newDoc, true);

    // Assert no change seen before flush
    expect(changesSeen.length).toBe(0);

    // Assert changes seen after flush; buffered changes arrive as a single
    // batched callback invocation.
    await db.flushChanges();
    expect(changesSeen).toEqual([
      [
        [db.CHANGE_INSERT, newDoc, false],
        [db.CHANGE_UPDATE, updatedDoc, false],
      ],
    ]);

    // Assert no more changes seen after flush again
    await db.flushChanges();
    expect(changesSeen).toEqual([
      [
        [db.CHANGE_INSERT, newDoc, false],
        [db.CHANGE_UPDATE, updatedDoc, false],
      ],
    ]);
  });

  it('should auto flush after a default wait', async () => {
    const doc = {
      type: models.request.type,
      parentId: 'n/a',
      name: 'foo',
    };

    const changesSeen = [];
    const callback = change => {
      changesSeen.push(change);
    };

    db.onChange(callback);
    await db.bufferChanges();
    const newDoc = await models.request.create(doc);
    const updatedDoc = await models.request.update(newDoc, true);

    // Default flush timeout is 1000ms after starting buffering
    await new Promise(resolve => setTimeout(resolve, 1500));
    expect(changesSeen).toEqual([
      [
        [db.CHANGE_INSERT, newDoc, false],
        [db.CHANGE_UPDATE, updatedDoc, false],
      ],
    ]);
  });

  it('should auto flush after a specified wait', async () => {
    const doc = {
      type: models.request.type,
      parentId: 'n/a',
      name: 'foo',
    };

    const changesSeen = [];
    const callback = change => {
      changesSeen.push(change);
    };

    db.onChange(callback);
    await db.bufferChanges(500);
    const newDoc = await models.request.create(doc);
    const updatedDoc = await models.request.update(newDoc, true);

    // Wait past the custom 500ms flush timeout.
    await new Promise(resolve => setTimeout(resolve, 1000));
    expect(changesSeen).toEqual([
      [
        [db.CHANGE_INSERT, newDoc, false],
        [db.CHANGE_UPDATE, updatedDoc, false],
      ],
    ]);
  });
});
describe('bufferChangesIndefinitely()', () => {
  beforeEach(globalBeforeEach);

  it('should not auto flush', async () => {
    const doc = {
      type: models.request.type,
      parentId: 'n/a',
      name: 'foo',
    };

    const changesSeen = [];
    const callback = change => {
      changesSeen.push(change);
    };

    db.onChange(callback);
    await db.bufferChangesIndefinitely();
    const newDoc = await models.request.create(doc);
    const updatedDoc = await models.request.update(newDoc, true);

    // Default flush timeout is 1000ms after starting buffering; indefinite
    // buffering must NOT flush even after that window passes.
    await new Promise(resolve => setTimeout(resolve, 1500));

    // Assert no change seen before flush
    expect(changesSeen.length).toBe(0);

    // Assert changes seen after an explicit flush
    await db.flushChanges();
    expect(changesSeen).toEqual([
      [
        [db.CHANGE_INSERT, newDoc, false],
        [db.CHANGE_UPDATE, updatedDoc, false],
      ],
    ]);
  });
});
describe('requestCreate()', () => {
  beforeEach(globalBeforeEach);

  it('creates a valid request', async () => {
    const now = Date.now();

    const patch = {
      name: 'My Request',
      parentId: 'wrk_123',
    };

    const r = await models.request.create(patch);
    // Guard against fields being silently added/removed from the model.
    expect(Object.keys(r).length).toBe(21);

    expect(r._id).toMatch(/^req_[a-zA-Z0-9]{32}$/);
    expect(r.created).toBeGreaterThanOrEqual(now);
    expect(r.modified).toBeGreaterThanOrEqual(now);
    expect(r.type).toBe('Request');
    expect(r.name).toBe('My Request');
    expect(r.url).toBe('');
    expect(r.method).toBe('GET');
    expect(r.body).toEqual({});
    expect(r.parameters).toEqual([]);
    expect(r.headers).toEqual([]);
    expect(r.authentication).toEqual({});
    // New requests sort to the top: metaSortKey defaults to -now.
    expect(r.metaSortKey).toBeLessThanOrEqual(-1 * now);
    expect(r.parentId).toBe('wrk_123');
  });

  it('throws when missing parentID', () => {
    const fn = () => models.request.create({ name: 'My Request' });
    expect(fn).toThrowError('New Requests missing `parentId`');
  });
});
describe('requestGroupDuplicate()', () => {
  beforeEach(async () => {
    await globalBeforeEach();
    await loadFixture('nestedfolders');
  });

  it('duplicates a RequestGroup', async () => {
    const requestGroup = await models.requestGroup.getById('fld_1');
    expect(requestGroup.name).toBe('Fld 1');

    const newRequestGroup = await models.requestGroup.duplicate(requestGroup);
    expect(newRequestGroup._id).not.toBe(requestGroup._id);
    expect(newRequestGroup.name).toBe('Fld 1 (Copy)');

    const allRequests = await models.request.all();
    const allRequestGroups = await models.requestGroup.all();
    const childRequests = await models.request.findByParentId(requestGroup._id);
    const childRequestGroups = await models.requestGroup.findByParentId(requestGroup._id);
    const newChildRequests = await models.request.findByParentId(newRequestGroup._id);
    const newChildRequestGroups = await models.requestGroup.findByParentId(newRequestGroup._id);

    // This asserting is pretty garbage but it at least checks
    // to see that the recursion worked (for the most part)
    expect(allRequests.length).toBe(8);
    expect(allRequestGroups.length).toBe(5);

    expect(childRequests.length).toBe(2);
    expect(childRequestGroups.length).toBe(1);

    expect(newChildRequests.length).toBe(2);
    expect(newChildRequestGroups.length).toBe(1);
  });
});
describe('_repairDatabase()', () => {
  beforeEach(globalBeforeEach);

  it('fixes duplicate environments', async () => {
    // Create Workspace with no children
    const workspace = await models.workspace.create({ _id: 'w1' });
    const spec = await models.apiSpec.getByParentId(workspace._id);
    expect((await db.withDescendants(workspace)).length).toBe(2);

    // Create one set of sub environments
    await models.environment.create({
      _id: 'b1',
      parentId: 'w1',
      data: { foo: 'b1', b1: true },
    });
    await models.environment.create({
      _id: 'b1_sub1',
      parentId: 'b1',
      data: { foo: '1' },
    });
    await models.environment.create({
      _id: 'b1_sub2',
      parentId: 'b1',
      data: { foo: '2' },
    });

    // Create second set of sub environments
    await models.environment.create({
      _id: 'b2',
      parentId: 'w1',
      data: { foo: 'b2', b2: true },
    });
    await models.environment.create({
      _id: 'b2_sub1',
      parentId: 'b2',
      data: { foo: '3' },
    });
    await models.environment.create({
      _id: 'b2_sub2',
      parentId: 'b2',
      data: { foo: '4' },
    });

    // Make sure we have everything
    expect((await db.withDescendants(workspace)).length).toBe(8);
    const descendants = (await db.withDescendants(workspace)).map(d => ({
      _id: d._id,
      parentId: d.parentId,
      data: d.data || null,
    }));
    expect(descendants).toEqual([
      { _id: 'w1', data: null, parentId: null },
      { _id: 'b1', data: { foo: 'b1', b1: true }, parentId: 'w1' },
      { _id: 'b2', data: { foo: 'b2', b2: true }, parentId: 'w1' },
      expect.objectContaining({ _id: spec._id, parentId: 'w1' }),
      { _id: 'b1_sub1', data: { foo: '1' }, parentId: 'b1' },
      { _id: 'b1_sub2', data: { foo: '2' }, parentId: 'b1' },
      { _id: 'b2_sub1', data: { foo: '3' }, parentId: 'b2' },
      { _id: 'b2_sub2', data: { foo: '4' }, parentId: 'b2' },
    ]);

    // Run the fix algorithm
    await db._repairDatabase();

    // Make sure things get adjusted
    const descendants2 = (await db.withDescendants(workspace)).map(d => ({
      _id: d._id,
      parentId: d.parentId,
      data: d.data || null,
    }));
    expect(descendants2).toEqual([
      { _id: 'w1', data: null, parentId: null },
      { _id: 'b1', data: { foo: 'b1', b1: true, b2: true }, parentId: 'w1' },
      expect.objectContaining({ _id: spec._id, parentId: 'w1' }),

      // Extra base environments should have been deleted
      // {_id: 'b2', data: {foo: 'bar'}, parentId: 'w1'},

      // Sub environments should have been moved to new "master" base environment
      { _id: 'b1_sub1', data: { foo: '1' }, parentId: 'b1' },
      { _id: 'b1_sub2', data: { foo: '2' }, parentId: 'b1' },
      { _id: 'b2_sub1', data: { foo: '3' }, parentId: 'b1' },
      { _id: 'b2_sub2', data: { foo: '4' }, parentId: 'b1' },
    ]);
  });

  it('fixes duplicate cookie jars', async () => {
    // Create Workspace with no children
    const workspace = await models.workspace.create({ _id: 'w1' });
    const spec = await models.apiSpec.getByParentId(workspace._id);
    expect((await db.withDescendants(workspace)).length).toBe(2);

    // Create one set of sub environments
    await models.cookieJar.create({
      _id: 'j1',
      parentId: 'w1',
      cookies: [
        { id: '1', key: 'foo', value: '1' },
        { id: 'j1_1', key: 'j1', value: '1' },
      ],
    });

    await models.cookieJar.create({
      _id: 'j2',
      parentId: 'w1',
      cookies: [
        { id: '1', key: 'foo', value: '2' },
        { id: 'j2_1', key: 'j2', value: '2' },
      ],
    });

    // Make sure we have everything
    expect((await db.withDescendants(workspace)).length).toBe(4);
    const descendants = (await db.withDescendants(workspace)).map(d => ({
      _id: d._id,
      cookies: d.cookies || null,
      parentId: d.parentId,
    }));
    expect(descendants).toEqual([
      { _id: 'w1', cookies: null, parentId: null },
      {
        _id: 'j1',
        parentId: 'w1',
        cookies: [
          { id: '1', key: 'foo', value: '1' },
          { id: 'j1_1', key: 'j1', value: '1' },
        ],
      },
      {
        _id: 'j2',
        parentId: 'w1',
        cookies: [
          { id: '1', key: 'foo', value: '2' },
          { id: 'j2_1', key: 'j2', value: '2' },
        ],
      },
      expect.objectContaining({ _id: spec._id, parentId: 'w1' }),
    ]);

    // Run the fix algorithm
    await db._repairDatabase();

    // Make sure things get adjusted; duplicate jars are merged into the first
    // one and conflicting cookie ids keep the first jar's value.
    const descendants2 = (await db.withDescendants(workspace)).map(d => ({
      _id: d._id,
      cookies: d.cookies || null,
      parentId: d.parentId,
    }));
    expect(descendants2).toEqual([
      { _id: 'w1', cookies: null, parentId: null },
      {
        _id: 'j1',
        parentId: 'w1',
        cookies: [
          { id: '1', key: 'foo', value: '1' },
          { id: 'j1_1', key: 'j1', value: '1' },
          { id: 'j2_1', key: 'j2', value: '2' },
        ],
      },
      expect.objectContaining({ _id: spec._id, parentId: 'w1' }),
    ]);
  });

  it('fixes the filename on an apiSpec', async () => {
    // Create Workspace with apiSpec child (migration in workspace will automatically create this as it is not mocked)
    const w1 = await models.workspace.create({ _id: 'w1', name: 'Workspace 1' });
    const w2 = await models.workspace.create({ _id: 'w2', name: 'Workspace 2' });
    const w3 = await models.workspace.create({ _id: 'w3', name: 'Workspace 3' });
    await models.apiSpec.updateOrCreateForParentId(w1._id, { fileName: '' });
    await models.apiSpec.updateOrCreateForParentId(w2._id, {
      fileName: models.apiSpec.init().fileName,
    });
    await models.apiSpec.updateOrCreateForParentId(w3._id, { fileName: 'Unique name' });

    // Make sure we have everything
    expect((await models.apiSpec.getByParentId(w1._id)).fileName).toBe('');
    expect((await models.apiSpec.getByParentId(w2._id)).fileName).toBe('New Document');
    expect((await models.apiSpec.getByParentId(w3._id)).fileName).toBe('Unique name');

    // Run the fix algorithm
    await db._repairDatabase();

    // Make sure things get adjusted
    expect((await models.apiSpec.getByParentId(w1._id)).fileName).toBe('Workspace 1'); // Should fix
    expect((await models.apiSpec.getByParentId(w2._id)).fileName).toBe('Workspace 2'); // Should fix
    expect((await models.apiSpec.getByParentId(w3._id)).fileName).toBe('Unique name'); // should not fix
  });

  it('fixes old git uris', async () => {
    const oldRepoWithSuffix = await models.gitRepository.create({
      uri: 'https://github.com/foo/bar.git',
      uriNeedsMigration: true,
    });
    const oldRepoWithoutSuffix = await models.gitRepository.create({
      uri: 'https://github.com/foo/bar',
      uriNeedsMigration: true,
    });
    const newRepoWithSuffix = await models.gitRepository.create({
      uri: 'https://github.com/foo/bar.git',
    });
    const newRepoWithoutSuffix = await models.gitRepository.create({
      uri: 'https://github.com/foo/bar',
    });

    await db._repairDatabase();

    // Repos flagged for migration get the `.git` suffix added (if missing)
    // and the flag cleared; unflagged repos keep their URI as-is.
    expect(await db.get(models.gitRepository.type, oldRepoWithSuffix._id)).toEqual(
      expect.objectContaining({
        uri: 'https://github.com/foo/bar.git',
        uriNeedsMigration: false,
      }),
    );
    expect(await db.get(models.gitRepository.type, oldRepoWithoutSuffix._id)).toEqual(
      expect.objectContaining({
        uri: 'https://github.com/foo/bar.git',
        uriNeedsMigration: false,
      }),
    );
    expect(await db.get(models.gitRepository.type, newRepoWithSuffix._id)).toEqual(
      expect.objectContaining({
        uri: 'https://github.com/foo/bar.git',
        uriNeedsMigration: false,
      }),
    );
    expect(await db.get(models.gitRepository.type, newRepoWithoutSuffix._id)).toEqual(
      expect.objectContaining({
        uri: 'https://github.com/foo/bar',
        uriNeedsMigration: false,
      }),
    );
  });
});
describe('duplicate()', () => {
  beforeEach(globalBeforeEach);
  afterEach(() => jest.restoreAllMocks());

  it('should overwrite appropriate fields on the parent when duplicating', async () => {
    const date = 1478795580200;
    // Freeze time so created/modified on the duplicate are predictable.
    Date.now = jest.fn().mockReturnValue(date);

    const workspace = await models.workspace.create({
      name: 'Test Workspace',
    });

    const newDescription = 'test';
    const duplicated = await db.duplicate(workspace, { description: newDescription });

    expect(duplicated._id).not.toEqual(workspace._id);
    expect(duplicated._id).toMatch(/^wrk_[a-z0-9]{32}$/);

    // Compare everything except the (necessarily different) ids.
    delete workspace._id;
    delete duplicated._id;
    expect(duplicated).toEqual({
      ...workspace,
      description: newDescription,
      modified: date,
      created: date,
      type: models.workspace.type,
    });
  });

  it('should should not call migrate when duplicating', async () => {
    const workspace = await models.workspace.create({
      name: 'Test Workspace',
    });

    const spy = jest.spyOn(models.workspace, 'migrate');
    await db.duplicate(workspace);
    expect(spy).not.toHaveBeenCalled();
  });
});
describe('docCreate()', () => {
  beforeEach(globalBeforeEach);
  afterEach(() => jest.restoreAllMocks());

  it('should call migrate when creating', async () => {
    const spy = jest.spyOn(models.workspace, 'migrate');
    await db.docCreate(models.workspace.type, {
      name: 'Test Workspace',
    });

    // TODO: This is actually called twice, not once - we should avoid the double model.init() call.
    expect(spy).toHaveBeenCalled();
  });
});
describe('withAncestors()', () => {
  beforeEach(globalBeforeEach);

  it('should return itself and all parents but exclude siblings', async () => {
    // Build: workspace -> (request, grpcRequest, group -> (request, grpcRequest))
    const wrk = await models.workspace.create();
    const wrkReq = await models.request.create({ parentId: wrk._id });
    const wrkGrpcReq = await models.grpcRequest.create({ parentId: wrk._id });
    const grp = await models.requestGroup.create({ parentId: wrk._id });
    const grpReq = await models.request.create({ parentId: grp._id });
    const grpGrpcReq = await models.grpcRequest.create({ parentId: grp._id });

    // Workspace child searching for ancestors
    await expect(db.withAncestors(wrk)).resolves.toStrictEqual([wrk]);
    await expect(db.withAncestors(wrkReq)).resolves.toStrictEqual([wrkReq, wrk]);
    await expect(db.withAncestors(wrkGrpcReq)).resolves.toStrictEqual([wrkGrpcReq, wrk]);

    // Group searching for ancestors
    await expect(db.withAncestors(grp)).resolves.toStrictEqual([grp, wrk]);

    // Group child searching for ancestors
    await expect(db.withAncestors(grpReq)).resolves.toStrictEqual([grpReq, grp, wrk]);
    await expect(db.withAncestors(grpGrpcReq)).resolves.toStrictEqual([grpGrpcReq, grp, wrk]);

    // Group child searching for ancestors with filters
    await expect(db.withAncestors(grpGrpcReq, [models.requestGroup.type])).resolves.toStrictEqual([
      grpGrpcReq,
      grp,
    ]);
    await expect(
      db.withAncestors(grpGrpcReq, [models.requestGroup.type, models.workspace.type]),
    ).resolves.toStrictEqual([grpGrpcReq, grp, wrk]);

    // Group child searching for ancestors but excluding groups will not find the workspace
    // (traversal stops as soon as a parent's type is filtered out).
    await expect(db.withAncestors(grpGrpcReq, [models.workspace.type])).resolves.toStrictEqual([
      grpGrpcReq,
    ]);
  });
});