mirror of
https://github.com/Kong/insomnia
synced 2024-11-08 06:39:48 +00:00
Multi proto file support (#3006)
This commit is contained in:
parent
139095de5a
commit
42ab4e4465
10
packages/insomnia-app/app/__jest__/redux-state-for-test.js
Normal file
10
packages/insomnia-app/app/__jest__/redux-state-for-test.js
Normal file
@ -0,0 +1,10 @@
|
||||
// @flow
|
||||
|
||||
import * as entities from '../ui/redux/modules/entities';
|
||||
|
||||
const reduxStateForTest = async (activeWorkspaceId: string): Promise<Object> => ({
|
||||
entities: entities.reducer({}, entities.initializeWith(await entities.allDocs())),
|
||||
global: { activeWorkspaceId },
|
||||
});
|
||||
|
||||
export default reduxStateForTest;
|
@ -3,9 +3,11 @@ import { difference } from 'lodash';
|
||||
import {
|
||||
isGrpcRequest,
|
||||
isGrpcRequestId,
|
||||
isProtoDirectory,
|
||||
isProtoFile,
|
||||
isRequest,
|
||||
isRequestGroup,
|
||||
isWorkspace,
|
||||
} from '../is-model';
|
||||
import { generateId } from '../../../common/misc';
|
||||
|
||||
@ -89,3 +91,29 @@ describe('isProtoFile', () => {
|
||||
expect(isProtoFile({ type })).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isProtoDirectory', () => {
|
||||
const supported = [models.protoDirectory.type];
|
||||
const unsupported = difference(allTypes, supported);
|
||||
|
||||
it.each(supported)('should return true: "%s"', type => {
|
||||
expect(isProtoDirectory({ type })).toBe(true);
|
||||
});
|
||||
|
||||
it.each(unsupported)('should return false: "%s"', type => {
|
||||
expect(isProtoDirectory({ type })).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isWorkspace', () => {
|
||||
const supported = [models.workspace.type];
|
||||
const unsupported = difference(allTypes, supported);
|
||||
|
||||
it.each(supported)('should return true: "%s"', type => {
|
||||
expect(isWorkspace({ type })).toBe(true);
|
||||
});
|
||||
|
||||
it.each(unsupported)('should return false: "%s"', type => {
|
||||
expect(isWorkspace({ type })).toBe(false);
|
||||
});
|
||||
});
|
||||
|
@ -1,6 +1,6 @@
|
||||
// @flow
|
||||
import type { BaseModel } from '../index';
|
||||
import { grpcRequest, request, requestGroup, protoFile } from '../index';
|
||||
import { grpcRequest, request, requestGroup, protoFile, protoDirectory, workspace } from '../index';
|
||||
|
||||
export function isGrpcRequest(obj: BaseModel): boolean {
|
||||
return obj.type === grpcRequest.type;
|
||||
@ -26,3 +26,11 @@ export function isRequestGroup(obj: BaseModel): boolean {
|
||||
export function isProtoFile(obj: BaseModel): boolean {
|
||||
return obj.type === protoFile.type;
|
||||
}
|
||||
|
||||
export function isProtoDirectory(obj: BaseModel): boolean {
|
||||
return obj.type === protoDirectory.type;
|
||||
}
|
||||
|
||||
export function isWorkspace(obj: BaseModel): boolean {
|
||||
return obj.type === workspace.type;
|
||||
}
|
||||
|
@ -18,6 +18,7 @@ import * as _unitTest from './unit-test';
|
||||
import * as _unitTestResult from './unit-test-result';
|
||||
import * as _unitTestSuite from './unit-test-suite';
|
||||
import * as _protoFile from './proto-file';
|
||||
import * as _protoDirectory from './proto-directory';
|
||||
import * as _grpcRequest from './grpc-request';
|
||||
import * as _grpcRequestMeta from './grpc-request-meta';
|
||||
import * as _workspace from './workspace';
|
||||
@ -52,6 +53,7 @@ export const unitTest = _unitTest;
|
||||
export const unitTestSuite = _unitTestSuite;
|
||||
export const unitTestResult = _unitTestResult;
|
||||
export const protoFile = _protoFile;
|
||||
export const protoDirectory = _protoDirectory;
|
||||
export const grpcRequest = _grpcRequest;
|
||||
export const grpcRequestMeta = _grpcRequestMeta;
|
||||
export const workspace = _workspace;
|
||||
@ -80,6 +82,7 @@ export function all() {
|
||||
unitTestResult,
|
||||
unitTest,
|
||||
protoFile,
|
||||
protoDirectory,
|
||||
grpcRequest,
|
||||
grpcRequestMeta,
|
||||
];
|
||||
|
49
packages/insomnia-app/app/models/proto-directory.js
Normal file
49
packages/insomnia-app/app/models/proto-directory.js
Normal file
@ -0,0 +1,49 @@
|
||||
// @flow
|
||||
import * as db from '../common/database';
|
||||
import type { BaseModel } from './index';
|
||||
|
||||
export const name = 'Proto Directory';
|
||||
export const type = 'ProtoDirectory';
|
||||
export const prefix = 'pd';
|
||||
export const canDuplicate = true;
|
||||
export const canSync = true;
|
||||
|
||||
type BaseProtoDirectory = {
|
||||
name: string,
|
||||
};
|
||||
|
||||
export type ProtoDirectory = BaseModel & BaseProtoDirectory;
|
||||
|
||||
export function init(): BaseProtoDirectory {
|
||||
return {
|
||||
name: 'New Proto Directory',
|
||||
};
|
||||
}
|
||||
|
||||
export function migrate(doc: ProtoDirectory): ProtoDirectory {
|
||||
return doc;
|
||||
}
|
||||
|
||||
export function create(patch: $Shape<ProtoDirectory> = {}): Promise<ProtoDirectory> {
|
||||
if (!patch.parentId) {
|
||||
throw new Error('New ProtoDirectory missing `parentId`');
|
||||
}
|
||||
|
||||
return db.docCreate(type, patch);
|
||||
}
|
||||
|
||||
export function getById(_id: string): Promise<ProtoDirectory | null> {
|
||||
return db.getWhere(type, { _id });
|
||||
}
|
||||
|
||||
export function getByParentId(parentId: string): Promise<ProtoDirectory | null> {
|
||||
return db.getWhere(type, { parentId });
|
||||
}
|
||||
|
||||
export function remove(obj: ProtoDirectory): Promise<void> {
|
||||
return db.remove(obj);
|
||||
}
|
||||
|
||||
export function all(): Promise<Array<ProtoDirectory>> {
|
||||
return db.all(type);
|
||||
}
|
@ -46,6 +46,10 @@ export function getById(_id: string): Promise<ProtoFile | null> {
|
||||
return db.getWhere(type, { _id });
|
||||
}
|
||||
|
||||
export function getByParentId(parentId: string): Promise<ProtoFile | null> {
|
||||
return db.getWhere(type, { parentId });
|
||||
}
|
||||
|
||||
export function findByParentId(parentId: string): Promise<Array<ProtoFile>> {
|
||||
return db.find(type, { parentId });
|
||||
}
|
||||
|
// Test fixture: a tiny service exercising well-known types
// (google.protobuf.Empty / Timestamp) from a nested directory.
syntax = "proto3";
import "google/protobuf/timestamp.proto";
import "google/protobuf/empty.proto";

package time;

// Single-method service; tests expect exactly 1 method in this file.
service TimeService {
  rpc GetTime(google.protobuf.Empty) returns (TimeResponse);
}

message TimeResponse {
  google.protobuf.Timestamp timestamp_value = 27;
}
// Test fixture: root of the proto library tree. Uses `import public` so that
// the services of hello.proto and nested/time/time.proto are re-exported
// through this file (tests expect root's method count to equal the sum of
// the two imported files).
syntax = "proto3";

package lib;

import public 'hello.proto';
import public 'nested/time/time.proto';
// @flow
// Integration tests for the end-to-end proto flow: file/directory ingestion
// (proto-manager) followed by gRPC method discovery (proto-loader).
import path from 'path';
import fs from 'fs';
import os from 'os';
import * as protoManager from '../proto-manager';
import * as protoLoader from '../proto-loader';
import * as models from '../../../models';
import { globalBeforeEach } from '../../../__jest__/before-each';
import selectFileOrFolder from '../../../common/select-file-or-folder';

// Replace the native file picker dialog with a mock so tests can choose
// fixture paths programmatically.
jest.mock('../../../common/select-file-or-folder', () => ({
  __esModule: true,
  default: jest.fn(),
}));

describe('proto management integration test', () => {
  beforeEach(globalBeforeEach);

  it('can ingest proto file and load methods from it', async () => {
    const w = await models.workspace.create();

    // Mock folder selection
    const protoFilePath = path.join(__dirname, '../__fixtures__/library/hello.proto');
    selectFileOrFolder.mockResolvedValue({ filePath: protoFilePath });

    // Ingest into database; the callback receives the created document's id.
    let createdProtoFileId;
    await protoManager.addFile(w._id, id => {
      createdProtoFileId = id;
    });

    expect(selectFileOrFolder).toHaveBeenCalledWith({
      itemTypes: ['file'],
      extensions: ['proto'],
    });

    // Find proto file entries
    const helloProto = await models.protoFile.getById(createdProtoFileId);

    // Load protoMethods
    const helloMethods = await protoLoader.loadMethods(helloProto);

    // hello.proto defines 4 rpc methods (see fixture).
    expect(helloMethods.length).toBe(4);
  });

  it('can ingest proto directory tree and load methods from any file', async () => {
    const w = await models.workspace.create();

    // Mock folder selection
    const libraryDirPath = path.join(__dirname, '../__fixtures__/library');
    selectFileOrFolder.mockResolvedValue({ filePath: libraryDirPath });

    // Ingest into database
    await protoManager.addDirectory(w._id);

    expect(selectFileOrFolder).toHaveBeenCalledWith({
      itemTypes: ['directory'],
      extensions: ['proto'],
    });

    // Find proto file entries
    const protoFiles = await models.protoFile.all();
    const rootProto = protoFiles.find(pf => pf.name === 'root.proto');
    const helloProto = protoFiles.find(pf => pf.name === 'hello.proto');
    const timeProto = protoFiles.find(pf => pf.name === 'time.proto');

    // Load protoMethods
    const rootMethods = await protoLoader.loadMethods(rootProto);
    const helloMethods = await protoLoader.loadMethods(helloProto);
    const timeMethods = await protoLoader.loadMethods(timeProto);

    // root.proto `import public`s both other files, so its method count is
    // the sum of theirs.
    expect(rootMethods.length).toBe(helloMethods.length + timeMethods.length);
    expect(helloMethods.length).toBe(4);
    expect(timeMethods.length).toBe(1);

    // Create request
    const gr = await models.grpcRequest.create({
      parentId: w._id,
      protoFileId: rootProto._id,
      protoMethodName: rootMethods[0].path,
    });

    // Load selected method
    const selectedMethod = await protoLoader.getSelectedMethod(gr);

    // Compare via JSON because method definitions contain functions/objects
    // that are not reference-equal across loads.
    expect(JSON.stringify(selectedMethod)).toEqual(JSON.stringify(rootMethods[0]));
  });

  afterEach(async () => {
    // Clean up the temp directory where proto files were written.
    // NOTE(review): fs.promises.rmdir with { recursive: true } is deprecated
    // in newer Node versions in favor of fs.promises.rm — consider migrating.
    const tempDirPath = path.join(os.tmpdir(), 'insomnia-grpc');
    await fs.promises.rmdir(tempDirPath, { recursive: true });
  });
});
|
@ -1,39 +0,0 @@
|
||||
// @flow
|
||||
import * as protoLoader from '../proto-loader';
|
||||
import writeProtoFile from '../write-proto-file';
|
||||
import path from 'path';
|
||||
|
||||
jest.mock('../write-proto-file', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(),
|
||||
}));
|
||||
|
||||
const protoFile = {
|
||||
protoText: 'this is just a placeholder because writing to a file is mocked',
|
||||
};
|
||||
|
||||
describe('loadMethods', () => {
|
||||
const protoFilePath = path.join(__dirname, '../__fixtures__/hello.proto');
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
it('should load methods', async () => {
|
||||
writeProtoFile.mockResolvedValue(protoFilePath);
|
||||
|
||||
const methods = await protoLoader.loadMethods(protoFile);
|
||||
|
||||
expect(writeProtoFile).toHaveBeenCalledWith(protoFile.protoText);
|
||||
|
||||
expect(methods).toHaveLength(4);
|
||||
expect(methods.map(c => c.path)).toStrictEqual(
|
||||
expect.arrayContaining([
|
||||
'/hello.HelloService/SayHello',
|
||||
'/hello.HelloService/LotsOfReplies',
|
||||
'/hello.HelloService/LotsOfGreetings',
|
||||
'/hello.HelloService/BidiHello',
|
||||
]),
|
||||
);
|
||||
});
|
||||
});
|
// Manual Jest mock for the proto-loader module (picked up via jest.mock).
// Each public function is replaced with a bare jest.fn(); tests configure
// resolved values per-case.
module.exports = {
  loadMethods: jest.fn(),
  loadMethodsFromPath: jest.fn(),
  getSelectedMethod: jest.fn(),
};
// @flow
// Unit tests for proto-loader's loadMethods. The actual temp-file write is
// mocked so method discovery resolves against a checked-in fixture path.
import * as protoLoader from '../index';
import writeProtoFile from '../write-proto-file';
import path from 'path';
import { globalBeforeEach } from '../../../../__jest__/before-each';
import * as models from '../../../../models';

jest.mock('../write-proto-file', () => ({
  __esModule: true,
  default: jest.fn(),
}));

describe('loadMethods', () => {
  const protoFilePath = path.join(__dirname, '../../__fixtures__/library/hello.proto');

  beforeEach(async () => {
    // FIX: globalBeforeEach() is async (it is awaited elsewhere, e.g. via
    // `beforeEach(globalBeforeEach)`); it was previously called here without
    // awaiting, so test bodies could race the database setup.
    await globalBeforeEach();
    jest.resetAllMocks();
  });

  it('should load methods', async () => {
    const w = await models.workspace.create();
    const pf = await models.protoFile.create({
      parentId: w._id,
      protoText: 'this is just a placeholder because writing to a file is mocked',
    });
    // loadMethods consumes the { filePath, dirs } shape returned by the
    // (mocked) writeProtoFile.
    writeProtoFile.mockResolvedValue({ filePath: protoFilePath, dirs: [] });

    const methods = await protoLoader.loadMethods(pf);

    expect(writeProtoFile).toHaveBeenCalledWith(pf);

    // hello.proto fixture defines exactly these 4 rpc methods.
    expect(methods).toHaveLength(4);
    expect(methods.map(c => c.path)).toStrictEqual(
      expect.arrayContaining([
        '/hello.HelloService/SayHello',
        '/hello.HelloService/LotsOfReplies',
        '/hello.HelloService/LotsOfGreetings',
        '/hello.HelloService/BidiHello',
      ]),
    );
  });

  it('should load no methods if protofile does not exist or is empty', async () => {
    const w = await models.workspace.create();
    const pf = await models.protoFile.create({
      parentId: w._id,
      protoText: '',
    });

    // Both a missing document and an empty protoText short-circuit to [].
    await expect(protoLoader.loadMethods(undefined)).resolves.toHaveLength(0);
    await expect(protoLoader.loadMethods(pf)).resolves.toHaveLength(0);
  });
});
// @flow
// Unit tests for writeProtoFile. All filesystem effects (tmpdir resolution,
// existence checks, directory creation, file writes) are spied/mocked so the
// tests assert the exact paths computed without touching the real disk.
import path from 'path';
import os from 'os';
import fs from 'fs';
import * as models from '../../../../models';
import { globalBeforeEach } from '../../../../__jest__/before-each';
import writeProtoFile from '../write-proto-file';
import mkdirp from 'mkdirp';

describe('writeProtoFile', () => {
  let mkdirpSyncSpy: * | JestMockFn<*, *>;
  let tmpDirSpy: * | JestMockFn<*, *>;
  let existsSyncSpy: * | JestMockFn<*, *>;
  let writeFileSpy: * | JestMockFn<*, Promise<*>>;

  // Install spies on the fs/os/mkdirp entry points writeProtoFile uses.
  const _setupSpies = () => {
    tmpDirSpy = jest.spyOn(os, 'tmpdir');
    existsSyncSpy = jest.spyOn(fs, 'existsSync');
    mkdirpSyncSpy = jest.spyOn(mkdirp, 'sync');
    writeFileSpy = jest.spyOn(fs.promises, 'writeFile');
  };

  // Point tmpdir at a fake path and control whether target files "exist".
  const _configureSpies = (tmpDir: string, exists: boolean) => {
    mkdirpSyncSpy.mockImplementation(() => {});
    writeFileSpy.mockResolvedValue();
    tmpDirSpy.mockReturnValue(tmpDir);
    existsSyncSpy.mockReturnValue(exists);
  };

  const _restoreSpies = () => {
    tmpDirSpy.mockRestore();
    existsSyncSpy.mockRestore();
    writeFileSpy.mockRestore();
    mkdirpSyncSpy.mockRestore();
  };

  beforeEach(async () => {
    await globalBeforeEach();

    // Spies should be setup AFTER globalBeforeEach()
    _setupSpies();
  });

  afterEach(() => {
    _restoreSpies();
    jest.resetAllMocks();
  });

  describe('individual files', () => {
    it('can write individual file', async () => {
      // Arrange
      const w = await models.workspace.create();
      const pf = await models.protoFile.create({
        parentId: w._id,
        protoText: 'text',
      });

      const tmpDirPath = path.join('.', 'foo', 'bar', 'baz');
      _configureSpies(tmpDirPath, false); // file doesn't already exist

      // Act
      const result = await writeProtoFile(pf);

      // Assert: standalone files are named <id>.<modified>.proto under the
      // shared insomnia-grpc temp folder.
      const expectedDir = path.join(tmpDirPath, 'insomnia-grpc');
      const expectedFileName = `${pf._id}.${pf.modified}.proto`;
      const expectedFullPath = path.join(expectedDir, expectedFileName);

      expect(result.filePath).toEqual(expectedFileName);
      expect(result.dirs).toEqual([expectedDir]);

      expect(mkdirpSyncSpy).toHaveBeenCalledWith(expectedDir);
      expect(existsSyncSpy).toHaveBeenCalledWith(expectedFullPath);
      expect(writeFileSpy).toHaveBeenCalledWith(expectedFullPath, pf.protoText);
    });
    it('doesnt write individual file if it already exists', async () => {
      // Arrange
      const w = await models.workspace.create();
      const pf = await models.protoFile.create({
        parentId: w._id,
        protoText: 'text',
      });

      const tmpDirPath = path.join('.', 'foo', 'bar', 'baz');
      _configureSpies(tmpDirPath, true); // file already exists

      // Act
      const result = await writeProtoFile(pf);

      // Assert: same result shape, but the write is skipped because the
      // existence check returns true.
      const expectedDir = path.join(tmpDirPath, 'insomnia-grpc');
      const expectedFileName = `${pf._id}.${pf.modified}.proto`;
      const expectedFullPath = path.join(expectedDir, expectedFileName);

      expect(result.filePath).toEqual(expectedFileName);
      expect(result.dirs).toEqual([expectedDir]);

      expect(mkdirpSyncSpy).toHaveBeenCalledWith(expectedDir);
      expect(existsSyncSpy).toHaveBeenCalledWith(expectedFullPath);
      expect(writeFileSpy).not.toHaveBeenCalled();
    });
  });

  describe('nested files', () => {
    it('can write file contained in a single folder', async () => {
      // Arrange
      const w = await models.workspace.create();
      const pd = await models.protoDirectory.create({
        parentId: w._id,
        name: 'dirName',
      });
      const pf = await models.protoFile.create({
        parentId: pd._id,
        name: 'hello.proto',
        protoText: 'text',
      });

      const tmpDirPath = path.join('.', 'foo', 'bar', 'baz');
      _configureSpies(tmpDirPath, false); // file doesn't already exist

      // Act
      const result = await writeProtoFile(pf);

      // Assert: the whole tree is rooted under a folder versioned by the
      // root directory's id + modified timestamp; filePath is relative to it.
      const expectedRootDir = path.join(
        tmpDirPath,
        'insomnia-grpc',
        `${pd._id}.${pd.modified}`,
        pd.name,
      );
      const expectedFilePath = pf.name;
      const expectedFullPath = path.join(expectedRootDir, expectedFilePath);

      expect(result.filePath).toEqual(expectedFilePath);
      expect(result.dirs).toEqual([expectedRootDir]);

      expect(mkdirpSyncSpy).toHaveBeenCalledWith(expectedRootDir);
      expect(existsSyncSpy).toHaveBeenCalledWith(expectedFullPath);
      expect(writeFileSpy).toHaveBeenCalledWith(expectedFullPath, pf.protoText);
    });
    it('can write files contained in nested folders', async () => {
      // Arrange: rootDir/nestedDir tree with one file at each level. Writing
      // the nested file must materialize the entire tree.
      const w = await models.workspace.create();
      const pdRoot = await models.protoDirectory.create({
        parentId: w._id,
        name: 'rootDir',
      });
      const pdNested = await models.protoDirectory.create({
        parentId: pdRoot._id,
        name: 'nestedDir',
      });
      const pfRoot = await models.protoFile.create({
        parentId: pdRoot._id,
        name: 'root.proto',
        protoText: 'root',
      });
      const pfNested = await models.protoFile.create({
        parentId: pdNested._id,
        name: 'nested.proto',
        protoText: 'nested',
      });

      const tmpDirPath = path.join('.', 'foo', 'bar', 'baz');
      _configureSpies(tmpDirPath, false); // files don't already exist

      // Act
      const result = await writeProtoFile(pfNested);

      // Assert
      const expectedRootDir = path.join(
        tmpDirPath,
        'insomnia-grpc',
        `${pdRoot._id}.${pdRoot.modified}`,
        pdRoot.name,
      );
      const expectedNestedDir = path.join(expectedRootDir, pdNested.name);

      const expectedFilePath = {
        root: pfRoot.name,
        nested: path.join(pdNested.name, pfNested.name),
      };
      const expectedFullPath = {
        root: path.join(expectedRootDir, expectedFilePath.root),
        nested: path.join(expectedRootDir, expectedFilePath.nested),
      };

      expect(result.filePath).toEqual(expectedFilePath.nested);
      expect(result.dirs).toEqual([expectedRootDir, expectedNestedDir]);

      // Root folder should be created and written to
      expect(mkdirpSyncSpy).toHaveBeenCalledWith(expectedRootDir);
      expect(existsSyncSpy).toHaveBeenCalledWith(expectedFullPath.root);
      expect(writeFileSpy).toHaveBeenCalledWith(expectedFullPath.root, pfRoot.protoText);

      // Nested folder should be created and written to
      expect(mkdirpSyncSpy).toHaveBeenCalledWith(expectedNestedDir);
      expect(existsSyncSpy).toHaveBeenCalledWith(expectedFullPath.nested);
      expect(writeFileSpy).toHaveBeenCalledWith(expectedFullPath.nested, pfNested.protoText);
    });
    it('should not write file if it already exists', async () => {
      // Arrange
      const w = await models.workspace.create();
      const pdRoot = await models.protoDirectory.create({
        parentId: w._id,
        name: 'rootDir',
      });
      const pdNested = await models.protoDirectory.create({
        parentId: pdRoot._id,
        name: 'nestedDir',
      });
      const pfRoot = await models.protoFile.create({
        parentId: pdRoot._id,
        name: 'root.proto',
        protoText: 'root',
      });
      const pfNested = await models.protoFile.create({
        parentId: pdNested._id,
        name: 'nested.proto',
        protoText: 'nested',
      });

      const tmpDirPath = path.join('.', 'foo', 'bar', 'baz');
      _configureSpies(tmpDirPath, true); // files already exists

      // Act
      const result = await writeProtoFile(pfNested);

      // Assert: directories are still created and existence is still checked,
      // but no file writes happen.
      const expectedRootDir = path.join(
        tmpDirPath,
        'insomnia-grpc',
        `${pdRoot._id}.${pdRoot.modified}`,
        pdRoot.name,
      );
      const expectedNestedDir = path.join(expectedRootDir, pdNested.name);

      const expectedFilePath = {
        root: pfRoot.name,
        nested: path.join(pdNested.name, pfNested.name),
      };
      const expectedFullPath = {
        root: path.join(expectedRootDir, expectedFilePath.root),
        nested: path.join(expectedRootDir, expectedFilePath.nested),
      };

      expect(result.filePath).toEqual(expectedFilePath.nested);
      expect(result.dirs).toEqual([expectedRootDir, expectedNestedDir]);

      expect(mkdirpSyncSpy).toHaveBeenCalledWith(expectedRootDir);
      expect(existsSyncSpy).toHaveBeenCalledWith(expectedFullPath.root);

      expect(mkdirpSyncSpy).toHaveBeenCalledWith(expectedNestedDir);
      expect(existsSyncSpy).toHaveBeenCalledWith(expectedFullPath.nested);

      expect(writeFileSpy).not.toHaveBeenCalled();
    });
  });
});
|
@ -1,8 +1,8 @@
|
||||
// @flow
|
||||
|
||||
import type { GrpcMethodDefinition } from './method';
|
||||
import type { GrpcMethodDefinition } from '../method';
|
||||
import * as protoLoader from '@grpc/proto-loader';
|
||||
import * as models from '../../models';
|
||||
import * as models from '../../../models';
|
||||
import writeProtoFile from './write-proto-file';
|
||||
|
||||
const GRPC_LOADER_OPTIONS = {
|
||||
@ -18,8 +18,6 @@ const isServiceDefinition = (obj: Object) => !isTypeOrEnumDefinition(obj);
|
||||
|
||||
// TODO: The file path for protoLoader.load can also be a URL, so we can avoid
|
||||
// writing to a file in those cases, but it becomes more important to cache
|
||||
// We also need to think about how to store a reference to a proto file and it's
|
||||
// implications on import/export/sync - INS-271
|
||||
export const loadMethods = async (
|
||||
protoFile: ProtoFile | undefined,
|
||||
): Promise<Array<GrpcMethodDefinition>> => {
|
||||
@ -27,12 +25,18 @@ export const loadMethods = async (
|
||||
return [];
|
||||
}
|
||||
|
||||
return await loadMethodsFromText(protoFile.protoText);
|
||||
const { filePath, dirs } = await writeProtoFile(protoFile);
|
||||
return await loadMethodsFromPath(filePath, dirs);
|
||||
};
|
||||
|
||||
export const loadMethodsFromText = async (text: string): Promise<Array<GrpcMethodDefinition>> => {
|
||||
const tempProtoFile = await writeProtoFile(text);
|
||||
const definition = await protoLoader.load(tempProtoFile, GRPC_LOADER_OPTIONS);
|
||||
export const loadMethodsFromPath = async (
|
||||
filePath: string,
|
||||
includeDirs?: Array<string>,
|
||||
): Promise<Array<GrpcMethodDefinition>> => {
|
||||
const definition = await protoLoader.load(filePath, {
|
||||
...GRPC_LOADER_OPTIONS,
|
||||
includeDirs,
|
||||
});
|
||||
|
||||
return Object.values(definition)
|
||||
.filter(isServiceDefinition)
|
// @flow
// Materializes proto file documents from the database onto disk so that
// @grpc/proto-loader (which only reads file paths) can parse them. Files that
// belong to a ProtoDirectory are written as a whole tree (so relative imports
// between them resolve); standalone files are written individually.
import path from 'path';
import os from 'os';
import mkdirp from 'mkdirp';
import fs from 'fs';
import type { ProtoFile } from '../../../models/proto-file';
import type { ProtoDirectory } from '../../../models/proto-directory';
import * as db from '../../../common/database';
import * as models from '../../../models';
import { isProtoDirectory, isProtoFile, isWorkspace } from '../../../models/helpers/is-model';
import type { BaseModel } from '../../../models';
import type { Workspace } from '../../../models/workspace';

// Temp names embed the modified timestamp, so a changed document gets a new
// on-disk path and the "already exists" check acts as a cache-invalidation key.
const getProtoTempFileName = ({ _id, modified }: ProtoFile): string => `${_id}.${modified}.proto`;
const getProtoTempDirectoryName = ({ _id, modified }: ProtoDirectory): string =>
  `${_id}.${modified}`;

type WriteResult = {
  filePath: string, // path of the requested file, relative to the written root dir(s)
  dirs: Array<string>, // every directory created/used (passed as includeDirs to the loader)
};

// Write a standalone (directory-less) proto file into the shared temp folder.
const writeIndividualProtoFile = async (protoFile: ProtoFile): Promise<WriteResult> => {
  // Create temp folder
  const rootDir = path.join(os.tmpdir(), 'insomnia-grpc');
  mkdirp.sync(rootDir);

  const filePath = getProtoTempFileName(protoFile);
  const result = { filePath, dirs: [rootDir] };

  // Check if file already exists (same id + modified ⇒ content unchanged)
  const fullPath = path.join(rootDir, filePath);
  if (fs.existsSync(fullPath)) {
    return result;
  }

  // Write file
  await fs.promises.writeFile(fullPath, protoFile.protoText);
  return result;
};

// Write one file inside an already-created tree directory, skipping if present.
const writeNestedProtoFile = async (protoFile: ProtoFile, dirPath: string): Promise<void> => {
  // Check if file already exists
  const fullPath = path.join(dirPath, protoFile.name);
  if (fs.existsSync(fullPath)) {
    return;
  }

  // Write file
  await fs.promises.writeFile(fullPath, protoFile.protoText);
};

// Write the entire directory tree that contains the requested file; returns
// every directory path created (root first).
const writeProtoFileTree = async (
  ancestors: Array<ProtoDirectory | Workspace>,
): Promise<Array<string>> => {
  // Find the ancestor workspace
  const ancestorWorkspace = ancestors.find(isWorkspace);

  // Find the root ancestor directory (the directory directly under the workspace)
  const rootAncestorProtoDirectory = ancestors.find(
    c => isProtoDirectory(c) && c.parentId === ancestorWorkspace._id,
  );

  // Find all descendants of the root ancestor directory
  const descendants = await db.withDescendants(rootAncestorProtoDirectory);

  // Recursively write the root ancestor directory children
  const tempDirPath = path.join(
    os.tmpdir(),
    'insomnia-grpc',
    getProtoTempDirectoryName(rootAncestorProtoDirectory),
  );

  const dirs = await recursiveWriteProtoDirectory(
    rootAncestorProtoDirectory,
    descendants,
    tempDirPath,
  );

  return dirs;
};

// Depth-first write of a directory: its files first, then each subdirectory.
// Returns this directory's path followed by all created subdirectory paths.
const recursiveWriteProtoDirectory = async (
  dir: ProtoDirectory,
  descendants: Array<BaseModel>,
  currentDirPath: string,
): Promise<Array<string>> => {
  // Increment folder path
  const dirPath = path.join(currentDirPath, dir.name);
  mkdirp.sync(dirPath);

  // Get and write proto files
  const files = descendants.filter(f => isProtoFile(f) && f.parentId === dir._id);
  await Promise.all(files.map(f => writeNestedProtoFile(f, dirPath)));

  // Get and write subdirectories
  const subDirs = descendants.filter(f => isProtoDirectory(f) && f.parentId === dir._id);
  const createdDirs = await Promise.all(
    subDirs.map(f => recursiveWriteProtoDirectory(f, descendants, dirPath)),
  );

  return [dirPath, ...createdDirs.flat()];
};

// Entry point: write protoFile (and, if it lives in a directory, its whole
// tree) to a temp location and report where it landed.
const writeProtoFile = async (protoFile: ProtoFile): Promise<WriteResult> => {
  // Find all ancestors (proto directories up to the owning workspace).
  // NOTE(review): assumes db.withAncestors returns them nearest-first —
  // the .reverse().slice(1) below depends on that ordering; confirm.
  const ancestors = await db.withAncestors(protoFile, [
    models.protoDirectory.type,
    models.workspace.type,
  ]);

  const ancestorDirectories = ancestors.filter(isProtoDirectory);

  // Is this file part of a directory?
  if (ancestorDirectories.length) {
    // Write proto file tree from root directory
    const treeRootDirs = await writeProtoFileTree(ancestors);
    // Get all ancestor directories excluding the root (ignore the first entry after reversing the array)
    const subDirs = ancestorDirectories
      .map(f => f.name)
      .reverse()
      .slice(1);
    const filePath = path.join(...subDirs, protoFile.name);
    return { filePath, dirs: treeRootDirs };
  } else {
    // Write single file
    return writeIndividualProtoFile(protoFile);
  }
};

export default writeProtoFile;
@ -0,0 +1,283 @@
|
||||
// @flow
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { globalBeforeEach } from '../../../../__jest__/before-each';
|
||||
import selectFileOrFolder from '../../../../common/select-file-or-folder';
|
||||
import * as protoManager from '../index';
|
||||
import * as protoLoader from '../../proto-loader';
|
||||
import * as models from '../../../../models';
|
||||
import * as modals from '../../../../ui/components/modals';
|
||||
import * as db from '../../../../common/database';
|
||||
|
||||
jest.mock('../../../../common/select-file-or-folder', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('../../../../ui/components/modals');
|
||||
jest.mock('../../proto-loader');
|
||||
|
||||
describe('protoManager', () => {
|
||||
const selectFileOrFolderMock: JestMockFn<*, *> = selectFileOrFolder;
|
||||
|
||||
beforeEach(() => {
|
||||
globalBeforeEach();
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
// Tests for protoManager.addFile: file selection, validation, and db creation.
describe('addFile', () => {
  it('should not create database entry if file loading canceled', async () => {
    // Arrange
    const cbMock = jest.fn();
    const w = await models.workspace.create();
    // Simulate the user dismissing the file picker
    selectFileOrFolderMock.mockResolvedValue({ canceled: true });

    // Act
    await protoManager.addFile(w._id, cbMock);

    // Assert
    expect(cbMock).not.toHaveBeenCalled();
    const pf = await models.protoFile.getByParentId(w._id);
    expect(pf).toBeNull();

    // The picker must be restricted to .proto files
    expect(selectFileOrFolderMock).toHaveBeenCalledWith({
      itemTypes: ['file'],
      extensions: ['proto'],
    });
  });

  it('should not create database entry if file loading throws error', async () => {
    // Arrange
    const cbMock = jest.fn();
    const w = await models.workspace.create();
    const error = new Error();
    selectFileOrFolderMock.mockRejectedValue(error);

    // Act
    await protoManager.addFile(w._id, cbMock);

    // Assert
    expect(cbMock).not.toHaveBeenCalled();
    const pf = await models.protoFile.getByParentId(w._id);
    expect(pf).toBeNull();

    // Errors are surfaced to the user via the error modal
    expect(modals.showError).toHaveBeenCalledWith({ error });
  });

  it('should not create database entry if methods cannot be parsed', async () => {
    // Arrange
    const cbMock = jest.fn();
    const w = await models.workspace.create();
    const error = new Error();
    const filePath = 'path';
    selectFileOrFolderMock.mockResolvedValue({ filePath });
    // Proto validation fails for the selected file
    protoLoader.loadMethodsFromPath.mockRejectedValue(error);

    // Act
    await protoManager.addFile(w._id, cbMock);

    // Assert
    expect(cbMock).not.toHaveBeenCalled();
    const pf = await models.protoFile.getByParentId(w._id);
    expect(pf).toBeNull();

    // NOTE(review): message wording asserted verbatim against the implementation
    expect(modals.showError).toHaveBeenCalledWith({
      title: 'Invalid Proto File',
      message: `The file ${filePath} and could not be parsed`,
      error,
    });
  });

  it('should create database entry', async () => {
    // Arrange
    const cbMock = jest.fn();
    const w = await models.workspace.create();
    const filePath = 'filename.proto';
    selectFileOrFolderMock.mockResolvedValue({ filePath });
    protoLoader.loadMethodsFromPath.mockResolvedValue();

    // Stub disk access; addFile reads the selected file's contents from disk
    const fsReadFileSpy = jest.spyOn(fs.promises, 'readFile');
    const contents = 'contents';
    fsReadFileSpy.mockResolvedValue(contents);

    // Act
    await protoManager.addFile(w._id, cbMock);

    // Assert
    const pf = await models.protoFile.getByParentId(w._id);
    expect(cbMock).toHaveBeenCalledWith(pf._id);

    expect(pf.name).toBe(filePath);
    expect(pf.protoText).toBe(contents);
  });
});
|
||||
|
||||
// Tests for protoManager.updateFile: re-uploading replaces name and contents in place.
describe('updateFile', () => {
  it('should update database entry', async () => {
    // Arrange
    const cbMock = jest.fn();
    const w = await models.workspace.create();
    const pf = await models.protoFile.create({ parentId: w._id });

    const filePath = 'filename.proto';
    selectFileOrFolderMock.mockResolvedValue({ filePath });
    protoLoader.loadMethodsFromPath.mockResolvedValue();

    // Stub disk access for the replacement file's contents
    const fsReadFileSpy = jest.spyOn(fs.promises, 'readFile');
    const contents = 'contents';
    fsReadFileSpy.mockResolvedValue(contents);

    // Act
    await protoManager.updateFile(pf, cbMock);

    // Assert
    expect(cbMock).toHaveBeenCalledWith(pf._id);

    const updatedPf = await models.protoFile.getById(pf._id);
    expect(updatedPf.name).toBe(filePath);
    expect(updatedPf.protoText).toBe(contents);
  });
});
|
||||
|
||||
// Tests for protoManager.renameFile: only the name changes, no file I/O involved.
describe('renameFile', () => {
  it('should rename the file', async () => {
    // Arrange
    const w = await models.workspace.create();
    const pf = await models.protoFile.create({ parentId: w._id, name: 'original' });

    // Act
    const updatedName = 'updated';
    await protoManager.renameFile(pf, updatedName);

    // Assert
    const updatedPf = await models.protoFile.getById(pf._id);
    expect(updatedPf.name).toBe(updatedName);
  });
});
|
||||
|
||||
// Tests for protoManager.deleteFile: deletion is gated behind a confirmation alert.
describe('deleteFile', () => {
  it('should alert the user before deleting a file', async () => {
    // Arrange
    const w = await models.workspace.create();
    const pf = await models.protoFile.create({ parentId: w._id, name: 'pfName.proto' });
    const cbMock = jest.fn();

    // Act
    await protoManager.deleteFile(pf, cbMock);
    // Deletion only happens inside the alert's onConfirm, so invoke it manually
    const showAlertCallArg = (modals.showAlert: JestMockFn).mock.calls[0][0];
    expect(showAlertCallArg.title).toBe('Delete pfName.proto');
    await showAlertCallArg.onConfirm();

    // Assert
    expect(cbMock).toHaveBeenCalledWith(pf._id);
    await expect(models.protoFile.getById(pf._id)).resolves.toBeNull();
  });
});
|
||||
|
||||
// Tests for protoManager.deleteDirectory: confirmed deletion removes the whole subtree.
describe('deleteDirectory', () => {
  it('should alert the user before deleting a directory', async () => {
    // Arrange: a directory with two proto files nested under it
    const w = await models.workspace.create();
    const pd = await models.protoDirectory.create({ parentId: w._id, name: 'pdName' });
    const pf1 = await models.protoFile.create({ parentId: pd._id, name: 'pfName1.proto' });
    const pf2 = await models.protoFile.create({ parentId: pd._id, name: 'pfName2.proto' });

    const cbMock = jest.fn();

    // Act
    await protoManager.deleteDirectory(pd, cbMock);
    // Deletion only happens inside the alert's onConfirm, so invoke it manually
    const showAlertCallArg = (modals.showAlert: JestMockFn).mock.calls[0][0];
    expect(showAlertCallArg.title).toBe('Delete pdName');
    await showAlertCallArg.onConfirm();

    // Assert: callback receives ids of the removed descendants
    expect(cbMock).toHaveBeenCalledWith(expect.arrayContaining([pf1._id, pf2._id]));
    await expect(models.protoDirectory.getById(pd._id)).resolves.toBeNull();
    await expect(models.protoFile.getById(pf1._id)).resolves.toBeNull();
    await expect(models.protoFile.getById(pf2._id)).resolves.toBeNull();
  });
});
|
||||
|
||||
// Tests for protoManager.addDirectory: directory ingestion with buffered db writes.
describe('addDirectory', () => {
  let dbBufferChangesSpy: * | JestMockFn<*, *>;
  let dbFlushChangesSpy: * | JestMockFn<*, *>;

  beforeEach(() => {
    dbBufferChangesSpy = jest.spyOn(db, 'bufferChanges');
    dbFlushChangesSpy = jest.spyOn(db, 'flushChanges');
  });

  // Every code path through addDirectory must buffer and then flush db changes
  afterEach(() => {
    expect(dbBufferChangesSpy).toHaveBeenCalled();
    expect(dbFlushChangesSpy).toHaveBeenCalled();
  });

  it('should not create database entries if loading canceled', async () => {
    // Arrange
    const w = await models.workspace.create();
    selectFileOrFolderMock.mockResolvedValue({ canceled: true });

    // Act
    await protoManager.addDirectory(w._id);

    // Assert
    await expect(models.protoDirectory.all()).resolves.toHaveLength(0);
    await expect(models.protoFile.all()).resolves.toHaveLength(0);
  });

  it('should not create database entry if file loading throws error', async () => {
    // Arrange
    const w = await models.workspace.create();
    const error = new Error();
    selectFileOrFolderMock.mockRejectedValue(error);

    // Act
    await protoManager.addDirectory(w._id);

    // Assert
    await expect(models.protoDirectory.all()).resolves.toHaveLength(0);
    await expect(models.protoFile.all()).resolves.toHaveLength(0);

    expect(modals.showError).toHaveBeenCalledWith({ error });
  });

  it('should show alert if no directory was created', async () => {
    // Arrange: fixture directory that contains no .proto files
    const w = await models.workspace.create();
    const filePath = path.join(__dirname, '../../__fixtures__/', 'library', 'empty');
    selectFileOrFolderMock.mockResolvedValue({ filePath });

    // Act
    await protoManager.addDirectory(w._id);

    // Assert
    await expect(models.protoDirectory.all()).resolves.toHaveLength(0);
    await expect(models.protoFile.all()).resolves.toHaveLength(0);

    expect(modals.showAlert).toHaveBeenCalledWith({
      title: 'No files found',
      message: `No .proto files were found under ${filePath}.`,
    });
  });

  it('should create database entries', async () => {
    // Arrange: fixture library with nested directories and proto files
    const w = await models.workspace.create();
    const filePath = path.join(__dirname, '../../__fixtures__/', 'library');
    selectFileOrFolderMock.mockResolvedValue({ filePath });

    // Act
    await protoManager.addDirectory(w._id);

    // Assert
    await expect(models.protoDirectory.all()).resolves.toHaveLength(3);
    await expect(models.protoFile.all()).resolves.toHaveLength(3);

    // Each individual entry is not validated here because it is
    // too involved to mock everything, and an integration test exists
    // which uses this code path. As long as the expected number of
    // entities are loaded from the fixture directory, this test is sufficient.
  });
});
|
||||
});
|
@ -0,0 +1,61 @@
|
||||
// @flow
|
||||
import * as models from '../../../../models';
|
||||
import ingestProtoDirectory from '../ingest-proto-directory';
|
||||
import path from 'path';
|
||||
import { globalBeforeEach } from '../../../../__jest__/before-each';
|
||||
|
||||
// Tests for ingestProtoDirectory: mirrors a filesystem tree of .proto files
// into protoDirectory/protoFile documents.
describe('ingestProtoDirectory', () => {
  beforeEach(globalBeforeEach);

  it.each(['does-not-exist', 'empty', 'resources'])(
    'should return null if loading directory __fixtures__/%s',
    async dir => {
      // Arrange
      const w = await models.workspace.create();
      const dirToIngest = path.join(__dirname, '../../__fixtures__', 'library', dir);

      // Act
      const result = await ingestProtoDirectory(dirToIngest, w._id);

      // Assert
      expect(result).toBe(null);
      // FIX: `.resolves` assertions must be awaited, otherwise a failing
      // expectation is silently swallowed and the test passes regardless.
      await expect(models.protoDirectory.all()).resolves.toHaveLength(0);
      await expect(models.protoFile.all()).resolves.toHaveLength(0);
    },
  );

  it('should read all proto files in nested directories', async () => {
    // Arrange
    const w = await models.workspace.create();
    const dirToIngest = path.join(__dirname, '../../__fixtures__', 'library');

    // Act
    const result = await ingestProtoDirectory(dirToIngest, w._id);

    // Assert
    expect(result).toStrictEqual(expect.objectContaining({ name: 'library', parentId: w._id }));
    // FIX: await the `.resolves` assertions (see above)
    await expect(models.protoDirectory.all()).resolves.toHaveLength(3);
    await expect(models.protoFile.all()).resolves.toHaveLength(3);

    // Ensure ingested tree structure is correct
    const libraryFolder = await models.protoDirectory.getByParentId(w._id);
    expect(libraryFolder.name).toBe('library');

    const protos = await models.protoFile.findByParentId(libraryFolder._id);
    expect(protos).toStrictEqual(
      expect.arrayContaining([
        expect.objectContaining({ name: 'hello.proto' }),
        expect.objectContaining({ name: 'root.proto' }),
      ]),
    );

    const nestedFolder = await models.protoDirectory.getByParentId(libraryFolder._id);
    expect(nestedFolder.name).toBe('nested');

    const timeFolder = await models.protoDirectory.getByParentId(nestedFolder._id);
    expect(timeFolder.name).toBe('time');

    const timeProto = await models.protoFile.getByParentId(timeFolder._id);
    expect(timeProto.name).toBe('time.proto');
  });
});
|
148
packages/insomnia-app/app/network/grpc/proto-manager/index.js
Normal file
148
packages/insomnia-app/app/network/grpc/proto-manager/index.js
Normal file
@ -0,0 +1,148 @@
|
||||
// @flow
|
||||
|
||||
import type { ProtoFile } from '../../../models/proto-file';
|
||||
import { showAlert, showError } from '../../../ui/components/modals';
|
||||
import * as models from '../../../models';
|
||||
import React from 'react';
|
||||
import type { ProtoDirectory } from '../../../models/proto-directory';
|
||||
import * as db from '../../../common/database';
|
||||
import selectFileOrFolder from '../../../common/select-file-or-folder';
|
||||
import ingestProtoDirectory from './ingest-proto-directory';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import * as protoLoader from '../proto-loader';
|
||||
|
||||
/**
 * Ask the user to confirm, then delete the given protoFile document.
 * `callback` receives the deleted document's id, and only fires after the
 * user confirms the alert.
 */
export async function deleteFile(protoFile: ProtoFile, callback: string => void): Promise<void> {
  showAlert({
    title: `Delete ${protoFile.name}`,
    message: (
      <span>
        Really delete <strong>{protoFile.name}</strong>? All requests that use this proto file will
        stop working.
      </span>
    ),
    addCancel: true,
    // Removal and notification happen only on explicit confirmation
    onConfirm: async () => {
      await models.protoFile.remove(protoFile);
      callback(protoFile._id);
    },
  });
}
|
||||
|
||||
/**
 * Ask the user to confirm, then delete the given protoDirectory.
 * `callback` receives the ids of the directory's descendant documents so
 * callers can react (e.g. clear selections).
 */
export async function deleteDirectory(
  protoDirectory: ProtoDirectory,
  callback: (Array<string>) => void,
): Promise<void> {
  showAlert({
    title: `Delete ${protoDirectory.name}`,
    message: (
      <span>
        Really delete <strong>{protoDirectory.name}</strong> and all proto files contained within?
        All requests that use these proto files will stop working.
      </span>
    ),
    addCancel: true,
    onConfirm: async () => {
      // Capture the descendant tree BEFORE removing so their ids can be reported
      const descendant = await db.withDescendants(protoDirectory);
      // NOTE(review): assumes remove() cascades to descendants — confirm in db layer
      await models.protoDirectory.remove(protoDirectory);

      callback(descendant.map(c => c._id));
    },
  });
}
|
||||
|
||||
/**
 * Prompt the user for a directory and ingest every .proto file found beneath
 * it into the given workspace. All database writes are buffered and rolled
 * back if anything throws, so a failed import leaves no partial tree behind.
 */
export async function addDirectory(workspaceId: string): Promise<void> {
  // Buffer db changes so the whole import is applied (or discarded) atomically
  const bufferId = await db.bufferChanges();
  let rollback = false;
  try {
    // Select file
    const { filePath, canceled } = await selectFileOrFolder({
      itemTypes: ['directory'],
      extensions: ['proto'],
    });

    // Exit if no file selected
    if (canceled || !filePath) {
      return;
    }

    const createdDir = await ingestProtoDirectory(filePath, workspaceId);

    // Show warning if no files found
    if (!createdDir) {
      showAlert({
        title: 'No files found',
        message: `No .proto files were found under ${filePath}.`,
      });
    }
    // TODO: validate all of the imported proto files
  } catch (e) {
    // Any failure discards the buffered writes and surfaces the error
    rollback = true;
    showError({ error: e });
  } finally {
    // flushChanges either commits or (when rollback is true) drops the buffer
    await db.flushChanges(bufferId, rollback);
  }
}
|
||||
|
||||
async function _readFile(): Promise<{ fileName: string, fileContents: string } | null> {
|
||||
try {
|
||||
// Select file
|
||||
const { filePath, canceled } = await selectFileOrFolder({
|
||||
itemTypes: ['file'],
|
||||
extensions: ['proto'],
|
||||
});
|
||||
|
||||
// Exit if no file selected
|
||||
if (canceled || !filePath) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Try parse proto file to make sure the file is valid
|
||||
try {
|
||||
await protoLoader.loadMethodsFromPath(filePath);
|
||||
} catch (e) {
|
||||
showError({
|
||||
title: 'Invalid Proto File',
|
||||
message: `The file ${filePath} and could not be parsed`,
|
||||
error: e,
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Read contents
|
||||
const contents = await fs.promises.readFile(filePath, 'utf-8');
|
||||
const name = path.basename(filePath);
|
||||
|
||||
return { fileName: name, fileContents: contents };
|
||||
} catch (e) {
|
||||
showError({ error: e });
|
||||
}
|
||||
}
|
||||
|
||||
export async function addFile(workspaceId: string, callback: string => void): Promise<void> {
|
||||
const result = await _readFile();
|
||||
if (result) {
|
||||
const newFile = await models.protoFile.create({
|
||||
name: result.fileName,
|
||||
parentId: workspaceId,
|
||||
protoText: result.fileContents,
|
||||
});
|
||||
callback(newFile._id);
|
||||
}
|
||||
}
|
||||
|
||||
export async function updateFile(protoFile: ProtoFile, callback: string => void): Promise<void> {
|
||||
const result = await _readFile();
|
||||
if (result) {
|
||||
const updatedFile = await models.protoFile.update(protoFile, {
|
||||
name: result.fileName,
|
||||
protoText: result.fileContents,
|
||||
});
|
||||
callback(updatedFile._id);
|
||||
}
|
||||
}
|
||||
|
||||
export async function renameFile(protoFile: ProtoFile, name: string): Promise<void> {
|
||||
await models.protoFile.update(protoFile, { name });
|
||||
}
|
@ -0,0 +1,70 @@
|
||||
// @flow
|
||||
|
||||
import * as models from '../../../models';
|
||||
import type { ProtoDirectory } from '../../../models/proto-directory';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
|
||||
const _parseDir = async (entryPath: string, dirParentId: string): Promise<boolean> => {
|
||||
const result = await ingestProtoDirectory(entryPath, dirParentId);
|
||||
return Boolean(result);
|
||||
};
|
||||
|
||||
const _parseFile = async (entryPath: string, dirParentId: string): Promise<boolean> => {
|
||||
const extension = path.extname(entryPath);
|
||||
|
||||
// Ignore if not a .proto file
|
||||
if (extension !== '.proto') {
|
||||
return false;
|
||||
}
|
||||
|
||||
const contents = await fs.promises.readFile(entryPath, 'utf-8');
|
||||
const name = path.basename(entryPath);
|
||||
|
||||
await models.protoFile.create({
|
||||
name,
|
||||
parentId: dirParentId,
|
||||
protoText: contents,
|
||||
});
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
/**
 * Recursively mirror a filesystem directory into protoDirectory/protoFile
 * documents under `dirParentId`.
 *
 * Returns the created protoDirectory, or null when the path does not exist or
 * the subtree contains no .proto files (in which case the speculatively
 * created directory document is removed again).
 */
const ingestProtoDirectory = async (
  dirPath: string,
  dirParentId: string,
): Promise<ProtoDirectory | null> => {
  // Check exists
  if (!fs.existsSync(dirPath)) {
    return null;
  }

  // Create dir in database first (it is deleted below if nothing is found)
  const createdProtoDir = await models.protoDirectory.create({
    name: path.basename(dirPath),
    parentId: dirParentId,
  });

  // Read contents
  const entries = await fs.promises.readdir(dirPath, { withFileTypes: true });

  // Loop and read all entries: directories recurse, files are filtered to .proto
  const parsePromises: Array<Promise<boolean>> = entries.map(entry => {
    const fullEntryPath = path.resolve(dirPath, entry.name);
    return entry.isDirectory()
      ? _parseDir(fullEntryPath, createdProtoDir._id)
      : _parseFile(fullEntryPath, createdProtoDir._id);
  });

  const filesFound = await Promise.all(parsePromises);

  // Delete the directory if no .proto file is found in the tree
  if (!filesFound.some(c => c)) {
    await models.protoDirectory.remove(createdProtoDir);
    return null;
  }

  return createdProtoDir;
};

export default ingestProtoDirectory;
|
@ -1,15 +0,0 @@
|
||||
// @flow
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import mkdirp from 'mkdirp';
|
||||
import fs from 'fs';
|
||||
|
||||
const writeProtoFile = async (src: string): Promise<string> => {
|
||||
const root = path.join(os.tmpdir(), 'insomnia-grpc');
|
||||
mkdirp.sync(root);
|
||||
const p = path.join(root, `${Math.random()}.proto`);
|
||||
await fs.promises.writeFile(p, src);
|
||||
return p;
|
||||
};
|
||||
|
||||
export default writeProtoFile;
|
@ -0,0 +1,9 @@
|
||||
// eslint-disable-next-line filenames/match-exported
const modals = jest.requireActual('../index');

// Stub every modal-triggering helper so tests can assert on how it was called
for (const fnName of ['showError', 'showAlert', 'showPrompt', 'showModal']) {
  modals[fnName] = jest.fn();
}

module.exports = modals;
|
@ -44,7 +44,7 @@ export function hideAllModals() {
|
||||
}
|
||||
|
||||
function _getModal(modalCls) {
|
||||
const m = modals[modalCls.name];
|
||||
const m = modals[modalCls.name || modalCls.WrappedComponent?.name];
|
||||
if (!m) {
|
||||
throw new Error('Modal was not registered with the app');
|
||||
}
|
||||
|
@ -1,6 +1,5 @@
|
||||
// @flow
|
||||
import * as React from 'react';
|
||||
import * as models from '../../../models';
|
||||
import type { ProtoFile } from '../../../models/proto-file';
|
||||
import ModalHeader from '../base/modal-header';
|
||||
import ModalBody from '../base/modal-body';
|
||||
@ -9,20 +8,20 @@ import autobind from 'autobind-decorator';
|
||||
import type { Workspace } from '../../../models/workspace';
|
||||
import Modal from '../base/modal';
|
||||
import ProtoFileList from '../proto-file/proto-file-list';
|
||||
import { showAlert, showError } from './index';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import selectFileOrFolder from '../../../common/select-file-or-folder';
|
||||
import { AsyncButton } from 'insomnia-components';
|
||||
import type { GrpcDispatch } from '../../context/grpc';
|
||||
import { grpcActions, sendGrpcIpcMultiple } from '../../context/grpc';
|
||||
import { GrpcRequestEventEnum } from '../../../common/grpc-events';
|
||||
import * as protoLoader from '../../../network/grpc/proto-loader';
|
||||
import { connect } from 'react-redux';
|
||||
import type { ExpandedProtoDirectory } from '../../redux/proto-selectors';
|
||||
import { selectExpandedActiveProtoDirectories } from '../../redux/proto-selectors';
|
||||
import type { ProtoDirectory } from '../../../models/proto-directory';
|
||||
import * as protoManager from '../../../network/grpc/proto-manager';
|
||||
|
||||
type Props = {|
|
||||
grpcDispatch: GrpcDispatch,
|
||||
workspace: Workspace,
|
||||
protoFiles: Array<ProtoFile>,
|
||||
protoDirectories: Array<ExpandedProtoDirectory>,
|
||||
|};
|
||||
|
||||
type State = {|
|
||||
@ -80,85 +79,50 @@ class ProtoFilesModal extends React.PureComponent<Props, State> {
|
||||
this.setState({ selectedProtoFileId: id });
|
||||
}
|
||||
|
||||
async _handleDelete(protoFile: ProtoFile) {
|
||||
showAlert({
|
||||
title: `Delete ${protoFile.name}`,
|
||||
message: (
|
||||
<span>
|
||||
Really delete <strong>{protoFile.name}</strong>? All requests that use this proto file
|
||||
will stop working.
|
||||
</span>
|
||||
),
|
||||
addCancel: true,
|
||||
onConfirm: async () => {
|
||||
await models.protoFile.remove(protoFile);
|
||||
// Delegate file deletion (with confirmation) to protoManager.
_handleDeleteFile(protoFile: ProtoFile): Promise<void> {
  return protoManager.deleteFile(protoFile, deletedId => {
    // if the deleted protoFile was previously selected, clear the selection
    if (this.state.selectedProtoFileId === deletedId) {
      this.setState({ selectedProtoFileId: '' });
    }
  });
}
|
||||
|
||||
// if the deleted protoFile was previously selected, clear the selection
|
||||
if (this.state.selectedProtoFileId === protoFile._id) {
|
||||
this.setState({ selectedProtoFileId: '' });
|
||||
}
|
||||
},
|
||||
_handleDeleteDirectory(protoDirectory: ProtoDirectory): Promise<void> {
|
||||
return protoManager.deleteDirectory(protoDirectory, deletedIds => {
|
||||
// if previously selected protoFile has been deleted, clear the selection
|
||||
if (deletedIds.contains(this.state.selectedProtoFileId)) {
|
||||
this.setState({ selectedProtoFileId: '' });
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
_handleAdd(): Promise<void> {
|
||||
return this._handleUpload();
|
||||
return protoManager.addFile(this.props.workspace._id, createdId => {
|
||||
this.setState({ selectedProtoFileId: createdId });
|
||||
});
|
||||
}
|
||||
|
||||
async _handleUpload(protoFile?: ProtoFile): Promise<void> {
|
||||
const { workspace, grpcDispatch } = this.props;
|
||||
_handleUpload(protoFile: ProtoFile): Promise<void> {
|
||||
const { grpcDispatch } = this.props;
|
||||
|
||||
try {
|
||||
// Select file
|
||||
const { filePath, canceled } = await selectFileOrFolder({
|
||||
itemTypes: ['file'],
|
||||
extensions: ['proto'],
|
||||
});
|
||||
|
||||
// Exit if no file selected
|
||||
if (canceled || !filePath) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Read contents
|
||||
const protoText = fs.readFileSync(filePath, 'utf-8');
|
||||
const name = path.basename(filePath);
|
||||
|
||||
// Try parse proto file to make sure the file is valid
|
||||
try {
|
||||
await protoLoader.loadMethodsFromText(protoText);
|
||||
} catch (e) {
|
||||
showError({
|
||||
title: 'Invalid Proto File',
|
||||
message: `The file ${filePath} and could not be parsed`,
|
||||
error: e,
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Create or update a protoFile
|
||||
if (protoFile) {
|
||||
await models.protoFile.update(protoFile, { name, protoText });
|
||||
const action = await grpcActions.invalidateMany(protoFile._id);
|
||||
|
||||
grpcDispatch(action);
|
||||
sendGrpcIpcMultiple(GrpcRequestEventEnum.cancelMultiple, action?.requestIds);
|
||||
} else {
|
||||
const newFile = await models.protoFile.create({ name, parentId: workspace._id, protoText });
|
||||
this.setState({ selectedProtoFileId: newFile._id });
|
||||
}
|
||||
} catch (e) {
|
||||
showError({ error: e });
|
||||
}
|
||||
return protoManager.updateFile(protoFile, async updatedId => {
|
||||
const action = await grpcActions.invalidateMany(updatedId);
|
||||
grpcDispatch(action);
|
||||
sendGrpcIpcMultiple(GrpcRequestEventEnum.cancelMultiple, action?.requestIds);
|
||||
});
|
||||
}
|
||||
|
||||
async _handleRename(protoFile: ProtoFile, name: string): Promise<void> {
|
||||
await models.protoFile.update(protoFile, { name });
|
||||
// Import an entire directory tree of .proto files into the active workspace.
_handleAddDirectory(): Promise<void> {
  return protoManager.addDirectory(this.props.workspace._id);
}
|
||||
|
||||
// Rename a proto file document (name only; proto contents unchanged).
_handleRename(protoFile: ProtoFile, name: string): Promise<void> {
  return protoManager.renameFile(protoFile, name);
}
|
||||
|
||||
render() {
|
||||
const { protoFiles } = this.props;
|
||||
const { protoDirectories } = this.props;
|
||||
const { selectedProtoFileId } = this.state;
|
||||
|
||||
return (
|
||||
@ -167,17 +131,26 @@ class ProtoFilesModal extends React.PureComponent<Props, State> {
|
||||
<ModalBody className="wide pad">
|
||||
<div className="row-spaced margin-bottom bold">
|
||||
Files
|
||||
<AsyncButton onClick={this._handleAdd} loadingNode={spinner}>
|
||||
Add Proto File
|
||||
</AsyncButton>
|
||||
<span>
|
||||
<AsyncButton
|
||||
className="margin-right-sm"
|
||||
onClick={this._handleAddDirectory}
|
||||
loadingNode={spinner}>
|
||||
Add Directory
|
||||
</AsyncButton>
|
||||
<AsyncButton onClick={this._handleAdd} loadingNode={spinner}>
|
||||
Add Proto File
|
||||
</AsyncButton>
|
||||
</span>
|
||||
</div>
|
||||
<ProtoFileList
|
||||
protoFiles={protoFiles}
|
||||
protoDirectories={protoDirectories}
|
||||
selectedId={selectedProtoFileId}
|
||||
handleSelect={this._handleSelect}
|
||||
handleUpdate={this._handleUpload}
|
||||
handleDelete={this._handleDelete}
|
||||
handleDelete={this._handleDeleteFile}
|
||||
handleRename={this._handleRename}
|
||||
handleDeleteDirectory={this._handleDeleteDirectory}
|
||||
/>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
@ -192,4 +165,10 @@ class ProtoFilesModal extends React.PureComponent<Props, State> {
|
||||
}
|
||||
}
|
||||
|
||||
export default ProtoFilesModal;
|
||||
// Expose the expanded proto directory tree (for the active workspace) as a prop.
const mapStateToProps = (state, props) => ({
  protoDirectories: selectExpandedActiveProtoDirectories(state, props),
});
|
||||
|
||||
export default connect(mapStateToProps, null, null, { forwardRef: true })(ProtoFilesModal);
|
||||
|
@ -0,0 +1,41 @@
|
||||
// @flow
|
||||
import * as React from 'react';
|
||||
import { Button } from 'insomnia-components';
|
||||
import ProtoListItem from './proto-list-item';
|
||||
import type { ProtoDirectory } from '../../../models/proto-directory';
|
||||
import type { DeleteProtoDirectoryHandler } from './proto-file-list';
|
||||
|
||||
type Props = {
|
||||
dir: ProtoDirectory,
|
||||
indentLevel: number,
|
||||
handleDeleteDirectory: DeleteProtoDirectoryHandler,
|
||||
};
|
||||
|
||||
// Renders one directory row in the proto file list. Only root-level rows
// (indentLevel === 0) expose a delete button; deeper rows render name only.
const ProtoDirectoryListItem = ({ dir, indentLevel, handleDeleteDirectory }: Props) => {
  const handleDeleteCallback = React.useCallback(
    async (e: SyntheticEvent<HTMLButtonElement>) => {
      // Prevent the click from bubbling up to the surrounding list item
      e.stopPropagation();
      await handleDeleteDirectory(dir);
    },
    [handleDeleteDirectory, dir],
  );

  return (
    <ProtoListItem indentLevel={indentLevel}>
      {dir.name}
      {indentLevel === 0 && (
        <div className="row">
          <Button
            variant="text"
            title="Delete Directory"
            onClick={handleDeleteCallback}
            bg="danger">
            <i className="fa fa-trash-o" />
          </Button>
        </div>
      )}
    </ProtoListItem>
  );
};

export default ProtoDirectoryListItem;
|
@ -1,6 +1,5 @@
|
||||
// @flow
|
||||
import * as React from 'react';
|
||||
import styled from 'styled-components';
|
||||
import type { ProtoFile } from '../../../models/proto-file';
|
||||
import type {
|
||||
DeleteProtoFileHandler,
|
||||
@ -8,13 +7,15 @@ import type {
|
||||
SelectProtoFileHandler,
|
||||
UpdateProtoFileHandler,
|
||||
} from './proto-file-list';
|
||||
import { ListGroupItem, Button, AsyncButton } from '../../../../../insomnia-components';
|
||||
import { Button, AsyncButton } from 'insomnia-components';
|
||||
import Editable from '../base/editable';
|
||||
import ProtoListItem from './proto-list-item';
|
||||
|
||||
type Props = {
|
||||
protoFile: ProtoFile,
|
||||
isSelected?: boolean,
|
||||
handleSelect: SelectProtoFileHandler,
|
||||
indentLevel: number,
|
||||
handleDelete: DeleteProtoFileHandler,
|
||||
handleRename: RenameProtoFileHandler,
|
||||
handleUpdate: UpdateProtoFileHandler,
|
||||
@ -22,18 +23,6 @@ type Props = {
|
||||
|
||||
const spinner = <i className="fa fa-spin fa-refresh" />;
|
||||
|
||||
const SelectableListItem: React.PureComponent<{ isSelected?: boolean }> = styled(ListGroupItem)`
|
||||
&:hover {
|
||||
background-color: var(--hl-sm) !important;
|
||||
}
|
||||
background-color: ${({ isSelected }) =>
|
||||
isSelected && 'var(--hl-xs) !important; font-weight: bold;'};
|
||||
|
||||
i.fa {
|
||||
font-size: var(--font-size-lg);
|
||||
}
|
||||
`;
|
||||
|
||||
const ProtoFileListItem = ({
|
||||
protoFile,
|
||||
isSelected,
|
||||
@ -41,6 +30,7 @@ const ProtoFileListItem = ({
|
||||
handleDelete,
|
||||
handleRename,
|
||||
handleUpdate,
|
||||
indentLevel,
|
||||
}: Props) => {
|
||||
const { name, _id } = protoFile;
|
||||
|
||||
@ -68,29 +58,38 @@ const ProtoFileListItem = ({
|
||||
[handleUpdate, protoFile],
|
||||
);
|
||||
|
||||
const isReadOnly = indentLevel > 0;
|
||||
|
||||
return (
|
||||
<SelectableListItem isSelected={isSelected} onClick={handleSelectCallback}>
|
||||
<div className="row-spaced">
|
||||
<Editable className="wide" onSubmit={handleRenameCallback} value={name} preventBlank />
|
||||
<div className="row">
|
||||
<AsyncButton
|
||||
variant="text"
|
||||
title="Re-upload Proto File"
|
||||
onClick={handleUpdateCallback}
|
||||
loadingNode={spinner}
|
||||
className="space-right">
|
||||
<i className="fa fa-upload" />
|
||||
</AsyncButton>
|
||||
<Button
|
||||
variant="text"
|
||||
title="Delete Proto File"
|
||||
bg="danger"
|
||||
onClick={handleDeleteCallback}>
|
||||
<i className="fa fa-trash-o" />
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</SelectableListItem>
|
||||
<ProtoListItem
|
||||
selectable
|
||||
isSelected={isSelected}
|
||||
onClick={handleSelectCallback}
|
||||
indentLevel={indentLevel}>
|
||||
{isReadOnly && <span className="wide">{name}</span>}
|
||||
{!isReadOnly && (
|
||||
<>
|
||||
<Editable className="wide" onSubmit={handleRenameCallback} value={name} preventBlank />
|
||||
<div className="row">
|
||||
<AsyncButton
|
||||
variant="text"
|
||||
title="Re-upload Proto File"
|
||||
onClick={handleUpdateCallback}
|
||||
loadingNode={spinner}
|
||||
className="space-right">
|
||||
<i className="fa fa-upload" />
|
||||
</AsyncButton>
|
||||
<Button
|
||||
variant="text"
|
||||
title="Delete Proto File"
|
||||
bg="danger"
|
||||
onClick={handleDeleteCallback}>
|
||||
<i className="fa fa-trash-o" />
|
||||
</Button>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</ProtoListItem>
|
||||
);
|
||||
};
|
||||
|
||||
|
@ -3,42 +3,70 @@ import * as React from 'react';
|
||||
import type { ProtoFile } from '../../../models/proto-file';
|
||||
import { ListGroup, ListGroupItem } from 'insomnia-components';
|
||||
import ProtoFileListItem from './proto-file-list-item';
|
||||
import type { ExpandedProtoDirectory } from '../../redux/proto-selectors';
|
||||
import ProtoDirectoryListItem from './proto-directory-list-item';
|
||||
|
||||
export type SelectProtoFileHandler = (id: string) => void;
|
||||
export type DeleteProtoFileHandler = (protofile: ProtoFile) => Promise<void>;
|
||||
export type DeleteProtoDirectoryHandler = (protoDirectory: ProtoDirectory) => Promise<void>;
|
||||
export type UpdateProtoFileHandler = (protofile: ProtoFile) => Promise<void>;
|
||||
export type RenameProtoFileHandler = (protoFile: ProtoFile, name: string) => Promise<void>;
|
||||
|
||||
type Props = {
|
||||
protoFiles: Array<ProtoFile>,
|
||||
protoDirectories: Array<ExpandedProtoDirectory>,
|
||||
selectedId?: string,
|
||||
handleSelect: SelectProtoFileHandler,
|
||||
handleDelete: DeleteProtoFileHandler,
|
||||
handleRename: RenameProtoFileHandler,
|
||||
handleUpdate: UpdateProtoFileHandler,
|
||||
handleDeleteDirectory: DeleteProtoDirectoryHandler,
|
||||
};
|
||||
|
||||
const ProtoFileList = ({
|
||||
protoFiles,
|
||||
selectedId,
|
||||
handleSelect,
|
||||
handleDelete,
|
||||
handleRename,
|
||||
handleUpdate,
|
||||
}: Props) => (
|
||||
const recursiveRender = (
|
||||
{ dir, files, subDirs }: ExpandedProtoDirectory,
|
||||
props: Props,
|
||||
indent: number,
|
||||
) => {
|
||||
const {
|
||||
handleDelete,
|
||||
handleDeleteDirectory,
|
||||
handleRename,
|
||||
handleSelect,
|
||||
handleUpdate,
|
||||
selectedId,
|
||||
} = props;
|
||||
|
||||
const dirNode = dir && (
|
||||
<ProtoDirectoryListItem
|
||||
key={dir.name}
|
||||
dir={dir}
|
||||
indentLevel={indent++}
|
||||
handleDeleteDirectory={handleDeleteDirectory}
|
||||
/>
|
||||
);
|
||||
const fileNodes = files.map(f => (
|
||||
<ProtoFileListItem
|
||||
key={f._id}
|
||||
protoFile={f}
|
||||
isSelected={f._id === selectedId}
|
||||
handleSelect={handleSelect}
|
||||
handleDelete={handleDelete}
|
||||
handleRename={handleRename}
|
||||
handleUpdate={handleUpdate}
|
||||
indentLevel={indent}
|
||||
/>
|
||||
));
|
||||
const subDirNodes = subDirs.map(sd => recursiveRender(sd, props, indent));
|
||||
|
||||
return [dirNode, ...fileNodes, ...subDirNodes];
|
||||
};
|
||||
|
||||
const ProtoFileList = (props: Props) => (
|
||||
<ListGroup bordered>
|
||||
{!protoFiles.length && <ListGroupItem>No proto files exist for this workspace</ListGroupItem>}
|
||||
{protoFiles.map(p => (
|
||||
<ProtoFileListItem
|
||||
key={p._id}
|
||||
protoFile={p}
|
||||
isSelected={p._id === selectedId}
|
||||
handleSelect={handleSelect}
|
||||
handleDelete={handleDelete}
|
||||
handleRename={handleRename}
|
||||
handleUpdate={handleUpdate}
|
||||
/>
|
||||
))}
|
||||
{!props.protoDirectories.length && (
|
||||
<ListGroupItem>No proto files exist for this workspace</ListGroupItem>
|
||||
)}
|
||||
{props.protoDirectories.map(dir => recursiveRender(dir, props, 0))}
|
||||
</ListGroup>
|
||||
);
|
||||
|
||||
|
@ -0,0 +1,13 @@
|
||||
// @flow
|
||||
import styled from 'styled-components';
|
||||
import { ListGroupItem } from 'insomnia-components';
|
||||
|
||||
const ProtoListItem = styled(ListGroupItem).attrs(() => ({ className: 'row-spaced' }))`
|
||||
i.fa {
|
||||
font-size: var(--font-size-lg);
|
||||
}
|
||||
|
||||
height: var(--line-height-sm);
|
||||
`;
|
||||
|
||||
export default ProtoListItem;
|
@ -186,7 +186,6 @@ export type WrapperProps = {
|
||||
activeCookieJar: CookieJar,
|
||||
activeEnvironment: Environment | null,
|
||||
activeGitRepository: GitRepository | null,
|
||||
activeProtoFiles: Array<ProtoFile>,
|
||||
activeUnitTestResult: UnitTestResult | null,
|
||||
activeUnitTestSuites: Array<UnitTestSuite>,
|
||||
activeUnitTests: Array<UnitTest>,
|
||||
@ -529,7 +528,6 @@ class Wrapper extends React.PureComponent<WrapperProps, State> {
|
||||
activeCookieJar,
|
||||
activeEnvironment,
|
||||
activeGitRepository,
|
||||
activeProtoFiles,
|
||||
activeRequest,
|
||||
activeWorkspace,
|
||||
activeWorkspaceClientCertificates,
|
||||
@ -794,7 +792,6 @@ class Wrapper extends React.PureComponent<WrapperProps, State> {
|
||||
ref={registerModal}
|
||||
grpcDispatch={dispatch}
|
||||
workspace={activeWorkspace}
|
||||
protoFiles={activeProtoFiles}
|
||||
/>
|
||||
)}
|
||||
</GrpcDispatchModalWrapper>
|
||||
|
@ -39,7 +39,6 @@ import {
|
||||
selectActiveCookieJar,
|
||||
selectActiveGitRepository,
|
||||
selectActiveOAuth2Token,
|
||||
selectActiveProtoFiles,
|
||||
selectActiveRequest,
|
||||
selectActiveRequestMeta,
|
||||
selectActiveRequestResponses,
|
||||
@ -1491,16 +1490,12 @@ function mapStateToProps(state, props) {
|
||||
const activeUnitTestSuites = selectActiveUnitTestSuites(state, props);
|
||||
const activeUnitTestResult = selectActiveUnitTestResult(state, props);
|
||||
|
||||
// Proto file stuff
|
||||
const activeProtoFiles = selectActiveProtoFiles(state, props);
|
||||
|
||||
return Object.assign({}, state, {
|
||||
activity: activeActivity,
|
||||
activeApiSpec,
|
||||
activeCookieJar,
|
||||
activeEnvironment,
|
||||
activeGitRepository,
|
||||
activeProtoFiles,
|
||||
activeRequest,
|
||||
activeRequestResponses,
|
||||
activeResponse,
|
||||
|
@ -0,0 +1,183 @@
|
||||
// @flow
|
||||
|
||||
import { globalBeforeEach } from '../../../__jest__/before-each';
|
||||
import { selectExpandedActiveProtoDirectories } from '../proto-selectors';
|
||||
import * as models from '../../../models';
|
||||
import reduxStateForTest from '../../../__jest__/redux-state-for-test';
|
||||
|
||||
describe('selectExpandedActiveProtoDirectories', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('should return empty array if no proto files or directories exist', async () => {
|
||||
// Arrange
|
||||
const w = await models.workspace.create();
|
||||
|
||||
// Act
|
||||
const state = await reduxStateForTest(w._id);
|
||||
const expandedDirs = selectExpandedActiveProtoDirectories(state);
|
||||
|
||||
// Assert
|
||||
expect(expandedDirs).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should return empty array if active workspace is empty', async () => {
|
||||
// Arrange workspace
|
||||
const w1 = await models.workspace.create();
|
||||
const pd1 = await models.protoDirectory.create({ parentId: w1._id });
|
||||
const pd2 = await models.protoDirectory.create({ parentId: pd1._id });
|
||||
await models.protoFile.create({ parentId: pd1._id });
|
||||
await models.protoFile.create({ parentId: pd2._id });
|
||||
await models.protoFile.create({ parentId: w1._id });
|
||||
|
||||
const w2 = await models.workspace.create();
|
||||
|
||||
// Act
|
||||
const state = await reduxStateForTest(w2._id);
|
||||
const expandedDirs = selectExpandedActiveProtoDirectories(state);
|
||||
|
||||
// Assert
|
||||
expect(expandedDirs).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should return only directories if no proto files exist', async () => {
|
||||
// Arrange workspace 1
|
||||
const w = await models.workspace.create();
|
||||
const pd1 = await models.protoDirectory.create({ parentId: w._id });
|
||||
const pd2 = await models.protoDirectory.create({ parentId: pd1._id });
|
||||
|
||||
// Act
|
||||
const state = await reduxStateForTest(w._id);
|
||||
const expandedDirs = selectExpandedActiveProtoDirectories(state);
|
||||
|
||||
// Assert
|
||||
expect(expandedDirs).toHaveLength(1);
|
||||
expect(expandedDirs).toStrictEqual([
|
||||
{
|
||||
files: [],
|
||||
dir: pd1,
|
||||
subDirs: [
|
||||
{
|
||||
files: [],
|
||||
dir: pd2,
|
||||
subDirs: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should return individual files in a null expanded dir', async () => {
|
||||
// Arrange
|
||||
const w = await models.workspace.create();
|
||||
const pf1 = await models.protoFile.create({ parentId: w._id });
|
||||
const pf2 = await models.protoFile.create({ parentId: w._id });
|
||||
|
||||
// Act
|
||||
const state = await reduxStateForTest(w._id);
|
||||
const expandedDirs = selectExpandedActiveProtoDirectories(state);
|
||||
|
||||
// Assert
|
||||
expect(expandedDirs).toHaveLength(1);
|
||||
expect(expandedDirs).toStrictEqual([
|
||||
{
|
||||
files: expect.arrayContaining([pf1, pf2]),
|
||||
dir: null,
|
||||
subDirs: [],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should expand root directories', async () => {
|
||||
// Arrange
|
||||
const w = await models.workspace.create();
|
||||
const pd1 = await models.protoDirectory.create({ parentId: w._id });
|
||||
const pd2 = await models.protoDirectory.create({ parentId: w._id });
|
||||
const pf1 = await models.protoFile.create({ parentId: pd1._id });
|
||||
const pf2 = await models.protoFile.create({ parentId: pd2._id });
|
||||
|
||||
// Act
|
||||
const state = await reduxStateForTest(w._id);
|
||||
const expandedDirs = selectExpandedActiveProtoDirectories(state);
|
||||
|
||||
// Assert
|
||||
expect(expandedDirs).toHaveLength(2);
|
||||
expect(expandedDirs).toStrictEqual(
|
||||
expect.arrayContaining([
|
||||
{
|
||||
files: [pf1],
|
||||
dir: pd1,
|
||||
subDirs: [],
|
||||
},
|
||||
{
|
||||
files: [pf2],
|
||||
dir: pd2,
|
||||
subDirs: [],
|
||||
},
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should expand nested directories', async () => {
|
||||
// Arrange
|
||||
const w = await models.workspace.create();
|
||||
const pd1 = await models.protoDirectory.create({ parentId: w._id });
|
||||
const pd2 = await models.protoDirectory.create({ parentId: pd1._id });
|
||||
const pf1 = await models.protoFile.create({ parentId: pd1._id });
|
||||
const pf2 = await models.protoFile.create({ parentId: pd2._id });
|
||||
|
||||
// Act
|
||||
const state = await reduxStateForTest(w._id);
|
||||
const expandedDirs = selectExpandedActiveProtoDirectories(state);
|
||||
|
||||
// Assert
|
||||
expect(expandedDirs).toHaveLength(1);
|
||||
expect(expandedDirs).toStrictEqual([
|
||||
{
|
||||
files: [pf1],
|
||||
dir: pd1,
|
||||
subDirs: [
|
||||
{
|
||||
files: [pf2],
|
||||
dir: pd2,
|
||||
subDirs: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should include all individual files and nested directories', async () => {
|
||||
// Arrange workspace 1
|
||||
const w = await models.workspace.create();
|
||||
const pd1 = await models.protoDirectory.create({ parentId: w._id });
|
||||
const pd2 = await models.protoDirectory.create({ parentId: pd1._id });
|
||||
const pf1 = await models.protoFile.create({ parentId: pd1._id });
|
||||
const pf2 = await models.protoFile.create({ parentId: pd2._id });
|
||||
const pf3 = await models.protoFile.create({ parentId: w._id });
|
||||
|
||||
// Act
|
||||
const state = await reduxStateForTest(w._id);
|
||||
const expandedDirs = selectExpandedActiveProtoDirectories(state);
|
||||
|
||||
// Assert
|
||||
expect(expandedDirs).toHaveLength(2);
|
||||
expect(expandedDirs).toStrictEqual([
|
||||
{
|
||||
files: [pf3],
|
||||
dir: null,
|
||||
subDirs: [],
|
||||
},
|
||||
{
|
||||
files: [pf1],
|
||||
dir: pd1,
|
||||
subDirs: [
|
||||
{
|
||||
files: [pf2],
|
||||
dir: pd2,
|
||||
subDirs: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
@ -119,6 +119,7 @@ export async function allDocs() {
|
||||
...(await models.unitTest.all()),
|
||||
...(await models.unitTestResult.all()),
|
||||
...(await models.protoFile.all()),
|
||||
...(await models.protoDirectory.all()),
|
||||
...(await models.grpcRequest.all()),
|
||||
...(await models.grpcRequestMeta.all()),
|
||||
];
|
||||
|
58
packages/insomnia-app/app/ui/redux/proto-selectors.js
Normal file
58
packages/insomnia-app/app/ui/redux/proto-selectors.js
Normal file
@ -0,0 +1,58 @@
|
||||
// @flow
|
||||
|
||||
import { createSelector } from 'reselect';
|
||||
import { selectActiveWorkspace, selectEntitiesLists } from './selectors';
|
||||
import type { ProtoDirectory } from '../../models/proto-directory';
|
||||
import type { ProtoFile } from '../../models/proto-file';
|
||||
|
||||
export type ExpandedProtoDirectory = {
|
||||
files: Array<ProtoFile>,
|
||||
dir: ProtoDirectory | null,
|
||||
subDirs: Array<ExpandedProtoDirectory>,
|
||||
};
|
||||
|
||||
const selectAllProtoFiles = createSelector(
|
||||
selectEntitiesLists,
|
||||
entities => entities.protoFiles || [],
|
||||
);
|
||||
|
||||
const selectAllProtoDirectories = createSelector(
|
||||
selectEntitiesLists,
|
||||
entities => entities.protoDirectories || [],
|
||||
);
|
||||
|
||||
export const selectExpandedActiveProtoDirectories = createSelector(
|
||||
selectActiveWorkspace,
|
||||
selectAllProtoFiles,
|
||||
selectAllProtoDirectories,
|
||||
(workspace, allFiles, allDirs): Array<ExpandedProtoDirectory> => {
|
||||
// Get files where the parent is the workspace
|
||||
const individualFiles = allFiles.filter(pf => pf.parentId === workspace._id);
|
||||
|
||||
// Get directories where the parent is the workspace
|
||||
const rootDirs = allDirs.filter(pd => pd.parentId === workspace._id);
|
||||
|
||||
// Expand each directory
|
||||
const expandedDirs = rootDirs.map(dir => expandDir(dir, allFiles, allDirs));
|
||||
|
||||
if (individualFiles.length) {
|
||||
return [{ files: individualFiles, dir: null, subDirs: [] }, ...expandedDirs];
|
||||
}
|
||||
|
||||
return expandedDirs;
|
||||
},
|
||||
);
|
||||
|
||||
const expandDir = (
|
||||
dir: ProtoDirectory,
|
||||
allFiles: Array<ProtoFile>,
|
||||
allDirs: Array<ProtoDirectory>,
|
||||
): ExpandedProtoDirectory => {
|
||||
const filesInDir = allFiles.filter(pf => pf.parentId === dir._id);
|
||||
const subDirs = allDirs.filter(pd => pd.parentId === dir._id);
|
||||
|
||||
// Expand sub directories
|
||||
const expandedSubDirs = subDirs.map(subDir => expandDir(subDir, allFiles, allDirs));
|
||||
|
||||
return { dir, files: filesInDir, subDirs: expandedSubDirs };
|
||||
};
|
@ -172,14 +172,6 @@ export const selectActiveRequest = createSelector(
|
||||
},
|
||||
);
|
||||
|
||||
export const selectActiveProtoFiles = createSelector(
|
||||
selectEntitiesLists,
|
||||
selectActiveWorkspace,
|
||||
(entities, workspace) => {
|
||||
return entities.protoFiles.filter(pf => pf.parentId === workspace._id);
|
||||
},
|
||||
);
|
||||
|
||||
export const selectActiveCookieJar = createSelector(
|
||||
selectEntitiesLists,
|
||||
selectActiveWorkspace,
|
||||
|
@ -36,6 +36,7 @@
|
||||
"\\.(css|less|png)$": "<rootDir>/__mocks__/dummy.js",
|
||||
"^worker-loader!": "<rootDir>/__mocks__/dummy.js"
|
||||
},
|
||||
"modulePathIgnorePatterns": ["<rootDir>/network/.*/__mocks__"],
|
||||
"testMatch": [
|
||||
"**/__tests__/**/*.test.js?(x)"
|
||||
],
|
||||
@ -51,6 +52,7 @@
|
||||
"!**/static/**",
|
||||
"!**/ui/components/**",
|
||||
"!**/__fixtures__/**",
|
||||
"!**/__schemas__/**",
|
||||
"!**/__tests__/**",
|
||||
"!**/__jest__/**",
|
||||
"!**/__mocks__/**",
|
||||
|
@ -1,7 +1,33 @@
|
||||
// @Flow
|
||||
import styled from 'styled-components';
|
||||
// @flow
|
||||
import * as React from 'react';
|
||||
import styled, { css } from 'styled-components';
|
||||
|
||||
export default styled.li`
|
||||
type Props = { isSelected?: boolean, selectable?: boolean, indentLevel?: number };
|
||||
|
||||
const ListGroupItem: React.AbstractComponent<Props> = styled.li`
|
||||
border-bottom: 1px solid var(--hl-xs);
|
||||
padding: var(--padding-sm) var(--padding-sm);
|
||||
|
||||
${({ selectable }) =>
|
||||
selectable &&
|
||||
css`
|
||||
&:hover {
|
||||
background-color: var(--hl-sm) !important;
|
||||
}
|
||||
`}
|
||||
|
||||
${({ isSelected }) =>
|
||||
isSelected &&
|
||||
css`
|
||||
background-color: var(--hl-xs) !important;
|
||||
font-weight: bold;
|
||||
`}
|
||||
|
||||
${({ indentLevel }) =>
|
||||
indentLevel &&
|
||||
css`
|
||||
padding-left: calc(var(--padding-sm) + var(--padding-md) * ${indentLevel});
|
||||
`};
|
||||
`;
|
||||
|
||||
export default ListGroupItem;
|
||||
|
@ -28,6 +28,44 @@ export const _bordered = () => (
|
||||
</div>
|
||||
);
|
||||
|
||||
export const _indented = () => (
|
||||
<div style={{ width: '350px' }}>
|
||||
<ListGroup bordered>
|
||||
<ListGroupItem indentLevel={0}>Indent 0</ListGroupItem>
|
||||
<ListGroupItem indentLevel={1}>Indent 1</ListGroupItem>
|
||||
<ListGroupItem indentLevel={2}>Indent 2</ListGroupItem>
|
||||
<ListGroupItem indentLevel={3}>Indent 3</ListGroupItem>
|
||||
<ListGroupItem indentLevel={4}>Indent 4</ListGroupItem>
|
||||
<ListGroupItem indentLevel={3}>Indent 3</ListGroupItem>
|
||||
<ListGroupItem indentLevel={2}>Indent 2</ListGroupItem>
|
||||
<ListGroupItem indentLevel={1}>Indent 1</ListGroupItem>
|
||||
<ListGroupItem indentLevel={0}>Indent 0</ListGroupItem>
|
||||
</ListGroup>
|
||||
</div>
|
||||
);
|
||||
|
||||
export const _selectable = () => {
|
||||
const [selected, setSelected] = React.useState(-1);
|
||||
return (
|
||||
<div style={{ width: '350px' }}>
|
||||
<ListGroup>
|
||||
<ListGroupItem selectable isSelected={selected === 0} onClick={() => setSelected(0)}>
|
||||
Selectable
|
||||
</ListGroupItem>
|
||||
<ListGroupItem selectable isSelected={selected === 1} onClick={() => setSelected(1)}>
|
||||
list
|
||||
</ListGroupItem>
|
||||
<ListGroupItem selectable isSelected={selected === 2} onClick={() => setSelected(2)}>
|
||||
of
|
||||
</ListGroupItem>
|
||||
<ListGroupItem selectable isSelected={selected === 3} onClick={() => setSelected(3)}>
|
||||
things...
|
||||
</ListGroupItem>
|
||||
</ListGroup>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
const unitTestResults = [
|
||||
{
|
||||
_id: 'ut_A',
|
||||
|
Loading…
Reference in New Issue
Block a user