initial setup for fetchResource service

deityhub 2022-01-10 07:48:35 +01:00
parent 9257b2af96
commit 8275a84331
17 changed files with 5529 additions and 0 deletions

fetch-resources/.dockerignore Normal file (12 lines)

@ -0,0 +1,12 @@
node_modules/
.vscode/
.DS_Store
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*
yarn.lock

fetch-resources/.env Normal file (6 lines)

@ -0,0 +1,6 @@
MONGO_URL=mongodb://localhost:27017/fyipedb
CLUSTER_KEY=f414c23b4cdf4e84a6a66ecfd528eff2
SLACK_ERROR_LOG_WEBHOOK=https://hooks.slack.com/services/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
SLACK_ERROR_LOG_CHANNEL=fyipe-engineering
PORT=3400
REALTIME_URL=http://localhost:3300

fetch-resources/.gitignore vendored Normal file (19 lines)

@ -0,0 +1,19 @@
# See https://help.github.com/ignore-files/ for more about ignoring files.
# dependencies
#/backend/node_modules
/kubernetes
/node_modules
.idea
# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*
yarn.lock

fetch-resources/Dockerfile Executable file (46 lines)

@ -0,0 +1,46 @@
#
# OneUptime-fetch-resources Dockerfile
#
# Pull base Node.js image.
FROM node:16
# Use bash shell by default
SHELL ["/bin/bash", "-c"]
# Set env variables
ENV PRODUCTION=true
RUN mkdir -p /usr/src/app
WORKDIR /usr/src/app
# Install kubectl for kubernetes monitor scanning.
# Each RUN starts a fresh shell, so the architecture detection and the download
# must share one RUN; the variable is expanded as ${OS_ARCHITECTURE} (the original
# $(OS_ARCHITECTURE) was command substitution) and compared with ==, not the numeric -eq.
RUN OS_ARCHITECTURE="amd64" && \
    if [[ "$(uname -m)" == "aarch64" || "$(uname -m)" == "arm64" ]]; then OS_ARCHITECTURE="arm64"; fi && \
    curl -LO "https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/${OS_ARCHITECTURE}/kubectl"
RUN chmod +x ./kubectl
RUN mv ./kubectl /usr/local/bin/kubectl && \
chown root: /usr/local/bin/kubectl
# Due to this EPROTO wrong signature error on http fetch, we need to comment out CipherString
# in the file /etc/ssl/openssl.cnf
# https://github.com/nodejs/help/issues/2804
# https://developers.exlibrisgroup.com/forums/topic/cant-fetch-from-alma-api-with-node/
RUN sed -i 's/^CipherString$\?/#CipherString/g' /etc/ssl/openssl.cnf
# Install app dependencies
COPY package*.json /usr/src/app/
RUN npm ci --only=production
# Bundle app source
COPY . /usr/src/app
# Expose ports.
EXPOSE 3400
# Run the app
CMD [ "npm", "start"]

fetch-resources/Dockerfile.dev Normal file (47 lines)

@ -0,0 +1,47 @@
#
# OneUptime-fetch-resources Dockerfile
#
# Pull base Node.js image.
FROM node:16
# Use bash shell by default
SHELL ["/bin/bash", "-c"]
WORKDIR /usr/src/app
# Install kubectl for kubernetes monitor scanning.
# As in the production Dockerfile, the architecture detection and the download
# share one RUN so OS_ARCHITECTURE survives, expanded as ${OS_ARCHITECTURE}.
RUN OS_ARCHITECTURE="amd64" && \
    if [[ "$(uname -m)" == "aarch64" || "$(uname -m)" == "arm64" ]]; then OS_ARCHITECTURE="arm64"; fi && \
    curl -LO "https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/${OS_ARCHITECTURE}/kubectl"
RUN chmod +x ./kubectl
RUN mv ./kubectl /usr/local/bin/kubectl && \
chown root: /usr/local/bin/kubectl
# Due to this EPROTO wrong signature error on http fetch, we need to comment out CipherString
# in the file /etc/ssl/openssl.cnf
# https://github.com/nodejs/help/issues/2804
# https://developers.exlibrisgroup.com/forums/topic/cant-fetch-from-alma-api-with-node/
RUN sed -i 's/^CipherString$\?/#CipherString/g' /etc/ssl/openssl.cnf
# Install app dependencies (WORKDIR is already /usr/src/app; a RUN cd would not persist)
# Copy package.json files
COPY ./package.json /usr/src/app/package.json
COPY ./package-lock.json /usr/src/app/package-lock.json
RUN npm ci
# Expose ports.
EXPOSE 3400
# Expose Debugging port.
EXPOSE 9229
# Run the app
CMD [ "npm", "run", "dev"]

fetch-resources/api/probe.js Executable file (35 lines)

@ -0,0 +1,35 @@
/**
*
* Copyright HackerBay, Inc.
*
*/
const express = require('express');
const MonitorService = require('../services/monitorService');
const router = express.Router();
const isAuthorizedProbe = require('../middlewares/probeAuthorization')
.isAuthorizedProbe;
const sendErrorResponse = require('../middlewares/response').sendErrorResponse;
const sendListResponse = require('../middlewares/response').sendListResponse;
router.get('/monitors', isAuthorizedProbe, async function(req, res) {
try {
        // query params arrive as strings; coerce so the aggregation $limit gets a number
        const limit = parseInt(req.query.limit, 10) || 10;
const monitors = await MonitorService.getProbeMonitors(
req.probe.id,
limit,
new Date(new Date().getTime() - 60 * 1000)
);
return sendListResponse(
req,
res,
JSON.stringify(monitors),
monitors.length
);
} catch (error) {
return sendErrorResponse(req, res, error);
}
});
module.exports = router;
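
For reference, this route can be exercised with a plain HTTP call once the service is running. A minimal sketch, assuming the default PORT=3400 from .env and placeholder probe credentials; the lowercase header names are what the probeAuthorization middleware (later in this commit) reads:

const axios = require('axios');

axios
    .get('http://localhost:3400/api/probe/monitors', {
        params: { limit: 5 },
        headers: {
            probename: 'probe-1', // placeholder probe name
            probekey: 'some-probe-key', // placeholder probe key
            clusterkey: process.env.CLUSTER_KEY, // must match CLUSTER_KEY in .env
        },
    })
    .then(response => console.log(response.data))
    .catch(error => console.error(error.message));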

fetch-resources/index.js Normal file (155 lines)

@ -0,0 +1,155 @@
/* eslint-disable no-console */
const { NODE_ENV } = process.env;
if (!NODE_ENV || NODE_ENV === 'development') {
    // Load env vars from /fetch-resources/.env
require('custom-env').env();
}
const express = require('express');
const Sentry = require('@sentry/node');
const Tracing = require('@sentry/tracing');
const app = express();
const http = require('http').createServer(app);
const cors = require('cors');
const { mongoUrl } = require('./utils/config');
const MongoClient = require('mongodb').MongoClient;
process.on('exit', () => {
    console.log('Server Shutting Down');
});
process.on('unhandledRejection', err => {
console.error('Unhandled rejection in server process occurred');
console.error(err);
});
process.on('uncaughtException', err => {
console.error('Uncaught exception in server process occurred');
console.error(err);
});
Sentry.init({
dsn: process.env.SENTRY_DSN,
release: `fetch-resources@${process.env.npm_package_version}`,
environment: process.env.NODE_ENV,
integrations: [
// enable HTTP calls tracing
new Sentry.Integrations.Http({ tracing: true }),
// enable Express.js middleware tracing
new Tracing.Integrations.Express({
app,
}),
new Sentry.Integrations.OnUncaughtException({
onFatalError() {
// override default behaviour
return;
},
}),
],
tracesSampleRate: 0.0,
});
// mongodb
function getMongoClient() {
return new MongoClient(mongoUrl, {
useNewUrlParser: true,
useUnifiedTopology: true,
});
}
// setup mongodb connection
const client = getMongoClient();
(async function() {
try {
console.log('connecting to db');
await client.connect();
console.log('connected to db');
} catch (error) {
console.log('connection error: ', error);
}
})();
// attach the database to global object
global.db = client.db('fyipedb');
// Sentry: The request handler must be the first middleware on the app
app.use(Sentry.Handlers.requestHandler());
app.use(Sentry.Handlers.tracingHandler());
app.use(cors());
app.use(function(req, res, next) {
if (typeof req.body === 'string') {
req.body = JSON.parse(req.body);
}
res.header('Access-Control-Allow-Credentials', true);
res.header('Access-Control-Allow-Origin', req.headers.origin);
res.header('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS');
res.header(
'Access-Control-Allow-Headers',
'X-Requested-With, X-HTTP-Method-Override, Content-Type, Accept,Authorization'
);
return next();
});
// Add limit of 10 MB to avoid "Request Entity too large error"
// https://stackoverflow.com/questions/19917401/error-request-entity-too-large
app.use(express.urlencoded({ limit: '10mb', extended: true }));
app.use(express.json({ limit: '10mb' }));
// log request middleware
const getActualRequestDurationInMilliseconds = start => {
const NS_PER_SEC = 1e9; // convert to nanoseconds
const NS_TO_MS = 1e6; // convert to milliseconds
const diff = process.hrtime(start);
return (diff[0] * NS_PER_SEC + diff[1]) / NS_TO_MS;
};
app.use(function(req, res, next) {
    const start = process.hrtime();
    // log once the response has been sent, so the status code and the
    // measured duration reflect the finished request
    res.on('finish', () => {
        const current_datetime = new Date();
        const formatted_date =
            current_datetime.getFullYear() +
            '-' +
            (current_datetime.getMonth() + 1) +
            '-' +
            current_datetime.getDate() +
            ' ' +
            current_datetime.getHours() +
            ':' +
            current_datetime.getMinutes() +
            ':' +
            current_datetime.getSeconds();
        const durationInMilliseconds = getActualRequestDurationInMilliseconds(
            start
        );
        const log = `[${formatted_date}] ${req.method}:${req.url} ${res.statusCode} ${durationInMilliseconds.toLocaleString()} ms`;
        console.log(log);
    });
    return next();
});
app.get(['/fetch-resources/status', '/status'], function(req, res) {
res.setHeader('Content-Type', 'application/json');
res.send(
JSON.stringify({
status: 200,
message: 'Service Status - OK',
serviceType: 'oneuptime-fetch-resources',
})
);
});
app.use(['/probe', '/api/probe'], require('./api/probe'));
app.use(Sentry.Handlers.errorHandler());
app.set('port', process.env.PORT || 3400);
http.listen(app.get('port'), function() {
// eslint-disable-next-line
console.log('fetch-resources server started on port ' + app.get('port'));
});
module.exports = app;
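
Once the server is listening, the health route gives a quick smoke test. A minimal sketch, assuming the default port:

const axios = require('axios');

axios.get('http://localhost:3400/fetch-resources/status').then(response => {
    // expected: { status: 200, message: 'Service Status - OK',
    //             serviceType: 'oneuptime-fetch-resources' }
    console.log(response.data);
});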

fetch-resources/middlewares/jsonToCsv.js Normal file (27 lines)

@ -0,0 +1,27 @@
const replacer = (key, value) => (value === null ? '' : value); // specify how you want to handle null values here
module.exports = {
ToCsv: json => {
return new Promise((resolve, reject) => {
try {
if (json.length > 0) {
const header = Object.keys(json[0]);
let csv = json.map(row =>
header
.map(fieldName =>
JSON.stringify(row[fieldName], replacer)
)
.join(',')
);
csv.unshift(header.join(','));
csv = csv.join('\r\n');
resolve(csv);
} else {
resolve('');
}
} catch (error) {
reject(error);
}
});
},
};
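
Usage sketch: each object becomes one CSV row, the keys of the first object become the header, and the replacer blanks out null values (sample data is illustrative):

const JsonToCsv = require('./jsonToCsv');

(async () => {
    const csv = await JsonToCsv.ToCsv([
        { name: 'probe-1', lastAlive: '2022-01-10', region: null },
        { name: 'probe-2', lastAlive: '2022-01-09', region: 'eu' },
    ]);
    console.log(csv);
    // name,lastAlive,region
    // "probe-1","2022-01-10",""
    // "probe-2","2022-01-09","eu"
})();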

fetch-resources/middlewares/probeAuthorization.js Normal file (200 lines)

@ -0,0 +1,200 @@
/**
*
* Copyright HackerBay, Inc.
*
*/
const ProbeService = require('../services/probeService');
const sendErrorResponse = require('../middlewares/response').sendErrorResponse;
const ErrorService = require('../services/errorService');
const { clusterKey: CLUSTER_KEY } = require('../utils/config');
global.probes = {};
module.exports = {
isAuthorizedProbe: async function(req, res, next) {
try {
let probeKey, probeName, clusterKey, probeVersion;
if (req.params && req.params.probeKey) {
probeKey = req.params.probeKey;
} else if (req.query && req.query.probeKey) {
probeKey = req.query.probeKey;
} else if (
req.headers &&
(req.headers['probeKey'] || req.headers['probekey'])
) {
// header keys are automatically transformed to lowercase
probeKey = req.headers['probeKey'] || req.headers['probekey'];
} else if (req.body && req.body.probeKey) {
probeKey = req.body.probeKey;
} else {
return sendErrorResponse(req, res, {
code: 400,
message: 'Probe Key not found.',
});
}
if (req.params && req.params.probeName) {
probeName = req.params.probeName;
} else if (req.query && req.query.probeName) {
probeName = req.query.probeName;
} else if (
req.headers &&
(req.headers['probeName'] || req.headers['probename'])
) {
// header keys are automatically transformed to lowercase
probeName =
req.headers['probeName'] || req.headers['probename'];
} else if (req.body && req.body.probeName) {
probeName = req.body.probeName;
} else {
return sendErrorResponse(req, res, {
code: 400,
message: 'Probe Name not found.',
});
}
if (req.params && req.params.clusterKey) {
clusterKey = req.params.clusterKey;
} else if (req.query && req.query.clusterKey) {
clusterKey = req.query.clusterKey;
} else if (
req.headers &&
(req.headers['clusterKey'] || req.headers['clusterkey'])
) {
// header keys are automatically transformed to lowercase
clusterKey =
req.headers['clusterKey'] || req.headers['clusterkey'];
} else if (req.body && req.body.clusterKey) {
clusterKey = req.body.clusterKey;
}
if (req.params && req.params.probeVersion) {
probeVersion = req.params.probeVersion;
} else if (req.query && req.query.probeVersion) {
probeVersion = req.query.probeVersion;
} else if (
req.headers &&
(req.headers['probeversion'] || req.headers['probeVersion'])
) {
// header keys are automatically transformed to lowercase
probeVersion =
req.headers['probeversion'] || req.headers['probeVersion'];
} else if (req.body && req.body.probeVersion) {
probeVersion = req.body.probeVersion;
}
let probeId = null;
if (clusterKey && clusterKey === CLUSTER_KEY) {
// if cluster key matches then just query by probe name,
// because if the probe key does not match, we can update probe key later
                // without updating the mongodb database manually.
if (global.probes[probeName]) {
probeId = global.probes[probeName]._id;
} else {
const probe = await ProbeService.findOneBy({ probeName });
if (probe && probe._id) {
probeId = probe._id;
global.probes[probeName] = {
_id: probe._id,
probeKey: probe.probeKey,
version: probe.version,
};
}
}
} else {
if (global.probes[probeName]) {
probeId = global.probes[probeName]._id;
} else {
const probe = await ProbeService.findOneBy({
probeKey,
probeName,
});
if (probe && probe._id) {
probeId = probe._id;
global.probes[probeName] = {
_id: probe._id,
probeKey: probe.probeKey,
version: probe.version,
};
}
}
}
if (!probeId && (!clusterKey || clusterKey !== CLUSTER_KEY)) {
return sendErrorResponse(req, res, {
code: 400,
message: 'Probe key and probe name do not match.',
});
}
if (!probeId) {
//create a new probe.
const probe = await ProbeService.create({
probeKey,
probeName,
probeVersion,
});
probeId = probe._id;
global.probes[probeName] = {
_id: probe._id,
probeKey: probe.probeKey,
version: probe.version,
};
}
if (global.probes[probeName].probeKey !== probeKey) {
                // update probe key because it does not match.
await ProbeService.updateOneBy(
{
probeName,
},
{ probeKey }
);
const probe = await ProbeService.findOneBy({
probeKey,
probeName,
});
probeId = probe._id;
global.probes[probeName] = {
_id: probe._id,
probeKey: probe.probeKey,
version: probe.version,
};
}
req.probe = {};
req.probe.id = probeId.toString();
req.probe.name = probeName;
// run in background.
ProbeService.updateProbeStatus(probeId);
if (
probeVersion &&
(!global.probes[probeName].version ||
global.probes[probeName].version !== probeVersion)
) {
await ProbeService.updateOneBy(
{
probeName,
},
{ version: probeVersion }
);
}
return next();
        } catch (error) {
            ErrorService.log('probeAuthorization.isAuthorizedProbe', error);
            // respond with the error rather than rethrowing: a throw from an
            // async middleware surfaces as an unhandled rejection in Express 4
            return sendErrorResponse(req, res, error);
        }
},
};
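
The middleware is meant to sit in front of any probe-facing route, which then gets req.probe.id and req.probe.name for free. A minimal wiring sketch (the /probe/ping route is illustrative, not part of this commit):

const express = require('express');
const {
    isAuthorizedProbe,
} = require('./middlewares/probeAuthorization');

const app = express();

app.get('/probe/ping', isAuthorizedProbe, (req, res) => {
    // the middleware has already resolved (or auto-created) the probe
    res.send({ probeId: req.probe.id, probeName: req.probe.name });
});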

fetch-resources/middlewares/response.js Normal file (118 lines)

@ -0,0 +1,118 @@
/**
*
* Copyright HackerBay, Inc.
*
*/
const JsonToCsv = require('./jsonToCsv');
module.exports = {
sendErrorResponse: function(req, res, error) {
//log error to the console.
// eslint-disable-next-line no-console
console.error(error);
if (error.statusCode && error.message) {
            res.resBody = { message: error.message }; // To be used in 'auditLog' middleware to log response data;
return res
.status(error.statusCode)
.send({ message: error.message });
} else if (
error.code &&
error.message &&
typeof error.code === 'number'
) {
let status = error.code;
if (
error.code &&
error.status &&
typeof error.code === 'number' &&
typeof error.status === 'number' &&
error.code > 600
) {
status = error.status;
}
res.resBody = { message: error.message };
return res.status(status).send({ message: error.message });
} else {
res.resBody = { message: 'Server Error.' };
return res.status(500).send({ message: 'Server Error.' });
}
},
sendListResponse: async function(req, res, list, count) {
// remove __v, deleted, deletedAt and deletedById if not Master Admin
const response = {};
if (!list) {
list = [];
}
if (list) {
response.data = list;
}
if (count) {
response.count = count;
} else {
if (list) response.count = list.length;
}
if (req.query.skip) {
response.skip = parseInt(req.query.skip);
}
if (req.query.limit) {
response.limit = parseInt(req.query.limit);
}
//purge request.
//req = null;
        if (req.query['output-type'] === 'csv') {
            // flatten nested objects to a printable value before CSV conversion
            // (plain [prop] indexing; the original [[prop]] was a redundant
            // array-to-string coercion with identical behaviour)
            if (!Array.isArray(response.data)) {
                const properties = Object.keys(response.data);
                const newObj = {};
                properties.forEach(prop => {
                    if (
                        typeof response.data[prop] === 'object' &&
                        response.data[prop] !== null
                    ) {
                        if (response.data[prop].name)
                            response.data[prop] = response.data[prop].name;
                        else if (response.data[prop].title)
                            response.data[prop] = response.data[prop].title;
                        else if (response.data[prop]._id)
                            response.data[prop] = response.data[prop]._id;
                    }
                    newObj[prop] = response.data[prop];
                });
                response.data = JSON.parse(JSON.stringify(newObj));
                response.data = [response.data];
            } else {
                response.data = response.data.map(i => {
                    i = i._doc ? i._doc : i;
                    const properties = Object.keys(i);
                    const newObj = {};
                    properties.forEach(prop => {
                        if (typeof i[prop] === 'object' && i[prop] !== null) {
                            if (i[prop].name) i[prop] = i[prop].name;
                            else if (i[prop].title) i[prop] = i[prop].title;
                            else if (i[prop]._id) i[prop] = i[prop]._id;
                        }
                        newObj[prop] = i[prop];
                    });
                    return JSON.parse(JSON.stringify(newObj));
                });
            }
            response.data = await JsonToCsv.ToCsv(response.data);
        }
        res.resBody = response; // To be used in 'auditLog' middleware to log response data;
return res.status(200).send(response);
},
};
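
Usage sketch for the list helper: hand it any array of documents, and a client can append ?output-type=csv to receive the same list flattened to CSV (nested objects collapse to their name, title, or _id). The route and sample data here are illustrative:

const express = require('express');
const {
    sendListResponse,
    sendErrorResponse,
} = require('./middlewares/response');

const app = express();

app.get('/example-list', async (req, res) => {
    try {
        const probes = [
            { probeName: 'probe-1', project: { _id: 'abc123', title: 'Main' } },
        ];
        // JSON by default; with ?output-type=csv the body becomes:
        //   probeName,project
        //   "probe-1","Main"
        return sendListResponse(req, res, probes, probes.length);
    } catch (error) {
        return sendErrorResponse(req, res, error);
    }
});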

fetch-resources/package-lock.json generated Normal file (4513 lines)
File diff suppressed because it is too large.

fetch-resources/package.json Normal file (33 lines)

@ -0,0 +1,33 @@
{
"name": "fetch-resources",
"version": "3.0.0",
"description": "A project to handle all resource fetching or update for probes",
"main": "index.js",
"scripts": {
"start": "node --max-old-space-size=14336 index.js",
"dev": "cross-env NODE_ENV=development nodemon --inspect=0.0.0.0 index.js",
"audit": "npm audit --audit-level=low",
"dep-check": "depcheck ./ --skip-missing=true"
},
"keywords": [],
"author": "Jude Ojini",
"license": "ISC",
"dependencies": {
"@sentry/node": "^6.12.0",
"@sentry/tracing": "^6.12.0",
"axios": "^0.21.1",
"cors": "^2.8.5",
"cross-env": "^7.0.3",
"custom-env": "^2.0.1",
"express": "^4.17.1",
"lodash": "^4.17.21",
"moment": "^2.29.1",
"mongodb": "^4.0.0",
"uuid": "^8.3.2",
"winston": "^3.3.3",
"winston-slack-webhook-transport": "^2.0.1"
},
"devDependencies": {
"nodemon": "^2.0.12"
}
}

fetch-resources/services/errorService.js Normal file (27 lines)

@ -0,0 +1,27 @@
const winston = require('winston');
const Slack = require('winston-slack-webhook-transport');
if (
process.env.PORT &&
process.env.SLACK_ERROR_LOG_WEBHOOK &&
process.env.SLACK_ERROR_LOG_CHANNEL
) {
winston.add(new Slack({ webhookUrl: process.env.SLACK_ERROR_LOG_WEBHOOK }));
}
module.exports = {
log: (functionName, error) => {
error = error && error.message ? error.message : error;
winston.error(
JSON.stringify(
{
error: String(error),
functionName: String(functionName),
stack: new Error().stack,
},
                null, // no replacer; 2-space indentation
2
)
);
},
};
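
Callers pass a fully qualified 'service.function' label plus the error, which is the convention used throughout this commit:

const ErrorService = require('./services/errorService');

try {
    throw new Error('connection refused');
} catch (error) {
    // logs { error, functionName, stack } as JSON via winston
    // (and to Slack when the webhook env vars are configured)
    ErrorService.log('monitorService.getProbeMonitors', error);
}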

fetch-resources/services/monitorService.js Normal file (121 lines)

@ -0,0 +1,121 @@
module.exports = {
async getProbeMonitors(probeId, limit, date) {
const query = {
deleted: false,
disabled: false,
type: {
$in: [
'url',
'api',
'incomingHttpRequest',
'kubernetes',
'ip',
'server-monitor',
],
},
$or: [
{
// regions does not include the probeId yet
regions: {
$not: {
$elemMatch: {
probeId,
},
},
},
},
{
regions: {
$elemMatch: {
probeId,
lastPingTime: {
$lt: date,
},
},
},
},
],
};
try {
const monitors = await monitorCollection
.aggregate([
{ $match: query },
{
$addFields: {
regionLastPingTime: {
$filter: {
input: '$regions',
as: 'region',
cond: {
$eq: [
'$$region.probeId',
ObjectId(probeId),
],
},
},
},
},
},
{
$addFields: {
regionLastPingTime: {
$cond: {
if: {
$anyElementTrue: [
'$regionLastPingTime',
],
},
then: '$regionLastPingTime',
else: [{ lastPingTime: 0 }],
},
},
},
},
{
$sort: {
'regionLastPingTime.lastPingTime': 1,
},
},
{ $limit: limit },
])
.toArray();
if (monitors && monitors.length) {
for (const monitor of monitors) {
const newdate = new Date(moment().format());
let updated = false;
const regions = monitor.regions.map(region => {
if (String(region.probeId) === String(probeId)) {
updated = true;
region.lastPingTime = newdate;
}
return region;
});
if (!updated) {
regions.push({
probeId,
lastPingTime: newdate,
});
}
                    // collection.update() was removed in the mongodb 4.x driver
                    await monitorCollection.updateOne(
{ _id: monitor._id },
{ $set: { regions } }
);
}
return monitors;
} else {
return [];
}
} catch (error) {
ErrorService.log('monitorService.getProbeMonitors', error);
throw error;
}
},
};
const ErrorService = require('./errorService');
const moment = require('moment');
const monitorCollection = global.db.collection('monitors');
const { ObjectId } = require('mongodb');
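
For orientation, a sketch of the monitor document shape this query matches: the regions array holds one entry per probe, and a monitor is returned when the requesting probe has no entry yet or its lastPingTime is older than the cutoff date (field values are illustrative):

const exampleMonitor = {
    deleted: false,
    disabled: false,
    type: 'url',
    regions: [
        // probeId is stored as an ObjectId; shown as a string here
        {
            probeId: '61dbb8aa9f870befd8b20533',
            lastPingTime: new Date('2022-01-10T06:47:00Z'),
        },
    ],
};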

fetch-resources/services/probeService.js Normal file (120 lines)

@ -0,0 +1,120 @@
module.exports = {
create: async function(data) {
try {
const _this = this;
let probeKey;
if (data.probeKey) {
probeKey = data.probeKey;
} else {
probeKey = uuidv1();
}
const storedProbe = await _this.findOneBy({
probeName: data.probeName,
});
if (storedProbe && storedProbe.probeName) {
const error = new Error('Probe name already exists.');
error.code = 400;
ErrorService.log('probe.create', error);
throw error;
} else {
const probe = {};
probe.probeKey = probeKey;
probe.probeName = data.probeName;
probe.version = data.probeVersion;
const now = new Date(moment().format());
probe.createdAt = now;
probe.lastAlive = now;
probe.deleted = false;
const result = await probeCollection.insertOne(probe);
const savedProbe = await _this.findOneBy({
_id: ObjectId(result.insertedId),
});
return savedProbe;
}
} catch (error) {
ErrorService.log('ProbeService.create', error);
throw error;
}
},
findOneBy: async function(query) {
try {
if (!query) {
query = {};
}
if (!query.deleted)
query.$or = [
{ deleted: false },
{ deleted: { $exists: false } },
];
const probe = await probeCollection.findOne(query);
return probe;
} catch (error) {
ErrorService.log('ProbeService.findOneBy', error);
throw error;
}
},
updateOneBy: async function(query, data) {
try {
if (!query) {
query = {};
}
if (!query.deleted)
query.$or = [
{ deleted: false },
{ deleted: { $exists: false } },
];
await probeCollection.updateOne(query, { $set: data });
const probe = await this.findOneBy(query);
return probe;
} catch (error) {
ErrorService.log('ProbeService.updateOneBy', error);
throw error;
}
},
updateProbeStatus: async function(probeId) {
try {
const now = new Date(moment().format());
await probeCollection.updateOne(
{
_id: ObjectId(probeId),
$or: [{ deleted: false }, { deleted: { $exists: false } }],
},
{ $set: { lastAlive: now } }
);
const probe = await this.findOneBy({
_id: ObjectId(probeId),
});
// realtime update for probe
postApi(
`${realtimeBaseUrl}/update-probe`,
{ data: probe },
true
).catch(error => {
ErrorService.log('probeService.updateProbeStatus', error);
});
return probe;
} catch (error) {
ErrorService.log('probeService.updateProbeStatus', error);
throw error;
}
},
};
const ErrorService = require('./errorService');
const moment = require('moment');
const { ObjectId } = require('mongodb');
const probeCollection = global.db.collection('probes');
const { v1: uuidv1 } = require('uuid');
const { postApi } = require('../utils/api');
const { realtimeUrl } = require('../utils/config');
const realtimeBaseUrl = `${realtimeUrl}/realtime`;
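
Lifecycle sketch: create registers a probe (generating a probeKey when none is supplied), and updateProbeStatus is the heartbeat that refreshes lastAlive and notifies the realtime service. This assumes global.db has already been initialised as in index.js; the probe name is a placeholder:

const ProbeService = require('./services/probeService');

(async () => {
    const probe = await ProbeService.create({
        probeName: 'probe-1',
        probeVersion: '3.0.0',
    });
    await ProbeService.updateProbeStatus(probe._id);
})();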

fetch-resources/utils/api.js Executable file (42 lines)

@ -0,0 +1,42 @@
const axios = require('axios');
const { clusterKey, serverUrl } = require('./config');
const _this = {
getHeaders: () => {
return {
'Access-Control-Allow-Origin': '*',
Accept: 'application/json',
'Content-Type': 'application/json;charset=UTF-8',
clusterKey,
};
},
postApi: (url, data, withBaseUrl = false) => {
const headers = _this.getHeaders();
return new Promise((resolve, reject) => {
// Error [ERR_FR_MAX_BODY_LENGTH_EXCEEDED]: Request body larger than maxBodyLength limit
// https://stackoverflow.com/questions/58655532/increasing-maxcontentlength-and-maxbodylength-in-axios
axios({
method: 'POST',
url: withBaseUrl ? `${url}` : `${serverUrl}/${url}`,
headers,
data,
maxContentLength: Infinity,
maxBodyLength: Infinity,
})
.then(function(response) {
resolve(response.data);
})
.catch(function(error) {
if (error && error.response && error.response.data)
error = error.response.data;
if (error && error.data) {
error = error.data;
}
reject(error);
});
});
},
};
module.exports = _this;
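
Usage sketch: with withBaseUrl=true the URL is taken as-is, which is how probeService posts to the realtime service; postApi resolves with response.data and rejects with the server's error payload when one is present (the URL matches REALTIME_URL in .env; the payload is a placeholder):

const { postApi } = require('./utils/api');

postApi(
    'http://localhost:3300/realtime/update-probe',
    { data: { probeName: 'probe-1' } }, // placeholder payload
    true // withBaseUrl: skip the serverUrl prefix
)
    .then(result => console.log(result))
    .catch(error => console.error(error));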

fetch-resources/utils/config.js Normal file (8 lines)

@ -0,0 +1,8 @@
const packageJson = require('../package.json');
module.exports = {
clusterKey: process.env.CLUSTER_KEY,
fetchResourcesVersion: packageJson.version,
mongoUrl: process.env.MONGO_URL || 'mongodb://localhost:27017/fyipedb',
    realtimeUrl: process.env['REALTIME_URL'],
    // assumed: utils/api.js destructures serverUrl from this module for relative
    // postApi calls; SERVER_URL itself is not defined in this commit's .env
    serverUrl: process.env['SERVER_URL'],
};