mirror of
https://github.com/OneUptime/oneuptime
synced 2024-11-21 22:59:07 +00:00
Add try/catch around the cron-time parser
This commit is contained in:
parent
e16c9cb3b7
commit
206c7d9bf1
@ -10,7 +10,7 @@ To begin with you need to create a custom probe in your Project Settings > Probe
|
||||
To run a probe, please make sure you have docker installed. You can run custom probe by:
|
||||
|
||||
```
|
||||
docker run --name oneuptime-probe --network host -e PROBE_KEY=<probe-key> -e PROBE_ID=<probe-id> -e PROBE_API_URL=https://oneuptime.com/probe-api -d oneuptime/probe:release
|
||||
docker run --name oneuptime-probe --network host -e PROBE_KEY=<probe-key> -e PROBE_ID=<probe-id> -e PROBE_API_URL=https://oneuptime.com -d oneuptime/probe:release
|
||||
|
||||
```
|
||||
|
||||
|
105
HelmChart/public/oneuptime/templates/home.yaml
Normal file
105
HelmChart/public/oneuptime/templates/home.yaml
Normal file
@ -0,0 +1,105 @@
|
||||
############-----HOME----#############################
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
namespace: {{ $.Release.Namespace }}
|
||||
labels:
|
||||
app: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
app.kubernetes.io/part-of: oneuptime
|
||||
app.kubernetes.io/managed-by: Helm
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
replicas: {{ $.Values.replicaCount }}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
spec:
|
||||
containers:
|
||||
- image: {{ printf "%s/%s/%s:%s" .Values.image.registry .Values.image.repository "home" .Values.image.tag }}
|
||||
name: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
imagePullPolicy: {{ $.Values.image.pullPolicy }}
|
||||
resources:
|
||||
requests:
|
||||
cpu: 250m
|
||||
limits:
|
||||
cpu: 500m
|
||||
env:
|
||||
{{- if .Values.saas.isSaasService }}
|
||||
- name: STRIPE_PUBLIC_KEY
|
||||
value: {{ $.Values.saas.stripe.publicKey }}
|
||||
- name: BILLING_ENABLED
|
||||
value: 'true'
|
||||
- name: AMPLITUDE_PUBLIC_KEY
|
||||
value: {{ $.Values.saas.amplitude.key }}
|
||||
{{- end }}
|
||||
- name: NODE_ENV
|
||||
value: {{ $.Values.nodeEnv }}
|
||||
- name: DISABLE_SIGNUP
|
||||
value: {{ $.Values.disableSignup | quote }}
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
- name: POD_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.name
|
||||
- name: POD_NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.namespace
|
||||
- name: POD_IP
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: status.podIP
|
||||
- name: POD_SERVICE_ACCOUNT
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.serviceAccountName
|
||||
ports:
|
||||
- containerPort: {{ $.Values.host.homePort }}
|
||||
hostPort: {{ $.Values.host.homePort }}
|
||||
name: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
restartPolicy: {{ $.Values.image.restartPolicy }}
|
||||
|
||||
---
|
||||
# OneUptime home Service
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
labels:
|
||||
app: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
app.kubernetes.io/part-of: oneuptime
|
||||
app.kubernetes.io/managed-by: Helm
|
||||
name: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
namespace: {{ $.Release.Namespace }}
|
||||
spec:
|
||||
ports:
|
||||
- port: {{ $.Values.host.homeServicePort }}
|
||||
targetPort: {{ $.Values.host.homePort }}
|
||||
selector:
|
||||
app: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
type: ClusterIP
|
||||
---
|
||||
###########################################
|
||||
|
||||
|
||||
{{- if .Values.autoScaler.enabled }}
|
||||
apiVersion: autoscaling/v1
|
||||
kind: HorizontalPodAutoscaler
|
||||
metadata:
|
||||
name: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
spec:
|
||||
maxReplicas: {{ $.Values.autoScaler.maxReplicas }}
|
||||
minReplicas: {{ $.Values.autoScaler.minReplicas }}
|
||||
scaleTargetRef:
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
name: {{ printf "%s-%s" $.Release.Name "home" }}
|
||||
targetCPUUtilizationPercentage: {{ $.Values.autoScaler.averageCpuUtilization }}
|
||||
---
|
||||
{{- end }}
|
@ -7,7 +7,17 @@ if (!process.env['PROBE_API_URL']) {
|
||||
process.exit();
|
||||
}
|
||||
|
||||
export const PROBE_API_URL: URL = URL.fromString(process.env['PROBE_API_URL']);
|
||||
export let PROBE_API_URL: URL = URL.fromString(process.env['PROBE_API_URL']);
|
||||
|
||||
// If probe api does not have the path. Add it.
|
||||
if (
|
||||
!PROBE_API_URL.toString().endsWith('probe-api') &&
|
||||
!PROBE_API_URL.toString().endsWith('probe-api/')
|
||||
) {
|
||||
PROBE_API_URL = URL.fromString(
|
||||
PROBE_API_URL.addRoute('/probe-api').toString()
|
||||
);
|
||||
}
|
||||
|
||||
export const PROBE_NAME: string | null = process.env['PROBE_NAME'] || null;
|
||||
|
||||
|
@ -24,6 +24,7 @@ import LIMIT_MAX from 'Common/Types/Database/LimitMax';
|
||||
import SortOrder from 'Common/Types/Database/SortOrder';
|
||||
import Query from 'CommonServer/Types/Database/Query';
|
||||
import JSONFunctions from 'Common/Types/JSONFunctions';
|
||||
import logger from 'CommonServer/Utils/Logger';
|
||||
|
||||
const router: ExpressRouter = Express.getRouter();
|
||||
|
||||
@ -247,13 +248,24 @@ router.post(
|
||||
continue;
|
||||
}
|
||||
|
||||
let nextPing: Date = OneUptimeDate.addRemoveMinutes(
|
||||
OneUptimeDate.getCurrentDate(),
|
||||
1
|
||||
);
|
||||
|
||||
try {
|
||||
nextPing = CronTab.getNextExecutionTime(
|
||||
monitorProbe?.monitor?.monitoringInterval as string
|
||||
);
|
||||
} catch (err) {
|
||||
logger.error(err);
|
||||
}
|
||||
|
||||
await MonitorProbeService.updateOneById({
|
||||
id: monitorProbe.id!,
|
||||
data: {
|
||||
lastPingAt: OneUptimeDate.getCurrentDate(),
|
||||
nextPingAt: CronTab.getNextExecutionTime(
|
||||
monitorProbe?.monitor?.monitoringInterval as string
|
||||
),
|
||||
nextPingAt: nextPing,
|
||||
},
|
||||
props: {
|
||||
isRoot: true,
|
||||
|
Loading…
Reference in New Issue
Block a user