Merge branch 'main' into feat/stripe
@@ -3,7 +3,8 @@ services:
   main:
     build:
       context: ./docker
-    image: automatischio/automatisch
+      dockerfile: Dockerfile.compose
+    entrypoint: /compose-entrypoint.sh
     ports:
       - '3000:3000'
     depends_on:
@@ -28,7 +29,8 @@ services:
   worker:
    build:
      context: ./docker
-    image: automatischio/automatisch
+      dockerfile: Dockerfile.compose
+    entrypoint: /compose-entrypoint.sh
    depends_on:
      - main
    environment:
@@ -52,7 +54,7 @@ services:
     volumes:
       - postgres_data:/var/lib/postgresql/data
     healthcheck:
-      test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
+      test: ['CMD-SHELL', 'pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}']
       interval: 10s
       timeout: 5s
       retries: 5
@@ -1,11 +1,8 @@
 # syntax=docker/dockerfile:1
-FROM node:16
+FROM node:16-alpine
 WORKDIR /automatisch
 
-RUN apt-get update && apt-get install -y dos2unix
-
 COPY ./entrypoint.sh /entrypoint.sh
-RUN dos2unix /entrypoint.sh && apt-get --purge remove -y dos2unix && rm -rf /var/lib/apt/lists/*
 
 RUN yarn global add @automatisch/cli@0.2.0
 
docker/Dockerfile.compose (new file, 11 lines)
@@ -0,0 +1,11 @@
+# syntax=docker/dockerfile:1
+FROM automatischio/automatisch:0.2.0
+WORKDIR /automatisch
+
+RUN apk add --no-cache openssl dos2unix
+
+COPY ./compose-entrypoint.sh /compose-entrypoint.sh
+RUN dos2unix /compose-entrypoint.sh
+
+EXPOSE 3000
+ENTRYPOINT ["sh", "/compose-entrypoint.sh"]
docker/compose-entrypoint.sh (new executable file, 18 lines)
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+set -e
+
+if [ ! -f /automatisch/storage/.env ]; then
+  >&2 echo "Saving environment variables"
+  ENCRYPTION_KEY="${ENCRYPTION_KEY:-$(openssl rand -base64 36)}"
+  APP_SECRET_KEY="${APP_SECRET_KEY:-$(openssl rand -base64 36)}"
+  echo "ENCRYPTION_KEY=$ENCRYPTION_KEY" >> /automatisch/storage/.env
+  echo "APP_SECRET_KEY=$APP_SECRET_KEY" >> /automatisch/storage/.env
+fi
+
+# initiate env. vars. from /automatisch/storage/.env file
+export $(grep -v '^#' /automatisch/storage/.env | xargs)
+
+echo "Environment variables have been set!"
+
+sh /entrypoint.sh
@@ -2,16 +2,8 @@
 
 set -e
 
-if [ ! -f /automatisch/storage/.env ]; then
-  >&2 echo "Saving environment variables"
-  ENCRYPTION_KEY="${ENCRYPTION_KEY:-$(openssl rand -base64 36)}"
-  APP_SECRET_KEY="${APP_SECRET_KEY:-$(openssl rand -base64 36)}"
-  echo "ENCRYPTION_KEY=$ENCRYPTION_KEY" >> /automatisch/storage/.env
-  echo "APP_SECRET_KEY=$APP_SECRET_KEY" >> /automatisch/storage/.env
-fi
-
 if [ -n "$WORKER" ]; then
-  automatisch start-worker --env-file /automatisch/storage/.env
+  automatisch start-worker
 else
-  automatisch start --env-file /automatisch/storage/.env
+  automatisch start
 fi
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-set -e
-
-until psql -h "$POSTGRES_HOST" -U "$POSTGRES_USERNAME" -d "$POSTGRES_DATABASE" -c '\q'; do
-  >&2 echo "Waiting for Postgres to be ready..."
-  sleep 1
-done
-
->&2 echo "Postgres is up - executing command"
-exec "$@"
@@ -13,5 +13,8 @@ ENCRYPTION_KEY=sample-encryption-key
 APP_SECRET_KEY=sample-app-secret-key
 REDIS_PORT=6379
 REDIS_HOST=127.0.0.1
+REDIS_USERNAME=redis_username
+REDIS_PASSWORD=redis_password
+REDIS_TLS=true
 ENABLE_BULLMQ_DASHBOARD=false
 SERVE_WEB_APP_SEPARATELY=true
@@ -21,6 +21,9 @@ type AppConfig = {
   serveWebAppSeparately: boolean;
   redisHost: string;
   redisPort: number;
+  redisUsername: string;
+  redisPassword: string;
+  redisTls: boolean;
   enableBullMQDashboard: boolean;
   bullMQDashboardUsername: string;
   bullMQDashboardPassword: string;
@@ -55,14 +58,17 @@ const appConfig: AppConfig = {
   postgresUsername:
     process.env.POSTGRES_USERNAME || 'automatisch_development_user',
   postgresPassword: process.env.POSTGRES_PASSWORD,
-  postgresEnableSsl: process.env.POSTGRES_ENABLE_SSL === 'true' ? true : false,
+  postgresEnableSsl: process.env.POSTGRES_ENABLE_SSL === 'true',
   encryptionKey: process.env.ENCRYPTION_KEY || '',
   appSecretKey: process.env.APP_SECRET_KEY || '',
   serveWebAppSeparately,
   redisHost: process.env.REDIS_HOST || '127.0.0.1',
   redisPort: parseInt(process.env.REDIS_PORT || '6379'),
+  redisUsername: process.env.REDIS_USERNAME,
+  redisPassword: process.env.REDIS_PASSWORD,
+  redisTls: process.env.REDIS_TLS === 'true',
   enableBullMQDashboard:
-    process.env.ENABLE_BULLMQ_DASHBOARD === 'true' ? true : false,
+    process.env.ENABLE_BULLMQ_DASHBOARD === 'true',
   bullMQDashboardUsername: process.env.BULLMQ_DASHBOARD_USERNAME,
   bullMQDashboardPassword: process.env.BULLMQ_DASHBOARD_PASSWORD,
   baseUrl,
@@ -1,9 +1,24 @@
 import appConfig from './app';
 
-const redisConfig = {
+type TRedisConfig = {
+  host: string,
+  port: number,
+  username?: string,
+  password?: string,
+  tls?: Record<string, unknown>,
+  enableOfflineQueue: boolean,
+}
+
+const redisConfig: TRedisConfig = {
   host: appConfig.redisHost,
   port: appConfig.redisPort,
+  username: appConfig.redisUsername,
+  password: appConfig.redisPassword,
   enableOfflineQueue: false,
 };
 
+if (appConfig.redisTls) {
+  redisConfig.tls = {};
+}
+
 export default redisConfig;
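Note: BullMQ hands this connection object through to ioredis, where the mere presence of a `tls` key (even an empty object, as above) switches the connection to TLS with default options. A minimal sketch of how the exported config is consumed, assuming the relative import path; this snippet is illustrative and not part of the commit:

import { Queue } from 'bullmq';
import redisConfig from './redis'; // import path assumed

// The workers changed in this commit pass the same object as `connection`.
const flowQueue = new Queue('flow', { connection: redisConfig });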
@@ -1,12 +1,17 @@
+import type { AxiosResponse, AxiosError } from 'axios';
 import { IJSONObject } from '@automatisch/types';
 import BaseError from './base';
 
 export default class HttpError extends BaseError {
-  constructor(error: IJSONObject) {
+  response: AxiosResponse;
+
+  constructor(error: AxiosError) {
     const computedError =
-      ((error.response as IJSONObject)?.data as IJSONObject) ||
-      (error.message as string);
+      error.response?.data as IJSONObject ||
+      error.message as string;
 
     super(computedError);
+
+    this.response = error.response;
   }
 }
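A minimal sketch of one plausible call site for the retyped constructor, assuming an axios response interceptor; the client setup and import path are illustrative, not taken from this commit:

import axios from 'axios';
import type { AxiosError, AxiosResponse } from 'axios';
import HttpError from './errors/http'; // import path assumed

const client = axios.create();
client.interceptors.response.use(
  (response: AxiosResponse) => response,
  (error: AxiosError) => {
    // The constructor now keeps the raw AxiosResponse on `this.response`,
    // so callers can inspect status codes after catching.
    throw new HttpError(error);
  }
);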
@@ -1,5 +1,6 @@
 import Context from '../../types/express/context';
 import flowQueue from '../../queues/flow';
+import { REMOVE_AFTER_30_DAYS_OR_150_JOBS, REMOVE_AFTER_7_DAYS_OR_50_JOBS } from '../../helpers/remove-job-configuration';
 
 type Params = {
   input: {
@@ -51,6 +52,8 @@ const updateFlowStatus = async (
       {
         repeat: repeatOptions,
         jobId: flow.id,
+        removeOnComplete: REMOVE_AFTER_7_DAYS_OR_50_JOBS,
+        removeOnFail: REMOVE_AFTER_30_DAYS_OR_150_JOBS
       }
     );
   } else {
packages/backend/src/helpers/remove-job-configuration.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
+export const REMOVE_AFTER_30_DAYS_OR_150_JOBS = {
+  age: 30 * 24 * 3600,
+  count: 150,
+};
+
+export const REMOVE_AFTER_7_DAYS_OR_50_JOBS = {
+  age: 7 * 24 * 3600,
+  count: 50,
+};
+
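These constants follow BullMQ's KeepJobs shape: a finished job record is pruned once it is older than `age` seconds or once more than `count` finished jobs have accumulated. A sketch of how the workers below plug them in; the queue name and payload are illustrative:

import { Queue } from 'bullmq';
import {
  REMOVE_AFTER_7_DAYS_OR_50_JOBS,
  REMOVE_AFTER_30_DAYS_OR_150_JOBS,
} from './remove-job-configuration';

const queue = new Queue('action');

async function enqueueExample() {
  await queue.add(
    'example-job',              // job name (illustrative)
    { flowId: 'some-flow-id' }, // payload (illustrative)
    {
      removeOnComplete: REMOVE_AFTER_7_DAYS_OR_50_JOBS,  // prune successes sooner
      removeOnFail: REMOVE_AFTER_30_DAYS_OR_150_JOBS,    // keep failures longer for debugging
    }
  );
}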
@@ -4,6 +4,7 @@ import logger from '../helpers/logger';
 import Step from '../models/step';
 import actionQueue from '../queues/action';
 import { processAction } from '../services/action';
+import { REMOVE_AFTER_30_DAYS_OR_150_JOBS, REMOVE_AFTER_7_DAYS_OR_50_JOBS } from '../helpers/remove-job-configuration';
 
 type JobData = {
   flowId: string;
@@ -31,7 +32,12 @@ export const worker = new Worker(
       stepId: nextStep.id,
     };
 
-    await actionQueue.add(jobName, jobPayload);
+    const jobOptions = {
+      removeOnComplete: REMOVE_AFTER_7_DAYS_OR_50_JOBS,
+      removeOnFail: REMOVE_AFTER_30_DAYS_OR_150_JOBS,
+    }
+
+    await actionQueue.add(jobName, jobPayload, jobOptions);
   },
   { connection: redisConfig }
 );
@@ -42,7 +48,7 @@ worker.on('completed', (job) => {
 
 worker.on('failed', (job, err) => {
   logger.info(
-    `JOB ID: ${job.id} - FLOW ID: ${job.data.flowId} has failed22 to start with ${err.message}`
+    `JOB ID: ${job.id} - FLOW ID: ${job.data.flowId} has failed to start with ${err.message}`
   );
 });
 
@@ -4,6 +4,7 @@ import logger from '../helpers/logger';
 import triggerQueue from '../queues/trigger';
 import { processFlow } from '../services/flow';
 import Flow from '../models/flow';
+import { REMOVE_AFTER_30_DAYS_OR_150_JOBS, REMOVE_AFTER_7_DAYS_OR_50_JOBS } from '../helpers/remove-job-configuration';
 
 export const worker = new Worker(
   'flow',
@@ -17,6 +18,11 @@ export const worker = new Worker(
 
     const reversedData = data.reverse();
 
+    const jobOptions = {
+      removeOnComplete: REMOVE_AFTER_7_DAYS_OR_50_JOBS,
+      removeOnFail: REMOVE_AFTER_30_DAYS_OR_150_JOBS,
+    }
+
     for (const triggerItem of reversedData) {
       const jobName = `${triggerStep.id}-${triggerItem.meta.internalId}`;
 
@@ -26,7 +32,7 @@ export const worker = new Worker(
         triggerItem,
       };
 
-      await triggerQueue.add(jobName, jobPayload);
+      await triggerQueue.add(jobName, jobPayload, jobOptions);
     }
 
     if (error) {
@@ -38,7 +44,7 @@ export const worker = new Worker(
         error,
       };
 
-      await triggerQueue.add(jobName, jobPayload);
+      await triggerQueue.add(jobName, jobPayload, jobOptions);
     }
   },
   { connection: redisConfig }
@@ -5,6 +5,7 @@ import { IJSONObject, ITriggerItem } from '@automatisch/types';
 import actionQueue from '../queues/action';
 import Step from '../models/step';
 import { processTrigger } from '../services/trigger';
+import { REMOVE_AFTER_30_DAYS_OR_150_JOBS, REMOVE_AFTER_7_DAYS_OR_50_JOBS } from '../helpers/remove-job-configuration';
 
 type JobData = {
   flowId: string;
@@ -32,7 +33,12 @@ export const worker = new Worker(
       stepId: nextStep.id,
     };
 
-    await actionQueue.add(jobName, jobPayload);
+    const jobOptions = {
+      removeOnComplete: REMOVE_AFTER_7_DAYS_OR_50_JOBS,
+      removeOnFail: REMOVE_AFTER_30_DAYS_OR_150_JOBS,
+    }
+
+    await actionQueue.add(jobName, jobPayload, jobOptions);
   },
   { connection: redisConfig }
 );
@@ -29,6 +29,9 @@ Please be careful with the `ENCRYPTION_KEY` environment variable. It is used to
 | `APP_SECRET_KEY`            | string  |         | Secret Key to authenticate the user |
 | `REDIS_HOST`                | string  | `redis` | Redis Host                          |
 | `REDIS_PORT`                | number  | `6379`  | Redis Port                          |
+| `REDIS_USERNAME`            | string  | ``      | Redis Username                      |
+| `REDIS_PASSWORD`            | string  | ``      | Redis Password                      |
+| `REDIS_TLS`                 | boolean | `false` | Redis TLS                           |
 | `TELEMETRY_ENABLED`         | boolean | `true`  | Enable/Disable Telemetry            |
 | `ENABLE_BULLMQ_DASHBOARD`   | boolean | `false` | Enable BullMQ Dashboard             |
 | `BULLMQ_DASHBOARD_USERNAME` | string  |         | Username to login BullMQ Dashboard  |