Compare commits

..

2 Commits

Author     SHA1        Message                                       Date
Ali BARIN  74494989d2  test: update first app as azure-openai        2023-12-20 16:25:25 +00:00
Ali BARIN  e122ad4178  feat(azure-openai): add send prompt action    2023-12-20 16:25:11 +00:00
3069 changed files with 75007 additions and 66382 deletions

View File

@@ -28,7 +28,7 @@ cd packages/web
 rm -rf .env
 echo "
 PORT=$WEB_PORT
-REACT_APP_BACKEND_URL=http://localhost:$BACKEND_PORT
+REACT_APP_GRAPHQL_URL=http://localhost:$BACKEND_PORT/graphql
 " >> .env
 cd $CURRENT_DIR

View File

@@ -8,7 +8,7 @@
"version": "latest" "version": "latest"
}, },
"ghcr.io/devcontainers/features/node:1": { "ghcr.io/devcontainers/features/node:1": {
"version": 18 "version": 16
}, },
"ghcr.io/devcontainers/features/common-utils:1": { "ghcr.io/devcontainers/features/common-utils:1": {
"username": "vscode", "username": "vscode",

View File

@@ -36,6 +36,7 @@ services:
   keycloak:
     image: quay.io/keycloak/keycloak:21.1
     restart: always
+    container_name: keycloak
     environment:
       - KEYCLOAK_ADMIN=admin
       - KEYCLOAK_ADMIN_PASSWORD=admin

View File

@@ -4,9 +4,5 @@
 **/.devcontainer
 **/.github
 **/.vscode
-**/.env
-**/.env.test
-**/.env.production
-**/yarn-error.log
 packages/docs
 packages/e2e-test

.eslintrc.js (new file, 18 lines added)
View File

@@ -0,0 +1,18 @@
module.exports = {
root: true,
parser: '@typescript-eslint/parser',
plugins: ['@typescript-eslint'],
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
'prettier',
],
overrides: [
{
files: ['**/*.test.ts', '**/test/**/*.ts'],
rules: {
'@typescript-eslint/ban-ts-comment': ['off'],
},
},
],
};

View File

@@ -16,15 +16,15 @@ jobs:
       - uses: actions/checkout@v2
       - uses: actions/setup-node@v2
         with:
-          node-version: '18'
+          node-version: '16'
           cache: 'yarn'
           cache-dependency-path: yarn.lock
       - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
       - run: echo "🖥️ The workflow is now ready to test your code on the runner."
       - run: yarn --frozen-lockfile
-      - run: cd packages/backend && yarn lint
+      - run: yarn lint
       - run: echo "🍏 This job's status is ${{ job.status }}."
-  start-backend-server:
+  build-backend:
     runs-on: ubuntu-latest
     steps:
       - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
@@ -33,36 +33,13 @@ jobs:
       - uses: actions/checkout@v2
       - uses: actions/setup-node@v2
         with:
-          node-version: '18'
+          node-version: '16'
           cache: 'yarn'
           cache-dependency-path: yarn.lock
       - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
       - run: echo "🖥️ The workflow is now ready to test your code on the runner."
       - run: yarn --frozen-lockfile && yarn lerna bootstrap
-      - run: cd packages/backend && yarn start
-        env:
-          ENCRYPTION_KEY: sample_encryption_key
-          WEBHOOK_SECRET_KEY: sample_webhook_secret_key
-      - run: echo "🍏 This job's status is ${{ job.status }}."
-  start-backend-worker:
-    runs-on: ubuntu-latest
-    steps:
-      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
-      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
-      - run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
-      - uses: actions/checkout@v2
-      - uses: actions/setup-node@v2
-        with:
-          node-version: '18'
-          cache: 'yarn'
-          cache-dependency-path: yarn.lock
-      - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
-      - run: echo "🖥️ The workflow is now ready to test your code on the runner."
-      - run: yarn --frozen-lockfile && yarn lerna bootstrap
-      - run: cd packages/backend && yarn start:worker
-        env:
-          ENCRYPTION_KEY: sample_encryption_key
-          WEBHOOK_SECRET_KEY: sample_webhook_secret_key
+      - run: cd packages/backend && yarn build
       - run: echo "🍏 This job's status is ${{ job.status }}."
   build-web:
     runs-on: ubuntu-latest
@@ -73,7 +50,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: actions/setup-node@v2
         with:
-          node-version: '18'
+          node-version: '16'
           cache: 'yarn'
           cache-dependency-path: yarn.lock
       - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
@@ -83,3 +60,21 @@ jobs:
         env:
           CI: false
       - run: echo "🍏 This job's status is ${{ job.status }}."
+  build-cli:
+    runs-on: ubuntu-latest
+    steps:
+      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
+      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
+      - run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
+      - uses: actions/checkout@v2
+      - uses: actions/setup-node@v2
+        with:
+          node-version: '16'
+          cache: 'yarn'
+          cache-dependency-path: yarn.lock
+      - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
+      - run: echo "🖥️ The workflow is now ready to test your code on the runner."
+      - run: yarn --frozen-lockfile && yarn lerna bootstrap
+      - run: cd packages/backend && yarn build
+      - run: cd packages/cli && yarn build
+      - run: echo "🍏 This job's status is ${{ job.status }}."

View File

@@ -1,32 +0,0 @@
name: Automatisch Docs Change
on:
pull_request:
paths:
- 'packages/docs/**'
jobs:
label:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Label PR
uses: actions/github-script@v6
with:
script: |
const { pull_request } = context.payload;
const label = 'documentation-change';
const hasLabel = pull_request.labels.some(({ name }) => name === label);
if (!hasLabel) {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pull_request.number,
labels: [label],
});
console.log(`Label "${label}" added to PR #${pull_request.number}`);
} else {
console.log(`Label "${label}" already exists on PR #${pull_request.number}`);
}

View File

@@ -62,15 +62,14 @@ jobs:
         run: yarn && yarn lerna bootstrap
       - name: Install Playwright Browsers
         run: yarn playwright install --with-deps
-      - name: Build Automatisch web
-        working-directory: ./packages/web
-        run: yarn build
+      - name: Build Automatisch
+        run: yarn lerna run --scope=@*/{web,backend,cli} build
         env:
           # Keep this until we clean up warnings in build processes
           CI: false
       - name: Migrate database
         working-directory: ./packages/backend
-        run: yarn db:migrate
+        run: yarn db:migrate --migrations-directory ./dist/src/db/migrations
       - name: Seed user
         working-directory: ./packages/backend
         run: yarn db:seed:user &
@@ -97,7 +96,7 @@ jobs:
         run: yarn start &
         working-directory: ./packages/backend
       - name: Run Automatisch worker
-        run: yarn start:worker &
+        run: node dist/src/worker.js &
         working-directory: ./packages/backend
       - name: Setup upterm session
         if: false

View File

@@ -1 +1 @@
-18.19.0
+16.15.0

.nvmrc (2 lines changed)
View File

@@ -1 +1 @@
-18.19.0
+16.15.0

View File

@@ -1,25 +1,14 @@
 # syntax=docker/dockerfile:1
-FROM node:18-alpine
+FROM node:16-alpine
-ENV PORT 3000
-RUN \
-  apk --no-cache add --virtual build-dependencies python3 build-base git
 WORKDIR /automatisch
-# copy the app, note .dockerignore
-COPY . /automatisch
-RUN yarn
-RUN cd packages/web && yarn build
 RUN \
+  apk --no-cache add --virtual build-dependencies python3 build-base && \
+  yarn global add @automatisch/cli@0.10.0 --network-timeout 1000000 && \
   rm -rf /usr/local/share/.cache/ && \
   apk del build-dependencies
-COPY ./docker/entrypoint.sh /entrypoint.sh
+COPY ./entrypoint.sh /entrypoint.sh
 EXPOSE 3000
 ENTRYPOINT ["sh", "/entrypoint.sh"]

docker/Dockerfile.cloud (new file, 19 lines added)
View File

@@ -0,0 +1,19 @@
# syntax=docker/dockerfile:1
FROM node:16-alpine
WORKDIR /automatisch
ENV PORT 3000
RUN ls -lna
# copy the app, note .dockerignore
COPY . ./
RUN yarn
RUN yarn lerna bootstrap
RUN yarn lerna run --scope=@*/{web,backend,cli} build
COPY ./docker/entrypoint-cloud.sh /entrypoint-cloud.sh
EXPOSE 3000
ENTRYPOINT ["sh", "/entrypoint-cloud.sh"]

View File

@@ -1,5 +1,5 @@
 # syntax=docker/dockerfile:1
-FROM automatischio/automatisch:latest
+FROM automatischio/automatisch:0.10.0
 WORKDIR /automatisch
 RUN apk add --no-cache openssl dos2unix

docker/entrypoint-cloud.sh (new executable file, 9 lines added)
View File

@@ -0,0 +1,9 @@
#!/bin/sh
set -e
if [ -n "$WORKER" ]; then
yarn automatisch start-worker
else
yarn automatisch start
fi

View File

@@ -2,12 +2,8 @@
 set -e
-cd packages/backend
 if [ -n "$WORKER" ]; then
-  yarn start:worker
+  automatisch start-worker
 else
-  yarn db:migrate
-  yarn db:seed:user
-  yarn start
+  automatisch start
 fi

View File

@@ -6,6 +6,8 @@
"start": "lerna run --stream --parallel --scope=@*/{web,backend} dev", "start": "lerna run --stream --parallel --scope=@*/{web,backend} dev",
"start:web": "lerna run --stream --scope=@*/web dev", "start:web": "lerna run --stream --scope=@*/web dev",
"start:backend": "lerna run --stream --scope=@*/backend dev", "start:backend": "lerna run --stream --scope=@*/backend dev",
"lint": "lerna run --no-bail --stream --parallel --scope=@*/{web,backend,cli} lint",
"build:watch": "lerna run --no-bail --stream --parallel --scope=@*/{web,backend,cli} build:watch",
"build:docs": "cd ./packages/docs && yarn install && yarn build" "build:docs": "cd ./packages/docs && yarn install && yarn build"
}, },
"workspaces": { "workspaces": {
@@ -16,10 +18,13 @@
"**/babel-loader", "**/babel-loader",
"**/webpack", "**/webpack",
"**/@automatisch/web", "**/@automatisch/web",
"**/@automatisch/types",
"**/ajv" "**/ajv"
] ]
}, },
"devDependencies": { "devDependencies": {
"@typescript-eslint/eslint-plugin": "^5.9.1",
"@typescript-eslint/parser": "^5.9.1",
"eslint": "^8.13.0", "eslint": "^8.13.0",
"eslint-config-prettier": "^8.3.0", "eslint-config-prettier": "^8.3.0",
"eslint-plugin-prettier": "^4.0.0", "eslint-plugin-prettier": "^4.0.0",

View File

@@ -1,12 +0,0 @@
{
"root": true,
"env": {
"node": true,
"es6": true
},
"extends": ["eslint:recommended", "prettier"],
"parserOptions": {
"ecmaVersion": "latest",
"sourceType": "module"
}
}

View File

@@ -1,9 +0,0 @@
import pg from 'pg';
const client = new pg.Client({
host: 'localhost',
user: 'postgres',
port: 5432,
});
export default client;

View File

@@ -0,0 +1,9 @@
import { Client } from 'pg';
const client = new Client({
host: 'localhost',
user: 'postgres',
port: 5432,
});
export default client;

View File

@@ -1,31 +0,0 @@
import appConfig from '../../src/config/app.js';
import logger from '../../src/helpers/logger.js';
import '../../src/config/orm.js';
import { client as knex } from '../../src/config/database.js';
export const renameMigrationsAsJsFiles = async () => {
if (!appConfig.isDev) {
return;
}
try {
const tableExists = await knex.schema.hasTable('knex_migrations');
if (tableExists) {
await knex('knex_migrations')
.where('name', 'like', '%.ts')
.update({
name: knex.raw("REPLACE(name, '.ts', '.js')"),
});
logger.info(
`Migration file names with typescript renamed as JS file names!`
);
}
} catch (err) {
logger.error(err.message);
}
await knex.destroy();
};
renameMigrationsAsJsFiles();

View File

@@ -1,3 +0,0 @@
import { createDatabaseAndUser } from './utils.js';
createDatabaseAndUser();

View File

@@ -0,0 +1,3 @@
import { createDatabaseAndUser } from './utils';
createDatabaseAndUser();

View File

@@ -1,3 +0,0 @@
import { dropDatabase } from './utils.js';
dropDatabase();

View File

@@ -0,0 +1,3 @@
import { dropDatabase } from './utils';
dropDatabase();

View File

@@ -1,3 +0,0 @@
import { createUser } from './utils.js';
createUser();

View File

@@ -0,0 +1,3 @@
import { createUser } from './utils';
createUser();

View File

@@ -1,145 +0,0 @@
import appConfig from '../../src/config/app.js';
import logger from '../../src/helpers/logger.js';
import client from './client.js';
import User from '../../src/models/user.js';
import Config from '../../src/models/config.js';
import Role from '../../src/models/role.js';
import '../../src/config/orm.js';
import process from 'process';
async function fetchAdminRole() {
const role = await Role.query()
.where({
key: 'admin',
})
.limit(1)
.first();
return role;
}
export async function createUser(
email = 'user@automatisch.io',
password = 'sample'
) {
if (appConfig.disableSeedUser) {
logger.info('Seed user is disabled.');
process.exit(0);
return;
}
const UNIQUE_VIOLATION_CODE = '23505';
const role = await fetchAdminRole();
const userParams = {
email,
password,
fullName: 'Initial admin',
roleId: role.id,
};
try {
const userCount = await User.query().resultSize();
if (userCount === 0) {
const user = await User.query().insertAndFetch(userParams);
logger.info(`User has been saved: ${user.email}`);
await Config.markInstallationCompleted();
} else {
logger.info('No need to seed a user.');
}
} catch (err) {
if (err.nativeError.code !== UNIQUE_VIOLATION_CODE) {
throw err;
}
logger.info(`User already exists: ${email}`);
}
process.exit(0);
}
export const createDatabaseAndUser = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.connect();
await createDatabase(database);
await createDatabaseUser(user);
await grantPrivileges(database, user);
await client.end();
process.exit(0);
};
export const createDatabase = async (database = appConfig.postgresDatabase) => {
const DUPLICATE_DB_CODE = '42P04';
try {
await client.query(`CREATE DATABASE ${database}`);
logger.info(`Database: ${database} created!`);
} catch (err) {
if (err.code !== DUPLICATE_DB_CODE) {
throw err;
}
logger.info(`Database: ${database} already exists!`);
}
};
export const createDatabaseUser = async (user = appConfig.postgresUsername) => {
const DUPLICATE_OBJECT_CODE = '42710';
try {
const result = await client.query(`CREATE USER ${user}`);
logger.info(`Database User: ${user} created!`);
return result;
} catch (err) {
if (err.code !== DUPLICATE_OBJECT_CODE) {
throw err;
}
logger.info(`Database User: ${user} already exists!`);
}
};
export const grantPrivileges = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.query(
`GRANT ALL PRIVILEGES ON DATABASE ${database} TO ${user};`
);
logger.info(`${user} has granted all privileges on ${database}!`);
};
export const dropDatabase = async () => {
if (appConfig.appEnv != 'development' && appConfig.appEnv != 'test') {
const errorMessage =
'Drop database command can be used only with development or test environments!';
logger.error(errorMessage);
return;
}
await client.connect();
await dropDatabaseAndUser();
await client.end();
};
export const dropDatabaseAndUser = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.query(`DROP DATABASE IF EXISTS ${database}`);
logger.info(`Database: ${database} removed!`);
await client.query(`DROP USER IF EXISTS ${user}`);
logger.info(`Database User: ${user} removed!`);
};

View File

@@ -0,0 +1,131 @@
import appConfig from '../../src/config/app';
import logger from '../../src/helpers/logger';
import client from './client';
import User from '../../src/models/user';
import Role from '../../src/models/role';
import '../../src/config/orm';
async function fetchAdminRole() {
const role = await Role
.query()
.where({
key: 'admin'
})
.limit(1)
.first();
return role;
}
export async function createUser(
email = 'user@automatisch.io',
password = 'sample'
) {
const UNIQUE_VIOLATION_CODE = '23505';
const role = await fetchAdminRole();
const userParams = {
email,
password,
fullName: 'Initial admin',
roleId: role.id,
};
try {
const userCount = await User.query().resultSize();
if (userCount === 0) {
const user = await User.query().insertAndFetch(userParams);
logger.info(`User has been saved: ${user.email}`);
} else {
logger.info('No need to seed a user.');
}
} catch (err) {
if ((err as any).nativeError.code !== UNIQUE_VIOLATION_CODE) {
throw err;
}
logger.info(`User already exists: ${email}`);
}
}
export const createDatabaseAndUser = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.connect();
await createDatabase(database);
await createDatabaseUser(user);
await grantPrivileges(database, user);
await client.end();
};
export const createDatabase = async (database = appConfig.postgresDatabase) => {
const DUPLICATE_DB_CODE = '42P04';
try {
await client.query(`CREATE DATABASE ${database}`);
logger.info(`Database: ${database} created!`);
} catch (err) {
if ((err as any).code !== DUPLICATE_DB_CODE) {
throw err;
}
logger.info(`Database: ${database} already exists!`);
}
};
export const createDatabaseUser = async (user = appConfig.postgresUsername) => {
const DUPLICATE_OBJECT_CODE = '42710';
try {
const result = await client.query(`CREATE USER ${user}`);
logger.info(`Database User: ${user} created!`);
return result;
} catch (err) {
if ((err as any).code !== DUPLICATE_OBJECT_CODE) {
throw err;
}
logger.info(`Database User: ${user} already exists!`);
}
};
export const grantPrivileges = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.query(
`GRANT ALL PRIVILEGES ON DATABASE ${database} TO ${user};`
);
logger.info(`${user} has granted all privileges on ${database}!`);
};
export const dropDatabase = async () => {
if (appConfig.appEnv != 'development' && appConfig.appEnv != 'test') {
const errorMessage =
'Drop database command can be used only with development or test environments!';
logger.error(errorMessage);
return;
}
await client.connect();
await dropDatabaseAndUser();
await client.end();
};
export const dropDatabaseAndUser = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.query(`DROP DATABASE IF EXISTS ${database}`);
logger.info(`Database: ${database} removed!`);
await client.query(`DROP USER IF EXISTS ${user}`);
logger.info(`Database User: ${user} removed!`);
};

packages/backend/database-utils.d.ts (new vendored file, 1 line added)
View File

@@ -0,0 +1 @@
export * from './dist/bin/database/utils';

View File

@@ -0,0 +1,2 @@
/* eslint-disable */
module.exports = require('./dist/bin/database/utils');

packages/backend/database.d.ts (new vendored file, 1 line added)
View File

@@ -0,0 +1 @@
export * from './dist/src/config/database';

View File

@@ -0,0 +1,2 @@
/* eslint-disable */
module.exports = require('./dist/src/config/database');

View File

@@ -0,0 +1,9 @@
/** @type {import('ts-jest').JestConfigWithTsJest} */
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
setupFilesAfterEnv: ['./test/setup/global-hooks.ts'],
globalTeardown: './test/setup/global-teardown.ts',
collectCoverage: true,
collectCoverageFrom: ['src/graphql/queries/*.ts'],
};

View File

@@ -1,33 +0,0 @@
import { knexSnakeCaseMappers } from 'objection';
import appConfig from './src/config/app.js';
import path from 'path';
import { fileURLToPath } from 'url';
const fileExtension = 'js';
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const knexConfig = {
client: 'pg',
connection: {
host: appConfig.postgresHost,
port: appConfig.postgresPort,
user: appConfig.postgresUsername,
password: appConfig.postgresPassword,
database: appConfig.postgresDatabase,
ssl: appConfig.postgresEnableSsl,
},
asyncStackTraces: appConfig.isDev,
searchPath: [appConfig.postgresSchema],
pool: { min: 0, max: 20 },
migrations: {
directory: __dirname + '/src/db/migrations',
extension: fileExtension,
loadExtensions: [`.${fileExtension}`],
},
seeds: {
directory: __dirname + '/src/db/seeds',
},
...(appConfig.isTest ? knexSnakeCaseMappers() : {}),
};
export default knexConfig;

View File

@@ -0,0 +1,30 @@
import { knexSnakeCaseMappers } from 'objection';
import appConfig from './src/config/app';
const fileExtension = appConfig.isDev || appConfig.isTest ? 'ts' : 'js';
const knexConfig = {
client: 'pg',
connection: {
host: appConfig.postgresHost,
port: appConfig.postgresPort,
user: appConfig.postgresUsername,
password: appConfig.postgresPassword,
database: appConfig.postgresDatabase,
ssl: appConfig.postgresEnableSsl,
},
asyncStackTraces: appConfig.isDev,
searchPath: [appConfig.postgresSchema],
pool: { min: 0, max: 20 },
migrations: {
directory: __dirname + '/src/db/migrations',
extension: fileExtension,
loadExtensions: [`.${fileExtension}`],
},
seeds: {
directory: __dirname + '/src/db/seeds',
},
...(appConfig.isTest ? knexSnakeCaseMappers() : {}),
};
export default knexConfig;

packages/backend/logger.d.ts (new vendored file, 1 line added)
View File

@@ -0,0 +1 @@
export * from './dist/src/helpers/logger';

View File

@@ -0,0 +1,2 @@
/* eslint-disable */
module.exports = require('./dist/src/helpers/logger');

View File

@@ -3,24 +3,27 @@
"version": "0.10.0", "version": "0.10.0",
"license": "See LICENSE file", "license": "See LICENSE file",
"description": "The open source Zapier alternative. Build workflow automation without spending time and money.", "description": "The open source Zapier alternative. Build workflow automation without spending time and money.",
"type": "module",
"scripts": { "scripts": {
"dev": "nodemon --watch 'src/**/*.js' --exec 'node' src/server.js", "dev": "ts-node-dev --watch 'src/graphql/schema.graphql' --exit-child src/server.ts",
"worker": "nodemon --watch 'src/**/*.js' --exec 'node' src/worker.js", "worker": "nodemon --watch 'src/**/*.ts' --exec 'ts-node' src/worker.ts",
"start": "node src/server.js", "build": "tsc && yarn copy-statics",
"start:worker": "node src/worker.js", "build:watch": "nodemon --watch 'src/**/*.ts' --watch 'bin/**/*.ts' --exec yarn build --ext ts",
"pretest": "APP_ENV=test node ./test/setup/prepare-test-env.js", "start": "node dist/src/server.js",
"test": "APP_ENV=test vitest run", "pretest": "APP_ENV=test ts-node ./test/setup/prepare-test-env.ts",
"lint": "eslint .", "test": "APP_ENV=test jest --verbose",
"db:create": "node ./bin/database/create.js", "lint": "eslint . --ignore-path ../../.eslintignore",
"db:seed:user": "node ./bin/database/seed-user.js", "db:create": "ts-node ./bin/database/create.ts",
"db:drop": "node ./bin/database/drop.js", "db:seed:user": "ts-node ./bin/database/seed-user.ts",
"db:drop": "ts-node ./bin/database/drop.ts",
"db:migration:create": "knex migrate:make", "db:migration:create": "knex migrate:make",
"db:rollback": "knex migrate:rollback", "db:rollback": "knex migrate:rollback",
"db:migrate": "node ./bin/database/convert-migrations.js && knex migrate:latest" "db:migrate": "knex migrate:latest",
"copy-statics": "copyfiles src/**/*.{graphql,json,svg,hbs} dist",
"prepack": "yarn build",
"prebuild": "rm -rf ./dist"
}, },
"dependencies": { "dependencies": {
"@atproto/api": "^0.12.13", "@automatisch/web": "^0.10.0",
"@bull-board/express": "^3.10.1", "@bull-board/express": "^3.10.1",
"@casl/ability": "^6.5.0", "@casl/ability": "^6.5.0",
"@graphql-tools/graphql-file-loader": "^7.3.4", "@graphql-tools/graphql-file-loader": "^7.3.4",
@@ -29,23 +32,28 @@
"@rudderstack/rudder-sdk-node": "^1.1.2", "@rudderstack/rudder-sdk-node": "^1.1.2",
"@sentry/node": "^7.42.0", "@sentry/node": "^7.42.0",
"@sentry/tracing": "^7.42.0", "@sentry/tracing": "^7.42.0",
"@types/accounting": "^0.4.2",
"@types/luxon": "^2.3.1",
"@types/passport": "^1.0.12",
"@types/xmlrpc": "^1.3.7",
"accounting": "^0.4.1", "accounting": "^0.4.1",
"ajv-formats": "^2.1.1", "ajv-formats": "^2.1.1",
"axios": "1.6.0", "axios": "1.6.0",
"bcrypt": "^5.1.0", "bcrypt": "^5.0.1",
"bullmq": "^3.0.0", "bullmq": "^3.0.0",
"copyfiles": "^2.4.1",
"cors": "^2.8.5", "cors": "^2.8.5",
"crypto-js": "^4.1.1", "crypto-js": "^4.1.1",
"debug": "~2.6.9", "debug": "~2.6.9",
"dotenv": "^10.0.0", "dotenv": "^10.0.0",
"express": "~4.18.2", "express": "~4.18.2",
"express-async-handler": "^1.2.0",
"express-basic-auth": "^1.2.1", "express-basic-auth": "^1.2.1",
"express-graphql": "^0.12.0", "express-graphql": "^0.12.0",
"fast-xml-parser": "^4.0.11", "fast-xml-parser": "^4.0.11",
"graphql-middleware": "^6.1.15", "graphql-middleware": "^6.1.15",
"graphql-shield": "^7.5.0", "graphql-shield": "^7.5.0",
"graphql-tools": "^8.2.0", "graphql-tools": "^8.2.0",
"graphql-type-json": "^0.3.2",
"handlebars": "^4.7.7", "handlebars": "^4.7.7",
"http-errors": "~1.6.3", "http-errors": "~1.6.3",
"http-proxy-agent": "^7.0.0", "http-proxy-agent": "^7.0.0",
@@ -68,7 +76,7 @@
"pluralize": "^8.0.0", "pluralize": "^8.0.0",
"raw-body": "^2.5.2", "raw-body": "^2.5.2",
"showdown": "^2.1.0", "showdown": "^2.1.0",
"uuid": "^9.0.1", "stripe": "^11.13.0",
"winston": "^3.7.1", "winston": "^3.7.1",
"xmlrpc": "^1.3.2" "xmlrpc": "^1.3.2"
}, },
@@ -79,15 +87,26 @@
} }
], ],
"homepage": "https://github.com/automatisch/automatisch#readme", "homepage": "https://github.com/automatisch/automatisch#readme",
"main": "src/server", "main": "dist/src/app",
"directories": { "directories": {
"bin": "bin", "bin": "bin",
"src": "src", "src": "src",
"test": "__tests__" "test": "__tests__"
}, },
"files": [ "files": [
"dist",
"bin", "bin",
"src" "src",
"server.js",
"server.d.ts",
"worker.js",
"worker.d.ts",
"logger.js",
"logger.d.ts",
"database.js",
"database.d.ts",
"database-utils.js",
"database-utils.d.ts"
], ],
"repository": { "repository": {
"type": "git", "type": "git",
@@ -97,10 +116,34 @@
"url": "https://github.com/automatisch/automatisch/issues" "url": "https://github.com/automatisch/automatisch/issues"
}, },
"devDependencies": { "devDependencies": {
"node-gyp": "^10.1.0", "@automatisch/types": "^0.10.0",
"@faker-js/faker": "^8.1.0",
"@types/bcrypt": "^5.0.0",
"@types/bull": "^3.15.8",
"@types/cors": "^2.8.12",
"@types/crypto-js": "^4.0.2",
"@types/express": "^4.17.15",
"@types/http-errors": "^1.8.1",
"@types/jest": "^29.5.5",
"@types/jsonwebtoken": "^8.5.8",
"@types/lodash.get": "^4.4.6",
"@types/memory-cache": "^0.2.2",
"@types/morgan": "^1.9.3",
"@types/multer": "1.4.7",
"@types/node": "^16.10.2",
"@types/nodemailer": "^6.4.4",
"@types/pg": "^8.6.1",
"@types/pino": "^7.0.5",
"@types/pluralize": "^0.0.30",
"@types/showdown": "^2.0.1",
"@types/supertest": "^2.0.14",
"jest": "^29.7.0",
"nodemon": "^2.0.13", "nodemon": "^2.0.13",
"sinon": "^11.1.2",
"supertest": "^6.3.3", "supertest": "^6.3.3",
"vitest": "^1.1.3" "ts-jest": "^29.1.1",
"ts-node": "^10.2.1",
"ts-node-dev": "^1.1.8"
}, },
"publishConfig": { "publishConfig": {
"access": "public" "access": "public"

packages/backend/server.d.ts (new vendored file, 1 line added)
View File

@@ -0,0 +1 @@
export * from './dist/src/server';

View File

@@ -0,0 +1,2 @@
/* eslint-disable */
module.exports = require('./dist/src/server.js');

View File

@@ -1,70 +0,0 @@
import createError from 'http-errors';
import express from 'express';
import cors from 'cors';
import appConfig from './config/app.js';
import corsOptions from './config/cors-options.js';
import morgan from './helpers/morgan.js';
import * as Sentry from './helpers/sentry.ee.js';
import appAssetsHandler from './helpers/app-assets-handler.js';
import webUIHandler from './helpers/web-ui-handler.js';
import errorHandler from './helpers/error-handler.js';
import './config/orm.js';
import {
createBullBoardHandler,
serverAdapter,
} from './helpers/create-bull-board-handler.js';
import injectBullBoardHandler from './helpers/inject-bull-board-handler.js';
import router from './routes/index.js';
import configurePassport from './helpers/passport.js';
createBullBoardHandler(serverAdapter);
const app = express();
Sentry.init(app);
Sentry.attachRequestHandler(app);
Sentry.attachTracingHandler(app);
injectBullBoardHandler(app, serverAdapter);
appAssetsHandler(app);
app.use(morgan);
app.use(
express.json({
limit: appConfig.requestBodySizeLimit,
verify(req, res, buf) {
req.rawBody = buf;
},
})
);
app.use(
express.urlencoded({
extended: true,
limit: appConfig.requestBodySizeLimit,
verify(req, res, buf) {
req.rawBody = buf;
},
})
);
app.use(cors(corsOptions));
configurePassport(app);
app.use('/', router);
webUIHandler(app);
// catch 404 and forward to error handler
app.use(function (req, res, next) {
next(createError(404));
});
Sentry.attachErrorHandler(app);
app.use(errorHandler);
export default app;

View File

@@ -0,0 +1,71 @@
import createError from 'http-errors';
import express from 'express';
import cors from 'cors';
import { IRequest } from '@automatisch/types';
import appConfig from './config/app';
import corsOptions from './config/cors-options';
import morgan from './helpers/morgan';
import * as Sentry from './helpers/sentry.ee';
import appAssetsHandler from './helpers/app-assets-handler';
import webUIHandler from './helpers/web-ui-handler';
import errorHandler from './helpers/error-handler';
import './config/orm';
import {
createBullBoardHandler,
serverAdapter,
} from './helpers/create-bull-board-handler';
import injectBullBoardHandler from './helpers/inject-bull-board-handler';
import router from './routes';
import configurePassport from './helpers/passport';
createBullBoardHandler(serverAdapter);
const app = express();
Sentry.init(app);
Sentry.attachRequestHandler(app);
Sentry.attachTracingHandler(app);
injectBullBoardHandler(app, serverAdapter);
appAssetsHandler(app);
app.use(morgan);
app.use(
express.json({
limit: appConfig.requestBodySizeLimit,
verify(req, res, buf) {
(req as IRequest).rawBody = buf;
},
})
);
app.use(
express.urlencoded({
extended: true,
limit: appConfig.requestBodySizeLimit,
verify(req, res, buf) {
(req as IRequest).rawBody = buf;
},
})
);
app.use(cors(corsOptions));
configurePassport(app);
app.use('/', router);
webUIHandler(app);
// catch 404 and forward to error handler
app.use(function (req, res, next) {
next(createError(404));
});
Sentry.attachErrorHandler(app);
app.use(errorHandler);
export default app;

View File

@@ -1,92 +0,0 @@
import defineAction from '../../../../helpers/define-action.js';
export default defineAction({
name: 'Create record',
key: 'createRecord',
description: 'Creates a new record with fields that automatically populate.',
arguments: [
{
label: 'Base',
key: 'baseId',
type: 'dropdown',
required: true,
description: 'Base in which to create the record.',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listBases',
},
],
},
},
{
label: 'Table',
key: 'tableId',
type: 'dropdown',
required: true,
dependsOn: ['parameters.baseId'],
description: '',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listTables',
},
{
name: 'parameters.baseId',
value: '{parameters.baseId}',
},
],
},
additionalFields: {
type: 'query',
name: 'getDynamicFields',
arguments: [
{
name: 'key',
value: 'listFields',
},
{
name: 'parameters.baseId',
value: '{parameters.baseId}',
},
{
name: 'parameters.tableId',
value: '{parameters.tableId}',
},
],
},
},
],
async run($) {
const { baseId, tableId, ...rest } = $.step.parameters;
const fields = Object.entries(rest).reduce((result, [key, value]) => {
if (Array.isArray(value)) {
result[key] = value.map((item) => item.value);
} else if (value !== '') {
result[key] = value;
}
return result;
}, {});
const body = {
typecast: true,
fields,
};
const { data } = await $.http.post(`/v0/${baseId}/${tableId}`, body);
$.setActionItem({
raw: data,
});
},
});

View File

@@ -1,174 +0,0 @@
import defineAction from '../../../../helpers/define-action.js';
import { URLSearchParams } from 'url';
export default defineAction({
name: 'Find record',
key: 'findRecord',
description:
"Finds a record using simple field search or use Airtable's formula syntax to find a matching record.",
arguments: [
{
label: 'Base',
key: 'baseId',
type: 'dropdown',
required: true,
description: 'Base in which to create the record.',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listBases',
},
],
},
},
{
label: 'Table',
key: 'tableId',
type: 'dropdown',
required: true,
dependsOn: ['parameters.baseId'],
description: '',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listTables',
},
{
name: 'parameters.baseId',
value: '{parameters.baseId}',
},
],
},
},
{
label: 'Search by field',
key: 'tableField',
type: 'dropdown',
required: false,
dependsOn: ['parameters.baseId', 'parameters.tableId'],
description: '',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listTableFields',
},
{
name: 'parameters.baseId',
value: '{parameters.baseId}',
},
{
name: 'parameters.tableId',
value: '{parameters.tableId}',
},
],
},
},
{
label: 'Search Value',
key: 'searchValue',
type: 'string',
required: false,
variables: true,
description:
'The value of unique identifier for the record. For date values, please use the ISO format (e.g., "YYYY-MM-DD").',
},
{
label: 'Search for exact match?',
key: 'exactMatch',
type: 'dropdown',
required: true,
description: '',
variables: true,
options: [
{ label: 'Yes', value: 'true' },
{ label: 'No', value: 'false' },
],
},
{
label: 'Search Formula',
key: 'searchFormula',
type: 'string',
required: false,
variables: true,
description:
'Instead, you have the option to use an Airtable search formula for locating records according to sophisticated criteria and across various fields.',
},
{
label: 'Limit to View',
key: 'limitToView',
type: 'dropdown',
required: false,
dependsOn: ['parameters.baseId', 'parameters.tableId'],
description:
'You have the choice to restrict the search to a particular view ID if desired.',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listTableViews',
},
{
name: 'parameters.baseId',
value: '{parameters.baseId}',
},
{
name: 'parameters.tableId',
value: '{parameters.tableId}',
},
],
},
},
],
async run($) {
const {
baseId,
tableId,
tableField,
searchValue,
exactMatch,
searchFormula,
limitToView,
} = $.step.parameters;
let filterByFormula;
if (tableField && searchValue) {
filterByFormula =
exactMatch === 'true'
? `{${tableField}} = '${searchValue}'`
: `LOWER({${tableField}}) = LOWER('${searchValue}')`;
} else {
filterByFormula = searchFormula;
}
const body = new URLSearchParams({
filterByFormula,
view: limitToView,
});
const { data } = await $.http.post(
`/v0/${baseId}/${tableId}/listRecords`,
body
);
$.setActionItem({
raw: data,
});
},
});

View File

@@ -1,4 +0,0 @@
import createRecord from './create-record/index.js';
import findRecord from './find-record/index.js';
export default [createRecord, findRecord];

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="256px" height="215px" viewBox="0 0 256 215" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
<g>
<path d="M114.25873,2.70101695 L18.8604023,42.1756384 C13.5552723,44.3711638 13.6102328,51.9065311 18.9486282,54.0225085 L114.746142,92.0117514 C123.163769,95.3498757 132.537419,95.3498757 140.9536,92.0117514 L236.75256,54.0225085 C242.08951,51.9065311 242.145916,44.3711638 236.83934,42.1756384 L141.442459,2.70101695 C132.738459,-0.900338983 122.961284,-0.900338983 114.25873,2.70101695" fill="#FFBF00"></path>
<path d="M136.349071,112.756863 L136.349071,207.659101 C136.349071,212.173089 140.900664,215.263892 145.096461,213.600615 L251.844122,172.166219 C254.281184,171.200072 255.879376,168.845451 255.879376,166.224705 L255.879376,71.3224678 C255.879376,66.8084791 251.327783,63.7176768 247.131986,65.3809537 L140.384325,106.815349 C137.94871,107.781496 136.349071,110.136118 136.349071,112.756863" fill="#26B5F8"></path>
<path d="M111.422771,117.65355 L79.742409,132.949912 L76.5257763,134.504714 L9.65047684,166.548104 C5.4112904,168.593211 0.000578531073,165.503855 0.000578531073,160.794612 L0.000578531073,71.7210757 C0.000578531073,70.0173017 0.874160452,68.5463864 2.04568588,67.4384994 C2.53454463,66.9481944 3.08848814,66.5446689 3.66412655,66.2250305 C5.26231864,65.2661153 7.54173107,65.0101153 9.47981017,65.7766689 L110.890522,105.957098 C116.045234,108.002206 116.450206,115.225166 111.422771,117.65355" fill="#ED3049"></path>
<path d="M111.422771,117.65355 L79.742409,132.949912 L2.04568588,67.4384994 C2.53454463,66.9481944 3.08848814,66.5446689 3.66412655,66.2250305 C5.26231864,65.2661153 7.54173107,65.0101153 9.47981017,65.7766689 L110.890522,105.957098 C116.045234,108.002206 116.450206,115.225166 111.422771,117.65355" fill-opacity="0.25" fill="#000000"></path>
</g>
</svg>

View File

@@ -1,38 +0,0 @@
import crypto from 'crypto';
import { URLSearchParams } from 'url';
import authScope from '../common/auth-scope.js';
export default async function generateAuthUrl($) {
const oauthRedirectUrlField = $.app.auth.fields.find(
(field) => field.key == 'oAuthRedirectUrl'
);
const redirectUri = oauthRedirectUrlField.value;
const state = crypto.randomBytes(100).toString('base64url');
const codeVerifier = crypto.randomBytes(96).toString('base64url');
const codeChallenge = crypto
.createHash('sha256')
.update(codeVerifier)
.digest('base64')
.replace(/=/g, '')
.replace(/\+/g, '-')
.replace(/\//g, '_');
const searchParams = new URLSearchParams({
client_id: $.auth.data.clientId,
redirect_uri: redirectUri,
response_type: 'code',
scope: authScope.join(' '),
state,
code_challenge: codeChallenge,
code_challenge_method: 'S256',
});
const url = `https://airtable.com/oauth2/v1/authorize?${searchParams.toString()}`;
await $.auth.set({
url,
originalCodeChallenge: codeChallenge,
originalState: state,
codeVerifier,
});
}

View File

@@ -1,48 +0,0 @@
import generateAuthUrl from './generate-auth-url.js';
import verifyCredentials from './verify-credentials.js';
import refreshToken from './refresh-token.js';
import isStillVerified from './is-still-verified.js';
export default {
fields: [
{
key: 'oAuthRedirectUrl',
label: 'OAuth Redirect URL',
type: 'string',
required: true,
readOnly: true,
value: '{WEB_APP_URL}/app/airtable/connections/add',
placeholder: null,
description:
'When asked to input a redirect URL in Airtable, enter the URL above.',
clickToCopy: true,
},
{
key: 'clientId',
label: 'Client ID',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: null,
clickToCopy: false,
},
{
key: 'clientSecret',
label: 'Client Secret',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: null,
clickToCopy: false,
},
],
generateAuthUrl,
verifyCredentials,
isStillVerified,
refreshToken,
};

View File

@@ -1,8 +0,0 @@
import getCurrentUser from '../common/get-current-user.js';
const isStillVerified = async ($) => {
const currentUser = await getCurrentUser($);
return !!currentUser.id;
};
export default isStillVerified;

View File

@@ -1,40 +0,0 @@
import { URLSearchParams } from 'node:url';
import authScope from '../common/auth-scope.js';
const refreshToken = async ($) => {
const params = new URLSearchParams({
client_id: $.auth.data.clientId,
grant_type: 'refresh_token',
refresh_token: $.auth.data.refreshToken,
});
const basicAuthToken = Buffer.from(
$.auth.data.clientId + ':' + $.auth.data.clientSecret
).toString('base64');
const { data } = await $.http.post(
'https://airtable.com/oauth2/v1/token',
params.toString(),
{
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
Authorization: `Basic ${basicAuthToken}`,
},
additionalProperties: {
skipAddingAuthHeader: true,
},
}
);
await $.auth.set({
accessToken: data.access_token,
refreshToken: data.refresh_token,
expiresIn: data.expires_in,
refreshExpiresIn: data.refresh_expires_in,
scope: authScope.join(' '),
tokenType: data.token_type,
});
};
export default refreshToken;

View File

@@ -1,56 +0,0 @@
import getCurrentUser from '../common/get-current-user.js';
const verifyCredentials = async ($) => {
if ($.auth.data.originalState !== $.auth.data.state) {
throw new Error("The 'state' parameter does not match.");
}
if ($.auth.data.originalCodeChallenge !== $.auth.data.code_challenge) {
throw new Error("The 'code challenge' parameter does not match.");
}
const oauthRedirectUrlField = $.app.auth.fields.find(
(field) => field.key == 'oAuthRedirectUrl'
);
const redirectUri = oauthRedirectUrlField.value;
const basicAuthToken = Buffer.from(
$.auth.data.clientId + ':' + $.auth.data.clientSecret
).toString('base64');
const { data } = await $.http.post(
'https://airtable.com/oauth2/v1/token',
{
code: $.auth.data.code,
client_id: $.auth.data.clientId,
redirect_uri: redirectUri,
grant_type: 'authorization_code',
code_verifier: $.auth.data.codeVerifier,
},
{
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
Authorization: `Basic ${basicAuthToken}`,
},
additionalProperties: {
skipAddingAuthHeader: true,
},
}
);
await $.auth.set({
accessToken: data.access_token,
tokenType: data.token_type,
});
const currentUser = await getCurrentUser($);
await $.auth.set({
clientId: $.auth.data.clientId,
clientSecret: $.auth.data.clientSecret,
scope: $.auth.data.scope,
expiresIn: data.expires_in,
refreshExpiresIn: data.refresh_expires_in,
refreshToken: data.refresh_token,
screenName: currentUser.email,
});
};
export default verifyCredentials;

View File

@@ -1,12 +0,0 @@
const addAuthHeader = ($, requestConfig) => {
if (
!requestConfig.additionalProperties?.skipAddingAuthHeader &&
$.auth.data?.accessToken
) {
requestConfig.headers.Authorization = `${$.auth.data.tokenType} ${$.auth.data.accessToken}`;
}
return requestConfig;
};
export default addAuthHeader;

View File

@@ -1,12 +0,0 @@
const authScope = [
'data.records:read',
'data.records:write',
'data.recordComments:read',
'data.recordComments:write',
'schema.bases:read',
'schema.bases:write',
'user.email:read',
'webhook:manage',
];
export default authScope;

View File

@@ -1,6 +0,0 @@
const getCurrentUser = async ($) => {
const { data: currentUser } = await $.http.get('/v0/meta/whoami');
return currentUser;
};
export default getCurrentUser;

View File

@@ -1,6 +0,0 @@
import listBases from './list-bases/index.js';
import listTableFields from './list-table-fields/index.js';
import listTableViews from './list-table-views/index.js';
import listTables from './list-tables/index.js';
export default [listBases, listTableFields, listTableViews, listTables];

View File

@@ -1,28 +0,0 @@
export default {
name: 'List bases',
key: 'listBases',
async run($) {
const bases = {
data: [],
};
const params = {};
do {
const { data } = await $.http.get('/v0/meta/bases', { params });
params.offset = data.offset;
if (data?.bases) {
for (const base of data.bases) {
bases.data.push({
value: base.id,
name: base.name,
});
}
}
} while (params.offset);
return bases;
},
};

View File

@@ -1,39 +0,0 @@
export default {
name: 'List table fields',
key: 'listTableFields',
async run($) {
const tableFields = {
data: [],
};
const { baseId, tableId } = $.step.parameters;
if (!baseId) {
return tableFields;
}
const params = {};
do {
const { data } = await $.http.get(`/v0/meta/bases/${baseId}/tables`, {
params,
});
params.offset = data.offset;
if (data?.tables) {
for (const table of data.tables) {
if (table.id === tableId) {
table.fields.forEach((field) => {
tableFields.data.push({
value: field.name,
name: field.name,
});
});
}
}
}
} while (params.offset);
return tableFields;
},
};

View File

@@ -1,39 +0,0 @@
export default {
name: 'List table views',
key: 'listTableViews',
async run($) {
const tableViews = {
data: [],
};
const { baseId, tableId } = $.step.parameters;
if (!baseId) {
return tableViews;
}
const params = {};
do {
const { data } = await $.http.get(`/v0/meta/bases/${baseId}/tables`, {
params,
});
params.offset = data.offset;
if (data?.tables) {
for (const table of data.tables) {
if (table.id === tableId) {
table.views.forEach((view) => {
tableViews.data.push({
value: view.id,
name: view.name,
});
});
}
}
}
} while (params.offset);
return tableViews;
},
};

View File

@@ -1,35 +0,0 @@
export default {
name: 'List tables',
key: 'listTables',
async run($) {
const tables = {
data: [],
};
const baseId = $.step.parameters.baseId;
if (!baseId) {
return tables;
}
const params = {};
do {
const { data } = await $.http.get(`/v0/meta/bases/${baseId}/tables`, {
params,
});
params.offset = data.offset;
if (data?.tables) {
for (const table of data.tables) {
tables.data.push({
value: table.id,
name: table.name,
});
}
}
} while (params.offset);
return tables;
},
};

View File

@@ -1,3 +0,0 @@
import listFields from './list-fields/index.js';
export default [listFields];

View File

@@ -1,86 +0,0 @@
const hasValue = (value) => value !== null && value !== undefined;
export default {
name: 'List fields',
key: 'listFields',
async run($) {
const options = [];
const { baseId, tableId } = $.step.parameters;
if (!hasValue(baseId) || !hasValue(tableId)) {
return;
}
const { data } = await $.http.get(`/v0/meta/bases/${baseId}/tables`);
const selectedTable = data.tables.find((table) => table.id === tableId);
if (!selectedTable) return;
selectedTable.fields.forEach((field) => {
if (field.type === 'singleSelect') {
options.push({
label: field.name,
key: field.name,
type: 'dropdown',
required: false,
variables: true,
options: field.options.choices.map((choice) => ({
label: choice.name,
value: choice.id,
})),
});
} else if (field.type === 'multipleSelects') {
options.push({
label: field.name,
key: field.name,
type: 'dynamic',
required: false,
variables: true,
fields: [
{
label: 'Value',
key: 'value',
type: 'dropdown',
required: false,
variables: true,
options: field.options.choices.map((choice) => ({
label: choice.name,
value: choice.id,
})),
},
],
});
} else if (field.type === 'checkbox') {
options.push({
label: field.name,
key: field.name,
type: 'dropdown',
required: false,
variables: true,
options: [
{
label: 'Yes',
value: 'true',
},
{
label: 'No',
value: 'false',
},
],
});
} else {
options.push({
label: field.name,
key: field.name,
type: 'string',
required: false,
variables: true,
});
}
});
return options;
},
};

View File

@@ -1,22 +0,0 @@
import defineApp from '../../helpers/define-app.js';
import addAuthHeader from './common/add-auth-header.js';
import auth from './auth/index.js';
import actions from './actions/index.js';
import dynamicData from './dynamic-data/index.js';
import dynamicFields from './dynamic-fields/index.js';
export default defineApp({
name: 'Airtable',
key: 'airtable',
baseUrl: 'https://airtable.com',
apiBaseUrl: 'https://api.airtable.com',
iconUrl: '{BASE_URL}/apps/airtable/assets/favicon.svg',
authDocUrl: '{DOCS_URL}/apps/airtable/connection',
primaryColor: 'FFBF00',
supportsConnections: true,
beforeRequest: [addAuthHeader],
auth,
actions,
dynamicData,
dynamicFields,
});

View File

@@ -1 +0,0 @@
<svg xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg" width="132" height="24" fill="none" viewBox="0 0 132 24"><path fill="#19191C" d="M38.557 19.495c2.16 0 3.25-1.113 3.725-1.87h.214c.094.805.664 1.562 1.779 1.562h2.111V16.82h-.545c-.38 0-.57-.213-.57-.545V6.776h-2.8v1.516h-.213c-.545-.758-1.684-1.824-3.772-1.824-3.321 0-5.789 2.748-5.789 6.514s2.515 6.513 5.86 6.513m.498-2.7c-1.969 0-3.51-1.445-3.51-3.79 0-2.297 1.494-3.86 3.487-3.86 1.898 0 3.487 1.397 3.487 3.86 0 2.108-1.352 3.79-3.463 3.79M48.04 24h2.799v-6.376h.213c.522.758 1.637 1.871 3.844 1.871 3.321 0 5.741-2.795 5.741-6.513 0-3.743-2.586-6.514-5.931-6.514-2.135 0-3.18 1.16-3.678 1.8h-.213V6.776h-2.776V24m6.263-7.134c-1.922 0-3.512-1.42-3.512-3.884 0-2.108 1.353-3.885 3.464-3.885 1.97 0 3.511 1.54 3.511 3.885 0 2.297-1.494 3.884-3.463 3.884M62.082 24h2.8v-6.376h.213c.522.758 1.637 1.871 3.843 1.871 3.321 0 5.51-2.795 5.51-6.513 0-3.743-2.355-6.514-5.7-6.514-2.135 0-3.179 1.16-3.677 1.8h-.214V6.776h-2.775zm6.263-7.134c-1.922 0-3.511-1.42-3.511-3.884 0-2.108 1.352-3.885 3.463-3.885 1.97 0 3.512 1.54 3.512 3.885 0 2.297-1.495 3.884-3.464 3.884m9.805 2.61h3.961l2.254-9.735h.143l2.253 9.735H90.7l3.153-12.412h-2.821l-2.254 9.759h-.214l-2.253-9.759h-3.725l-2.278 9.759h-.213l-2.23-9.759h-2.99l3.274 12.412m17.123 0h2.8V13.34c0-2.345 1.09-3.79 3.131-3.79h1.233V6.756h-.925c-1.59 0-2.8 1.09-3.274 2.132h-.19V7.064h-2.775zm21.057 0h2.183v-2.487h-2.159c-.854 0-1.21-.38-1.21-1.256V9.528h3.511V7.064h-3.511V3.582h-2.657v3.482h-2.325v2.464h2.159v6.229c0 2.63 1.589 3.719 4.009 3.719m9.693.019c2.586 0 4.864-1.279 5.67-3.86l-2.562-.616c-.451 1.373-1.755 2.084-3.131 2.084-2.041 0-3.393-1.326-3.417-3.41h9.419v-.782c0-3.695-2.301-6.443-6.097-6.443-3.346 0-6.216 2.63-6.216 6.537 0 3.79 2.538 6.49 6.334 6.49m-3.416-7.84c.166-1.492 1.518-2.747 3.298-2.747 1.708 0 3.108 1.066 3.25 2.747h-6.548"/><path fill="#19191C" fill-rule="evenodd" d="M108.916 19.476h-2.8V9.528h-2.182V7.064h4.982z" clip-rule="evenodd"/><path fill="#19191C" d="M107.309 5.342c1.02 0 1.779-.758 1.779-1.753 0-.971-.759-1.73-1.779-1.73-1.021 0-1.78.759-1.78 1.73 0 .995.759 1.753 1.78 1.753"/><path fill="#FD366E" d="M24.443 16.432v5.478H10.752c-3.989 0-7.472-2.203-9.335-5.478A11.041 11.041 0 0 1 0 11.695v-1.48a10.97 10.97 0 0 1 .381-2.247C1.661 3.368 5.82 0 10.751 0c4.934 0 9.092 3.37 10.371 7.967h-5.854c-.96-1.499-2.624-2.49-4.516-2.49s-3.555.991-4.516 2.49a5.47 5.47 0 0 0-.67 1.494 5.562 5.562 0 0 0-.202 1.494 5.5 5.5 0 0 0 1.69 3.983 5.32 5.32 0 0 0 3.698 1.494h13.69"/><path fill="#FD366E" d="M24.443 9.46v5.478h-9.994a5.5 5.5 0 0 0 1.691-3.983 5.56 5.56 0 0 0-.203-1.494h8.506"/></svg>

View File

@@ -1,65 +0,0 @@
import verifyCredentials from './verify-credentials.js';
import isStillVerified from './is-still-verified.js';
export default {
fields: [
{
key: 'screenName',
label: 'Screen Name',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description:
'Screen name of your connection to be used on Automatisch UI.',
clickToCopy: false,
},
{
key: 'projectId',
label: 'Project ID',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'Project ID of your Appwrite project.',
clickToCopy: false,
},
{
key: 'apiKey',
label: 'API Key',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'API key of your Appwrite project.',
clickToCopy: false,
},
{
key: 'instanceUrl',
label: 'Appwrite instance URL',
type: 'string',
required: false,
readOnly: false,
placeholder: '',
description: '',
clickToCopy: true,
},
{
key: 'host',
label: 'Host Name',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'Host name of your Appwrite project.',
clickToCopy: false,
},
],
verifyCredentials,
isStillVerified,
};

View File

@@ -1,8 +0,0 @@
import verifyCredentials from './verify-credentials.js';
const isStillVerified = async ($) => {
await verifyCredentials($);
return true;
};
export default isStillVerified;

View File

@@ -1,5 +0,0 @@
const verifyCredentials = async ($) => {
await $.http.get('/v1/users');
};
export default verifyCredentials;

View File

@@ -1,16 +0,0 @@
const addAuthHeader = ($, requestConfig) => {
requestConfig.headers['Content-Type'] = 'application/json';
if ($.auth.data?.apiKey && $.auth.data?.projectId) {
requestConfig.headers['X-Appwrite-Project'] = $.auth.data.projectId;
requestConfig.headers['X-Appwrite-Key'] = $.auth.data.apiKey;
}
if ($.auth.data?.host) {
requestConfig.headers['Host'] = $.auth.data.host;
}
return requestConfig;
};
export default addAuthHeader;

View File

@@ -1,13 +0,0 @@
const setBaseUrl = ($, requestConfig) => {
const instanceUrl = $.auth.data.instanceUrl;
if (instanceUrl) {
requestConfig.baseURL = instanceUrl;
} else if ($.app.apiBaseUrl) {
requestConfig.baseURL = $.app.apiBaseUrl;
}
return requestConfig;
};
export default setBaseUrl;

View File

@@ -1,4 +0,0 @@
import listCollections from './list-collections/index.js';
import listDatabases from './list-databases/index.js';
export default [listCollections, listDatabases];

View File

@@ -1,44 +0,0 @@
export default {
name: 'List collections',
key: 'listCollections',
async run($) {
const collections = {
data: [],
};
const databaseId = $.step.parameters.databaseId;
if (!databaseId) {
return collections;
}
const params = {
queries: [
JSON.stringify({
method: 'orderAsc',
attribute: 'name',
}),
JSON.stringify({
method: 'limit',
values: [100],
}),
],
};
const { data } = await $.http.get(
`/v1/databases/${databaseId}/collections`,
{ params }
);
if (data?.collections) {
for (const collection of data.collections) {
collections.data.push({
value: collection.$id,
name: collection.name,
});
}
}
return collections;
},
};

View File

@@ -1,36 +0,0 @@
export default {
name: 'List databases',
key: 'listDatabases',
async run($) {
const databases = {
data: [],
};
const params = {
queries: [
JSON.stringify({
method: 'orderAsc',
attribute: 'name',
}),
JSON.stringify({
method: 'limit',
values: [100],
}),
],
};
const { data } = await $.http.get('/v1/databases', { params });
if (data?.databases) {
for (const database of data.databases) {
databases.data.push({
value: database.$id,
name: database.name,
});
}
}
return databases;
},
};

View File

@@ -1,21 +0,0 @@
import defineApp from '../../helpers/define-app.js';
import addAuthHeader from './common/add-auth-header.js';
import setBaseUrl from './common/set-base-url.js';
import auth from './auth/index.js';
import triggers from './triggers/index.js';
import dynamicData from './dynamic-data/index.js';
export default defineApp({
name: 'Appwrite',
key: 'appwrite',
baseUrl: 'https://appwrite.io',
apiBaseUrl: 'https://cloud.appwrite.io',
iconUrl: '{BASE_URL}/apps/appwrite/assets/favicon.svg',
authDocUrl: '{DOCS_URL}/apps/appwrite/connection',
primaryColor: 'FD366E',
supportsConnections: true,
beforeRequest: [setBaseUrl, addAuthHeader],
auth,
triggers,
dynamicData,
});

View File

@@ -1,3 +0,0 @@
import newDocuments from './new-documents/index.js';
export default [newDocuments];

View File

@@ -1,104 +0,0 @@
import defineTrigger from '../../../../helpers/define-trigger.js';
export default defineTrigger({
name: 'New documents',
key: 'newDocuments',
pollInterval: 15,
description: 'Triggers when a new document is created.',
arguments: [
{
label: 'Database',
key: 'databaseId',
type: 'dropdown',
required: true,
description: '',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listDatabases',
},
],
},
},
{
label: 'Collection',
key: 'collectionId',
type: 'dropdown',
required: true,
dependsOn: ['parameters.databaseId'],
description: '',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listCollections',
},
{
name: 'parameters.databaseId',
value: '{parameters.databaseId}',
},
],
},
},
],
async run($) {
const { databaseId, collectionId } = $.step.parameters;
const limit = 1;
let lastDocumentId = undefined;
let offset = 0;
let documentCount = 0;
do {
const params = {
queries: [
JSON.stringify({
method: 'orderDesc',
attribute: '$createdAt',
}),
JSON.stringify({
method: 'limit',
values: [limit],
}),
// An invalid cursor shouldn't be sent.
lastDocumentId &&
JSON.stringify({
method: 'cursorAfter',
values: [lastDocumentId],
}),
].filter(Boolean),
};
const { data } = await $.http.get(
`/v1/databases/${databaseId}/collections/${collectionId}/documents`,
{ params }
);
const documents = data?.documents;
documentCount = documents?.length;
offset = offset + limit;
lastDocumentId = documents[documentCount - 1]?.$id;
if (!documentCount) {
return;
}
for (const document of documents) {
$.pushTriggerItem({
raw: document,
meta: {
internalId: document.$id,
},
});
}
} while (documentCount === limit);
},
});
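
The loop above walks the collection newest-first, one document per page, and follows cursorAfter until a page comes back smaller than limit; meta.internalId presumably lets the engine recognise documents it has already turned into trigger items. A sketch of the response shape the loop consumes, with made-up values rather than anything from this diff:

// Hypothetical Appwrite list response; only the fields the trigger reads are shown.
const data = {
  total: 2,
  documents: [
    { $id: 'doc_newest', $createdAt: '2023-12-20T16:25:11.000+00:00' },
    { $id: 'doc_older', $createdAt: '2023-12-20T15:00:00.000+00:00' },
  ],
};
// Each document becomes one trigger item keyed by its $id:
// { raw: document, meta: { internalId: document.$id } }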


@@ -1,3 +0,0 @@
import sendPrompt from './send-prompt/index.js';
export default [sendPrompt];


@@ -0,0 +1,3 @@
import sendPrompt from './send-prompt';
export default [sendPrompt];


@@ -1,97 +0,0 @@
import defineAction from '../../../../helpers/define-action.js';
const castFloatOrUndefined = (value) => {
return value === '' ? undefined : parseFloat(value);
};
export default defineAction({
name: 'Send prompt',
key: 'sendPrompt',
description: 'Creates a completion for the provided prompt and parameters.',
arguments: [
{
label: 'Prompt',
key: 'prompt',
type: 'string',
required: true,
variables: true,
description: 'The text to analyze.',
},
{
label: 'Temperature',
key: 'temperature',
type: 'string',
required: false,
variables: true,
description:
'What sampling temperature to use, between 0 and 2. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer. We generally recommend altering this or Top P but not both.',
},
{
label: 'Maximum tokens',
key: 'maxTokens',
type: 'string',
required: false,
variables: true,
description:
'The maximum number of tokens to generate in the completion.',
},
{
label: 'Stop Sequence',
key: 'stopSequence',
type: 'string',
required: false,
variables: true,
description:
'Single stop sequence where the API will stop generating further tokens. The returned text will not contain the stop sequence.',
},
{
label: 'Top P',
key: 'topP',
type: 'string',
required: false,
variables: true,
description:
'An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.',
},
{
label: 'Frequency Penalty',
key: 'frequencyPenalty',
type: 'string',
required: false,
variables: true,
description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`,
},
{
label: 'Presence Penalty',
key: 'presencePenalty',
type: 'string',
required: false,
variables: true,
description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.`,
},
],
async run($) {
const payload = {
model: $.step.parameters.model,
prompt: $.step.parameters.prompt,
temperature: castFloatOrUndefined($.step.parameters.temperature),
max_tokens: castFloatOrUndefined($.step.parameters.maxTokens),
stop: $.step.parameters.stopSequence || null,
top_p: castFloatOrUndefined($.step.parameters.topP),
frequency_penalty: castFloatOrUndefined(
$.step.parameters.frequencyPenalty
),
presence_penalty: castFloatOrUndefined($.step.parameters.presencePenalty),
};
const { data } = await $.http.post(
`/deployments/${$.auth.data.deploymentId}/completions`,
payload
);
$.setActionItem({
raw: data,
});
},
});


@@ -0,0 +1,87 @@
import defineAction from '../../../../helpers/define-action';
const castFloatOrUndefined = (value: string | null) => {
return value === '' || value === null ? undefined : parseFloat(value);
};
export default defineAction({
name: 'Send prompt',
key: 'sendPrompt',
description: 'Creates a completion for the provided prompt and parameters.',
arguments: [
{
label: 'Prompt',
key: 'prompt',
type: 'string' as const,
required: true,
variables: true,
description: 'The text to analyze.'
},
{
label: 'Temperature',
key: 'temperature',
type: 'string' as const,
required: false,
variables: true,
description: 'What sampling temperature to use, between 0 and 2. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer. We generally recommend altering this or Top P but not both.'
},
{
label: 'Maximum tokens',
key: 'maxTokens',
type: 'string' as const,
required: false,
variables: true,
description: 'The maximum number of tokens to generate in the completion.'
},
{
label: 'Stop Sequence',
key: 'stopSequence',
type: 'string' as const,
required: false,
variables: true,
description: 'Single stop sequence where the API will stop generating further tokens. The returned text will not contain the stop sequence.'
},
{
label: 'Top P',
key: 'topP',
type: 'string' as const,
required: false,
variables: true,
description: 'An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.'
},
{
label: 'Frequency Penalty',
key: 'frequencyPenalty',
type: 'string' as const,
required: false,
variables: true,
description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`
},
{
label: 'Presence Penalty',
key: 'presencePenalty',
type: 'string' as const,
required: false,
variables: true,
description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.`
},
],
async run($) {
const payload = {
model: $.step.parameters.model as string,
prompt: $.step.parameters.prompt as string,
temperature: castFloatOrUndefined($.step.parameters.temperature as string),
max_tokens: castFloatOrUndefined($.step.parameters.maxTokens as string),
stop: ($.step.parameters.stopSequence as string || null),
top_p: castFloatOrUndefined($.step.parameters.topP as string),
frequency_penalty: castFloatOrUndefined($.step.parameters.frequencyPenalty as string),
presence_penalty: castFloatOrUndefined($.step.parameters.presencePenalty as string),
};
const { data } = await $.http.post(`/deployments/${$.auth.data.deploymentId}/completions`, payload);
$.setActionItem({
raw: data,
});
},
});
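
Combined with the common/set-base-url and common/add-auth-header hooks further down, the action above ends up posting to the Azure OpenAI completions endpoint of the connected deployment. A sketch of the resulting request, assuming a resource named my-resource and otherwise using only values the hooks hard-code; note that no model argument is declared in the arguments list above, so $.step.parameters.model stays undefined unless it is supplied some other way:

// POST https://my-resource.openai.azure.com/openai/deployments/<deploymentId>/completions?api-version=2023-10-01-preview
// headers: { 'api-key': '<apiKey>' }
const payload = {
  model: undefined, // not collected by any argument in this action
  prompt: 'Say hello',
  temperature: 0.9,
  max_tokens: 256,
  stop: null,
  top_p: undefined,
  frequency_penalty: undefined,
  presence_penalty: undefined,
};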


@@ -1,58 +0,0 @@
import verifyCredentials from './verify-credentials.js';
import isStillVerified from './is-still-verified.js';
export default {
fields: [
{
key: 'screenName',
label: 'Screen Name',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description:
'Screen name of your connection to be used on Automatisch UI.',
clickToCopy: false,
},
{
key: 'yourResourceName',
label: 'Your Resource Name',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'The name of your Azure OpenAI Resource.',
docUrl: 'https://automatisch.io/docs/azure-openai#your-resource-name',
clickToCopy: false,
},
{
key: 'deploymentId',
label: 'Deployment ID',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'The deployment name you chose when you deployed the model.',
docUrl: 'https://automatisch.io/docs/azure-openai#deployment-id',
clickToCopy: false,
},
{
key: 'apiKey',
label: 'API Key',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'Azure OpenAI API key of your account.',
docUrl: 'https://automatisch.io/docs/azure-openai#api-key',
clickToCopy: false,
},
],
verifyCredentials,
isStillVerified,
};


@@ -0,0 +1,58 @@
import verifyCredentials from './verify-credentials';
import isStillVerified from './is-still-verified';
export default {
fields: [
{
key: 'screenName',
label: 'Screen Name',
type: 'string' as const,
required: true,
readOnly: false,
value: null,
placeholder: null,
description:
'Screen name of your connection to be used on Automatisch UI.',
clickToCopy: false,
},
{
key: 'yourResourceName',
label: 'Your Resource Name',
type: 'string' as const,
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'The name of your Azure OpenAI Resource.',
docUrl: 'https://automatisch.io/docs/azure-openai#your-resource-name',
clickToCopy: false,
},
{
key: 'deploymentId',
label: 'Deployment ID',
type: 'string' as const,
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'The deployment name you chose when you deployed the model.',
docUrl: 'https://automatisch.io/docs/azure-openai#deployment-id',
clickToCopy: false,
},
{
key: 'apiKey',
label: 'API Key',
type: 'string' as const,
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'Azure OpenAI API key of your account.',
docUrl: 'https://automatisch.io/docs/azure-openai#api-key',
clickToCopy: false,
},
],
verifyCredentials,
isStillVerified,
};


@@ -1,6 +0,0 @@
const isStillVerified = async ($) => {
await $.http.get('/fine_tuning/jobs');
return true;
};
export default isStillVerified;


@@ -0,0 +1,8 @@
import { IGlobalVariable } from '@automatisch/types';
const isStillVerified = async ($: IGlobalVariable) => {
await $.http.get('/fine_tuning/jobs');
return true;
};
export default isStillVerified;


@@ -1,5 +0,0 @@
const verifyCredentials = async ($) => {
await $.http.get('/fine_tuning/jobs');
};
export default verifyCredentials;


@@ -0,0 +1,7 @@
import { IGlobalVariable } from '@automatisch/types';
const verifyCredentials = async ($: IGlobalVariable) => {
await $.http.get('/fine_tuning/jobs');
};
export default verifyCredentials;


@@ -1,13 +0,0 @@
const addAuthHeader = ($, requestConfig) => {
if ($.auth.data?.apiKey) {
requestConfig.headers['api-key'] = $.auth.data.apiKey;
}
requestConfig.params = {
'api-version': '2023-10-01-preview',
};
return requestConfig;
};
export default addAuthHeader;


@@ -0,0 +1,15 @@
import { TBeforeRequest } from '@automatisch/types';
const addAuthHeader: TBeforeRequest = ($, requestConfig) => {
if ($.auth.data?.apiKey) {
requestConfig.headers['api-key'] = $.auth.data.apiKey as string;
}
requestConfig.params = {
'api-version': '2023-10-01-preview',
};
return requestConfig;
};
export default addAuthHeader;


@@ -1,11 +0,0 @@
const setBaseUrl = ($, requestConfig) => {
const yourResourceName = $.auth.data.yourResourceName;
if (yourResourceName) {
requestConfig.baseURL = `https://${yourResourceName}.openai.azure.com/openai`;
}
return requestConfig;
};
export default setBaseUrl;


@@ -0,0 +1,13 @@
import { TBeforeRequest } from '@automatisch/types';
const setBaseUrl: TBeforeRequest = ($, requestConfig) => {
const yourResourceName = $.auth.data.yourResourceName as string;
if (yourResourceName) {
requestConfig.baseURL = `https://${yourResourceName}.openai.azure.com/openai`;
}
return requestConfig;
};
export default setBaseUrl;
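
set-base-url and add-auth-header run in the order they are listed in the app definition below, so every outgoing request first gets the resource-specific base URL and then the key header plus API version. A sketch of their combined effect on a request config, using placeholder connection data that is not part of this diff:

// Hypothetical connection data: $.auth.data = { yourResourceName: 'my-resource', apiKey: 'secret' }
const requestConfig: Record<string, unknown> = { url: '/fine_tuning/jobs', headers: {} };
// After setBaseUrl:    baseURL === 'https://my-resource.openai.azure.com/openai'
// After addAuthHeader: headers['api-key'] === 'secret', params['api-version'] === '2023-10-01-preview'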


@@ -1,20 +0,0 @@
import defineApp from '../../helpers/define-app.js';
import setBaseUrl from './common/set-base-url.js';
import addAuthHeader from './common/add-auth-header.js';
import auth from './auth/index.js';
import actions from './actions/index.js';
export default defineApp({
name: 'Azure OpenAI',
key: 'azure-openai',
baseUrl:
'https://azure.microsoft.com/en-us/products/ai-services/openai-service',
apiBaseUrl: '',
iconUrl: '{BASE_URL}/apps/azure-openai/assets/favicon.svg',
authDocUrl: '{DOCS_URL}/apps/azure-openai/connection',
primaryColor: '000000',
supportsConnections: true,
beforeRequest: [setBaseUrl, addAuthHeader],
auth,
actions,
});


@@ -0,0 +1,19 @@
import defineApp from '../../helpers/define-app';
import setBaseUrl from './common/set-base-url';
import addAuthHeader from './common/add-auth-header';
import auth from './auth';
import actions from './actions';
export default defineApp({
name: 'Azure OpenAI',
key: 'azure-openai',
baseUrl: 'https://azure.microsoft.com/en-us/products/ai-services/openai-service',
apiBaseUrl: '',
iconUrl: '{BASE_URL}/apps/azure-openai/assets/favicon.svg',
authDocUrl: 'https://automatisch.io/docs/apps/azure-openai/connection',
primaryColor: '000000',
supportsConnections: true,
beforeRequest: [setBaseUrl, addAuthHeader],
auth,
actions,
});


@@ -1,41 +0,0 @@
import { BskyAgent, RichText } from '@atproto/api';
import defineAction from '../../../../helpers/define-action.js';
export default defineAction({
name: 'Create post',
key: 'createPost',
description: 'Creates a new post.',
arguments: [
{
label: 'Text',
key: 'text',
type: 'string',
required: true,
variables: true,
description: '',
},
],
async run($) {
const text = $.step.parameters.text;
const agent = new BskyAgent({ service: 'https://bsky.social/xrpc' });
const richText = new RichText({ text });
await richText.detectFacets(agent);
const body = {
repo: $.auth.data.did,
collection: 'app.bsky.feed.post',
record: {
$type: 'app.bsky.feed.post',
text: richText.text,
facets: richText.facets,
createdAt: new Date().toISOString(),
},
};
const { data } = await $.http.post('/com.atproto.repo.createRecord', body);
$.setActionItem({ raw: data });
},
});


@@ -1,5 +0,0 @@
import createPost from './create-post/index.js';
import replyToPost from './reply-to-post/index.js';
import searchPostByUrl from './search-post-by-url/index.js';
export default [createPost, replyToPost, searchPostByUrl];


@@ -1,55 +0,0 @@
import { BskyAgent, RichText } from '@atproto/api';
import defineAction from '../../../../helpers/define-action.js';
import getReplyRefs from '../../common/get-reply-refs.js';
export default defineAction({
name: 'Reply to post',
key: 'replyToPost',
description: 'Replies to a post.',
arguments: [
{
label: 'Post Url',
key: 'postUrl',
type: 'string',
required: true,
variables: true,
description: 'Enter the whole URL of the post you want to reply to.',
},
{
label: 'Text',
key: 'text',
type: 'string',
required: true,
variables: true,
description: 'Your post.',
},
],
async run($) {
const text = $.step.parameters.text;
const postUrl = $.step.parameters.postUrl;
const replyRefs = await getReplyRefs($, postUrl);
const agent = new BskyAgent({ service: 'https://bsky.social/xrpc' });
const richText = new RichText({ text });
await richText.detectFacets(agent);
const body = {
repo: $.auth.data.did,
collection: 'app.bsky.feed.post',
record: {
$type: 'app.bsky.feed.post',
text: richText.text,
facets: richText.facets,
createdAt: new Date().toISOString(),
reply: replyRefs,
},
};
const { data } = await $.http.post('/com.atproto.repo.createRecord', body);
$.setActionItem({ raw: data });
},
});
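
getReplyRefs is not part of this diff, but the reply field it fills in follows the app.bsky.feed.post lexicon: a strong reference (uri plus cid) to both the root of the thread and the post being replied to. A sketch of the shape the record presumably ends up carrying, using made-up identifiers rather than code from this diff:

// Assumed return value of getReplyRefs($, postUrl); not taken from this diff.
const replyRefs = {
  root: { uri: 'at://did:plc:alice/app.bsky.feed.post/3krootpost', cid: 'bafyrootcidexample' },
  parent: { uri: 'at://did:plc:alice/app.bsky.feed.post/3kparentpost', cid: 'bafyparentcidexample' },
};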


@@ -1,35 +0,0 @@
import defineAction from '../../../../helpers/define-action.js';
export default defineAction({
name: 'Search post by url',
key: 'searchPostByUrl',
description: 'Searches a post in a thread by url.',
arguments: [
{
label: 'Post Url',
key: 'postUrl',
type: 'string',
required: true,
variables: true,
description: 'Enter the whole post URL here.',
},
],
async run($) {
const postUrl = $.step.parameters.postUrl;
const urlParts = postUrl.split('/');
const handle = urlParts[urlParts.length - 3];
const postId = urlParts[urlParts.length - 1];
const uri = `at://${handle}/app.bsky.feed.post/${postId}`;
const params = {
uri,
};
const { data } = await $.http.get('/app.bsky.feed.getPostThread', {
params,
});
$.setActionItem({ raw: data });
},
});
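
The split above relies on the canonical bsky.app URL layout, where the handle sits three segments from the end and the post ID is the last segment. A worked example with a made-up handle and post ID:

const postUrl = 'https://bsky.app/profile/alice.bsky.social/post/3k44dl5mebt2c'; // placeholder URL
const urlParts = postUrl.split('/');
const handle = urlParts[urlParts.length - 3]; // 'alice.bsky.social'
const postId = urlParts[urlParts.length - 1]; // '3k44dl5mebt2c'
const uri = `at://${handle}/app.bsky.feed.post/${postId}`;
// => 'at://alice.bsky.social/app.bsky.feed.post/3k44dl5mebt2c'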


@@ -1,4 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="-50 -50 430 390" fill="#1185fd" aria-hidden="true">
<path d="M180 141.964C163.699 110.262 119.308 51.1817 78.0347 22.044C38.4971 -5.86834 23.414 -1.03207 13.526 3.43594C2.08093 8.60755 0 26.1785 0 36.5164C0 46.8542 5.66748 121.272 9.36416 133.694C21.5786 174.738 65.0603 188.607 105.104 184.156C107.151 183.852 109.227 183.572 111.329 183.312C109.267 183.642 107.19 183.924 105.104 184.156C46.4204 192.847 -5.69621 214.233 62.6582 290.33C137.848 368.18 165.705 273.637 180 225.702C194.295 273.637 210.76 364.771 295.995 290.33C360 225.702 313.58 192.85 254.896 184.158C252.81 183.926 250.733 183.645 248.671 183.315C250.773 183.574 252.849 183.855 254.896 184.158C294.94 188.61 338.421 174.74 350.636 133.697C354.333 121.275 360 46.8568 360 36.519C360 26.1811 357.919 8.61012 346.474 3.43851C336.586 -1.02949 321.503 -5.86576 281.965 22.0466C240.692 51.1843 196.301 110.262 180 141.964Z">
</path>
</svg>


@@ -1,34 +0,0 @@
import verifyCredentials from './verify-credentials.js';
import isStillVerified from './is-still-verified.js';
import refreshToken from './refresh-token.js';
export default {
fields: [
{
key: 'handle',
label: 'Your Bluesky Handle',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: '',
clickToCopy: false,
},
{
key: 'password',
label: 'Your Bluesky Password',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: '',
clickToCopy: false,
},
],
verifyCredentials,
isStillVerified,
refreshToken,
};


@@ -1,8 +0,0 @@
import getCurrentUser from '../common/get-current-user.js';
const isStillVerified = async ($) => {
const currentUser = await getCurrentUser($);
return !!currentUser.did;
};
export default isStillVerified;


@@ -1,24 +0,0 @@
const refreshToken = async ($) => {
const { refreshJwt } = $.auth.data;
const { data } = await $.http.post(
'/com.atproto.server.refreshSession',
null,
{
headers: {
Authorization: `Bearer ${refreshJwt}`,
},
additionalProperties: {
skipAddingAuthHeader: true,
},
}
);
await $.auth.set({
accessJwt: data.accessJwt,
refreshJwt: data.refreshJwt,
did: data.did,
});
};
export default refreshToken;

Some files were not shown because too many files have changed in this diff.