Compare commits


1 Commit

Author: Faruk AYDIN | SHA1: fedbd66f8e | Message: Release v.0.5.0 | Date: 2023-02-08 12:42:38 +01:00
2459 changed files with 53159 additions and 77590 deletions

View File

@@ -8,8 +8,10 @@ echo "Configuring backend environment variables..."
cd packages/backend
rm -rf .env
echo "
HOST=localhost
PROTOCOL=http
PORT=$BACKEND_PORT
WEB_APP_URL=http://localhost:$WEB_PORT
WEB_APP_URL=https://$CODESPACE_NAME-$WEB_PORT.$GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN
APP_ENV=development
POSTGRES_DATABASE=automatisch
POSTGRES_PORT=5432
@@ -28,7 +30,9 @@ cd packages/web
rm -rf .env
echo "
PORT=$WEB_PORT
REACT_APP_BACKEND_URL=http://localhost:$BACKEND_PORT
REACT_APP_GRAPHQL_URL=https://$CODESPACE_NAME-$BACKEND_PORT.$GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN/graphql
REACT_APP_BASE_URL=https://$CODESPACE_NAME-$WEB_PORT.$GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN
REACT_APP_NOTIFICATIONS_URL=https://notifications.automatisch.io
" >> .env
cd $CURRENT_DIR

View File

@@ -8,7 +8,7 @@
"version": "latest"
},
"ghcr.io/devcontainers/features/node:1": {
"version": 18
"version": 16
},
"ghcr.io/devcontainers/features/common-utils:1": {
"username": "vscode",

View File

@@ -21,43 +21,11 @@ services:
interval: 10s
timeout: 5s
retries: 5
ports:
- '5432:5432'
expose:
- 5432
redis:
image: 'redis:7.0.4-alpine'
volumes:
- redis_data:/data
ports:
- '6379:6379'
expose:
- 6379
keycloak:
image: quay.io/keycloak/keycloak:21.1
restart: always
environment:
- KEYCLOAK_ADMIN=admin
- KEYCLOAK_ADMIN_PASSWORD=admin
- KC_DB=postgres
- KC_DB_URL_HOST=postgres
- KC_DB_URL_DATABASE=keycloak
- KC_DB_USERNAME=automatisch_user
- KC_DB_PASSWORD=automatisch_password
- KC_HEALTH_ENABLED=true
ports:
- "8080:8080"
command: start-dev
depends_on:
- postgres
healthcheck:
test: "curl -f http://localhost:8080/health/ready || exit 1"
volumes:
- keycloak:/opt/keycloak/data/
expose:
- 8080
volumes:
postgres_data:
redis_data:
keycloak:

View File

@@ -1,12 +0,0 @@
**/node_modules/
**/dist/
**/logs/
**/.devcontainer
**/.github
**/.vscode
**/.env
**/.env.test
**/.env.production
**/yarn-error.log
packages/docs
packages/e2e-test

.eslintrc.js (new file, 10 lines changed)
View File

@@ -0,0 +1,10 @@
module.exports = {
root: true,
parser: '@typescript-eslint/parser',
plugins: ['@typescript-eslint'],
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
'prettier',
],
};

View File

@@ -1,48 +0,0 @@
name: Automatisch Backend Tests
on:
push:
branches:
- main
pull_request:
workflow_dispatch:
jobs:
test:
timeout-minutes: 60
runs-on:
- ubuntu-latest
services:
postgres:
image: postgres:14.5-alpine
env:
POSTGRES_DB: automatisch_test
POSTGRES_USER: automatisch_test_user
POSTGRES_PASSWORD: automatisch_test_user_password
options: >-
--health-cmd "pg_isready -U automatisch_test_user -d automatisch_test"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
redis:
image: redis:7.0.4-alpine
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 6379:6379
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
- name: Install dependencies
run: cd packages/backend && yarn
- name: Copy .env-example.test file to .env.test
run: cd packages/backend && cp .env-example.test .env.test
- name: Run tests
run: cd packages/backend && yarn test

View File

@@ -1,11 +1,5 @@
name: Automatisch CI
on:
push:
branches:
- main
pull_request:
workflow_dispatch:
on: [push]
jobs:
linter:
runs-on: ubuntu-latest
@@ -16,15 +10,15 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: '18'
node-version: '16'
cache: 'yarn'
cache-dependency-path: yarn.lock
- run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
- run: echo "🖥️ The workflow is now ready to test your code on the runner."
- run: yarn --frozen-lockfile
- run: cd packages/backend && yarn lint
- run: yarn lint
- run: echo "🍏 This job's status is ${{ job.status }}."
start-backend-server:
build-backend:
runs-on: ubuntu-latest
steps:
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
@@ -33,36 +27,13 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: '18'
node-version: '16'
cache: 'yarn'
cache-dependency-path: yarn.lock
- run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
- run: echo "🖥️ The workflow is now ready to test your code on the runner."
- run: yarn --frozen-lockfile && yarn lerna bootstrap
- run: cd packages/backend && yarn start
env:
ENCRYPTION_KEY: sample_encryption_key
WEBHOOK_SECRET_KEY: sample_webhook_secret_key
- run: echo "🍏 This job's status is ${{ job.status }}."
start-backend-worker:
runs-on: ubuntu-latest
steps:
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
- run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
- run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: '18'
cache: 'yarn'
cache-dependency-path: yarn.lock
- run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
- run: echo "🖥️ The workflow is now ready to test your code on the runner."
- run: yarn --frozen-lockfile && yarn lerna bootstrap
- run: cd packages/backend && yarn start:worker
env:
ENCRYPTION_KEY: sample_encryption_key
WEBHOOK_SECRET_KEY: sample_webhook_secret_key
- run: cd packages/backend && yarn build
- run: echo "🍏 This job's status is ${{ job.status }}."
build-web:
runs-on: ubuntu-latest
@@ -73,7 +44,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: '18'
node-version: '16'
cache: 'yarn'
cache-dependency-path: yarn.lock
- run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
@@ -83,3 +54,21 @@ jobs:
env:
CI: false
- run: echo "🍏 This job's status is ${{ job.status }}."
build-cli:
runs-on: ubuntu-latest
steps:
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
- run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
- run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: '16'
cache: 'yarn'
cache-dependency-path: yarn.lock
- run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
- run: echo "🖥️ The workflow is now ready to test your code on the runner."
- run: yarn --frozen-lockfile && yarn lerna bootstrap
- run: cd packages/backend && yarn build
- run: cd packages/cli && yarn build
- run: echo "🍏 This job's status is ${{ job.status }}."

View File

@@ -1,32 +0,0 @@
name: Automatisch Docs Change
on:
pull_request:
paths:
- 'packages/docs/**'
jobs:
label:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Label PR
uses: actions/github-script@v6
with:
script: |
const { pull_request } = context.payload;
const label = 'documentation-change';
const hasLabel = pull_request.labels.some(({ name }) => name === label);
if (!hasLabel) {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pull_request.number,
labels: [label],
});
console.log(`Label "${label}" added to PR #${pull_request.number}`);
} else {
console.log(`Label "${label}" already exists on PR #${pull_request.number}`);
}

View File

@@ -1,122 +0,0 @@
name: Automatisch UI Tests
on:
push:
branches:
- main
pull_request:
paths:
- 'packages/backend/**'
- 'packages/e2e-tests/**'
- 'packages/web/**'
- '!packages/backend/src/apps/**'
workflow_dispatch:
env:
ENCRYPTION_KEY: sample_encryption_key
WEBHOOK_SECRET_KEY: sample_webhook_secret_key
APP_SECRET_KEY: sample_app_secret_key
POSTGRES_HOST: localhost
POSTGRES_DATABASE: automatisch
POSTGRES_PORT: 5432
POSTGRES_USERNAME: automatisch_user
POSTGRES_PASSWORD: automatisch_password
REDIS_HOST: localhost
APP_ENV: production
LICENSE_KEY: dummy_license_key
jobs:
test:
timeout-minutes: 60
runs-on:
- ubuntu-latest
services:
postgres:
image: postgres:14.5-alpine
env:
POSTGRES_DB: automatisch
POSTGRES_USER: automatisch_user
POSTGRES_PASSWORD: automatisch_password
options: >-
--health-cmd "pg_isready -U automatisch_user -d automatisch"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
redis:
image: redis:7.0.4-alpine
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 6379:6379
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
- name: Install dependencies
run: yarn && yarn lerna bootstrap
- name: Install Playwright Browsers
run: yarn playwright install --with-deps
- name: Build Automatisch web
working-directory: ./packages/web
run: yarn build
env:
# Keep this until we clean up warnings in build processes
CI: false
- name: Migrate database
working-directory: ./packages/backend
run: yarn db:migrate
- name: Seed user
working-directory: ./packages/backend
run: yarn db:seed:user &
- name: Install certutils
run: sudo apt install -y libnss3-tools
- name: Install mkcert
run: |
curl -JLO "https://dl.filippo.io/mkcert/latest?for=linux/amd64" \
&& chmod +x mkcert-v*-linux-amd64 \
&& sudo cp mkcert-v*-linux-amd64 /usr/local/bin/mkcert
- name: Install root certificate via mkcert
run: mkcert -install
- name: Create certificate
run: mkcert automatisch.io "*.automatisch.io" localhost 127.0.0.1 ::1
working-directory: ./packages/e2e-tests
- name: Set CAROOT environment variable
run: echo "NODE_EXTRA_CA_CERTS=$(mkcert -CAROOT)/rootCA.pem" >> "$GITHUB_ENV"
- name: Override license server with local server
run: sudo echo "127.0.0.1 license.automatisch.io" | sudo tee -a /etc/hosts
- name: Run local license server
working-directory: ./packages/e2e-tests
run: sudo yarn start-mock-license-server &
- name: Run Automatisch
run: yarn start &
working-directory: ./packages/backend
- name: Run Automatisch worker
run: yarn start:worker &
working-directory: ./packages/backend
- name: Setup upterm session
if: false
uses: lhotari/action-upterm@v1
with:
limit-access-to-actor: true
limit-access-to-users: barinali
- name: Run Playwright tests
working-directory: ./packages/e2e-tests
env:
LOGIN_EMAIL: user@automatisch.io
LOGIN_PASSWORD: sample
BASE_URL: http://localhost:3000
GITHUB_CLIENT_ID: 1c0417daf898adfbd99a
GITHUB_CLIENT_SECRET: 3328fa814dd582ccd03dbe785cfd683fb8da92b3
run: yarn test
- uses: actions/upload-artifact@v3
if: always()
with:
name: playwright-report
path: packages/e2e-tests/test-results
retention-days: 30

.gitignore (vendored, 3 lines changed)
View File

@@ -125,6 +125,3 @@ dist
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# MacOS finder preferences
.DS_store

View File

@@ -1 +0,0 @@
18.19.0

.nvmrc (1 line changed)
View File

@@ -1 +0,0 @@
18.19.0

View File

@@ -1,7 +1,4 @@
{
"editor.formatOnSave": true,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
}
"editor.defaultFormatter": "esbenp.prettier-vscode"
}

View File

@@ -1,5 +0,0 @@
# Automatisch Contributor License Agreement
I give Automatisch permission to license my contributions on any terms they like. I am giving them this license in order to make it possible for them to accept my contributions into their project.
**_As far as the law allows, my contributions come as is, without any warranty or condition, and I will not be liable to anyone for any damages related to this software or this license, under any kind of legal claim._**

View File

@@ -1,3 +0,0 @@
LICENSE.agpl (AGPL-3.0) applies to all files in this
repository, except for files that contain ".ee." in their name
which are covered by LICENSE.enterprise.

View File

@@ -1,35 +0,0 @@
The Automatisch Enterprise license (the “Enterprise License”)
Copyright (c) 2023-present AB Software GmbH.
With regard to the Automatisch Software:
This software and associated documentation files (the "Software") may only be
used in production, if you (and any entity that you represent) have a valid
Automatisch Enterprise license for the correct number of user seats. Subject
to the foregoing sentence, you are free to modify this Software and publish
patches to the Software. You agree that Automatisch and/or its licensors
(as applicable) retain all right, title and interest in and to all such
modifications and/or patches, and all such modifications and/or patches may
only be used, copied, modified, displayed, distributed, or otherwise exploited
with a valid Automatisch Enterprise license for the correct number of user seats.
Notwithstanding the foregoing, you may copy and modify the Software for
development and testing purposes, without requiring a subscription. You agree
that Automatisch and/or its licensors (as applicable) retain all right, title
and interest in and to all such modifications. You are not granted any other
rights beyond what is expressly stated herein. Subject to the foregoing, it is
forbidden to copy, merge, publish, distribute, sublicense, and/or sell the Software.
The full text of this Enterprise License shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
For all third party components incorporated into the Automatisch Software, those
components are licensed under the original license provided by the owner of the
applicable component.

View File

@@ -44,18 +44,10 @@ For other installation types, you can check the [installation](https://automatis
## Support
If you have any questions or problems, please visit our GitHub issues page, and we'll try to help you as soon as possible.
If you have any questions or problems, please visit our GitHub discussions page, and we'll try to help you as soon as possible.
[https://github.com/automatisch/automatisch/issues](https://github.com/automatisch/automatisch/issues)
[https://github.com/automatisch/automatisch/discussions](https://github.com/automatisch/automatisch/discussions)
## License
Automatisch Community Edition (Automatisch CE) is an open-source software with the [AGPL-3.0 license](LICENSE.agpl).
Automatisch Enterprise Edition (Automatisch EE) is a commercial offering with the [Enterprise license](LICENSE.enterprise).
The Automatisch repository contains both AGPL-licensed and Enterprise-licensed files. We maintain a single repository to make development easier.
All files that contain ".ee." in their name fall under the [Enterprise license](LICENSE.enterprise). All other files fall under the [AGPL-3.0 license](LICENSE.agpl).
See the [LICENSE](LICENSE) file for more information.
Automatisch is an open-source software with the [AGPL 3.0 license](https://github.com/automatisch/automatisch/blob/main/LICENSE.md).

View File

@@ -1,25 +1,14 @@
# syntax=docker/dockerfile:1
FROM node:18-alpine
ENV PORT 3000
RUN \
apk --no-cache add --virtual build-dependencies python3 build-base git
FROM node:16-alpine
WORKDIR /automatisch
# copy the app, note .dockerignore
COPY . /automatisch
RUN apk --no-cache add --virtual build-dependencies python3 build-base
RUN yarn
COPY ./entrypoint.sh /entrypoint.sh
RUN cd packages/web && yarn build
RUN yarn global add @automatisch/cli@0.4.0 --network-timeout 1000000
RUN \
rm -rf /usr/local/share/.cache/ && \
apk del build-dependencies
COPY ./docker/entrypoint.sh /entrypoint.sh
RUN apk del build-dependencies python3 build-base
EXPOSE 3000
ENTRYPOINT ["sh", "/entrypoint.sh"]

View File

@@ -1,5 +1,5 @@
# syntax=docker/dockerfile:1
FROM automatischio/automatisch:latest
FROM automatischio/automatisch:0.4.0
WORKDIR /automatisch
RUN apk add --no-cache openssl dos2unix

View File

@@ -2,12 +2,8 @@
set -e
cd packages/backend
if [ -n "$WORKER" ]; then
yarn start:worker
automatisch start-worker
else
yarn db:migrate
yarn db:seed:user
yarn start
automatisch start
fi

View File

@@ -2,7 +2,7 @@
"packages": [
"packages/*"
],
"version": "0.10.0",
"version": "0.5.0",
"npmClient": "yarn",
"useWorkspaces": true,
"command": {

View File

@@ -1,11 +1,13 @@
{
"name": "@automatisch/root",
"license": "See LICENSE file",
"license": "AGPL-3.0",
"private": true,
"scripts": {
"start": "lerna run --stream --parallel --scope=@*/{web,backend} dev",
"start:web": "lerna run --stream --scope=@*/web dev",
"start:backend": "lerna run --stream --scope=@*/backend dev",
"lint": "lerna run --no-bail --stream --parallel --scope=@*/{web,backend,cli} lint",
"build:watch": "lerna run --no-bail --stream --parallel --scope=@*/{web,backend,cli} build:watch",
"build:docs": "cd ./packages/docs && yarn install && yarn build"
},
"workspaces": {
@@ -16,10 +18,13 @@
"**/babel-loader",
"**/webpack",
"**/@automatisch/web",
"**/@automatisch/types",
"**/ajv"
]
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^5.9.1",
"@typescript-eslint/parser": "^5.9.1",
"eslint": "^8.13.0",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-prettier": "^4.0.0",

View File

@@ -1,15 +0,0 @@
APP_ENV=test
HOST=localhost
PROTOCOL=http
PORT=3000
LOG_LEVEL=debug
ENCRYPTION_KEY=sample_encryption_key
WEBHOOK_SECRET_KEY=sample_webhook_secret_key
APP_SECRET_KEY=sample_app_secret_key
POSTGRES_HOST=localhost
POSTGRES_DATABASE=automatisch_test
POSTGRES_PORT=5432
POSTGRES_USERNAME=automatisch_test_user
POSTGRES_PASSWORD=automatisch_test_user_password
REDIS_HOST=localhost
AUTOMATISCH_CLOUD=true

View File

@@ -1,12 +0,0 @@
{
"root": true,
"env": {
"node": true,
"es6": true
},
"extends": ["eslint:recommended", "prettier"],
"parserOptions": {
"ecmaVersion": "latest",
"sourceType": "module"
}
}

View File

@@ -1,9 +0,0 @@
import pg from 'pg';
const client = new pg.Client({
host: 'localhost',
user: 'postgres',
port: 5432,
});
export default client;

View File

@@ -0,0 +1,9 @@
import { Client } from 'pg';
const client = new Client({
host: 'localhost',
user: 'postgres',
port: 5432,
});
export default client;

View File

@@ -1,31 +0,0 @@
import appConfig from '../../src/config/app.js';
import logger from '../../src/helpers/logger.js';
import '../../src/config/orm.js';
import { client as knex } from '../../src/config/database.js';
export const renameMigrationsAsJsFiles = async () => {
if (!appConfig.isDev) {
return;
}
try {
const tableExists = await knex.schema.hasTable('knex_migrations');
if (tableExists) {
await knex('knex_migrations')
.where('name', 'like', '%.ts')
.update({
name: knex.raw("REPLACE(name, '.ts', '.js')"),
});
logger.info(
`Migration file names with typescript renamed as JS file names!`
);
}
} catch (err) {
logger.error(err.message);
}
await knex.destroy();
};
renameMigrationsAsJsFiles();

View File

@@ -1,3 +0,0 @@
import { createDatabaseAndUser } from './utils.js';
createDatabaseAndUser();

View File

@@ -0,0 +1,3 @@
import { createDatabaseAndUser } from './utils';
createDatabaseAndUser();

View File

@@ -1,3 +0,0 @@
import { dropDatabase } from './utils.js';
dropDatabase();

View File

@@ -0,0 +1,3 @@
import { dropDatabase } from './utils';
dropDatabase();

View File

@@ -1,3 +0,0 @@
import { createUser } from './utils.js';
createUser();

View File

@@ -0,0 +1,3 @@
import { createUser } from './utils';
createUser();

View File

@@ -1,145 +0,0 @@
import appConfig from '../../src/config/app.js';
import logger from '../../src/helpers/logger.js';
import client from './client.js';
import User from '../../src/models/user.js';
import Config from '../../src/models/config.js';
import Role from '../../src/models/role.js';
import '../../src/config/orm.js';
import process from 'process';
async function fetchAdminRole() {
const role = await Role.query()
.where({
key: 'admin',
})
.limit(1)
.first();
return role;
}
export async function createUser(
email = 'user@automatisch.io',
password = 'sample'
) {
if (appConfig.disableSeedUser) {
logger.info('Seed user is disabled.');
process.exit(0);
return;
}
const UNIQUE_VIOLATION_CODE = '23505';
const role = await fetchAdminRole();
const userParams = {
email,
password,
fullName: 'Initial admin',
roleId: role.id,
};
try {
const userCount = await User.query().resultSize();
if (userCount === 0) {
const user = await User.query().insertAndFetch(userParams);
logger.info(`User has been saved: ${user.email}`);
await Config.markInstallationCompleted();
} else {
logger.info('No need to seed a user.');
}
} catch (err) {
if (err.nativeError.code !== UNIQUE_VIOLATION_CODE) {
throw err;
}
logger.info(`User already exists: ${email}`);
}
process.exit(0);
}
export const createDatabaseAndUser = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.connect();
await createDatabase(database);
await createDatabaseUser(user);
await grantPrivileges(database, user);
await client.end();
process.exit(0);
};
export const createDatabase = async (database = appConfig.postgresDatabase) => {
const DUPLICATE_DB_CODE = '42P04';
try {
await client.query(`CREATE DATABASE ${database}`);
logger.info(`Database: ${database} created!`);
} catch (err) {
if (err.code !== DUPLICATE_DB_CODE) {
throw err;
}
logger.info(`Database: ${database} already exists!`);
}
};
export const createDatabaseUser = async (user = appConfig.postgresUsername) => {
const DUPLICATE_OBJECT_CODE = '42710';
try {
const result = await client.query(`CREATE USER ${user}`);
logger.info(`Database User: ${user} created!`);
return result;
} catch (err) {
if (err.code !== DUPLICATE_OBJECT_CODE) {
throw err;
}
logger.info(`Database User: ${user} already exists!`);
}
};
export const grantPrivileges = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.query(
`GRANT ALL PRIVILEGES ON DATABASE ${database} TO ${user};`
);
logger.info(`${user} has granted all privileges on ${database}!`);
};
export const dropDatabase = async () => {
if (appConfig.appEnv != 'development' && appConfig.appEnv != 'test') {
const errorMessage =
'Drop database command can be used only with development or test environments!';
logger.error(errorMessage);
return;
}
await client.connect();
await dropDatabaseAndUser();
await client.end();
};
export const dropDatabaseAndUser = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.query(`DROP DATABASE IF EXISTS ${database}`);
logger.info(`Database: ${database} removed!`);
await client.query(`DROP USER IF EXISTS ${user}`);
logger.info(`Database User: ${user} removed!`);
};

View File

@@ -0,0 +1,114 @@
import appConfig from '../../src/config/app';
import logger from '../../src/helpers/logger';
import client from './client';
import User from '../../src/models/user';
import '../../src/config/orm';
export async function createUser(
email = 'user@automatisch.io',
password = 'sample'
) {
const UNIQUE_VIOLATION_CODE = '23505';
const userParams = {
email,
password,
};
try {
const userCount = await User.query().resultSize();
if (userCount === 0) {
const user = await User.query().insertAndFetch(userParams);
logger.info(`User has been saved: ${user.email}`);
} else {
logger.info('No need to seed a user.');
}
} catch (err) {
if ((err as any).nativeError.code !== UNIQUE_VIOLATION_CODE) {
throw err;
}
logger.info(`User already exists: ${email}`);
}
}
export const createDatabaseAndUser = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.connect();
await createDatabase(database);
await createDatabaseUser(user);
await grantPrivileges(database, user);
await client.end();
};
export const createDatabase = async (database = appConfig.postgresDatabase) => {
const DUPLICATE_DB_CODE = '42P04';
try {
await client.query(`CREATE DATABASE ${database}`);
logger.info(`Database: ${database} created!`);
} catch (err) {
if ((err as any).code !== DUPLICATE_DB_CODE) {
throw err;
}
logger.info(`Database: ${database} already exists!`);
}
};
export const createDatabaseUser = async (user = appConfig.postgresUsername) => {
const DUPLICATE_OBJECT_CODE = '42710';
try {
const result = await client.query(`CREATE USER ${user}`);
logger.info(`Database User: ${user} created!`);
return result;
} catch (err) {
if ((err as any).code !== DUPLICATE_OBJECT_CODE) {
throw err;
}
logger.info(`Database User: ${user} already exists!`);
}
};
export const grantPrivileges = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.query(
`GRANT ALL PRIVILEGES ON DATABASE ${database} TO ${user};`
);
logger.info(`${user} has granted all privileges on ${database}!`);
};
export const dropDatabase = async () => {
if (appConfig.appEnv != 'development' && appConfig.appEnv != 'test') {
const errorMessage =
'Drop database command can be used only with development or test environments!';
logger.error(errorMessage);
return;
}
await client.connect();
await dropDatabaseAndUser();
await client.end();
};
export const dropDatabaseAndUser = async (
database = appConfig.postgresDatabase,
user = appConfig.postgresUsername
) => {
await client.query(`DROP DATABASE IF EXISTS ${database}`);
logger.info(`Database: ${database} removed!`);
await client.query(`DROP USER IF EXISTS ${user}`);
logger.info(`Database User: ${user} removed!`);
};

packages/backend/database-utils.d.ts (new vendored file, 1 line changed)
View File

@@ -0,0 +1 @@
export * from './dist/bin/database/utils';

View File

@@ -0,0 +1,2 @@
/* eslint-disable */
module.exports = require('./dist/bin/database/utils');

packages/backend/database.d.ts (new vendored file, 1 line changed)
View File

@@ -0,0 +1 @@
export * from './dist/src/config/database';

View File

@@ -0,0 +1,2 @@
/* eslint-disable */
module.exports = require('./dist/src/config/database');

View File

@@ -1,33 +0,0 @@
import { knexSnakeCaseMappers } from 'objection';
import appConfig from './src/config/app.js';
import path from 'path';
import { fileURLToPath } from 'url';
const fileExtension = 'js';
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const knexConfig = {
client: 'pg',
connection: {
host: appConfig.postgresHost,
port: appConfig.postgresPort,
user: appConfig.postgresUsername,
password: appConfig.postgresPassword,
database: appConfig.postgresDatabase,
ssl: appConfig.postgresEnableSsl,
},
asyncStackTraces: appConfig.isDev,
searchPath: [appConfig.postgresSchema],
pool: { min: 0, max: 20 },
migrations: {
directory: __dirname + '/src/db/migrations',
extension: fileExtension,
loadExtensions: [`.${fileExtension}`],
},
seeds: {
directory: __dirname + '/src/db/seeds',
},
...(appConfig.isTest ? knexSnakeCaseMappers() : {}),
};
export default knexConfig;

View File

@@ -0,0 +1,26 @@
import appConfig from './src/config/app';
const fileExtension = appConfig.isDev ? 'ts' : 'js';
const knexConfig = {
client: 'pg',
connection: {
host: appConfig.postgresHost,
port: appConfig.postgresPort,
user: appConfig.postgresUsername,
password: appConfig.postgresPassword,
database: appConfig.postgresDatabase,
ssl: appConfig.postgresEnableSsl,
},
pool: { min: 0, max: 20 },
migrations: {
directory: __dirname + '/src/db/migrations',
extension: fileExtension,
loadExtensions: [`.${fileExtension}`],
},
seeds: {
directory: __dirname + '/src/db/seeds',
},
};
export default knexConfig;

packages/backend/logger.d.ts (new vendored file, 1 line changed)
View File

@@ -0,0 +1 @@
export * from './dist/src/helpers/logger';

View File

@@ -0,0 +1,2 @@
/* eslint-disable */
module.exports = require('./dist/src/helpers/logger');

View File

@@ -1,75 +1,62 @@
{
"name": "@automatisch/backend",
"version": "0.10.0",
"license": "See LICENSE file",
"version": "0.5.0",
"license": "AGPL-3.0",
"description": "The open source Zapier alternative. Build workflow automation without spending time and money.",
"type": "module",
"scripts": {
"dev": "nodemon --watch 'src/**/*.js' --exec 'node' src/server.js",
"worker": "nodemon --watch 'src/**/*.js' --exec 'node' src/worker.js",
"start": "node src/server.js",
"start:worker": "node src/worker.js",
"pretest": "APP_ENV=test node ./test/setup/prepare-test-env.js",
"test": "APP_ENV=test vitest run",
"lint": "eslint .",
"db:create": "node ./bin/database/create.js",
"db:seed:user": "node ./bin/database/seed-user.js",
"db:drop": "node ./bin/database/drop.js",
"dev": "ts-node-dev --exit-child src/server.ts",
"worker": "nodemon --watch 'src/**/*.ts' --exec 'ts-node' src/worker.ts",
"build": "tsc && yarn copy-statics",
"build:watch": "nodemon --watch 'src/**/*.ts' --watch 'bin/**/*.ts' --exec yarn build --ext ts",
"start": "node dist/src/server.js",
"test": "ava",
"lint": "eslint . --ignore-path ../../.eslintignore",
"db:create": "ts-node ./bin/database/create.ts",
"db:seed:user": "ts-node ./bin/database/seed-user.ts",
"db:drop": "ts-node ./bin/database/drop.ts",
"db:migration:create": "knex migrate:make",
"db:rollback": "knex migrate:rollback",
"db:migrate": "node ./bin/database/convert-migrations.js && knex migrate:latest"
"db:migrate": "knex migrate:latest",
"copy-statics": "copyfiles src/**/*.{graphql,json,svg} dist",
"prepack": "yarn build",
"prebuild": "rm -rf ./dist"
},
"dependencies": {
"@automatisch/web": "^0.5.0",
"@bull-board/express": "^3.10.1",
"@casl/ability": "^6.5.0",
"@graphql-tools/graphql-file-loader": "^7.3.4",
"@graphql-tools/load": "^7.5.2",
"@node-saml/passport-saml": "^4.0.4",
"@rudderstack/rudder-sdk-node": "^1.1.2",
"@sentry/node": "^7.42.0",
"@sentry/tracing": "^7.42.0",
"accounting": "^0.4.1",
"@types/luxon": "^2.3.1",
"ajv-formats": "^2.1.1",
"axios": "1.6.0",
"bcrypt": "^5.1.0",
"axios": "0.24.0",
"bcrypt": "^5.0.1",
"bullmq": "^3.0.0",
"copyfiles": "^2.4.1",
"cors": "^2.8.5",
"crypto-js": "^4.1.1",
"debug": "~2.6.9",
"dotenv": "^10.0.0",
"express": "~4.18.2",
"express-async-handler": "^1.2.0",
"express-basic-auth": "^1.2.1",
"express-graphql": "^0.12.0",
"fast-xml-parser": "^4.0.11",
"graphql-middleware": "^6.1.15",
"graphql-shield": "^7.5.0",
"graphql-tools": "^8.2.0",
"handlebars": "^4.7.7",
"graphql-type-json": "^0.3.2",
"http-errors": "~1.6.3",
"http-proxy-agent": "^7.0.0",
"https-proxy-agent": "^7.0.1",
"jsonwebtoken": "^9.0.0",
"knex": "^2.4.0",
"libphonenumber-js": "^1.10.48",
"lodash.get": "^4.4.2",
"luxon": "2.5.2",
"memory-cache": "^0.2.0",
"morgan": "^1.10.0",
"multer": "1.4.5-lts.1",
"node-html-markdown": "^1.3.0",
"nodemailer": "6.7.0",
"oauth-1.0a": "^2.2.6",
"objection": "^3.0.0",
"passport": "^0.6.0",
"pg": "^8.7.1",
"php-serialize": "^4.0.2",
"pluralize": "^8.0.0",
"raw-body": "^2.5.2",
"showdown": "^2.1.0",
"uuid": "^9.0.1",
"winston": "^3.7.1",
"xmlrpc": "^1.3.2"
"winston": "^3.7.1"
},
"contributors": [
{
@@ -78,15 +65,26 @@
}
],
"homepage": "https://github.com/automatisch/automatisch#readme",
"main": "src/server",
"main": "dist/src/app",
"directories": {
"bin": "bin",
"src": "src",
"test": "__tests__"
},
"files": [
"dist",
"bin",
"src"
"src",
"server.js",
"server.d.ts",
"worker.js",
"worker.d.ts",
"logger.js",
"logger.d.ts",
"database.js",
"database.d.ts",
"database-utils.js",
"database-utils.d.ts"
],
"repository": {
"type": "git",
@@ -96,10 +94,37 @@
"url": "https://github.com/automatisch/automatisch/issues"
},
"devDependencies": {
"node-gyp": "^10.1.0",
"@automatisch/types": "^0.5.0",
"@types/bcrypt": "^5.0.0",
"@types/bull": "^3.15.8",
"@types/cors": "^2.8.12",
"@types/crypto-js": "^4.0.2",
"@types/express": "^4.17.15",
"@types/http-errors": "^1.8.1",
"@types/jsonwebtoken": "^8.5.8",
"@types/lodash.get": "^4.4.6",
"@types/morgan": "^1.9.3",
"@types/multer": "1.4.7",
"@types/node": "^16.10.2",
"@types/nodemailer": "^6.4.4",
"@types/pg": "^8.6.1",
"@types/pino": "^7.0.5",
"ava": "^3.15.0",
"nodemon": "^2.0.13",
"supertest": "^6.3.3",
"vitest": "^1.1.3"
"sinon": "^11.1.2",
"ts-node": "^10.2.1",
"ts-node-dev": "^1.1.8"
},
"ava": {
"files": [
"test/**/*"
],
"extensions": [
"ts"
],
"require": [
"ts-node/register"
]
},
"publishConfig": {
"access": "public"

packages/backend/server.d.ts (new vendored file, 1 line changed)
View File

@@ -0,0 +1 @@
export * from './dist/src/server';

View File

@@ -0,0 +1,2 @@
/* eslint-disable */
module.exports = require('./dist/src/server.js');

View File

@@ -1,70 +0,0 @@
import createError from 'http-errors';
import express from 'express';
import cors from 'cors';
import appConfig from './config/app.js';
import corsOptions from './config/cors-options.js';
import morgan from './helpers/morgan.js';
import * as Sentry from './helpers/sentry.ee.js';
import appAssetsHandler from './helpers/app-assets-handler.js';
import webUIHandler from './helpers/web-ui-handler.js';
import errorHandler from './helpers/error-handler.js';
import './config/orm.js';
import {
createBullBoardHandler,
serverAdapter,
} from './helpers/create-bull-board-handler.js';
import injectBullBoardHandler from './helpers/inject-bull-board-handler.js';
import router from './routes/index.js';
import configurePassport from './helpers/passport.js';
createBullBoardHandler(serverAdapter);
const app = express();
Sentry.init(app);
Sentry.attachRequestHandler(app);
Sentry.attachTracingHandler(app);
injectBullBoardHandler(app, serverAdapter);
appAssetsHandler(app);
app.use(morgan);
app.use(
express.json({
limit: appConfig.requestBodySizeLimit,
verify(req, res, buf) {
req.rawBody = buf;
},
})
);
app.use(
express.urlencoded({
extended: true,
limit: appConfig.requestBodySizeLimit,
verify(req, res, buf) {
req.rawBody = buf;
},
})
);
app.use(cors(corsOptions));
configurePassport(app);
app.use('/', router);
webUIHandler(app);
// catch 404 and forward to error handler
app.use(function (req, res, next) {
next(createError(404));
});
Sentry.attachErrorHandler(app);
app.use(errorHandler);
export default app;

View File

@@ -0,0 +1,55 @@
import createError from 'http-errors';
import express from 'express';
import appConfig from './config/app';
import cors from 'cors';
import corsOptions from './config/cors-options';
import morgan from './helpers/morgan';
import appAssetsHandler from './helpers/app-assets-handler';
import webUIHandler from './helpers/web-ui-handler';
import errorHandler from './helpers/error-handler';
import './config/orm';
import {
createBullBoardHandler,
serverAdapter,
} from './helpers/create-bull-board-handler';
import injectBullBoardHandler from './helpers/inject-bull-board-handler';
import router from './routes';
import { IRequest } from '@automatisch/types';
createBullBoardHandler(serverAdapter);
const app = express();
injectBullBoardHandler(app, serverAdapter);
appAssetsHandler(app);
app.use(morgan);
app.use(
express.json({
limit: appConfig.requestBodySizeLimit,
verify(req, res, buf) {
(req as IRequest).rawBody = buf;
},
})
);
app.use(express.urlencoded({
extended: false,
limit: appConfig.requestBodySizeLimit,
verify(req, res, buf) {
(req as IRequest).rawBody = buf;
},
}));
app.use(cors(corsOptions));
app.use('/', router);
webUIHandler(app);
// catch 404 and forward to error handler
app.use(function (req, res, next) {
next(createError(404));
});
app.use(errorHandler);
export default app;

View File

@@ -1,92 +0,0 @@
import defineAction from '../../../../helpers/define-action.js';
export default defineAction({
name: 'Create record',
key: 'createRecord',
description: 'Creates a new record with fields that automatically populate.',
arguments: [
{
label: 'Base',
key: 'baseId',
type: 'dropdown',
required: true,
description: 'Base in which to create the record.',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listBases',
},
],
},
},
{
label: 'Table',
key: 'tableId',
type: 'dropdown',
required: true,
dependsOn: ['parameters.baseId'],
description: '',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listTables',
},
{
name: 'parameters.baseId',
value: '{parameters.baseId}',
},
],
},
additionalFields: {
type: 'query',
name: 'getDynamicFields',
arguments: [
{
name: 'key',
value: 'listFields',
},
{
name: 'parameters.baseId',
value: '{parameters.baseId}',
},
{
name: 'parameters.tableId',
value: '{parameters.tableId}',
},
],
},
},
],
async run($) {
const { baseId, tableId, ...rest } = $.step.parameters;
const fields = Object.entries(rest).reduce((result, [key, value]) => {
if (Array.isArray(value)) {
result[key] = value.map((item) => item.value);
} else if (value !== '') {
result[key] = value;
}
return result;
}, {});
const body = {
typecast: true,
fields,
};
const { data } = await $.http.post(`/v0/${baseId}/${tableId}`, body);
$.setActionItem({
raw: data,
});
},
});

View File

@@ -1,174 +0,0 @@
import defineAction from '../../../../helpers/define-action.js';
import { URLSearchParams } from 'url';
export default defineAction({
name: 'Find record',
key: 'findRecord',
description:
"Finds a record using simple field search or use Airtable's formula syntax to find a matching record.",
arguments: [
{
label: 'Base',
key: 'baseId',
type: 'dropdown',
required: true,
description: 'Base in which to create the record.',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listBases',
},
],
},
},
{
label: 'Table',
key: 'tableId',
type: 'dropdown',
required: true,
dependsOn: ['parameters.baseId'],
description: '',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listTables',
},
{
name: 'parameters.baseId',
value: '{parameters.baseId}',
},
],
},
},
{
label: 'Search by field',
key: 'tableField',
type: 'dropdown',
required: false,
dependsOn: ['parameters.baseId', 'parameters.tableId'],
description: '',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listTableFields',
},
{
name: 'parameters.baseId',
value: '{parameters.baseId}',
},
{
name: 'parameters.tableId',
value: '{parameters.tableId}',
},
],
},
},
{
label: 'Search Value',
key: 'searchValue',
type: 'string',
required: false,
variables: true,
description:
'The value of unique identifier for the record. For date values, please use the ISO format (e.g., "YYYY-MM-DD").',
},
{
label: 'Search for exact match?',
key: 'exactMatch',
type: 'dropdown',
required: true,
description: '',
variables: true,
options: [
{ label: 'Yes', value: 'true' },
{ label: 'No', value: 'false' },
],
},
{
label: 'Search Formula',
key: 'searchFormula',
type: 'string',
required: false,
variables: true,
description:
'Instead, you have the option to use an Airtable search formula for locating records according to sophisticated criteria and across various fields.',
},
{
label: 'Limit to View',
key: 'limitToView',
type: 'dropdown',
required: false,
dependsOn: ['parameters.baseId', 'parameters.tableId'],
description:
'You have the choice to restrict the search to a particular view ID if desired.',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listTableViews',
},
{
name: 'parameters.baseId',
value: '{parameters.baseId}',
},
{
name: 'parameters.tableId',
value: '{parameters.tableId}',
},
],
},
},
],
async run($) {
const {
baseId,
tableId,
tableField,
searchValue,
exactMatch,
searchFormula,
limitToView,
} = $.step.parameters;
let filterByFormula;
if (tableField && searchValue) {
filterByFormula =
exactMatch === 'true'
? `{${tableField}} = '${searchValue}'`
: `LOWER({${tableField}}) = LOWER('${searchValue}')`;
} else {
filterByFormula = searchFormula;
}
const body = new URLSearchParams({
filterByFormula,
view: limitToView,
});
const { data } = await $.http.post(
`/v0/${baseId}/${tableId}/listRecords`,
body
);
$.setActionItem({
raw: data,
});
},
});

View File

@@ -1,4 +0,0 @@
import createRecord from './create-record/index.js';
import findRecord from './find-record/index.js';
export default [createRecord, findRecord];

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="256px" height="215px" viewBox="0 0 256 215" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
<g>
<path d="M114.25873,2.70101695 L18.8604023,42.1756384 C13.5552723,44.3711638 13.6102328,51.9065311 18.9486282,54.0225085 L114.746142,92.0117514 C123.163769,95.3498757 132.537419,95.3498757 140.9536,92.0117514 L236.75256,54.0225085 C242.08951,51.9065311 242.145916,44.3711638 236.83934,42.1756384 L141.442459,2.70101695 C132.738459,-0.900338983 122.961284,-0.900338983 114.25873,2.70101695" fill="#FFBF00"></path>
<path d="M136.349071,112.756863 L136.349071,207.659101 C136.349071,212.173089 140.900664,215.263892 145.096461,213.600615 L251.844122,172.166219 C254.281184,171.200072 255.879376,168.845451 255.879376,166.224705 L255.879376,71.3224678 C255.879376,66.8084791 251.327783,63.7176768 247.131986,65.3809537 L140.384325,106.815349 C137.94871,107.781496 136.349071,110.136118 136.349071,112.756863" fill="#26B5F8"></path>
<path d="M111.422771,117.65355 L79.742409,132.949912 L76.5257763,134.504714 L9.65047684,166.548104 C5.4112904,168.593211 0.000578531073,165.503855 0.000578531073,160.794612 L0.000578531073,71.7210757 C0.000578531073,70.0173017 0.874160452,68.5463864 2.04568588,67.4384994 C2.53454463,66.9481944 3.08848814,66.5446689 3.66412655,66.2250305 C5.26231864,65.2661153 7.54173107,65.0101153 9.47981017,65.7766689 L110.890522,105.957098 C116.045234,108.002206 116.450206,115.225166 111.422771,117.65355" fill="#ED3049"></path>
<path d="M111.422771,117.65355 L79.742409,132.949912 L2.04568588,67.4384994 C2.53454463,66.9481944 3.08848814,66.5446689 3.66412655,66.2250305 C5.26231864,65.2661153 7.54173107,65.0101153 9.47981017,65.7766689 L110.890522,105.957098 C116.045234,108.002206 116.450206,115.225166 111.422771,117.65355" fill-opacity="0.25" fill="#000000"></path>
</g>
</svg>


View File

@@ -1,38 +0,0 @@
import crypto from 'crypto';
import { URLSearchParams } from 'url';
import authScope from '../common/auth-scope.js';
export default async function generateAuthUrl($) {
const oauthRedirectUrlField = $.app.auth.fields.find(
(field) => field.key == 'oAuthRedirectUrl'
);
const redirectUri = oauthRedirectUrlField.value;
const state = crypto.randomBytes(100).toString('base64url');
const codeVerifier = crypto.randomBytes(96).toString('base64url');
const codeChallenge = crypto
.createHash('sha256')
.update(codeVerifier)
.digest('base64')
.replace(/=/g, '')
.replace(/\+/g, '-')
.replace(/\//g, '_');
const searchParams = new URLSearchParams({
client_id: $.auth.data.clientId,
redirect_uri: redirectUri,
response_type: 'code',
scope: authScope.join(' '),
state,
code_challenge: codeChallenge,
code_challenge_method: 'S256',
});
const url = `https://airtable.com/oauth2/v1/authorize?${searchParams.toString()}`;
await $.auth.set({
url,
originalCodeChallenge: codeChallenge,
originalState: state,
codeVerifier,
});
}

View File

@@ -1,48 +0,0 @@
import generateAuthUrl from './generate-auth-url.js';
import verifyCredentials from './verify-credentials.js';
import refreshToken from './refresh-token.js';
import isStillVerified from './is-still-verified.js';
export default {
fields: [
{
key: 'oAuthRedirectUrl',
label: 'OAuth Redirect URL',
type: 'string',
required: true,
readOnly: true,
value: '{WEB_APP_URL}/app/airtable/connections/add',
placeholder: null,
description:
'When asked to input a redirect URL in Airtable, enter the URL above.',
clickToCopy: true,
},
{
key: 'clientId',
label: 'Client ID',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: null,
clickToCopy: false,
},
{
key: 'clientSecret',
label: 'Client Secret',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: null,
clickToCopy: false,
},
],
generateAuthUrl,
verifyCredentials,
isStillVerified,
refreshToken,
};

View File

@@ -1,8 +0,0 @@
import getCurrentUser from '../common/get-current-user.js';
const isStillVerified = async ($) => {
const currentUser = await getCurrentUser($);
return !!currentUser.id;
};
export default isStillVerified;

View File

@@ -1,40 +0,0 @@
import { URLSearchParams } from 'node:url';
import authScope from '../common/auth-scope.js';
const refreshToken = async ($) => {
const params = new URLSearchParams({
client_id: $.auth.data.clientId,
grant_type: 'refresh_token',
refresh_token: $.auth.data.refreshToken,
});
const basicAuthToken = Buffer.from(
$.auth.data.clientId + ':' + $.auth.data.clientSecret
).toString('base64');
const { data } = await $.http.post(
'https://airtable.com/oauth2/v1/token',
params.toString(),
{
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
Authorization: `Basic ${basicAuthToken}`,
},
additionalProperties: {
skipAddingAuthHeader: true,
},
}
);
await $.auth.set({
accessToken: data.access_token,
refreshToken: data.refresh_token,
expiresIn: data.expires_in,
refreshExpiresIn: data.refresh_expires_in,
scope: authScope.join(' '),
tokenType: data.token_type,
});
};
export default refreshToken;

View File

@@ -1,56 +0,0 @@
import getCurrentUser from '../common/get-current-user.js';
const verifyCredentials = async ($) => {
if ($.auth.data.originalState !== $.auth.data.state) {
throw new Error("The 'state' parameter does not match.");
}
if ($.auth.data.originalCodeChallenge !== $.auth.data.code_challenge) {
throw new Error("The 'code challenge' parameter does not match.");
}
const oauthRedirectUrlField = $.app.auth.fields.find(
(field) => field.key == 'oAuthRedirectUrl'
);
const redirectUri = oauthRedirectUrlField.value;
const basicAuthToken = Buffer.from(
$.auth.data.clientId + ':' + $.auth.data.clientSecret
).toString('base64');
const { data } = await $.http.post(
'https://airtable.com/oauth2/v1/token',
{
code: $.auth.data.code,
client_id: $.auth.data.clientId,
redirect_uri: redirectUri,
grant_type: 'authorization_code',
code_verifier: $.auth.data.codeVerifier,
},
{
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
Authorization: `Basic ${basicAuthToken}`,
},
additionalProperties: {
skipAddingAuthHeader: true,
},
}
);
await $.auth.set({
accessToken: data.access_token,
tokenType: data.token_type,
});
const currentUser = await getCurrentUser($);
await $.auth.set({
clientId: $.auth.data.clientId,
clientSecret: $.auth.data.clientSecret,
scope: $.auth.data.scope,
expiresIn: data.expires_in,
refreshExpiresIn: data.refresh_expires_in,
refreshToken: data.refresh_token,
screenName: currentUser.email,
});
};
export default verifyCredentials;

View File

@@ -1,12 +0,0 @@
const addAuthHeader = ($, requestConfig) => {
if (
!requestConfig.additionalProperties?.skipAddingAuthHeader &&
$.auth.data?.accessToken
) {
requestConfig.headers.Authorization = `${$.auth.data.tokenType} ${$.auth.data.accessToken}`;
}
return requestConfig;
};
export default addAuthHeader;

View File

@@ -1,12 +0,0 @@
const authScope = [
'data.records:read',
'data.records:write',
'data.recordComments:read',
'data.recordComments:write',
'schema.bases:read',
'schema.bases:write',
'user.email:read',
'webhook:manage',
];
export default authScope;

View File

@@ -1,6 +0,0 @@
const getCurrentUser = async ($) => {
const { data: currentUser } = await $.http.get('/v0/meta/whoami');
return currentUser;
};
export default getCurrentUser;

View File

@@ -1,6 +0,0 @@
import listBases from './list-bases/index.js';
import listTableFields from './list-table-fields/index.js';
import listTableViews from './list-table-views/index.js';
import listTables from './list-tables/index.js';
export default [listBases, listTableFields, listTableViews, listTables];

View File

@@ -1,28 +0,0 @@
export default {
name: 'List bases',
key: 'listBases',
async run($) {
const bases = {
data: [],
};
const params = {};
do {
const { data } = await $.http.get('/v0/meta/bases', { params });
params.offset = data.offset;
if (data?.bases) {
for (const base of data.bases) {
bases.data.push({
value: base.id,
name: base.name,
});
}
}
} while (params.offset);
return bases;
},
};

View File

@@ -1,39 +0,0 @@
export default {
name: 'List table fields',
key: 'listTableFields',
async run($) {
const tableFields = {
data: [],
};
const { baseId, tableId } = $.step.parameters;
if (!baseId) {
return tableFields;
}
const params = {};
do {
const { data } = await $.http.get(`/v0/meta/bases/${baseId}/tables`, {
params,
});
params.offset = data.offset;
if (data?.tables) {
for (const table of data.tables) {
if (table.id === tableId) {
table.fields.forEach((field) => {
tableFields.data.push({
value: field.name,
name: field.name,
});
});
}
}
}
} while (params.offset);
return tableFields;
},
};

View File

@@ -1,39 +0,0 @@
export default {
name: 'List table views',
key: 'listTableViews',
async run($) {
const tableViews = {
data: [],
};
const { baseId, tableId } = $.step.parameters;
if (!baseId) {
return tableViews;
}
const params = {};
do {
const { data } = await $.http.get(`/v0/meta/bases/${baseId}/tables`, {
params,
});
params.offset = data.offset;
if (data?.tables) {
for (const table of data.tables) {
if (table.id === tableId) {
table.views.forEach((view) => {
tableViews.data.push({
value: view.id,
name: view.name,
});
});
}
}
}
} while (params.offset);
return tableViews;
},
};

View File

@@ -1,35 +0,0 @@
export default {
name: 'List tables',
key: 'listTables',
async run($) {
const tables = {
data: [],
};
const baseId = $.step.parameters.baseId;
if (!baseId) {
return tables;
}
const params = {};
do {
const { data } = await $.http.get(`/v0/meta/bases/${baseId}/tables`, {
params,
});
params.offset = data.offset;
if (data?.tables) {
for (const table of data.tables) {
tables.data.push({
value: table.id,
name: table.name,
});
}
}
} while (params.offset);
return tables;
},
};

View File

@@ -1,3 +0,0 @@
import listFields from './list-fields/index.js';
export default [listFields];

View File

@@ -1,86 +0,0 @@
const hasValue = (value) => value !== null && value !== undefined;
export default {
name: 'List fields',
key: 'listFields',
async run($) {
const options = [];
const { baseId, tableId } = $.step.parameters;
if (!hasValue(baseId) || !hasValue(tableId)) {
return;
}
const { data } = await $.http.get(`/v0/meta/bases/${baseId}/tables`);
const selectedTable = data.tables.find((table) => table.id === tableId);
if (!selectedTable) return;
selectedTable.fields.forEach((field) => {
if (field.type === 'singleSelect') {
options.push({
label: field.name,
key: field.name,
type: 'dropdown',
required: false,
variables: true,
options: field.options.choices.map((choice) => ({
label: choice.name,
value: choice.id,
})),
});
} else if (field.type === 'multipleSelects') {
options.push({
label: field.name,
key: field.name,
type: 'dynamic',
required: false,
variables: true,
fields: [
{
label: 'Value',
key: 'value',
type: 'dropdown',
required: false,
variables: true,
options: field.options.choices.map((choice) => ({
label: choice.name,
value: choice.id,
})),
},
],
});
} else if (field.type === 'checkbox') {
options.push({
label: field.name,
key: field.name,
type: 'dropdown',
required: false,
variables: true,
options: [
{
label: 'Yes',
value: 'true',
},
{
label: 'No',
value: 'false',
},
],
});
} else {
options.push({
label: field.name,
key: field.name,
type: 'string',
required: false,
variables: true,
});
}
});
return options;
},
};

View File

@@ -1,22 +0,0 @@
import defineApp from '../../helpers/define-app.js';
import addAuthHeader from './common/add-auth-header.js';
import auth from './auth/index.js';
import actions from './actions/index.js';
import dynamicData from './dynamic-data/index.js';
import dynamicFields from './dynamic-fields/index.js';
export default defineApp({
name: 'Airtable',
key: 'airtable',
baseUrl: 'https://airtable.com',
apiBaseUrl: 'https://api.airtable.com',
iconUrl: '{BASE_URL}/apps/airtable/assets/favicon.svg',
authDocUrl: '{DOCS_URL}/apps/airtable/connection',
primaryColor: 'FFBF00',
supportsConnections: true,
beforeRequest: [addAuthHeader],
auth,
actions,
dynamicData,
dynamicFields,
});

View File

@@ -1 +0,0 @@
<svg xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.w3.org/2000/svg" width="132" height="24" fill="none" viewBox="0 0 132 24"><path fill="#19191C" d="M38.557 19.495c2.16 0 3.25-1.113 3.725-1.87h.214c.094.805.664 1.562 1.779 1.562h2.111V16.82h-.545c-.38 0-.57-.213-.57-.545V6.776h-2.8v1.516h-.213c-.545-.758-1.684-1.824-3.772-1.824-3.321 0-5.789 2.748-5.789 6.514s2.515 6.513 5.86 6.513m.498-2.7c-1.969 0-3.51-1.445-3.51-3.79 0-2.297 1.494-3.86 3.487-3.86 1.898 0 3.487 1.397 3.487 3.86 0 2.108-1.352 3.79-3.463 3.79M48.04 24h2.799v-6.376h.213c.522.758 1.637 1.871 3.844 1.871 3.321 0 5.741-2.795 5.741-6.513 0-3.743-2.586-6.514-5.931-6.514-2.135 0-3.18 1.16-3.678 1.8h-.213V6.776h-2.776V24m6.263-7.134c-1.922 0-3.512-1.42-3.512-3.884 0-2.108 1.353-3.885 3.464-3.885 1.97 0 3.511 1.54 3.511 3.885 0 2.297-1.494 3.884-3.463 3.884M62.082 24h2.8v-6.376h.213c.522.758 1.637 1.871 3.843 1.871 3.321 0 5.51-2.795 5.51-6.513 0-3.743-2.355-6.514-5.7-6.514-2.135 0-3.179 1.16-3.677 1.8h-.214V6.776h-2.775zm6.263-7.134c-1.922 0-3.511-1.42-3.511-3.884 0-2.108 1.352-3.885 3.463-3.885 1.97 0 3.512 1.54 3.512 3.885 0 2.297-1.495 3.884-3.464 3.884m9.805 2.61h3.961l2.254-9.735h.143l2.253 9.735H90.7l3.153-12.412h-2.821l-2.254 9.759h-.214l-2.253-9.759h-3.725l-2.278 9.759h-.213l-2.23-9.759h-2.99l3.274 12.412m17.123 0h2.8V13.34c0-2.345 1.09-3.79 3.131-3.79h1.233V6.756h-.925c-1.59 0-2.8 1.09-3.274 2.132h-.19V7.064h-2.775zm21.057 0h2.183v-2.487h-2.159c-.854 0-1.21-.38-1.21-1.256V9.528h3.511V7.064h-3.511V3.582h-2.657v3.482h-2.325v2.464h2.159v6.229c0 2.63 1.589 3.719 4.009 3.719m9.693.019c2.586 0 4.864-1.279 5.67-3.86l-2.562-.616c-.451 1.373-1.755 2.084-3.131 2.084-2.041 0-3.393-1.326-3.417-3.41h9.419v-.782c0-3.695-2.301-6.443-6.097-6.443-3.346 0-6.216 2.63-6.216 6.537 0 3.79 2.538 6.49 6.334 6.49m-3.416-7.84c.166-1.492 1.518-2.747 3.298-2.747 1.708 0 3.108 1.066 3.25 2.747h-6.548"/><path fill="#19191C" fill-rule="evenodd" d="M108.916 19.476h-2.8V9.528h-2.182V7.064h4.982z" clip-rule="evenodd"/><path fill="#19191C" d="M107.309 5.342c1.02 0 1.779-.758 1.779-1.753 0-.971-.759-1.73-1.779-1.73-1.021 0-1.78.759-1.78 1.73 0 .995.759 1.753 1.78 1.753"/><path fill="#FD366E" d="M24.443 16.432v5.478H10.752c-3.989 0-7.472-2.203-9.335-5.478A11.041 11.041 0 0 1 0 11.695v-1.48a10.97 10.97 0 0 1 .381-2.247C1.661 3.368 5.82 0 10.751 0c4.934 0 9.092 3.37 10.371 7.967h-5.854c-.96-1.499-2.624-2.49-4.516-2.49s-3.555.991-4.516 2.49a5.47 5.47 0 0 0-.67 1.494 5.562 5.562 0 0 0-.202 1.494 5.5 5.5 0 0 0 1.69 3.983 5.32 5.32 0 0 0 3.698 1.494h13.69"/><path fill="#FD366E" d="M24.443 9.46v5.478h-9.994a5.5 5.5 0 0 0 1.691-3.983 5.56 5.56 0 0 0-.203-1.494h8.506"/></svg>


View File

@@ -1,65 +0,0 @@
import verifyCredentials from './verify-credentials.js';
import isStillVerified from './is-still-verified.js';
export default {
fields: [
{
key: 'screenName',
label: 'Screen Name',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description:
'Screen name of your connection to be used on Automatisch UI.',
clickToCopy: false,
},
{
key: 'projectId',
label: 'Project ID',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'Project ID of your Appwrite project.',
clickToCopy: false,
},
{
key: 'apiKey',
label: 'API Key',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'API key of your Appwrite project.',
clickToCopy: false,
},
{
key: 'instanceUrl',
label: 'Appwrite instance URL',
type: 'string',
required: false,
readOnly: false,
placeholder: '',
description: '',
clickToCopy: true,
},
{
key: 'host',
label: 'Host Name',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'Host name of your Appwrite project.',
clickToCopy: false,
},
],
verifyCredentials,
isStillVerified,
};

View File

@@ -1,8 +0,0 @@
import verifyCredentials from './verify-credentials.js';
const isStillVerified = async ($) => {
await verifyCredentials($);
return true;
};
export default isStillVerified;

View File

@@ -1,5 +0,0 @@
const verifyCredentials = async ($) => {
await $.http.get('/v1/users');
};
export default verifyCredentials;

View File

@@ -1,16 +0,0 @@
const addAuthHeader = ($, requestConfig) => {
requestConfig.headers['Content-Type'] = 'application/json';
if ($.auth.data?.apiKey && $.auth.data?.projectId) {
requestConfig.headers['X-Appwrite-Project'] = $.auth.data.projectId;
requestConfig.headers['X-Appwrite-Key'] = $.auth.data.apiKey;
}
if ($.auth.data?.host) {
requestConfig.headers['Host'] = $.auth.data.host;
}
return requestConfig;
};
export default addAuthHeader;

View File

@@ -1,13 +0,0 @@
const setBaseUrl = ($, requestConfig) => {
const instanceUrl = $.auth.data.instanceUrl;
if (instanceUrl) {
requestConfig.baseURL = instanceUrl;
} else if ($.app.apiBaseUrl) {
requestConfig.baseURL = $.app.apiBaseUrl;
}
return requestConfig;
};
export default setBaseUrl;
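
A brief sketch of the fallback behaviour, with made-up values for the context object:

import setBaseUrl from './set-base-url.js';

// A self-hosted instance URL wins when the connection provides one...
const selfHosted = setBaseUrl(
  {
    auth: { data: { instanceUrl: 'https://appwrite.example.com' } },
    app: { apiBaseUrl: 'https://cloud.appwrite.io' },
  },
  {}
);
console.log(selfHosted.baseURL); // 'https://appwrite.example.com'

// ...otherwise the app's default apiBaseUrl is used.
const hosted = setBaseUrl(
  { auth: { data: {} }, app: { apiBaseUrl: 'https://cloud.appwrite.io' } },
  {}
);
console.log(hosted.baseURL); // 'https://cloud.appwrite.io'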

View File

@@ -1,4 +0,0 @@
import listCollections from './list-collections/index.js';
import listDatabases from './list-databases/index.js';
export default [listCollections, listDatabases];

View File

@@ -1,44 +0,0 @@
export default {
name: 'List collections',
key: 'listCollections',
async run($) {
const collections = {
data: [],
};
const databaseId = $.step.parameters.databaseId;
if (!databaseId) {
return collections;
}
const params = {
queries: [
JSON.stringify({
method: 'orderAsc',
attribute: 'name',
}),
JSON.stringify({
method: 'limit',
values: [100],
}),
],
};
const { data } = await $.http.get(
`/v1/databases/${databaseId}/collections`,
{ params }
);
if (data?.collections) {
for (const collection of data.collections) {
collections.data.push({
value: collection.$id,
name: collection.name,
});
}
}
return collections;
},
};

View File

@@ -1,36 +0,0 @@
export default {
name: 'List databases',
key: 'listDatabases',
async run($) {
const databases = {
data: [],
};
const params = {
queries: [
JSON.stringify({
method: 'orderAsc',
attribute: 'name',
}),
JSON.stringify({
method: 'limit',
values: [100],
}),
],
};
const { data } = await $.http.get('/v1/databases', { params });
if (data?.databases) {
for (const database of data.databases) {
databases.data.push({
value: database.$id,
name: database.name,
});
}
}
return databases;
},
};
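
Both dynamic-data helpers resolve to the `{ data: [{ value, name }] }` shape the UI dropdowns consume; a rough illustration of such a result, with invented IDs and names:

// Illustrative output only -- the IDs and names are placeholders.
const databases = {
  data: [
    { value: '65f1c0ffee000000000a', name: 'Production' },
    { value: '65f1c0ffee000000000b', name: 'Staging' },
  ],
};
console.log(databases.data.map((item) => item.name)); // [ 'Production', 'Staging' ]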

View File

@@ -1,21 +0,0 @@
import defineApp from '../../helpers/define-app.js';
import addAuthHeader from './common/add-auth-header.js';
import setBaseUrl from './common/set-base-url.js';
import auth from './auth/index.js';
import triggers from './triggers/index.js';
import dynamicData from './dynamic-data/index.js';
export default defineApp({
name: 'Appwrite',
key: 'appwrite',
baseUrl: 'https://appwrite.io',
apiBaseUrl: 'https://cloud.appwrite.io',
iconUrl: '{BASE_URL}/apps/appwrite/assets/favicon.svg',
authDocUrl: '{DOCS_URL}/apps/appwrite/connection',
primaryColor: 'FD366E',
supportsConnections: true,
beforeRequest: [setBaseUrl, addAuthHeader],
auth,
triggers,
dynamicData,
});

View File

@@ -1,3 +0,0 @@
import newDocuments from './new-documents/index.js';
export default [newDocuments];

View File

@@ -1,104 +0,0 @@
import defineTrigger from '../../../../helpers/define-trigger.js';
export default defineTrigger({
name: 'New documents',
key: 'newDocuments',
pollInterval: 15,
description: 'Triggers when a new document is created.',
arguments: [
{
label: 'Database',
key: 'databaseId',
type: 'dropdown',
required: true,
description: '',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listDatabases',
},
],
},
},
{
label: 'Collection',
key: 'collectionId',
type: 'dropdown',
required: true,
dependsOn: ['parameters.databaseId'],
description: '',
variables: true,
source: {
type: 'query',
name: 'getDynamicData',
arguments: [
{
name: 'key',
value: 'listCollections',
},
{
name: 'parameters.databaseId',
value: '{parameters.databaseId}',
},
],
},
},
],
async run($) {
const { databaseId, collectionId } = $.step.parameters;
const limit = 1;
let lastDocumentId = undefined;
let offset = 0;
let documentCount = 0;
do {
const params = {
queries: [
JSON.stringify({
method: 'orderDesc',
attribute: '$createdAt',
}),
JSON.stringify({
method: 'limit',
values: [limit],
}),
// An invalid cursor shouldn't be sent.
lastDocumentId &&
JSON.stringify({
method: 'cursorAfter',
values: [lastDocumentId],
}),
].filter(Boolean),
};
const { data } = await $.http.get(
`/v1/databases/${databaseId}/collections/${collectionId}/documents`,
{ params }
);
      const documents = data?.documents;
      documentCount = documents?.length;
      if (!documentCount) {
        return;
      }
      offset = offset + limit;
      lastDocumentId = documents[documentCount - 1]?.$id;
for (const document of documents) {
$.pushTriggerItem({
raw: document,
meta: {
internalId: document.$id,
},
});
}
} while (documentCount === limit);
},
});
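
The loop above pages through documents with Appwrite's `cursorAfter` query; a small sketch, extracted from the trigger for illustration, of how the `queries` array changes between the first and later pages (the document ID is a placeholder):

// On the first page lastDocumentId is undefined, so the falsy cursor entry
// is removed by .filter(Boolean); on later pages it is kept.
const buildQueries = (limit, lastDocumentId) =>
  [
    JSON.stringify({ method: 'orderDesc', attribute: '$createdAt' }),
    JSON.stringify({ method: 'limit', values: [limit] }),
    lastDocumentId &&
      JSON.stringify({ method: 'cursorAfter', values: [lastDocumentId] }),
  ].filter(Boolean);

console.log(buildQueries(1, undefined).length); // 2 queries on the first page
console.log(buildQueries(1, 'doc_abc123').length); // 3 queries once a cursor exists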

View File

@@ -1,3 +0,0 @@
import sendPrompt from './send-prompt/index.js';
export default [sendPrompt];

View File

@@ -1,97 +0,0 @@
import defineAction from '../../../../helpers/define-action.js';
const castFloatOrUndefined = (value) => {
return value === '' ? undefined : parseFloat(value);
};
export default defineAction({
name: 'Send prompt',
key: 'sendPrompt',
description: 'Creates a completion for the provided prompt and parameters.',
arguments: [
{
label: 'Prompt',
key: 'prompt',
type: 'string',
required: true,
variables: true,
description: 'The text to analyze.',
},
{
label: 'Temperature',
key: 'temperature',
type: 'string',
required: false,
variables: true,
description:
        'What sampling temperature to use, between 0 and 2. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer. We generally recommend altering this or Top P but not both.',
},
{
label: 'Maximum tokens',
key: 'maxTokens',
type: 'string',
required: false,
variables: true,
description:
'The maximum number of tokens to generate in the completion.',
},
{
label: 'Stop Sequence',
key: 'stopSequence',
type: 'string',
required: false,
variables: true,
description:
'Single stop sequence where the API will stop generating further tokens. The returned text will not contain the stop sequence.',
},
{
label: 'Top P',
key: 'topP',
type: 'string',
required: false,
variables: true,
description:
'An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.',
},
{
label: 'Frequency Penalty',
key: 'frequencyPenalty',
type: 'string',
required: false,
variables: true,
description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`,
},
{
label: 'Presence Penalty',
key: 'presencePenalty',
type: 'string',
required: false,
variables: true,
description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.`,
},
],
async run($) {
const payload = {
model: $.step.parameters.model,
prompt: $.step.parameters.prompt,
temperature: castFloatOrUndefined($.step.parameters.temperature),
max_tokens: castFloatOrUndefined($.step.parameters.maxTokens),
stop: $.step.parameters.stopSequence || null,
top_p: castFloatOrUndefined($.step.parameters.topP),
frequency_penalty: castFloatOrUndefined(
$.step.parameters.frequencyPenalty
),
presence_penalty: castFloatOrUndefined($.step.parameters.presencePenalty),
};
const { data } = await $.http.post(
`/deployments/${$.auth.data.deploymentId}/completions`,
payload
);
$.setActionItem({
raw: data,
});
},
});
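
One detail worth spelling out: optional fields arrive as empty strings, and `castFloatOrUndefined` maps those to `undefined`, which `JSON.stringify` then drops from the serialized request body. A minimal illustration:

const castFloatOrUndefined = (value) =>
  value === '' ? undefined : parseFloat(value);

console.log(castFloatOrUndefined('0.7')); // 0.7
console.log(castFloatOrUndefined(''));    // undefined

// Undefined values disappear when the payload is serialized.
console.log(JSON.stringify({ temperature: castFloatOrUndefined('') }));    // '{}'
console.log(JSON.stringify({ temperature: castFloatOrUndefined('0.7') })); // '{"temperature":0.7}'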

View File

@@ -1,6 +0,0 @@
<svg width="256px" height="260px" viewBox="0 0 256 260" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
<title>OpenAI</title>
<g>
<path d="M239.183914,106.202783 C245.054304,88.5242096 243.02228,69.1733805 233.607599,53.0998864 C219.451678,28.4588021 190.999703,15.7836129 163.213007,21.739505 C147.554077,4.32145883 123.794909,-3.42398554 100.87901,1.41873898 C77.9631105,6.26146349 59.3690093,22.9572536 52.0959621,45.2214219 C33.8436494,48.9644867 18.0901721,60.392749 8.86672513,76.5818033 C-5.443491,101.182962 -2.19544431,132.215255 16.8986662,153.320094 C11.0060865,170.990656 13.0197283,190.343991 22.4238231,206.422991 C36.5975553,231.072344 65.0680342,243.746566 92.8695738,237.783372 C105.235639,251.708249 123.001113,259.630942 141.623968,259.52692 C170.105359,259.552169 195.337611,241.165718 204.037777,214.045661 C222.28734,210.296356 238.038489,198.869783 247.267014,182.68528 C261.404453,158.127515 258.142494,127.262775 239.183914,106.202783 L239.183914,106.202783 Z M141.623968,242.541207 C130.255682,242.559177 119.243876,238.574642 110.519381,231.286197 L112.054146,230.416496 L163.724595,200.590881 C166.340648,199.056444 167.954321,196.256818 167.970781,193.224005 L167.970781,120.373788 L189.815614,133.010026 C190.034132,133.121423 190.186235,133.330564 190.224885,133.572774 L190.224885,193.940229 C190.168603,220.758427 168.442166,242.484864 141.623968,242.541207 Z M37.1575749,197.93062 C31.456498,188.086359 29.4094818,176.546984 31.3766237,165.342426 L32.9113895,166.263285 L84.6329973,196.088901 C87.2389349,197.618207 90.4682717,197.618207 93.0742093,196.088901 L156.255402,159.663793 L156.255402,184.885111 C156.243557,185.149771 156.111725,185.394602 155.89729,185.550176 L103.561776,215.733903 C80.3054953,229.131632 50.5924954,221.165435 37.1575749,197.93062 Z M23.5493181,85.3811273 C29.2899861,75.4733097 38.3511911,67.9162648 49.1287482,64.0478825 L49.1287482,125.438515 C49.0891492,128.459425 50.6965386,131.262556 53.3237748,132.754232 L116.198014,169.025864 L94.3531808,181.662102 C94.1132325,181.789434 93.8257461,181.789434 93.5857979,181.662102 L41.3526015,151.529534 C18.1419426,138.076098 10.1817681,108.385562 23.5493181,85.125333 L23.5493181,85.3811273 Z M203.0146,127.075598 L139.935725,90.4458545 L161.7294,77.8607748 C161.969348,77.7334434 162.256834,77.7334434 162.496783,77.8607748 L214.729979,108.044502 C231.032329,117.451747 240.437294,135.426109 238.871504,154.182739 C237.305714,172.939368 225.050719,189.105572 207.414262,195.67963 L207.414262,134.288998 C207.322521,131.276867 205.650697,128.535853 203.0146,127.075598 Z M224.757116,94.3850867 L223.22235,93.4642272 L171.60306,63.3828173 C168.981293,61.8443751 165.732456,61.8443751 163.110689,63.3828173 L99.9806554,99.8079259 L99.9806554,74.5866077 C99.9533004,74.3254088 100.071095,74.0701869 100.287609,73.9215426 L152.520805,43.7889738 C168.863098,34.3743518 189.174256,35.2529043 204.642579,46.0434841 C220.110903,56.8340638 227.949269,75.5923959 224.757116,94.1804513 L224.757116,94.3850867 Z M88.0606409,139.097931 L66.2158076,126.512851 C65.9950399,126.379091 65.8450965,126.154176 65.8065367,125.898945 L65.8065367,65.684966 C65.8314495,46.8285367 76.7500605,29.6846032 93.8270852,21.6883055 C110.90411,13.6920079 131.063833,16.2835462 145.5632,28.338998 L144.028434,29.2086986 L92.3579852,59.0343142 C89.7419327,60.5687513 88.1282597,63.3683767 88.1117998,66.4011901 L88.0606409,139.097931 Z M99.9294965,113.5185 L128.06687,97.3011417 L156.255402,113.5185 L156.255402,145.953218 L128.169187,162.170577 L99.9806554,145.953218 L99.9294965,113.5185 Z" fill="#000000"></path>
</g>
</svg>


View File

@@ -1,58 +0,0 @@
import verifyCredentials from './verify-credentials.js';
import isStillVerified from './is-still-verified.js';
export default {
fields: [
{
key: 'screenName',
label: 'Screen Name',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description:
'Screen name of your connection to be used on Automatisch UI.',
clickToCopy: false,
},
{
key: 'yourResourceName',
label: 'Your Resource Name',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'The name of your Azure OpenAI Resource.',
docUrl: 'https://automatisch.io/docs/azure-openai#your-resource-name',
clickToCopy: false,
},
{
key: 'deploymentId',
label: 'Deployment ID',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'The deployment name you chose when you deployed the model.',
docUrl: 'https://automatisch.io/docs/azure-openai#deployment-id',
clickToCopy: false,
},
{
key: 'apiKey',
label: 'API Key',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: 'Azure OpenAI API key of your account.',
docUrl: 'https://automatisch.io/docs/azure-openai#api-key',
clickToCopy: false,
},
],
verifyCredentials,
isStillVerified,
};

View File

@@ -1,6 +0,0 @@
const isStillVerified = async ($) => {
await $.http.get('/fine_tuning/jobs');
return true;
};
export default isStillVerified;

View File

@@ -1,5 +0,0 @@
const verifyCredentials = async ($) => {
await $.http.get('/fine_tuning/jobs');
};
export default verifyCredentials;

View File

@@ -1,13 +0,0 @@
const addAuthHeader = ($, requestConfig) => {
if ($.auth.data?.apiKey) {
requestConfig.headers['api-key'] = $.auth.data.apiKey;
}
requestConfig.params = {
'api-version': '2023-10-01-preview',
};
return requestConfig;
};
export default addAuthHeader;

View File

@@ -1,11 +0,0 @@
const setBaseUrl = ($, requestConfig) => {
const yourResourceName = $.auth.data.yourResourceName;
if (yourResourceName) {
requestConfig.baseURL = `https://${yourResourceName}.openai.azure.com/openai`;
}
return requestConfig;
};
export default setBaseUrl;
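
For illustration, the base URL this hook produces for a made-up resource name:

import setBaseUrl from './set-base-url.js';

// Hypothetical resource name; the real value comes from the connection form above.
const $ = { auth: { data: { yourResourceName: 'my-openai-resource' } } };
const requestConfig = setBaseUrl($, {});
console.log(requestConfig.baseURL);
// 'https://my-openai-resource.openai.azure.com/openai'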

View File

@@ -1,20 +0,0 @@
import defineApp from '../../helpers/define-app.js';
import setBaseUrl from './common/set-base-url.js';
import addAuthHeader from './common/add-auth-header.js';
import auth from './auth/index.js';
import actions from './actions/index.js';
export default defineApp({
name: 'Azure OpenAI',
key: 'azure-openai',
baseUrl:
'https://azure.microsoft.com/en-us/products/ai-services/openai-service',
apiBaseUrl: '',
iconUrl: '{BASE_URL}/apps/azure-openai/assets/favicon.svg',
authDocUrl: '{DOCS_URL}/apps/azure-openai/connection',
primaryColor: '000000',
supportsConnections: true,
beforeRequest: [setBaseUrl, addAuthHeader],
auth,
actions,
});

View File

@@ -1,4 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="-50 -50 430 390" fill="#1185fd" aria-hidden="true">
<path d="M180 141.964C163.699 110.262 119.308 51.1817 78.0347 22.044C38.4971 -5.86834 23.414 -1.03207 13.526 3.43594C2.08093 8.60755 0 26.1785 0 36.5164C0 46.8542 5.66748 121.272 9.36416 133.694C21.5786 174.738 65.0603 188.607 105.104 184.156C107.151 183.852 109.227 183.572 111.329 183.312C109.267 183.642 107.19 183.924 105.104 184.156C46.4204 192.847 -5.69621 214.233 62.6582 290.33C137.848 368.18 165.705 273.637 180 225.702C194.295 273.637 210.76 364.771 295.995 290.33C360 225.702 313.58 192.85 254.896 184.158C252.81 183.926 250.733 183.645 248.671 183.315C250.773 183.574 252.849 183.855 254.896 184.158C294.94 188.61 338.421 174.74 350.636 133.697C354.333 121.275 360 46.8568 360 36.519C360 26.1811 357.919 8.61012 346.474 3.43851C336.586 -1.02949 321.503 -5.86576 281.965 22.0466C240.692 51.1843 196.301 110.262 180 141.964Z">
</path>
</svg>


View File

@@ -1,34 +0,0 @@
import verifyCredentials from './verify-credentials.js';
import isStillVerified from './is-still-verified.js';
import refreshToken from './refresh-token.js';
export default {
fields: [
{
key: 'handle',
label: 'Your Bluesky Handle',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: '',
clickToCopy: false,
},
{
key: 'password',
label: 'Your Bluesky Password',
type: 'string',
required: true,
readOnly: false,
value: null,
placeholder: null,
description: '',
clickToCopy: false,
},
],
verifyCredentials,
isStillVerified,
refreshToken,
};

View File

@@ -1,8 +0,0 @@
import getCurrentUser from '../common/get-current-user.js';
const isStillVerified = async ($) => {
const currentUser = await getCurrentUser($);
return !!currentUser.did;
};
export default isStillVerified;

View File

@@ -1,24 +0,0 @@
const refreshToken = async ($) => {
const { refreshJwt } = $.auth.data;
const { data } = await $.http.post(
'/com.atproto.server.refreshSession',
null,
{
headers: {
Authorization: `Bearer ${refreshJwt}`,
},
additionalProperties: {
skipAddingAuthHeader: true,
},
}
);
await $.auth.set({
accessJwt: data.accessJwt,
refreshJwt: data.refreshJwt,
did: data.did,
});
};
export default refreshToken;

View File

@@ -1,20 +0,0 @@
const verifyCredentials = async ($) => {
const handle = $.auth.data.handle;
const password = $.auth.data.password;
const body = {
identifier: handle,
password,
};
const { data } = await $.http.post('/com.atproto.server.createSession', body);
await $.auth.set({
accessJwt: data.accessJwt,
refreshJwt: data.refreshJwt,
did: data.did,
screenName: data.handle,
});
};
export default verifyCredentials;

View File

@@ -1,12 +0,0 @@
const addAuthHeader = ($, requestConfig) => {
if (requestConfig.additionalProperties?.skipAddingAuthHeader)
return requestConfig;
if ($.auth.data?.accessJwt) {
requestConfig.headers.Authorization = `Bearer ${$.auth.data.accessJwt}`;
}
return requestConfig;
};
export default addAuthHeader;
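
The `skipAddingAuthHeader` flag exists so the refresh call above can send its own `Bearer <refreshJwt>` header without this hook overwriting it with the (possibly expired) access token. A short sketch of both paths, with placeholder tokens:

import addAuthHeader from './add-auth-header.js';

const $ = { auth: { data: { accessJwt: 'stale-access-jwt' } } };

// Regular request: the stored access token is attached.
const normal = addAuthHeader($, { headers: {} });
console.log(normal.headers.Authorization); // 'Bearer stale-access-jwt'

// Refresh request: refresh-token.js already set the refresh token and the flag,
// so the hook returns the config untouched.
const refresh = addAuthHeader($, {
  headers: { Authorization: 'Bearer example-refresh-jwt' },
  additionalProperties: { skipAddingAuthHeader: true },
});
console.log(refresh.headers.Authorization); // 'Bearer example-refresh-jwt'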

View File

@@ -1,15 +0,0 @@
const getCurrentUser = async ($) => {
const handle = $.auth.data.handle;
const params = {
actor: handle,
};
const { data: currentUser } = await $.http.get('/app.bsky.actor.getProfile', {
params,
});
return currentUser;
};
export default getCurrentUser;

View File

@@ -1,16 +0,0 @@
import defineApp from '../../helpers/define-app.js';
import addAuthHeader from './common/add-auth-header.js';
import auth from './auth/index.js';
export default defineApp({
name: 'Bluesky',
key: 'bluesky',
iconUrl: '{BASE_URL}/apps/bluesky/assets/favicon.svg',
authDocUrl: '{DOCS_URL}/apps/bluesky/connection',
supportsConnections: true,
baseUrl: 'https://bluesky.app',
apiBaseUrl: 'https://bsky.social/xrpc',
primaryColor: '1185fd',
beforeRequest: [addAuthHeader],
auth,
});

View File

@@ -1,35 +0,0 @@
import defineAction from '../../../../helpers/define-action.js';
export default defineAction({
name: 'Add Template',
key: 'addTemplate',
  description: 'Creates a new template from the provided XML/HTML content.',
arguments: [
{
      label: 'Template Data',
key: 'templateData',
type: 'string',
required: true,
variables: true,
description: 'The content of your new Template in XML/HTML format.',
},
],
async run($) {
const templateData = $.step.parameters.templateData;
const base64Data = Buffer.from(templateData).toString('base64');
const dataURI = `data:application/xml;base64,${base64Data}`;
const body = JSON.stringify({ template: dataURI });
const response = await $.http.post('/template', body, {
headers: {
'Content-Type': 'application/json',
},
});
$.setActionItem({ raw: response.data });
},
});
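
As a rough illustration of the request body built above, with a throwaway template stub in place of real XML/HTML:

const templateData = '<p>Hello template</p>';
const base64Data = Buffer.from(templateData).toString('base64');
const dataURI = `data:application/xml;base64,${base64Data}`;
console.log(JSON.stringify({ template: dataURI }));
// {"template":"data:application/xml;base64,PHA+SGVsbG8gdGVtcGxhdGU8L3A+"}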

Some files were not shown because too many files have changed in this diff.