Compare commits

...

116 Commits

Author SHA1 Message Date
8f9041301c docker-compose.yml updated
All checks were successful
release-tag / release-image (push) Successful in 1m20s
2024-12-12 20:14:47 +00:00
3573fc8895 .gitea/workflows/registry.yml updated
All checks were successful
release-tag / release-image (push) Successful in 1m24s
2024-12-12 20:10:07 +00:00
253d4ab6bd .gitea/workflows/registry.yml updated
All checks were successful
release-tag / release-image (push) Successful in 1m25s
2024-12-12 20:04:42 +00:00
1c9d30b688 .gitea/workflows/registry.yml updated
Some checks failed
release-tag / release-image (push) Failing after 16s
2024-12-12 20:01:14 +00:00
feff219994 .gitea/workflows/registry.yml updated
Some checks failed
release-tag / release-image (push) Failing after 14s
2024-12-12 19:57:41 +00:00
6bc2fe0f46 .gitea/workflows/registry.yml added
Some checks failed
release-tag / release-image (push) Failing after 14s
2024-12-12 19:52:35 +00:00
Ömer Faruk Aydın
3d62fabaac Merge pull request #2246 from automatisch/restructure-queues
refactor: Abstract queue generation and configuration
2024-12-12 14:57:03 +01:00
Faruk AYDIN
e41a331ad7 refactor: Abstract queue generation and configuration 2024-12-12 14:51:13 +01:00
Ömer Faruk Aydın
978ceaadb6 Merge pull request #2238 from automatisch/workers-refactoring
refactor: Workers and queues and eliminate redundant process listeners
2024-12-11 14:57:51 +03:00
Faruk AYDIN
770b07179f refactor: Workers and queues and eliminate redundant process listeners 2024-12-11 12:53:24 +01:00
Ömer Faruk Aydın
6d15167ad9 Merge pull request #2243 from automatisch/fix-tests
Fix failing API endpoint tests
2024-12-11 14:52:34 +03:00
Faruk AYDIN
39cba6bc74 test: Fix tests for create dynamic data and fields endpoints 2024-12-11 12:45:50 +01:00
Faruk AYDIN
9558e66abf test: Fix tests for get apps connection 2024-12-11 12:27:15 +01:00
Ömer Faruk Aydın
ff7908955e Merge pull request #2239 from automatisch/fix-flaky-user-tests
test(user): use relative future dates
2024-12-11 14:06:33 +03:00
Ali BARIN
26b095b835 test(user): use relative future dates 2024-12-05 11:46:38 +00:00
Ömer Faruk Aydın
feba2a32f9 Merge pull request #2235 from automatisch/rearrange-user-tests
refactor: Reorder user model tests
2024-11-28 15:57:10 +03:00
Ömer Faruk Aydın
5090ece9b6 Merge pull request #2234 from automatisch/saml-auth-provider-tests
SamlAuthProvider model tests
2024-11-28 15:56:59 +03:00
Faruk AYDIN
221b19586e refactor: Reorder user model tests 2024-11-28 15:39:12 +03:00
Faruk AYDIN
3346c14255 refactor: Remove redundant test case after removing transaction 2024-11-28 15:30:32 +03:00
Faruk AYDIN
6e97e023c9 test: Add tests for updateRoleMappings method for saml auth provider 2024-11-27 17:19:24 +03:00
Faruk AYDIN
b26e2ecf2e test: Add tests for terminateRemoteSession method 2024-11-27 17:04:37 +03:00
Faruk AYDIN
d896238f23 test: Implement generateLogoutRequestBody test for saml auth provider model 2024-11-27 16:59:49 +03:00
Faruk AYDIN
d2c8f5a75c test: Add test for config method of saml auth providers 2024-11-27 16:47:50 +03:00
Ali BARIN
ce430d238c Merge pull request #2231 from automatisch/aut-1373
chore(migrations): rename saml_auth_providers_role_mappings table as role_mappings
2024-11-27 12:25:51 +01:00
Ali BARIN
ee397441ed refactor: update RoleMapping references 2024-11-25 15:13:14 +00:00
Ali BARIN
ba82d986c1 chore(migrations): rename saml_auth_providers_role_mappings table as role_mappings 2024-11-25 15:03:26 +00:00
Ömer Faruk Aydın
2361cb521e Merge pull request #2224 from automatisch/test-coverage
chore: add test coverage
2024-11-25 14:49:46 +03:00
Ömer Faruk Aydın
05f8d95281 Merge pull request #2220 from automatisch/aut-1350-after-hooks
test(user): write tests for $afterInsert and $afterFind
2024-11-25 14:48:58 +03:00
Ali BARIN
6c60b1c263 test(user): write tests for $afterInsert and $afterFind 2024-11-25 14:45:18 +03:00
Ömer Faruk Aydın
0c32a0693c Merge pull request #2219 from automatisch/aut-1350-beforeHooks
test(user): write tests for $beforeInsert and $beforeUpdate
2024-11-25 14:42:05 +03:00
Ali BARIN
807faa3c93 test(user): write tests for $beforeInsert and $beforeUpdate 2024-11-25 14:38:32 +03:00
Ömer Faruk Aydın
fb53e37f7a Merge pull request #2217 from automatisch/aut-1350-ability-and-can
test(user): write tests for ability and can
2024-11-25 14:23:57 +03:00
Ali BARIN
4ffdf98e16 test(user): write tests for ability and can 2024-11-25 14:17:44 +03:00
Ömer Faruk Aydın
b8da721e39 Merge pull request #2216 from automatisch/aut-1350-registerUser
test(user): write tests for registerUser
2024-11-25 13:01:55 +03:00
Ali BARIN
db8b98ca16 test(user): write tests for registerUser 2024-11-25 12:56:17 +03:00
Ömer Faruk Aydın
01b8c600fe Merge pull request #2229 from automatisch/playwright-main
chore: Do not run e2e tests in pull requests
2024-11-25 12:54:46 +03:00
Ömer Faruk Aydın
69bd5549a2 Merge pull request #2215 from automatisch/aut-1350-createAdmin
test(user): write test for createAdmin
2024-11-25 12:53:05 +03:00
Faruk AYDIN
bc631e3931 chore: Do not run e2e tests in pull requests 2024-11-25 12:50:59 +03:00
Ali BARIN
8ca4bc5a33 test(user): write test for createAdmin 2024-11-25 12:43:45 +03:00
Ömer Faruk Aydın
58a569afb0 Merge pull request #2214 from automatisch/aut-1350-getInvoices
test(user): write tests for getInvoices
2024-11-25 12:41:10 +03:00
Ali BARIN
db718d6fc3 test(user): write tests for getInvoices 2024-11-25 12:32:07 +03:00
Ömer Faruk Aydın
ca9cb8b07b Merge pull request #2213 from automatisch/aut-1350-getPlanAndUsage
test(user): write tests for getPlanAndUsage
2024-11-25 12:27:55 +03:00
Ali BARIN
ef14586412 test(user): write tests for getPlanAndUsage 2024-11-25 12:19:40 +03:00
Ali BARIN
09335fcd79 chore: add test coverage 2024-11-25 09:01:05 +00:00
Ömer Faruk Aydın
15f1fca6fe Merge pull request #2205 from automatisch/aut-1350-withinLimits
test(user): write tests for withinLimits
2024-11-22 15:40:21 +03:00
Ali BARIN
a570b8eb7a test(user): write tests for withinLimits 2024-11-22 15:31:19 +03:00
Ömer Faruk Aydın
02e2735b7a Merge pull request #2204 from automatisch/aut-1350-hasActiveSubscription
test(user): write tests for hasActiveSubscription
2024-11-22 15:29:10 +03:00
Ali BARIN
54fa347142 test(user): write tests for hasActiveSubscription 2024-11-22 15:21:10 +03:00
Ömer Faruk Aydın
0c752beace Merge pull request #2203 from automatisch/aut-1350-inTrial
test(user): write tests for inTrial
2024-11-22 15:19:00 +03:00
Ali BARIN
c14f808d29 test(user): write tests for inTrial 2024-11-22 15:10:13 +03:00
Ali BARIN
ad71173671 Merge pull request #2223 from automatisch/AUT-1365
feat: introduce inline error messages for ForgotPassword and ResetPasswordForm
2024-11-22 11:36:56 +01:00
Ömer Faruk Aydın
204325ef44 Merge pull request #2200 from automatisch/aut-1350-isAllowedToRunFlows
test(user): write tests for isAllowedToRunFlows
2024-11-22 10:32:03 +03:00
Ali BARIN
7ce6117659 test(user): write tests for isAllowedToRunFlows 2024-11-22 10:02:05 +03:00
Ömer Faruk Aydın
823a2c8b73 Merge pull request #2199 from automatisch/aut-1350-startTrialPeriod
test(user): write test for startTrialPeriod
2024-11-21 17:52:42 +03:00
Ali BARIN
741866e742 test(user): use luxon DateTime with zone over Date 2024-11-21 17:42:42 +03:00
Ali BARIN
41622678b0 test(user): write test for startTrialPeriod 2024-11-21 17:42:40 +03:00
Ömer Faruk Aydın
449b953401 Merge pull request #2198 from automatisch/aut-1350-generateHash
test(user): write tests for generateHash
2024-11-21 17:41:06 +03:00
kasia.oczkowska
551548400f refactor: use non-asyc version of mutate 2024-11-21 14:34:11 +00:00
Ali BARIN
6345ce5195 test(user): write tests for generateHash 2024-11-21 17:29:13 +03:00
Ömer Faruk Aydın
95651f6163 Merge pull request #2197 from automatisch/aut-1350-isInvitationTokenValid
test(user): write tests for isInvitationTokenValid
2024-11-21 17:23:10 +03:00
Ali BARIN
b02c1545b7 test(user): use luxon DateTime with zone over Date 2024-11-21 17:15:00 +03:00
Ali BARIN
2deaab9b24 test(user): write tests for isInvitationTokenValid 2024-11-21 17:14:56 +03:00
Ömer Faruk Aydın
f0d4853533 Merge pull request #2188 from automatisch/aut-1350-sendInvitationEmail
test(user): write test for sendInvitationEmail
2024-11-21 16:46:35 +03:00
Ali BARIN
af81ae812f Merge pull request #2222 from automatisch/fix-devcontainer
fix(devcontainer): remove yarn workspaces
2024-11-21 14:44:40 +01:00
Ali BARIN
bae76064e5 test(user): use luxon DateTime with zone over Date 2024-11-21 13:33:30 +00:00
Ali BARIN
07d9198cc8 test(user): write test for sendInvitationEmail 2024-11-21 13:31:41 +00:00
Ömer Faruk Aydın
a2e07ea2f7 Merge pull request #2187 from automatisch/aut-1350-isResetPasswordTokenValid
test(user): write test cases for isResetPasswordTokenValid
2024-11-21 16:29:50 +03:00
Ali BARIN
864c762fe2 test(user): use luxon DateTime with zone over Date 2024-11-21 12:49:33 +00:00
Ali BARIN
167bb4e8a0 test(user): write test cases for isResetPasswordTokenValid 2024-11-21 12:34:08 +00:00
Ömer Faruk Aydın
4cf64ede74 Merge pull request #2185 from automatisch/aut-1350-sendResetPasswordEmail
test(user): write test for sendResetPasswordEmail
2024-11-21 14:57:42 +03:00
Ali BARIN
bb309fea6f test(user): write test for sendResetPasswordEmail 2024-11-21 14:47:18 +03:00
kasia.oczkowska
90a7b4c1c0 feat: introduce inline error messages for ForgotPassword and ResetPasswordForm 2024-11-21 11:43:00 +00:00
Ömer Faruk Aydın
1133362028 Merge pull request #2184 from automatisch/aut-1350-softRemove
test(user): write test for softRemove
2024-11-21 14:34:53 +03:00
Ali BARIN
eb9226bd4a test(user): write test for softRemove 2024-11-21 14:23:29 +03:00
Ömer Faruk Aydın
a9abdcc37e Merge pull request #2212 from automatisch/remove-lerna
Remove lerna and yarn workspaces
2024-11-21 13:51:57 +03:00
Ali BARIN
6ace93bdbf fix(devcontainer): remove yarn workspaces 2024-11-21 10:17:43 +00:00
Faruk AYDIN
b89197939a docs: Adjust development setup page for new package setup 2024-11-21 12:40:31 +03:00
Faruk AYDIN
da788106af feat: Add separate gitignore file for docs package 2024-11-21 12:40:25 +03:00
Faruk AYDIN
49e92e6f1d fix: Adjust build web CI job to use cache dependency path 2024-11-19 20:50:15 +03:00
Faruk AYDIN
a6c3276104 chore: Remove root package.json file 2024-11-19 20:46:13 +03:00
Faruk AYDIN
6388bfc714 docs: Adjust repository structure page to reflect individual packages 2024-11-19 20:37:08 +03:00
Faruk AYDIN
bebc3b181d chore: Remove lerna debug file from .gitignore 2024-11-19 20:36:48 +03:00
Faruk AYDIN
5a6d561c1a refactor: Adjust dockerfile to work without lerna 2024-11-19 18:31:39 +03:00
Faruk AYDIN
5ba575fdfd fix: Add knex individually to e2e-tests package 2024-11-19 16:04:21 +03:00
Faruk AYDIN
dcf8bbd804 chore: Add axios package to e2e-tests package 2024-11-19 15:56:10 +03:00
Faruk AYDIN
ff93ffd0b1 fix: Adjust e2e-tests CI job working directories 2024-11-19 15:49:25 +03:00
Faruk AYDIN
395c09df92 fix: Use named ClickAwayListener import statement for web 2024-11-19 15:41:59 +03:00
Faruk AYDIN
4c903cd08b fix: Adjust backend start server CI job working directory 2024-11-19 15:36:01 +03:00
Faruk AYDIN
64cb98717c fix: Adjust backend start worker CI job working directory 2024-11-19 15:35:21 +03:00
Faruk AYDIN
b0e4ce54fb fix: Add fakerjs individually to backend package 2024-11-19 15:29:36 +03:00
Faruk AYDIN
d67a37002f fix: Use working directory for backend tests CI job 2024-11-19 15:24:40 +03:00
Faruk AYDIN
965ff8bc3f fix: Add axios individually to web package 2024-11-19 15:20:35 +03:00
Faruk AYDIN
400a495ad2 chore: Remove cache dependency path for build web CI job 2024-11-19 15:14:06 +03:00
Faruk AYDIN
09d0822a8d fix: Adjust working directory for build-web CI job 2024-11-19 15:09:46 +03:00
Faruk AYDIN
7016c20ccc fix: Change working directory for linter CI job 2024-11-19 15:03:54 +03:00
Faruk AYDIN
df54895805 fix: Linter command to work without workspaces 2024-11-19 15:00:23 +03:00
Faruk AYDIN
62d5e6fe51 chore: Remove root package dev dependencies 2024-11-19 14:57:39 +03:00
Faruk AYDIN
4615a0b7ea chore: Remove workspaces from root package.json file 2024-11-19 14:51:53 +03:00
Faruk AYDIN
280d603b14 chore: Remove lerna.json file 2024-11-18 20:21:20 +03:00
Faruk AYDIN
36271f0749 refactor: Explicitly state different packages for lerna 2024-11-18 20:16:20 +03:00
Ali BARIN
579638f932 Merge pull request #2210 from automatisch/release/v0.14.0
Update version to 0.14.0
2024-11-18 17:32:18 +01:00
Faruk AYDIN
48871c82a6 Update version to 0.14.0 2024-11-18 15:43:23 +00:00
Ali BARIN
14056c42ef Merge pull request #2211 from automatisch/upgrade-lerna
chore(deps): upgrade lerna with version 8
2024-11-18 16:42:08 +01:00
Ali BARIN
90fe1576de chore(lerna): remove bootstrap command usages 2024-11-18 15:00:49 +00:00
Ali BARIN
d61cf13985 chore(deps): upgrade lerna with version 8 2024-11-18 14:47:54 +00:00
Ali BARIN
dfe6dfd0c6 Merge pull request #2196 from automatisch/AUT-1364
feat: inline login form error and unify Alert font weight
2024-11-18 11:08:58 +01:00
Ali BARIN
c138c7d0e9 Merge pull request #2190 from automatisch/AUT-1347
feat: persist pagination and search value on flows page
2024-11-18 11:07:34 +01:00
Ali BARIN
d542be947e Merge pull request #2186 from automatisch/test-saml-auth-provider
test: Implement tests for saml auth provider url methods
2024-11-15 14:45:46 +01:00
kasia.oczkowska
c76366e72e feat: introduce improvements 2024-11-15 10:37:18 +00:00
kasia.oczkowska
75abfda783 feat: inline login form error and unify Alert font weight 2024-11-14 14:10:07 +00:00
Ali BARIN
f3d8d7d4ad Merge pull request #2191 from automatisch/AUT-1280
feat: make Wordpress instanceUrl field required
2024-11-14 12:31:43 +01:00
Ömer Faruk Aydın
7255eccb22 Merge pull request #2183 from automatisch/aut-1350-updatePassword
test(user): write tests for updatePassword
2024-11-13 19:31:28 +01:00
Ali BARIN
a0decb70cc test(user): write tests for updatePassword 2024-11-13 19:22:04 +01:00
kasia.oczkowska
532f562495 feat: make Wordpress instanceUrl field required 2024-11-13 14:32:16 +00:00
kasia.oczkowska
27e58ae925 feat: persist pagination and search value on flows page 2024-11-13 13:44:05 +00:00
Faruk AYDIN
4867ffcb4b test: Implement tests for saml auth provider url methods 2024-11-12 13:12:00 +01:00
82 changed files with 20085 additions and 17835 deletions

View File

@@ -5,8 +5,11 @@ BACKEND_PORT=3000
 WEB_PORT=3001
 echo "Configuring backend environment variables..."
 cd packages/backend
 rm -rf .env
 echo "
 PORT=$BACKEND_PORT
 WEB_APP_URL=http://localhost:$WEB_PORT
@@ -21,23 +24,34 @@ WEBHOOK_SECRET_KEY=sample_webhook_secret_key
 APP_SECRET_KEY=sample_app_secret_key
 REDIS_HOST=redis
 SERVE_WEB_APP_SEPARATELY=true" >> .env
+echo "Installing backend dependencies..."
+yarn
 cd $CURRENT_DIR
 echo "Configuring web environment variables..."
 cd packages/web
 rm -rf .env
 echo "
 PORT=$WEB_PORT
 REACT_APP_BACKEND_URL=http://localhost:$BACKEND_PORT
 " >> .env
+echo "Installing web dependencies..."
+yarn
 cd $CURRENT_DIR
-echo "Installing and linking dependencies..."
-yarn
-yarn lerna bootstrap
 echo "Migrating database..."
 cd packages/backend
 yarn db:migrate
 yarn db:seed:user

View File

@@ -0,0 +1,52 @@
name: release-tag
on:
  push:
    branches:
      - 'main'

jobs:
  release-image:
    runs-on: ubuntu-latest
    env:
      DOCKER_ORG: groot
      DOCKER_LATEST: latest
      RUNNER_TOOL_CACHE: /toolcache
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker BuildX
        uses: docker/setup-buildx-action@v2
        with: # replace it with your local IP
          config-inline: |
            [registry."git.send.nrw"]
              http = true
              insecure = true
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          registry: git.send.nrw # replace it with your local IP
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Get Meta
        id: meta
        run: |
          echo REPO_NAME=$(echo ${GITHUB_REPOSITORY} | awk -F"/" '{print $2}') >> $GITHUB_OUTPUT
          echo REPO_VERSION=$(git describe --tags --always | sed 's/^v//') >> $GITHUB_OUTPUT
      - name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: ./docker
          file: ./docker/Dockerfile.compose
          entrypoint: ./docker/compose-entrypoint.sh
          platforms: |
            linux/amd64
          push: true
          tags: | # replace it with your local IP and tags
            git.send.nrw/${{ env.DOCKER_ORG }}/${{ steps.meta.outputs.REPO_NAME }}:${{ steps.meta.outputs.REPO_VERSION }}
            git.send.nrw/${{ env.DOCKER_ORG }}/${{ steps.meta.outputs.REPO_NAME }}:${{ env.DOCKER_LATEST }}

View File

@@ -41,8 +41,11 @@ jobs:
         with:
           node-version: 18
       - name: Install dependencies
-        run: cd packages/backend && yarn
+        run: yarn
+        working-directory: packages/backend
       - name: Copy .env-example.test file to .env.test
-        run: cd packages/backend && cp .env-example.test .env.test
+        run: cp .env-example.test .env.test
+        working-directory: packages/backend
       - name: Run tests
-        run: cd packages/backend && yarn test
+        run: yarn test:coverage
+        working-directory: packages/backend

View File

@@ -18,11 +18,13 @@ jobs:
         with:
           node-version: '18'
           cache: 'yarn'
-          cache-dependency-path: yarn.lock
+          cache-dependency-path: packages/backend/yarn.lock
       - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
       - run: echo "🖥️ The workflow is now ready to test your code on the runner."
       - run: yarn --frozen-lockfile
-      - run: cd packages/backend && yarn lint
+        working-directory: packages/backend
+      - run: yarn lint
+        working-directory: packages/backend
       - run: echo "🍏 This job's status is ${{ job.status }}."
   start-backend-server:
     runs-on: ubuntu-latest
@@ -35,11 +37,13 @@ jobs:
         with:
           node-version: '18'
           cache: 'yarn'
-          cache-dependency-path: yarn.lock
+          cache-dependency-path: packages/backend/yarn.lock
       - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
       - run: echo "🖥️ The workflow is now ready to test your code on the runner."
-      - run: yarn --frozen-lockfile && yarn lerna bootstrap
-      - run: cd packages/backend && yarn start
+      - run: yarn --frozen-lockfile
+        working-directory: packages/backend
+      - run: yarn start
+        working-directory: packages/backend
         env:
           ENCRYPTION_KEY: sample_encryption_key
           WEBHOOK_SECRET_KEY: sample_webhook_secret_key
@@ -55,11 +59,13 @@ jobs:
         with:
           node-version: '18'
           cache: 'yarn'
-          cache-dependency-path: yarn.lock
+          cache-dependency-path: packages/backend/yarn.lock
       - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
       - run: echo "🖥️ The workflow is now ready to test your code on the runner."
-      - run: yarn --frozen-lockfile && yarn lerna bootstrap
-      - run: cd packages/backend && yarn start:worker
+      - run: yarn --frozen-lockfile
+        working-directory: packages/backend
+      - run: yarn start:worker
+        working-directory: packages/backend
         env:
           ENCRYPTION_KEY: sample_encryption_key
           WEBHOOK_SECRET_KEY: sample_webhook_secret_key
@@ -75,11 +81,13 @@ jobs:
         with:
           node-version: '18'
           cache: 'yarn'
-          cache-dependency-path: yarn.lock
+          cache-dependency-path: packages/web/yarn.lock
       - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."
       - run: echo "🖥️ The workflow is now ready to test your code on the runner."
-      - run: yarn --frozen-lockfile && yarn lerna bootstrap
-      - run: cd packages/web && yarn build
+      - run: yarn --frozen-lockfile
+        working-directory: packages/web
+      - run: yarn build
+        working-directory: packages/web
         env:
           CI: false
       - run: echo "🍏 This job's status is ${{ job.status }}."

View File

@@ -3,12 +3,13 @@ on:
   push:
     branches:
       - main
-  pull_request:
-    paths:
-      - 'packages/backend/**'
-      - 'packages/e2e-tests/**'
-      - 'packages/web/**'
-      - '!packages/backend/src/apps/**'
+  # TODO: Add pull request after optimizing the total excecution time of the test suite.
+  # pull_request:
+  #   paths:
+  #     - 'packages/backend/**'
+  #     - 'packages/e2e-tests/**'
+  #     - 'packages/web/**'
+  #     - '!packages/backend/src/apps/**'
   workflow_dispatch:

 env:
@@ -58,13 +59,21 @@ jobs:
       - uses: actions/setup-node@v3
         with:
           node-version: 18
-      - name: Install dependencies
-        run: yarn && yarn lerna bootstrap
+      - name: Install web dependencies
+        run: yarn
+        working-directory: ./packages/web
+      - name: Install backend dependencies
+        run: yarn
+        working-directory: ./packages/backend
+      - name: Install e2e-tests dependencies
+        run: yarn
+        working-directory: ./packages/e2e-tests
       - name: Install Playwright Browsers
         run: yarn playwright install --with-deps
+        working-directory: ./packages/e2e-tests
       - name: Build Automatisch web
-        working-directory: ./packages/web
         run: yarn build
+        working-directory: ./packages/web
         env:
           # Keep this until we clean up warnings in build processes
           CI: false

.gitignore (vendored)
View File

@@ -4,7 +4,6 @@ logs
 npm-debug.log*
 yarn-debug.log*
 yarn-error.log*
-lerna-debug.log*
 .pnpm-debug.log*

 # Diagnostic reports (https://nodejs.org/api/report.html)

View File

@@ -1,10 +1,7 @@
 version: '3.9'
 services:
   main:
-    build:
-      context: ./docker
-      dockerfile: Dockerfile.compose
-    entrypoint: /compose-entrypoint.sh
+    image: git.send.nrw/groot/automatisch:latest
     ports:
       - '3000:3000'
     depends_on:
@@ -28,10 +25,7 @@ services:
     volumes:
       - automatisch_storage:/automatisch/storage
   worker:
-    build:
-      context: ./docker
-      dockerfile: Dockerfile.compose
-    entrypoint: /compose-entrypoint.sh
+    image: git.send.nrw/groot/automatisch:latest
     depends_on:
       - main
     environment:

View File

@@ -11,10 +11,12 @@ WORKDIR /automatisch
 # copy the app, note .dockerignore
 COPY . /automatisch

-RUN yarn
+RUN cd packages/web && yarn
 RUN cd packages/web && yarn build
+RUN cd packages/backend && yarn --production

 RUN \
   rm -rf /usr/local/share/.cache/ && \
   apk del build-dependencies

View File

@@ -1,13 +0,0 @@
{
  "packages": [
    "packages/*"
  ],
  "version": "0.10.0",
  "npmClient": "yarn",
  "useWorkspaces": true,
  "command": {
    "add": {
      "exact": true
    }
  }
}

View File

@@ -1,32 +0,0 @@
{
  "name": "@automatisch/root",
  "license": "See LICENSE file",
  "private": true,
  "scripts": {
    "start": "lerna run --stream --parallel --scope=@*/{web,backend} dev",
    "start:web": "lerna run --stream --scope=@*/web dev",
    "start:backend": "lerna run --stream --scope=@*/backend dev",
    "build:docs": "cd ./packages/docs && yarn install && yarn build"
  },
  "workspaces": {
    "packages": [
      "packages/*"
    ],
    "nohoist": [
      "**/babel-loader",
      "**/webpack",
      "**/@automatisch/web",
      "**/ajv"
    ]
  },
  "devDependencies": {
    "eslint": "^8.13.0",
    "eslint-config-prettier": "^8.3.0",
    "eslint-plugin-prettier": "^4.0.0",
    "lerna": "^4.0.0",
    "prettier": "^2.5.1"
  },
  "publishConfig": {
    "access": "public"
  }
}

View File

@@ -12,6 +12,7 @@
"pretest": "APP_ENV=test node ./test/setup/prepare-test-env.js", "pretest": "APP_ENV=test node ./test/setup/prepare-test-env.js",
"test": "APP_ENV=test vitest run", "test": "APP_ENV=test vitest run",
"test:watch": "APP_ENV=test vitest watch", "test:watch": "APP_ENV=test vitest watch",
"test:coverage": "yarn test --coverage",
"lint": "eslint .", "lint": "eslint .",
"db:create": "node ./bin/database/create.js", "db:create": "node ./bin/database/create.js",
"db:seed:user": "node ./bin/database/seed-user.js", "db:seed:user": "node ./bin/database/seed-user.js",
@@ -23,6 +24,7 @@
"dependencies": { "dependencies": {
"@bull-board/express": "^3.10.1", "@bull-board/express": "^3.10.1",
"@casl/ability": "^6.5.0", "@casl/ability": "^6.5.0",
"@faker-js/faker": "^9.2.0",
"@node-saml/passport-saml": "^4.0.4", "@node-saml/passport-saml": "^4.0.4",
"@rudderstack/rudder-sdk-node": "^1.1.2", "@rudderstack/rudder-sdk-node": "^1.1.2",
"@sentry/node": "^7.42.0", "@sentry/node": "^7.42.0",
@@ -36,6 +38,9 @@
"crypto-js": "^4.1.1", "crypto-js": "^4.1.1",
"debug": "~2.6.9", "debug": "~2.6.9",
"dotenv": "^10.0.0", "dotenv": "^10.0.0",
"eslint": "^8.13.0",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-prettier": "^4.0.0",
"express": "~4.18.2", "express": "~4.18.2",
"express-async-errors": "^3.1.1", "express-async-errors": "^3.1.1",
"express-basic-auth": "^1.2.1", "express-basic-auth": "^1.2.1",
@@ -61,6 +66,7 @@
"pg": "^8.7.1", "pg": "^8.7.1",
"php-serialize": "^4.0.2", "php-serialize": "^4.0.2",
"pluralize": "^8.0.0", "pluralize": "^8.0.0",
"prettier": "^2.5.1",
"raw-body": "^2.5.2", "raw-body": "^2.5.2",
"showdown": "^2.1.0", "showdown": "^2.1.0",
"uuid": "^9.0.1", "uuid": "^9.0.1",
@@ -92,10 +98,11 @@
"url": "https://github.com/automatisch/automatisch/issues" "url": "https://github.com/automatisch/automatisch/issues"
}, },
"devDependencies": { "devDependencies": {
"@vitest/coverage-v8": "^2.1.5",
"node-gyp": "^10.1.0", "node-gyp": "^10.1.0",
"nodemon": "^2.0.13", "nodemon": "^2.0.13",
"supertest": "^6.3.3", "supertest": "^6.3.3",
"vitest": "^1.1.3" "vitest": "^2.1.5"
}, },
"publishConfig": { "publishConfig": {
"access": "public" "access": "public"

View File

@@ -8,7 +8,7 @@ export default {
       key: 'instanceUrl',
       label: 'WordPress instance URL',
       type: 'string',
-      required: false,
+      required: true,
       readOnly: false,
       value: null,
       placeholder: null,

View File

@@ -52,7 +52,7 @@ const appConfig = {
   isDev: appEnv === 'development',
   isTest: appEnv === 'test',
   isProd: appEnv === 'production',
-  version: '0.13.1',
+  version: '0.14.0',
   postgresDatabase: process.env.POSTGRES_DATABASE || 'automatisch_development',
   postgresSchema: process.env.POSTGRES_SCHEMA || 'public',
   postgresPort: parseInt(process.env.POSTGRES_PORT || '5432'),

View File

@@ -7,7 +7,7 @@ export default async (request, response) => {
     .throwIfNotFound();

   const roleMappings = await samlAuthProvider
-    .$relatedQuery('samlAuthProvidersRoleMappings')
+    .$relatedQuery('roleMappings')
     .orderBy('remote_role_name', 'asc');

   renderObject(response, roleMappings);

View File

@@ -8,15 +8,14 @@ export default async (request, response) => {
     .findById(samlAuthProviderId)
     .throwIfNotFound();

-  const samlAuthProvidersRoleMappings =
-    await samlAuthProvider.updateRoleMappings(
-      samlAuthProvidersRoleMappingsParams(request)
-    );
+  const roleMappings = await samlAuthProvider.updateRoleMappings(
+    roleMappingsParams(request)
+  );

-  renderObject(response, samlAuthProvidersRoleMappings);
+  renderObject(response, roleMappings);
 };

-const samlAuthProvidersRoleMappingsParams = (request) => {
+const roleMappingsParams = (request) => {
   const roleMappings = request.body;

   return roleMappings.map(({ roleId, remoteRoleName }) => ({

View File

@@ -6,7 +6,7 @@ import createAuthTokenByUserId from '../../../../../helpers/create-auth-token-by
 import { createRole } from '../../../../../../test/factories/role.js';
 import { createUser } from '../../../../../../test/factories/user.js';
 import { createSamlAuthProvider } from '../../../../../../test/factories/saml-auth-provider.ee.js';
-import { createSamlAuthProvidersRoleMapping } from '../../../../../../test/factories/saml-auth-providers-role-mapping.js';
+import { createRoleMapping } from '../../../../../../test/factories/role-mapping.js';
 import createRoleMappingsMock from '../../../../../../test/mocks/rest/api/v1/admin/saml-auth-providers/update-role-mappings.ee.js';
 import * as license from '../../../../../helpers/license.ee.js';
@@ -21,12 +21,12 @@ describe('PATCH /api/v1/admin/saml-auth-providers/:samlAuthProviderId/role-mappi
     samlAuthProvider = await createSamlAuthProvider();

-    await createSamlAuthProvidersRoleMapping({
+    await createRoleMapping({
       samlAuthProviderId: samlAuthProvider.id,
       remoteRoleName: 'Viewer',
     });

-    await createSamlAuthProvidersRoleMapping({
+    await createRoleMapping({
       samlAuthProviderId: samlAuthProvider.id,
       remoteRoleName: 'Editor',
     });
@@ -64,7 +64,7 @@ describe('PATCH /api/v1/admin/saml-auth-providers/:samlAuthProviderId/role-mappi
   it('should delete role mappings when given empty role mappings', async () => {
     const existingRoleMappings = await samlAuthProvider.$relatedQuery(
-      'samlAuthProvidersRoleMappings'
+      'roleMappings'
     );

     expect(existingRoleMappings.length).toBe(2);
@@ -149,34 +149,4 @@ describe('PATCH /api/v1/admin/saml-auth-providers/:samlAuthProviderId/role-mappi
       .send(roleMappings)
       .expect(404);
   });
-
-  it('should not delete existing role mapping when error thrown', async () => {
-    const roleMappings = [
-      {
-        roleId: userRole.id,
-        remoteRoleName: {
-          invalid: 'data',
-        },
-      },
-    ];
-
-    const roleMappingsBeforeRequest = await samlAuthProvider.$relatedQuery(
-      'samlAuthProvidersRoleMappings'
-    );
-
-    await request(app)
-      .patch(
-        `/api/v1/admin/saml-auth-providers/${samlAuthProvider.id}/role-mappings`
-      )
-      .set('Authorization', token)
-      .send(roleMappings)
-      .expect(422);
-
-    const roleMappingsAfterRequest = await samlAuthProvider.$relatedQuery(
-      'samlAuthProvidersRoleMappings'
-    );
-
-    expect(roleMappingsBeforeRequest).toStrictEqual(roleMappingsAfterRequest);
-    expect(roleMappingsAfterRequest.length).toBe(2);
-  });
 });

View File

@@ -87,14 +87,14 @@ describe('GET /api/v1/apps/:appKey/connections', () => {
   it('should return not found response for invalid connection UUID', async () => {
     await createPermission({
-      action: 'update',
+      action: 'read',
       subject: 'Connection',
       roleId: currentUserRole.id,
       conditions: ['isCreator'],
     });

     await request(app)
-      .get('/api/v1/connections/invalid-connection-id/connections')
+      .get('/api/v1/apps/invalid-connection-id/connections')
       .set('Authorization', token)
       .expect(404);
   });

View File

@@ -10,7 +10,7 @@ describe('GET /api/v1/automatisch/version', () => {
     const expectedPayload = {
       data: {
-        version: '0.13.1',
+        version: '0.14.0',
       },
       meta: {
         count: 1,

View File

@@ -193,7 +193,7 @@ describe('POST /api/v1/steps/:stepId/dynamic-data', () => {
     const notExistingStepUUID = Crypto.randomUUID();

     await request(app)
-      .get(`/api/v1/steps/${notExistingStepUUID}/dynamic-data`)
+      .post(`/api/v1/steps/${notExistingStepUUID}/dynamic-data`)
       .set('Authorization', token)
       .expect(404);
   });
@@ -216,7 +216,7 @@ describe('POST /api/v1/steps/:stepId/dynamic-data', () => {
     const step = await createStep({ appKey: null });

     await request(app)
-      .get(`/api/v1/steps/${step.id}/dynamic-data`)
+      .post(`/api/v1/steps/${step.id}/dynamic-data`)
       .set('Authorization', token)
       .expect(404);
   });

View File

@@ -118,7 +118,7 @@ describe('POST /api/v1/steps/:stepId/dynamic-fields', () => {
     const notExistingStepUUID = Crypto.randomUUID();

     await request(app)
-      .get(`/api/v1/steps/${notExistingStepUUID}/dynamic-fields`)
+      .post(`/api/v1/steps/${notExistingStepUUID}/dynamic-fields`)
       .set('Authorization', token)
       .expect(404);
   });
@@ -138,10 +138,11 @@ describe('POST /api/v1/steps/:stepId/dynamic-fields', () => {
       conditions: [],
     });

-    const step = await createStep({ appKey: null });
+    const step = await createStep();
+    await step.$query().patch({ appKey: null });

     await request(app)
-      .get(`/api/v1/steps/${step.id}/dynamic-fields`)
+      .post(`/api/v1/steps/${step.id}/dynamic-fields`)
       .set('Authorization', token)
       .expect(404);
   });

View File

@@ -0,0 +1,52 @@
export async function up(knex) {
  await knex.schema.createTable('role_mappings', (table) => {
    table.uuid('id').primary().defaultTo(knex.raw('gen_random_uuid()'));
    table
      .uuid('saml_auth_provider_id')
      .references('id')
      .inTable('saml_auth_providers');
    table.uuid('role_id').references('id').inTable('roles');
    table.string('remote_role_name').notNullable();
    table.unique(['saml_auth_provider_id', 'remote_role_name']);
    table.timestamps(true, true);
  });

  const existingRoleMappings = await knex('saml_auth_providers_role_mappings');

  if (existingRoleMappings.length) {
    await knex('role_mappings').insert(existingRoleMappings);
  }

  return await knex.schema.dropTable('saml_auth_providers_role_mappings');
}

export async function down(knex) {
  await knex.schema.createTable(
    'saml_auth_providers_role_mappings',
    (table) => {
      table.uuid('id').primary().defaultTo(knex.raw('gen_random_uuid()'));
      table
        .uuid('saml_auth_provider_id')
        .references('id')
        .inTable('saml_auth_providers');
      table.uuid('role_id').references('id').inTable('roles');
      table.string('remote_role_name').notNullable();
      table.unique(['saml_auth_provider_id', 'remote_role_name']);
      table.timestamps(true, true);
    }
  );

  const existingRoleMappings = await knex('role_mappings');

  if (existingRoleMappings.length) {
    await knex('saml_auth_providers_role_mappings').insert(
      existingRoleMappings
    );
  }

  return await knex.schema.dropTable('role_mappings');
}

View File

@@ -30,7 +30,7 @@ const findOrCreateUserBySamlIdentity = async (
     : [mappedUser.role];

   const samlAuthProviderRoleMapping = await samlAuthProvider
-    .$relatedQuery('samlAuthProvidersRoleMappings')
+    .$relatedQuery('roleMappings')
     .whereIn('remote_role_name', mappedRoles)
     .limit(1)
     .first();

View File

@@ -0,0 +1,46 @@
import { describe, expect, it } from 'vitest';
import userAbility from './user-ability.js';

describe('userAbility', () => {
  it('should return PureAbility instantiated with user permissions', () => {
    const user = {
      permissions: [
        {
          subject: 'Flow',
          action: 'read',
          conditions: ['isCreator'],
        },
      ],
      role: {
        name: 'User',
      },
    };

    const ability = userAbility(user);

    expect(ability.rules).toStrictEqual(user.permissions);
  });

  it('should return permission-less PureAbility for user with no role', () => {
    const user = {
      permissions: [
        {
          subject: 'Flow',
          action: 'read',
          conditions: ['isCreator'],
        },
      ],
      role: null,
    };

    const ability = userAbility(user);

    expect(ability.rules).toStrictEqual([]);
  });

  it('should return permission-less PureAbility for user with no permissions', () => {
    const user = { permissions: null, role: { name: 'User' } };

    const ability = userAbility(user);

    expect(ability.rules).toStrictEqual([]);
  });
});
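
Note: the tests above exercise a helper whose implementation is not shown in this compare view. The following is a minimal sketch that would satisfy these tests; it is an assumption for illustration only, and the real user-ability.js may differ (for example in how conditions are matched).

// Minimal sketch consistent with the tests above; not the actual implementation.
import { PureAbility } from '@casl/ability';

const userAbility = (user) => {
  const hasRoleAndPermissions =
    Boolean(user?.role) && Array.isArray(user?.permissions);

  // Users without a role or without permissions get a permission-less ability.
  const rules = hasRoleAndPermissions ? user.permissions : [];

  return new PureAbility(rules);
};

export default userAbility;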

View File

@@ -0,0 +1,30 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html

exports[`RoleMapping model > jsonSchema should have the correct schema 1`] = `
{
  "properties": {
    "id": {
      "format": "uuid",
      "type": "string",
    },
    "remoteRoleName": {
      "minLength": 1,
      "type": "string",
    },
    "roleId": {
      "format": "uuid",
      "type": "string",
    },
    "samlAuthProviderId": {
      "format": "uuid",
      "type": "string",
    },
  },
  "required": [
    "samlAuthProviderId",
    "roleId",
    "remoteRoleName",
  ],
  "type": "object",
}
`;

View File

@@ -1,6 +1,6 @@
 // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html

-exports[`SamlAuthProvidersRoleMapping model > jsonSchema should have the correct schema 1`] = `
+exports[`RoleMapping model > jsonSchema should have the correct schema 1`] = `
 {
   "properties": {
     "id": {

View File

@@ -1,8 +1,8 @@
 import Base from './base.js';
 import SamlAuthProvider from './saml-auth-provider.ee.js';

-class SamlAuthProvidersRoleMapping extends Base {
-  static tableName = 'saml_auth_providers_role_mappings';
+class RoleMapping extends Base {
+  static tableName = 'role_mappings';

   static jsonSchema = {
     type: 'object',
@@ -21,11 +21,11 @@ class SamlAuthProvidersRoleMapping extends Base {
       relation: Base.BelongsToOneRelation,
       modelClass: SamlAuthProvider,
       join: {
-        from: 'saml_auth_providers_role_mappings.saml_auth_provider_id',
+        from: 'role_mappings.saml_auth_provider_id',
         to: 'saml_auth_providers.id',
       },
     },
   });
 }

-export default SamlAuthProvidersRoleMapping;
+export default RoleMapping;

View File

@@ -1,28 +1,26 @@
 import { describe, it, expect } from 'vitest';
-import SamlAuthProvidersRoleMapping from '../models/saml-auth-providers-role-mapping.ee';
+import RoleMapping from './role-mapping.ee';
 import SamlAuthProvider from './saml-auth-provider.ee';
 import Base from './base';

-describe('SamlAuthProvidersRoleMapping model', () => {
+describe('RoleMapping model', () => {
   it('tableName should return correct name', () => {
-    expect(SamlAuthProvidersRoleMapping.tableName).toBe(
-      'saml_auth_providers_role_mappings'
-    );
+    expect(RoleMapping.tableName).toBe('role_mappings');
   });

   it('jsonSchema should have the correct schema', () => {
-    expect(SamlAuthProvidersRoleMapping.jsonSchema).toMatchSnapshot();
+    expect(RoleMapping.jsonSchema).toMatchSnapshot();
   });

   it('relationMappings should return correct associations', () => {
-    const relationMappings = SamlAuthProvidersRoleMapping.relationMappings();
+    const relationMappings = RoleMapping.relationMappings();

     const expectedRelations = {
       samlAuthProvider: {
         relation: Base.BelongsToOneRelation,
         modelClass: SamlAuthProvider,
         join: {
-          from: 'saml_auth_providers_role_mappings.saml_auth_provider_id',
+          from: 'role_mappings.saml_auth_provider_id',
           to: 'saml_auth_providers.id',
         },
       },

View File

@@ -5,7 +5,7 @@ import appConfig from '../config/app.js';
 import axios from '../helpers/axios-with-proxy.js';
 import Base from './base.js';
 import Identity from './identity.ee.js';
-import SamlAuthProvidersRoleMapping from './saml-auth-providers-role-mapping.ee.js';
+import RoleMapping from './role-mapping.ee.js';

 class SamlAuthProvider extends Base {
   static tableName = 'saml_auth_providers';
@@ -53,12 +53,12 @@ class SamlAuthProvider extends Base {
         to: 'saml_auth_providers.id',
       },
     },
-    samlAuthProvidersRoleMappings: {
+    roleMappings: {
       relation: Base.HasManyRelation,
-      modelClass: SamlAuthProvidersRoleMapping,
+      modelClass: RoleMapping,
       join: {
         from: 'saml_auth_providers.id',
-        to: 'saml_auth_providers_role_mappings.saml_auth_provider_id',
+        to: 'role_mappings.saml_auth_provider_id',
       },
     },
   });
@@ -133,27 +133,22 @@ class SamlAuthProvider extends Base {
   }

   async updateRoleMappings(roleMappings) {
-    return await SamlAuthProvider.transaction(async (trx) => {
-      await this.$relatedQuery('samlAuthProvidersRoleMappings', trx).delete();
+    await this.$relatedQuery('roleMappings').delete();

     if (isEmpty(roleMappings)) {
       return [];
     }

-      const samlAuthProvidersRoleMappingsData = roleMappings.map(
-        (samlAuthProvidersRoleMapping) => ({
-          ...samlAuthProvidersRoleMapping,
-          samlAuthProviderId: this.id,
-        })
-      );
+    const roleMappingsData = roleMappings.map((roleMapping) => ({
+      ...roleMapping,
+      samlAuthProviderId: this.id,
+    }));

-      const samlAuthProvidersRoleMappings =
-        await SamlAuthProvidersRoleMapping.query(trx).insertAndFetch(
-          samlAuthProvidersRoleMappingsData
-        );
+    const newRoleMappings = await RoleMapping.query().insertAndFetch(
+      roleMappingsData
+    );

-      return samlAuthProvidersRoleMappings;
-    });
+    return newRoleMappings;
   }
 }

View File

@@ -1,8 +1,14 @@
-import { describe, it, expect } from 'vitest';
+import { vi, beforeEach, describe, it, expect } from 'vitest';
+import { v4 as uuidv4 } from 'uuid';
 import SamlAuthProvider from '../models/saml-auth-provider.ee';
-import SamlAuthProvidersRoleMapping from '../models/saml-auth-providers-role-mapping.ee';
+import RoleMapping from '../models/role-mapping.ee';
+import axios from '../helpers/axios-with-proxy.js';
 import Identity from './identity.ee';
 import Base from './base';
+import appConfig from '../config/app';
+import { createSamlAuthProvider } from '../../test/factories/saml-auth-provider.ee.js';
+import { createRoleMapping } from '../../test/factories/role-mapping.js';
+import { createRole } from '../../test/factories/role.js';

 describe('SamlAuthProvider model', () => {
   it('tableName should return correct name', () => {
@@ -25,12 +31,12 @@ describe('SamlAuthProvider model', () => {
           to: 'saml_auth_providers.id',
         },
       },
-      samlAuthProvidersRoleMappings: {
+      roleMappings: {
         relation: Base.HasManyRelation,
-        modelClass: SamlAuthProvidersRoleMapping,
+        modelClass: RoleMapping,
         join: {
           from: 'saml_auth_providers.id',
-          to: 'saml_auth_providers_role_mappings.saml_auth_provider_id',
+          to: 'role_mappings.saml_auth_provider_id',
         },
       },
     };
@@ -45,4 +51,181 @@ describe('SamlAuthProvider model', () => {
     expect(virtualAttributes).toStrictEqual(expectedAttributes);
   });
+
+  it('loginUrl should return the URL of login', () => {
+    const samlAuthProvider = new SamlAuthProvider();
+    samlAuthProvider.issuer = 'sample-issuer';
+
+    vi.spyOn(appConfig, 'baseUrl', 'get').mockReturnValue(
+      'https://automatisch.io'
+    );
+
+    expect(samlAuthProvider.loginUrl).toStrictEqual(
+      'https://automatisch.io/login/saml/sample-issuer'
+    );
+  });
+
+  it('loginCallbackUrl should return the URL of login callback', () => {
+    const samlAuthProvider = new SamlAuthProvider();
+    samlAuthProvider.issuer = 'sample-issuer';
+
+    vi.spyOn(appConfig, 'baseUrl', 'get').mockReturnValue(
+      'https://automatisch.io'
+    );
+
+    expect(samlAuthProvider.loginCallBackUrl).toStrictEqual(
+      'https://automatisch.io/login/saml/sample-issuer/callback'
+    );
+  });
+
+  it('remoteLogoutUrl should return the URL from entrypoint', () => {
+    const samlAuthProvider = new SamlAuthProvider();
+    samlAuthProvider.entryPoint = 'https://example.com/saml/logout';
+
+    expect(samlAuthProvider.remoteLogoutUrl).toStrictEqual(
+      'https://example.com/saml/logout'
+    );
+  });
+
+  it('config should return the correct configuration object', () => {
+    const samlAuthProvider = new SamlAuthProvider();
+    samlAuthProvider.certificate = 'sample-certificate';
+    samlAuthProvider.signatureAlgorithm = 'sha256';
+    samlAuthProvider.entryPoint = 'https://example.com/saml';
+    samlAuthProvider.issuer = 'sample-issuer';
+
+    vi.spyOn(appConfig, 'baseUrl', 'get').mockReturnValue(
+      'https://automatisch.io'
+    );
+
+    const expectedConfig = {
+      callbackUrl: 'https://automatisch.io/login/saml/sample-issuer/callback',
+      cert: 'sample-certificate',
+      entryPoint: 'https://example.com/saml',
+      issuer: 'sample-issuer',
+      signatureAlgorithm: 'sha256',
+      logoutUrl: 'https://example.com/saml',
+    };
+
+    expect(samlAuthProvider.config).toStrictEqual(expectedConfig);
+  });
+
+  it('generateLogoutRequestBody should return a correctly encoded SAML logout request', () => {
+    vi.mock('uuid', () => ({
+      v4: vi.fn(),
+    }));
+
+    const samlAuthProvider = new SamlAuthProvider();
+    samlAuthProvider.entryPoint = 'https://example.com/saml';
+    samlAuthProvider.issuer = 'sample-issuer';
+
+    const mockUuid = '123e4567-e89b-12d3-a456-426614174000';
+    uuidv4.mockReturnValue(mockUuid);
+
+    const sessionId = 'test-session-id';
+    const logoutRequest = samlAuthProvider.generateLogoutRequestBody(sessionId);
+
+    const expectedLogoutRequest = `
+      <samlp:LogoutRequest
+        xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol"
+        ID="${mockUuid}"
+        Version="2.0"
+        IssueInstant="${new Date().toISOString()}"
+        Destination="https://example.com/saml">
+        <saml:Issuer xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion">sample-issuer</saml:Issuer>
+        <samlp:SessionIndex>test-session-id</samlp:SessionIndex>
+      </samlp:LogoutRequest>
+    `;
+
+    const expectedEncodedRequest = Buffer.from(expectedLogoutRequest).toString(
+      'base64'
+    );
+
+    expect(logoutRequest).toBe(expectedEncodedRequest);
+  });
+
+  it('terminateRemoteSession should send the correct POST request and return the response', async () => {
+    vi.mock('../helpers/axios-with-proxy.js', () => ({
+      default: {
+        post: vi.fn(),
+      },
+    }));
+
+    const samlAuthProvider = new SamlAuthProvider();
+    samlAuthProvider.entryPoint = 'https://example.com/saml';
+    samlAuthProvider.generateLogoutRequestBody = vi
+      .fn()
+      .mockReturnValue('mockEncodedLogoutRequest');
+
+    const sessionId = 'test-session-id';
+    const mockResponse = { data: 'Logout Successful' };
+    axios.post.mockResolvedValue(mockResponse);
+
+    const response = await samlAuthProvider.terminateRemoteSession(sessionId);
+
+    expect(samlAuthProvider.generateLogoutRequestBody).toHaveBeenCalledWith(
+      sessionId
+    );
+
+    expect(axios.post).toHaveBeenCalledWith(
+      'https://example.com/saml',
+      'SAMLRequest=mockEncodedLogoutRequest',
+      {
+        headers: {
+          'Content-Type': 'application/x-www-form-urlencoded',
+        },
+      }
+    );
+
+    expect(response).toBe(mockResponse);
+  });
+
+  describe('updateRoleMappings', () => {
+    let samlAuthProvider;
+
+    beforeEach(async () => {
+      samlAuthProvider = await createSamlAuthProvider();
+    });
+
+    it('should remove all existing role mappings', async () => {
+      await createRoleMapping({
+        samlAuthProviderId: samlAuthProvider.id,
+        remoteRoleName: 'Admin',
+      });
+
+      await createRoleMapping({
+        samlAuthProviderId: samlAuthProvider.id,
+        remoteRoleName: 'User',
+      });
+
+      await samlAuthProvider.updateRoleMappings([]);
+
+      const roleMappings = await samlAuthProvider.$relatedQuery('roleMappings');
+
+      expect(roleMappings).toStrictEqual([]);
+    });
+
+    it('should return the updated role mappings when new ones are provided', async () => {
+      const adminRole = await createRole({ name: 'Admin' });
+      const userRole = await createRole({ name: 'User' });
+
+      const newRoleMappings = [
+        { remoteRoleName: 'Admin', roleId: adminRole.id },
+        { remoteRoleName: 'User', roleId: userRole.id },
+      ];
+
+      const result = await samlAuthProvider.updateRoleMappings(newRoleMappings);
+
+      const refetchedRoleMappings = await samlAuthProvider.$relatedQuery(
+        'roleMappings'
+      );
+
+      expect(result).toStrictEqual(refetchedRoleMappings);
+    });
+  });
 });

View File

@@ -212,6 +212,10 @@ class User extends Base {
     return `${appConfig.webAppUrl}/accept-invitation?token=${this.invitationToken}`;
   }

+  get ability() {
+    return userAbility(this);
+  }
+
   static async authenticate(email, password) {
     const user = await User.query().findOne({
       email: email?.toLowerCase() || null,
@@ -407,7 +411,7 @@ class User extends Base {
     }
   }

-  async startTrialPeriod() {
+  startTrialPeriod() {
     this.trialExpiryDate = DateTime.now().plus({ days: 30 }).toISODate();
   }
@@ -583,62 +587,6 @@ class User extends Base {
     return user;
   }

-  async $beforeInsert(queryContext) {
-    await super.$beforeInsert(queryContext);
-
-    this.email = this.email.toLowerCase();
-    await this.generateHash();
-
-    if (appConfig.isCloud) {
-      await this.startTrialPeriod();
-    }
-  }
-
-  async $beforeUpdate(opt, queryContext) {
-    await super.$beforeUpdate(opt, queryContext);
-
-    if (this.email) {
-      this.email = this.email.toLowerCase();
-    }
-
-    await this.generateHash();
-  }
-
-  async $afterInsert(queryContext) {
-    await super.$afterInsert(queryContext);
-
-    if (appConfig.isCloud) {
-      await this.$relatedQuery('usageData').insert({
-        userId: this.id,
-        consumedTaskCount: 0,
-        nextResetAt: DateTime.now().plus({ days: 30 }).toISODate(),
-      });
-    }
-  }
-
-  async $afterFind() {
-    if (await hasValidLicense()) return this;
-
-    if (Array.isArray(this.permissions)) {
-      this.permissions = this.permissions.filter((permission) => {
-        const restrictedSubjects = [
-          'App',
-          'Role',
-          'SamlAuthProvider',
-          'Config',
-        ];
-
-        return !restrictedSubjects.includes(permission.subject);
-      });
-    }
-
-    return this;
-  }
-
-  get ability() {
-    return userAbility(this);
-  }
-
   can(action, subject) {
     const can = this.ability.can(action, subject);
@@ -654,12 +602,68 @@ class User extends Base {
     return conditionMap;
   }

-  cannot(action, subject) {
-    const cannot = this.ability.cannot(action, subject);
-
-    if (cannot) throw new NotAuthorizedError();
-
-    return cannot;
+  lowercaseEmail() {
+    if (this.email) {
+      this.email = this.email.toLowerCase();
+    }
+  }
+
+  async createUsageData() {
+    if (appConfig.isCloud) {
+      return await this.$relatedQuery('usageData').insertAndFetch({
+        userId: this.id,
+        consumedTaskCount: 0,
+        nextResetAt: DateTime.now().plus({ days: 30 }).toISODate(),
+      });
+    }
+  }
+
+  async omitEnterprisePermissionsWithoutValidLicense() {
+    if (await hasValidLicense()) {
+      return this;
+    }
+
+    if (Array.isArray(this.permissions)) {
+      this.permissions = this.permissions.filter((permission) => {
+        const restrictedSubjects = [
+          'App',
+          'Role',
+          'SamlAuthProvider',
+          'Config',
+        ];
+
+        return !restrictedSubjects.includes(permission.subject);
+      });
+    }
+  }
+
+  async $beforeInsert(queryContext) {
+    await super.$beforeInsert(queryContext);
+
+    this.lowercaseEmail();
+    await this.generateHash();
+
+    if (appConfig.isCloud) {
+      this.startTrialPeriod();
+    }
+  }
+
+  async $beforeUpdate(opt, queryContext) {
+    await super.$beforeUpdate(opt, queryContext);
+
+    this.lowercaseEmail();
+    await this.generateHash();
+  }
+
+  async $afterInsert(queryContext) {
+    await super.$afterInsert(queryContext);
+
+    await this.createUsageData();
+  }
+
+  async $afterFind() {
+    await this.omitEnterprisePermissionsWithoutValidLicense();
   }
 }

File diff suppressed because it is too large.

View File

@@ -1,31 +1,4 @@
-import process from 'process';
-import { Queue } from 'bullmq';
-import redisConfig from '../config/redis.js';
-import logger from '../helpers/logger.js';
-
-const CONNECTION_REFUSED = 'ECONNREFUSED';
-
-const redisConnection = {
-  connection: redisConfig,
-};
-
-const actionQueue = new Queue('action', redisConnection);
-
-process.on('SIGTERM', async () => {
-  await actionQueue.close();
-});
-
-actionQueue.on('error', (error) => {
-  if (error.code === CONNECTION_REFUSED) {
-    logger.error(
-      'Make sure you have installed Redis and it is running.',
-      error
-    );
-
-    process.exit();
-  }
-
-  logger.error('Error happened in action queue!', error);
-});
+import { generateQueue } from './queue.js';
+
+const actionQueue = generateQueue('action');

 export default actionQueue;

View File

@@ -1,31 +1,4 @@
-import process from 'process';
-import { Queue } from 'bullmq';
-import redisConfig from '../config/redis.js';
-import logger from '../helpers/logger.js';
-
-const CONNECTION_REFUSED = 'ECONNREFUSED';
-
-const redisConnection = {
-  connection: redisConfig,
-};
-
-const deleteUserQueue = new Queue('delete-user', redisConnection);
-
-process.on('SIGTERM', async () => {
-  await deleteUserQueue.close();
-});
-
-deleteUserQueue.on('error', (error) => {
-  if (error.code === CONNECTION_REFUSED) {
-    logger.error(
-      'Make sure you have installed Redis and it is running.',
-      error
-    );
-
-    process.exit();
-  }
-
-  logger.error('Error happened in delete user queue!', error);
-});
+import { generateQueue } from './queue.js';
+
+const deleteUserQueue = generateQueue('delete-user');

 export default deleteUserQueue;

View File

@@ -1,31 +1,4 @@
-import process from 'process';
-import { Queue } from 'bullmq';
-import redisConfig from '../config/redis.js';
-import logger from '../helpers/logger.js';
-
-const CONNECTION_REFUSED = 'ECONNREFUSED';
-
-const redisConnection = {
-  connection: redisConfig,
-};
-
-const emailQueue = new Queue('email', redisConnection);
-
-process.on('SIGTERM', async () => {
-  await emailQueue.close();
-});
-
-emailQueue.on('error', (error) => {
-  if (error.code === CONNECTION_REFUSED) {
-    logger.error(
-      'Make sure you have installed Redis and it is running.',
-      error
-    );
-
-    process.exit();
-  }
-
-  logger.error('Error happened in email queue!', error);
-});
+import { generateQueue } from './queue.js';
+
+const emailQueue = generateQueue('email');

 export default emailQueue;

View File

@@ -1,31 +1,4 @@
-import process from 'process';
-import { Queue } from 'bullmq';
-import redisConfig from '../config/redis.js';
-import logger from '../helpers/logger.js';
-
-const CONNECTION_REFUSED = 'ECONNREFUSED';
-
-const redisConnection = {
-  connection: redisConfig,
-};
-
-const flowQueue = new Queue('flow', redisConnection);
-
-process.on('SIGTERM', async () => {
-  await flowQueue.close();
-});
-
-flowQueue.on('error', (error) => {
-  if (error.code === CONNECTION_REFUSED) {
-    logger.error(
-      'Make sure you have installed Redis and it is running.',
-      error
-    );
-
-    process.exit();
-  }
-
-  logger.error('Error happened in flow queue!', error);
-});
+import { generateQueue } from './queue.js';
+
+const flowQueue = generateQueue('flow');

 export default flowQueue;

View File

@@ -0,0 +1,21 @@
import appConfig from '../config/app.js';
import actionQueue from './action.js';
import emailQueue from './email.js';
import flowQueue from './flow.js';
import triggerQueue from './trigger.js';
import deleteUserQueue from './delete-user.ee.js';
import removeCancelledSubscriptionsQueue from './remove-cancelled-subscriptions.ee.js';

const queues = [
  actionQueue,
  emailQueue,
  flowQueue,
  triggerQueue,
  deleteUserQueue,
];

if (appConfig.isCloud) {
  queues.push(removeCancelledSubscriptionsQueue);
}

export default queues;

View File

@@ -0,0 +1,44 @@
import process from 'process';
import { Queue } from 'bullmq';
import redisConfig from '../config/redis.js';
import logger from '../helpers/logger.js';
const CONNECTION_REFUSED = 'ECONNREFUSED';
const redisConnection = {
connection: redisConfig,
};
export const generateQueue = (queueName, options) => {
const queue = new Queue(queueName, redisConnection);
queue.on('error', (error) => queueOnError(error, queueName));
if (options?.runDaily) addScheduler(queueName, queue);
return queue;
};
const queueOnError = (error, queueName) => {
if (error.code === CONNECTION_REFUSED) {
const errorMessage =
'Make sure you have installed Redis and it is running.';
logger.error(errorMessage, error);
process.exit();
}
logger.error(`Error happened in ${queueName} queue!`, error);
};
const addScheduler = (queueName, queue) => {
const everydayAtOneOclock = '0 1 * * *';
queue.add(queueName, null, {
jobId: queueName,
repeat: {
pattern: everydayAtOneOclock,
},
});
};
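For orientation, here is a minimal sketch (not part of this changeset) of how a queue module consumes the new helper; the `example` names are hypothetical and only illustrate the two supported shapes, plain and daily-scheduled. A real queue would presumably also be registered in queues/index.js so the shared SIGTERM handler closes it.

```js
// Hypothetical queues/example.js, an illustrative sketch assuming the generateQueue API shown above.
import { generateQueue } from './queue.js';

// Plain queue: the Redis connection and error handling come from generateQueue.
const exampleQueue = generateQueue('example');

// Daily queue: runDaily registers a repeatable job with the '0 1 * * *' pattern.
export const exampleDailyQueue = generateQueue('example-daily', { runDaily: true });

export default exampleQueue;
```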

View File

@@ -1,44 +1,8 @@
-import process from 'process';
-import { Queue } from 'bullmq';
-import redisConfig from '../config/redis.js';
-import logger from '../helpers/logger.js';
-const CONNECTION_REFUSED = 'ECONNREFUSED';
-const redisConnection = {
-  connection: redisConfig,
-};
-const removeCancelledSubscriptionsQueue = new Queue(
+import { generateQueue } from './queue.js';
+const removeCancelledSubscriptionsQueue = generateQueue(
   'remove-cancelled-subscriptions',
-  redisConnection
+  { runDaily: true }
 );
-process.on('SIGTERM', async () => {
-  await removeCancelledSubscriptionsQueue.close();
-});
-removeCancelledSubscriptionsQueue.on('error', (error) => {
-  if (error.code === CONNECTION_REFUSED) {
-    logger.error(
-      'Make sure you have installed Redis and it is running.',
-      error
-    );
-    process.exit();
-  }
-  logger.error(
-    'Error happened in remove cancelled subscriptions queue!',
-    error
-  );
-});
-removeCancelledSubscriptionsQueue.add('remove-cancelled-subscriptions', null, {
-  jobId: 'remove-cancelled-subscriptions',
-  repeat: {
-    pattern: '0 1 * * *',
-  },
-});
 export default removeCancelledSubscriptionsQueue;

View File

@@ -1,31 +1,4 @@
-import process from 'process';
-import { Queue } from 'bullmq';
-import redisConfig from '../config/redis.js';
-import logger from '../helpers/logger.js';
-const CONNECTION_REFUSED = 'ECONNREFUSED';
-const redisConnection = {
-  connection: redisConfig,
-};
-const triggerQueue = new Queue('trigger', redisConnection);
-process.on('SIGTERM', async () => {
-  await triggerQueue.close();
-});
-triggerQueue.on('error', (error) => {
-  if (error.code === CONNECTION_REFUSED) {
-    logger.error(
-      'Make sure you have installed Redis and it is running.',
-      error
-    );
-    process.exit();
-  }
-  logger.error('Error happened in trigger queue!', error);
-});
+import { generateQueue } from './queue.js';
+const triggerQueue = generateQueue('trigger');
 export default triggerQueue;

View File

@@ -26,7 +26,7 @@ const serializers = {
   Permission: permissionSerializer,
   AdminSamlAuthProvider: adminSamlAuthProviderSerializer,
   SamlAuthProvider: samlAuthProviderSerializer,
-  SamlAuthProvidersRoleMapping: samlAuthProviderRoleMappingSerializer,
+  RoleMapping: samlAuthProviderRoleMappingSerializer,
   AppAuthClient: appAuthClientSerializer,
   AppConfig: appConfigSerializer,
   Flow: flowSerializer,

View File

@@ -1,20 +1,22 @@
 import * as Sentry from './helpers/sentry.ee.js';
-import appConfig from './config/app.js';
+import process from 'node:process';
 Sentry.init();
 import './config/orm.js';
 import './helpers/check-worker-readiness.js';
-import './workers/flow.js';
-import './workers/trigger.js';
-import './workers/action.js';
-import './workers/email.js';
-import './workers/delete-user.ee.js';
-if (appConfig.isCloud) {
-  import('./workers/remove-cancelled-subscriptions.ee.js');
-  import('./queues/remove-cancelled-subscriptions.ee.js');
-}
+import queues from './queues/index.js';
+import workers from './workers/index.js';
+process.on('SIGTERM', async () => {
+  for (const queue of queues) {
+    await queue.close();
+  }
+  for (const worker of workers) {
+    await worker.close();
+  }
+});
 import telemetry from './helpers/telemetry/index.js';

View File

@@ -1,5 +1,4 @@
 import { Worker } from 'bullmq';
-import process from 'node:process';
 import * as Sentry from '../helpers/sentry.ee.js';
 import redisConfig from '../config/redis.js';
@@ -15,7 +14,7 @@ import delayAsMilliseconds from '../helpers/delay-as-milliseconds.js';
 const DEFAULT_DELAY_DURATION = 0;
-export const worker = new Worker(
+const actionWorker = new Worker(
   'action',
   async (job) => {
     const { stepId, flowId, executionId, computedParameters, executionStep } =
@@ -55,11 +54,11 @@ export const worker = new Worker(
   { connection: redisConfig }
 );
-worker.on('completed', (job) => {
+actionWorker.on('completed', (job) => {
   logger.info(`JOB ID: ${job.id} - FLOW ID: ${job.data.flowId} has started!`);
 });
-worker.on('failed', (job, err) => {
+actionWorker.on('failed', (job, err) => {
   const errorMessage = `
     JOB ID: ${job.id} - FLOW ID: ${job.data.flowId} has failed to start with ${err.message}
     \n ${err.stack}
@@ -74,6 +73,4 @@ worker.on('failed', (job, err) => {
   });
 });
-process.on('SIGTERM', async () => {
-  await worker.close();
-});
+export default actionWorker;

View File

@@ -1,5 +1,4 @@
 import { Worker } from 'bullmq';
-import process from 'node:process';
 import * as Sentry from '../helpers/sentry.ee.js';
 import redisConfig from '../config/redis.js';
@@ -8,7 +7,7 @@ import appConfig from '../config/app.js';
 import User from '../models/user.js';
 import ExecutionStep from '../models/execution-step.js';
-export const worker = new Worker(
+const deleteUserWorker = new Worker(
   'delete-user',
   async (job) => {
     const { id } = job.data;
@@ -46,13 +45,13 @@ export const worker = new Worker(
   { connection: redisConfig }
 );
-worker.on('completed', (job) => {
+deleteUserWorker.on('completed', (job) => {
   logger.info(
     `JOB ID: ${job.id} - The user with the ID of '${job.data.id}' has been deleted!`
   );
 });
-worker.on('failed', (job, err) => {
+deleteUserWorker.on('failed', (job, err) => {
   const errorMessage = `
     JOB ID: ${job.id} - The user with the ID of '${job.data.id}' has failed to be deleted! ${err.message}
     \n ${err.stack}
@@ -67,6 +66,4 @@ worker.on('failed', (job, err) => {
   });
 });
-process.on('SIGTERM', async () => {
-  await worker.close();
-});
+export default deleteUserWorker;

View File

@@ -1,5 +1,4 @@
 import { Worker } from 'bullmq';
-import process from 'node:process';
 import * as Sentry from '../helpers/sentry.ee.js';
 import redisConfig from '../config/redis.js';
@@ -16,7 +15,7 @@ const isAutomatischEmail = (email) => {
   return email.endsWith('@automatisch.io');
 };
-export const worker = new Worker(
+const emailWorker = new Worker(
   'email',
   async (job) => {
     const { email, subject, template, params } = job.data;
@@ -39,13 +38,13 @@ export const worker = new Worker(
   { connection: redisConfig }
 );
-worker.on('completed', (job) => {
+emailWorker.on('completed', (job) => {
   logger.info(
     `JOB ID: ${job.id} - ${job.data.subject} email sent to ${job.data.email}!`
   );
 });
-worker.on('failed', (job, err) => {
+emailWorker.on('failed', (job, err) => {
   const errorMessage = `
     JOB ID: ${job.id} - ${job.data.subject} email to ${job.data.email} has failed to send with ${err.message}
     \n ${err.stack}
@@ -60,6 +59,4 @@ worker.on('failed', (job, err) => {
   });
 });
-process.on('SIGTERM', async () => {
-  await worker.close();
-});
+export default emailWorker;

View File

@@ -1,5 +1,4 @@
 import { Worker } from 'bullmq';
-import process from 'node:process';
 import * as Sentry from '../helpers/sentry.ee.js';
 import redisConfig from '../config/redis.js';
@@ -13,7 +12,7 @@ import {
   REMOVE_AFTER_7_DAYS_OR_50_JOBS,
 } from '../helpers/remove-job-configuration.js';
-export const worker = new Worker(
+const flowWorker = new Worker(
   'flow',
   async (job) => {
     const { flowId } = job.data;
@@ -64,11 +63,11 @@ export const worker = new Worker(
   { connection: redisConfig }
 );
-worker.on('completed', (job) => {
+flowWorker.on('completed', (job) => {
   logger.info(`JOB ID: ${job.id} - FLOW ID: ${job.data.flowId} has started!`);
 });
-worker.on('failed', async (job, err) => {
+flowWorker.on('failed', async (job, err) => {
   const errorMessage = `
     JOB ID: ${job.id} - FLOW ID: ${job.data.flowId} has failed to start with ${err.message}
     \n ${err.stack}
@@ -95,6 +94,4 @@ worker.on('failed', async (job, err) => {
   });
 });
-process.on('SIGTERM', async () => {
-  await worker.close();
-});
+export default flowWorker;

View File

@@ -0,0 +1,21 @@
import appConfig from '../config/app.js';
import actionWorker from './action.js';
import emailWorker from './email.js';
import flowWorker from './flow.js';
import triggerWorker from './trigger.js';
import deleteUserWorker from './delete-user.ee.js';
import removeCancelledSubscriptionsWorker from './remove-cancelled-subscriptions.ee.js';
const workers = [
actionWorker,
emailWorker,
flowWorker,
triggerWorker,
deleteUserWorker,
];
if (appConfig.isCloud) {
workers.push(removeCancelledSubscriptionsWorker);
}
export default workers;

View File

@@ -1,12 +1,11 @@
 import { Worker } from 'bullmq';
-import process from 'node:process';
 import { DateTime } from 'luxon';
 import * as Sentry from '../helpers/sentry.ee.js';
 import redisConfig from '../config/redis.js';
 import logger from '../helpers/logger.js';
 import Subscription from '../models/subscription.ee.js';
-export const worker = new Worker(
+const removeCancelledSubscriptionsWorker = new Worker(
   'remove-cancelled-subscriptions',
   async () => {
     await Subscription.query()
@@ -23,13 +22,13 @@ export const worker = new Worker(
   { connection: redisConfig }
 );
-worker.on('completed', (job) => {
+removeCancelledSubscriptionsWorker.on('completed', (job) => {
   logger.info(
     `JOB ID: ${job.id} - The cancelled subscriptions have been removed!`
   );
 });
-worker.on('failed', (job, err) => {
+removeCancelledSubscriptionsWorker.on('failed', (job, err) => {
   const errorMessage = `
     JOB ID: ${job.id} - ERROR: The cancelled subscriptions can not be removed! ${err.message}
     \n ${err.stack}
@@ -42,6 +41,4 @@ worker.on('failed', (job, err) => {
   });
 });
-process.on('SIGTERM', async () => {
-  await worker.close();
-});
+export default removeCancelledSubscriptionsWorker;

View File

@@ -1,5 +1,4 @@
 import { Worker } from 'bullmq';
-import process from 'node:process';
 import * as Sentry from '../helpers/sentry.ee.js';
 import redisConfig from '../config/redis.js';
@@ -12,7 +11,7 @@ import {
   REMOVE_AFTER_7_DAYS_OR_50_JOBS,
 } from '../helpers/remove-job-configuration.js';
-export const worker = new Worker(
+const triggerWorker = new Worker(
   'trigger',
   async (job) => {
     const { flowId, executionId, stepId, executionStep } = await processTrigger(
@@ -41,11 +40,11 @@ export const worker = new Worker(
   { connection: redisConfig }
 );
-worker.on('completed', (job) => {
+triggerWorker.on('completed', (job) => {
   logger.info(`JOB ID: ${job.id} - FLOW ID: ${job.data.flowId} has started!`);
 });
-worker.on('failed', (job, err) => {
+triggerWorker.on('failed', (job, err) => {
   const errorMessage = `
     JOB ID: ${job.id} - FLOW ID: ${job.data.flowId} has failed to start with ${err.message}
     \n ${err.stack}
@@ -60,6 +59,4 @@ worker.on('failed', (job, err) => {
   });
 });
-process.on('SIGTERM', async () => {
-  await worker.close();
-});
+export default triggerWorker;

View File

@@ -1,16 +1,15 @@
+import { faker } from '@faker-js/faker';
 import { createRole } from './role.js';
+import RoleMapping from '../../src/models/role-mapping.ee.js';
 import { createSamlAuthProvider } from './saml-auth-provider.ee.js';
-import SamlAuthProviderRoleMapping from '../../src/models/saml-auth-providers-role-mapping.ee.js';
 export const createRoleMapping = async (params = {}) => {
-  params.roleId = params?.roleId || (await createRole()).id;
+  params.roleId = params.roleId || (await createRole()).id;
   params.samlAuthProviderId =
-    params?.samlAuthProviderId || (await createSamlAuthProvider()).id;
-  params.remoteRoleName = params?.remoteRoleName || 'User';
-  const samlAuthProviderRoleMapping =
-    await SamlAuthProviderRoleMapping.query().insertAndFetch(params);
-  return samlAuthProviderRoleMapping;
+    params.samlAuthProviderId || (await createSamlAuthProvider()).id;
+  params.remoteRoleName = params.remoteRoleName || faker.person.jobType();
+  const roleMapping = await RoleMapping.query().insertAndFetch(params);
+  return roleMapping;
 };
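As a hedged illustration of how the reworked factory would typically be exercised in a vitest spec; the spec file, import path, and assertions below are assumptions for the example, not part of the diff.

```js
// Hypothetical spec built on the createRoleMapping factory (import path assumed).
import { describe, it, expect } from 'vitest';
import { createRoleMapping } from '../factories/role-mapping.js';

describe('createRoleMapping factory', () => {
  it('persists a role mapping with generated defaults', async () => {
    const roleMapping = await createRoleMapping({ remoteRoleName: 'Admin' });

    expect(roleMapping.remoteRoleName).toBe('Admin');
    expect(roleMapping.roleId).toBeDefined();
    expect(roleMapping.samlAuthProviderId).toBeDefined();
  });
});
```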

View File

@@ -1,16 +0,0 @@
import { faker } from '@faker-js/faker';
import { createRole } from './role.js';
import SamlAuthProvidersRoleMapping from '../../src/models/saml-auth-providers-role-mapping.ee.js';
import { createSamlAuthProvider } from './saml-auth-provider.ee.js';
export const createSamlAuthProvidersRoleMapping = async (params = {}) => {
params.roleId = params.roleId || (await createRole()).id;
params.samlAuthProviderId =
params.samlAuthProviderId || (await createSamlAuthProvider()).id;
params.remoteRoleName = params.remoteRoleName || faker.person.jobType();
const samlAuthProvider =
await SamlAuthProvidersRoleMapping.query().insertAndFetch(params);
return samlAuthProvider;
};

View File

@@ -15,7 +15,7 @@ const getRoleMappingsMock = async (roleMappings) => {
       currentPage: null,
       isArray: true,
       totalPages: null,
-      type: 'SamlAuthProvidersRoleMapping',
+      type: 'RoleMapping',
     },
   };
 };

View File

@@ -15,7 +15,7 @@ const createRoleMappingsMock = async (roleMappings) => {
       currentPage: null,
       isArray: true,
       totalPages: null,
-      type: 'SamlAuthProvidersRoleMapping',
+      type: 'RoleMapping',
     },
   };
 };

View File

@@ -2,8 +2,25 @@ import { defineConfig } from 'vitest/config';
 export default defineConfig({
   test: {
+    root: './',
     environment: 'node',
     setupFiles: ['./test/setup/global-hooks.js'],
     globals: true,
+    reporters: process.env.GITHUB_ACTIONS ? ['dot', 'github-actions'] : ['dot'],
+    coverage: {
+      reportOnFailure: true,
+      provider: 'v8',
+      reportsDirectory: './coverage',
+      reporter: ['text', 'lcov'],
+      all: true,
+      include: ['**/src/models/**', '**/src/controllers/**'],
+      thresholds: {
+        autoUpdate: true,
+        statements: 93.41,
+        branches: 93.46,
+        functions: 95.95,
+        lines: 93.41,
+      },
+    },
   },
 });

packages/backend/yarn.lock (new file, 4821 lines)

File diff suppressed because it is too large.

packages/docs/.gitignore (vendored, new file, 1 line)
View File

@@ -0,0 +1 @@
pages/.vitepress/cache

View File

@@ -4,6 +4,7 @@
   "license": "See LICENSE file",
   "description": "The open source Zapier alternative. Build workflow automation without spending time and money.",
   "private": true,
+  "type": "module",
   "scripts": {
     "dev": "vitepress dev pages --port 3002",
     "build": "vitepress build pages",

View File

@@ -6,11 +6,19 @@ Clone main branch of Automatisch.
 git clone git@github.com:automatisch/automatisch.git
 ```
-Then, install the dependencies.
+Then, install the dependencies for both backend and web packages separately.
 ```bash
 cd automatisch
+# Install backend dependencies
+cd packages/backend
 yarn install
+# Install web dependencies
+cd packages/web
+yarn install
 ```
 ## Backend
@@ -53,12 +61,14 @@ yarn db:seed:user
 Start the main backend server.
 ```bash
+cd packages/backend
 yarn dev
 ```
 Start the worker server in another terminal tab.
 ```bash
+cd packages/backend
 yarn worker
 ```
@@ -84,6 +94,7 @@ It will automatically open [http://localhost:3001](http://localhost:3001) in you
 ```bash
 cd packages/docs
+yarn install
 yarn dev
 ```

View File

@@ -1,6 +1,6 @@
 # Repository Structure
-We use `lerna` with `yarn workspaces` to manage the mono repository. We have the following packages:
+We manage a monorepo structure with the following packages:
 ```
 .
@@ -15,3 +15,5 @@ We use `lerna` with `yarn workspaces` to manage the mono repository. We have the
 - `docs` - The docs package contains the documentation website.
 - `e2e-tests` - The e2e-tests package contains the end-to-end tests for the internal usage.
 - `web` - The web package contains the frontend application of Automatisch.
+Each package is independently managed, and has its own package.json file to manage dependencies. This allows for better isolation and flexibility.

packages/docs/yarn.lock (new file, 1192 lines)

File diff suppressed because it is too large.

View File

@@ -29,10 +29,12 @@
     "@playwright/test": "^1.45.1"
   },
   "dependencies": {
+    "axios": "^1.6.0",
     "dotenv": "^16.3.1",
     "eslint": "^8.13.0",
     "eslint-config-prettier": "^8.3.0",
     "eslint-plugin-prettier": "^4.0.0",
+    "knex": "^2.4.0",
     "luxon": "^3.4.4",
     "micro": "^10.0.1",
     "pg": "^8.12.0",

packages/e2e-tests/yarn.lock (new file, 1099 lines)

File diff suppressed because it is too large.

View File

@@ -18,6 +18,7 @@
     "@testing-library/jest-dom": "^5.11.4",
     "@testing-library/react": "^11.1.0",
     "@testing-library/user-event": "^12.1.10",
+    "axios": "^1.6.0",
     "clipboard-copy": "^4.0.1",
     "compare-versions": "^4.1.3",
     "lodash": "^4.17.21",

View File

@@ -112,7 +112,7 @@ export default function ResetPasswordForm() {
           <Alert
             data-test="accept-invitation-form-error"
             severity="error"
-            sx={{ mt: 1, fontWeight: 500 }}
+            sx={{ mt: 1 }}
           >
             {formatMessage('acceptInvitationForm.invalidToken')}
           </Alert>

View File

@@ -126,7 +126,7 @@ function AddAppConnection(props) {
       </DialogTitle>
       {authDocUrl && (
-        <Alert severity="info" sx={{ fontWeight: 300 }}>
+        <Alert severity="info">
           {formatMessage('addAppConnection.callToDocs', {
             appName: name,
             docsLink: generateExternalLink(authDocUrl),
@@ -138,7 +138,7 @@ function AddAppConnection(props) {
           <Alert
             data-test="add-connection-error"
             severity="error"
-            sx={{ mt: 1, fontWeight: 500, wordBreak: 'break-all' }}
+            sx={{ mt: 1, wordBreak: 'break-all' }}
           >
             {!errorDetails && errorMessage}
             {errorDetails && (

View File

@@ -32,10 +32,7 @@ function AdminApplicationAuthClientDialog(props) {
     <Dialog open={true} onClose={onClose}>
       <DialogTitle>{title}</DialogTitle>
       {error && (
-        <Alert
-          severity="error"
-          sx={{ mt: 1, fontWeight: 500, wordBreak: 'break-all' }}
-        >
+        <Alert severity="error" sx={{ mt: 1, wordBreak: 'break-all' }}>
           {error.message}
         </Alert>
       )}

View File

@@ -6,7 +6,7 @@ import FormHelperText from '@mui/material/FormHelperText';
 import ArrowDropDownIcon from '@mui/icons-material/ArrowDropDown';
 import ClearIcon from '@mui/icons-material/Clear';
 import { ActionButtonsWrapper } from './style';
-import ClickAwayListener from '@mui/base/ClickAwayListener';
+import { ClickAwayListener } from '@mui/base/ClickAwayListener';
 import InputLabel from '@mui/material/InputLabel';
 import { createEditor } from 'slate';
 import { Editable, ReactEditor } from 'slate-react';

View File

@@ -1,8 +1,8 @@
 import * as React from 'react';
 import Paper from '@mui/material/Paper';
 import Typography from '@mui/material/Typography';
+import Alert from '@mui/material/Alert';
 import LoadingButton from '@mui/lab/LoadingButton';
-import { enqueueSnackbar } from 'notistack';
 import useForgotPassword from 'hooks/useForgotPassword';
 import Form from 'components/Form';
@@ -12,25 +12,17 @@ import useFormatMessage from 'hooks/useFormatMessage';
 export default function ForgotPasswordForm() {
   const formatMessage = useFormatMessage();
   const {
-    mutateAsync: forgotPassword,
+    mutate: forgotPassword,
     isPending: loading,
     isSuccess,
+    isError,
+    error,
   } = useForgotPassword();
-  const handleSubmit = async (values) => {
-    const { email } = values;
-    try {
-      await forgotPassword({
-        email,
-      });
-    } catch (error) {
-      enqueueSnackbar(
-        error?.message || formatMessage('forgotPasswordForm.error'),
-        {
-          variant: 'error',
-        },
-      );
-    }
+  const handleSubmit = ({ email }) => {
+    forgotPassword({
+      email,
+    });
   };
   return (
@@ -57,6 +49,16 @@ export default function ForgotPasswordForm() {
           margin="dense"
           autoComplete="username"
         />
+        {isError && (
+          <Alert severity="error" sx={{ mt: 2 }}>
+            {error?.message || formatMessage('forgotPasswordForm.error')}
+          </Alert>
+        )}
+        {isSuccess && (
+          <Alert severity="success" sx={{ mt: 2 }}>
+            {formatMessage('forgotPasswordForm.instructionsSent')}
+          </Alert>
+        )}
         <LoadingButton
           type="submit"
           variant="contained"
@@ -68,14 +70,6 @@ export default function ForgotPasswordForm() {
         >
           {formatMessage('forgotPasswordForm.submit')}
         </LoadingButton>
-        {isSuccess && (
-          <Typography
-            variant="body1"
-            sx={{ color: (theme) => theme.palette.success.main }}
-          >
-            {formatMessage('forgotPasswordForm.instructionsSent')}
-          </Typography>
-        )}
       </Form>
     </Paper>
   );

View File

@@ -188,7 +188,7 @@ function InstallationForm() {
         )}
       />
       {install.isSuccess && (
-        <Alert data-test="success-alert" severity="success" sx={{ mt: 3, fontWeight: 500 }}>
+        <Alert data-test="success-alert" severity="success" sx={{ mt: 3 }}>
           {formatMessage('installationForm.success', {
             link: (str) => (
               <Link

View File

@@ -2,6 +2,7 @@ import * as React from 'react';
 import { useNavigate, Link as RouterLink } from 'react-router-dom';
 import Paper from '@mui/material/Paper';
 import Link from '@mui/material/Link';
+import Alert from '@mui/material/Alert';
 import Typography from '@mui/material/Typography';
 import LoadingButton from '@mui/lab/LoadingButton';
 import useAuthentication from 'hooks/useAuthentication';
@@ -11,16 +12,18 @@ import Form from 'components/Form';
 import TextField from 'components/TextField';
 import useFormatMessage from 'hooks/useFormatMessage';
 import useCreateAccessToken from 'hooks/useCreateAccessToken';
-import useEnqueueSnackbar from 'hooks/useEnqueueSnackbar';
 function LoginForm() {
   const isCloud = useCloud();
   const navigate = useNavigate();
   const formatMessage = useFormatMessage();
-  const enqueueSnackbar = useEnqueueSnackbar();
   const authentication = useAuthentication();
-  const { mutateAsync: createAccessToken, isPending: loading } =
-    useCreateAccessToken();
+  const {
+    mutateAsync: createAccessToken,
+    isPending: loading,
+    error,
+    isError,
+  } = useCreateAccessToken();
   React.useEffect(() => {
     if (authentication.isAuthenticated) {
@@ -37,11 +40,19 @@ function LoginForm() {
       });
       const { token } = data;
       authentication.updateToken(token);
-    } catch (error) {
-      enqueueSnackbar(error?.message || formatMessage('loginForm.error'), {
-        variant: 'error',
-      });
-    }
+    } catch {}
+  };
+
+  const renderError = () => {
+    const errors = error?.response?.data?.errors?.general || [
+      error?.message || formatMessage('loginForm.error'),
+    ];
+    return errors.map((error) => (
+      <Alert severity="error" sx={{ mt: 2 }}>
+        {error}
+      </Alert>
+    ));
   };
   return (
@@ -94,6 +105,8 @@ function LoginForm() {
           </Link>
         )}
+        {isError && renderError()}
         <LoadingButton
           type="submit"
           variant="contained"

View File

@@ -1,5 +1,5 @@
 import PropTypes from 'prop-types';
-import ClickAwayListener from '@mui/base/ClickAwayListener';
+import { ClickAwayListener } from '@mui/base/ClickAwayListener';
 import FormHelperText from '@mui/material/FormHelperText';
 import InputLabel from '@mui/material/InputLabel';
 import * as React from 'react';

View File

@@ -2,6 +2,7 @@ import { yupResolver } from '@hookform/resolvers/yup';
 import LoadingButton from '@mui/lab/LoadingButton';
 import Paper from '@mui/material/Paper';
 import Typography from '@mui/material/Typography';
+import Alert from '@mui/material/Alert';
 import useEnqueueSnackbar from 'hooks/useEnqueueSnackbar';
 import * as React from 'react';
 import { useNavigate, useSearchParams } from 'react-router-dom';
@@ -30,6 +31,8 @@ export default function ResetPasswordForm() {
     mutateAsync: resetPassword,
     isPending,
     isSuccess,
+    error,
+    isError,
   } = useResetPassword();
   const token = searchParams.get('token');
@@ -47,14 +50,23 @@ export default function ResetPasswordForm() {
         },
       });
       navigate(URLS.LOGIN);
-    } catch (error) {
-      enqueueSnackbar(
-        error?.message || formatMessage('resetPasswordForm.error'),
-        {
-          variant: 'error',
-        },
-      );
-    }
+    } catch {}
+  };
+
+  const renderError = () => {
+    if (!isError) {
+      return null;
+    }
+    const errors = error?.response?.data?.errors?.general || [
+      error?.message || formatMessage('resetPasswordForm.error'),
+    ];
+    return errors.map((error) => (
+      <Alert severity="error" sx={{ mt: 2 }}>
+        {error}
+      </Alert>
+    ));
   };
   return (
@@ -96,7 +108,6 @@ export default function ResetPasswordForm() {
             : ''
         }
       />
-
       <TextField
         label={formatMessage(
           'resetPasswordForm.confirmPasswordFieldLabel',
@@ -117,7 +128,7 @@ export default function ResetPasswordForm() {
             : ''
         }
       />
-
+      {renderError()}
       <LoadingButton
         type="submit"
         variant="contained"

View File

@@ -7,7 +7,7 @@ import FormControl from '@mui/material/FormControl';
 import SearchIcon from '@mui/icons-material/Search';
 import useFormatMessage from 'hooks/useFormatMessage';
-export default function SearchInput({ onChange }) {
+export default function SearchInput({ onChange, defaultValue = '' }) {
   const formatMessage = useFormatMessage();
   return (
     <FormControl variant="outlined" fullWidth>
@@ -16,6 +16,7 @@ export default function SearchInput({ onChange }) {
       </InputLabel>
       <OutlinedInput
+        defaultValue={defaultValue}
         id="search-input"
         type="text"
         size="medium"
@@ -34,4 +35,5 @@ export default function SearchInput({ onChange }) {
 SearchInput.propTypes = {
   onChange: PropTypes.func,
+  defaultValue: PropTypes.string,
 };
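A minimal sketch of how a caller would use the new defaultValue prop to pre-fill the input from a URL search param; the component name and import path below are assumptions, and the Flows page diff later in this changeset wires it up the same way.

```js
// Hypothetical caller: defaultValue pre-fills the search box from the flowName search param.
import * as React from 'react';
import { useSearchParams } from 'react-router-dom';
import SearchInput from 'components/SearchInput';

function FlowsToolbar() {
  const [searchParams, setSearchParams] = useSearchParams();
  const flowName = searchParams.get('flowName') || '';

  return (
    <SearchInput
      defaultValue={flowName}
      onChange={(event) => setSearchParams({ flowName: event.target.value })}
    />
  );
}

export default FlowsToolbar;
```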

View File

@@ -2,7 +2,7 @@ import PropTypes from 'prop-types';
 import ArrowDropDownIcon from '@mui/icons-material/ArrowDropDown';
 import Button from '@mui/material/Button';
 import ButtonGroup from '@mui/material/ButtonGroup';
-import ClickAwayListener from '@mui/material/ClickAwayListener';
+import { ClickAwayListener } from '@mui/base/ClickAwayListener';
 import Grow from '@mui/material/Grow';
 import MenuItem from '@mui/material/MenuItem';
 import MenuList from '@mui/material/MenuList';

View File

@@ -84,10 +84,7 @@ function TestSubstep(props) {
       }}
     >
       {hasError && (
-        <Alert
-          severity="error"
-          sx={{ mb: 2, fontWeight: 500, width: '100%' }}
-        >
+        <Alert severity="error" sx={{ mb: 2, width: '100%' }}>
           <pre style={{ margin: 0, whiteSpace: 'pre-wrap' }}>
             {JSON.stringify(errorDetails, null, 2)}
           </pre>
@@ -104,13 +101,11 @@ function TestSubstep(props) {
           severity="warning"
           sx={{ mb: 1, width: '100%' }}
         >
-          <AlertTitle sx={{ fontWeight: 700 }}>
+          <AlertTitle>
             {formatMessage('flowEditor.noTestDataTitle')}
           </AlertTitle>
-          <Box sx={{ fontWeight: 400 }}>
-            {formatMessage('flowEditor.noTestDataMessage')}
-          </Box>
+          <Box>{formatMessage('flowEditor.noTestDataMessage')}</Box>
         </Alert>
       )}

View File

@@ -66,8 +66,8 @@ function RoleMappings({ provider, providerLoading }) {
   const enqueueSnackbar = useEnqueueSnackbar();
   const {
-    mutateAsync: updateSamlAuthProvidersRoleMappings,
-    isPending: isUpdateSamlAuthProvidersRoleMappingsPending,
+    mutateAsync: updateRoleMappings,
+    isPending: isUpdateRoleMappingsPending,
   } = useAdminUpdateSamlAuthProviderRoleMappings(provider?.id);
   const { data, isLoading: isAdminSamlAuthProviderRoleMappingsLoading } =
@@ -79,7 +79,7 @@ function RoleMappings({ provider, providerLoading }) {
   const handleRoleMappingsUpdate = async (values) => {
     try {
       if (provider?.id) {
-        await updateSamlAuthProvidersRoleMappings(
+        await updateRoleMappings(
           values.roleMappings.map(({ roleId, remoteRoleName }) => ({
             roleId,
             remoteRoleName,
@@ -148,7 +148,7 @@ function RoleMappings({ provider, providerLoading }) {
             variant="contained"
             color="primary"
             sx={{ boxShadow: 2 }}
-            loading={isUpdateSamlAuthProvidersRoleMappingsPending}
+            loading={isUpdateRoleMappingsPending}
           >
             {formatMessage('roleMappingsForm.save')}
           </LoadingButton>

View File

@@ -124,7 +124,6 @@ export default function CreateUser() {
           <Alert
             severity="info"
             color="primary"
-            sx={{ fontWeight: '500' }}
             data-test="invitation-email-info-alert"
           >
             {formatMessage('createUser.invitationEmailInfo', {

View File

@@ -42,13 +42,9 @@ export default function Execution() {
       <Grid container item sx={{ mt: 2, mb: [2, 5] }} rowGap={3}>
         {!isExecutionStepsLoading && !data?.pages?.[0].data.length && (
           <Alert severity="warning" sx={{ flex: 1 }}>
-            <AlertTitle sx={{ fontWeight: 700 }}>
-              {formatMessage('execution.noDataTitle')}
-            </AlertTitle>
+            <AlertTitle>{formatMessage('execution.noDataTitle')}</AlertTitle>
-            <Box sx={{ fontWeight: 400 }}>
-              {formatMessage('execution.noDataMessage')}
-            </Box>
+            <Box>{formatMessage('execution.noDataMessage')}</Box>
           </Alert>
         )}

View File

@@ -1,5 +1,5 @@
 import * as React from 'react';
-import { Link, useSearchParams } from 'react-router-dom';
+import { Link, useNavigate, useSearchParams } from 'react-router-dom';
 import debounce from 'lodash/debounce';
 import Box from '@mui/material/Box';
 import Grid from '@mui/material/Grid';
@@ -23,13 +23,18 @@ import useLazyFlows from 'hooks/useLazyFlows';
 export default function Flows() {
   const formatMessage = useFormatMessage();
+  const navigate = useNavigate();
   const [searchParams, setSearchParams] = useSearchParams();
   const page = parseInt(searchParams.get('page') || '', 10) || 1;
-  const [flowName, setFlowName] = React.useState('');
-  const [isLoading, setIsLoading] = React.useState(false);
+  const flowName = searchParams.get('flowName') || '';
+  const [isLoading, setIsLoading] = React.useState(true);
   const currentUserAbility = useCurrentUserAbility();
-  const { data, mutate: fetchFlows } = useLazyFlows(
+  const {
+    data,
+    mutate: fetchFlows,
+    isSuccess,
+  } = useLazyFlows(
     { flowName, page },
     {
       onSettled: () => {
@@ -38,6 +43,36 @@ export default function Flows() {
     },
   );
+  const flows = data?.data || [];
+  const pageInfo = data?.meta;
+  const hasFlows = flows?.length;
+  const navigateToLastPage = isSuccess && !hasFlows && page > 1;
+  const onSearchChange = React.useCallback((event) => {
+    setSearchParams({ flowName: event.target.value });
+  }, []);
+  const getPathWithSearchParams = (page, flowName) => {
+    const searchParams = new URLSearchParams();
+    if (page > 1) {
+      searchParams.set('page', page);
+    }
+    if (flowName) {
+      searchParams.set('flowName', flowName);
+    }
+    return { search: searchParams.toString() };
+  };
+  const onDuplicateFlow = () => {
+    if (pageInfo?.currentPage > 1) {
+      navigate(getPathWithSearchParams(1, flowName));
+    } else {
+      fetchFlows();
+    }
+  };
   const fetchData = React.useMemo(
     () => debounce(fetchFlows, 300),
     [fetchFlows],
@@ -54,21 +89,14 @@ export default function Flows() {
   }, [fetchData, flowName, page]);
   React.useEffect(
-    function resetPageOnSearch() {
-      // reset search params which only consists of `page`
-      setSearchParams({});
+    function redirectToLastPage() {
+      if (navigateToLastPage) {
+        navigate(getPathWithSearchParams(pageInfo.totalPages, flowName));
+      }
     },
-    [flowName],
+    [navigateToLastPage],
   );
-  const flows = data?.data || [];
-  const pageInfo = data?.meta;
-  const hasFlows = flows?.length;
-  const onSearchChange = React.useCallback((event) => {
-    setFlowName(event.target.value);
-  }, []);
   return (
     <Box sx={{ py: 3 }}>
       <Container>
@@ -78,7 +106,7 @@ export default function Flows() {
           </Grid>
           <Grid item xs={12} sm="auto" order={{ xs: 2, sm: 1 }}>
-            <SearchInput onChange={onSearchChange} />
+            <SearchInput onChange={onSearchChange} defaultValue={flowName} />
           </Grid>
           <Grid
@@ -111,7 +139,7 @@ export default function Flows() {
         </Grid>
         <Divider sx={{ mt: [2, 0], mb: 2 }} />
-        {isLoading && (
+        {(isLoading || navigateToLastPage) && (
           <CircularProgress sx={{ display: 'block', margin: '20px auto' }} />
         )}
         {!isLoading &&
@@ -119,11 +147,11 @@ export default function Flows() {
             <FlowRow
               key={flow.id}
               flow={flow}
-              onDuplicateFlow={fetchFlows}
+              onDuplicateFlow={onDuplicateFlow}
               onDeleteFlow={fetchFlows}
             />
           ))}
-        {!isLoading && !hasFlows && (
+        {!isLoading && !navigateToLastPage && !hasFlows && (
           <NoResultFound
             text={formatMessage('flows.noFlows')}
             {...(currentUserAbility.can('create', 'Flow') && {
@@ -131,23 +159,23 @@ export default function Flows() {
             })}
           />
         )}
-        {!isLoading && pageInfo && pageInfo.totalPages > 1 && (
-          <Pagination
-            sx={{ display: 'flex', justifyContent: 'center', mt: 3 }}
-            page={pageInfo?.currentPage}
-            count={pageInfo?.totalPages}
-            onChange={(event, page) =>
-              setSearchParams({ page: page.toString() })
-            }
-            renderItem={(item) => (
-              <PaginationItem
-                component={Link}
-                to={`${item.page === 1 ? '' : `?page=${item.page}`}`}
-                {...item}
-              />
-            )}
-          />
-        )}
+        {!isLoading &&
+          !navigateToLastPage &&
+          pageInfo &&
+          pageInfo.totalPages > 1 && (
+            <Pagination
+              sx={{ display: 'flex', justifyContent: 'center', mt: 3 }}
+              page={pageInfo?.currentPage}
+              count={pageInfo?.totalPages}
+              renderItem={(item) => (
+                <PaginationItem
+                  component={Link}
+                  to={getPathWithSearchParams(item.page, flowName)}
+                  {...item}
+                />
+              )}
+            />
+          )}
       </Container>
     </Box>
   );

View File

@@ -266,8 +266,8 @@ function ProfileSettings() {
       </Grid>
       <Grid item xs={12} justifyContent="flex-end" sx={{ pt: 5 }}>
-        <Alert variant="outlined" severity="error" sx={{ fontWeight: 500 }}>
-          <AlertTitle sx={{ fontWeight: 700 }}>
+        <Alert variant="outlined" severity="error">
+          <AlertTitle>
             {formatMessage('profileSettings.deleteMyAccount')}
           </AlertTitle>

View File

@@ -278,6 +278,20 @@ export const defaultTheme = createTheme({
         }),
       },
     },
+    MuiAlert: {
+      styleOverrides: {
+        root: ({ theme }) => ({
+          fontWeight: theme.typography.fontWeightRegular,
+        }),
+      },
+    },
+    MuiAlertTitle: {
+      styleOverrides: {
+        root: ({ theme }) => ({
+          fontWeight: theme.typography.fontWeightBold,
+        }),
+      },
+    },
   },
 });
 export const mationTheme = createTheme(

packages/web/yarn.lock (new file, 11069 lines)

File diff suppressed because it is too large.

yarn.lock (17164 lines)

File diff suppressed because it is too large.