From 49755303f77c67f6f0ca6e35ff58270045e5e30a Mon Sep 17 00:00:00 2001
From: Ali BARIN
Date: Tue, 17 Jan 2023 22:17:06 +0100
Subject: [PATCH 1/3] feat(openai): add list models dynamic data

---
 .../src/apps/openai/dynamic-data/index.ts     |  3 +++
 .../openai/dynamic-data/list-models/index.ts  | 19 +++++++++++++++++++
 packages/backend/src/apps/openai/index.ts     |  2 ++
 3 files changed, 24 insertions(+)
 create mode 100644 packages/backend/src/apps/openai/dynamic-data/index.ts
 create mode 100644 packages/backend/src/apps/openai/dynamic-data/list-models/index.ts

diff --git a/packages/backend/src/apps/openai/dynamic-data/index.ts b/packages/backend/src/apps/openai/dynamic-data/index.ts
new file mode 100644
index 00000000..4072dcdd
--- /dev/null
+++ b/packages/backend/src/apps/openai/dynamic-data/index.ts
@@ -0,0 +1,3 @@
+import listModels from './list-models';
+
+export default [listModels];

diff --git a/packages/backend/src/apps/openai/dynamic-data/list-models/index.ts b/packages/backend/src/apps/openai/dynamic-data/list-models/index.ts
new file mode 100644
index 00000000..645b9f1e
--- /dev/null
+++ b/packages/backend/src/apps/openai/dynamic-data/list-models/index.ts
@@ -0,0 +1,19 @@
+import { IGlobalVariable } from '@automatisch/types';
+
+export default {
+  name: 'List models',
+  key: 'listModels',
+
+  async run($: IGlobalVariable) {
+    const response = await $.http.get('/v1/models');
+
+    const models = response.data.data.map((model: { id: string }) => {
+      return {
+        value: model.id,
+        name: model.id,
+      };
+    });
+
+    return { data: models };
+  },
+};

diff --git a/packages/backend/src/apps/openai/index.ts b/packages/backend/src/apps/openai/index.ts
index 9eb9e595..5be0b394 100644
--- a/packages/backend/src/apps/openai/index.ts
+++ b/packages/backend/src/apps/openai/index.ts
@@ -2,6 +2,7 @@ import defineApp from '../../helpers/define-app';
 import addAuthHeader from './common/add-auth-header';
 import auth from './auth';
 import actions from './actions';
+import dynamicData from './dynamic-data';
 
 export default defineApp({
   name: 'OpenAI',
@@ -15,4 +16,5 @@ export default defineApp({
   beforeRequest: [addAuthHeader],
   auth,
   actions,
+  dynamicData,
 });

From 253c2c4317d7d77ed887a5119fe26b80ea2f054a Mon Sep 17 00:00:00 2001
From: Ali BARIN
Date: Tue, 17 Jan 2023 22:17:19 +0100
Subject: [PATCH 2/3] feat(openai): add send prompt action

---
 .../backend/src/apps/openai/actions/index.ts  |   3 +-
 .../apps/openai/actions/send-prompt/index.ts  | 104 ++++++++++++++++++
 2 files changed, 106 insertions(+), 1 deletion(-)
 create mode 100644 packages/backend/src/apps/openai/actions/send-prompt/index.ts

diff --git a/packages/backend/src/apps/openai/actions/index.ts b/packages/backend/src/apps/openai/actions/index.ts
index 41a12e54..1f89da7a 100644
--- a/packages/backend/src/apps/openai/actions/index.ts
+++ b/packages/backend/src/apps/openai/actions/index.ts
@@ -1,3 +1,4 @@
 import checkModeration from './check-moderation';
+import sendPrompt from './send-prompt';
 
-export default [checkModeration];
+export default [checkModeration, sendPrompt];

diff --git a/packages/backend/src/apps/openai/actions/send-prompt/index.ts b/packages/backend/src/apps/openai/actions/send-prompt/index.ts
new file mode 100644
index 00000000..76f8d439
--- /dev/null
+++ b/packages/backend/src/apps/openai/actions/send-prompt/index.ts
@@ -0,0 +1,104 @@
+import defineAction from '../../../../helpers/define-action';
+
+const castFloatOrUndefined = (value: string | null) => {
+  return value === '' || value == null ? undefined : parseFloat(value);
+};
+
+export default defineAction({
+  name: 'Send prompt',
+  key: 'sendPrompt',
+  description: 'Creates a completion for the provided prompt and parameters.',
+  arguments: [
+    {
+      label: 'Model',
+      key: 'model',
+      type: 'dropdown' as const,
+      required: true,
+      variables: false,
+      source: {
+        type: 'query',
+        name: 'getDynamicData',
+        arguments: [
+          {
+            name: 'key',
+            value: 'listModels',
+          },
+        ],
+      },
+    },
+    {
+      label: 'Prompt',
+      key: 'prompt',
+      type: 'string' as const,
+      required: true,
+      variables: true,
+      description: 'The prompt to generate a completion for.',
+    },
+    {
+      label: 'Temperature',
+      key: 'temperature',
+      type: 'string' as const,
+      required: false,
+      variables: true,
+      description: 'What sampling temperature to use. Higher values mean the model will take more risk. Try 0.9 for more creative applications, and 0 for ones with a well-defined answer. We generally recommend altering this or Top P but not both.',
+    },
+    {
+      label: 'Maximum tokens',
+      key: 'maxTokens',
+      type: 'string' as const,
+      required: false,
+      variables: true,
+      description: 'The maximum number of tokens to generate in the completion.',
+    },
+    {
+      label: 'Stop Sequence',
+      key: 'stopSequence',
+      type: 'string' as const,
+      required: false,
+      variables: true,
+      description: 'Single stop sequence where the API will stop generating further tokens. The returned text will not contain the stop sequence.',
+    },
+    {
+      label: 'Top P',
+      key: 'topP',
+      type: 'string' as const,
+      required: false,
+      variables: true,
+      description: 'An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with Top P probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.',
+    },
+    {
+      label: 'Frequency Penalty',
+      key: 'frequencyPenalty',
+      type: 'string' as const,
+      required: false,
+      variables: true,
+      description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`,
+    },
+    {
+      label: 'Presence Penalty',
+      key: 'presencePenalty',
+      type: 'string' as const,
+      required: false,
+      variables: true,
+      description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.`,
+    },
+  ],
+
+  async run($) {
+    const payload = {
+      model: $.step.parameters.model as string,
+      prompt: $.step.parameters.prompt as string,
+      temperature: castFloatOrUndefined($.step.parameters.temperature as string),
+      max_tokens: castFloatOrUndefined($.step.parameters.maxTokens as string),
+      stop: ($.step.parameters.stopSequence as string) || null,
+      top_p: castFloatOrUndefined($.step.parameters.topP as string),
+      frequency_penalty: castFloatOrUndefined($.step.parameters.frequencyPenalty as string),
+      presence_penalty: castFloatOrUndefined($.step.parameters.presencePenalty as string),
+    };
+    const { data } = await $.http.post('/v1/completions', payload);
+
+    $.setActionItem({
+      raw: data,
+    });
+  },
+});

From 4db738a7f6075625c3e131cebdbeeb851574dee0 Mon Sep 17 00:00:00 2001
From: Ali BARIN
Date: Tue, 17 Jan 2023 22:18:25 +0100
Subject: [PATCH 3/3] docs(openai): add send prompt action

---
 packages/docs/pages/apps/openai/actions.md | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/packages/docs/pages/apps/openai/actions.md b/packages/docs/pages/apps/openai/actions.md
index aafeb453..be3d2ca5 100644
--- a/packages/docs/pages/apps/openai/actions.md
+++ b/packages/docs/pages/apps/openai/actions.md
@@ -1,8 +1,10 @@
 ---
 favicon: /favicons/openai.svg
 items:
-  - name: Check moderation Text
+  - name: Check moderation
     desc: Checks for hate, hate/threatening, self-harm, sexual, sexual/minors, violence, or violence/graphic content in text.
+  - name: Send prompt
+    desc: Creates a completion for the provided prompt and parameters.
 ---
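
Note on the dynamic data in PATCH 1/3: the Model dropdown of the send prompt
action is filled at runtime by the listModels entry, which maps the ids
returned by GET /v1/models into { value, name } options. A minimal standalone
sketch of the same lookup, not part of the patches, assuming Node 18+ for the
global fetch and an OPENAI_API_KEY environment variable:

  // Hypothetical standalone equivalent of the listModels dynamic data.
  type DropdownOption = { value: string; name: string };

  async function listModels(): Promise<DropdownOption[]> {
    const response = await fetch('https://api.openai.com/v1/models', {
      headers: { Authorization: `Bearer ${process.env.OPENAI_API_KEY}` },
    });
    const body = await response.json();

    // The endpoint responds with { data: [{ id: '...', ... }, ...] };
    // each model id doubles as the option value and its display name.
    return body.data.map((model: { id: string }) => ({
      value: model.id,
      name: model.id,
    }));
  }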
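
Note on the action in PATCH 2/3: the payload assembled in run($) maps
one-to-one onto the body of POST /v1/completions; optional fields left empty
are cast to undefined, so JSON.stringify drops them and the API defaults
apply. A minimal standalone sketch of the same request, under the same
assumptions as above; the model id and sampling values here are illustrative:

  // Hypothetical standalone equivalent of the sendPrompt action's run().
  async function sendPrompt(prompt: string): Promise<string> {
    const response = await fetch('https://api.openai.com/v1/completions', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
      },
      body: JSON.stringify({
        model: 'text-davinci-003', // any id returned by the models endpoint
        prompt,
        temperature: 0.9, // omitted parameters fall back to API defaults
        max_tokens: 256,
      }),
    });
    const data = await response.json();

    // Completions come back as { choices: [{ text: '...' }, ...] }.
    return data.choices[0].text;
  }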