feat: Convert all app files to JS

Faruk AYDIN
2024-01-05 17:44:21 +01:00
parent b95478b635
commit 43dba351c3
1030 changed files with 5114 additions and 6436 deletions

View File

@@ -0,0 +1,3 @@
import sendPrompt from './send-prompt/index.js';
export default [sendPrompt];

View File

@@ -1,3 +0,0 @@
import sendPrompt from './send-prompt';
export default [sendPrompt];

View File

@@ -0,0 +1,97 @@
import defineAction from '../../../../helpers/define-action.js';

const castFloatOrUndefined = (value) => {
  return value === '' ? undefined : parseFloat(value);
};

export default defineAction({
  name: 'Send prompt',
  key: 'sendPrompt',
  description: 'Creates a completion for the provided prompt and parameters.',
  arguments: [
    {
      label: 'Prompt',
      key: 'prompt',
      type: 'string',
      required: true,
      variables: true,
      description: 'The text to analyze.',
    },
    {
      label: 'Temperature',
      key: 'temperature',
      type: 'string',
      required: false,
      variables: true,
      description:
        'What sampling temperature to use, between 0 and 2. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer. We generally recommend altering this or Top P but not both.',
    },
    {
      label: 'Maximum tokens',
      key: 'maxTokens',
      type: 'string',
      required: false,
      variables: true,
      description:
        'The maximum number of tokens to generate in the completion.',
    },
    {
      label: 'Stop Sequence',
      key: 'stopSequence',
      type: 'string',
      required: false,
      variables: true,
      description:
        'Single stop sequence where the API will stop generating further tokens. The returned text will not contain the stop sequence.',
    },
    {
      label: 'Top P',
      key: 'topP',
      type: 'string',
      required: false,
      variables: true,
      description:
        'An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.',
    },
    {
      label: 'Frequency Penalty',
      key: 'frequencyPenalty',
      type: 'string',
      required: false,
      variables: true,
      description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`,
    },
    {
      label: 'Presence Penalty',
      key: 'presencePenalty',
      type: 'string',
      required: false,
      variables: true,
      description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.`,
    },
  ],

  async run($) {
    const payload = {
      model: $.step.parameters.model,
      prompt: $.step.parameters.prompt,
      temperature: castFloatOrUndefined($.step.parameters.temperature),
      max_tokens: castFloatOrUndefined($.step.parameters.maxTokens),
      stop: $.step.parameters.stopSequence || null,
      top_p: castFloatOrUndefined($.step.parameters.topP),
      frequency_penalty: castFloatOrUndefined(
        $.step.parameters.frequencyPenalty
      ),
      presence_penalty: castFloatOrUndefined($.step.parameters.presencePenalty),
    };

    const { data } = await $.http.post(
      `/deployments/${$.auth.data.deploymentId}/completions`,
      payload
    );

    $.setActionItem({
      raw: data,
    });
  },
});
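
A note on the optional numeric arguments above: the step parameters arrive as strings, and castFloatOrUndefined maps an empty string to undefined so that unset fields drop out of the serialized request body. A minimal sketch of that behaviour with made-up values (illustration only, not part of the commit):

const castFloatOrUndefined = (value) => {
  return value === '' ? undefined : parseFloat(value);
};

// Hypothetical parameter values, as the step editor would hand them over.
const payload = {
  temperature: castFloatOrUndefined('0.9'), // 0.9
  max_tokens: castFloatOrUndefined(''),     // undefined
};

// JSON serialization drops undefined-valued properties, so only
// temperature is actually sent to the API in this case.
console.log(JSON.stringify(payload)); // {"temperature":0.9}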

View File

@@ -1,87 +0,0 @@
import defineAction from '../../../../helpers/define-action';

const castFloatOrUndefined = (value: string | null) => {
  return value === '' ? undefined : parseFloat(value);
}

export default defineAction({
  name: 'Send prompt',
  key: 'sendPrompt',
  description: 'Creates a completion for the provided prompt and parameters.',
  arguments: [
    {
      label: 'Prompt',
      key: 'prompt',
      type: 'string' as const,
      required: true,
      variables: true,
      description: 'The text to analyze.'
    },
    {
      label: 'Temperature',
      key: 'temperature',
      type: 'string' as const,
      required: false,
      variables: true,
      description: 'What sampling temperature to use, between 0 and 2. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer. We generally recommend altering this or Top P but not both.'
    },
    {
      label: 'Maximum tokens',
      key: 'maxTokens',
      type: 'string' as const,
      required: false,
      variables: true,
      description: 'The maximum number of tokens to generate in the completion.'
    },
    {
      label: 'Stop Sequence',
      key: 'stopSequence',
      type: 'string' as const,
      required: false,
      variables: true,
      description: 'Single stop sequence where the API will stop generating further tokens. The returned text will not contain the stop sequence.'
    },
    {
      label: 'Top P',
      key: 'topP',
      type: 'string' as const,
      required: false,
      variables: true,
      description: 'An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.'
    },
    {
      label: 'Frequency Penalty',
      key: 'frequencyPenalty',
      type: 'string' as const,
      required: false,
      variables: true,
      description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`
    },
    {
      label: 'Presence Penalty',
      key: 'presencePenalty',
      type: 'string' as const,
      required: false,
      variables: true,
      description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.`
    },
  ],

  async run($) {
    const payload = {
      model: $.step.parameters.model as string,
      prompt: $.step.parameters.prompt as string,
      temperature: castFloatOrUndefined($.step.parameters.temperature as string),
      max_tokens: castFloatOrUndefined($.step.parameters.maxTokens as string),
      stop: ($.step.parameters.stopSequence as string || null),
      top_p: castFloatOrUndefined($.step.parameters.topP as string),
      frequency_penalty: castFloatOrUndefined($.step.parameters.frequencyPenalty as string),
      presence_penalty: castFloatOrUndefined($.step.parameters.presencePenalty as string),
    };

    const { data } = await $.http.post(`/deployments/${$.auth.data.deploymentId}/completions`, payload);
    $.setActionItem({
      raw: data,
    });
  },
});

View File

@@ -1,12 +1,12 @@
import verifyCredentials from './verify-credentials';
import isStillVerified from './is-still-verified';
import verifyCredentials from './verify-credentials.js';
import isStillVerified from './is-still-verified.js';
export default {
  fields: [
    {
      key: 'screenName',
      label: 'Screen Name',
      type: 'string' as const,
      type: 'string',
      required: true,
      readOnly: false,
      value: null,
@@ -18,7 +18,7 @@ export default {
    {
      key: 'yourResourceName',
      label: 'Your Resource Name',
      type: 'string' as const,
      type: 'string',
      required: true,
      readOnly: false,
      value: null,
@@ -30,7 +30,7 @@ export default {
    {
      key: 'deploymentId',
      label: 'Deployment ID',
      type: 'string' as const,
      type: 'string',
      required: true,
      readOnly: false,
      value: null,
@@ -42,7 +42,7 @@ export default {
    {
      key: 'apiKey',
      label: 'API Key',
      type: 'string' as const,
      type: 'string',
      required: true,
      readOnly: false,
      value: null,

View File

@@ -0,0 +1,6 @@
const isStillVerified = async ($) => {
  await $.http.get('/fine_tuning/jobs');
  return true;
};

export default isStillVerified;

View File

@@ -1,8 +0,0 @@
import { IGlobalVariable } from '@automatisch/types';

const isStillVerified = async ($: IGlobalVariable) => {
  await $.http.get('/fine_tuning/jobs');
  return true;
};

export default isStillVerified;

View File

@@ -0,0 +1,5 @@
const verifyCredentials = async ($) => {
  await $.http.get('/fine_tuning/jobs');
};

export default verifyCredentials;

View File

@@ -1,7 +0,0 @@
import { IGlobalVariable } from '@automatisch/types';

const verifyCredentials = async ($: IGlobalVariable) => {
  await $.http.get('/fine_tuning/jobs');
};

export default verifyCredentials;

View File

@@ -0,0 +1,13 @@
const addAuthHeader = ($, requestConfig) => {
  if ($.auth.data?.apiKey) {
    requestConfig.headers['api-key'] = $.auth.data.apiKey;
  }

  requestConfig.params = {
    'api-version': '2023-10-01-preview',
  };

  return requestConfig;
};

export default addAuthHeader;
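
For reference, here is roughly what this hook does to a request config before each call. The $ object below is a stand-in containing only the fields the hook reads, and the import path assumes the file lives at common/add-auth-header.js, as the app index imports suggest:

import addAuthHeader from './add-auth-header.js';

// Stand-in for the platform's global variable (illustrative values only).
const $ = { auth: { data: { apiKey: 'my-azure-api-key' } } };

const requestConfig = addAuthHeader($, { headers: {} });
// requestConfig.headers -> { 'api-key': 'my-azure-api-key' }
// requestConfig.params  -> { 'api-version': '2023-10-01-preview' }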

View File

@@ -1,15 +0,0 @@
import { TBeforeRequest } from '@automatisch/types';

const addAuthHeader: TBeforeRequest = ($, requestConfig) => {
  if ($.auth.data?.apiKey) {
    requestConfig.headers['api-key'] = $.auth.data.apiKey as string;
  }

  requestConfig.params = {
    'api-version': '2023-10-01-preview'
  }

  return requestConfig;
};

export default addAuthHeader;

View File

@@ -0,0 +1,11 @@
const setBaseUrl = ($, requestConfig) => {
  const yourResourceName = $.auth.data.yourResourceName;

  if (yourResourceName) {
    requestConfig.baseURL = `https://${yourResourceName}.openai.azure.com/openai`;
  }

  return requestConfig;
};

export default setBaseUrl;
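
Taken together, the base URL set here, the deployments path used by the send-prompt action, and the api-version query parameter added in add-auth-header.js mean each completion request targets a URL of roughly this shape. The resource name and deployment ID below are placeholders, not values from the commit:

// Placeholder values; the real ones come from the connection's auth data.
const yourResourceName = 'my-resource';
const deploymentId = 'my-gpt-deployment';

const url =
  `https://${yourResourceName}.openai.azure.com/openai` + // set-base-url.js
  `/deployments/${deploymentId}/completions` + // send-prompt action
  '?api-version=2023-10-01-preview'; // add-auth-header.js
console.log(url);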

View File

@@ -1,13 +0,0 @@
import { TBeforeRequest } from '@automatisch/types';

const setBaseUrl: TBeforeRequest = ($, requestConfig) => {
  const yourResourceName = $.auth.data.yourResourceName as string;

  if (yourResourceName) {
    requestConfig.baseURL = `https://${yourResourceName}.openai.azure.com/openai`;
  }

  return requestConfig;
};

export default setBaseUrl;

View File

@@ -1,8 +1,8 @@
import defineApp from '../../helpers/define-app';
import setBaseUrl from './common/set-base-url';
import addAuthHeader from './common/add-auth-header';
import auth from './auth';
import actions from './actions';
import defineApp from '../../helpers/define-app.js';
import setBaseUrl from './common/set-base-url.js';
import addAuthHeader from './common/add-auth-header.js';
import auth from './auth/index.js';
import actions from './actions/index.js';
export default defineApp({
  name: 'Azure OpenAI',