
Commit f21edde

getOpenAiPrompt

1 parent 158fa0d

File tree

8 files changed: +5196 −12685 lines changed

package-lock.json

Lines changed: 5143 additions & 12685 deletions
Generated file; diff not rendered by default.

package.json

Lines changed: 1 addition & 0 deletions

@@ -83,6 +83,7 @@
   },
   "dependencies": {
     "axios": "^1.7.2",
+    "openai": "^4.47.1",
     "openapi-client-axios": "^7.5.4"
   }
 }

src/Client.ts

Lines changed: 9 additions & 0 deletions

@@ -1,4 +1,7 @@
+import { ChatCompletionCreateParamsNonStreaming } from 'openai/resources'
+
 import APIClient from './ApiClient'
+import { mapPromptToOpenAIConfig } from './helpers/openAi'
 import { PromptConfiguration } from './types'

 export default class PromptFoundry {
@@ -19,4 +22,10 @@ export default class PromptFoundry {
   public async getPrompt({ promptId }: { promptId: string }): Promise<PromptConfiguration> {
     return this.client.get<PromptConfiguration>(`/prompts/${promptId}`)
   }
+
+  public async getOpenAiPrompt({ promptId }: { promptId: string }): Promise<ChatCompletionCreateParamsNonStreaming> {
+    const result = await this.client.get<PromptConfiguration>(`/prompts/${promptId}`)
+
+    return mapPromptToOpenAIConfig(result)
+  }
 }
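
Since the new getOpenAiPrompt returns a ChatCompletionCreateParamsNonStreaming, its result can be handed straight to the official OpenAI SDK. A minimal usage sketch, not part of this commit: the package name and the PromptFoundry constructor options below are assumptions, since neither appears in this diff.

import OpenAI from 'openai'
import PromptFoundry from '@prompt-foundry/sdk' // hypothetical package name

// Constructor options are assumed for illustration; the real signature is not shown in this diff.
const promptFoundry = new PromptFoundry({ apiKey: process.env.PROMPT_FOUNDRY_API_KEY ?? '' })
const openai = new OpenAI() // reads OPENAI_API_KEY from the environment

async function run(): Promise<void> {
  // Fetch the prompt and receive it already mapped to OpenAI chat-completion params.
  const params = await promptFoundry.getOpenAiPrompt({ promptId: 'example-prompt-id' })

  // The mapped object is typed as ChatCompletionCreateParamsNonStreaming,
  // so it can be passed directly to the OpenAI client.
  const completion = await openai.chat.completions.create(params)
  console.log(completion.choices[0].message.content)
}

run().catch(console.error)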

src/helpers/index.ts

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+export * from './openAi'

src/helpers/openAi.ts

Lines changed: 30 additions & 0 deletions

@@ -0,0 +1,30 @@
+import { ChatCompletionCreateParamsNonStreaming, ChatCompletionMessageParam, ChatCompletionRole } from 'openai/resources'
+
+import { PromptConfiguration, PromptMessage } from '../types'
+
+export const mapMessagesToOpenAI = (promptMessages: PromptMessage[]): ChatCompletionMessageParam[] => {
+  return promptMessages.map((message) => ({
+    role: message.role.toLowerCase() as ChatCompletionRole,
+    content: message.content
+  })) as ChatCompletionMessageParam[]
+}
+
+export const mapPromptToOpenAIConfig = (promptInstance: PromptConfiguration): ChatCompletionCreateParamsNonStreaming => {
+  const { promptMessages, promptParameters } = promptInstance
+
+  const messages = mapMessagesToOpenAI(promptMessages)
+
+  return {
+    messages,
+    model: promptParameters.modelName,
+    top_p: promptParameters.topP,
+    max_tokens: promptParameters.maxTokens,
+    temperature: promptParameters.temperature,
+    seed: promptParameters.seed,
+    presence_penalty: promptParameters.presencePenalty,
+    frequency_penalty: promptParameters.frequencyPenalty,
+    response_format: {
+      type: promptParameters.responseFormat === 'json' ? 'json_object' : 'text'
+    }
+  }
+}
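
For reference, a sketch of what mapPromptToOpenAIConfig produces for a hand-written configuration. The field values and the uppercase role strings are illustrative assumptions; the real PromptConfiguration shape is generated in src/types/openapi.d.ts, and mapMessagesToOpenAI simply lowercases whatever role value the API returns.

// Imports shown relative to the repo root for illustration.
import { mapPromptToOpenAIConfig } from './src/helpers/openAi'
import type { PromptConfiguration } from './src/types'

// Only the fields the mapper reads are sketched; the cast papers over the rest of the schema.
const promptConfiguration = {
  promptMessages: [
    { role: 'SYSTEM', content: 'You are a helpful assistant.' },
    { role: 'USER', content: 'Summarize the attached article in one sentence.' }
  ],
  promptParameters: {
    modelName: 'gpt-4o',
    temperature: 0.2,
    topP: 1,
    maxTokens: 100,
    seed: null,
    presencePenalty: 0,
    frequencyPenalty: 0,
    responseFormat: 'text'
  }
} as unknown as PromptConfiguration

const params = mapPromptToOpenAIConfig(promptConfiguration)
// params.messages        → [{ role: 'system', content: '...' }, { role: 'user', content: '...' }]
// params.model           → 'gpt-4o'
// params.max_tokens      → 100, params.seed → null
// params.response_format → { type: 'text' } (would be { type: 'json_object' } if responseFormat were 'json')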

src/index.ts

Lines changed: 1 addition & 0 deletions

@@ -1,5 +1,6 @@
 import Client from './Client'

+export * from './helpers'
 export type * from './types'

 export default Client

src/types/index.ts

Lines changed: 1 addition & 0 deletions

@@ -1,3 +1,4 @@
 import { Components } from './openapi'

 export type PromptConfiguration = Components.Schemas.PromptConfiguration
+export type PromptMessage = Components.Schemas.PromptConfiguration['promptMessages'][0]
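
PromptMessage is derived with an indexed access type rather than a separate generated schema: it is the element type of the promptMessages array, so it stays in sync with the generated OpenAPI types. A small illustrative sketch; the import path is assumed, and role and content are the fields the mapper in src/helpers/openAi.ts reads.

import type { PromptMessage } from './src/types'

// PromptMessage resolves to the element type of PromptConfiguration['promptMessages'].
const renderMessage = (message: PromptMessage): string => `${message.role}: ${message.content}`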

src/types/openapi.d.ts

Lines changed: 10 additions & 0 deletions

@@ -52,6 +52,16 @@ declare namespace Components {
              * 1
              */
             topP: number;
+            /**
+             * example:
+             * 100
+             */
+            maxTokens: number | null;
+            /**
+             * example:
+             * 97946543
+             */
+            seed: number | null;
         };
         /**
          * Variables to be used in the prompt messages.
