@thinkableai/prompt
v1.0.3
Published
Build and execute advanced prompts.
Downloads
1
Readme
Thinkableai prompt
by @thinkableai
Introduction
- ⚡️ Build and execute advanced prompts.
- 😍 Easy to extend and initialize language model settings.
Quick Installation
npm install @thinkableai/prompt
# or
yarn add @thinkableai/prompt
# or
pnpm add @thinkableai/prompt
# or
bun add @thinkableai/prompt
In your .env file, make sure to add the API key for the provider you use:
OPENAI_API_KEY=
# or
ANTHROPIC_API_KEY=
# or
GOOGLE_API_KEY=
# or
GROQ_API_KEY=
# or
AZURE_API_KEY=
Usage
- Import the `PromptModule` into your application module.
import { Module } from "@nestjs/common";
import { AppController } from "./app.controller";
import { AppService } from "./app.service";
import { PromptModule } from "@thinkableai/prompt";
/**
 * Root application module.
 * Registering PromptModule here makes PromptService injectable
 * throughout the application.
 */
@Module({
  controllers: [AppController],
  providers: [AppService],
  imports: [PromptModule], // exposes PromptService for dependency injection
})
export class AppModule {}
- Call the service to build and execute a prompt.
import { Controller, Post } from "@nestjs/common";
import {
ExecutePromptInput,
FormatContentTypeEnum,
LLMProviderEnum,
LLMSettingsModel,
PromptService,
} from "@thinkableai/prompt";
@Controller()
export class AppController {
constructor(private readonly promptService: PromptService) {}
@Post()
async run() {
const llmSettings = new LLMSettingsModel("CHAT", LLMProviderEnum.OPENAI);
const systemPrompt = `I act you as a JSON generator`;
const prompt = `{VAR_A} is value of a prompt and {VAR_B} is value of chat, make sure to not use a highlight`;
const values = {
VAR_A: "my_value",
VAR_B: "hello",
};
const input: ExecutePromptInput = {
prompt,
values,
systemPrompt,
llmSettings,
formatContentType: FormatContentTypeEnum.JSON, // force to receive a clean json format.
};
const content = await this.promptService.executePrompt(input);
return { content };
}
}