Skip to content

Commit 0380c70

Browse files
committed
Added agent and memory commands
1 parent d97c842 commit 0380c70

File tree

6 files changed

+206
-0
lines changed

6 files changed

+206
-0
lines changed

app/commands/agents.ts

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,81 @@
1+
import { LLMMessage, LLMModel, callLLMChatCompletion } from "~/utils/llmUtils";
2+
import { Config } from "~/utils/config";
3+
4+
// In-memory record of one spawned sub-agent conversation.
export interface Agent {
  // Human-readable agent name (interpolated into user-facing status strings).
  name: string;
  // The task this agent was created to work on (reported by listAgents).
  task: string;
  // Full chat transcript, alternating user and assistant turns.
  messages: LLMMessage[];
  // Model that every message for this agent is sent to.
  model: LLMModel;
}
10+
11+
// Monotonically increasing counter used to mint unique agent keys.
let nextkey = 0;
// Registry of live agents, keyed by the stringified counter value.
const agents: {[key: string]: Agent} = {};
13+
14+
export async function startAgent(
15+
name: string,
16+
task: string,
17+
prompt: string,
18+
model: LLMModel = Config.fast_llm_model
19+
) {
20+
const firstMessage = `You are ${name}. Respond with: "Acknowledged".`;
21+
const { key, agentReply } = await createAgent(name, task, firstMessage, model);
22+
23+
const agentResponse = await messageAgent(key, prompt);
24+
25+
return `Agent ${name} created with key ${key}. First response: ${agentResponse}`;
26+
}
27+
28+
export async function createAgent(
29+
name: string,
30+
task: string,
31+
prompt: string,
32+
model: LLMModel
33+
): Promise<{ key: string; agentReply: string }> {
34+
const messages: LLMMessage[] = [{ role: "user", content: prompt }];
35+
36+
const agentReply = await callLLMChatCompletion(messages, model);
37+
38+
messages.push({ role: "assistant", content: agentReply });
39+
40+
const agent: Agent = {
41+
name,
42+
task,
43+
messages,
44+
model,
45+
};
46+
const key = `${nextkey}`;
47+
nextkey = nextkey + 1;
48+
49+
agents[key] = agent;
50+
51+
return { key, agentReply };
52+
}
53+
54+
export async function messageAgent(
55+
key: string,
56+
message: string
57+
): Promise<string> {
58+
if (!agents[key]) {
59+
return "Invalid key, agent doesn't exist";
60+
}
61+
const { messages, model } = agents[key];
62+
messages.push({ role: "user", content: message });
63+
64+
const agentReply = await callLLMChatCompletion(messages, model);
65+
66+
messages.push({ role: "assistant", content: agentReply });
67+
return agentReply;
68+
}
69+
70+
export function listAgents(): [string, string][] {
71+
return Object.keys(agents).map((key: string) => [key, agents[key].task]);
72+
}
73+
74+
export function deleteAgent(key: string): boolean {
75+
if (agents[key]) {
76+
delete agents[key];
77+
return true;
78+
}
79+
80+
return false;
81+
}

app/commands/index.ts

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
import { startAgent, deleteAgent, messageAgent, listAgents } from "./agents";
2+
import {
3+
overwriteMemory,
4+
deleteMemory,
5+
commitMemory,
6+
} from "./memory";
7+
import { Config } from "~/utils/config";
8+
9+
export async function executeCommand(
10+
command: string,
11+
args: { [key: string]: string }
12+
): Promise<string> {
13+
try {
14+
if (command === "memory_add") {
15+
return commitMemory(args["string"]);
16+
} else if (command === "memory_del") {
17+
return deleteMemory(parseInt(args["key"]));
18+
} else if (command === "memory_ovr") {
19+
return overwriteMemory(parseInt(args["key"]), args["string"]);
20+
} else if (command === "start_agent") {
21+
return await startAgent(args["name"], args["task"], args["prompt"]);
22+
} else if (command === "message_agent") {
23+
return await messageAgent(args["key"], args["message"]);
24+
} else if (command === "list_agents") {
25+
return JSON.stringify(listAgents());
26+
} else if (command === "delete_agent") {
27+
return deleteAgent(args["key"])
28+
? `Agent ${args["key"]} deleted.`
29+
: `Agent ${args["key"]} does not exist.`;
30+
} else {
31+
return `Unknown command ${command}`;
32+
}
33+
} catch (error) {
34+
return `Error: ${error}`;
35+
}
36+
}

app/commands/memory.ts

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
const permanentMemory: string[] = [];
2+
3+
export function commitMemory(val: string) {
4+
permanentMemory.push(val);
5+
6+
return `Committing memory with string "${val}"`;
7+
}
8+
export function getMemory(index: number) {
9+
if (index >= permanentMemory.length) {
10+
return 'Invalid key, cannot retrieve memory.';
11+
}
12+
return permanentMemory[index];
13+
}
14+
15+
export function deleteMemory(index: number) {
16+
if (index >= permanentMemory.length) {
17+
return 'Invalid key, cannot delete memory.';
18+
}
19+
20+
permanentMemory.splice(index, 1);
21+
return `Deleting memory with key ${index}.`;
22+
}
23+
24+
export function overwriteMemory(index: number, val: string) {
25+
if (index >= permanentMemory.length) {
26+
return 'Invalid key, cannote overwrite memory.'
27+
}
28+
29+
permanentMemory[index] = val;
30+
return `Overwriting memory with key ${index} and string "${val}".`;
31+
}

app/utils/apiKey.ts

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
let OPENAI_API_KEY: string | null = null;
2+
3+
export function getAPIKey() {
4+
return OPENAI_API_KEY;
5+
}
6+
7+
export function setAPIKey(apiKey: string) {
8+
OPENAI_API_KEY = apiKey;
9+
}

app/utils/config.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
export const Config = {
2+
fast_llm_model: 'gpt-3.5-turbo' as const
3+
}

app/utils/llmUtils.ts

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
import { getAPIKey } from "./apiKey";
2+
3+
// One turn of a chat-completion conversation, discriminated by `role`.
export type LLMMessage =
  | { role: "system"; content: string; }
  | { role: "assistant"; content: string; }
  | { role: "user"; content: string; };

// Chat model identifiers accepted by the completions endpoint.
export type LLMModel =
  | "gpt-3.5-turbo"
  | "gpt-3.5-turbo-0301"
  | "gpt-4"
  | "gpt-4-0314";
13+
14+
export async function callLLMChatCompletion(
15+
messages: LLMMessage[],
16+
model: LLMModel,
17+
temperature?: number,
18+
maxTokens?: number
19+
) {
20+
const reqBody = {
21+
model,
22+
messages,
23+
temperature,
24+
max_tokens: maxTokens,
25+
};
26+
27+
const apiKey = getAPIKey();
28+
const headers = new Headers();
29+
headers.set("Authorization", `Bearer ${apiKey}`);
30+
31+
const response = await fetch("https://api.openai.com/v1/chat/completions", {
32+
method: "POST",
33+
headers,
34+
body: JSON.stringify(reqBody),
35+
});
36+
37+
if (response.status !== 200) {
38+
const errorText = await response.text();
39+
console.error("Error calling OpenAI service", response.status, errorText);
40+
return `Error calling API with status code ${response.status} and message "${errorText}"`;
41+
}
42+
43+
const resBody = await response.json();
44+
45+
return resBody.data.choices[0].message as string;
46+
}

0 commit comments

Comments
 (0)