GroqAI Models Added #3

Closed · wants to merge 5 commits
.example.env: 2 changes (1 addition, 1 deletion)
@@ -5,5 +5,5 @@ E2B_API_KEY=""
# OLLAMA_BASE_URL = "http://localhost:11434/api"

# TOGETHER_AI_API_KEY=""

# GROQ_AI_API_KEY=""
# FIREWORKS_API_KEY=""
.gitignore: 2 changes (1 addition, 1 deletion)
@@ -27,7 +27,7 @@ yarn-error.log*

# local env files
.env*.local

.env
# vercel
.vercel

README.md: 2 changes (2 additions, 0 deletions)
@@ -19,6 +19,7 @@ This is an AI-powered code and data analysis tool built with Next.js and the [E2
**Supported LLM Providers:**
- 🔸 TogetherAI
- 🔸 Fireworks
- 🔸 Groq

**Supported chart types:**
- 🔸 All the supported charts are described [here](https://e2b.dev/docs/code-interpreting/create-charts-visualizations/interactive-charts#supported-intertactive-charts).
@@ -52,5 +53,6 @@ E2B: `E2B_API_KEY`
LLM Providers:

- Fireworks: `FIREWORKS_API_KEY`
- Groq: `GROQ_AI_API_KEY`
- Together AI: `TOGETHER_API_KEY`
- Ollama: `OLLAMA_BASE_URL`
app/api/chat/route.ts: 60 changes (30 additions, 30 deletions)
@@ -1,5 +1,5 @@
// import { z } from "zod";
// import { Sandbox } from "@e2b/code-interpreter";
import { z } from "zod";
import { Sandbox } from "@e2b/code-interpreter";
import { getModelClient, LLMModel, LLMModelConfig } from "@/lib/model";
import { toPrompt } from "@/lib/prompt";
import { CustomFiles } from "@/lib/types";
@@ -41,36 +41,36 @@ export async function POST(req: Request) {
messages: convertToCoreMessages(filteredMessages),
...modelParams,
// If the provider supports tooling, uncomment below
// tools: {
// runCode: {
// description:
// "Execute python code in a Jupyter notebook cell and return result",
// parameters: z.object({
// code: z
// .string()
// .describe("The python code to execute in a single cell"),
// }),
// execute: async ({ code }) => {
// // Create a sandbox, execute LLM-generated code, and return the result
// console.log("Executing code", code);
// const sandbox = await Sandbox.create();
// tools: {
// runCode: {
// description:
// "Execute python code in a Jupyter notebook cell and return result",
// parameters: z.object({
// code: z
// .string()
// .describe("The python code to execute in a single cell"),
// }),
// execute: async ({ code }) => {
// // Create a sandbox, execute LLM-generated code, and return the result
// console.log("Executing code", code);
// const sandbox = await Sandbox.create();

// // Upload files
// for (const file of data.files) {
// await sandbox.files.write(file.name, atob(file.base64));
// }
// const { text, results, logs, error } = await sandbox.runCode(code);
// console.log(text, results, logs, error);
// // Upload files
// for (const file of data.files) {
// await sandbox.files.write(file.name, atob(file.base64));
// }
// const { text, results, logs, error } = await sandbox.runCode(code);
// console.log(text, results, logs, error);

// return {
// text,
// results,
// logs,
// error,
// };
// },
// },
// },
// return {
// text,
// results,
// logs,
// error,
// };
// },
// },
// },
});

return result.toDataStreamResponse();
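For reference, this is roughly what the commented-out tool would look like if enabled. It is a minimal sketch based on the commented block above, not the PR's actual code; the `LanguageModel` and `CoreMessage` types from the `ai` package and the exact file shape are assumptions.

```ts
import { z } from "zod";
import { streamText, type CoreMessage, type LanguageModel } from "ai";
import { Sandbox } from "@e2b/code-interpreter";

// Sketch: stream a chat completion and let the model run Python in an E2B sandbox.
async function streamWithRunCode(
  model: LanguageModel,
  messages: CoreMessage[],
  files: { name: string; base64: string }[]
) {
  return streamText({
    model,
    messages,
    tools: {
      runCode: {
        description:
          "Execute python code in a Jupyter notebook cell and return result",
        parameters: z.object({
          code: z
            .string()
            .describe("The python code to execute in a single cell"),
        }),
        execute: async ({ code }: { code: string }) => {
          // Create a sandbox, upload the user's files, then run the generated code.
          const sandbox = await Sandbox.create();
          for (const file of files) {
            await sandbox.files.write(file.name, atob(file.base64));
          }
          const { text, results, logs, error } = await sandbox.runCode(code);
          return { text, results, logs, error };
        },
      },
    },
  });
}
```

The handler would then return the stream via `toDataStreamResponse()`, as in the unchanged code above.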
app/page.tsx: 13 changes (3 additions, 10 deletions)
@@ -32,7 +32,7 @@ export default function Home() {
const [languageModel, setLanguageModel] = useLocalStorage<LLMModelConfig>(
"languageModel",
{
model: "accounts/fireworks/models/llama-v3p1-405b-instruct",
model: "llama-3.1-70b-versatile",
}
);

@@ -120,15 +120,8 @@ export default function Home() {
<div className="flex items-center gap-2">
<Logo className="w-6 h-6" />
<h1 className="text-md font-medium">
Analyst by{" "}
<a
href="https://e2b.dev"
target="_blank"
className="underline decoration-[rgba(229,123,0,.3)] decoration-2 text-[#ff8800]"
>
E2B
</a>
</h1>
AI MultiAgents
</h1>
</div>
</nav>
<div className="flex-1 overflow-y-auto pt-14" id="messages">
components/tooloutput.tsx: 2 changes (2 additions, 0 deletions)
@@ -13,6 +13,8 @@ export function ToolOutput({ result }: { result: ToolResult | undefined }) {
const toolResult = result.find((r) => r.toolName === "runCode")?.result;

if (toolResult?.error) {
console.log("Error occur",toolResult);

return (
<div className="text-red-500 border border-red-200 rounded-xl bg-red-500/10 text-sm">
<div className="flex items-center gap-2 pt-4 px-4">
lib/model.ts: 10 changes (9 additions, 1 deletion)
@@ -25,12 +25,20 @@ export function getModelClient(model: LLMModel, config: LLMModelConfig) {
const { apiKey, baseURL } = config;

const providerConfigs = {
togetherai: () =>
groqai: () =>
createOpenAI({
apiKey: apiKey || process.env.GROQ_AI_API_KEY,
baseURL: "https://api.groq.com/openai/v1",
})(modelNameString),

togetherai: () =>
createOpenAI({
apiKey: apiKey || process.env.TOGETHER_API_KEY,
baseURL: baseURL || "https://api.together.xyz/v1",
})(modelNameString),

ollama: () => createOllama({ baseURL })(modelNameString),

fireworks: () =>
createOpenAI({
apiKey: apiKey || process.env.FIREWORKS_API_KEY,
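To illustrate how the new `groqai` entry gets picked up, here is a minimal usage sketch. The `LLMModel` and `LLMModelConfig` field names are assumed from lib/models.json and the destructuring in `getModelClient`; they may not match the real type definitions exactly.

```ts
import { getModelClient, LLMModel, LLMModelConfig } from "@/lib/model";

// A Groq entry in the same shape as the new lib/models.json records.
const groqModel: LLMModel = {
  id: "llama-3.1-8b-instant",
  provider: "Groq",
  providerId: "groqai",
  name: "llama-3.1-8b-instant",
};

// No per-request apiKey here, so the groqai branch falls back to
// process.env.GROQ_AI_API_KEY and the fixed https://api.groq.com/openai/v1 baseURL.
const config: LLMModelConfig = { model: groqModel.id };

const client = getModelClient(groqModel, config);
// `client` can then be passed as the `model` argument to streamText in route.ts.
```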
lib/models.json: 78 changes (72 additions, 6 deletions)
@@ -1,5 +1,77 @@
{
"models": [
{
"id": "gemma2-9b-it",
"provider": "Groq",
"providerId": "groqai",
"name": "gemma2-9b-it"
},
{
"id": "llama-3.3-70b-versatile",
"provider": "Groq",
"providerId": "groqai",
"name": "llama-3.3-70b-versatile"
},
{
"id": "llama-3.1-8b-instant",
"provider": "Groq",
"providerId": "groqai",
"name": "llama-3.1-8b-instant"
},
{
"id": "llama3-70b-8192",
"provider": "Groq",
"providerId": "groqai",
"name": "llama3-70b-8192"
},
{
"id": "llama3-8b-8192",
"provider": "Groq",
"providerId": "groqai",
"name": "llama3-8b-8192"
},
{
"id": "mixtral-8x7b-32768",
"provider": "Groq",
"providerId": "groqai",
"name": "mixtral-8x7b-32768"
},
{
"id": "llama-3.2-3b-preview",
"provider": "Groq",
"providerId": "groqai",
"name": "llama-3.2-3b-preview (Groq)"
},
{
"id": "llama-3.1-70b-versatile",
"provider": "Groq",
"providerId": "groqai",
"name": "Llama 3.1 70b (Groq)"
},
{
"id": "llama-3.1-8b-instant",
"provider": "Groq",
"providerId": "groqai",
"name": "Llama 3.1 8b (Groq)"
},
{
"id": "llama-3.2-11b-vision-preview",
"provider": "Groq",
"providerId": "groqai",
"name": "Llama 3.2 11b (Groq)"
},
{
"id": "llama-3.2-3b-preview",
"provider": "Groq",
"providerId": "groqai",
"name": "Llama 3.2 3b (Groq)"
},
{
"id": "llama-3.2-1b-preview",
"provider": "Groq",
"providerId": "groqai",
"name": "Llama 3.2 1b (Groq)"
},
{
"id": "accounts/fireworks/models/llama-v3p1-405b-instruct",
"provider": "Fireworks",
@@ -29,12 +101,6 @@
"provider": "Together AI",
"providerId": "togetherai",
"name": "Llama 3.1 70B"
},
{
"id": "llama3.2",
"provider": "Ollama",
"providerId": "ollama",
"name": "Llama 3.2 3B"
}
]
}
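Each entry pairs a provider-side model id with the `providerId` key that selects a branch of `providerConfigs` in lib/model.ts. The shape below is inferred from the JSON above; the exported type in the repo may differ.

```ts
// Inferred shape of a lib/models.json entry (an assumption, not the repo's exported type).
type ModelEntry = {
  id: string;         // model identifier sent to the provider, e.g. "llama-3.1-8b-instant"
  provider: string;   // display name, e.g. "Groq"
  providerId: string; // key into providerConfigs in lib/model.ts, e.g. "groqai"
  name: string;       // label shown in the model picker
};
```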
lib/types.ts: 16 changes (16 additions, 0 deletions)
@@ -1,11 +1,27 @@
// import { ToolInvocation } from "ai";
// import { Result } from "@e2b/code-interpreter";

// export type ToolResult = (ToolInvocation & {
// result: Result;
// })[];

// export type CustomFiles = {
// base64(base64: any): string | ArrayBuffer | Blob | ReadableStream<any>;
// name: string;
// contentType: string;
// content: string;
// };

import { ToolInvocation } from "ai";
import { Result } from "@e2b/code-interpreter";

// Define a more specific type for the base64 method parameter
export type ToolResult = (ToolInvocation & {
result: Result;
})[];

export type CustomFiles = {
base64(base64: string): string | ArrayBuffer | Blob | ReadableStream<Uint8Array>;
name: string;
contentType: string;
content: string;
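For context, the `ToolResult` array defined here is consumed in components/tooloutput.tsx by looking up the `runCode` invocation. A minimal sketch of that lookup, assuming `ToolInvocation` from the `ai` package exposes `toolName` as the component's `find` call implies:

```ts
import type { ToolResult } from "@/lib/types";

// Pull out the runCode tool's result, if the model invoked it
// (mirrors the lookup at the top of components/tooloutput.tsx).
export function getRunCodeResult(result: ToolResult | undefined) {
  return result?.find((r) => r.toolName === "runCode")?.result;
}
```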
public/thirdparty/logos/groqai.svg: 1 change (1 addition, 0 deletions)