${suffix}` },
+ ],
+ model,
+ });
+
+ return chatCompletion.choices[0].message.content;
+}
\ No newline at end of file
diff --git a/website/package-lock.json b/website/package-lock.json
index cae53bd..fc1d1b0 100644
--- a/website/package-lock.json
+++ b/website/package-lock.json
@@ -18,6 +18,7 @@
"@uiw/react-codemirror": "^4.21.21",
"class-variance-authority": "^0.7.0",
"clsx": "^2.0.0",
+ "extract-json-from-string": "^1.0.1",
"lucide-react": "^0.294.0",
"next": "14.0.4",
"openai": "^4.21.0",
@@ -1456,6 +1457,14 @@
"node": ">=6"
}
},
+ "node_modules/extract-json-from-string": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/extract-json-from-string/-/extract-json-from-string-1.0.1.tgz",
+ "integrity": "sha512-xfQOSFYbELVs9QVkKsV9FZAjlAmXQ2SLR6FpfFX1kpn4QAvaGBJlrnVOblMLwrLPYc26H+q9qxo6JTd4E7AwgQ==",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
"node_modules/fast-glob": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
@@ -3610,6 +3619,11 @@
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ=="
},
+ "extract-json-from-string": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/extract-json-from-string/-/extract-json-from-string-1.0.1.tgz",
+ "integrity": "sha512-xfQOSFYbELVs9QVkKsV9FZAjlAmXQ2SLR6FpfFX1kpn4QAvaGBJlrnVOblMLwrLPYc26H+q9qxo6JTd4E7AwgQ=="
+ },
"fast-glob": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
diff --git a/website/package.json b/website/package.json
index ab6962d..a1179ed 100644
--- a/website/package.json
+++ b/website/package.json
@@ -20,6 +20,7 @@
"@uiw/react-codemirror": "^4.21.21",
"class-variance-authority": "^0.7.0",
"clsx": "^2.0.0",
+ "extract-json-from-string": "^1.0.1",
"lucide-react": "^0.294.0",
"next": "14.0.4",
"openai": "^4.21.0",
diff --git a/website/pages/api/autocomplete.js b/website/pages/api/autocomplete.js
index 6223a84..e71afa8 100644
--- a/website/pages/api/autocomplete.js
+++ b/website/pages/api/autocomplete.js
@@ -1,47 +1,11 @@
-import OpenAI from "openai";
-
-const openai = new OpenAI({
- apiKey: process.env.OPENAI_API_KEY,
-});
-
-async function completionLlama(prefix, suffix, language){
- try {
- const response = await fetch(
- `https://api.cloudflare.com/client/v4/accounts/${process.env.CLOUDLFARE_ID}/ai/run/@hf/thebloke/codellama-7b-instruct-awq`, {
- method: 'POST',
- headers: {
- 'Authorization': `Bearer ${process.env.CLOUDFLARE_KEY}`,
- 'Content-Type': 'application/json'
- },
- body: JSON.stringify({ "prompt": `You are a ${language?(language + " "):""}programmer. Do not add any explanation or markdown. ${prefix}${suffix}`, "max_tokens": 30 })
- });
-
- const data = await response.json();
- return data.result.response;
- } catch (error) {
- console.error('Error:', error);
- }
-}
-
-async function completionOpenAI(prefix, suffix, model="gpt-3.5-turbo-1106", language){
- const chatCompletion = await openai.chat.completions.create({
- messages: [
- {
- role: "system",
- content: `You are a ${language?(language + " "):""}programmer that replaces part with the right code. Only output the code that replaces part. Do not add any explanation or markdown.`,
- },
- { role: "user", content: `${prefix}${suffix}` },
- ],
- model,
- });
-
- return chatCompletion.choices[0].message.content;
-}
+import { completionLlama } from "@/lib/backends/llama";
+import { completionMixtralWithRetries } from "@/lib/backends/mistral";
+import { completionOpenAI } from "@/lib/backends/openai";
export default async function handler(req, res) {
const { prefix, suffix, model, language } = req.body;
- const completionMethod = model == "codellama" ? completionLlama : completionOpenAI;
+    const completionMethod = model === "codellama" ? completionLlama : (model === "mixtral-8x7b" ? completionMixtralWithRetries : completionOpenAI);
const prediction = await completionMethod(prefix, suffix, model, language);
- console.log(model, prediction)
- res.status(200).json({ prediction })
+ console.log(model, prediction);
+ res.status(200).json({ prediction });
}