Skip to content

Commit 5b1f87d

Browse files
committed
feat: add Ollama proxy route to backend for jin.neu.edu access
1 parent 63ba4a4 commit 5b1f87d

5 files changed

Lines changed: 216 additions & 91 deletions

File tree

backend/src/controllers/ollama.controller.js

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
// Base URL of the Ollama server being proxied. Overridable via environment
// variable so deployments other than jin.neu.edu need no code change.
const OLLAMA_BASE_URL = process.env.OLLAMA_BASE_URL || "http://jin.neu.edu:11434";

/**
 * POST handler: proxy an OpenAI-compatible chat-completion request to the
 * Ollama server, so the browser never talks to jin.neu.edu directly.
 *
 * Forwards `req.body` verbatim to `${OLLAMA_BASE_URL}/v1/chat/completions`
 * and relays the upstream JSON response. On an upstream error status the
 * upstream status code and body are relayed (previously a non-OK upstream
 * reply was returned as HTTP 200, and a non-JSON error body would make
 * `response.json()` throw). On a network failure responds 500.
 *
 * @param {import("express").Request}  req - body is the chat payload (model, messages, ...)
 * @param {import("express").Response} res
 */
const proxyChat = async (req, res) => {
  console.log("🟣 [Ollama] proxyChat hit — model:", req.body.model);
  try {
    const response = await fetch(`${OLLAMA_BASE_URL}/v1/chat/completions`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(req.body),
    });

    // Relay upstream failures with their own status instead of masking them
    // as success; read the body as text because error pages may not be JSON.
    if (!response.ok) {
      const text = await response.text();
      console.error("🔴 [Ollama] upstream error:", response.status, text);
      return res.status(response.status).json({ error: text });
    }

    const data = await response.json();
    res.json(data);
  } catch (err) {
    // err may not be an Error instance (fetch can reject with other values).
    const message = err instanceof Error ? err.message : String(err);
    console.error("🔴 [Ollama] fetch failed:", message);
    res.status(500).json({ error: message });
  }
};
19+
20+
/**
 * GET handler: proxy the Ollama model-list endpoint (`/api/tags`).
 *
 * Relays the JSON list of installed models. On an upstream error status the
 * upstream status code and body are relayed (previously a non-OK upstream
 * reply was returned as HTTP 200, and a non-JSON error body would make
 * `response.json()` throw). On a network failure responds 500.
 *
 * @param {import("express").Request}  req - unused
 * @param {import("express").Response} res
 */
const getTags = async (req, res) => {
  console.log("🟣 [Ollama] getTags hit");
  try {
    const response = await fetch(`${OLLAMA_BASE_URL}/api/tags`);

    // Relay upstream failures instead of masking them as success.
    if (!response.ok) {
      const text = await response.text();
      console.error("🔴 [Ollama] upstream error:", response.status, text);
      return res.status(response.status).json({ error: text });
    }

    const data = await response.json();
    res.json(data);
  } catch (err) {
    // err may not be an Error instance (fetch can reject with other values).
    const message = err instanceof Error ? err.message : String(err);
    console.error("🔴 [Ollama] getTags failed:", message);
    res.status(500).json({ error: message });
  }
};
31+
32+
// Route handlers consumed by routes/ollama.routes.js.
module.exports = { proxyChat, getTags };
backend/src/routes/ollama.routes.js

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
// Express router exposing the Ollama proxy endpoints.
// Mounted at /api/v1/ollama by server.js.
const express = require("express");
const { proxyChat, getTags } = require("../controllers/ollama.controller");

const router = express.Router();

// POST /chat — proxy an OpenAI-compatible chat completion to Ollama.
router.post("/chat", proxyChat);
// GET /tags — proxy the list of models installed on the Ollama server.
router.get("/tags", getTags);

module.exports = router;

backend/src/server.js

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ const dbsRoutes = require("./routes/dbs.routes");
1313
const datasetsRoutes = require("./routes/datasets.routes");
1414
const collectionRoutes = require("./routes/collection.route");
1515
const projectRoutes = require("./routes/projects.routes");
16+
const ollamaRoutes = require("./routes/ollama.routes");
1617

1718
const app = express();
1819
const PORT = process.env.PORT || 5000;
@@ -49,6 +50,7 @@ app.use("/api/v1/dbs", dbsRoutes);
4950
app.use("/api/v1/datasets", datasetsRoutes);
5051
app.use("/api/v1/collections", collectionRoutes);
5152
app.use("/api/v1/projects", projectRoutes);
53+
app.use("/api/v1/ollama", ollamaRoutes);
5254

5355
// health check endpoint
5456
app.get("/api/health", async (req, res) => {

src/components/User/Dashboard/DatasetOrganizer/LLMPanel.tsx

Lines changed: 140 additions & 91 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,7 @@ import { Colors } from "design/theme";
4343
import JSZip from "jszip";
4444
import React, { useState, useEffect } from "react";
4545
import { FileItem } from "redux/projects/types/projects.interface";
46+
import { OllamaService } from "services/ollama.service";
4647

4748
interface LLMPanelProps {
4849
files: FileItem[];
@@ -70,12 +71,14 @@ const llmProviders: Record<string, LLMProvider> = {
7071
name: "Ollama (Local Server)",
7172
baseUrl: "http://localhost:11434/v1/chat/completions",
7273
models: [
73-
{ id: "qwen3-coder:30b", name: "Qwen 3 Coder" },
74-
{ id: "qwen2.5-coder:latest", name: "Qwen 2.5 Coder" },
75-
{ id: "codellama:latest", name: "Code Llama" },
76-
{ id: "llama3.1:latest", name: "Llama 3.1" },
77-
{ id: "mistral:latest", name: "Mistral" },
78-
{ id: "deepseek-coder:latest", name: "DeepSeek Coder" },
74+
{ id: "qwen3-coder-next:latest", name: "Qwen 3 Coder Next" }, // ← add
75+
{ id: "qwen3-coder-careful:latest", name: "Qwen 3 Coder Careful" }, // ← add
76+
// { id: "qwen3-coder:30b", name: "Qwen 3 Coder" },
77+
// { id: "qwen2.5-coder:latest", name: "Qwen 2.5 Coder" },
78+
// { id: "codellama:latest", name: "Code Llama" },
79+
// { id: "llama3.1:latest", name: "Llama 3.1" },
80+
// { id: "mistral:latest", name: "Mistral" },
81+
// { id: "deepseek-coder:latest", name: "DeepSeek Coder" },
7982
],
8083
noApiKey: true,
8184
customUrl: true,
@@ -133,9 +136,9 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
133136
}) => {
134137
const [provider, setProvider] = useState<string>("ollama");
135138
const [model, setModel] = useState<string>("qwen3-coder:30b");
136-
const [ollamaUrl, setOllamaUrl] = useState<string>(
137-
"http://huo.neu.edu:11434"
138-
);
139+
// const [ollamaUrl, setOllamaUrl] = useState<string>(
140+
// "http://jin.neu.edu:11434"
141+
// );
139142
const [apiKey, setApiKey] = useState<string>("");
140143
const [generatedScript, setGeneratedScript] = useState<string>("");
141144
const [bidsPlan, setBidsPlan] = useState<string>(""); // add bids plan
@@ -247,16 +250,20 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
247250
}),
248251
});
249252
} else if (provider === "ollama") {
250-
const ollamaBaseUrl = ollamaUrl || "http://localhost:11434";
251-
ddResponse = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, {
252-
method: "POST",
253-
signal: controller.signal,
254-
headers: { "Content-Type": "application/json" },
255-
body: JSON.stringify({
256-
model,
257-
messages: [{ role: "user", content: ddPrompt }],
258-
}),
259-
});
253+
// const ollamaBaseUrl = ollamaUrl || "http://localhost:11434";
254+
// ddResponse = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, {
255+
// method: "POST",
256+
// signal: controller.signal,
257+
// headers: { "Content-Type": "application/json" },
258+
// body: JSON.stringify({
259+
// model,
260+
// messages: [{ role: "user", content: ddPrompt }],
261+
// stream: false,
262+
// }),
263+
// });
264+
ddResponse = await OllamaService.chat(model, [
265+
{ role: "user", content: ddPrompt },
266+
]);
260267
} else {
261268
ddResponse = await fetch(currentProvider.baseUrl, {
262269
method: "POST",
@@ -273,7 +280,9 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
273280
});
274281
}
275282

276-
const ddData = await ddResponse.json();
283+
// const ddData = await ddResponse.json();
284+
const ddData =
285+
provider === "ollama" ? ddResponse : await ddResponse.json();
277286
let ddText = currentProvider.isAnthropic
278287
? ddData.content[0].text
279288
: ddData.choices[0].message.content;
@@ -321,16 +330,20 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
321330
}),
322331
});
323332
} else if (provider === "ollama") {
324-
const ollamaBaseUrl = ollamaUrl || "http://localhost:11434";
325-
readmeResponse = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, {
326-
method: "POST",
327-
signal: controller.signal,
328-
headers: { "Content-Type": "application/json" },
329-
body: JSON.stringify({
330-
model,
331-
messages: [{ role: "user", content: readmePrompt }],
332-
}),
333-
});
333+
// const ollamaBaseUrl = ollamaUrl || "http://localhost:11434";
334+
// readmeResponse = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, {
335+
// method: "POST",
336+
// signal: controller.signal,
337+
// headers: { "Content-Type": "application/json" },
338+
// body: JSON.stringify({
339+
// model,
340+
// messages: [{ role: "user", content: readmePrompt }],
341+
// stream: false,
342+
// }),
343+
// });
344+
readmeResponse = await OllamaService.chat(model, [
345+
{ role: "user", content: readmePrompt },
346+
]);
334347
} else {
335348
readmeResponse = await fetch(currentProvider.baseUrl, {
336349
method: "POST",
@@ -347,7 +360,9 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
347360
});
348361
}
349362

350-
const readmeData = await readmeResponse.json();
363+
// const readmeData = await readmeResponse.json();
364+
const readmeData =
365+
provider === "ollama" ? readmeResponse : await readmeResponse.json();
351366
readmeContent = currentProvider.isAnthropic
352367
? readmeData.content[0].text
353368
: readmeData.choices[0].message.content;
@@ -383,16 +398,20 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
383398
}),
384399
});
385400
} else if (provider === "ollama") {
386-
const ollamaBaseUrl = ollamaUrl || "http://localhost:11434";
387-
partsResponse = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, {
388-
method: "POST",
389-
signal: controller.signal,
390-
headers: { "Content-Type": "application/json" },
391-
body: JSON.stringify({
392-
model,
393-
messages: [{ role: "user", content: partsPrompt }],
394-
}),
395-
});
401+
// const ollamaBaseUrl = ollamaUrl || "http://localhost:11434";
402+
// partsResponse = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, {
403+
// method: "POST",
404+
// signal: controller.signal,
405+
// headers: { "Content-Type": "application/json" },
406+
// body: JSON.stringify({
407+
// model,
408+
// messages: [{ role: "user", content: partsPrompt }],
409+
// stream: false,
410+
// }),
411+
// });
412+
partsResponse = await OllamaService.chat(model, [
413+
{ role: "user", content: partsPrompt },
414+
]);
396415
} else {
397416
partsResponse = await fetch(currentProvider.baseUrl, {
398417
method: "POST",
@@ -409,7 +428,9 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
409428
});
410429
}
411430

412-
const partsData = await partsResponse.json();
431+
// const partsData = await partsResponse.json();
432+
const partsData =
433+
provider === "ollama" ? partsResponse : await partsResponse.json();
413434
const participantsRaw = currentProvider.isAnthropic
414435
? partsData.content[0].text
415436
: partsData.choices[0].message.content;
@@ -631,26 +652,34 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
631652
let response;
632653

633654
if (provider === "ollama") {
634-
const ollamaBaseUrl = ollamaUrl || "http://localhost:11434";
635-
response = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, {
636-
method: "POST",
637-
signal: controller.signal,
638-
headers: {
639-
"Content-Type": "application/json",
655+
// const ollamaBaseUrl = ollamaUrl || "http://localhost:11434";
656+
// response = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, {
657+
// method: "POST",
658+
// signal: controller.signal,
659+
// headers: {
660+
// "Content-Type": "application/json",
661+
// },
662+
// body: JSON.stringify({
663+
// model,
664+
// messages: [
665+
// {
666+
// role: "system",
667+
// content:
668+
// "You are a neuroimaging data expert specializing in BIDS format conversion. Output only Python code without markdown fences or explanations.",
669+
// },
670+
// { role: "user", content: prompt },
671+
// ],
672+
// stream: false,
673+
// }),
674+
// });
675+
response = await OllamaService.chat(model, [
676+
{
677+
role: "system",
678+
content:
679+
"You are a neuroimaging data expert specializing in BIDS format conversion. Output only Python code without markdown fences or explanations.",
640680
},
641-
body: JSON.stringify({
642-
model,
643-
messages: [
644-
{
645-
role: "system",
646-
content:
647-
"You are a neuroimaging data expert specializing in BIDS format conversion. Output only Python code without markdown fences or explanations.",
648-
},
649-
{ role: "user", content: prompt },
650-
],
651-
stream: false,
652-
}),
653-
});
681+
{ role: "user", content: prompt },
682+
]);
654683
} else if (currentProvider.isAnthropic) {
655684
response = await fetch(currentProvider.baseUrl, {
656685
method: "POST",
@@ -695,18 +724,25 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
695724
});
696725
}
697726

698-
const data = await response.json();
727+
// const data = await response.json();
728+
const data = provider === "ollama" ? response : await response.json();
699729

700-
if (!response.ok) {
730+
// if (!response.ok) {
731+
// throw new Error(data.error?.message || "Failed to generate script");
732+
// }
733+
if (!response.ok && provider !== "ollama") {
701734
throw new Error(data.error?.message || "Failed to generate script");
702735
}
703736

704-
let script = "";
705-
if (currentProvider.isAnthropic) {
706-
script = data.content[0].text;
707-
} else {
708-
script = data.choices[0].message.content;
709-
}
737+
// let script = "";
738+
// if (currentProvider.isAnthropic) {
739+
// script = data.content[0].text;
740+
// } else {
741+
// script = data.choices[0].message.content;
742+
// }
743+
let script = currentProvider.isAnthropic
744+
? data.content[0].text
745+
: data.choices[0].message.content;
710746

711747
// Clean up markdown fences if AI included them anyway
712748
script = script.replace(/^```python\n?/g, "").replace(/\n?```$/g, "");
@@ -790,24 +826,32 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
790826
let response;
791827

792828
if (provider === "ollama") {
793-
const ollamaBaseUrl = ollamaUrl || "http://localhost:11434";
794-
response = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, {
795-
method: "POST",
796-
signal: controller.signal,
797-
headers: { "Content-Type": "application/json" },
798-
body: JSON.stringify({
799-
model,
800-
messages: [
801-
{
802-
role: "system",
803-
content:
804-
"You are a BIDS dataset architect. Output only valid YAML without markdown fences or explanations.",
805-
},
806-
{ role: "user", content: prompt },
807-
],
808-
stream: false,
809-
}),
810-
});
829+
// const ollamaBaseUrl = ollamaUrl || "http://localhost:11434";
830+
// response = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, {
831+
// method: "POST",
832+
// signal: controller.signal,
833+
// headers: { "Content-Type": "application/json" },
834+
// body: JSON.stringify({
835+
// model,
836+
// messages: [
837+
// {
838+
// role: "system",
839+
// content:
840+
// "You are a BIDS dataset architect. Output only valid YAML without markdown fences or explanations.",
841+
// },
842+
// { role: "user", content: prompt },
843+
// ],
844+
// stream: false,
845+
// }),
846+
// });
847+
response = await OllamaService.chat(model, [
848+
{
849+
role: "system",
850+
content:
851+
"You are a BIDS dataset architect. Output only valid YAML without markdown fences or explanations.",
852+
},
853+
{ role: "user", content: prompt },
854+
]);
811855
} else if (currentProvider.isAnthropic) {
812856
response = await fetch(currentProvider.baseUrl, {
813857
method: "POST",
@@ -847,8 +891,13 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
847891
});
848892
}
849893

850-
const data = await response.json();
851-
if (!response.ok) {
894+
// const data = await response.json();
895+
896+
// if (!response.ok) {
897+
// throw new Error(data.error?.message || "Failed to generate BIDSPlan");
898+
// }
899+
const data = provider === "ollama" ? response : await response.json();
900+
if (!response.ok && provider !== "ollama") {
852901
throw new Error(data.error?.message || "Failed to generate BIDSPlan");
853902
}
854903

@@ -1267,7 +1316,7 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
12671316
</FormControl>
12681317

12691318
{/* Ollama Server URL field */}
1270-
{provider === "ollama" && (
1319+
{/* {provider === "ollama" && (
12711320
<TextField
12721321
fullWidth
12731322
label="Ollama Server URL"
@@ -1276,7 +1325,7 @@ const LLMPanel: React.FC<LLMPanelProps> = ({
12761325
placeholder="http://localhost:11434"
12771326
sx={{ mb: 2 }}
12781327
/>
1279-
)}
1328+
)} */}
12801329
{/* Base Directory Path field (shows for ALL providers) */}
12811330
<TextField
12821331
fullWidth

0 commit comments

Comments
 (0)