Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions workspaces/lightspeed/.changeset/late-beers-taste.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
---
'@red-hat-developer-hub/backstage-plugin-lightspeed-backend': minor
'@red-hat-developer-hub/backstage-plugin-lightspeed': minor
---

- Hide notebooks tab when `lightspeed.notebooks.enabled: false` in config
- Fix notebook queries to display correct model from config instead of chat's selected model
- Add `/notebook-conversation-ids` endpoint to filter notebook conversations from chat list even when notebooks disabled
10 changes: 7 additions & 3 deletions workspaces/lightspeed/app-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,13 @@ app:
organization:
name: Red Hat

# Disable AI Notebooks feature by default
lightspeed:
notebooks:
enabled: true
enabled: ${NOTEBOOKS_ENABLED:-false}
queryDefaults:
model: llama3.2:3b
provider_id: vllm
model: ${NOTEBOOKS_QUERY_MODEL}
provider_id: ${NOTEBOOKS_QUERY_PROVIDER_ID}

backend:
# Used for enabling authentication, secret is shared by all backend plugins
Expand Down Expand Up @@ -118,3 +119,6 @@ catalog:
pullRequestBranchName: backstage-integration
rules:
- allow: [Component, System, API, Resource, Location]
locations:
- type: file
target: ./catalog-info.yaml
5 changes: 5 additions & 0 deletions workspaces/lightspeed/playwright.config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,11 @@ export default defineConfig({
port: 3000,
reuseExistingServer: true,
cwd: __dirname,
env: {
NOTEBOOKS_ENABLED: 'true',
NOTEBOOKS_QUERY_MODEL: 'gpt-4',
NOTEBOOKS_QUERY_PROVIDER_ID: 'openai',
},
},

retries: process.env.CI ? 2 : 0,
Expand Down
42 changes: 29 additions & 13 deletions workspaces/lightspeed/plugins/lightspeed-backend/src/plugin.ts
Original file line number Diff line number Diff line change
Expand Up @@ -64,22 +64,38 @@ export const lightspeedPlugin = createBackendPlugin({

const aiNotebooksEnabled =
config.getOptionalBoolean('lightspeed.notebooks.enabled') ?? false;

if (aiNotebooksEnabled) {
http.use(
await createNotebooksRouter({
config: config,
logger: logger,
httpAuth: httpAuth,
userInfo: userInfo,
permissions,
}),
const queryModel = config.getOptionalString(
'lightspeed.notebooks.queryDefaults.model',
);
const queryProvider = config.getOptionalString(
'lightspeed.notebooks.queryDefaults.provider_id',
);
logger.info('AI Notebooks enabled');

http.addAuthPolicy({
path: '/notebooks/health',
allow: 'unauthenticated',
});
if (!queryModel || !queryProvider) {
logger.warn(
'AI Notebooks feature is enabled but required configuration is missing. ' +
'Please configure lightspeed.notebooks.queryDefaults.model and lightspeed.notebooks.queryDefaults.provider_id. ' +
'Notebooks will not be available until these are set.',
);
} else {
http.use(
await createNotebooksRouter({
config: config,
logger: logger,
httpAuth: httpAuth,
userInfo: userInfo,
permissions,
}),
);
logger.info('AI Notebooks enabled');

http.addAuthPolicy({
path: '/notebooks/health',
allow: 'unauthenticated',
});
}
}

// Configure authentication policies
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,20 +69,14 @@ export async function createNotebooksRouter(
config.getOptionalNumber('lightspeed.servicePort') ??
DEFAULT_LIGHTSPEED_SERVICE_PORT;
const lightspeedBaseUrl = `http://${DEFAULT_LIGHTSPEED_SERVICE_HOST}:${lightSpeedPort}`;
const queryModel = config.getOptionalString(
const queryModel = config.getString(
'lightspeed.notebooks.queryDefaults.model',
);
const queryProvider = config.getOptionalString(
const queryProvider = config.getString(
'lightspeed.notebooks.queryDefaults.provider_id',
);
const systemPrompt = NOTEBOOKS_SYSTEM_PROMPT;

if (!queryModel || !queryProvider) {
throw new Error(
'Query model and provider are required. Please configure lightspeed.notebooks.queryDefaults.model and lightspeed.notebooks.queryDefaults.provider_id',
);
}

logger.info(
`AI Notebooks connecting to Lightspeed-Core at ${lightspeedBaseUrl}`,
);
Expand Down Expand Up @@ -496,9 +490,9 @@ export async function createNotebooksRouter(
tools: [{ type: 'file_search', vector_store_ids: [sessionId] }],
model: `${queryProvider}/${queryModel}`,
stream: true,
temperature: 0.05,
temperature: 0.35,
shield_ids: [],
max_tool_calls: 10,
max_tool_calls: 15,
...(conversationId && { conversation: conversationId }),
};

Expand Down Expand Up @@ -558,6 +552,7 @@ export async function createNotebooksRouter(
.pipe(createResponsesApiTransform(session, sessionId, userId))
.pipe(res);
}
// NOTE(review): dropped leftover debug statement (`console.log('response1234', response.body)`)
// — it dumped the raw upstream response body to stdout on every streamed query.
break;
}
}),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -415,6 +415,43 @@ export async function createRouter(
}
});

// GET /notebook-conversation-ids
// Returns the chat conversation IDs that belong to the caller's notebook
// sessions, so the frontend can filter notebook-backed conversations out of
// the plain chat history list (even when the notebooks feature is disabled).
router.get('/notebook-conversation-ids', async (req, res) => {
  try {
    // Resolve the calling user from the request credentials.
    const credentials = await httpAuth.credentials(req);
    const user = await userInfo.getUserInfo(credentials);
    const userId = user.userEntityRef;

    // Each notebook session is backed by a vector store; its metadata links
    // it to the owning user and (optionally) a chat conversation.
    const vectorStoresPage = await vectorStoresOperator.vectorStores.list();
    const vectorStores = vectorStoresPage.data || [];

    // Keep only this user's stores that carry a conversation_id.
    const conversationIds = vectorStores
      .filter(store => (store.metadata?.user_id as string) === userId)
      .map(store => store.metadata?.conversation_id as string | null)
      .filter((id): id is string => Boolean(id));

    res.json({
      conversation_ids: conversationIds,
    });
  } catch (error) {
    const errormsg = `Error fetching notebook conversation IDs: ${error}`;
    logger.error(errormsg);

    // 403 for authorization failures, 500 for everything else.
    if (error instanceof NotAllowedError) {
      res.status(403).json({ error: error.message });
    } else {
      res.status(500).json({ error: errormsg });
    }
  }
});

// ─── Proxy Middleware (existing) ────────────────────────────────────

router.use('/', async (req, res, next) => {
Expand Down
30 changes: 30 additions & 0 deletions workspaces/lightspeed/plugins/lightspeed/config.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,5 +36,35 @@ export interface Config {
*/
message: string;
}>;
/**
* Configuration for AI Notebooks
* @visibility frontend
*/
notebooks?: {
/**
* Enable/disable AI Notebooks feature
* When enabled, exposes AI Notebooks REST API endpoints for document-based conversations with RAG.
* Requires Lightspeed service to be running (default: http://0.0.0.0:8080).
* @default false
* @visibility frontend
*/
enabled: boolean;
/**
* Query configuration for notebooks
* @visibility frontend
*/
queryDefaults?: {
/**
* Model to use for answering queries
* @visibility frontend
*/
model: string;
/**
* AI provider for the query model
* @visibility frontend
*/
provider_id: string;
};
};
};
}
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,20 @@ export class LightspeedApiClient implements LightspeedAPI {
return response.conversations ?? [];
}

async getNotebookConversationIds() {
const baseUrl = await this.getBaseUrl();
const result = await this.fetcher(`${baseUrl}/notebook-conversation-ids`);

if (!result.ok) {
throw new Error(
`failed to get notebook conversation IDs, status ${result.status}: ${result.statusText}`,
);
}

const response = await result.json();
return response.conversation_ids ?? [];
}

async stopMessage(requestId: string): Promise<{ success: boolean }> {
const baseUrl = await this.getBaseUrl();
const response = await this.fetchApi.fetch(
Expand Down
1 change: 1 addition & 0 deletions workspaces/lightspeed/plugins/lightspeed/src/api/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ export type LightspeedAPI = {
newName: string,
) => Promise<{ success: boolean }>;
getConversations: () => Promise<ConversationList>;
getNotebookConversationIds: () => Promise<string[]>;
getFeedbackStatus: () => Promise<boolean>;
captureFeedback: (payload: CaptureFeedback) => Promise<{ response: string }>;
isTopicRestrictionEnabled: () => Promise<boolean>;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ import {
} from 'react-dropzone';
import { useLocation, useMatch, useNavigate } from 'react-router-dom';

import { configApiRef, useApi } from '@backstage/core-plugin-api';

import { Button, makeStyles } from '@material-ui/core';
import {
Chatbot,
Expand Down Expand Up @@ -87,6 +89,7 @@ import {
useLastOpenedConversation,
useLightspeedDeletePermission,
useLightspeedNotebooksPermission,
useNotebookConversationIds,
useNotebookSession,
useNotebookSessions,
usePinnedChatsSettings,
Expand Down Expand Up @@ -487,9 +490,16 @@ export const LightspeedChat = ({
const classes = useStyles();
const { t } = useTranslation();
const navigate = useNavigate();
const configApi = useApi(configApiRef);
const notebooksEnabled =
configApi.getOptionalBoolean('lightspeed.notebooks.enabled') ?? false;
const notebooksRouteMatch = useMatch('/lightspeed/notebooks');
const notebookViewRouteMatch = useMatch('/lightspeed/notebooks/:notebookId');
const routeNotebookId = notebookViewRouteMatch?.params?.notebookId;
const isOnNotebookRoute = Boolean(
notebooksRouteMatch || notebookViewRouteMatch,
);
const shouldShowTabs = notebooksEnabled || isOnNotebookRoute;
const {
displayMode,
setDisplayMode,
Expand Down Expand Up @@ -529,6 +539,9 @@ export const LightspeedChat = ({
useLightspeedNotebooksPermission();
const notebooksPermissionResolved =
!notebooksPermissionLoading && hasNotebooksAccess;

const { data: notebookConversationIdsArray = [] } =
useNotebookConversationIds();
const { data: notebooks = [], refetch: refetchNotebooks } =
useNotebookSessions(notebooksPermissionResolved);
const hasNotebooks = notebooks.length > 0;
Expand Down Expand Up @@ -588,9 +601,9 @@ export const LightspeedChat = ({
const wasStoppedByUserRef = useRef(false);
const { isReady, lastOpenedId, setLastOpenedId, clearLastOpenedId } =
useLastOpenedConversation(user);
// Chat vs Notebooks tabs are fullscreen-only; overlay and docked always show Chat.
const showChatPanel = !isFullscreenMode || activeTab === 0;
const showNotebooksPanel = isFullscreenMode && activeTab !== 0;
const showNotebooksPanel =
(notebooksEnabled || isOnNotebookRoute) && activeTab !== 0;
const [isChatHistoryDrawerOpen, setIsChatHistoryDrawerOpen] =
useState<boolean>(!isMobile && isFullscreenMode);

Expand Down Expand Up @@ -1066,13 +1079,8 @@ export const LightspeedChat = ({
);

const notebookConversationIds = useMemo(
() =>
new Set(
notebooks
.map(n => n.metadata?.conversation_id)
.filter((id): id is string => !!id),
),
[notebooks],
() => new Set(notebookConversationIdsArray),
[notebookConversationIdsArray],
);

const chatOnlyConversations = useMemo(
Expand Down Expand Up @@ -1765,7 +1773,7 @@ export const LightspeedChat = ({
onMcpSettingsClick={() => setIsMcpSettingsOpen(true)}
/>
</ChatbotHeader>
{isFullscreenMode && (
{isFullscreenMode && shouldShowTabs && (
<>
<Tabs
activeKey={activeTab}
Expand Down Expand Up @@ -1871,7 +1879,6 @@ export const LightspeedChat = ({
avatar={avatar}
profileLoading={profileLoading}
topicRestrictionEnabled={topicRestrictionEnabled}
selectedModel={selectedModel}
onClose={handleCloseNotebook}
/>
)}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,14 +154,28 @@ const mockUseLightspeedDrawerContext =
typeof useLightspeedDrawerContext
>;

const configAPi = mockApis.config({});
// Static app-config mock with the AI Notebooks feature enabled and its
// query defaults populated, so notebook-dependent UI paths render in tests.
// NOTE(review): name keeps the existing `configAPi` typo — it is referenced
// throughout this test file, so renaming here alone would break those uses.
const configAPi = mockApis.config({
  data: {
    lightspeed: {
      notebooks: {
        enabled: true,
        queryDefaults: {
          model: 'gpt-4',
          provider_id: 'openai',
        },
      },
    },
  },
});

const mockLightspeedApi = {
getAllModels: jest.fn().mockResolvedValue([]),
getConversationMessages: jest.fn().mockResolvedValue([]),
createMessage: jest.fn().mockResolvedValue(new Response().body),
deleteConversation: jest.fn().mockResolvedValue({ success: true }),
renameConversation: jest.fn().mockResolvedValue({ success: true }),
getConversations: jest.fn().mockResolvedValue([]),
getNotebookConversationIds: jest.fn().mockResolvedValue([]),
getFeedbackStatus: jest.fn().mockResolvedValue(false),
captureFeedback: jest.fn().mockResolvedValue({ response: 'success' }),
isTopicRestrictionEnabled: jest.fn().mockResolvedValue(false),
Expand Down
Loading
Loading