Skip to content

Commit d33450a

Browse files
authored
Merge pull request #22 from trypear/pearai-models-6
Add deepseek for agent and static model choice defaults
2 parents 91aa9f2 + 99d8607 commit d33450a

7 files changed

Lines changed: 319 additions & 47 deletions

File tree

src/api/providers/deepseek.ts

Lines changed: 148 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,158 @@
1-
import { OpenAiHandler, OpenAiHandlerOptions } from "./openai"
2-
import { ModelInfo } from "../../shared/api"
3-
import { deepSeekModels, deepSeekDefaultModelId } from "../../shared/api"
4-
5-
export class DeepSeekHandler extends OpenAiHandler {
6-
constructor(options: OpenAiHandlerOptions) {
7-
super({
8-
...options,
9-
openAiApiKey: options.deepSeekApiKey ?? "not-provided",
10-
openAiModelId: options.apiModelId ?? deepSeekDefaultModelId,
11-
openAiBaseUrl: options.deepSeekBaseUrl ?? "https://api.deepseek.com/v1",
12-
openAiStreamingEnabled: true,
13-
includeMaxTokens: true,
1+
import { Anthropic } from "@anthropic-ai/sdk"
2+
import { ApiHandlerOptions, ModelInfo, deepSeekModels, deepSeekDefaultModelId } from "../../shared/api"
3+
import { ApiHandler, SingleCompletionHandler } from "../index"
4+
import { convertToR1Format } from "../transform/r1-format"
5+
import { convertToOpenAiMessages } from "../transform/openai-format"
6+
import { ApiStream } from "../transform/stream"
7+
8+
interface DeepSeekUsage {
9+
prompt_tokens: number
10+
completion_tokens: number
11+
prompt_cache_miss_tokens?: number
12+
prompt_cache_hit_tokens?: number
13+
}
14+
15+
export class DeepSeekHandler implements ApiHandler, SingleCompletionHandler {
16+
private options: ApiHandlerOptions
17+
18+
constructor(options: ApiHandlerOptions) {
19+
if (!options.deepSeekApiKey) {
20+
throw new Error("DeepSeek API key is required. Please provide it in the settings.")
21+
}
22+
this.options = options
23+
}
24+
25+
private get baseUrl(): string {
26+
return this.options.deepSeekBaseUrl ?? "https://api.deepseek.com/v1"
27+
}
28+
29+
async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
30+
const modelInfo = this.getModel().info
31+
const modelId = this.options.apiModelId ?? deepSeekDefaultModelId
32+
const isReasoner = modelId.includes("deepseek-reasoner")
33+
34+
const systemMessage = { role: "system", content: systemPrompt }
35+
const formattedMessages = isReasoner
36+
? convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
37+
: [systemMessage, ...convertToOpenAiMessages(messages)]
38+
39+
const response = await fetch(`${this.baseUrl}/chat/completions`, {
40+
method: "POST",
41+
headers: {
42+
"Content-Type": "application/json",
43+
Authorization: `Bearer ${this.options.deepSeekApiKey}`,
44+
},
45+
body: JSON.stringify({
46+
model: modelId,
47+
messages: formattedMessages,
48+
temperature: 0,
49+
stream: true,
50+
max_tokens: modelInfo.maxTokens,
51+
}),
1452
})
53+
54+
if (!response.ok) {
55+
throw new Error(`DeepSeek API error: ${response.statusText}`)
56+
}
57+
58+
if (!response.body) {
59+
throw new Error("No response body received from DeepSeek API")
60+
}
61+
62+
const reader = response.body.getReader()
63+
const decoder = new TextDecoder()
64+
let buffer = ""
65+
66+
try {
67+
while (true) {
68+
const { done, value } = await reader.read()
69+
if (done) break
70+
71+
buffer += decoder.decode(value, { stream: true })
72+
const lines = buffer.split("\n")
73+
buffer = lines.pop() || ""
74+
75+
for (const line of lines) {
76+
if (line.trim() === "") continue
77+
if (!line.startsWith("data: ")) continue
78+
79+
const data = line.slice(6)
80+
if (data === "[DONE]") continue
81+
82+
try {
83+
const chunk = JSON.parse(data)
84+
const delta = chunk.choices[0]?.delta ?? {}
85+
86+
if (delta.content) {
87+
yield {
88+
type: "text",
89+
text: delta.content,
90+
}
91+
}
92+
93+
if ("reasoning_content" in delta && delta.reasoning_content) {
94+
yield {
95+
type: "reasoning",
96+
text: delta.reasoning_content,
97+
}
98+
}
99+
100+
if (chunk.usage) {
101+
const usage = chunk.usage as DeepSeekUsage
102+
let inputTokens = (usage.prompt_tokens || 0) - (usage.prompt_cache_hit_tokens || 0)
103+
yield {
104+
type: "usage",
105+
inputTokens: inputTokens,
106+
outputTokens: usage.completion_tokens || 0,
107+
cacheReadTokens: usage.prompt_cache_hit_tokens || 0,
108+
cacheWriteTokens: usage.prompt_cache_miss_tokens || 0,
109+
}
110+
}
111+
} catch (error) {
112+
console.error("Error parsing DeepSeek response:", error)
113+
}
114+
}
115+
}
116+
} finally {
117+
reader.releaseLock()
118+
}
15119
}
16120

17-
override getModel(): { id: string; info: ModelInfo } {
121+
getModel(): { id: string; info: ModelInfo } {
18122
const modelId = this.options.apiModelId ?? deepSeekDefaultModelId
19123
return {
20124
id: modelId,
21125
info: deepSeekModels[modelId as keyof typeof deepSeekModels] || deepSeekModels[deepSeekDefaultModelId],
22126
}
23127
}
128+
129+
async completePrompt(prompt: string): Promise<string> {
130+
try {
131+
const response = await fetch(`${this.baseUrl}/chat/completions`, {
132+
method: "POST",
133+
headers: {
134+
"Content-Type": "application/json",
135+
Authorization: `Bearer ${this.options.deepSeekApiKey}`,
136+
},
137+
body: JSON.stringify({
138+
model: this.getModel().id,
139+
messages: [{ role: "user", content: prompt }],
140+
temperature: 0,
141+
stream: false,
142+
}),
143+
})
144+
145+
if (!response.ok) {
146+
throw new Error(`DeepSeek API error: ${response.statusText}`)
147+
}
148+
149+
const data = await response.json()
150+
return data.choices[0]?.message?.content || ""
151+
} catch (error) {
152+
if (error instanceof Error) {
153+
throw new Error(`DeepSeek completion error: ${error.message}`)
154+
}
155+
throw error
156+
}
157+
}
24158
}

src/api/providers/pearai.ts

Lines changed: 92 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,36 +1,109 @@
1-
import { OpenAiHandler } from "./openai"
21
import * as vscode from "vscode"
3-
import { AnthropicModelId, ApiHandlerOptions, ModelInfo, PEARAI_URL } from "../../shared/api"
2+
import { ApiHandlerOptions, PEARAI_URL, ModelInfo } from "../../shared/api"
43
import { AnthropicHandler } from "./anthropic"
4+
import { DeepSeekHandler } from "./deepseek"
5+
6+
// Shape of the PearAI server's /getPearAIAgentModels response.
interface PearAiModelsResponse {
	models: {
		// Keyed by PearAI model id (e.g. "pearai-model").
		[key: string]: {
			// Concrete provider model the alias resolves to — presumably a
			// Claude or DeepSeek id; only those prefixes are handled here.
			underlyingModel?: string
			// NOTE(review): server sends extra fields we don't consume — confirm schema.
			[key: string]: any
		}
	}
	defaultModelId: string
}
15+
16+
export class PearAiHandler {
17+
private handler!: AnthropicHandler | DeepSeekHandler
518

6-
export class PearAiHandler extends AnthropicHandler {
719
constructor(options: ApiHandlerOptions) {
820
if (!options.pearaiApiKey) {
921
vscode.window.showErrorMessage("PearAI API key not found.", "Login to PearAI").then(async (selection) => {
1022
if (selection === "Login to PearAI") {
1123
const extensionUrl = `${vscode.env.uriScheme}://pearai.pearai/auth`
1224
const callbackUri = await vscode.env.asExternalUri(vscode.Uri.parse(extensionUrl))
13-
1425
vscode.env.openExternal(
1526
await vscode.env.asExternalUri(
16-
vscode.Uri.parse(
17-
`https://trypear.ai/signin?callback=${callbackUri.toString()}`, // Change to localhost if running locally
18-
),
27+
vscode.Uri.parse(`https://trypear.ai/signin?callback=${callbackUri.toString()}`),
1928
),
2029
)
2130
}
2231
})
2332
throw new Error("PearAI API key not found. Please login to PearAI.")
2433
}
25-
super({
34+
35+
// Initialize with a default handler synchronously
36+
this.handler = new AnthropicHandler({
2637
...options,
2738
apiKey: options.pearaiApiKey,
2839
anthropicBaseUrl: PEARAI_URL,
40+
apiModelId: "claude-3-5-sonnet-20241022",
41+
})
42+
43+
// Then try to initialize the correct handler asynchronously
44+
this.initializeHandler(options).catch((error) => {
45+
console.error("Failed to initialize PearAI handler:", error)
2946
})
3047
}
3148

32-
override getModel(): { id: AnthropicModelId; info: ModelInfo } {
33-
const baseModel = super.getModel()
49+
private async initializeHandler(options: ApiHandlerOptions): Promise<void> {
50+
const modelId = options.apiModelId || "pearai-model"
51+
52+
if (modelId === "pearai-model") {
53+
try {
54+
const response = await fetch(`${PEARAI_URL}/getPearAIAgentModels`)
55+
if (!response.ok) {
56+
throw new Error(`Failed to fetch models: ${response.statusText}`)
57+
}
58+
const data = (await response.json()) as PearAiModelsResponse
59+
const underlyingModel = data.models[modelId]?.underlyingModel || "claude-3-5-sonnet-20241022"
60+
console.dir(underlyingModel)
61+
if (underlyingModel.startsWith("deepseek")) {
62+
this.handler = new DeepSeekHandler({
63+
...options,
64+
deepSeekApiKey: options.pearaiApiKey,
65+
deepSeekBaseUrl: PEARAI_URL,
66+
apiModelId: underlyingModel,
67+
})
68+
} else {
69+
// Default to Claude
70+
this.handler = new AnthropicHandler({
71+
...options,
72+
apiKey: options.pearaiApiKey,
73+
anthropicBaseUrl: PEARAI_URL,
74+
apiModelId: underlyingModel,
75+
})
76+
}
77+
} catch (error) {
78+
console.error("Error fetching PearAI models:", error)
79+
// Default to Claude if there's an error
80+
this.handler = new AnthropicHandler({
81+
...options,
82+
apiKey: options.pearaiApiKey,
83+
anthropicBaseUrl: PEARAI_URL,
84+
apiModelId: "claude-3-5-sonnet-20241022",
85+
})
86+
}
87+
} else if (modelId.startsWith("claude")) {
88+
this.handler = new AnthropicHandler({
89+
...options,
90+
apiKey: options.pearaiApiKey,
91+
anthropicBaseUrl: PEARAI_URL,
92+
})
93+
} else if (modelId.startsWith("deepseek")) {
94+
this.handler = new DeepSeekHandler({
95+
...options,
96+
deepSeekApiKey: options.pearaiApiKey,
97+
deepSeekBaseUrl: PEARAI_URL,
98+
})
99+
} else {
100+
throw new Error(`Unsupported model: ${modelId}`)
101+
}
102+
}
103+
104+
getModel(): { id: string; info: ModelInfo } {
105+
console.dir(this.handler)
106+
const baseModel = this.handler.getModel()
34107
return {
35108
id: baseModel.id,
36109
info: {
@@ -42,4 +115,13 @@ export class PearAiHandler extends AnthropicHandler {
42115
},
43116
}
44117
}
118+
119+
async *createMessage(systemPrompt: string, messages: any[]): AsyncGenerator<any> {
120+
const generator = this.handler.createMessage(systemPrompt, messages)
121+
yield* generator
122+
}
123+
124+
async completePrompt(prompt: string): Promise<string> {
125+
return this.handler.completePrompt(prompt)
126+
}
45127
}

src/shared/api.ts

Lines changed: 68 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -698,18 +698,22 @@ export const deepSeekModels = {
698698
maxTokens: 8192,
699699
contextWindow: 64_000,
700700
supportsImages: false,
701-
supportsPromptCache: false,
701+
supportsPromptCache: true,
702702
inputPrice: 0.014, // $0.014 per million tokens
703703
outputPrice: 0.28, // $0.28 per million tokens
704+
cacheWritesPrice: 0.27, // $0.27 per million tokens (cache miss)
705+
cacheReadsPrice: 0.07, // $0.07 per million tokens (cache hit)
704706
description: `DeepSeek-V3 achieves a significant breakthrough in inference speed over previous models. It tops the leaderboard among open-source models and rivals the most advanced closed-source models globally.`,
705707
},
706708
"deepseek-reasoner": {
707709
maxTokens: 8192,
708710
contextWindow: 64_000,
709711
supportsImages: false,
710-
supportsPromptCache: false,
712+
supportsPromptCache: true,
711713
inputPrice: 0.55, // $0.55 per million tokens
712714
outputPrice: 2.19, // $2.19 per million tokens
715+
cacheWritesPrice: 0.55, // $0.55 per million tokens (cache miss)
716+
cacheReadsPrice: 0.14, // $0.14 per million tokens (cache hit)
713717
description: `DeepSeek-R1 achieves performance comparable to OpenAI-o1 across math, code, and reasoning tasks.`,
714718
},
715719
} as const satisfies Record<string, ModelInfo>
@@ -791,3 +795,65 @@ export const unboundDefaultModelInfo: ModelInfo = {
791795
export const PEARAI_URL = "https://stingray-app-gb2an.ondigitalocean.app/pearai-server-api2/integrations/cline"
792796
// DEV:
793797
// export const PEARAI_URL = "http://localhost:8000/integrations/cline"
798+
799+
// PearAI
// Static model catalogue for the PearAI provider. Prices are USD per million
// tokens; cacheWritesPrice/cacheReadsPrice are the prompt-cache miss/hit rates.
export type PearAiModelId = keyof typeof pearAiModels
export const pearAiDefaultModelId: PearAiModelId = "pearai-model"
export const pearAiModels = {
	// Server-side alias resolved to an underlying model at runtime (see
	// PearAiHandler.initializeHandler). Static defaults mirror deepseek-chat.
	"pearai-model": {
		maxTokens: 8192,
		contextWindow: 64000,
		supportsImages: false,
		supportsPromptCache: true,
		inputPrice: 0.014,
		outputPrice: 0.28,
		cacheWritesPrice: 0.27,
		cacheReadsPrice: 0.07,
		description:
			"DeepSeek-V3 achieves a significant breakthrough in inference speed over previous models. It tops the leaderboard among open-source models and rivals the most advanced closed-source models globally.",
	},
	"claude-3-5-sonnet-20241022": {
		maxTokens: 8192,
		contextWindow: 200000,
		supportsImages: true,
		supportsComputerUse: true,
		supportsPromptCache: true,
		inputPrice: 3.0,
		outputPrice: 15.0,
		cacheWritesPrice: 3.75,
		cacheReadsPrice: 0.3,
	},
	"claude-3-5-haiku-20241022": {
		maxTokens: 8192,
		contextWindow: 200000,
		supportsImages: false,
		supportsPromptCache: true,
		inputPrice: 1.0,
		outputPrice: 5.0,
		cacheWritesPrice: 1.25,
		cacheReadsPrice: 0.1,
	},
	"deepseek-chat": {
		maxTokens: 8192,
		contextWindow: 64000,
		supportsImages: false,
		supportsPromptCache: true,
		inputPrice: 0.014,
		outputPrice: 0.28,
		cacheWritesPrice: 0.27,
		cacheReadsPrice: 0.07,
		description:
			"DeepSeek-V3 achieves a significant breakthrough in inference speed over previous models. It tops the leaderboard among open-source models and rivals the most advanced closed-source models globally.",
	},
	"deepseek-reasoner": {
		maxTokens: 8192,
		contextWindow: 64000,
		supportsImages: false,
		supportsPromptCache: true,
		inputPrice: 0.55,
		outputPrice: 2.19,
		cacheWritesPrice: 0.55,
		cacheReadsPrice: 0.14,
		description: "DeepSeek-R1 achieves performance comparable to OpenAI-o1 across math, code, and reasoning tasks.",
	},
} as const satisfies Record<string, ModelInfo>

0 commit comments

Comments
 (0)