Skip to content

Commit 7863303

Browse files
authored
Fall back to execa if VSCode terminal integration fails (RooCodeInc#3049)
1 parent 33a1f35 commit 7863303

34 files changed

Lines changed: 382 additions & 218 deletions

.changeset/many-boats-hunt.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
+---
+"roo-cline": patch
+---
+
+Use a fallback terminal if VSCode shell integration fails

.roomodes

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
 	"customModes": [
 		{
 			"slug": "test",
-			"name": "Test",
+			"name": "🧪 Test",
 			"roleDefinition": "You are Roo, a Jest testing specialist with deep expertise in:\n- Writing and maintaining Jest test suites\n- Test-driven development (TDD) practices\n- Mocking and stubbing with Jest\n- Integration testing strategies\n- TypeScript testing patterns\n- Code coverage analysis\n- Test performance optimization\n\nYour focus is on maintaining high test quality and coverage across the codebase, working primarily with:\n- Test files in __tests__ directories\n- Mock implementations in __mocks__\n- Test utilities and helpers\n- Jest configuration and setup\n\nYou ensure tests are:\n- Well-structured and maintainable\n- Following Jest best practices\n- Properly typed with TypeScript\n- Providing meaningful coverage\n- Using appropriate mocking strategies",
 			"groups": [
 				"read",
@@ -20,7 +20,7 @@
 		},
 		{
 			"slug": "translate",
-			"name": "Translate",
+			"name": "🌐 Translate",
 			"roleDefinition": "You are Roo, a linguistic specialist focused on translating and managing localization files. Your responsibility is to help maintain and update translation files for the application, ensuring consistency and accuracy across all language resources.",
 			"groups": [
 				"read",

scripts/run-tests.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
 const { execSync } = require("child_process")

 if (process.platform === "win32") {
-	execSync("npm-run-all test:* lint:*", { stdio: "inherit" })
+	execSync("npm-run-all test:*", { stdio: "inherit" })
 } else {
-	execSync("npm-run-all -p test:* lint:*", { stdio: "inherit" })
+	execSync("npm-run-all -p test:*", { stdio: "inherit" })
 }

src/__mocks__/jest.setup.ts

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,15 @@
+import nock from "nock"
+
+nock.disableNetConnect()
+
+export function allowNetConnect(host?: string | RegExp) {
+	if (host) {
+		nock.enableNetConnect(host)
+	} else {
+		nock.enableNetConnect()
+	}
+}
+
 // Mock the logger globally for all tests
 jest.mock("../utils/logging", () => ({
 	logger: {

src/api/providers/__tests__/glama.test.ts

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,36 @@ import { Anthropic } from "@anthropic-ai/sdk"
55
import { GlamaHandler } from "../glama"
66
import { ApiHandlerOptions } from "../../../shared/api"
77

8+
// Mock dependencies
9+
jest.mock("../fetchers/cache", () => ({
10+
getModels: jest.fn().mockImplementation(() => {
11+
return Promise.resolve({
12+
"anthropic/claude-3-7-sonnet": {
13+
maxTokens: 8192,
14+
contextWindow: 200000,
15+
supportsImages: true,
16+
supportsPromptCache: true,
17+
inputPrice: 3,
18+
outputPrice: 15,
19+
cacheWritesPrice: 3.75,
20+
cacheReadsPrice: 0.3,
21+
description: "Claude 3.7 Sonnet",
22+
thinking: false,
23+
supportsComputerUse: true,
24+
},
25+
"openai/gpt-4o": {
26+
maxTokens: 4096,
27+
contextWindow: 128000,
28+
supportsImages: true,
29+
supportsPromptCache: false,
30+
inputPrice: 5,
31+
outputPrice: 15,
32+
description: "GPT-4o",
33+
},
34+
})
35+
}),
36+
}))
37+
838
// Mock OpenAI client
939
const mockCreate = jest.fn()
1040
const mockWithResponse = jest.fn()

src/api/providers/__tests__/openrouter.test.ts

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,38 @@ import { ApiHandlerOptions } from "../../../shared/api"
99
// Mock dependencies
1010
jest.mock("openai")
1111
jest.mock("delay", () => jest.fn(() => Promise.resolve()))
12+
jest.mock("../fetchers/cache", () => ({
13+
getModels: jest.fn().mockImplementation(() => {
14+
return Promise.resolve({
15+
"anthropic/claude-3.7-sonnet": {
16+
maxTokens: 8192,
17+
contextWindow: 200000,
18+
supportsImages: true,
19+
supportsPromptCache: true,
20+
inputPrice: 3,
21+
outputPrice: 15,
22+
cacheWritesPrice: 3.75,
23+
cacheReadsPrice: 0.3,
24+
description: "Claude 3.7 Sonnet",
25+
thinking: false,
26+
supportsComputerUse: true,
27+
},
28+
"anthropic/claude-3.7-sonnet:thinking": {
29+
maxTokens: 128000,
30+
contextWindow: 200000,
31+
supportsImages: true,
32+
supportsPromptCache: true,
33+
inputPrice: 3,
34+
outputPrice: 15,
35+
cacheWritesPrice: 3.75,
36+
cacheReadsPrice: 0.3,
37+
description: "Claude 3.7 Sonnet with thinking",
38+
thinking: true,
39+
supportsComputerUse: true,
40+
},
41+
})
42+
}),
43+
}))
1244

1345
describe("OpenRouterHandler", () => {
1446
const mockOptions: ApiHandlerOptions = {

src/api/providers/__tests__/requesty.test.ts

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
+// npx jest src/api/providers/__tests__/requesty.test.ts
+
 import { Anthropic } from "@anthropic-ai/sdk"
 import OpenAI from "openai"
 import { ApiHandlerOptions, ModelInfo } from "../../../shared/api"
@@ -9,6 +11,22 @@ import { convertToR1Format } from "../../transform/r1-format"
 jest.mock("openai")
 jest.mock("../../transform/openai-format")
 jest.mock("../../transform/r1-format")
+jest.mock("../fetchers/cache", () => ({
+	getModels: jest.fn().mockResolvedValue({
+		"test-model": {
+			maxTokens: 8192,
+			contextWindow: 200_000,
+			supportsImages: true,
+			supportsComputerUse: true,
+			supportsPromptCache: true,
+			inputPrice: 3.0,
+			outputPrice: 15.0,
+			cacheWritesPrice: 3.75,
+			cacheReadsPrice: 0.3,
+			description: "Test model description",
+		},
+	}),
+}))

 describe("RequestyHandler", () => {
 	let handler: RequestyHandler

src/api/providers/__tests__/unbound.test.ts

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,58 @@ import { ApiHandlerOptions } from "../../../shared/api"
66

77
import { UnboundHandler } from "../unbound"
88

9+
// Mock dependencies
10+
jest.mock("../fetchers/cache", () => ({
11+
getModels: jest.fn().mockImplementation(() => {
12+
return Promise.resolve({
13+
"anthropic/claude-3-5-sonnet-20241022": {
14+
maxTokens: 8192,
15+
contextWindow: 200000,
16+
supportsImages: true,
17+
supportsPromptCache: true,
18+
inputPrice: 3,
19+
outputPrice: 15,
20+
cacheWritesPrice: 3.75,
21+
cacheReadsPrice: 0.3,
22+
description: "Claude 3.5 Sonnet",
23+
thinking: false,
24+
supportsComputerUse: true,
25+
},
26+
"anthropic/claude-3-7-sonnet-20250219": {
27+
maxTokens: 8192,
28+
contextWindow: 200000,
29+
supportsImages: true,
30+
supportsPromptCache: true,
31+
inputPrice: 3,
32+
outputPrice: 15,
33+
cacheWritesPrice: 3.75,
34+
cacheReadsPrice: 0.3,
35+
description: "Claude 3.7 Sonnet",
36+
thinking: false,
37+
supportsComputerUse: true,
38+
},
39+
"openai/gpt-4o": {
40+
maxTokens: 4096,
41+
contextWindow: 128000,
42+
supportsImages: true,
43+
supportsPromptCache: false,
44+
inputPrice: 5,
45+
outputPrice: 15,
46+
description: "GPT-4o",
47+
},
48+
"openai/o3-mini": {
49+
maxTokens: 4096,
50+
contextWindow: 128000,
51+
supportsImages: true,
52+
supportsPromptCache: false,
53+
inputPrice: 1,
54+
outputPrice: 3,
55+
description: "O3 Mini",
56+
},
57+
})
58+
}),
59+
}))
60+
961
// Mock OpenAI client
1062
const mockCreate = jest.fn()
1163
const mockWithResponse = jest.fn()

src/api/providers/fetchers/__tests__/openrouter.test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ nockBack.setMode("lockdown")

 describe("OpenRouter API", () => {
 	describe("getOpenRouterModels", () => {
-		it("fetches models and validates schema", async () => {
+		it.skip("fetches models and validates schema", async () => {
 			const { nockDone } = await nockBack("openrouter-models.json")

 			const models = await getOpenRouterModels()

src/api/providers/glama.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ export class GlamaHandler extends RouterProvider implements SingleCompletionHand
 	constructor(options: ApiHandlerOptions) {
 		super({
 			options,
-			name: "unbound",
+			name: "glama",
			baseURL: "https://glama.ai/api/gateway/openai/v1",
			apiKey: options.glamaApiKey,
			modelId: options.glamaModelId,

0 commit comments

Comments
 (0)