Skip to content

Commit c7ce964

Browse files
authored
Merge branch 'main' into default-profile
2 parents 3bf80ba + 9df8048 commit c7ce964

7 files changed

Lines changed: 185 additions & 1 deletion

File tree

NEXT_CHANGELOG.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
* Added automatic detection of AI coding agents (Antigravity, Claude Code, Cline, Codex, Copilot CLI, Cursor, Gemini CLI, OpenCode) in the user-agent string. The SDK now appends `agent/<name>` to HTTP request headers when running inside a known AI agent environment.
88

99
### Bug Fixes
10+
* Added `X-Databricks-Org-Id` header to deprecated workspace SCIM APIs (Groups, ServicePrincipals, Users) for SPOG host compatibility.
1011
* Fixed Databricks CLI authentication to detect when the cached token's scopes don't match the SDK's configured scopes. Previously, a scope mismatch was silently ignored, causing requests to use wrong permissions. The SDK now raises an error with instructions to re-authenticate.
1112

1213
### Security Vulnerabilities
@@ -24,4 +25,4 @@
2425
* Add `cascade` field for `com.databricks.sdk.service.pipelines.DeletePipelineRequest`.
2526
* Add `defaultBranch` field for `com.databricks.sdk.service.postgres.ProjectSpec`.
2627
* Add `defaultBranch` field for `com.databricks.sdk.service.postgres.ProjectStatus`.
27-
* Add `ingress` and `ingressDryRun` fields for `com.databricks.sdk.service.settings.AccountNetworkPolicy`.
28+
* Add `ingress` and `ingressDryRun` fields for `com.databricks.sdk.service.settings.AccountNetworkPolicy`.

databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,9 @@ public Group create(Group request) {
2424
ApiClient.setQuery(req, request);
2525
req.withHeader("Accept", "application/json");
2626
req.withHeader("Content-Type", "application/json");
27+
if (apiClient.workspaceId() != null) {
28+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
29+
}
2730
return apiClient.execute(req, Group.class);
2831
} catch (IOException e) {
2932
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -36,6 +39,9 @@ public void delete(DeleteGroupRequest request) {
3639
try {
3740
Request req = new Request("DELETE", path);
3841
ApiClient.setQuery(req, request);
42+
if (apiClient.workspaceId() != null) {
43+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
44+
}
3945
apiClient.execute(req, Void.class);
4046
} catch (IOException e) {
4147
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -49,6 +55,9 @@ public Group get(GetGroupRequest request) {
4955
Request req = new Request("GET", path);
5056
ApiClient.setQuery(req, request);
5157
req.withHeader("Accept", "application/json");
58+
if (apiClient.workspaceId() != null) {
59+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
60+
}
5261
return apiClient.execute(req, Group.class);
5362
} catch (IOException e) {
5463
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -62,6 +71,9 @@ public ListGroupsResponse list(ListGroupsRequest request) {
6271
Request req = new Request("GET", path);
6372
ApiClient.setQuery(req, request);
6473
req.withHeader("Accept", "application/json");
74+
if (apiClient.workspaceId() != null) {
75+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
76+
}
6577
return apiClient.execute(req, ListGroupsResponse.class);
6678
} catch (IOException e) {
6779
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -75,6 +87,9 @@ public void patch(PartialUpdate request) {
7587
Request req = new Request("PATCH", path, apiClient.serialize(request));
7688
ApiClient.setQuery(req, request);
7789
req.withHeader("Content-Type", "application/json");
90+
if (apiClient.workspaceId() != null) {
91+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
92+
}
7893
apiClient.execute(req, Void.class);
7994
} catch (IOException e) {
8095
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -88,6 +103,9 @@ public void update(Group request) {
88103
Request req = new Request("PUT", path, apiClient.serialize(request));
89104
ApiClient.setQuery(req, request);
90105
req.withHeader("Content-Type", "application/json");
106+
if (apiClient.workspaceId() != null) {
107+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
108+
}
91109
apiClient.execute(req, Void.class);
92110
} catch (IOException e) {
93111
throw new DatabricksException("IO error: " + e.getMessage(), e);

databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,9 @@ public ServicePrincipal create(ServicePrincipal request) {
2424
ApiClient.setQuery(req, request);
2525
req.withHeader("Accept", "application/json");
2626
req.withHeader("Content-Type", "application/json");
27+
if (apiClient.workspaceId() != null) {
28+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
29+
}
2730
return apiClient.execute(req, ServicePrincipal.class);
2831
} catch (IOException e) {
2932
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -36,6 +39,9 @@ public void delete(DeleteServicePrincipalRequest request) {
3639
try {
3740
Request req = new Request("DELETE", path);
3841
ApiClient.setQuery(req, request);
42+
if (apiClient.workspaceId() != null) {
43+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
44+
}
3945
apiClient.execute(req, Void.class);
4046
} catch (IOException e) {
4147
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -49,6 +55,9 @@ public ServicePrincipal get(GetServicePrincipalRequest request) {
4955
Request req = new Request("GET", path);
5056
ApiClient.setQuery(req, request);
5157
req.withHeader("Accept", "application/json");
58+
if (apiClient.workspaceId() != null) {
59+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
60+
}
5261
return apiClient.execute(req, ServicePrincipal.class);
5362
} catch (IOException e) {
5463
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -62,6 +71,9 @@ public ListServicePrincipalResponse list(ListServicePrincipalsRequest request) {
6271
Request req = new Request("GET", path);
6372
ApiClient.setQuery(req, request);
6473
req.withHeader("Accept", "application/json");
74+
if (apiClient.workspaceId() != null) {
75+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
76+
}
6577
return apiClient.execute(req, ListServicePrincipalResponse.class);
6678
} catch (IOException e) {
6779
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -75,6 +87,9 @@ public void patch(PartialUpdate request) {
7587
Request req = new Request("PATCH", path, apiClient.serialize(request));
7688
ApiClient.setQuery(req, request);
7789
req.withHeader("Content-Type", "application/json");
90+
if (apiClient.workspaceId() != null) {
91+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
92+
}
7893
apiClient.execute(req, Void.class);
7994
} catch (IOException e) {
8095
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -88,6 +103,9 @@ public void update(ServicePrincipal request) {
88103
Request req = new Request("PUT", path, apiClient.serialize(request));
89104
ApiClient.setQuery(req, request);
90105
req.withHeader("Content-Type", "application/json");
106+
if (apiClient.workspaceId() != null) {
107+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
108+
}
91109
apiClient.execute(req, Void.class);
92110
} catch (IOException e) {
93111
throw new DatabricksException("IO error: " + e.getMessage(), e);

databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,9 @@ public User create(User request) {
2424
ApiClient.setQuery(req, request);
2525
req.withHeader("Accept", "application/json");
2626
req.withHeader("Content-Type", "application/json");
27+
if (apiClient.workspaceId() != null) {
28+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
29+
}
2730
return apiClient.execute(req, User.class);
2831
} catch (IOException e) {
2932
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -36,6 +39,9 @@ public void delete(DeleteUserRequest request) {
3639
try {
3740
Request req = new Request("DELETE", path);
3841
ApiClient.setQuery(req, request);
42+
if (apiClient.workspaceId() != null) {
43+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
44+
}
3945
apiClient.execute(req, Void.class);
4046
} catch (IOException e) {
4147
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -49,6 +55,9 @@ public User get(GetUserRequest request) {
4955
Request req = new Request("GET", path);
5056
ApiClient.setQuery(req, request);
5157
req.withHeader("Accept", "application/json");
58+
if (apiClient.workspaceId() != null) {
59+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
60+
}
5261
return apiClient.execute(req, User.class);
5362
} catch (IOException e) {
5463
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -61,6 +70,9 @@ public GetPasswordPermissionLevelsResponse getPermissionLevels() {
6170
try {
6271
Request req = new Request("GET", path);
6372
req.withHeader("Accept", "application/json");
73+
if (apiClient.workspaceId() != null) {
74+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
75+
}
6476
return apiClient.execute(req, GetPasswordPermissionLevelsResponse.class);
6577
} catch (IOException e) {
6678
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -73,6 +85,9 @@ public PasswordPermissions getPermissions() {
7385
try {
7486
Request req = new Request("GET", path);
7587
req.withHeader("Accept", "application/json");
88+
if (apiClient.workspaceId() != null) {
89+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
90+
}
7691
return apiClient.execute(req, PasswordPermissions.class);
7792
} catch (IOException e) {
7893
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -86,6 +101,9 @@ public ListUsersResponse list(ListUsersRequest request) {
86101
Request req = new Request("GET", path);
87102
ApiClient.setQuery(req, request);
88103
req.withHeader("Accept", "application/json");
104+
if (apiClient.workspaceId() != null) {
105+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
106+
}
89107
return apiClient.execute(req, ListUsersResponse.class);
90108
} catch (IOException e) {
91109
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -99,6 +117,9 @@ public void patch(PartialUpdate request) {
99117
Request req = new Request("PATCH", path, apiClient.serialize(request));
100118
ApiClient.setQuery(req, request);
101119
req.withHeader("Content-Type", "application/json");
120+
if (apiClient.workspaceId() != null) {
121+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
122+
}
102123
apiClient.execute(req, Void.class);
103124
} catch (IOException e) {
104125
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -113,6 +134,9 @@ public PasswordPermissions setPermissions(PasswordPermissionsRequest request) {
113134
ApiClient.setQuery(req, request);
114135
req.withHeader("Accept", "application/json");
115136
req.withHeader("Content-Type", "application/json");
137+
if (apiClient.workspaceId() != null) {
138+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
139+
}
116140
return apiClient.execute(req, PasswordPermissions.class);
117141
} catch (IOException e) {
118142
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -126,6 +150,9 @@ public void update(User request) {
126150
Request req = new Request("PUT", path, apiClient.serialize(request));
127151
ApiClient.setQuery(req, request);
128152
req.withHeader("Content-Type", "application/json");
153+
if (apiClient.workspaceId() != null) {
154+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
155+
}
129156
apiClient.execute(req, Void.class);
130157
} catch (IOException e) {
131158
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -140,6 +167,9 @@ public PasswordPermissions updatePermissions(PasswordPermissionsRequest request)
140167
ApiClient.setQuery(req, request);
141168
req.withHeader("Accept", "application/json");
142169
req.withHeader("Content-Type", "application/json");
170+
if (apiClient.workspaceId() != null) {
171+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
172+
}
143173
return apiClient.execute(req, PasswordPermissions.class);
144174
} catch (IOException e) {
145175
throw new DatabricksException("IO error: " + e.getMessage(), e);
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
package com.databricks.sdk.integration;
2+
3+
import com.databricks.sdk.AccountClient;
4+
import com.databricks.sdk.core.DatabricksConfig;
5+
import com.databricks.sdk.integration.framework.EnvContext;
6+
import com.databricks.sdk.integration.framework.EnvTest;
7+
import com.databricks.sdk.service.iam.Group;
8+
import com.databricks.sdk.service.iam.ListAccountGroupsRequest;
9+
import org.junit.jupiter.api.Test;
10+
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
11+
import org.junit.jupiter.api.extension.ExtendWith;
12+
13+
@EnvContext("account")
14+
@ExtendWith(EnvTest.class)
15+
@EnabledIfEnvironmentVariable(named = "UNIFIED_HOST", matches = ".+")
16+
public class UnifiedHostAccountIT {
17+
@Test
18+
void listGroups(AccountClient a) {
19+
String unifiedHost = System.getenv("UNIFIED_HOST");
20+
DatabricksConfig cfg = a.config();
21+
cfg.setHost(unifiedHost);
22+
23+
Iterable<Group> groups = a.groups().list(new ListAccountGroupsRequest());
24+
assert groups.iterator().hasNext();
25+
}
26+
}
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
package com.databricks.sdk.integration;

import static org.junit.jupiter.api.Assertions.*;

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.core.DatabricksConfig;
import com.databricks.sdk.integration.framework.EnvContext;
import com.databricks.sdk.integration.framework.EnvOrSkip;
import com.databricks.sdk.integration.framework.EnvTest;
import com.databricks.sdk.service.iam.Group;
import com.databricks.sdk.service.iam.ListGroupsRequest;
import java.util.Iterator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable;
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
import org.junit.jupiter.api.extension.ExtendWith;

/**
 * Integration test covering workspace-level group listing through a unified host, where the
 * target workspace is selected via the configured workspace ID rather than a per-workspace URL.
 *
 * <p>Only runs when the {@code UNIFIED_HOST} environment variable is set.
 */
@EnvContext("account")
@ExtendWith(EnvTest.class)
@EnabledIfEnvironmentVariable(named = "UNIFIED_HOST", matches = ".+")
public class UnifiedHostGroupsIT {
  @Test
  @DisabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "GCP")
  void listWorkspaceGroupsViaUnifiedHost(
      AccountClient a,
      @EnvOrSkip("UNIFIED_HOST") String unifiedHost,
      @EnvOrSkip("TEST_WORKSPACE_ID") String workspaceId,
      @EnvOrSkip("TEST_ACCOUNT_ID") String accountId) {
    // Reuse the account client's OAuth credentials, but target the unified host and pin the
    // workspace/account identity explicitly.
    DatabricksConfig cfg = new DatabricksConfig();
    cfg.setHost(unifiedHost);
    cfg.setClientId(a.config().getClientId());
    cfg.setClientSecret(a.config().getClientSecret());
    cfg.setWorkspaceId(workspaceId);
    cfg.setAccountId(accountId);
    WorkspaceClient workspace = new WorkspaceClient(cfg);

    // Listing with a projection keeps the response small; we only need displayName.
    Iterable<Group> listing =
        workspace.groups().list(new ListGroupsRequest().setAttributes("displayName"));
    Iterator<Group> iter = listing.iterator();
    assertTrue(iter.hasNext(), "Expected at least one group");
    Group firstGroup = iter.next();
    assertNotNull(firstGroup.getDisplayName());
  }
}
Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
package com.databricks.sdk.integration;
2+
3+
import static org.junit.jupiter.api.Assertions.assertNotNull;
4+
5+
import com.databricks.sdk.AccountClient;
6+
import com.databricks.sdk.WorkspaceClient;
7+
import com.databricks.sdk.core.DatabricksConfig;
8+
import com.databricks.sdk.integration.framework.EnvContext;
9+
import com.databricks.sdk.integration.framework.EnvOrSkip;
10+
import com.databricks.sdk.integration.framework.EnvTest;
11+
import com.databricks.sdk.service.iam.User;
12+
import org.junit.jupiter.api.Test;
13+
import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable;
14+
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
15+
import org.junit.jupiter.api.extension.ExtendWith;
16+
17+
@EnvContext("account")
18+
@ExtendWith(EnvTest.class)
19+
@EnabledIfEnvironmentVariable(named = "UNIFIED_HOST", matches = ".+")
20+
public class UnifiedHostWorkspaceIT {
21+
// google-credentials uses a GCP ID token with target_audience=cfg.host.
22+
// On the unified host this produces the same token for both account and workspace
23+
// requests (identical OIDC exchange, identical audience). Account-level APIs accept
24+
// this token, but workspace-level APIs return 401. The X-Databricks-Org-Id header
25+
// is set correctly. This appears to be a server-side limitation on unified hosts.
26+
@Test
27+
@DisabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "GCP")
28+
void currentUserMe(
29+
AccountClient a,
30+
@EnvOrSkip("UNIFIED_HOST") String unifiedHost,
31+
@EnvOrSkip("TEST_WORKSPACE_ID") String workspaceId,
32+
@EnvOrSkip("TEST_ACCOUNT_ID") String accountId) {
33+
DatabricksConfig config =
34+
new DatabricksConfig()
35+
.setHost(unifiedHost)
36+
.setClientId(a.config().getClientId())
37+
.setClientSecret(a.config().getClientSecret())
38+
.setWorkspaceId(workspaceId)
39+
.setAccountId(accountId);
40+
41+
WorkspaceClient ws = new WorkspaceClient(config);
42+
43+
User me = ws.currentUser().me();
44+
assertNotNull(me.getUserName(), "Expected non-empty UserName");
45+
}
46+
}

0 commit comments

Comments
 (0)