Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
* Added automatic detection of AI coding agents (Antigravity, Claude Code, Cline, Codex, Copilot CLI, Cursor, Gemini CLI, OpenCode) in the user-agent string. The SDK now appends `agent/<name>` to HTTP request headers when running inside a known AI agent environment.

### Bug Fixes
* Added `X-Databricks-Org-Id` header to deprecated workspace SCIM APIs (Groups, ServicePrincipals, Users) for SPOG host compatibility.
* Fixed Databricks CLI authentication to detect when the cached token's scopes don't match the SDK's configured scopes. Previously, a scope mismatch was silently ignored, causing requests to use wrong permissions. The SDK now raises an error with instructions to re-authenticate.

### Security Vulnerabilities
Expand All @@ -23,4 +24,4 @@
* Add `cascade` field for `com.databricks.sdk.service.pipelines.DeletePipelineRequest`.
* Add `defaultBranch` field for `com.databricks.sdk.service.postgres.ProjectSpec`.
* Add `defaultBranch` field for `com.databricks.sdk.service.postgres.ProjectStatus`.
* Add `ingress` and `ingressDryRun` fields for `com.databricks.sdk.service.settings.AccountNetworkPolicy`.
* Add `ingress` and `ingressDryRun` fields for `com.databricks.sdk.service.settings.AccountNetworkPolicy`.
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@ public Group create(Group request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, Group.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -36,6 +39,9 @@ public void delete(DeleteGroupRequest request) {
try {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -49,6 +55,9 @@ public Group get(GetGroupRequest request) {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, Group.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -62,6 +71,9 @@ public ListGroupsResponse list(ListGroupsRequest request) {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, ListGroupsResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -75,6 +87,9 @@ public void patch(PartialUpdate request) {
Request req = new Request("PATCH", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -88,6 +103,9 @@ public void update(Group request) {
Request req = new Request("PUT", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@ public ServicePrincipal create(ServicePrincipal request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, ServicePrincipal.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -36,6 +39,9 @@ public void delete(DeleteServicePrincipalRequest request) {
try {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -49,6 +55,9 @@ public ServicePrincipal get(GetServicePrincipalRequest request) {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, ServicePrincipal.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -62,6 +71,9 @@ public ListServicePrincipalResponse list(ListServicePrincipalsRequest request) {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, ListServicePrincipalResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -75,6 +87,9 @@ public void patch(PartialUpdate request) {
Request req = new Request("PATCH", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -88,6 +103,9 @@ public void update(ServicePrincipal request) {
Request req = new Request("PUT", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@ public User create(User request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, User.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -36,6 +39,9 @@ public void delete(DeleteUserRequest request) {
try {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -49,6 +55,9 @@ public User get(GetUserRequest request) {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, User.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -61,6 +70,9 @@ public GetPasswordPermissionLevelsResponse getPermissionLevels() {
try {
Request req = new Request("GET", path);
req.withHeader("Accept", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, GetPasswordPermissionLevelsResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -73,6 +85,9 @@ public PasswordPermissions getPermissions() {
try {
Request req = new Request("GET", path);
req.withHeader("Accept", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, PasswordPermissions.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -86,6 +101,9 @@ public ListUsersResponse list(ListUsersRequest request) {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, ListUsersResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -99,6 +117,9 @@ public void patch(PartialUpdate request) {
Request req = new Request("PATCH", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -113,6 +134,9 @@ public PasswordPermissions setPermissions(PasswordPermissionsRequest request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, PasswordPermissions.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -126,6 +150,9 @@ public void update(User request) {
Request req = new Request("PUT", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand All @@ -140,6 +167,9 @@ public PasswordPermissions updatePermissions(PasswordPermissionsRequest request)
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
if (apiClient.workspaceId() != null) {
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
}
return apiClient.execute(req, PasswordPermissions.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
package com.databricks.sdk.integration;

import static org.junit.jupiter.api.Assertions.*;

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.core.DatabricksConfig;
import com.databricks.sdk.integration.framework.EnvContext;
import com.databricks.sdk.integration.framework.EnvOrSkip;
import com.databricks.sdk.integration.framework.EnvTest;
import com.databricks.sdk.service.iam.Group;
import com.databricks.sdk.service.iam.ListGroupsRequest;
import java.util.Iterator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

/**
 * Integration test verifying that the workspace-level SCIM Groups API is reachable through a
 * unified (SPOG) host. When {@code DatabricksConfig.setWorkspaceId} is set, the SDK sends the
 * {@code X-Databricks-Org-Id} header on the deprecated workspace SCIM endpoints so the unified
 * host can route the request to the target workspace.
 *
 * <p>Skipped unless {@code UNIFIED_HOST}, {@code TEST_WORKSPACE_ID}, {@code DATABRICKS_CLIENT_ID},
 * and {@code DATABRICKS_CLIENT_SECRET} are present in the environment.
 */
@EnvContext("account")
@ExtendWith(EnvTest.class)
public class UnifiedHostGroupsIT {
  @Test
  void listWorkspaceGroupsViaUnifiedHost(
      @EnvOrSkip("UNIFIED_HOST") String host,
      @EnvOrSkip("TEST_WORKSPACE_ID") String workspaceId,
      @EnvOrSkip("DATABRICKS_CLIENT_ID") String clientId,
      @EnvOrSkip("DATABRICKS_CLIENT_SECRET") String clientSecret) {
    // Point a WorkspaceClient at the unified host; workspaceId is what triggers the
    // X-Databricks-Org-Id header on workspace SCIM calls.
    DatabricksConfig config =
        new DatabricksConfig()
            .setHost(host)
            .setWorkspaceId(workspaceId)
            .setClientId(clientId)
            .setClientSecret(clientSecret);
    WorkspaceClient ws = new WorkspaceClient(config);

    Iterable<Group> groups = ws.groups().list(new ListGroupsRequest().setAttributes("displayName"));
    // Assert non-emptiness explicitly: a bare iterator().next() on an empty result would fail
    // with an opaque NoSuchElementException instead of a diagnosable test failure.
    Iterator<Group> it = groups.iterator();
    assertTrue(it.hasNext(), "expected at least one group in workspace " + workspaceId);
    Group first = it.next();
    assertNotNull(first.getDisplayName());
  }
}
Loading