diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 0bd9e224b..e7017db7a 100755 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -6,6 +6,7 @@ * Added automatic detection of AI coding agents (Antigravity, Claude Code, Cline, Codex, Copilot CLI, Cursor, Gemini CLI, OpenCode) in the user-agent string. The SDK now appends `agent/` to HTTP request headers when running inside a known AI agent environment. ### Bug Fixes +* Added `X-Databricks-Org-Id` header to deprecated workspace SCIM APIs (Groups, ServicePrincipals, Users) for SPOG host compatibility. * Fixed Databricks CLI authentication to detect when the cached token's scopes don't match the SDK's configured scopes. Previously, a scope mismatch was silently ignored, causing requests to use wrong permissions. The SDK now raises an error with instructions to re-authenticate. ### Security Vulnerabilities @@ -23,4 +24,4 @@ * Add `cascade` field for `com.databricks.sdk.service.pipelines.DeletePipelineRequest`. * Add `defaultBranch` field for `com.databricks.sdk.service.postgres.ProjectSpec`. * Add `defaultBranch` field for `com.databricks.sdk.service.postgres.ProjectStatus`. -* Add `ingress` and `ingressDryRun` fields for `com.databricks.sdk.service.settings.AccountNetworkPolicy`. \ No newline at end of file +* Add `ingress` and `ingressDryRun` fields for `com.databricks.sdk.service.settings.AccountNetworkPolicy`. 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java index fabddee61..196538b23 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java @@ -24,6 +24,9 @@ public Group create(Group request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, Group.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -36,6 +39,9 @@ public void delete(DeleteGroupRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -49,6 +55,9 @@ public Group get(GetGroupRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, Group.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -62,6 +71,9 @@ public ListGroupsResponse list(ListGroupsRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, ListGroupsResponse.class); 
} catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -75,6 +87,9 @@ public void patch(PartialUpdate request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -88,6 +103,9 @@ public void update(Group request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java index f43216c63..ec79919cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java @@ -24,6 +24,9 @@ public ServicePrincipal create(ServicePrincipal request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, ServicePrincipal.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -36,6 +39,9 @@ public void delete(DeleteServicePrincipalRequest request) { try { Request req = new 
Request("DELETE", path); ApiClient.setQuery(req, request); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -49,6 +55,9 @@ public ServicePrincipal get(GetServicePrincipalRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, ServicePrincipal.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -62,6 +71,9 @@ public ListServicePrincipalResponse list(ListServicePrincipalsRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, ListServicePrincipalResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -75,6 +87,9 @@ public void patch(PartialUpdate request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -88,6 +103,9 @@ public void update(ServicePrincipal request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + 
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java index ba2f1a67a..acc6a521b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java @@ -24,6 +24,9 @@ public User create(User request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, User.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -36,6 +39,9 @@ public void delete(DeleteUserRequest request) { try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -49,6 +55,9 @@ public User get(GetUserRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, User.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -61,6 +70,9 @@ public GetPasswordPermissionLevelsResponse getPermissionLevels() { try { Request req = new Request("GET", path); req.withHeader("Accept", "application/json"); + if 
(apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, GetPasswordPermissionLevelsResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -73,6 +85,9 @@ public PasswordPermissions getPermissions() { try { Request req = new Request("GET", path); req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, PasswordPermissions.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -86,6 +101,9 @@ public ListUsersResponse list(ListUsersRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, ListUsersResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -99,6 +117,9 @@ public void patch(PartialUpdate request) { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -113,6 +134,9 @@ public PasswordPermissions setPermissions(PasswordPermissionsRequest request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, 
PasswordPermissions.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -126,6 +150,9 @@ public void update(User request) { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } apiClient.execute(req, Void.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); @@ -140,6 +167,9 @@ public PasswordPermissions updatePermissions(PasswordPermissionsRequest request) ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } return apiClient.execute(req, PasswordPermissions.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/UnifiedHostGroupsIT.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/UnifiedHostGroupsIT.java new file mode 100644 index 000000000..a1fa61aec --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/UnifiedHostGroupsIT.java @@ -0,0 +1,45 @@ +package com.databricks.sdk.integration; + +import static org.junit.jupiter.api.Assertions.*; + +import com.databricks.sdk.AccountClient; +import com.databricks.sdk.WorkspaceClient; +import com.databricks.sdk.core.DatabricksConfig; +import com.databricks.sdk.integration.framework.EnvContext; +import com.databricks.sdk.integration.framework.EnvOrSkip; +import com.databricks.sdk.integration.framework.EnvTest; +import com.databricks.sdk.service.iam.Group; +import com.databricks.sdk.service.iam.ListGroupsRequest; +import java.util.Iterator; +import 
org.junit.jupiter.api.Test; + import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + import org.junit.jupiter.api.extension.ExtendWith; + +@EnvContext("account") +@ExtendWith(EnvTest.class) +@EnabledIfEnvironmentVariable(named = "UNIFIED_HOST", matches = ".+") +public class UnifiedHostGroupsIT { + @Test + @DisabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "GCP") + void listWorkspaceGroupsViaUnifiedHost( + AccountClient a, + @EnvOrSkip("UNIFIED_HOST") String unifiedHost, + @EnvOrSkip("TEST_WORKSPACE_ID") String workspaceId, + @EnvOrSkip("TEST_ACCOUNT_ID") String accountId) { + DatabricksConfig config = + new DatabricksConfig() + .setHost(unifiedHost) + .setClientId(a.config().getClientId()) + .setClientSecret(a.config().getClientSecret()) + .setWorkspaceId(workspaceId) + .setAccountId(accountId); + WorkspaceClient ws = new WorkspaceClient(config); + + Iterable<Group> groups = ws.groups().list(new ListGroupsRequest().setAttributes("displayName")); + Iterator<Group> it = groups.iterator(); + assertTrue(it.hasNext(), "Expected at least one group"); + Group first = it.next(); + assertNotNull(first.getDisplayName()); + } +}