Skip to content

Commit 9df8048

Browse files
Add X-Databricks-Org-Id header to deprecated workspace SCIM APIs (#754)
## Summary

- Added `X-Databricks-Org-Id` header to all methods in the deprecated `GroupsImpl`, `ServicePrincipalsImpl`, and `UsersImpl` workspace services (22 methods total)
- These were the only workspace-level services missing this header, which is required for SPOG (unified) host compatibility
- Added integration test `UnifiedHostGroupsIT` to verify the Groups API works through a SPOG host

## Test plan

- [ ] Verify `UnifiedHostGroupsIT.listWorkspaceGroupsViaUnifiedHost` passes against a SPOG host
- [ ] Verify existing workspace SCIM integration tests still pass

This pull request was AI-assisted by Isaac.

---------

Signed-off-by: Hector Castejon Diaz <hector.castejon@databricks.com>
1 parent bf24daf commit 9df8048

File tree

5 files changed

+113
-1
lines changed

5 files changed

+113
-1
lines changed

NEXT_CHANGELOG.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
* Added automatic detection of AI coding agents (Antigravity, Claude Code, Cline, Codex, Copilot CLI, Cursor, Gemini CLI, OpenCode) in the user-agent string. The SDK now appends `agent/<name>` to HTTP request headers when running inside a known AI agent environment.
77

88
### Bug Fixes
9+
* Added `X-Databricks-Org-Id` header to deprecated workspace SCIM APIs (Groups, ServicePrincipals, Users) for SPOG host compatibility.
910
* Fixed Databricks CLI authentication to detect when the cached token's scopes don't match the SDK's configured scopes. Previously, a scope mismatch was silently ignored, causing requests to use wrong permissions. The SDK now raises an error with instructions to re-authenticate.
1011

1112
### Security Vulnerabilities
@@ -23,4 +24,4 @@
2324
* Add `cascade` field for `com.databricks.sdk.service.pipelines.DeletePipelineRequest`.
2425
* Add `defaultBranch` field for `com.databricks.sdk.service.postgres.ProjectSpec`.
2526
* Add `defaultBranch` field for `com.databricks.sdk.service.postgres.ProjectStatus`.
26-
* Add `ingress` and `ingressDryRun` fields for `com.databricks.sdk.service.settings.AccountNetworkPolicy`.
27+
* Add `ingress` and `ingressDryRun` fields for `com.databricks.sdk.service.settings.AccountNetworkPolicy`.

databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,9 @@ public Group create(Group request) {
2424
ApiClient.setQuery(req, request);
2525
req.withHeader("Accept", "application/json");
2626
req.withHeader("Content-Type", "application/json");
27+
if (apiClient.workspaceId() != null) {
28+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
29+
}
2730
return apiClient.execute(req, Group.class);
2831
} catch (IOException e) {
2932
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -36,6 +39,9 @@ public void delete(DeleteGroupRequest request) {
3639
try {
3740
Request req = new Request("DELETE", path);
3841
ApiClient.setQuery(req, request);
42+
if (apiClient.workspaceId() != null) {
43+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
44+
}
3945
apiClient.execute(req, Void.class);
4046
} catch (IOException e) {
4147
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -49,6 +55,9 @@ public Group get(GetGroupRequest request) {
4955
Request req = new Request("GET", path);
5056
ApiClient.setQuery(req, request);
5157
req.withHeader("Accept", "application/json");
58+
if (apiClient.workspaceId() != null) {
59+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
60+
}
5261
return apiClient.execute(req, Group.class);
5362
} catch (IOException e) {
5463
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -62,6 +71,9 @@ public ListGroupsResponse list(ListGroupsRequest request) {
6271
Request req = new Request("GET", path);
6372
ApiClient.setQuery(req, request);
6473
req.withHeader("Accept", "application/json");
74+
if (apiClient.workspaceId() != null) {
75+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
76+
}
6577
return apiClient.execute(req, ListGroupsResponse.class);
6678
} catch (IOException e) {
6779
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -75,6 +87,9 @@ public void patch(PartialUpdate request) {
7587
Request req = new Request("PATCH", path, apiClient.serialize(request));
7688
ApiClient.setQuery(req, request);
7789
req.withHeader("Content-Type", "application/json");
90+
if (apiClient.workspaceId() != null) {
91+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
92+
}
7893
apiClient.execute(req, Void.class);
7994
} catch (IOException e) {
8095
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -88,6 +103,9 @@ public void update(Group request) {
88103
Request req = new Request("PUT", path, apiClient.serialize(request));
89104
ApiClient.setQuery(req, request);
90105
req.withHeader("Content-Type", "application/json");
106+
if (apiClient.workspaceId() != null) {
107+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
108+
}
91109
apiClient.execute(req, Void.class);
92110
} catch (IOException e) {
93111
throw new DatabricksException("IO error: " + e.getMessage(), e);

databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,9 @@ public ServicePrincipal create(ServicePrincipal request) {
2424
ApiClient.setQuery(req, request);
2525
req.withHeader("Accept", "application/json");
2626
req.withHeader("Content-Type", "application/json");
27+
if (apiClient.workspaceId() != null) {
28+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
29+
}
2730
return apiClient.execute(req, ServicePrincipal.class);
2831
} catch (IOException e) {
2932
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -36,6 +39,9 @@ public void delete(DeleteServicePrincipalRequest request) {
3639
try {
3740
Request req = new Request("DELETE", path);
3841
ApiClient.setQuery(req, request);
42+
if (apiClient.workspaceId() != null) {
43+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
44+
}
3945
apiClient.execute(req, Void.class);
4046
} catch (IOException e) {
4147
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -49,6 +55,9 @@ public ServicePrincipal get(GetServicePrincipalRequest request) {
4955
Request req = new Request("GET", path);
5056
ApiClient.setQuery(req, request);
5157
req.withHeader("Accept", "application/json");
58+
if (apiClient.workspaceId() != null) {
59+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
60+
}
5261
return apiClient.execute(req, ServicePrincipal.class);
5362
} catch (IOException e) {
5463
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -62,6 +71,9 @@ public ListServicePrincipalResponse list(ListServicePrincipalsRequest request) {
6271
Request req = new Request("GET", path);
6372
ApiClient.setQuery(req, request);
6473
req.withHeader("Accept", "application/json");
74+
if (apiClient.workspaceId() != null) {
75+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
76+
}
6577
return apiClient.execute(req, ListServicePrincipalResponse.class);
6678
} catch (IOException e) {
6779
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -75,6 +87,9 @@ public void patch(PartialUpdate request) {
7587
Request req = new Request("PATCH", path, apiClient.serialize(request));
7688
ApiClient.setQuery(req, request);
7789
req.withHeader("Content-Type", "application/json");
90+
if (apiClient.workspaceId() != null) {
91+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
92+
}
7893
apiClient.execute(req, Void.class);
7994
} catch (IOException e) {
8095
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -88,6 +103,9 @@ public void update(ServicePrincipal request) {
88103
Request req = new Request("PUT", path, apiClient.serialize(request));
89104
ApiClient.setQuery(req, request);
90105
req.withHeader("Content-Type", "application/json");
106+
if (apiClient.workspaceId() != null) {
107+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
108+
}
91109
apiClient.execute(req, Void.class);
92110
} catch (IOException e) {
93111
throw new DatabricksException("IO error: " + e.getMessage(), e);

databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,9 @@ public User create(User request) {
2424
ApiClient.setQuery(req, request);
2525
req.withHeader("Accept", "application/json");
2626
req.withHeader("Content-Type", "application/json");
27+
if (apiClient.workspaceId() != null) {
28+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
29+
}
2730
return apiClient.execute(req, User.class);
2831
} catch (IOException e) {
2932
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -36,6 +39,9 @@ public void delete(DeleteUserRequest request) {
3639
try {
3740
Request req = new Request("DELETE", path);
3841
ApiClient.setQuery(req, request);
42+
if (apiClient.workspaceId() != null) {
43+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
44+
}
3945
apiClient.execute(req, Void.class);
4046
} catch (IOException e) {
4147
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -49,6 +55,9 @@ public User get(GetUserRequest request) {
4955
Request req = new Request("GET", path);
5056
ApiClient.setQuery(req, request);
5157
req.withHeader("Accept", "application/json");
58+
if (apiClient.workspaceId() != null) {
59+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
60+
}
5261
return apiClient.execute(req, User.class);
5362
} catch (IOException e) {
5463
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -61,6 +70,9 @@ public GetPasswordPermissionLevelsResponse getPermissionLevels() {
6170
try {
6271
Request req = new Request("GET", path);
6372
req.withHeader("Accept", "application/json");
73+
if (apiClient.workspaceId() != null) {
74+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
75+
}
6476
return apiClient.execute(req, GetPasswordPermissionLevelsResponse.class);
6577
} catch (IOException e) {
6678
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -73,6 +85,9 @@ public PasswordPermissions getPermissions() {
7385
try {
7486
Request req = new Request("GET", path);
7587
req.withHeader("Accept", "application/json");
88+
if (apiClient.workspaceId() != null) {
89+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
90+
}
7691
return apiClient.execute(req, PasswordPermissions.class);
7792
} catch (IOException e) {
7893
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -86,6 +101,9 @@ public ListUsersResponse list(ListUsersRequest request) {
86101
Request req = new Request("GET", path);
87102
ApiClient.setQuery(req, request);
88103
req.withHeader("Accept", "application/json");
104+
if (apiClient.workspaceId() != null) {
105+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
106+
}
89107
return apiClient.execute(req, ListUsersResponse.class);
90108
} catch (IOException e) {
91109
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -99,6 +117,9 @@ public void patch(PartialUpdate request) {
99117
Request req = new Request("PATCH", path, apiClient.serialize(request));
100118
ApiClient.setQuery(req, request);
101119
req.withHeader("Content-Type", "application/json");
120+
if (apiClient.workspaceId() != null) {
121+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
122+
}
102123
apiClient.execute(req, Void.class);
103124
} catch (IOException e) {
104125
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -113,6 +134,9 @@ public PasswordPermissions setPermissions(PasswordPermissionsRequest request) {
113134
ApiClient.setQuery(req, request);
114135
req.withHeader("Accept", "application/json");
115136
req.withHeader("Content-Type", "application/json");
137+
if (apiClient.workspaceId() != null) {
138+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
139+
}
116140
return apiClient.execute(req, PasswordPermissions.class);
117141
} catch (IOException e) {
118142
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -126,6 +150,9 @@ public void update(User request) {
126150
Request req = new Request("PUT", path, apiClient.serialize(request));
127151
ApiClient.setQuery(req, request);
128152
req.withHeader("Content-Type", "application/json");
153+
if (apiClient.workspaceId() != null) {
154+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
155+
}
129156
apiClient.execute(req, Void.class);
130157
} catch (IOException e) {
131158
throw new DatabricksException("IO error: " + e.getMessage(), e);
@@ -140,6 +167,9 @@ public PasswordPermissions updatePermissions(PasswordPermissionsRequest request)
140167
ApiClient.setQuery(req, request);
141168
req.withHeader("Accept", "application/json");
142169
req.withHeader("Content-Type", "application/json");
170+
if (apiClient.workspaceId() != null) {
171+
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
172+
}
143173
return apiClient.execute(req, PasswordPermissions.class);
144174
} catch (IOException e) {
145175
throw new DatabricksException("IO error: " + e.getMessage(), e);
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
package com.databricks.sdk.integration;
2+
3+
import static org.junit.jupiter.api.Assertions.*;
4+
5+
import com.databricks.sdk.AccountClient;
6+
import com.databricks.sdk.WorkspaceClient;
7+
import com.databricks.sdk.core.DatabricksConfig;
8+
import com.databricks.sdk.integration.framework.EnvContext;
9+
import com.databricks.sdk.integration.framework.EnvOrSkip;
10+
import com.databricks.sdk.integration.framework.EnvTest;
11+
import com.databricks.sdk.service.iam.Group;
12+
import com.databricks.sdk.service.iam.ListGroupsRequest;
13+
import java.util.Iterator;
14+
import org.junit.jupiter.api.Test;
15+
import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable;
16+
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
17+
import org.junit.jupiter.api.extension.ExtendWith;
18+
19+
@EnvContext("account")
20+
@ExtendWith(EnvTest.class)
21+
@EnabledIfEnvironmentVariable(named = "UNIFIED_HOST", matches = ".+")
22+
public class UnifiedHostGroupsIT {
23+
@Test
24+
@DisabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "GCP")
25+
void listWorkspaceGroupsViaUnifiedHost(
26+
AccountClient a,
27+
@EnvOrSkip("UNIFIED_HOST") String unifiedHost,
28+
@EnvOrSkip("TEST_WORKSPACE_ID") String workspaceId,
29+
@EnvOrSkip("TEST_ACCOUNT_ID") String accountId) {
30+
DatabricksConfig config =
31+
new DatabricksConfig()
32+
.setHost(unifiedHost)
33+
.setClientId(a.config().getClientId())
34+
.setClientSecret(a.config().getClientSecret())
35+
.setWorkspaceId(workspaceId)
36+
.setAccountId(accountId);
37+
WorkspaceClient ws = new WorkspaceClient(config);
38+
39+
Iterable<Group> groups = ws.groups().list(new ListGroupsRequest().setAttributes("displayName"));
40+
Iterator<Group> it = groups.iterator();
41+
assertTrue(it.hasNext(), "Expected at least one group");
42+
Group first = it.next();
43+
assertNotNull(first.getDisplayName());
44+
}
45+
}

0 commit comments

Comments
 (0)