Skip to content

Commit 5bc7b49

Browse files
tychtjan authored and claude committed
fix(tests): unify dummy credentials across environments
Move BigQuery/Databricks dummy credentials to shared config section so they are defined once and reused by all environments. The staging env previously used structurally invalid placeholders (e.g. "dummy-bigquery-token") that failed backend schema validation, forcing a pytest.skip. Now both local and staging use the same Base64-encoded service account JSON that passes BigQueryDataSourceValidator. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent 7b95d35 commit 5bc7b49

2 files changed

Lines changed: 8 additions & 13 deletions

File tree

packages/gooddata-sdk/tests/catalog/test_catalog_data_source.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -489,11 +489,6 @@ def test_store_declarative_data_sources(test_config):
489489

490490
@gd_vcr.use_cassette(str(_fixtures_dir / "demo_load_and_put_declarative_data_sources.yaml"))
491491
def test_load_and_put_declarative_data_sources(test_config, snapshot_data_sources):
492-
if test_config.get("staging", False):
493-
# load_and_put loads ALL data sources from the layout directory in a single PUT,
494-
# including bigquery/databricks with dummy credentials. Staging validates credential
495-
# formats and rejects them (400). Can't exclude individual data sources from the PUT.
496-
pytest.skip("Staging server validates credential formats; this test uses mocked dummy credentials")
497492
load_folder = _current_dir / "load"
498493
sdk = GoodDataSdk.create(host_=test_config["host"], token_=test_config["token"])
499494
credentials_path = _current_dir / "load" / "data_source_credentials" / "data_sources_credentials.yaml"

packages/gooddata-sdk/tests/gd_test_config.yaml

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,14 @@ admin_user_group: "adminGroup"
2222
visualization_id: "customers_trend"
2323
visualization_name: "Customers Trend"
2424

25+
# --- Dummy credentials (shared across all environments) ---
26+
# These are structurally valid placeholders that pass backend schema validation
27+
# but do not connect to any real service.
28+
bigquery_token: "eyJ0eXBlIjogInNlcnZpY2VfYWNjb3VudCIsICJwcm9qZWN0X2lkIjogIlBST0pFQ1RfSUQiLCAicHJpdmF0ZV9rZXlfaWQiOiAiS0VZX0lEIiwgInByaXZhdGVfa2V5IjogIi0tLS0tQkVHSU4gUFJJVkFURSBLRVktLS0tLVxuUFJJVkFURV9LRVlcbi0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS1cbiIsICJjbGllbnRfZW1haWwiOiAiU0VSVklDRV9BQ0NPVU5UX0VNQUlMIiwgImNsaWVudF9pZCI6ICJDTElFTlRfSUQiLCAiYXV0aF91cmkiOiAiaHR0cHM6Ly9hY2NvdW50cy5nb29nbGUuY29tL28vb2F1dGgyL2F1dGgiLCAidG9rZW5fdXJpIjogImh0dHBzOi8vYWNjb3VudHMuZ29vZ2xlLmNvbS9vL29hdXRoMi90b2tlbiIsICJhdXRoX3Byb3ZpZGVyX3g1MDlfY2VydF91cmwiOiAiaHR0cHM6Ly93d3cuZ29vZ2xlYXBpcy5jb20vb2F1dGgyL3YxL2NlcnRzIiwgImNsaWVudF94NTA5X2NlcnRfdXJsIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3JvYm90L3YxL21ldGFkYXRhL3g1MDkvU0VSVklDRV9BQ0NPVU5UX0VNQUlMIn0="
29+
bigquery_token_file: '{"type": "service_account", "project_id": "PROJECT_ID", "private_key_id": "KEY_ID", "private_key": "-----BEGIN PRIVATE KEY-----\\nPRIVATE_KEY\\n-----END PRIVATE KEY-----\\n", "client_email": "SERVICE_ACCOUNT_EMAIL", "client_id": "CLIENT_ID", "auth_uri": "https://accounts.google.com/o/oauth2/auth", "token_uri": "https://accounts.google.com/o/oauth2/token", "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/SERVICE_ACCOUNT_EMAIL"}'
30+
databricks_client_secret: "databricks-client-secret"
31+
databricks_token: "databricks-token"
32+
2533
# --- Environment-specific overrides ---
2634
environments:
2735
local:
@@ -34,10 +42,6 @@ environments:
3442
ds_url: "jdbc:postgresql://postgres:5432/tiger?sslmode=prefer"
3543
ds_username: "postgres"
3644
ds_password: "passw0rd"
37-
bigquery_token: "eyJ0eXBlIjogInNlcnZpY2VfYWNjb3VudCIsICJwcm9qZWN0X2lkIjogIlBST0pFQ1RfSUQiLCAicHJpdmF0ZV9rZXlfaWQiOiAiS0VZX0lEIiwgInByaXZhdGVfa2V5IjogIi0tLS0tQkVHSU4gUFJJVkFURSBLRVktLS0tLVxuUFJJVkFURV9LRVlcbi0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS1cbiIsICJjbGllbnRfZW1haWwiOiAiU0VSVklDRV9BQ0NPVU5UX0VNQUlMIiwgImNsaWVudF9pZCI6ICJDTElFTlRfSUQiLCAiYXV0aF91cmkiOiAiaHR0cHM6Ly9hY2NvdW50cy5nb29nbGUuY29tL28vb2F1dGgyL2F1dGgiLCAidG9rZW5fdXJpIjogImh0dHBzOi8vYWNjb3VudHMuZ29vZ2xlLmNvbS9vL29hdXRoMi90b2tlbiIsICJhdXRoX3Byb3ZpZGVyX3g1MDlfY2VydF91cmwiOiAiaHR0cHM6Ly93d3cuZ29vZ2xlYXBpcy5jb20vb2F1dGgyL3YxL2NlcnRzIiwgImNsaWVudF94NTA5X2NlcnRfdXJsIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3JvYm90L3YxL21ldGFkYXRhL3g1MDkvU0VSVklDRV9BQ0NPVU5UX0VNQUlMIn0="
38-
bigquery_token_file: '{"type": "service_account", "project_id": "PROJECT_ID", "private_key_id": "KEY_ID", "private_key": "-----BEGIN PRIVATE KEY-----\\nPRIVATE_KEY\\n-----END PRIVATE KEY-----\\n", "client_email": "SERVICE_ACCOUNT_EMAIL", "client_id": "CLIENT_ID", "auth_uri": "https://accounts.google.com/o/oauth2/auth", "token_uri": "https://accounts.google.com/o/oauth2/token", "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/SERVICE_ACCOUNT_EMAIL"}'
39-
databricks_client_secret: "databricks-client-secret"
40-
databricks_token: "databricks-token"
4145

4246
staging:
4347
staging: true
@@ -50,7 +54,3 @@ environments:
5054
ds_url: "jdbc:postgresql://cnpg-cluster-pooler:5432/tiger?sslmode=prefer"
5155
ds_username: "tiger"
5256
ds_password: "passw0rd" # overridden from DS_PASSWORD env var
53-
bigquery_token: "dummy-bigquery-token"
54-
bigquery_token_file: ""
55-
databricks_client_secret: "dummy-databricks-secret"
56-
databricks_token: "dummy-databricks-token"

0 commit comments

Comments (0)