Skip to content

Commit 0652a3c

Browse files
author
gdgate
authored
Merge pull request #1677 from phong-nguyen-duy/my-master
BUGFIX: TMA-1673 - Bump brick version to 3.7.21 Reviewed-by: https://github.com/danh-ung
2 parents aa404ca + ee09480 commit 0652a3c

18 files changed

Lines changed: 65833 additions & 45038 deletions

VERSION

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
3.7.20
1+
3.7.21

lib/gooddata/helpers/global_helpers_params.rb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -242,7 +242,7 @@ def stringify_values(value)
242242

243243
def resolve_reference_params(data_params, params)
244244
reference_values = []
245-
regexps = Regexp.union(/\\\\/, /\\\$/, /\$\{(\w+)\}/)
245+
regexps = Regexp.union(/\\\\/, /\\\$/, /\$\{([\w\s\.]+)\}/)
246246
resolve_reference = lambda do |v|
247247
if v.is_a? Hash
248248
Hash[
@@ -262,7 +262,7 @@ def resolve_reference_params(data_params, params)
262262
data_params.is_a?(Hash) ? '\\' : '\\\\' # rubocop: disable Metrics/BlockNesting
263263
elsif match =~ /\\\$/
264264
'$'
265-
elsif match =~ /\$\{(\w+)\}/
265+
elsif match =~ /\$\{([\w\s\.]+)\}/
266266
val = params["#{$1}"]
267267
if val
268268
reference_values << val
Lines changed: 116 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,116 @@
# frozen_string_literal: true
# (C) 2019-2020 GoodData Corporation
require_relative 'base_action'

# Migrate date dimension urn:gooddata:date or urn:custom:date to urn:custom_v2:date
module GoodData
  module LCM2
    # LCM action that upgrades legacy date dimensions (urn:gooddata:date /
    # urn:custom:date) to urn:custom_v2:date in every client ("to") project of
    # a segment, driven by the segment's latest master blueprint.
    class MigrateGdcDateDimension < BaseAction
      DESCRIPTION = 'Migrate Gdc Date Dimension'
      DATE_DIMENSION_CUSTOM_V2 = 'urn:custom_v2:date'
      # urns eligible for the upgrade to DATE_DIMENSION_CUSTOM_V2
      DATE_DIMENSION_OLD = %w[urn:gooddata:date urn:custom:date].freeze

      PARAMS = define_params(self) do
        description 'Client Used for Connecting to GD'
        param :gdc_gd_client, instance_of(Type::GdClientType), required: true

        description 'Specifies how to synchronize LDM and resolve possible conflicts'
        param :synchronize_ldm, instance_of(Type::SynchronizeLDM), required: false, default: 'diff_against_master_with_fallback'

        description 'Synchronization Info'
        param :synchronize, array_of(instance_of(Type::SynchronizationInfoType)), required: true, generated: true
      end

      RESULT_HEADER = %i[from to status].freeze

      class << self
        # Action entry point: runs the migration for every segment listed in
        # params.synchronize and returns { results: [...] }.
        def call(params)
          results = []
          # `each`, not `map`: we accumulate into `results`, the mapped value
          # was never used.
          params.synchronize.each do |segment_info|
            results.concat(migrate_date_dimension(params, segment_info))
          end

          {
            results: results
          }
        end

        # Upgrades the date dimensions of all 'to' projects of one segment.
        # @return [Array<Hash>] one { from:, to:, status: } entry per upgraded
        #   project (empty when nothing needed migrating).
        def migrate_date_dimension(params, segment_info)
          results = []
          client = params.gdc_gd_client
          latest_blueprint = segment_info[:from_blueprint]
          # don't migrate when latest master doesn't contain custom v2 date.
          return results unless contain_v2?(latest_blueprint)

          previous_blueprint = segment_info[:previous_master]&.blueprint
          # check latest master and previous master
          master_upgrade_datasets = get_upgrade_dates(latest_blueprint, previous_blueprint) if params[:synchronize_ldm].downcase == 'diff_against_master' && previous_blueprint
          unless master_upgrade_datasets&.empty?
            segment_info[:to].pmap do |entry|
              pid = entry[:pid]
              to_project = client.projects(pid) || fail("Invalid 'to' project specified - '#{pid}'")
              to_blueprint = to_project.blueprint
              upgrade_datasets = get_upgrade_dates(latest_blueprint, to_blueprint)
              next if upgrade_datasets.empty?

              message = get_upgrade_message(upgrade_datasets)

              results << {
                from: segment_info[:from],
                to: pid,
                status: to_project.upgrade_custom_v2(message)
              }
            end
          end

          results
        end

        # Identifiers of src_blueprint date datasets that must be upgraded in
        # dest_blueprint.
        #
        # Always returns an Array: callers invoke #empty? / &.empty? on the
        # result, so the previous `return false` (and the unguarded
        # `nil.empty?` when a blueprint was missing) raised NoMethodError —
        # `&.` only protects against nil, not false.
        def get_upgrade_dates(src_blueprint, dest_blueprint)
          dest_dates = dest_blueprint ? get_date_dimensions(dest_blueprint) : []
          src_dates = src_blueprint ? get_date_dimensions(src_blueprint) : []

          return [] if dest_dates.empty? || src_dates.empty?

          upgrade_datasets = []
          dest_dates.each do |dest|
            src_dim = get_date_dimension(src_blueprint, dest[:id])
            next unless src_dim

            upgrade_datasets << src_dim[:identifier] if upgrade?(src_dim, dest) && src_dim[:identifier]
          end

          upgrade_datasets
        end

        # Payload for the date-dimension upgrade API call.
        def get_upgrade_message(upgrade_datasets)
          {
            upgrade: {
              dateDatasets: {
                upgrade: "exact",
                datasets: upgrade_datasets
              }
            }
          }
        end

        # True when the source dimension already uses custom_v2 while the
        # destination still uses one of the legacy urns.
        def upgrade?(src_dim, dest_dim)
          src_dim[:urn] == DATE_DIMENSION_CUSTOM_V2 && DATE_DIMENSION_OLD.any? { |e| dest_dim[:urn] == e }
        end

        # Does the blueprint contain at least one custom_v2 date dimension?
        def contain_v2?(blueprint)
          get_date_dimensions(blueprint).any? { |e| e[:urn] == DATE_DIMENSION_CUSTOM_V2 }
        end

        def get_date_dimension(blueprint, id)
          GoodData::Model::ProjectBlueprint.find_date_dimension(blueprint, id)
        end

        def get_date_dimensions(blueprint)
          GoodData::Model::ProjectBlueprint.date_dimensions(blueprint)
        end
      end
    end
  end
end

lib/gooddata/lcm/actions/synchronize_ldm.rb

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,8 @@ class SynchronizeLdm < BaseAction
4949
param :include_deprecated, instance_of(Type::BooleanType), required: false, default: false
5050
end
5151

52+
RESULT_HEADER = %i[from to status]
53+
5254
class << self
5355
def call(params)
5456
results = []
@@ -76,9 +78,9 @@ def sync_segment_ldm(params, segment_info)
7678
include_deprecated = params.include_deprecated.to_b
7779
from_pid = segment_info[:from]
7880
from = params.development_client.projects(from_pid) || fail("Invalid 'from' project specified - '#{from_pid}'")
79-
8081
GoodData.logger.info "Creating Blueprint, project: '#{from.title}', PID: #{from_pid}"
8182
blueprint = from.blueprint(include_ca: params.include_computed_attributes.to_b)
83+
segment_info[:from_blueprint] = blueprint
8284
maql_diff = nil
8385
previous_master = segment_info[:previous_master]
8486
diff_against_master = %w(diff_against_master_with_fallback diff_against_master)

lib/gooddata/lcm/lcm2.rb

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -138,6 +138,7 @@ def respond_to_missing?(name, *_args)
138138
EnsureTechnicalUsersDomain,
139139
EnsureTechnicalUsersProject,
140140
SynchronizeLdm,
141+
MigrateGdcDateDimension,
141142
SynchronizeClients,
142143
SynchronizeComputedAttributes,
143144
CollectDymanicScheduleParams,

lib/gooddata/models/from_wire.rb

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -105,6 +105,7 @@ def self.parse_date_dimensions(date_dim)
105105
d[:title] = date_dim['dateDimension']['title']
106106
d[:urn] = date_dim['dateDimension']['urn']
107107
d[:identifier_prefix] = date_dim['dateDimension']['identifierPrefix']
108+
d[:identifier] = date_dim['dateDimension']['identifier'] if date_dim['dateDimension']['identifier']
108109
d[:columns] = parse_bridges(date_dim)
109110
end
110111
end

lib/gooddata/models/process.rb

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -118,11 +118,13 @@ def deploy_simple_process(path, options = { client: GoodData.client, project: Go
118118
GoodData.logger.info("Deploying #{path}") if verbose
119119

120120
deployed_path = Process.upload_package(path, files_to_exclude, client: client, project: project)
121+
data_sources = options[:data_sources] || []
121122
data = {
122123
:process => {
123124
:name => deploy_name,
124125
:path => "/uploads/#{File.basename(deployed_path)}",
125-
:type => type
126+
:type => type,
127+
:dataSources => data_sources
126128
}
127129
}
128130

@@ -171,10 +173,12 @@ def deploy_from_appstore(path, options = { :client => GoodData.client, :project
171173
verbose = options[:verbose] || false
172174
GoodData.logger.info("Deploying #{path}") if verbose
173175

176+
data_sources = options[:data_sources] || []
174177
data = {
175178
process: {
176179
name: deploy_name,
177180
path: path,
181+
dataSources: data_sources,
178182
type: 'RUBY'
179183
}
180184
}
@@ -185,7 +189,7 @@ def deploy_from_appstore(path, options = { :client => GoodData.client, :project
185189
def deploy_component(data, options = { client: GoodData.client, project: GoodData.project })
186190
client, project = GoodData.get_client_and_project(options)
187191
data = { process: data } unless data[:process]
188-
data[:process] = GoodData::Helpers.symbolize_keys(data[:process]).select { |k| %i[type name component].include? k }
192+
data[:process] = GoodData::Helpers.symbolize_keys(data[:process]).select { |k| %i[type name component dataSources].include? k }
189193
data[:process][:component] = GoodData::Helpers.symbolize_keys(data[:process][:component]).select { |k| %i[name version configLocation config].include? k }
190194

191195
save(data, options)
@@ -266,7 +270,7 @@ def delete
266270
# @option options [String] :name Readable name of the process
267271
# @option options [Boolean] :verbose (false) Switch on verbose mode for detailed logging
268272
def deploy(path, options = {})
269-
Process.deploy(path, { client: client, process_id: process_id, :project => project, :name => name, :type => type }.merge(options))
273+
Process.deploy(path, { client: client, process_id: process_id, :project => project, :name => name, :type => type, :data_sources => data_sources }.merge(options))
270274
end
271275

272276
# Downloads the process from S3 in a zipped form.
@@ -326,6 +330,10 @@ def component
326330
process['component']
327331
end
328332

333+
# Data sources attached to this process: the 'dataSources' section of the
# process resource (nil when the payload carries none).
def data_sources
334+
process['dataSources']
335+
end
336+
329337
# Determines whether the process is an ADDv2 component.
330338
# @return [Bool] True if the process is an ADDv2 component.
331339
def add_v2_component?

lib/gooddata/models/project.rb

Lines changed: 19 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -271,11 +271,11 @@ def transfer_processes(from_project, to_project, options = {})
271271

272272
to_process = if process.path
273273
to_process.delete if to_process
274-
GoodData::Process.deploy_from_appstore(process.path, name: process.name, client: to_project.client, project: to_project)
274+
Process.deploy_from_appstore(process.path, name: process.name, client: to_project.client, project: to_project, data_sources: process.data_sources)
275275
elsif process.component
276276
to_process.delete if to_process
277277
process_hash = GoodData::Helpers::DeepMergeableHash[GoodData::Helpers.symbolize_keys(process.to_hash)].deep_merge(additional_hidden_params)
278-
GoodData::Process.deploy_component(process_hash, project: to_project, client: to_project.client)
278+
Process.deploy_component(process_hash, project: to_project, client: to_project.client)
279279
else
280280
Dir.mktmpdir('etl_transfer') do |dir|
281281
dir = Pathname(dir)
@@ -285,9 +285,9 @@ def transfer_processes(from_project, to_project, options = {})
285285
end
286286

287287
if to_process
288-
to_process.deploy(filename, type: process.type, name: process.name)
288+
to_process.deploy(filename, type: process.type, name: process.name, data_sources: process.data_sources)
289289
else
290-
to_project.deploy_process(filename, type: process.type, name: process.name)
290+
to_project.deploy_process(filename, type: process.type, name: process.name, data_sources: process.data_sources)
291291
end
292292
end
293293
end
@@ -625,6 +625,7 @@ def attributes_by_title(title)
625625
def blueprint(options = {})
626626
options = { include_ca: true }.merge(options)
627627
result = client.get("/gdc/projects/#{pid}/model/view", params: { includeDeprecated: true, includeGrain: true, includeCA: options[:include_ca] })
628+
628629
polling_url = result['asyncTask']['link']['poll']
629630
model = client.poll_on_code(polling_url, options)
630631
bp = GoodData::Model::FromWire.from_wire(model, options)
@@ -1922,6 +1923,20 @@ def resolve_roles(login, desired_roles, options = {})
19221923
[user, roles]
19231924
end
19241925

1926+
# Submits a date-dimension upgrade task for this project and waits for it
# to finish.
#
# @param message [Hash] upgrade payload (see MigrateGdcDateDimension)
# @param options [Hash] forwarded to the response poller
# @return [String] the task status reported synchronously, otherwise 'OK'
#   or 'FAIL' after polling completes
# NOTE(review): `client&.post` tolerates a nil client, yet the response is
#   indexed unconditionally right after — confirm client is always present.
def upgrade_custom_v2(message, options = {})
  endpoint = "/gdc/md/#{pid}/datedimension/upgrade"
  response = client&.post(endpoint, message)

  # Some answers carry the final status directly; return it when present.
  immediate = response['wTaskStatus'] && response['wTaskStatus']['status']
  return immediate if immediate

  # Otherwise poll the async task until it leaves the RUNNING state.
  poll_link = response['asyncTask']['link']['poll']
  final_state = client&.poll_on_response(poll_link, options) do |body|
    body && body['wTaskStatus'] && body['wTaskStatus']['status'] == 'RUNNING'
  end

  final_state['wTaskStatus']['status'] == 'OK' ? 'OK' : 'FAIL'
end
1939+
19251940
def add
19261941
@add ||= GoodData::AutomatedDataDistribution.new(self)
19271942
@add
Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
{
2+
"title": "blueprint old date dimension",
3+
"include_ca": true,
4+
"datasets": [
5+
{
6+
"type": "dataset",
7+
"title": "person",
8+
"id": "dataset.person",
9+
"columns": [
10+
{
11+
"type": "anchor",
12+
"id": "attr.person.factsof",
13+
"title": "Records of person",
14+
"description": "Records of person",
15+
"folder": "person"
16+
},
17+
{
18+
"type": "attribute",
19+
"id": "attr.person.name",
20+
"title": "name",
21+
"description": "name",
22+
"folder": "person"
23+
},
24+
{
25+
"type": "label",
26+
"id": "label.person.name",
27+
"reference": "attr.person.name",
28+
"title": "name",
29+
"gd_data_type": "VARCHAR(128)",
30+
"gd_type": "GDC.text",
31+
"default_label": true
32+
},
33+
{
34+
"type": "fact",
35+
"id": "fact.person.id",
36+
"title": "id",
37+
"description": "id",
38+
"folder": "person",
39+
"gd_data_type": "DECIMAL(12,2)"
40+
},
41+
{
42+
"type": "date",
43+
"dataset": "datecustom"
44+
},
45+
{
46+
"type": "date",
47+
"dataset": "dategooddata"
48+
}
49+
]
50+
}
51+
],
52+
"date_dimensions": [
53+
{
54+
"type": "date_dimension",
55+
"id": "datecustom",
56+
"title": "DateCustom",
57+
"urn": "urn:custom:date",
58+
"identifier_prefix": "datecustom",
59+
"columns": [
60+
61+
]
62+
},
63+
{
64+
"type": "date_dimension",
65+
"id": "dategooddata",
66+
"title": "DateGooddata",
67+
"urn": "urn:gooddata:date",
68+
"identifier_prefix": "dategooddata",
69+
"columns": [
70+
71+
]
72+
}
73+
]
74+
}

0 commit comments

Comments
 (0)