Skip to content

Commit 7c0f1d1

Browse files
author
Tomas Svarovsky
committed
Add a dry_run option to migrate_datasets for retrieving the MAQL chunks. Migrate no longer crashes when chunks are nil. The title is now based on the identifier.
1 parent 7866be1 commit 7c0f1d1

4 files changed

Lines changed: 50 additions & 32 deletions

File tree

lib/gooddata/models/from_wire.rb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,7 @@ def self.parse_date_dimensions(stuff)
9090
d[:type] = :date_dimension
9191
# d[:urn] = :date_dimension
9292
d[:name] = stuff['dateDimension']['name']
93-
d[:title] = stuff['dateDimension']['title'] if stuff['dateDimension']['title'] != stuff['dateDimension']['title'].titleize
93+
d[:title] = stuff['dateDimension']['title'] if stuff['dateDimension']['title'] != d[:name].titleize
9494
end
9595
end
9696

lib/gooddata/models/model.rb

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ def title(item)
2929
item[:title] || item[:name].titleize
3030
end
3131

32-
def identifier_for(dataset, column = nil, column2 = nil)
32+
def identifier_for(dataset, column = nil, column2 = nil) # rubocop:disable UnusedMethodArgument
3333
return "dataset.#{dataset[:name]}" if column.nil?
3434
column = DatasetBlueprint.find_column_by_name(dataset, column) if column.is_a?(String)
3535
case column[:type].to_sym
@@ -46,11 +46,15 @@ def identifier_for(dataset, column = nil, column2 = nil)
4646
when :primary_label
4747
"label.#{dataset[:name]}.#{column[:name]}"
4848
when :label
49-
"label.#{dataset[:name]}.#{column2[:name]}.#{column[:name]}"
49+
"label.#{dataset[:name]}.#{column[:reference]}.#{column[:name]}"
5050
when :date_ref
5151
"#{dataset[:name]}.date.mdyy"
5252
when :dataset
5353
"dataset.#{dataset[:name]}"
54+
when :date
55+
'DATE'
56+
when :reference
57+
'REF'
5458
else
5559
fail "Unknown type #{column[:type].to_sym}"
5660
end

lib/gooddata/models/project_creator.rb

Lines changed: 17 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,8 @@ module GoodData
99
module Model
1010
class ProjectCreator
1111
class << self
12-
def migrate(opts = { client: GoodData.connection, project: GoodData.project })
12+
def migrate(opts = {})
13+
opts = { client: GoodData.connection, project: GoodData.project }.merge(opts)
1314
client = opts[:client]
1415
fail ArgumentError, 'No :client specified' if client.nil?
1516

@@ -25,27 +26,21 @@ def migrate(opts = { client: GoodData.connection, project: GoodData.project })
2526

2627
begin
2728
GoodData.with_project(project, opts) do |p|
28-
# migrate_date_dimensions(p, spec[:date_dimensions] || [])
29-
migrate_datasets(spec, :project => p, :client => client)
29+
migrate_datasets(spec, opts.merge(project: p, client: client))
3030
load(p, spec)
3131
migrate_metrics(p, spec[:metrics] || [])
3232
migrate_reports(p, spec[:reports] || [])
3333
migrate_dashboards(p, spec[:dashboards] || [])
34-
migrate_users(p, spec[:users] || [])
3534
execute_tests(p, spec[:assert_tests] || [])
3635
p
3736
end
3837
end
3938
end
4039

41-
def migrate_date_dimensions(project, spec)
42-
spec.each do |dd|
43-
Model.add_schema(DateDimension.new(dd), project)
44-
end
45-
end
46-
47-
def migrate_datasets(spec, opts = { :client => GoodData.connection })
40+
def migrate_datasets(spec, opts = {})
41+
opts = { client: GoodData.connection }.merge(opts)
4842
client = opts[:client]
43+
dry_run = opts[:dry_run]
4944
fail ArgumentError, 'No :client specified' if client.nil?
5045

5146
p = opts[:project]
@@ -63,29 +58,29 @@ def migrate_datasets(spec, opts = { :client => GoodData.connection })
6358
link = result['asyncTask']['link']['poll']
6459
response = client.get(link, :process => false)
6560

66-
# pp response
6761
while response.code != 200
6862
sleep 1
6963
GoodData::Rest::Client.retryable(:tries => 3) do
7064
sleep 1
7165
response = client.get(link, :process => false)
72-
# pp response
7366
end
7467
end
7568

7669
response = client.get(link)
7770

7871
chunks = pick_correct_chunks(response['projectModelDiff']['updateScripts'])
79-
chunks['updateScript']['maqlDdlChunks'].each do |chunk|
80-
result = project.execute_maql(chunk)
81-
fail 'Creating dataset failed' if result['wTaskStatus']['status'] == 'ERROR'
82-
end
83-
84-
bp.datasets.zip(GoodData::Model::ToManifest.to_manifest(bp.to_hash)).each do |ds|
85-
dataset = ds[0]
86-
manifest = ds[1]
87-
GoodData::ProjectMetadata["manifest_#{dataset.name}", :client => client, :project => project] = manifest.to_json
72+
if !chunks.nil? && !dry_run
73+
chunks['updateScript']['maqlDdlChunks'].each do |chunk|
74+
result = project.execute_maql(chunk)
75+
fail 'Creating dataset failed' if result['wTaskStatus']['status'] == 'ERROR'
76+
end
77+
bp.datasets.zip(GoodData::Model::ToManifest.to_manifest(bp.to_hash)).each do |ds|
78+
dataset = ds[0]
79+
manifest = ds[1]
80+
GoodData::ProjectMetadata["manifest_#{dataset.name}", :client => client, :project => project] = manifest.to_json
81+
end
8882
end
83+
chunks
8984
end
9085

9186
def migrate_reports(project, spec)
@@ -106,13 +101,6 @@ def migrate_metrics(project, spec)
106101
end
107102
end
108103

109-
def migrate_users(_project, spec)
110-
spec.each do |user|
111-
puts "Would migrate user #{user}"
112-
# project.add_user(user)
113-
end
114-
end
115-
116104
def load(project, spec)
117105
if spec.key?(:uploads) # rubocop:disable Style/GuardClause
118106
spec[:uploads].each do |load|

spec/integration/full_project_spec.rb

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,32 @@
2020
}.to raise_error(GoodData::ValidationError)
2121
end
2222

23+
it "should do nothing if the project is updated with the same blueprint" do
24+
results = GoodData::Model::ProjectCreator.migrate_datasets(@spec, project: @project, client: @client, dry_run: true)
25+
expect(results).to be_nil
26+
end
27+
28+
it 'should try to rename a dataset back' do
29+
dataset = @project.datasets('dataset.repos')
30+
dataset.title = "Some title"
31+
dataset.save
32+
33+
# Now updating the project with the original blueprint should offer an update of the title. Nothing else.
34+
results = GoodData::Model::ProjectCreator.migrate_datasets(@spec, project: @project, client: @client, dry_run: true)
35+
expect(results['updateScript']['maqlDdl']).to eq "ALTER DATASET {dataset.repos} VISUAL(TITLE \"Repos\", DESCRIPTION \"\");\n"
36+
37+
# Update using a freshly gained blueprint should offer no changes.
38+
new_blueprint = @project.blueprint
39+
results = GoodData::Model::ProjectCreator.migrate_datasets(new_blueprint, project: @project, client: @client, dry_run: true)
40+
expect(results).to be_nil
41+
42+
# Now we change the model using the original blueprint — basically changing the title back.
43+
results = GoodData::Model::ProjectCreator.migrate_datasets(@spec, project: @project, client: @client)
44+
# It should offer no changes using the original blueprint
45+
results = GoodData::Model::ProjectCreator.migrate_datasets(@spec, project: @project, client: @client, dry_run: true)
46+
expect(results).to be_nil
47+
end
48+
2349
it "should contain datasets" do
2450
@project.blueprint.tap do |bp|
2551
expect(bp.datasets.count).to eq 3

0 commit comments

Comments
 (0)