Skip to content

Commit 4f7e6a4

Browse files
BUGFIX: TMA-1643 Don't convert null value to empty string
1 parent f2fccd7 commit 4f7e6a4

8 files changed

Lines changed: 29 additions & 21 deletions

File tree

lib/gooddata/cloud_resources/bigquery/bigquery_client.rb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,10 +60,10 @@ def realize_query(query, _params)
         result = table_result.iterateAll
         field_list = table_result.getSchema.getFields
         col_count = field_list.size
-        CSV.open(filename, 'wb', :force_quotes => true) do |csv|
+        CSV.open(filename, 'wb') do |csv|
           csv << Array(1..col_count).map { |i| field_list.get(i - 1).getName } # build the header
           result.each do |row|
-            csv << Array(1..col_count).map { |i| row.get(i - 1).getStringValue }
+            csv << Array(1..col_count).map { |i| row.get(i - 1).getValue&.to_s }
           end
         end
       end

lib/gooddata/cloud_resources/redshift/redshift_client.rb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,9 +58,9 @@ def realize_query(query, _params)
         result = statement.get_result_set
         metadata = result.get_meta_data
         col_count = metadata.column_count
-        CSV.open(filename, 'wb', :force_quotes => true) do |csv|
+        CSV.open(filename, 'wb') do |csv|
           csv << Array(1..col_count).map { |i| metadata.get_column_name(i) } # build the header
-          csv << Array(1..col_count).map { |i| result.get_string(i) } while result.next
+          csv << Array(1..col_count).map { |i| result.get_string(i)&.to_s } while result.next
        end
      end

lib/gooddata/cloud_resources/snowflake/snowflake_client.rb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,9 +54,9 @@ def realize_query(query, _params)
         result = statement.get_result_set
         metadata = result.get_meta_data
         col_count = metadata.column_count
-        CSV.open(filename, 'wb', :force_quotes => true) do |csv|
+        CSV.open(filename, 'wb') do |csv|
           csv << Array(1..col_count).map { |i| metadata.get_column_name(i) } # build the header
-          csv << Array(1..col_count).map { |i| result.get_string(i) } while result.next
+          csv << Array(1..col_count).map { |i| result.get_string(i)&.to_s } while result.next
        end
      end

spec/data/bigquery_data.csv

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
-"EmployeeID","Fullname","Salary","DOB","Departments","x__client_id","x__timestamp","x__deleted"
-"E101","Name 01","4000","1970-01-20","101","mtt_testing_01","1557396000.001","false"
-"E102","Name 02","3000","1918-05-26","102","mtt_testing_01","1557396000.002","false"
-"E103","Name 03","5000","1960-02-03","103","mtt_testing_01","1557396000.003","false"
-"E104","Name 04","5000","1960-02-03","104","MTT-Client02","1557396000.004","false"
+EmployeeID,Fullname,Salary,DOB,Departments,x__client_id,x__timestamp,x__deleted
+E101,Name 01,4000,1970-01-20,101,mtt_testing_01,1557396000.0,false
+E102,Name 02,3000,1918-05-26,102,mtt_testing_01,1557396000.0,false
+E103,Name 03,5000,1960-02-03,103,mtt_testing_01,1557396000.0,false
+E104,"Name, 04",5000,,104,MTT-Client02,1557396000.0,false

spec/data/redshift_data.csv

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
-"login","client_id","role","user_groups"
-"nahunta@flsb11.com","client01","admin","GROUP_01"
-"rhabille@workwolf.site","client02","admin","GROUP_02"
+login,client_id,role,user_groups
+nahunta@flsb11.com,client01,admin,GROUP_01
+rhabille@workwolf.site,client02,admin,GROUP_02

spec/data/redshift_data2.csv

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
-"login","client_id","role","user_groups"
-"nahunta@flsb11.com","client01","admin","GROUP_01, GROUP_02"
-"rhabille@workwolf.site","client02","admin","GROUP_02"
+login,client_id,role,user_groups
+nahunta@flsb11.com,client01,admin,"GROUP_01, GROUP_02"
+rhabille@workwolf.site,client02,admin,GROUP_02

spec/data/snowflake_data.csv

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
-"CP__CUSTKEY","A__NAME","D__DATEADDED","X__TIMESTAMP"
-"cus3","phong","2019-03-05",""
+CP__CUSTKEY,A__NAME,D__DATEADDED,X__TIMESTAMP
+cus3,phong,,

spec/lcm/integration/data_helper_spec.rb

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -138,54 +138,61 @@
       }
     },
     "project"=> "gdc-us-dev",
-    "schema"=> "mtt_team_checklist"
+    "schema"=> "lcm_test"
   }
 },
 "input_source"=> {
   "type"=> "bigquery",
-  "query"=> "select * from employees;"
+  "query"=> "select * from employees order by EmployeeID"
 }
}
+
 describe 'data helper', :vcr do
 
   it 'connect to redshift with IAM authentication' do
     data_helper = GoodData::Helpers::DataSource.new(iam_params['input_source'])
     file_path = data_helper.realize(iam_params)
+    puts "redshift iam: #{file_path}"
     data = File.open('spec/data/redshift_data2.csv').read
     expect(data).to eq File.open(file_path).read
   end
 
   it 'connect to redshift with BASIC authentication' do
     data_helper = GoodData::Helpers::DataSource.new(basic_params['input_source'])
     file_path = data_helper.realize(basic_params)
+    puts "redshift basic 1: #{file_path}"
     data = File.open('spec/data/redshift_data2.csv').read
     expect(data).to eq File.open(file_path).read
   end
 
   it 'connect to redshift with BASIC authentication without schema' do
     data_helper = GoodData::Helpers::DataSource.new(basic_params_without_schema['input_source'])
     file_path = data_helper.realize(basic_params_without_schema)
+    puts "redshift basic 2: #{file_path}"
     data = File.open('spec/data/redshift_data.csv').read
     expect(data).to eq File.open(file_path).read
   end
 
   it 'connect to redshift with BASIC authentication and dynamic source' do
     data_helper = GoodData::Helpers::DataSource.new(basic_params_dynamic_source['dynamic_params']['input_source'])
     file_path = data_helper.realize(basic_params_dynamic_source)
+    puts "redshift basic 3: #{file_path}"
     data = File.open('spec/data/redshift_data2.csv').read
     expect(data).to eq File.open(file_path).read
   end
 
   it 'connect to redshift with BASIC authentication and url has parameter' do
     data_helper = GoodData::Helpers::DataSource.new(basic_params_url_parameters['input_source'])
     file_path = data_helper.realize(basic_params_url_parameters)
+    puts "redshift basic 4: #{file_path}"
     data = File.open('spec/data/redshift_data2.csv').read
     expect(data).to eq File.open(file_path).read
   end
 
   it 'connect to snowflake with BASIC authentication' do
     data_helper = GoodData::Helpers::DataSource.new(snowflake_basic_params['input_source'])
     file_path = data_helper.realize(snowflake_basic_params)
+    puts "snowflake: #{file_path}"
     data = File.open('spec/data/snowflake_data.csv').read
     expect(data).to eq File.open(file_path).read
   end
@@ -198,6 +205,7 @@
 
     data_helper = GoodData::Helpers::DataSource.new(bigquery_basic_params['input_source'])
     file_path = data_helper.realize(bigquery_basic_params)
+    puts "bigquery: #{file_path}"
     data = File.open('spec/data/bigquery_data.csv').read
     expect(data).to eq File.open(file_path).read
   end

0 commit comments

Comments (0)