@@ -18,6 +18,7 @@ def initialize(task, schema, fields = nil)
          @schema = schema
          reset_fields(fields) if fields
          @project = @task['project']
+         @destination_project = @task['destination_project']
          @dataset = @task['dataset']
          @location = @task['location']
          @location_for_log = @location.nil? ? 'us/eu' : @location
@@ -80,7 +81,7 @@ def load_from_gcs(object_uris, table)
              # As https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects#managingjobs says,
              # we should generate job_id in client code, otherwise, retrying would cause duplication
              job_id = "embulk_load_job_#{SecureRandom.uuid}"
-             Embulk.logger.info { "embulk-output-bigquery: Load job starting... job_id:[#{job_id}] #{object_uris} => #{@project}:#{@dataset}.#{table} in #{@location_for_log}" }
+             Embulk.logger.info { "embulk-output-bigquery: Load job starting... job_id:[#{job_id}] #{object_uris} => #{@destination_project}:#{@dataset}.#{table} in #{@location_for_log}" }
 
              body = {
                job_reference: {
@@ -90,7 +91,7 @@ def load_from_gcs(object_uris, table)
                configuration: {
                  load: {
                    destination_table: {
-                     project_id: @project,
+                     project_id: @destination_project,
                      dataset_id: @dataset,
                      table_id: table,
                    },
@@ -130,7 +131,7 @@ def load_from_gcs(object_uris, table)
              Embulk.logger.error {
                "embulk-output-bigquery: insert_job(#{@project}, #{body}, #{opts}), response:#{response}"
              }
-             raise Error, "failed to load #{object_uris} to #{@project}:#{@dataset}.#{table} in #{@location_for_log}, response:#{response}"
+             raise Error, "failed to load #{object_uris} to #{@destination_project}:#{@dataset}.#{table} in #{@location_for_log}, response:#{response}"
            end
          end
        end
@@ -171,7 +172,7 @@ def load(path, table, write_disposition: 'WRITE_APPEND')
                # As https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects#managingjobs says,
                # we should generate job_id in client code, otherwise, retrying would cause duplication
                job_id = "embulk_load_job_#{SecureRandom.uuid}"
-               Embulk.logger.info { "embulk-output-bigquery: Load job starting... job_id:[#{job_id}] #{path} => #{@project}:#{@dataset}.#{table} in #{@location_for_log}" }
+               Embulk.logger.info { "embulk-output-bigquery: Load job starting... job_id:[#{job_id}] #{path} => #{@destination_project}:#{@dataset}.#{table} in #{@location_for_log}" }
              else
                Embulk.logger.info { "embulk-output-bigquery: Load job starting... #{path} does not exist, skipped" }
                return
@@ -185,7 +186,7 @@ def load(path, table, write_disposition: 'WRITE_APPEND')
                configuration: {
                  load: {
                    destination_table: {
-                     project_id: @project,
+                     project_id: @destination_project,
                      dataset_id: @dataset,
                      table_id: table,
                    },
@@ -232,7 +233,7 @@ def load(path, table, write_disposition: 'WRITE_APPEND')
              Embulk.logger.error {
                "embulk-output-bigquery: insert_job(#{@project}, #{body}, #{opts}), response:#{response}"
              }
-             raise Error, "failed to load #{path} to #{@project}:#{@dataset}.#{table} in #{@location_for_log}, response:#{response}"
+             raise Error, "failed to load #{path} to #{@destination_project}:#{@dataset}.#{table} in #{@location_for_log}, response:#{response}"
            end
          end
        end
@@ -245,7 +246,7 @@ def copy(source_table, destination_table, destination_dataset = nil, write_dispo
 
              Embulk.logger.info {
                "embulk-output-bigquery: Copy job starting... job_id:[#{job_id}] " \
-               "#{@project}:#{@dataset}.#{source_table} => #{@project}:#{destination_dataset}.#{destination_table}"
+               "#{@destination_project}:#{@dataset}.#{source_table} => #{@destination_project}:#{destination_dataset}.#{destination_table}"
              }
 
              body = {
@@ -258,12 +259,12 @@ def copy(source_table, destination_table, destination_dataset = nil, write_dispo
                  create_disposition: 'CREATE_IF_NEEDED',
                  write_disposition: write_disposition,
                  source_table: {
-                   project_id: @project,
+                   project_id: @destination_project,
                    dataset_id: @dataset,
                    table_id: source_table,
                  },
                  destination_table: {
-                   project_id: @project,
+                   project_id: @destination_project,
                    dataset_id: destination_dataset,
                    table_id: destination_table,
                  },
@@ -284,8 +285,8 @@ def copy(source_table, destination_table, destination_dataset = nil, write_dispo
              Embulk.logger.error {
                "embulk-output-bigquery: insert_job(#{@project}, #{body}, #{opts}), response:#{response}"
              }
-             raise Error, "failed to copy #{@project}:#{@dataset}.#{source_table} " \
-                          "to #{@project}:#{destination_dataset}.#{destination_table}, response:#{response}"
+             raise Error, "failed to copy #{@destination_project}:#{@dataset}.#{source_table} " \
+                          "to #{@destination_project}:#{destination_dataset}.#{destination_table}, response:#{response}"
            end
          end
        end
@@ -354,7 +355,7 @@ def wait_load(kind, response)
        def create_dataset(dataset = nil, reference: nil)
          dataset ||= @dataset
          begin
-           Embulk.logger.info { "embulk-output-bigquery: Create dataset... #{@project}:#{dataset} in #{@location_for_log}" }
+           Embulk.logger.info { "embulk-output-bigquery: Create dataset... #{@destination_project}:#{dataset} in #{@location_for_log}" }
            hint = {}
            if reference
              response = get_dataset(reference)
@@ -382,25 +383,25 @@ def create_dataset(dataset = nil, reference: nil)
            Embulk.logger.error {
              "embulk-output-bigquery: insert_dataset(#{@project}, #{body}, #{opts}), response:#{response}"
            }
-           raise Error, "failed to create dataset #{@project}:#{dataset} in #{@location_for_log}, response:#{response}"
+           raise Error, "failed to create dataset #{@destination_project}:#{dataset} in #{@location_for_log}, response:#{response}"
          end
        end
 
        def get_dataset(dataset = nil)
          dataset ||= @dataset
          begin
-           Embulk.logger.info { "embulk-output-bigquery: Get dataset... #{@project}:#{dataset}" }
-           with_network_retry { client.get_dataset(@project, dataset) }
+           Embulk.logger.info { "embulk-output-bigquery: Get dataset... #{@destination_project}:#{dataset}" }
+           with_network_retry { client.get_dataset(@destination_project, dataset) }
          rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
            if e.status_code == 404
-             raise NotFoundError, "Dataset #{@project}:#{dataset} is not found"
+             raise NotFoundError, "Dataset #{@destination_project}:#{dataset} is not found"
            end
 
            response = {status_code: e.status_code, message: e.message, error_class: e.class}
            Embulk.logger.error {
-             "embulk-output-bigquery: get_dataset(#{@project}, #{dataset}), response:#{response}"
+             "embulk-output-bigquery: get_dataset(#{@destination_project}, #{dataset}), response:#{response}"
            }
-           raise Error, "failed to get dataset #{@project}:#{dataset}, response:#{response}"
+           raise Error, "failed to get dataset #{@destination_project}:#{dataset}, response:#{response}"
          end
        end
 
@@ -414,7 +415,7 @@ def create_table_if_not_exists(table, dataset: nil, options: nil)
            table = Helper.chomp_partition_decorator(table)
          end
 
-         Embulk.logger.info { "embulk-output-bigquery: Create table... #{@project}:#{dataset}.#{table}" }
+         Embulk.logger.info { "embulk-output-bigquery: Create table... #{@destination_project}:#{dataset}.#{table}" }
          body = {
            table_reference: {
              table_id: table,
@@ -452,7 +453,7 @@ def create_table_if_not_exists(table, dataset: nil, options: nil)
          Embulk.logger.error {
            "embulk-output-bigquery: insert_table(#{@project}, #{dataset}, #{@location_for_log}, #{body}, #{opts}), response:#{response}"
          }
-         raise Error, "failed to create table #{@project}:#{dataset}.#{table} in #{@location_for_log}, response:#{response}"
+         raise Error, "failed to create table #{@destination_project}:#{dataset}.#{table} in #{@location_for_log}, response:#{response}"
        end
      end
 
@@ -469,8 +470,8 @@ def delete_partition(table, dataset: nil)
        def delete_table_or_partition(table, dataset: nil)
          begin
            dataset ||= @dataset
-           Embulk.logger.info { "embulk-output-bigquery: Delete table... #{@project}:#{dataset}.#{table}" }
-           with_network_retry { client.delete_table(@project, dataset, table) }
+           Embulk.logger.info { "embulk-output-bigquery: Delete table... #{@destination_project}:#{dataset}.#{table}" }
+           with_network_retry { client.delete_table(@destination_project, dataset, table) }
          rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
            if e.status_code == 404 && /Not found:/ =~ e.message
              # ignore 'Not Found' error
@@ -479,9 +480,9 @@ def delete_table_or_partition(table, dataset: nil)
 
            response = {status_code: e.status_code, message: e.message, error_class: e.class}
            Embulk.logger.error {
-             "embulk-output-bigquery: delete_table(#{@project}, #{dataset}, #{table}), response:#{response}"
+             "embulk-output-bigquery: delete_table(#{@destination_project}, #{dataset}, #{table}), response:#{response}"
            }
-           raise Error, "failed to delete table #{@project}:#{dataset}.#{table}, response:#{response}"
+           raise Error, "failed to delete table #{@destination_project}:#{dataset}.#{table}, response:#{response}"
          end
        end
 
@@ -497,18 +498,18 @@ def get_partition(table, dataset: nil)
        def get_table_or_partition(table, dataset: nil)
          begin
            dataset ||= @dataset
-           Embulk.logger.info { "embulk-output-bigquery: Get table... #{@project}:#{dataset}.#{table}" }
-           with_network_retry { client.get_table(@project, dataset, table) }
+           Embulk.logger.info { "embulk-output-bigquery: Get table... #{@destination_project}:#{dataset}.#{table}" }
+           with_network_retry { client.get_table(@destination_project, dataset, table) }
          rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
            if e.status_code == 404
-             raise NotFoundError, "Table #{@project}:#{dataset}.#{table} is not found"
+             raise NotFoundError, "Table #{@destination_project}:#{dataset}.#{table} is not found"
            end
 
            response = {status_code: e.status_code, message: e.message, error_class: e.class}
            Embulk.logger.error {
-             "embulk-output-bigquery: get_table(#{@project}, #{dataset}, #{table}), response:#{response}"
+             "embulk-output-bigquery: get_table(#{@destination_project}, #{dataset}, #{table}), response:#{response}"
            }
-           raise Error, "failed to get table #{@project}:#{dataset}.#{table}, response:#{response}"
+           raise Error, "failed to get table #{@destination_project}:#{dataset}.#{table}, response:#{response}"
          end
        end
      end
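
The change is consistent throughout: insert_job, insert_dataset, and insert_table are still issued against @project (the project that runs and is billed for BigQuery jobs), while every table reference, log line, and error message now points at @destination_project (the project that owns the loaded data). Below is a minimal Ruby sketch of that split; the fallback to the billing project is an assumption for illustration, since the hunk in initialize shows only the plain assignment, not how destination_project is defaulted.

# Hypothetical illustration of the billing/destination split; not the
# plugin's actual config layer.
task = {
  'project'             => 'billing-project', # runs and pays for jobs
  'destination_project' => 'data-project',    # owns datasets and tables
}

# Assumed fallback: reuse the billing project when no separate
# destination project is configured.
destination_project = task['destination_project'] || task['project']

# Jobs are inserted under the billing project, but the destination
# table they write to lives under the destination project.
job_project = task['project']
destination_table = {
  project_id: destination_project,
  dataset_id: 'my_dataset',
  table_id:   'my_table',
}

puts "insert_job(#{job_project}) => " \
     "#{destination_table[:project_id]}:#{destination_table[:dataset_id]}.#{destination_table[:table_id]}"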