Skip to content

Commit 980dd1f

Browse files
authored
[BEAM-10853] Merge pull request #12772: Cherrypick: Fixing copy name issue: Single name for all copies
[cherry-pick][BEAM-10853] Fixing copy name issue: Single name for all copies
2 parents 3a9ed0c + f25e808 commit 980dd1f

2 files changed

Lines changed: 14 additions & 4 deletions

File tree

sdks/python/apache_beam/io/gcp/bigquery_file_loads.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,10 @@
7070

7171
def _generate_job_name(job_name, job_type, step_name):
7272
return bigquery_tools.generate_bq_job_name(
73-
job_name, step_name, job_type, random.randint(0, 1000))
73+
job_name=job_name,
74+
step_id=step_name,
75+
job_type=job_type,
76+
random=random.randint(0, 1000))
7477

7578

7679
def file_prefix_generator(
@@ -360,9 +363,9 @@ def process(self, element, job_name_prefix=None):
360363
job_name_prefix,
361364
_bq_uuid(
362365
'%s:%s.%s' % (
363-
copy_to_reference.projectId,
364-
copy_to_reference.datasetId,
365-
copy_to_reference.tableId)))
366+
copy_from_reference.projectId,
367+
copy_from_reference.datasetId,
368+
copy_from_reference.tableId)))
366369

367370
_LOGGER.info(
368371
"Triggering copy job from %s to %s",

sdks/python/apache_beam/io/gcp/bigquery_test.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1135,6 +1135,11 @@ def setUp(self):
11351135
def test_avro_file_load(self):
11361136
# Construct elements such that they can be written via Avro but not via
11371137
# JSON. See BEAM-8841.
1138+
from apache_beam.io.gcp import bigquery_file_loads
1139+
old_max_files = bigquery_file_loads._MAXIMUM_SOURCE_URIS
1140+
old_max_file_size = bigquery_file_loads._DEFAULT_MAX_FILE_SIZE
1141+
bigquery_file_loads._MAXIMUM_SOURCE_URIS = 1
1142+
bigquery_file_loads._DEFAULT_MAX_FILE_SIZE = 100
11381143
elements = [
11391144
{
11401145
'name': u'Negative infinity',
@@ -1194,6 +1199,8 @@ def test_avro_file_load(self):
11941199
method='FILE_LOADS',
11951200
temp_file_format=bigquery_tools.FileFormat.AVRO,
11961201
))
1202+
bigquery_file_loads._MAXIMUM_SOURCE_URIS = old_max_files
1203+
bigquery_file_loads._DEFAULT_MAX_FILE_SIZE = old_max_file_size
11971204

11981205
def tearDown(self):
11991206
request = bigquery.BigqueryDatasetsDeleteRequest(

0 commit comments

Comments (0)