
Commit

fixup! Parquet to BigQuery import for GCP-backed AnVIL snapshots (#6355)
nadove-ucsc committed Jul 31, 2024
1 parent d7f577c commit 12b839f
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions src/azul/plugins/repository/tdr_anvil/__init__.py
@@ -747,7 +747,7 @@ def _columns(self, entity_type: EntityType) -> set[str]:
 
     def import_tables(self, source: TDRSourceRef):
         """
-        Import tables for an AnVIL snapshot into BigQuery via TDR's parquet
+        Import tables for an AnVIL snapshot into BigQuery via TDR's Parquet
         export API. Only tables defined in the AnVIL schema will be imported.
         Currently, only GS-backed snapshots are supported.
         """
@@ -758,7 +758,7 @@ def import_tables(self, source: TDRSourceRef):
 
         urls_by_table = self.tdr.export_parquet_urls(source.id)
         reject(urls_by_table is None,
-               'No parquet access information is available for snapshot %r.', source.spec)
+               'No Parquet access information is available for snapshot %r.', source.spec)
 
         for table in anvil_schema['tables']:
             table_name = table['name']
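For context, the docstring changed above describes loading TDR's Parquet export for an AnVIL snapshot into BigQuery. A minimal sketch of the underlying BigQuery load, using the google-cloud-bigquery client directly rather than Azul's own code, with placeholder project, dataset, table, and bucket names:

from google.cloud import bigquery

client = bigquery.Client()

# Parquet URLs as returned by TDR's export API (placeholder values).
import_urls = ['gs://example-export-bucket/donor/part-0.parquet']

job_config = bigquery.LoadJobConfig(
    source_format=bigquery.SourceFormat.PARQUET,
)
load_job = client.load_table_from_uri(
    import_urls,
    'example-project.example_dataset.donor',  # destination table
    job_config=job_config,
)
load_job.result()  # block until the load job finishes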
2 changes: 1 addition & 1 deletion src/azul/terra.py
@@ -692,7 +692,7 @@ def create_table(self,
 
         :param table_name: Unqualified name of the new table
 
-        :param import_urls: URLs of parquet file(s) to populate the table. These
+        :param import_urls: URLs of Parquet file(s) to populate the table. These
                             must be `gs://` URLS and the GCS bucket's region
                             must be compatible with the target dataset's. See
                             https://cloud.google.com/bigquery/docs/loading-data-cloud-storage-parquet#limitations
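The parameter documentation above requires `import_urls` to be `gs://` URLs whose bucket region is compatible with the target dataset. A minimal sketch of the kind of scheme check this implies; the helper name and error message are hypothetical and not taken from azul.terra:

from urllib.parse import urlparse

def require_gs_urls(import_urls: list[str]) -> None:
    # Hypothetical helper: BigQuery loads Parquet directly from GCS,
    # so anything that is not a gs:// URL is rejected up front.
    for url in import_urls:
        if urlparse(url).scheme != 'gs':
            raise ValueError(f'Expected a gs:// URL, got {url!r}')

require_gs_urls(['gs://example-export-bucket/donor/part-0.parquet'])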
