How can I create a table with "clustered by" columns using BigQueryCreateEmptyTableOperator?


I am trying to create a table with a clustered-by column in a GCP Composer task, using a Python script and BigQueryCreateEmptyTableOperator. I am passing the clustering column via cluster_fields, but it is not working. What is the correct way to do this?

Below is the code I am using:

stop_op = BigQueryCreateEmptyTableOperator(
    task_id='BigQuery_CreateTable',
    dataset_id=dataset_nm,
    table_id=table_nm,
    project_id=project_nm,
    schema_fields=[{"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
                   {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"},
                   {"name": "distribution_name", "type": "STRING", "mode": "NULLABLE"},
                   {"name": "transaction_date", "type": "DATE", "mode": "NULLABLE"}],
    time_partitioning={'type': 'DAY', 'field': 'transaction_date'},
    cluster_fields='distribution_name',
    bigquery_conn_id='bigquery_default',
    google_cloud_storage_conn_id='bigquery_default',
    autodetect=True,
    dag=dag
)

1 Answer

This feature is not available in the latest version of Airflow at the time of writing (1.10.5).

However, the underlying BigQueryHook's create_empty_table method does accept cluster_fields, so you can create a new operator as below that passes the clustering columns through to it:

from airflow.utils.decorators import apply_defaults 
from airflow.contrib.hooks.bigquery_hook import BigQueryHook 
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook, _parse_gcs_url
from airflow.contrib.operators.bigquery_operator import BigQueryCreateEmptyTableOperator 
import json

class BQCreateEmptyTableWithClusteredFieldsOp(BigQueryCreateEmptyTableOperator):
    template_fields = ('dataset_id', 'table_id', 'project_id',
                       'gcs_schema_object', 'labels')
    ui_color = '#f0eee4'

    # pylint: disable=too-many-arguments
    @apply_defaults
    def __init__(self,
                 dataset_id,
                 table_id,
                 project_id=None,
                 schema_fields=None,
                 gcs_schema_object=None,
                 time_partitioning=None,
                 bigquery_conn_id='bigquery_default',
                 google_cloud_storage_conn_id='google_cloud_default',
                 delegate_to=None,
                 labels=None,
                 encryption_configuration=None,
                 cluster_fields=None,
                 *args, **kwargs):

        # Deliberately skip BigQueryCreateEmptyTableOperator.__init__ (it does
        # not accept cluster_fields) and initialise BaseOperator directly.
        super(BigQueryCreateEmptyTableOperator, self).__init__(*args, **kwargs)

        self.project_id = project_id
        self.dataset_id = dataset_id
        self.table_id = table_id
        self.schema_fields = schema_fields
        self.gcs_schema_object = gcs_schema_object
        self.bigquery_conn_id = bigquery_conn_id
        self.google_cloud_storage_conn_id = google_cloud_storage_conn_id
        self.delegate_to = delegate_to
        self.time_partitioning = {} if time_partitioning is None else time_partitioning
        self.labels = labels
        self.encryption_configuration = encryption_configuration
        self.cluster_fields = cluster_fields or []

    def execute(self, context):
        bq_hook = BigQueryHook(bigquery_conn_id=self.bigquery_conn_id,
                               delegate_to=self.delegate_to)

        if not self.schema_fields and self.gcs_schema_object:
            # Fall back to a JSON schema file stored in GCS when
            # schema_fields is not supplied directly.
            gcs_bucket, gcs_object = _parse_gcs_url(self.gcs_schema_object)

            gcs_hook = GoogleCloudStorageHook(
                google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
                delegate_to=self.delegate_to)
            schema_fields = json.loads(gcs_hook.download(
                gcs_bucket,
                gcs_object).decode("utf-8"))
        else:
            schema_fields = self.schema_fields

        conn = bq_hook.get_conn()
        cursor = conn.cursor()

        # The hook's cursor already supports cluster_fields, even though the
        # stock operator does not expose it.
        cursor.create_empty_table(
            project_id=self.project_id,
            dataset_id=self.dataset_id,
            table_id=self.table_id,
            schema_fields=schema_fields,
            time_partitioning=self.time_partitioning,
            labels=self.labels,
            cluster_fields=self.cluster_fields,
            encryption_configuration=self.encryption_configuration
        )
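
Note that cluster_fields is forwarded unchanged to the hook's create_empty_table call, so it should be a list of column names, e.g. ['distribution_name'], not a bare string. Note also that, at the time of writing, BigQuery only supports clustering on partitioned tables, which is why time_partitioning is set in the examples here.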

It can now be used as follows:

stop_op = BQCreateEmptyTableWithClusteredFieldsOp(
    task_id='BigQuery_CreateTable',
    dataset_id=dataset_nm,
    table_id=table_nm,
    project_id=project_nm,
    schema_fields=[{"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
                   {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"},
                   {"name": "distribution_name", "type": "STRING", "mode": "NULLABLE"},
                   {"name": "transaction_date", "type": "DATE", "mode": "NULLABLE"}],
    time_partitioning={'type': 'DAY', 'field': 'transaction_date'},
    cluster_fields=['distribution_name'],  # a list of column names
    bigquery_conn_id='bigquery_default',
    google_cloud_storage_conn_id='bigquery_default',
    dag=dag
)
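
For reference, newer Airflow releases expose this natively: in the apache-airflow-providers-google package, BigQueryCreateEmptyTableOperator accepts cluster_fields directly, so the custom subclass is no longer needed. A minimal sketch, assuming Airflow 2.x with that provider installed:

from airflow.providers.google.cloud.operators.bigquery import BigQueryCreateEmptyTableOperator

stop_op = BigQueryCreateEmptyTableOperator(
    task_id='BigQuery_CreateTable',
    dataset_id=dataset_nm,
    table_id=table_nm,
    project_id=project_nm,
    schema_fields=[{"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
                   {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"},
                   {"name": "distribution_name", "type": "STRING", "mode": "NULLABLE"},
                   {"name": "transaction_date", "type": "DATE", "mode": "NULLABLE"}],
    time_partitioning={'type': 'DAY', 'field': 'transaction_date'},
    cluster_fields=['distribution_name'],  # supported natively in the provider operator
    gcp_conn_id='google_cloud_default',
    dag=dag
)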
