# Source code for airflow.providers.google.cloud.example_dags.example_bigquery_dts
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example Airflow DAG that creates and deletes Bigquery data transfer configurations."""
import os
import time
from datetime import datetime

from airflow import models
from airflow.providers.google.cloud.operators.bigquery_dts import (
    BigQueryCreateDataTransferOperator,
    BigQueryDataTransferServiceStartTransferRunsOperator,
    BigQueryDeleteDataTransferConfigOperator,
)
from airflow.providers.google.cloud.sensors.bigquery_dts import BigQueryDataTransferServiceTransferRunSensor
# [START howto_bigquery_dts_create_args]
# In the case of Airflow, the customer needs to create a transfer
# config with the automatic scheduling disabled and then trigger
# a transfer run using a specialized Airflow operator
[docs]TRANSFER_CONFIG={"destination_dataset_id":GCP_DTS_BQ_DATASET,"display_name":"GCS Test Config","data_source_id":"google_cloud_storage","schedule_options":schedule_options,"params":PARAMS,
}# [END howto_bigquery_dts_create_args]withmodels.DAG("example_gcp_bigquery_dts",schedule_interval='@once',# Override to match your needsstart_date=datetime(2021,1,1),catchup=False,tags=['example'],)asdag:# [START howto_bigquery_create_data_transfer]