Source code for tests.system.google.cloud.gcs.example_s3_to_gcs
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import os
from datetime import datetime

from airflow.decorators import task
from airflow.models.dag import DAG
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator
from airflow.utils.trigger_rule import TriggerRule

from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
GCP_PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
DAG_ID = "example_s3_to_gcs"

RESOURCES_BUCKET_NAME = "airflow-system-tests-resources"
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
GCS_BUCKET_URL = f"gs://{BUCKET_NAME}/"
FILE_NAME = "example_upload.txt"
UPLOAD_FILE = f"gcs/{FILE_NAME}"
PREFIX = "gcs"
@task(task_id="upload_file_to_s3")
def upload_file():
    """A callable to upload a file from the GCS resources bucket to the AWS bucket."""
    gcs_hook = GCSHook()
    s3_hook = S3Hook()
    # provide_file() downloads the GCS object to a local temporary file and
    # yields the open file handle, which load_file_obj() then streams to S3.
    with gcs_hook.provide_file(bucket_name=RESOURCES_BUCKET_NAME, object_name=UPLOAD_FILE) as gcs_file:
        s3_hook.load_file_obj(file_obj=gcs_file, key=UPLOAD_FILE, bucket_name=BUCKET_NAME)
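# For context, a minimal standalone sketch of the same hook round-trip that can
# be defined outside a DAG. It assumes the default "google_cloud_default" and
# "aws_default" connections are configured; the bucket and object names are
# placeholders, not part of this test.
def _hook_copy_sketch() -> None:
    gcs_hook = GCSHook()
    s3_hook = S3Hook()
    with gcs_hook.provide_file(bucket_name="example-gcs-bucket", object_name="data/file.txt") as tmp:
        # tmp is an open handle to a local temporary copy of the GCS object;
        # replace=True overwrites an existing S3 key instead of raising.
        s3_hook.load_file_obj(file_obj=tmp, key="data/file.txt", bucket_name="example-s3-bucket", replace=True)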
with DAG(
    dag_id=DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=["example", "s3"],
) as dag:
    create_s3_bucket = S3CreateBucketOperator(task_id="create_s3_bucket", bucket_name=BUCKET_NAME)

    create_gcs_bucket = GCSCreateBucketOperator(
        task_id="create_bucket",
        bucket_name=BUCKET_NAME,
        project_id=GCP_PROJECT_ID,
    )

    # [START howto_transfer_s3togcs_operator]
    transfer_to_gcs = S3ToGCSOperator(
        task_id="s3_to_gcs_task",
        bucket=BUCKET_NAME,
        prefix=PREFIX,
        dest_gcs=GCS_BUCKET_URL,
        apply_gcs_prefix=True,
    )
    # [END howto_transfer_s3togcs_operator]

    # With apply_gcs_prefix=True the source prefix is stripped, so
    # s3://<bucket>/gcs/example_upload.txt lands at gs://<bucket>/example_upload.txt;
    # with the default False, the S3 prefix is kept under the dest_gcs path.

    # [START howto_transfer_s3togcs_operator_async]
    transfer_to_gcs_def = S3ToGCSOperator(
        task_id="s3_to_gcs_task_def",
        bucket=BUCKET_NAME,
        prefix=PREFIX,
        dest_gcs=GCS_BUCKET_URL,
        deferrable=True,
    )
    # [END howto_transfer_s3togcs_operator_async]

    delete_s3_bucket = S3DeleteBucketOperator(
        task_id="delete_s3_bucket",
        bucket_name=BUCKET_NAME,
        force_delete=True,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    delete_gcs_bucket = GCSDeleteBucketOperator(
        task_id="delete_gcs_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
    )

    (
        # TEST SETUP
        create_gcs_bucket
        >> create_s3_bucket
        >> upload_file()
        # TEST BODY
        >> [transfer_to_gcs, transfer_to_gcs_def]
        # TEST TEARDOWN
        >> delete_s3_bucket
        >> delete_gcs_bucket
    )

    from tests_common.test_utils.watcher import watcher

    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()


from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
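# For reference, a sketch of how such a system test is typically launched
# (see tests/system/README.md#run_via_pytest); the file path below is an
# assumption derived from this module's dotted name:
#
#   pytest providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py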