Source code for tests.system.google.cloud.automl.example_automl_video_tracking
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example Airflow DAG that uses Google AutoML services."""

from __future__ import annotations

import os
from datetime import datetime

from google.cloud.aiplatform import schema
from google.protobuf.struct_pb2 import Value

from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.gcs import (
    GCSCreateBucketOperator,
    GCSDeleteBucketOperator,
    GCSSynchronizeBucketsOperator,
)
from airflow.providers.google.cloud.operators.vertex_ai.auto_ml import (
    CreateAutoMLVideoTrainingJobOperator,
    DeleteAutoMLTrainingJobOperator,
)
from airflow.providers.google.cloud.operators.vertex_ai.dataset import (
    CreateDatasetOperator,
    DeleteDatasetOperator,
    ImportDataOperator,
)
from airflow.utils.trigger_rule import TriggerRule
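# NOTE: the module-level constants referenced by the tasks below (DAG_ID, PROJECT_ID,
# REGION, bucket names, dataset and import configs) were dropped from this listing.
# The definitions here are a reconstructed sketch: the names come from the task code
# below, while the concrete values (region, bucket names, GCS path, display names)
# are assumptions and may differ from the original module.
ENV_ID = os.environ.get("SYSTEMS_TESTS_ENV_ID", "default") if False else os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
DAG_ID = "example_automl_video_tracking"  # assumed; taken from the module name
REGION = "us-central1"  # assumed region
VIDEO_DISPLAY_NAME = f"auto-ml-video-tracking-{ENV_ID}"  # assumed display name
MODEL_DISPLAY_NAME = f"auto-ml-video-tracking-model-{ENV_ID}"  # assumed display name

RESOURCE_DATA_BUCKET = "airflow-system-tests-resources"  # assumed source bucket
VIDEO_GCS_BUCKET_NAME = f"bucket-video-tracking-{ENV_ID}"  # assumed bucket name

VIDEO_DATASET = {
    "display_name": f"video-dataset-{ENV_ID}",
    "metadata_schema_uri": schema.dataset.metadata.video,
    "metadata": Value(string_value="video-dataset"),
}
VIDEO_DATA_CONFIG = [
    {
        "import_schema_uri": schema.dataset.ioformat.video.object_tracking,
        # assumed GCS path of the import file
        "gcs_source": {"uris": [f"gs://{VIDEO_GCS_BUCKET_NAME}/automl/tracking.csv"]},
    },
]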
# Example DAG for AutoML Video Intelligence Object Tracking
with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=["example", "auto_ml", "video", "tracking"],
) as dag:
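    # NOTE: the `create_bucket` task referenced in the dependency chain below is missing
    # from this listing. This is a reconstructed sketch; the storage class and location
    # arguments are assumptions.
    create_bucket = GCSCreateBucketOperator(
        task_id="create_bucket",
        bucket_name=VIDEO_GCS_BUCKET_NAME,
        storage_class="REGIONAL",
        location=REGION,
    )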
    move_dataset_file = GCSSynchronizeBucketsOperator(
        task_id="move_dataset_to_bucket",
        source_bucket=RESOURCE_DATA_BUCKET,
        source_object="automl/datasets/video",
        destination_bucket=VIDEO_GCS_BUCKET_NAME,
        destination_object="automl",
        recursive=True,
    )

    create_video_dataset = CreateDatasetOperator(
        task_id="video_dataset",
        dataset=VIDEO_DATASET,
        region=REGION,
        project_id=PROJECT_ID,
    )
    video_dataset_id = create_video_dataset.output["dataset_id"]

    import_video_dataset = ImportDataOperator(
        task_id="import_video_data",
        dataset_id=video_dataset_id,
        region=REGION,
        project_id=PROJECT_ID,
        import_configs=VIDEO_DATA_CONFIG,
    )

    # [START howto_cloud_create_video_tracking_training_job_operator]
    create_auto_ml_video_training_job = CreateAutoMLVideoTrainingJobOperator(
        task_id="auto_ml_video_task",
        display_name=VIDEO_DISPLAY_NAME,
        prediction_type="object_tracking",
        model_type="CLOUD",
        dataset_id=video_dataset_id,
        model_display_name=MODEL_DISPLAY_NAME,
        region=REGION,
        project_id=PROJECT_ID,
    )
    # [END howto_cloud_create_video_tracking_training_job_operator]

    delete_auto_ml_video_training_job = DeleteAutoMLTrainingJobOperator(
        task_id="delete_auto_ml_video_training_job",
        training_pipeline_id="{{ task_instance.xcom_pull(task_ids='auto_ml_video_task', "
        "key='training_id') }}",
        region=REGION,
        project_id=PROJECT_ID,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    delete_video_dataset = DeleteDatasetOperator(
        task_id="delete_video_dataset",
        dataset_id=video_dataset_id,
        region=REGION,
        project_id=PROJECT_ID,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    delete_bucket = GCSDeleteBucketOperator(
        task_id="delete_bucket",
        bucket_name=VIDEO_GCS_BUCKET_NAME,
        trigger_rule=TriggerRule.ALL_DONE,
    )

    (
        # TEST SETUP
        [
            create_bucket >> move_dataset_file,
            create_video_dataset,
        ]
        >> import_video_dataset
        # TEST BODY
        >> create_auto_ml_video_training_job
        # TEST TEARDOWN
        >> delete_auto_ml_video_training_job
        >> delete_video_dataset
        >> delete_bucket
    )

    from tests_common.test_utils.watcher import watcher

    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()


from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
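# NOTE: the assignment below is implied by the comment above and by the import of
# get_test_run, but is absent from this listing; it is what makes the example DAG
# discoverable when the module is run via pytest.
test_run = get_test_run(dag)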