Source code for airflow.providers.google.cloud.sensors.gcs
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Cloud Storage sensors."""

from __future__ import annotations

import os
import textwrap
from datetime import datetime, timedelta
from typing import TYPE_CHECKING, Any, Callable, Sequence

from google.cloud.storage.retry import DEFAULT_RETRY

from airflow.configuration import conf
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.google.cloud.triggers.gcs import (
    GCSBlobTrigger,
    GCSCheckBlobUpdateTimeTrigger,
    GCSPrefixBlobTrigger,
    GCSUploadSessionTrigger,
)
from airflow.providers.google.common.deprecated import deprecated
from airflow.sensors.base import BaseSensorOperator, poke_mode_only

if TYPE_CHECKING:
    from google.api_core.retry import Retry

    from airflow.utils.context import Context
class GCSObjectExistenceSensor(BaseSensorOperator):
    """
    Checks for the existence of a file in Google Cloud Storage.

    :param bucket: The Google Cloud Storage bucket where the object is.
    :param object: The name of the object to check in the Google Cloud Storage bucket.
    :param use_glob: When set to True, the object parameter is interpreted as a glob pattern.
    :param google_cloud_conn_id: The connection ID to use when
        connecting to Google Cloud Storage.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity,
        with first account from the list granting this role to the originating account (templated).
    :param retry: (Optional) How to retry the RPC.
    """
    def poke(self, context: Context) -> bool:
        self.log.info("Sensor checks existence of: %s, %s", self.bucket, self.object)
        hook = GCSHook(
            gcp_conn_id=self.google_cloud_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        self._matches = (
            bool(hook.list(self.bucket, match_glob=self.object))
            if self.use_glob
            else hook.exists(self.bucket, self.object, self.retry)
        )
        return self._matches
    def execute(self, context: Context):
        """Airflow runs this method on the worker and defers using the trigger."""
        if self.deferrable:
            if not self.poke(context=context):
                self.defer(
                    timeout=timedelta(seconds=self.timeout),
                    trigger=GCSBlobTrigger(
                        bucket=self.bucket,
                        object_name=self.object,
                        use_glob=self.use_glob,
                        poke_interval=self.poke_interval,
                        google_cloud_conn_id=self.google_cloud_conn_id,
                        hook_params={
                            "impersonation_chain": self.impersonation_chain,
                        },
                    ),
                    method_name="execute_complete",
                )
        else:
            super().execute(context)
        return self._matches
    def execute_complete(self, context: Context, event: dict[str, str]) -> bool:
        """
        Act as a callback for when the trigger fires; return immediately.

        Relies on the trigger to throw an exception, otherwise it assumes execution was successful.
        """
        if event["status"] == "error":
            raise AirflowException(event["message"])
        self.log.info("File %s was found in bucket %s.", self.object, self.bucket)
        return True
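A minimal usage sketch (illustrative, not part of the module source): a DAG that waits for any CSV under a prefix by treating the object parameter as a glob, deferring to GCSBlobTrigger so no worker slot is held while waiting. The DAG id, bucket, and pattern are placeholder assumptions.

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.sensors.gcs import GCSObjectExistenceSensor

    with DAG(dag_id="example_gcs_existence", start_date=datetime(2024, 1, 1), schedule=None):
        wait_for_report = GCSObjectExistenceSensor(
            task_id="wait_for_report",
            bucket="example-bucket",   # placeholder bucket name
            object="reports/*.csv",    # treated as a glob because use_glob=True
            use_glob=True,
            deferrable=True,           # defer to GCSBlobTrigger instead of blocking a worker
        )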
@deprecated(
    planned_removal_date="November 01, 2024",
    use_instead="GCSObjectExistenceSensor",
    instructions="Please use GCSObjectExistenceSensor and set deferrable attribute to True.",
    category=AirflowProviderDeprecationWarning,
)
class GCSObjectExistenceAsyncSensor(GCSObjectExistenceSensor):
    """
    Checks for the existence of a file in Google Cloud Storage.

    This class is deprecated and will be removed in a future release.

    Please use :class:`airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor`
    and set *deferrable* attribute to *True* instead.

    :param bucket: The Google Cloud Storage bucket where the object is.
    :param object: The name of the object to check in the Google Cloud Storage bucket.
    :param google_cloud_conn_id: The connection ID to use when connecting to Google Cloud Storage.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity,
        with first account from the list granting this role to the originating account (templated).
    """

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(deferrable=True, **kwargs)
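Migration is mechanical, per the deprecation instructions above: the replacement is the plain sensor with deferrable=True. A sketch with placeholder names:

    # Deprecated:
    #     GCSObjectExistenceAsyncSensor(task_id="wait", bucket="example-bucket", object="data.csv")
    # Replacement:
    GCSObjectExistenceSensor(
        task_id="wait",
        bucket="example-bucket",
        object="data.csv",
        deferrable=True,
    )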
def ts_function(context):
    """
    Act as a default callback for the GoogleCloudStorageObjectUpdatedSensor.

    The default behaviour is to check for the object being updated after the data interval's end,
    or execution_date + interval on Airflow versions prior to 2.2 (before AIP-39 implementation).
    """
    try:
        return context["data_interval_end"]
    except KeyError:
        from airflow.utils import timezone

        data_interval = context["dag"].infer_automated_data_interval(
            timezone.coerce_datetime(context["execution_date"])
        )
        next_info = context["dag"].next_dagrun_info(data_interval, restricted=False)
        if next_info is None:
            return None
        return next_info.data_interval.start
class GCSObjectUpdateSensor(BaseSensorOperator):
    """
    Checks if an object is updated in Google Cloud Storage.

    :param bucket: The Google Cloud Storage bucket where the object is.
    :param object: The name of the object to download in the Google Cloud Storage bucket.
    :param ts_func: Callback for defining the update condition. The default callback
        returns execution_date + schedule_interval. The callback takes the context
        as parameter.
    :param google_cloud_conn_id: The connection ID to use when
        connecting to Google Cloud Storage.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity,
        with first account from the list granting this role to the originating account (templated).
    :param deferrable: Run sensor in deferrable mode
    """
    def poke(self, context: Context) -> bool:
        self.log.info("Sensor checks last-updated time of: %s, %s", self.bucket, self.object)
        hook = GCSHook(
            gcp_conn_id=self.google_cloud_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        return hook.is_updated_after(self.bucket, self.object, self.ts_func(context))
    def execute(self, context: Context) -> None:
        """Airflow runs this method on the worker and defers using the trigger."""
        if self.deferrable is False:
            super().execute(context)
        else:
            if not self.poke(context=context):
                self.defer(
                    timeout=timedelta(seconds=self.timeout),
                    trigger=GCSCheckBlobUpdateTimeTrigger(
                        bucket=self.bucket,
                        object_name=self.object,
                        target_date=self.ts_func(context),
                        poke_interval=self.poke_interval,
                        google_cloud_conn_id=self.google_cloud_conn_id,
                        hook_params={
                            "impersonation_chain": self.impersonation_chain,
                        },
                    ),
                    method_name="execute_complete",
                )
    def execute_complete(self, context: dict[str, Any], event: dict[str, str] | None = None) -> str:
        """Return immediately and rely on trigger to throw a success event. Callback for the trigger."""
        if event:
            if event["status"] == "success":
                self.log.info(
                    "Checking last updated time for object %s in bucket: %s", self.object, self.bucket
                )
                return event["message"]
            raise AirflowException(event["message"])
        message = "No event received in trigger callback"
        raise AirflowException(message)
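A usage sketch with placeholder names: left alone, ts_func defaults to the ts_function above (update after the data interval's end); passing a custom callback pins any other cutoff. The fixed date below is an arbitrary assumption.

    from datetime import datetime, timezone

    from airflow.providers.google.cloud.sensors.gcs import GCSObjectUpdateSensor

    wait_for_refresh = GCSObjectUpdateSensor(
        task_id="wait_for_refresh",
        bucket="example-bucket",      # placeholder
        object="models/latest.bin",   # placeholder
        # Custom update condition: succeed once the object was modified after this cutoff.
        ts_func=lambda context: datetime(2024, 6, 1, tzinfo=timezone.utc),
        deferrable=True,
    )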
class GCSObjectsWithPrefixExistenceSensor(BaseSensorOperator):
    """
    Checks for the existence of GCS objects at a given prefix, passing matches via XCom.

    When files matching the given prefix are found, the poke method's criteria will be
    fulfilled and the matching objects will be returned from the operator and passed
    through XCom for downstream tasks.

    :param bucket: The Google Cloud Storage bucket where the object is.
    :param prefix: The name of the prefix to check in the Google Cloud Storage bucket.
    :param google_cloud_conn_id: The connection ID to use when
        connecting to Google Cloud Storage.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity,
        with first account from the list granting this role to the originating account (templated).
    :param deferrable: Run sensor in deferrable mode
    """
    def poke(self, context: Context) -> bool:
        self.log.info("Checking for existence of objects under prefix: %s, %s", self.bucket, self.prefix)
        hook = GCSHook(
            gcp_conn_id=self.google_cloud_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        self._matches = hook.list(self.bucket, prefix=self.prefix)
        return bool(self._matches)
    def execute(self, context: Context):
        """Overridden to allow matches to be passed."""
        self.log.info("Checking for existence of objects under prefix: %s, %s", self.bucket, self.prefix)
        if not self.deferrable:
            super().execute(context)
            return self._matches
        else:
            if not self.poke(context=context):
                self.defer(
                    timeout=timedelta(seconds=self.timeout),
                    trigger=GCSPrefixBlobTrigger(
                        bucket=self.bucket,
                        prefix=self.prefix,
                        poke_interval=self.poke_interval,
                        google_cloud_conn_id=self.google_cloud_conn_id,
                        hook_params={
                            "impersonation_chain": self.impersonation_chain,
                        },
                    ),
                    method_name="execute_complete",
                )
            else:
                return self._matches
    def execute_complete(
        self, context: dict[str, Any], event: dict[str, str | list[str]]
    ) -> str | list[str]:
        """Return immediately and rely on trigger to throw a success event. Callback for the trigger."""
        self.log.info("Resuming from trigger and checking status")
        if event["status"] == "success":
            return event["matches"]
        raise AirflowException(event["message"])
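Because the matches are returned from execute (or from the trigger's "matches" payload), they land in XCom, so a downstream task can consume them directly. A sketch with placeholder names, using the TaskFlow API:

    from airflow.decorators import task
    from airflow.providers.google.cloud.sensors.gcs import GCSObjectsWithPrefixExistenceSensor

    wait_for_batch = GCSObjectsWithPrefixExistenceSensor(
        task_id="wait_for_batch",
        bucket="example-bucket",          # placeholder
        prefix="incoming/2024-06-01/",    # placeholder
    )

    @task
    def process(matches: list) -> None:
        # Receives the list of matching object names via XCom.
        for name in matches:
            print(name)

    process(wait_for_batch.output)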
def get_time():
    """Act as a wrapper around datetime.datetime.now to simplify mocking in unit tests."""
    return datetime.now()
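Because get_time is a module-level attribute, unit tests can patch it to make the inactivity arithmetic below deterministic. A sketch (the target path is this module; the frozen timestamp is arbitrary):

    from datetime import datetime
    from unittest import mock

    with mock.patch(
        "airflow.providers.google.cloud.sensors.gcs.get_time",
        return_value=datetime(2024, 6, 1, 12, 0, 0),
    ):
        ...  # exercise GCSUploadSessionCompleteSensor.is_bucket_updated here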
@poke_mode_only
class GCSUploadSessionCompleteSensor(BaseSensorOperator):
    """
    Return True if the inactivity period has passed with no increase in the number of objects in the bucket.

    Checks for changes in the number of objects at prefix in Google Cloud Storage
    bucket and returns True if the inactivity period has passed with no increase in the
    number of objects. Note, this sensor will not behave correctly in reschedule mode,
    as the state of the listed objects in the GCS bucket will be lost between rescheduled invocations.

    :param bucket: The Google Cloud Storage bucket where the objects are expected.
    :param prefix: The name of the prefix to check in the Google Cloud Storage bucket.
    :param inactivity_period: The total seconds of inactivity to designate
        an upload session is over. Note, this mechanism is not real time and
        this operator may not return until a poke_interval after this period
        has passed with no additional objects sensed.
    :param min_objects: The minimum number of objects needed for upload session
        to be considered valid.
    :param previous_objects: The set of object ids found during the last poke.
    :param allow_delete: Should this sensor consider objects being deleted
        between pokes valid behavior. If True, a warning message will be logged
        when this happens. If False, an error will be raised.
    :param google_cloud_conn_id: The connection ID to use when connecting
        to Google Cloud Storage.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity,
        with first account from the list granting this role to the originating account (templated).
    :param deferrable: Run sensor in deferrable mode
    """
    def __init__(
        self,
        bucket: str,
        prefix: str,
        inactivity_period: float = 60 * 60,
        min_objects: int = 1,
        previous_objects: set[str] | None = None,
        allow_delete: bool = True,
        google_cloud_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)

        self.bucket = bucket
        self.prefix = prefix
        if inactivity_period < 0:
            raise ValueError("inactivity_period must be non-negative")
        self.inactivity_period = inactivity_period
        self.min_objects = min_objects
        self.previous_objects = previous_objects or set()
        self.inactivity_seconds = 0
        self.allow_delete = allow_delete
        self.google_cloud_conn_id = google_cloud_conn_id
        self.last_activity_time = None
        self.impersonation_chain = impersonation_chain
        self.hook: GCSHook | None = None
        self.deferrable = deferrable

    def _get_gcs_hook(self) -> GCSHook | None:
        if not self.hook:
            self.hook = GCSHook(
                gcp_conn_id=self.google_cloud_conn_id,
                impersonation_chain=self.impersonation_chain,
            )
        return self.hook
    def is_bucket_updated(self, current_objects: set[str]) -> bool:
        """
        Check whether new objects have been added and the inactivity_period has passed, and update the state.

        :param current_objects: set of object ids found in the bucket during the current poke.
        """
        current_num_objects = len(current_objects)
        if current_objects > self.previous_objects:
            # When new objects arrived, reset the inactivity_seconds
            # and update previous_objects for the next poke.
            self.log.info(
                "New objects found at %s, resetting last_activity_time.",
                os.path.join(self.bucket, self.prefix),
            )
            self.log.debug("New objects: %s", "\n".join(current_objects - self.previous_objects))
            self.last_activity_time = get_time()
            self.inactivity_seconds = 0
            self.previous_objects = current_objects
            return False

        if self.previous_objects - current_objects:
            # During the last poke interval objects were deleted.
            if self.allow_delete:
                # Capture the deleted set before overwriting previous_objects,
                # so the warning actually lists what was removed.
                deleted_objects = self.previous_objects - current_objects
                self.previous_objects = current_objects
                self.last_activity_time = get_time()
                self.log.warning(
                    textwrap.dedent(
                        """\
                        Objects were deleted during the last poke interval. Updating the
                        file counter and resetting last_activity_time.
                        %s\
                        """
                    ),
                    deleted_objects,
                )
                return False

            message = (
                "Illegal behavior: objects were deleted in "
                f"{os.path.join(self.bucket, self.prefix)} between pokes."
            )
            raise AirflowException(message)

        if self.last_activity_time:
            self.inactivity_seconds = (get_time() - self.last_activity_time).total_seconds()
        else:
            # Handles the first poke where last inactivity time is None.
            self.last_activity_time = get_time()
            self.inactivity_seconds = 0

        if self.inactivity_seconds >= self.inactivity_period:
            path = os.path.join(self.bucket, self.prefix)
            if current_num_objects >= self.min_objects:
                self.log.info(
                    textwrap.dedent(
                        """\
                        SUCCESS:
                        Sensor found %s objects at %s.
                        Waited at least %s seconds, with no new objects dropped.
                        """
                    ),
                    current_num_objects,
                    path,
                    self.inactivity_period,
                )
                return True

            self.log.error("FAILURE: Inactivity period passed, not enough objects found in %s", path)

            return False
        return False
    def execute(self, context: Context) -> None:
        """Airflow runs this method on the worker and defers using the trigger."""
        hook_params = {"impersonation_chain": self.impersonation_chain}

        if not self.deferrable:
            return super().execute(context)

        if not self.poke(context=context):
            self.defer(
                timeout=timedelta(seconds=self.timeout),
                trigger=GCSUploadSessionTrigger(
                    bucket=self.bucket,
                    prefix=self.prefix,
                    poke_interval=self.poke_interval,
                    google_cloud_conn_id=self.google_cloud_conn_id,
                    inactivity_period=self.inactivity_period,
                    min_objects=self.min_objects,
                    previous_objects=self.previous_objects,
                    allow_delete=self.allow_delete,
                    hook_params=hook_params,
                ),
                method_name="execute_complete",
            )
    def execute_complete(self, context: dict[str, Any], event: dict[str, str] | None = None) -> str:
        """
        Rely on trigger to throw an exception, otherwise it assumes execution was successful.

        Callback for when the trigger fires; returns immediately.
        """
        if event:
            if event["status"] == "success":
                return event["message"]
            raise AirflowException(event["message"])

        message = "No event received in trigger callback"
        raise AirflowException(message)
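A usage sketch with placeholder names and thresholds: the session counts as complete once at least min_objects objects sit under the prefix and no new object has arrived for inactivity_period seconds. Since is_bucket_updated keeps previous_objects in task memory, the sensor must stay in poke mode (enforced by @poke_mode_only above) rather than reschedule mode.

    from airflow.providers.google.cloud.sensors.gcs import GCSUploadSessionCompleteSensor

    wait_for_upload_session = GCSUploadSessionCompleteSensor(
        task_id="wait_for_upload_session",
        bucket="example-bucket",        # placeholder
        prefix="uploads/session-1/",    # placeholder
        inactivity_period=15 * 60,      # declare the session over after 15 quiet minutes
        min_objects=10,                 # but only if at least 10 objects arrived
        allow_delete=False,             # deletions between pokes raise AirflowException
        poke_interval=60,               # check once a minute
    )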