## Licensed to the Apache Software Foundation (ASF) under one# or more contributor license agreements. See the NOTICE file# distributed with this work for additional information# regarding copyright ownership. The ASF licenses this file# to you under the Apache License, Version 2.0 (the# "License"); you may not use this file except in compliance# with the License. You may obtain a copy of the License at## http://www.apache.org/licenses/LICENSE-2.0## Unless required by applicable law or agreed to in writing,# software distributed under the License is distributed on an# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY# KIND, either express or implied. See the License for the# specific language governing permissions and limitations# under the License.importinspectimportosimportpickleimportshutilimportsysimporttypesimportwarningsfromtempfileimportTemporaryDirectoryfromtextwrapimportdedentfromtypingimportAny,Callable,Collection,Dict,Iterable,List,Mapping,Optional,Sequence,Unionimportdillfromairflow.exceptionsimportAirflowExceptionfromairflow.models.baseoperatorimportBaseOperatorfromairflow.models.skipmixinimportSkipMixinfromairflow.models.taskinstanceimport_CURRENT_CONTEXTfromairflow.utils.contextimportContext,context_copy_partial,context_mergefromairflow.utils.operator_helpersimportKeywordParametersfromairflow.utils.process_utilsimportexecute_in_subprocessfromairflow.utils.python_virtualenvimportprepare_virtualenv,write_python_script
[docs]deftask(python_callable:Optional[Callable]=None,multiple_outputs:Optional[bool]=None,**kwargs):""" Deprecated function that calls @task.python and allows users to turn a python function into an Airflow task. Please use the following instead: from airflow.decorators import task @task def my_task() :param python_callable: A reference to an object that is callable :param op_kwargs: a dictionary of keyword arguments that will get unpacked in your function (templated) :param op_args: a list of positional arguments that will get unpacked when calling your callable (templated) :param multiple_outputs: if set, function return value will be unrolled to multiple XCom values. Dict will unroll to xcom values with keys as keys. Defaults to False. :return: """# To maintain backwards compatibility, we import the task object into this file# This prevents breakages in dags that use `from airflow.operators.python import task`fromairflow.decorators.pythonimportpython_taskwarnings.warn("""airflow.operators.python.task is deprecated. Please use the following instead from airflow.decorators import task @task def my_task()""",DeprecationWarning,stacklevel=2,)returnpython_task(python_callable=python_callable,multiple_outputs=multiple_outputs,**kwargs)
[docs]classPythonOperator(BaseOperator):""" Executes a Python callable .. seealso:: For more information on how to use this operator, take a look at the guide: :ref:`howto/operator:PythonOperator` When running your callable, Airflow will pass a set of keyword arguments that can be used in your function. This set of kwargs correspond exactly to what you can use in your jinja templates. For this to work, you need to define ``**kwargs`` in your function header, or you can add directly the keyword arguments you would like to get - for example with the below code your callable will get the values of ``ti`` and ``next_ds`` context variables. With explicit arguments: .. code-block:: python def my_python_callable(ti, next_ds): pass With kwargs: .. code-block:: python def my_python_callable(**kwargs): ti = kwargs["ti"] next_ds = kwargs["next_ds"] :param python_callable: A reference to an object that is callable :param op_kwargs: a dictionary of keyword arguments that will get unpacked in your function :param op_args: a list of positional arguments that will get unpacked when calling your callable :param templates_dict: a dictionary where the values are templates that will get templated by the Airflow engine sometime between ``__init__`` and ``execute`` takes place and are made available in your callable's context after the template has been applied. (templated) :param templates_exts: a list of file extensions to resolve while processing templated fields, for examples ``['.sql', '.hql']`` :param show_return_value_in_logs: a bool value whether to show return_value logs. Defaults to True, which allows return value log output. It can be set to False to prevent log output of return value when you return huge data such as transmission a large amount of XCom to TaskAPI. """
def__init__(self,*,python_callable:Callable,op_args:Optional[Collection[Any]]=None,op_kwargs:Optional[Mapping[str,Any]]=None,templates_dict:Optional[Dict[str,Any]]=None,templates_exts:Optional[Sequence[str]]=None,show_return_value_in_logs:bool=True,**kwargs,)->None:ifkwargs.get("provide_context"):warnings.warn("provide_context is deprecated as of 2.0 and is no longer required",DeprecationWarning,stacklevel=2,)kwargs.pop('provide_context',None)super().__init__(**kwargs)ifnotcallable(python_callable):raiseAirflowException('`python_callable` param must be callable')self.python_callable=python_callableself.op_args=op_argsor()self.op_kwargs=op_kwargsor{}self.templates_dict=templates_dictiftemplates_exts:self.template_ext=templates_extsself.show_return_value_in_logs=show_return_value_in_logs
[docs]defexecute(self,context:Context)->Any:context_merge(context,self.op_kwargs,templates_dict=self.templates_dict)self.op_kwargs=self.determine_kwargs(context)return_value=self.execute_callable()ifself.show_return_value_in_logs:self.log.info("Done. Returned value was: %s",return_value)else:self.log.info("Done. Returned value not shown")returnreturn_value
[docs]defexecute_callable(self):""" Calls the python callable with the given arguments. :return: the return value of the call. :rtype: any """returnself.python_callable(*self.op_args,**self.op_kwargs)
[docs]classBranchPythonOperator(PythonOperator,SkipMixin):""" Allows a workflow to "branch" or follow a path following the execution of this task. It derives the PythonOperator and expects a Python function that returns a single task_id or list of task_ids to follow. The task_id(s) returned should point to a task directly downstream from {self}. All other "branches" or directly downstream tasks are marked with a state of ``skipped`` so that these paths can't move forward. The ``skipped`` states are propagated downstream to allow for the DAG state to fill up and the DAG run's state to be inferred. """
[docs]defexecute(self,context:Context)->Any:branch=super().execute(context)# TODO: The logic should be moved to SkipMixin to be available to all branch operators.ifisinstance(branch,str):branches={branch}elifisinstance(branch,list):branches=set(branch)elifbranchisNone:branches=set()else:raiseAirflowException("Branch callable must return either None, a task ID, or a list of IDs")valid_task_ids=set(context["dag"].task_ids)invalid_task_ids=branches-valid_task_idsifinvalid_task_ids:raiseAirflowException(f"Branch callable must return valid task_ids. Invalid tasks found: {invalid_task_ids}")self.skip_all_except(context['ti'],branch)returnbranch
[docs]classShortCircuitOperator(PythonOperator,SkipMixin):""" Allows a pipeline to continue based on the result of a ``python_callable``. The ShortCircuitOperator is derived from the PythonOperator and evaluates the result of a ``python_callable``. If the returned result is False or a falsy value, the pipeline will be short-circuited. Downstream tasks will be marked with a state of "skipped" based on the short-circuiting mode configured. If the returned result is True or a truthy value, downstream tasks proceed as normal and an ``XCom`` of the returned result is pushed. The short-circuiting can be configured to either respect or ignore the ``trigger_rule`` set for downstream tasks. If ``ignore_downstream_trigger_rules`` is set to True, the default setting, all downstream tasks are skipped without considering the ``trigger_rule`` defined for tasks. However, if this parameter is set to False, the direct downstream tasks are skipped but the specified ``trigger_rule`` for other subsequent downstream tasks are respected. In this mode, the operator assumes the direct downstream tasks were purposely meant to be skipped but perhaps not other subsequent tasks. .. seealso:: For more information on how to use this operator, take a look at the guide: :ref:`howto/operator:ShortCircuitOperator` :param ignore_downstream_trigger_rules: If set to True, all downstream tasks from this operator task will be skipped. This is the default behavior. If set to False, the direct, downstream task(s) will be skipped but the ``trigger_rule`` defined for a other downstream tasks will be respected. """def__init__(self,*,ignore_downstream_trigger_rules:bool=True,**kwargs)->None:super().__init__(**kwargs)self.ignore_downstream_trigger_rules=ignore_downstream_trigger_rules
[docs]defexecute(self,context:Context)->Any:condition=super().execute(context)self.log.info("Condition result is %s",condition)ifcondition:self.log.info('Proceeding with downstream tasks...')returnconditiondownstream_tasks=context['task'].get_flat_relatives(upstream=False)self.log.debug("Downstream task IDs %s",downstream_tasks)ifdownstream_tasks:dag_run=context["dag_run"]execution_date=dag_run.execution_dateifself.ignore_downstream_trigger_rulesisTrue:self.log.info("Skipping all downstream tasks...")self.skip(dag_run,execution_date,downstream_tasks)else:self.log.info("Skipping downstream tasks while respecting trigger rules...")# Explicitly setting the state of the direct, downstream task(s) to "skipped" and letting the# Scheduler handle the remaining downstream task(s) appropriately.self.skip(dag_run,execution_date,context["task"].get_direct_relatives(upstream=False))self.log.info("Done.")
[docs]classPythonVirtualenvOperator(PythonOperator):""" Allows one to run a function in a virtualenv that is created and destroyed automatically (with certain caveats). The function must be defined using def, and not be part of a class. All imports must happen inside the function and no variables outside of the scope may be referenced. A global scope variable named virtualenv_string_args will be available (populated by string_args). In addition, one can pass stuff through op_args and op_kwargs, and one can use a return value. Note that if your virtualenv runs in a different Python major version than Airflow, you cannot use return values, op_args, op_kwargs, or use any macros that are being provided to Airflow through plugins. You can use string_args though. .. seealso:: For more information on how to use this operator, take a look at the guide: :ref:`howto/operator:PythonVirtualenvOperator` :param python_callable: A python function with no references to outside variables, defined with def, which will be run in a virtualenv :param requirements: Either a list of requirement strings, or a (templated) "requirements file" as specified by pip. :param python_version: The Python version to run the virtualenv with. Note that both 2 and 2.7 are acceptable forms. :param use_dill: Whether to use dill to serialize the args and result (pickle is default). This allow more complex types but requires you to include dill in your requirements. :param system_site_packages: Whether to include system_site_packages in your virtualenv. See virtualenv documentation for more information. :param pip_install_options: a list of pip install options when installing requirements See 'pip install -h' for available options :param op_args: A list of positional arguments to pass to python_callable. :param op_kwargs: A dict of keyword arguments to pass to python_callable. :param string_args: Strings that are present in the global var virtualenv_string_args, available to python_callable at runtime as a list[str]. Note that args are split by newline. :param templates_dict: a dictionary where the values are templates that will get templated by the Airflow engine sometime between ``__init__`` and ``execute`` takes place and are made available in your callable's context after the template has been applied :param templates_exts: a list of file extensions to resolve while processing templated fields, for examples ``['.sql', '.hql']`` """
def__init__(self,*,python_callable:Callable,requirements:Union[None,Iterable[str],str]=None,python_version:Optional[Union[str,int,float]]=None,use_dill:bool=False,system_site_packages:bool=True,pip_install_options:Optional[List[str]]=None,op_args:Optional[Collection[Any]]=None,op_kwargs:Optional[Mapping[str,Any]]=None,string_args:Optional[Iterable[str]]=None,templates_dict:Optional[Dict]=None,templates_exts:Optional[List[str]]=None,**kwargs,):if(notisinstance(python_callable,types.FunctionType)orisinstance(python_callable,types.LambdaType)andpython_callable.__name__=="<lambda>"):raiseAirflowException('PythonVirtualenvOperator only supports functions for python_callable arg')if(python_versionandstr(python_version)[0]!=str(sys.version_info.major)and(op_argsorop_kwargs)):raiseAirflowException("Passing op_args or op_kwargs is not supported across different Python ""major versions for PythonVirtualenvOperator. Please use string_args.")ifnotshutil.which("virtualenv"):raiseAirflowException('PythonVirtualenvOperator requires virtualenv, please install it.')super().__init__(python_callable=python_callable,op_args=op_args,op_kwargs=op_kwargs,templates_dict=templates_dict,templates_exts=templates_exts,**kwargs,)ifnotrequirements:self.requirements:Union[List[str],str]=[]elifisinstance(requirements,str):self.requirements=requirementselse:self.requirements=list(requirements)self.string_args=string_argsor[]self.python_version=python_versionself.use_dill=use_dillself.system_site_packages=system_site_packagesself.pip_install_options=pip_install_optionsself.pickling_library=dillifself.use_dillelsepickle
[docs]defget_python_source(self):""" Returns the source of self.python_callable @return: """returndedent(inspect.getsource(self.python_callable))
def_write_args(self,filename):ifself.op_argsorself.op_kwargs:withopen(filename,'wb')asfile:self.pickling_library.dump({'args':self.op_args,'kwargs':self.op_kwargs},file)def_iter_serializable_context_keys(self):yield fromself.BASE_SERIALIZABLE_CONTEXT_KEYSifself.system_site_packagesor'apache-airflow'inself.requirements:yield fromself.AIRFLOW_SERIALIZABLE_CONTEXT_KEYSyield fromself.PENDULUM_SERIALIZABLE_CONTEXT_KEYSelif'pendulum'inself.requirements:yield fromself.PENDULUM_SERIALIZABLE_CONTEXT_KEYSdef_write_string_args(self,filename):withopen(filename,'w')asfile:file.write('\n'.join(map(str,self.string_args)))def_read_result(self,filename):ifos.stat(filename).st_size==0:returnNonewithopen(filename,'rb')asfile:try:returnself.pickling_library.load(file)exceptValueError:self.log.error("Error deserializing result. Note that result deserialization ""is not supported across major Python versions.")raise
[docs]def__deepcopy__(self,memo):# module objects can't be copied _at all__memo[id(self.pickling_library)]=self.pickling_libraryreturnsuper().__deepcopy__(memo)
[docs]defget_current_context()->Context:""" Obtain the execution context for the currently executing operator without altering user method's signature. This is the simplest method of retrieving the execution context dictionary. **Old style:** .. code:: python def my_task(**context): ti = context["ti"] **New style:** .. code:: python from airflow.operators.python import get_current_context def my_task(): context = get_current_context() ti = context["ti"] Current context will only have value if this method was called after an operator was starting to execute. """ifnot_CURRENT_CONTEXT:raiseAirflowException("Current context was requested but no context was found! ""Are you running within an airflow task?")return_CURRENT_CONTEXT[-1]