Dataset columns:
- repo_name: string, 4 distinct values
- method_name: string, 3 to 72 characters
- method_code: string, 87 to 3.59k characters
- method_summary: string, 12 to 196 characters
- original_method_code: string, 129 to 8.98k characters
- method_path: string, 15 to 136 characters

Each record below lists repo_name, method_name, method_code (truncated), method_summary, original_method_code (truncated), and method_path, in that order.
apache/airflow
DatastoreHook.lookup
def lookup(self, keys, read_consistency=None, transaction=None): conn = self.get_conn() body = {'keys': keys} if read_consistency: body['readConsistency'] = read_consistency if transaction: body['transaction'] = transaction resp = (conn .p...
Lookup some entities by key.
def lookup(self, keys, read_consistency=None, transaction=None): """ Lookup some entities by key. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/lookup :param keys: the keys to lookup. :type keys: list :param read_consistency...
airflow/contrib/hooks/datastore_hook.py
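To make the lookup call above concrete, a minimal usage sketch; the project id, kind, and entity name are placeholders, and keys follow the Cloud Datastore REST API Key format:

```python
from airflow.contrib.hooks.datastore_hook import DatastoreHook

hook = DatastoreHook()  # uses the default Google Cloud connection
keys = [{
    'partitionId': {'projectId': 'my-project'},          # placeholder project
    'path': [{'kind': 'Task', 'name': 'sample_task'}],   # placeholder entity
}]
result = hook.lookup(keys=keys, read_consistency='EVENTUAL')
```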
apache/airflow
DatastoreHook.rollback
def rollback(self, transaction): conn = self.get_conn() conn.projects().rollback( projectId=self.project_id, body={'transaction': transaction} ).execute(num_retries=self.num_retries)
Roll back a transaction.
def rollback(self, transaction): """ Roll back a transaction. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/rollback :param transaction: the transaction to roll back. :type transaction: str """ conn = self.get_conn()...
airflow/contrib/hooks/datastore_hook.py
apache/airflow
DatastoreHook.run_query
def run_query(self, body): conn = self.get_conn() resp = (conn .projects() .runQuery(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp['batch']
Run a query for entities.
def run_query(self, body): """ Run a query for entities. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/runQuery :param body: the body of the query request. :type body: dict :return: the batch of query results. :rtype...
airflow/contrib/hooks/datastore_hook.py
apache/airflow
DatastoreHook.get_operation
def get_operation(self, name): conn = self.get_conn() resp = (conn .projects() .operations() .get(name=name) .execute(num_retries=self.num_retries)) return resp
Gets the latest state of a long-running operation.
def get_operation(self, name): """ Gets the latest state of a long-running operation. .. seealso:: https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/get :param name: the name of the operation resource. :type name: str :return...
airflow/contrib/hooks/datastore_hook.py
apache/airflow
DatastoreHook.delete_operation
def delete_operation(self, name): conn = self.get_conn() resp = (conn .projects() .operations() .delete(name=name) .execute(num_retries=self.num_retries)) return resp
Deletes the long-running operation.
def delete_operation(self, name): """ Deletes the long-running operation. .. seealso:: https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/delete :param name: the name of the operation resource. :type name: str :return: none if...
airflow/contrib/hooks/datastore_hook.py
apache/airflow
DatastoreHook.poll_operation_until_done
def poll_operation_until_done(self, name, polling_interval_in_seconds): while True: result = self.get_operation(name) state = result['metadata']['common']['state'] if state == 'PROCESSING': self.log.info('Operation is processing. Re-polling state in {} second...
Poll backup operation state until it's completed.
def poll_operation_until_done(self, name, polling_interval_in_seconds): """ Poll backup operation state until it's completed. :param name: the name of the operation resource :type name: str :param polling_interval_in_seconds: The number of seconds to wait before calling another ...
airflow/contrib/hooks/datastore_hook.py
apache/airflow
DatastoreHook.export_to_storage_bucket
def export_to_storage_bucket(self, bucket, namespace=None, entity_filter=None, labels=None): admin_conn = self.get_conn() output_uri_prefix = 'gs://' + '/'.join(filter(None, [bucket, namespace])) if not entity_filter: entity_filter = {} if not labels: labels = {}...
Export entities from Cloud Datastore to Cloud Storage for backup.
def export_to_storage_bucket(self, bucket, namespace=None, entity_filter=None, labels=None): """ Export entities from Cloud Datastore to Cloud Storage for backup. .. note:: Keep in mind that this requests the Admin API not the Data API. .. seealso:: https://clou...
airflow/contrib/hooks/datastore_hook.py
apache/airflow
DatastoreHook.import_from_storage_bucket
def import_from_storage_bucket(self, bucket, file, namespace=None, entity_filter=None, labels=None): admin_conn = self.get_conn() input_url = 'gs://' + '/'.join(filter(None, [bucket, namespace, file])) if not entity_filter: entity_filter = {} if not labels: label...
Import a backup from Cloud Storage to Cloud Datastore.
def import_from_storage_bucket(self, bucket, file, namespace=None, entity_filter=None, labels=None): """ Import a backup from Cloud Storage to Cloud Datastore. .. note:: Keep in mind that this requests the Admin API not the Data API. .. seealso:: https://cloud.g...
airflow/contrib/hooks/datastore_hook.py
apache/airflow
AwsSnsHook.publish_to_target
def publish_to_target(self, target_arn, message): conn = self.get_conn() messages = { 'default': message } return conn.publish( TargetArn=target_arn, Message=json.dumps(messages), MessageStructure='json' )
Publish a message to a topic or an endpoint.
def publish_to_target(self, target_arn, message): """ Publish a message to a topic or an endpoint. :param target_arn: either a TopicArn or an EndpointArn :type target_arn: str :param message: the default message you want to send :param message: str """ c...
airflow/contrib/hooks/aws_sns_hook.py
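A minimal usage sketch for publish_to_target; the connection id and topic ARN are placeholders:

```python
from airflow.contrib.hooks.aws_sns_hook import AwsSnsHook

hook = AwsSnsHook(aws_conn_id='aws_default')  # placeholder connection id
hook.publish_to_target(
    target_arn='arn:aws:sns:us-east-1:123456789012:my-topic',  # placeholder ARN
    message='build finished',
)
```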
apache/airflow
get_hostname
def get_hostname(): try: callable_path = conf.get('core', 'hostname_callable') except AirflowConfigException: callable_path = None if not callable_path: return socket.getfqdn() module_path, attr_name = callable_path.split(':') module = importlib.import_m...
Fetch the hostname using the callable from the config or using `socket.getfqdn` as a fallback.
def get_hostname(): """ Fetch the hostname using the callable from the config or using `socket.getfqdn` as a fallback. """ # First we attempt to fetch the callable path from the config. try: callable_path = conf.get('core', 'hostname_callable') except AirflowConfigException: ...
airflow/utils/net.py
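The configured callable is a 'module:attribute' path; a small sketch of the lookup get_hostname performs, using an illustrative value for hostname_callable:

```python
import importlib

# Illustrative value for [core] hostname_callable in airflow.cfg.
callable_path = 'socket:gethostname'
module_path, attr_name = callable_path.split(':')
hostname = getattr(importlib.import_module(module_path), attr_name)()
print(hostname)  # with no value configured, get_hostname() falls back to socket.getfqdn()
```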
apache/airflow
CloudNaturalLanguageHook.get_conn
def get_conn(self): if not self._conn: self._conn = LanguageServiceClient(credentials=self._get_credentials()) return self._conn
Retrieves connection to Cloud Natural Language service.
def get_conn(self): """ Retrieves connection to Cloud Natural Language service. :return: Cloud Natural Language service object :rtype: google.cloud.language_v1.LanguageServiceClient """ if not self._conn: self._conn = LanguageServiceClient(credentials=self._g...
airflow/contrib/hooks/gcp_natural_language_hook.py
apache/airflow
CloudNaturalLanguageHook.analyze_entities
def analyze_entities(self, document, encoding_type=None, retry=None, timeout=None, metadata=None): client = self.get_conn() return client.analyze_entities( document=document, encoding_type=encoding_type, retry=retry, timeout=timeout, metadata=metadata )
Finds named entities in the text along with entity types, salience, mentions for each entity, and other properties.
def analyze_entities(self, document, encoding_type=None, retry=None, timeout=None, metadata=None): """ Finds named entities in the text along with entity types, salience, mentions for each entity, and other properties. :param document: Input document. If a dict is provided, ...
airflow/contrib/hooks/gcp_natural_language_hook.py
apache/airflow
CloudNaturalLanguageHook.annotate_text
def annotate_text(self, document, features, encoding_type=None, retry=None, timeout=None, metadata=None): client = self.get_conn() return client.annotate_text( document=document, features=features, encoding_type=encoding_type, retry=retry, tim...
A convenience method that provides all the features that analyzeSentiment, analyzeEntities, and analyzeSyntax provide in one call.
def annotate_text(self, document, features, encoding_type=None, retry=None, timeout=None, metadata=None): """ A convenience method that provides all the features that analyzeSentiment, analyzeEntities, and analyzeSyntax provide in one call. :param document: Input document. I...
airflow/contrib/hooks/gcp_natural_language_hook.py
apache/airflow
CloudNaturalLanguageHook.classify_text
def classify_text(self, document, retry=None, timeout=None, metadata=None): client = self.get_conn() return client.classify_text(document=document, retry=retry, timeout=timeout, metadata=metadata)
Classifies a document into categories.
def classify_text(self, document, retry=None, timeout=None, metadata=None): """ Classifies a document into categories. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or class google.cl...
airflow/contrib/hooks/gcp_natural_language_hook.py
apache/airflow
get_template_field
def get_template_field(env, fullname): modname, classname = fullname.rsplit(".", 1) try: with mock(env.config.autodoc_mock_imports): mod = import_module(modname) except ImportError: raise RoleException("Error loading %s module." % (modname, )) clazz = getattr(mod, classname...
Gets template fields for specific operator class.
def get_template_field(env, fullname): """ Gets template fields for specific operator class. :param fullname: Full path to operator class. For example: ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator`` :return: List of template field :rtype: list[str] ...
docs/exts/docroles.py
apache/airflow
dispose_orm
def dispose_orm(): log.debug("Disposing DB connection pool (PID %s)", os.getpid()) global engine global Session if Session: Session.remove() Session = None if engine: engine.dispose() engine = None
Properly close pooled database connections
def dispose_orm(): """ Properly close pooled database connections """ log.debug("Disposing DB connection pool (PID %s)", os.getpid()) global engine global Session if Session: Session.remove() Session = None if engine: engine.dispose() engine = None
airflow/settings.py
apache/airflow
prepare_classpath
def prepare_classpath(): if DAGS_FOLDER not in sys.path: sys.path.append(DAGS_FOLDER) config_path = os.path.join(AIRFLOW_HOME, 'config') if config_path not in sys.path: sys.path.append(config_path) if PLUGINS_FOLDER not in sys.path: sys.path.append(PLUGINS_FOLDER)
Ensures that certain subfolders of AIRFLOW_HOME are on the classpath
def prepare_classpath(): """ Ensures that certain subfolders of AIRFLOW_HOME are on the classpath """ if DAGS_FOLDER not in sys.path: sys.path.append(DAGS_FOLDER) # Add ./config/ for loading custom log parsers etc, or # airflow_local_settings etc. config_path = os.path.join(AIRFLOW...
airflow/settings.py
apache/airflow
CeleryQueueSensor._check_task_id
def _check_task_id(self, context): ti = context['ti'] celery_result = ti.xcom_pull(task_ids=self.target_task_id) return celery_result.ready()
Gets the returned Celery result from the Airflow task ID provided to the sensor, and returns True if the Celery result has finished execution.
def _check_task_id(self, context): """ Gets the returned Celery result from the Airflow task ID provided to the sensor, and returns True if the celery result has been finished execution. :param context: Airflow's execution context :type context: dict :return: Tru...
airflow/contrib/sensors/celery_queue_sensor.py
apache/airflow
alchemy_to_dict
def alchemy_to_dict(obj): if not obj: return None d = {} for c in obj.__table__.columns: value = getattr(obj, c.name) if type(value) == datetime: value = value.isoformat() d[c.name] = value return d
Transforms a SQLAlchemy model instance into a dictionary
def alchemy_to_dict(obj): """ Transforms a SQLAlchemy model instance into a dictionary """ if not obj: return None d = {} for c in obj.__table__.columns: value = getattr(obj, c.name) if type(value) == datetime: value = value.isoformat() d[c.name] = val...
airflow/utils/helpers.py
apache/airflow
chunks
def chunks(items, chunk_size): if chunk_size <= 0: raise ValueError('Chunk size must be a positive integer') for i in range(0, len(items), chunk_size): yield items[i:i + chunk_size]
Yield successive chunks of a given size from a list of items
def chunks(items, chunk_size): """ Yield successive chunks of a given size from a list of items """ if chunk_size <= 0: raise ValueError('Chunk size must be a positive integer') for i in range(0, len(items), chunk_size): yield items[i:i + chunk_size]
airflow/utils/helpers.py
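A quick illustration of chunks on a small list:

```python
from airflow.utils.helpers import chunks

print(list(chunks([1, 2, 3, 4, 5], 2)))  # [[1, 2], [3, 4], [5]]
# chunks([1, 2, 3], 0) raises ValueError('Chunk size must be a positive integer')
```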
apache/airflow
reduce_in_chunks
def reduce_in_chunks(fn, iterable, initializer, chunk_size=0): if len(iterable) == 0: return initializer if chunk_size == 0: chunk_size = len(iterable) return reduce(fn, chunks(iterable, chunk_size), initializer)
Reduce the given list of items by splitting it into chunks of the given size and passing each chunk through the reducer
def reduce_in_chunks(fn, iterable, initializer, chunk_size=0): """ Reduce the given list of items by splitting it into chunks of the given size and passing each chunk through the reducer """ if len(iterable) == 0: return initializer if chunk_size == 0: chunk_size = len(iterable) ...
airflow/utils/helpers.py
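A worked example of reduce_in_chunks, folding each two-item chunk into a running total:

```python
from airflow.utils.helpers import reduce_in_chunks

total = reduce_in_chunks(lambda acc, chunk: acc + sum(chunk),
                         [1, 2, 3, 4, 5], 0, chunk_size=2)
print(total)  # 0 + (1+2) + (3+4) + (5) = 15
```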
apache/airflow
chain
def chain(*tasks): for up_task, down_task in zip(tasks[:-1], tasks[1:]): up_task.set_downstream(down_task)
Given a number of tasks, builds a dependency chain. chain(task_1, task_2, task_3, task_4) is equivalent to task_1.set_downstream(task_2); task_2.set_downstream(task_3); task_3.set_downstream(task_4)
def chain(*tasks): """ Given a number of tasks, builds a dependency chain. chain(task_1, task_2, task_3, task_4) is equivalent to task_1.set_downstream(task_2) task_2.set_downstream(task_3) task_3.set_downstream(task_4) """ for up_task, down_task in zip(tasks[:-1], tasks[1:]): ...
airflow/utils/helpers.py
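A minimal DAG sketch showing chain in use; the dag id and task ids are hypothetical:

```python
from datetime import datetime

from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils.helpers import chain

with DAG('chain_demo', start_date=datetime(2019, 1, 1), schedule_interval=None) as dag:
    t1, t2, t3 = [DummyOperator(task_id='t{}'.format(i)) for i in (1, 2, 3)]
    chain(t1, t2, t3)  # same as t1.set_downstream(t2); t2.set_downstream(t3)
```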
apache/airflow
render_log_filename
def render_log_filename(ti, try_number, filename_template): filename_template, filename_jinja_template = parse_template_string(filename_template) if filename_jinja_template: jinja_context = ti.get_template_context() jinja_context['try_number'] = try_number return filename_jinja_template....
Given task instance, try_number, filename_template, return the rendered log filename
def render_log_filename(ti, try_number, filename_template): """ Given task instance, try_number, filename_template, return the rendered log filename :param ti: task instance :param try_number: try_number of the task :param filename_template: filename template, which can be jinja template or ...
airflow/utils/helpers.py
apache/airflow
DataProcHook.wait
def wait(self, operation): submitted = _DataProcOperation(self.get_conn(), operation, self.num_retries) submitted.wait_for_done()
Waits for a Google Cloud Dataproc Operation to complete.
def wait(self, operation): """Awaits for Google Cloud Dataproc Operation to complete.""" submitted = _DataProcOperation(self.get_conn(), operation, self.num_retries) submitted.wait_for_done()
airflow/contrib/hooks/gcp_dataproc_hook.py
apache/airflow
_handle_databricks_operator_execution
def _handle_databricks_operator_execution(operator, hook, log, context): if operator.do_xcom_push: context['ti'].xcom_push(key=XCOM_RUN_ID_KEY, value=operator.run_id) log.info('Run submitted with run_id: %s', operator.run_id) run_page_url = hook.get_run_page_url(operator.run_id) if operator.do_x...
Handles the Airflow + Databricks lifecycle logic for a Databricks operator
def _handle_databricks_operator_execution(operator, hook, log, context): """ Handles the Airflow + Databricks lifecycle logic for a Databricks operator :param operator: Databricks operator being handled :param context: Airflow context """ if operator.do_xcom_push: context['ti'].xcom_pus...
airflow/contrib/operators/databricks_operator.py
apache/airflow
PigCliHook.run_cli
def run_cli(self, pig, verbose=True): with TemporaryDirectory(prefix='airflow_pigop_') as tmp_dir: with NamedTemporaryFile(dir=tmp_dir) as f: f.write(pig.encode('utf-8')) f.flush() fname = f.name pig_bin = 'pig' cmd_extr...
Run a pig script using the pig cli
def run_cli(self, pig, verbose=True): """ Run an pig script using the pig cli >>> ph = PigCliHook() >>> result = ph.run_cli("ls /;") >>> ("hdfs://" in result) True """ with TemporaryDirectory(prefix='airflow_pigop_') as tmp_dir: with NamedTem...
airflow/hooks/pig_hook.py
apache/airflow
fetch_celery_task_state
def fetch_celery_task_state(celery_task): try: with timeout(seconds=2): res = (celery_task[0], celery_task[1].state) except Exception as e: exception_traceback = "Celery Task ID: {}\n{}".format(celery_task[0], ...
Fetch and return the state of the given celery task. The scope of this function is global so that it can be called by subprocesses in the pool.
def fetch_celery_task_state(celery_task): """ Fetch and return the state of the given celery task. The scope of this function is global so that it can be called by subprocesses in the pool. :param celery_task: a tuple of the Celery task key and the async Celery object used to fetch the task's s...
airflow/executors/celery_executor.py
apache/airflow
CeleryExecutor._num_tasks_per_send_process
def _num_tasks_per_send_process(self, to_send_count): return max(1, int(math.ceil(1.0 * to_send_count / self._sync_parallelism)))
How many Celery tasks should each worker process send.
def _num_tasks_per_send_process(self, to_send_count): """ How many Celery tasks should each worker process send. :return: Number of tasks that should be sent per process :rtype: int """ return max(1, int(math.ceil(1.0 * to_send_count / self._sync_paral...
airflow/executors/celery_executor.py
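The same formula with illustrative numbers, assuming 100 queued tasks and a sync parallelism of 16:

```python
import math

to_send_count, sync_parallelism = 100, 16  # illustrative values
print(max(1, int(math.ceil(1.0 * to_send_count / sync_parallelism))))  # ceil(100/16) = 7
```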
apache/airflow
CeleryExecutor._num_tasks_per_fetch_process
def _num_tasks_per_fetch_process(self): return max(1, int(math.ceil(1.0 * len(self.tasks) / self._sync_parallelism)))
How many Celery tasks should be sent to each worker process.
def _num_tasks_per_fetch_process(self): """ How many Celery tasks should be sent to each worker process. :return: Number of tasks that should be used per process :rtype: int """ return max(1, int(math.ceil(1.0 * len(self.tasks) / self._sync_parallelism...
airflow/executors/celery_executor.py
apache/airflow
Variable.setdefault
def setdefault(cls, key, default, deserialize_json=False): obj = Variable.get(key, default_var=None, deserialize_json=deserialize_json) if obj is None: if default is not None: Variable.set(key, default, serialize_json=deserialize_json) ...
Like a Python builtin dict object, setdefault returns the current value for a key, and if it isn't there, stores the default value and returns it.
def setdefault(cls, key, default, deserialize_json=False): """ Like a Python builtin dict object, setdefault returns the current value for a key, and if it isn't there, stores the default value and returns it. :param key: Dict key for this Variable :type key: str :param ...
airflow/models/variable.py
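A minimal usage sketch; the variable key and default value are hypothetical:

```python
from airflow.models import Variable

# The first call stores the default; later calls return whatever is already stored.
flags = Variable.setdefault('feature_flags', {'beta': False}, deserialize_json=True)
```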
apache/airflow
MLEngineHook.create_job
def create_job(self, project_id, job, use_existing_job_fn=None): request = self._mlengine.projects().jobs().create( parent='projects/{}'.format(project_id), body=job) job_id = job['jobId'] try: request.execute() except HttpError as e: ...
Launches an MLEngine job and waits for it to reach a terminal state.
def create_job(self, project_id, job, use_existing_job_fn=None): """ Launches a MLEngine job and wait for it to reach a terminal state. :param project_id: The Google Cloud project id within which MLEngine job will be launched. :type project_id: str :param job: MLEng...
airflow/contrib/hooks/gcp_mlengine_hook.py
apache/airflow
MLEngineHook._get_job
def _get_job(self, project_id, job_id): job_name = 'projects/{}/jobs/{}'.format(project_id, job_id) request = self._mlengine.projects().jobs().get(name=job_name) while True: try: return request.execute() except HttpError as e: if e.resp.sta...
Gets a MLEngine job based on the job name.
def _get_job(self, project_id, job_id): """ Gets a MLEngine job based on the job name. :return: MLEngine job object if succeed. :rtype: dict Raises: googleapiclient.errors.HttpError: if HTTP error is returned from server """ job_name = 'projects/{}/j...
airflow/contrib/hooks/gcp_mlengine_hook.py
apache/airflow
MLEngineHook._wait_for_job_done
def _wait_for_job_done(self, project_id, job_id, interval=30): if interval <= 0: raise ValueError("Interval must be > 0") while True: job = self._get_job(project_id, job_id) if job['state'] in ['SUCCEEDED', 'FAILED', 'CANCELLED']: return job ...
Waits for the Job to reach a terminal state. This method will periodically check the job state until the job reaches a terminal state.
def _wait_for_job_done(self, project_id, job_id, interval=30): """ Waits for the Job to reach a terminal state. This method will periodically check the job state until the job reach a terminal state. Raises: googleapiclient.errors.HttpError: if HTTP error is returne...
airflow/contrib/hooks/gcp_mlengine_hook.py
apache/airflow
MLEngineHook.create_version
def create_version(self, project_id, model_name, version_spec): parent_name = 'projects/{}/models/{}'.format(project_id, model_name) create_request = self._mlengine.projects().models().versions().create( parent=parent_name, body=version_spec) response = create_request.execute() ...
Creates the Version on Google Cloud ML Engine.
def create_version(self, project_id, model_name, version_spec): """ Creates the Version on Google Cloud ML Engine. Returns the operation if the version was created successfully and raises an error otherwise. """ parent_name = 'projects/{}/models/{}'.format(project_id, mo...
airflow/contrib/hooks/gcp_mlengine_hook.py
apache/airflow
MLEngineHook.set_default_version
def set_default_version(self, project_id, model_name, version_name): full_version_name = 'projects/{}/models/{}/versions/{}'.format( project_id, model_name, version_name) request = self._mlengine.projects().models().versions().setDefault( name=full_version_name, body={}) ...
Sets a version to be the default. Blocks until finished.
def set_default_version(self, project_id, model_name, version_name): """ Sets a version to be the default. Blocks until finished. """ full_version_name = 'projects/{}/models/{}/versions/{}'.format( project_id, model_name, version_name) request = self._mlengine.project...
airflow/contrib/hooks/gcp_mlengine_hook.py
apache/airflow
MLEngineHook.list_versions
def list_versions(self, project_id, model_name): result = [] full_parent_name = 'projects/{}/models/{}'.format( project_id, model_name) request = self._mlengine.projects().models().versions().list( parent=full_parent_name, pageSize=100) response = request.execute...
Lists all available versions of a model. Blocks until finished.
def list_versions(self, project_id, model_name): """ Lists all available versions of a model. Blocks until finished. """ result = [] full_parent_name = 'projects/{}/models/{}'.format( project_id, model_name) request = self._mlengine.projects().models().version...
airflow/contrib/hooks/gcp_mlengine_hook.py
apache/airflow
MLEngineHook.delete_version
def delete_version(self, project_id, model_name, version_name): full_name = 'projects/{}/models/{}/versions/{}'.format( project_id, model_name, version_name) delete_request = self._mlengine.projects().models().versions().delete( name=full_name) response = delete_request.e...
Deletes the given version of a model. Blocks until finished.
def delete_version(self, project_id, model_name, version_name): """ Deletes the given version of a model. Blocks until finished. """ full_name = 'projects/{}/models/{}/versions/{}'.format( project_id, model_name, version_name) delete_request = self._mlengine.projects(...
airflow/contrib/hooks/gcp_mlengine_hook.py
apache/airflow
MLEngineHook.create_model
def create_model(self, project_id, model): if not model['name']: raise ValueError("Model name must be provided and " "could not be an empty string") project = 'projects/{}'.format(project_id) request = self._mlengine.projects().models().create( ...
Create a Model. Blocks until finished.
def create_model(self, project_id, model): """ Create a Model. Blocks until finished. """ if not model['name']: raise ValueError("Model name must be provided and " "could not be an empty string") project = 'projects/{}'.format(project_id) ...
airflow/contrib/hooks/gcp_mlengine_hook.py
apache/airflow
MLEngineHook.get_model
def get_model(self, project_id, model_name): if not model_name: raise ValueError("Model name must be provided and " "it could not be an empty string") full_model_name = 'projects/{}/models/{}'.format( project_id, model_name) request = self._ml...
Gets a Model. Blocks until finished.
def get_model(self, project_id, model_name): """ Gets a Model. Blocks until finished. """ if not model_name: raise ValueError("Model name must be provided and " "it could not be an empty string") full_model_name = 'projects/{}/models/{}'.f...
airflow/contrib/hooks/gcp_mlengine_hook.py
apache/airflow
AwsDynamoDBHook.write_batch_data
def write_batch_data(self, items): dynamodb_conn = self.get_conn() try: table = dynamodb_conn.Table(self.table_name) with table.batch_writer(overwrite_by_pkeys=self.table_keys) as batch: for item in items: batch.put_item(Item=item) ...
Write batch items to dynamodb table with provisioned throughput capacity.
def write_batch_data(self, items): """ Write batch items to dynamodb table with provisioned throughout capacity. """ dynamodb_conn = self.get_conn() try: table = dynamodb_conn.Table(self.table_name) with table.batch_writer(overwrite_by_pkeys=self.table_...
airflow/contrib/hooks/aws_dynamodb_hook.py
apache/airflow
_integrate_plugins
def _integrate_plugins(): from airflow.plugins_manager import executors_modules for executors_module in executors_modules: sys.modules[executors_module.__name__] = executors_module globals()[executors_module._name] = executors_module
Integrate plugins to the context.
def _integrate_plugins(): """Integrate plugins to the context.""" from airflow.plugins_manager import executors_modules for executors_module in executors_modules: sys.modules[executors_module.__name__] = executors_module globals()[executors_module._name] = executors_module
airflow/executors/__init__.py
apache/airflow
get_default_executor
def get_default_executor(): global DEFAULT_EXECUTOR if DEFAULT_EXECUTOR is not None: return DEFAULT_EXECUTOR executor_name = configuration.conf.get('core', 'EXECUTOR') DEFAULT_EXECUTOR = _get_executor(executor_name) log = LoggingMixin().log log.info("Using executor %s", executor_name...
Creates a new instance of the configured executor if none exists and returns it
def get_default_executor(): """Creates a new instance of the configured executor if none exists and returns it""" global DEFAULT_EXECUTOR if DEFAULT_EXECUTOR is not None: return DEFAULT_EXECUTOR executor_name = configuration.conf.get('core', 'EXECUTOR') DEFAULT_EXECUTOR = _get_executor(ex...
airflow/executors/__init__.py
apache/airflow
_get_executor
def _get_executor(executor_name): if executor_name == Executors.LocalExecutor: return LocalExecutor() elif executor_name == Executors.SequentialExecutor: return SequentialExecutor() elif executor_name == Executors.CeleryExecutor: from airflow.executors.celery_executor import CeleryEx...
Creates a new instance of the named executor. If the executor name is not known in airflow, look for it in the plugins
def _get_executor(executor_name): """ Creates a new instance of the named executor. In case the executor name is not know in airflow, look for it in the plugins """ if executor_name == Executors.LocalExecutor: return LocalExecutor() elif executor_name == Executors.SequentialExecutor:...
airflow/executors/__init__.py
apache/airflow
SegmentHook.on_error
def on_error(self, error, items): self.log.error('Encountered Segment error: {segment_error} with ' 'items: {with_items}'.format(segment_error=error, with_items=items)) raise AirflowException('Segment error: {}'.format(error))
Handles error callbacks when using Segment with segment_debug_mode set to True
def on_error(self, error, items): """ Handles error callbacks when using Segment with segment_debug_mode set to True """ self.log.error('Encountered Segment error: {segment_error} with ' 'items: {with_items}'.format(segment_error=error, ...
airflow/contrib/hooks/segment_hook.py
apache/airflow
trigger_dag
def trigger_dag(dag_id): data = request.get_json(force=True) run_id = None if 'run_id' in data: run_id = data['run_id'] conf = None if 'conf' in data: conf = data['conf'] execution_date = None if 'execution_date' in data and data['execution_date'] is not None: exec...
Trigger a new dag run for a Dag with an execution date of now unless specified in the data.
def trigger_dag(dag_id): """ Trigger a new dag run for a Dag with an execution date of now unless specified in the data. """ data = request.get_json(force=True) run_id = None if 'run_id' in data: run_id = data['run_id'] conf = None if 'conf' in data: conf = data['co...
airflow/www/api/experimental/endpoints.py
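A sketch of calling this endpoint over HTTP, assuming a webserver on localhost:8080 with the experimental API enabled; run_id, conf and execution_date are all optional keys in the JSON body, and the dag id is a placeholder:

```python
import json

import requests

resp = requests.post(
    'http://localhost:8080/api/experimental/dags/example_dag/dag_runs',  # placeholder dag id
    data=json.dumps({'conf': {'param': 'value'}}),
    headers={'Content-Type': 'application/json'},
)
print(resp.status_code, resp.json())
```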
apache/airflow
delete_dag
def delete_dag(dag_id): try: count = delete.delete_dag(dag_id) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response return jsonify(message="Removed {} record(s)".format(count)...
Delete all DB records related to the specified Dag.
def delete_dag(dag_id): """ Delete all DB records related to the specified Dag. """ try: count = delete.delete_dag(dag_id) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return ...
airflow/www/api/experimental/endpoints.py
apache/airflow
get_pools
def get_pools(): try: pools = pool_api.get_pools() except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify([p.to_json() for p in pools])
Get all pools.
def get_pools(): """Get all pools.""" try: pools = pool_api.get_pools() except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify([p.to_json() for ...
airflow/www/api/experimental/endpoints.py
apache/airflow
create_pool
def create_pool(): params = request.get_json(force=True) try: pool = pool_api.create_pool(**params) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: retu...
Create a pool.
def create_pool(): """Create a pool.""" params = request.get_json(force=True) try: pool = pool_api.create_pool(**params) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return respon...
airflow/www/api/experimental/endpoints.py
apache/airflow
delete_pool
def delete_pool(name): try: pool = pool_api.delete_pool(name=name) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify(pool.to_json())
Delete pool.
def delete_pool(name): """Delete pool.""" try: pool = pool_api.delete_pool(name=name) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify(poo...
airflow/www/api/experimental/endpoints.py
apache/airflow
AzureContainerInstanceHook.create_or_update
def create_or_update(self, resource_group, name, container_group): self.connection.container_groups.create_or_update(resource_group, name, container_group)
Create a new container group
def create_or_update(self, resource_group, name, container_group): """ Create a new container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :param container_group...
airflow/contrib/hooks/azure_container_instance_hook.py
apache/airflow
AzureContainerInstanceHook.get_state_exitcode_details
def get_state_exitcode_details(self, resource_group, name): current_state = self._get_instance_view(resource_group, name).current_state return (current_state.state, current_state.exit_code, current_state.detail_status)
Get the state and exitcode of a container group
def get_state_exitcode_details(self, resource_group, name): """ Get the state and exitcode of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :return: A...
airflow/contrib/hooks/azure_container_instance_hook.py
apache/airflow
AzureContainerInstanceHook.get_messages
def get_messages(self, resource_group, name): instance_view = self._get_instance_view(resource_group, name) return [event.message for event in instance_view.events]
Get the messages of a container group
def get_messages(self, resource_group, name): """ Get the messages of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :return: A list of the event messa...
airflow/contrib/hooks/azure_container_instance_hook.py
apache/airflow
AzureContainerInstanceHook.get_logs
def get_logs(self, resource_group, name, tail=1000): logs = self.connection.container.list_logs(resource_group, name, name, tail=tail) return logs.content.splitlines(True)
Get the tail from logs of a container group
def get_logs(self, resource_group, name, tail=1000): """ Get the tail from logs of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :param tail: the size...
airflow/contrib/hooks/azure_container_instance_hook.py
apache/airflow
AzureContainerInstanceHook.delete
def delete(self, resource_group, name): self.connection.container_groups.delete(resource_group, name)
Delete a container group
def delete(self, resource_group, name): """ Delete a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str """ self.connection.container_groups.delete(r...
airflow/contrib/hooks/azure_container_instance_hook.py
apache/airflow
AzureContainerInstanceHook.exists
def exists(self, resource_group, name): for container in self.connection.container_groups.list_by_resource_group(resource_group): if container.name == name: return True return False
Test if a container group exists
def exists(self, resource_group, name): """ Test if a container group exists :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str """ for container in self.connection....
airflow/contrib/hooks/azure_container_instance_hook.py
apache/airflow
apply_defaults
def apply_defaults(func): sig_cache = signature(func) non_optional_args = { name for (name, param) in sig_cache.parameters.items() if param.default == param.empty and param.name != 'self' and param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD)} @w...
Function decorator that looks for an argument named "default_args", and fills the unspecified arguments from it.
def apply_defaults(func): """ Function decorator that Looks for an argument named "default_args", and fills the unspecified arguments from it. Since python2.* isn't clear about which arguments are missing when calling a function, and that this can be quite confusing with multi-level inheritance...
airflow/utils/decorators.py
apache/airflow
HiveToDruidTransfer.construct_ingest_query
def construct_ingest_query(self, static_path, columns): num_shards = self.num_shards target_partition_size = self.target_partition_size if self.target_partition_size == -1: if self.num_shards == -1: target_partition_size = DEFAULT_TARGET_PAR...
Builds an ingest query for an HDFS TSV load.
def construct_ingest_query(self, static_path, columns): """ Builds an ingest query for an HDFS TSV load. :param static_path: The path on hdfs where the data is :type static_path: str :param columns: List of all the columns that are available :type columns: list "...
airflow/operators/hive_to_druid.py
apache/airflow
RedisPubSubSensor.poke
def poke(self, context): self.log.info('RedisPubSubSensor checking for message on channels: %s', self.channels) message = self.pubsub.get_message() self.log.info('Message %s from channel %s', message, self.channels) if message and message['type'] == 'message': con...
Check for message on subscribed channels and write to xcom the message with key ``message``. An example of a message: ``{'type': 'message', 'pattern': None, 'channel': b'test', 'data': b'hello'}``
def poke(self, context): """ Check for message on subscribed channels and write to xcom the message with key ``message`` An example of message ``{'type': 'message', 'pattern': None, 'channel': b'test', 'data': b'hello'}`` :param context: the context object :type context: dict ...
airflow/contrib/sensors/redis_pub_sub_sensor.py
apache/airflow
DagRun.get_previous_dagrun
def get_previous_dagrun(self, session=None): return session.query(DagRun).filter( DagRun.dag_id == self.dag_id, DagRun.execution_date < self.execution_date ).order_by( DagRun.execution_date.desc() ).first()
The previous DagRun, if there is one
def get_previous_dagrun(self, session=None): """The previous DagRun, if there is one""" return session.query(DagRun).filter( DagRun.dag_id == self.dag_id, DagRun.execution_date < self.execution_date ).order_by( DagRun.execution_date.desc() ).first()
airflow/models/dagrun.py
apache/airflow
DagRun.get_previous_scheduled_dagrun
def get_previous_scheduled_dagrun(self, session=None): dag = self.get_dag() return session.query(DagRun).filter( DagRun.dag_id == self.dag_id, DagRun.execution_date == dag.previous_schedule(self.execution_date) ).first()
The previous, SCHEDULED DagRun, if there is one
def get_previous_scheduled_dagrun(self, session=None): """The previous, SCHEDULED DagRun, if there is one""" dag = self.get_dag() return session.query(DagRun).filter( DagRun.dag_id == self.dag_id, DagRun.execution_date == dag.previous_schedule(self.execution_date) ...
airflow/models/dagrun.py
apache/airflow
DagRun.update_state
def update_state(self, session=None): dag = self.get_dag() tis = self.get_task_instances(session=session) self.log.debug("Updating state for %s considering %s task(s)", self, len(tis)) for ti in list(tis): if ti.state == State.REMOVED: tis.remov...
Determines the overall state of the DagRun based on the state of its TaskInstances.
def update_state(self, session=None): """ Determines the overall state of the DagRun based on the state of its TaskInstances. :return: State """ dag = self.get_dag() tis = self.get_task_instances(session=session) self.log.debug("Updating state for %s co...
airflow/models/dagrun.py
apache/airflow
DagRun.verify_integrity
def verify_integrity(self, session=None): from airflow.models.taskinstance import TaskInstance dag = self.get_dag() tis = self.get_task_instances(session=session) task_ids = [] for ti in tis: task_ids.append(ti.task_id) task = None ...
Verifies the DagRun by checking for removed tasks or tasks that are not in the database yet. It will set state to removed or add the task if required.
def verify_integrity(self, session=None): """ Verifies the DagRun by checking for removed tasks or tasks that are not in the database yet. It will set state to removed or add the task if required. """ from airflow.models.taskinstance import TaskInstance # Avoid circular import ...
airflow/models/dagrun.py
apache/airflow
jenkins_request_with_headers
def jenkins_request_with_headers(jenkins_server, req): try: response = jenkins_server.jenkins_request(req) response_body = response.content response_headers = response.headers if response_body is None: raise jenkins.EmptyResponseException( "Error communica...
We need to get the headers in addition to the response body in order to read the location from them. This function uses the jenkins_request method from the python-jenkins library with just the return call changed
def jenkins_request_with_headers(jenkins_server, req): """ We need to get the headers in addition to the body answer to get the location from them This function uses jenkins_request method from python-jenkins library with just the return call changed :param jenkins_server: The server to query ...
airflow/contrib/operators/jenkins_job_trigger_operator.py
apache/airflow
conditionally_trigger
def conditionally_trigger(context, dag_run_obj): c_p = context['params']['condition_param'] print("Controller DAG : conditionally_trigger = {}".format(c_p)) if context['params']['condition_param']: dag_run_obj.payload = {'message': context['params']['message']} pp.pprint(dag_run_obj.payload)...
This function decides whether or not to trigger the remote DAG
def conditionally_trigger(context, dag_run_obj): """This function decides whether or not to Trigger the remote DAG""" c_p = context['params']['condition_param'] print("Controller DAG : conditionally_trigger = {}".format(c_p)) if context['params']['condition_param']: dag_run_obj.payload = {'messa...
airflow/example_dags/example_trigger_controller_dag.py
apache/airflow
DatadogHook.send_metric
def send_metric(self, metric_name, datapoint, tags=None, type_=None, interval=None): response = api.Metric.send( metric=metric_name, points=datapoint, host=self.host, tags=tags, type=type_, interval=interval) self.validate_response...
Sends a single datapoint metric to DataDog
def send_metric(self, metric_name, datapoint, tags=None, type_=None, interval=None): """ Sends a single datapoint metric to DataDog :param metric_name: The name of the metric :type metric_name: str :param datapoint: A single integer or float related to the metric :type d...
airflow/contrib/hooks/datadog_hook.py
apache/airflow
DatadogHook.query_metric
def query_metric(self, query, from_seconds_ago, to_seconds_ago): now = int(time.time()) response = api.Metric.query( start=now - from_seconds_ago, end=now - to_seconds_ago, query=query) self.vali...
Queries datadog for a specific metric, potentially with some function applied to it and returns the results.
def query_metric(self, query, from_seconds_ago, to_seconds_ago): """ Queries datadog for a specific metric, potentially with some function applied to it and returns the results. :param query: The datadog query to execute (se...
airflow/contrib/hooks/datadog_hook.py
apache/airflow
DagBag.get_dag
def get_dag(self, dag_id): from airflow.models.dag import DagModel root_dag_id = dag_id if dag_id in self.dags: dag = self.dags[dag_id] if dag.is_subdag: root_dag_id = dag.parent_dag.dag_id orm_dag = DagModel.get_current(root_...
Gets the DAG out of the dictionary, and refreshes it if expired
def get_dag(self, dag_id): """ Gets the DAG out of the dictionary, and refreshes it if expired """ from airflow.models.dag import DagModel # Avoid circular import # If asking for a known subdag, we want to refresh the parent root_dag_id = dag_id if dag_id in sel...
airflow/models/dagbag.py
apache/airflow
DagBag.kill_zombies
def kill_zombies(self, zombies, session=None): from airflow.models.taskinstance import TaskInstance for zombie in zombies: if zombie.dag_id in self.dags: dag = self.dags[zombie.dag_id] if zombie.task_id in dag.task_ids: task = dag.get_ta...
Fail given zombie tasks, which are tasks that haven't had a heartbeat for too long, in the current DagBag.
def kill_zombies(self, zombies, session=None): """ Fail given zombie tasks, which are tasks that haven't had a heartbeat for too long, in the current DagBag. :param zombies: zombie task instances to kill. :type zombies: airflow.utils.dag_processing.SimpleTaskInstance :pa...
airflow/models/dagbag.py
apache/airflow
DagBag.bag_dag
def bag_dag(self, dag, parent_dag, root_dag): dag.test_cycle() dag.resolve_template_files() dag.last_loaded = timezone.utcnow() for task in dag.tasks: settings.policy(task) subdags = dag.subdags try: for subdag in subdags: sub...
Adds the DAG into the bag, recurses into sub dags. Throws AirflowDagCycleException if a cycle is detected in this dag or its subdags
def bag_dag(self, dag, parent_dag, root_dag): """ Adds the DAG into the bag, recurses into sub dags. Throws AirflowDagCycleException if a cycle is detected in this dag or its subdags """ dag.test_cycle() # throws if a task cycle is found dag.resolve_template_files() ...
airflow/models/dagbag.py
apache/airflow
DagBag.dagbag_report
def dagbag_report(self): report = textwrap.dedent() stats = self.dagbag_stats return report.format( dag_folder=self.dag_folder, duration=sum([o.duration for o in stats]), dag_num=sum([o.dag_num for o in stats]), task_num=sum([o.task_num for o in st...
Prints a report around DagBag loading stats
def dagbag_report(self): """Prints a report around DagBag loading stats""" report = textwrap.dedent("""\n ------------------------------------------------------------------- DagBag loading stats for {dag_folder} ------------------------------------------------------------------- ...
airflow/models/dagbag.py
apache/airflow
ds_add
def ds_add(ds, days): ds = datetime.strptime(ds, '%Y-%m-%d') if days: ds = ds + timedelta(days) return ds.isoformat()[:10]
Add or subtract days from a YYYY-MM-DD
def ds_add(ds, days): """ Add or subtract days from a YYYY-MM-DD :param ds: anchor date in ``YYYY-MM-DD`` format to add to :type ds: str :param days: number of days to add to the ds, you can use negative values :type days: int >>> ds_add('2015-01-01', 5) '2015-01-06' >>> ds_add('20...
airflow/macros/__init__.py
apache/airflow
ds_format
def ds_format(ds, input_format, output_format): return datetime.strptime(ds, input_format).strftime(output_format)
Takes an input string and outputs another string as specified in the output format
def ds_format(ds, input_format, output_format): """ Takes an input string and outputs another string as specified in the output format :param ds: input string which contains a date :type ds: str :param input_format: input string format. E.g. %Y-%m-%d :type input_format: str :param outpu...
airflow/macros/__init__.py
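The two date macros above in action:

```python
from airflow.macros import ds_add, ds_format

ds_add('2015-01-01', 5)                          # '2015-01-06'
ds_format('2015-01-05', '%Y-%m-%d', '%m-%d-%y')  # '01-05-15'
```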
apache/airflow
HdfsSensorRegex.poke
def poke(self, context): sb = self.hook(self.hdfs_conn_id).get_conn() self.log.info( 'Poking for %s to be a directory with files matching %s', self.filepath, self.regex.pattern ) result = [f for f in sb.ls([self.filepath], include_toplevel=False) if f['file_...
poke matching files in a directory with self.regex
def poke(self, context): """ poke matching files in a directory with self.regex :return: Bool depending on the search criteria """ sb = self.hook(self.hdfs_conn_id).get_conn() self.log.info( 'Poking for %s to be a directory with files matching %s', self.filep...
airflow/contrib/sensors/hdfs_sensor.py
apache/airflow
HdfsSensorFolder.poke
def poke(self, context): sb = self.hook(self.hdfs_conn_id).get_conn() result = [f for f in sb.ls([self.filepath], include_toplevel=True)] result = self.filter_for_ignored_ext(result, self.ignored_ext, self.ignore_copying) result = self.filter_...
poke for a non-empty directory
def poke(self, context): """ poke for a non empty directory :return: Bool depending on the search criteria """ sb = self.hook(self.hdfs_conn_id).get_conn() result = [f for f in sb.ls([self.filepath], include_toplevel=True)] result = self.filter_for_ignored_ext(re...
airflow/contrib/sensors/hdfs_sensor.py
apache/airflow
clear_task_instances
def clear_task_instances(tis, session, activate_dag_runs=True, dag=None, ): job_ids = [] for ti in tis: if ti.state == State.RUNNING: if ti.job_id: ti.state = State.SHUTDOWN ...
Clears a set of task instances, but makes sure the running ones get killed.
def clear_task_instances(tis, session, activate_dag_runs=True, dag=None, ): """ Clears a set of task instances, but makes sure the running ones get killed. :param tis: a list of task instances :param...
airflow/models/taskinstance.py
apache/airflow
TaskInstance.try_number
def try_number(self): if self.state == State.RUNNING: return self._try_number return self._try_number + 1
Return the try number that this task number will be when it is actually run. If the TI is currently running, this will match the column in the database; in all other cases this will be incremented
def try_number(self): """ Return the try number that this task number will be when it is actually run. If the TI is currently running, this will match the column in the databse, in all othercases this will be incremenetd """ # This is designed so that task logs e...
airflow/models/taskinstance.py
apache/airflow
TaskInstance.generate_command
def generate_command(dag_id, task_id, execution_date, mark_success=False, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ...
Generates the shell command required to execute this task instance.
def generate_command(dag_id, task_id, execution_date, mark_success=False, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ...
airflow/models/taskinstance.py
apache/airflow
TaskInstance.current_state
def current_state(self, session=None): TI = TaskInstance ti = session.query(TI).filter( TI.dag_id == self.dag_id, TI.task_id == self.task_id, TI.execution_date == self.execution_date, ).all() if ti: state = ti[0].state else: ...
Get the very latest state from the database; if a session is passed, we use it and looking up the state becomes part of the session, otherwise a new session is used.
def current_state(self, session=None): """ Get the very latest state from the database, if a session is passed, we use and looking up the state becomes part of the session, otherwise a new session is used. """ TI = TaskInstance ti = session.query(TI).filter( ...
airflow/models/taskinstance.py
apache/airflow
TaskInstance.error
def error(self, session=None): self.log.error("Recording the task instance as FAILED") self.state = State.FAILED session.merge(self) session.commit()
Forces the task instance's state to FAILED in the database.
def error(self, session=None): """ Forces the task instance's state to FAILED in the database. """ self.log.error("Recording the task instance as FAILED") self.state = State.FAILED session.merge(self) session.commit()
airflow/models/taskinstance.py
apache/airflow
TaskInstance.refresh_from_db
def refresh_from_db(self, session=None, lock_for_update=False): TI = TaskInstance qry = session.query(TI).filter( TI.dag_id == self.dag_id, TI.task_id == self.task_id, TI.execution_date == self.execution_date) if lock_for_update: ti = qry.with_fo...
Refreshes the task instance from the database based on the primary key
def refresh_from_db(self, session=None, lock_for_update=False): """ Refreshes the task instance from the database based on the primary key :param lock_for_update: if True, indicates that the database should lock the TaskInstance (issuing a FOR UPDATE clause) until the se...
airflow/models/taskinstance.py
apache/airflow
TaskInstance.clear_xcom_data
def clear_xcom_data(self, session=None): session.query(XCom).filter( XCom.dag_id == self.dag_id, XCom.task_id == self.task_id, XCom.execution_date == self.execution_date ).delete() session.commit()
Clears all XCom data from the database for the task instance
def clear_xcom_data(self, session=None): """ Clears all XCom data from the database for the task instance """ session.query(XCom).filter( XCom.dag_id == self.dag_id, XCom.task_id == self.task_id, XCom.execution_date == self.execution_date ).del...
airflow/models/taskinstance.py
apache/airflow
TaskInstance.next_retry_datetime
def next_retry_datetime(self): delay = self.task.retry_delay if self.task.retry_exponential_backoff: min_backoff = int(delay.total_seconds() * (2 ** (self.try_number - 2))) hash = int(hashlib.sha1("{}#{}#{}#{}".format(self.dag_id, ...
Get datetime of the next retry if the task instance fails. For exponential backoff, retry_delay is used as base and will be converted to seconds.
def next_retry_datetime(self): """ Get datetime of the next retry if the task instance fails. For exponential backoff, retry_delay is used as base and will be converted to seconds. """ delay = self.task.retry_delay if self.task.retry_exponential_backoff: min_b...
airflow/models/taskinstance.py
apache/airflow
TaskInstance.ready_for_retry
def ready_for_retry(self): return (self.state == State.UP_FOR_RETRY and self.next_retry_datetime() < timezone.utcnow())
Checks whether the task instance is in the right state and timeframe to be retried.
def ready_for_retry(self): """ Checks on whether the task instance is in the right state and timeframe to be retried. """ return (self.state == State.UP_FOR_RETRY and self.next_retry_datetime() < timezone.utcnow())
airflow/models/taskinstance.py
apache/airflow
TaskInstance.xcom_push
def xcom_push( self, key, value, execution_date=None): if execution_date and execution_date < self.execution_date: raise ValueError( 'execution_date can not be in the past (current ' 'execution_date is {}; received {})'....
Make an XCom available for tasks to pull.
def xcom_push( self, key, value, execution_date=None): """ Make an XCom available for tasks to pull. :param key: A key for the XCom :type key: str :param value: A value for the XCom. The value is pickled and stored in t...
airflow/models/taskinstance.py
apache/airflow
TaskInstance.init_run_context
def init_run_context(self, raw=False): self.raw = raw self._set_context(self)
Sets the log context.
def init_run_context(self, raw=False): """ Sets the log context. """ self.raw = raw self._set_context(self)
airflow/models/taskinstance.py
apache/airflow
WasbTaskHandler.close
def close(self): if self.closed: return super().close() if not self.upload_on_close: return local_loc = os.path.join(self.local_base, self.log_relative_path) remote_loc = os.path.join(self.remote_base, self.log_relati...
Close and upload local log file to remote storage Wasb.
def close(self): """ Close and upload local log file to remote storage Wasb. """ # When application exit, system shuts down all handlers by # calling close method. Here we check if logger is already # closed to prevent uploading the log to remote storage multiple ...
airflow/utils/log/wasb_task_handler.py
apache/airflow
GceHook.get_conn
def get_conn(self): if not self._conn: http_authorized = self._authorize() self._conn = build('compute', self.api_version, http=http_authorized, cache_discovery=False) return self._conn
Retrieves connection to Google Compute Engine.
def get_conn(self): """ Retrieves connection to Google Compute Engine. :return: Google Compute Engine services object :rtype: dict """ if not self._conn: http_authorized = self._authorize() self._conn = build('compute', self.api_version, ...
airflow/contrib/hooks/gcp_compute_hook.py
apache/airflow
GceHook.start_instance
def start_instance(self, zone, resource_id, project_id=None): response = self.get_conn().instances().start( project=project_id, zone=zone, instance=resource_id ).execute(num_retries=self.num_retries) try: operation_name = response["name"] e...
Starts an existing instance defined by project_id, zone and resource_id. Must be called with keyword arguments rather than positional.
def start_instance(self, zone, resource_id, project_id=None): """ Starts an existing instance defined by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the instance exists :type zone: ...
airflow/contrib/hooks/gcp_compute_hook.py
apache/airflow
GceHook.set_machine_type
def set_machine_type(self, zone, resource_id, body, project_id=None): response = self._execute_set_machine_type(zone, resource_id, body, project_id) try: operation_name = response["name"] except KeyError: raise AirflowException( "Wrong response '{}' return...
Sets machine type of an instance defined by project_id, zone and resource_id. Must be called with keyword arguments rather than positional.
def set_machine_type(self, zone, resource_id, body, project_id=None): """ Sets machine type of an instance defined by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the instance exists. ...
airflow/contrib/hooks/gcp_compute_hook.py
apache/airflow
GceHook.get_instance_template
def get_instance_template(self, resource_id, project_id=None): response = self.get_conn().instanceTemplates().get( project=project_id, instanceTemplate=resource_id ).execute(num_retries=self.num_retries) return response
Retrieves instance template by project_id and resource_id. Must be called with keyword arguments rather than positional.
def get_instance_template(self, resource_id, project_id=None): """ Retrieves instance template by project_id and resource_id. Must be called with keyword arguments rather than positional. :param resource_id: Name of the instance template :type resource_id: str :param pro...
airflow/contrib/hooks/gcp_compute_hook.py
apache/airflow
GceHook.insert_instance_template
def insert_instance_template(self, body, request_id=None, project_id=None): response = self.get_conn().instanceTemplates().insert( project=project_id, body=body, requestId=request_id ).execute(num_retries=self.num_retries) try: operation_name = res...
Inserts instance template using the body specified. Must be called with keyword arguments rather than positional.
def insert_instance_template(self, body, request_id=None, project_id=None): """ Inserts instance template using body specified Must be called with keyword arguments rather than positional. :param body: Instance template representation as object according to https://cloud.goo...
airflow/contrib/hooks/gcp_compute_hook.py
apache/airflow
GceHook.get_instance_group_manager
def get_instance_group_manager(self, zone, resource_id, project_id=None): response = self.get_conn().instanceGroupManagers().get( project=project_id, zone=zone, instanceGroupManager=resource_id ).execute(num_retries=self.num_retries) return response
Retrieves Instance Group Manager by project_id, zone and resource_id. Must be called with keyword arguments rather than positional.
def get_instance_group_manager(self, zone, resource_id, project_id=None): """ Retrieves Instance Group Manager by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the Instance Group Manager exis...
airflow/contrib/hooks/gcp_compute_hook.py
apache/airflow
GceHook.patch_instance_group_manager
def patch_instance_group_manager(self, zone, resource_id, body, request_id=None, project_id=None): response = self.get_conn().instanceGroupManagers().patch( project=project_id, zone=zone, instanceGroupManager=resource_id, body=...
Patches Instance Group Manager with the specified body. Must be called with keyword arguments rather than positional.
def patch_instance_group_manager(self, zone, resource_id, body, request_id=None, project_id=None): """ Patches Instance Group Manager with the specified body. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Pla...
airflow/contrib/hooks/gcp_compute_hook.py
apache/airflow
GceHook._wait_for_operation_to_complete
def _wait_for_operation_to_complete(self, project_id, operation_name, zone=None): service = self.get_conn() while True: if zone is None: operation_response = self._check_global_operation_status( service, operation_name, project_id) ...
Waits for the named operation to complete - checks status of the async call.
def _wait_for_operation_to_complete(self, project_id, operation_name, zone=None): """ Waits for the named operation to complete - checks status of the async call. :param operation_name: name of the operation :type operation_name: str :param zone: optional region of the request (...
airflow/contrib/hooks/gcp_compute_hook.py
apache/airflow
S3Hook.check_for_bucket
def check_for_bucket(self, bucket_name): try: self.get_conn().head_bucket(Bucket=bucket_name) return True except ClientError as e: self.log.info(e.response["Error"]["Message"]) return False
Check if bucket_name exists.
def check_for_bucket(self, bucket_name): """ Check if bucket_name exists. :param bucket_name: the name of the bucket :type bucket_name: str """ try: self.get_conn().head_bucket(Bucket=bucket_name) return True except ClientError as e: ...
airflow/hooks/S3_hook.py
apache/airflow
S3Hook.create_bucket
def create_bucket(self, bucket_name, region_name=None): s3_conn = self.get_conn() if not region_name: region_name = s3_conn.meta.region_name if region_name == 'us-east-1': self.get_conn().create_bucket(Bucket=bucket_name) else: self.get_conn().create_b...
Creates an Amazon S3 bucket.
def create_bucket(self, bucket_name, region_name=None): """ Creates an Amazon S3 bucket. :param bucket_name: The name of the bucket :type bucket_name: str :param region_name: The name of the aws region in which to create the bucket. :type region_name: str """ ...
airflow/hooks/S3_hook.py
apache/airflow
S3Hook.check_for_prefix
def check_for_prefix(self, bucket_name, prefix, delimiter): prefix = prefix + delimiter if prefix[-1] != delimiter else prefix prefix_split = re.split(r'(\w+[{d}])$'.format(d=delimiter), prefix, 1) previous_level = prefix_split[0] plist = self.list_prefixes(bucket_name, previous_level, d...
Checks that a prefix exists in a bucket
def check_for_prefix(self, bucket_name, prefix, delimiter): """ Checks that a prefix exists in a bucket :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix :type prefix: str :param delimiter: the delimiter marks key hiera...
airflow/hooks/S3_hook.py
apache/airflow
S3Hook.list_prefixes
def list_prefixes(self, bucket_name, prefix='', delimiter='', page_size=None, max_items=None): config = { 'PageSize': page_size, 'MaxItems': max_items, } paginator = self.get_conn().get_paginator('list_objects_v2') response = paginator.pagin...
Lists prefixes in a bucket under prefix
def list_prefixes(self, bucket_name, prefix='', delimiter='', page_size=None, max_items=None): """ Lists prefixes in a bucket under prefix :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix :type prefix: st...
airflow/hooks/S3_hook.py
apache/airflow
S3Hook.list_keys
def list_keys(self, bucket_name, prefix='', delimiter='', page_size=None, max_items=None): config = { 'PageSize': page_size, 'MaxItems': max_items, } paginator = self.get_conn().get_paginator('list_objects_v2') response = paginator.paginate(Buck...
Lists keys in a bucket under prefix and not containing delimiter
def list_keys(self, bucket_name, prefix='', delimiter='', page_size=None, max_items=None): """ Lists keys in a bucket under prefix and not containing delimiter :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix ...
airflow/hooks/S3_hook.py
apache/airflow
S3Hook.check_for_key
def check_for_key(self, key, bucket_name=None): if not bucket_name: (bucket_name, key) = self.parse_s3_url(key) try: self.get_conn().head_object(Bucket=bucket_name, Key=key) return True except ClientError as e: self.log.info(e.response["Error"]["M...
Checks if a key exists in a bucket
def check_for_key(self, key, bucket_name=None): """ Checks if a key exists in a bucket :param key: S3 key that will point to the file :type key: str :param bucket_name: Name of the bucket in which the file is stored :type bucket_name: str """ if not bucke...
airflow/hooks/S3_hook.py
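A minimal usage sketch for check_for_key; the connection id, bucket and key are placeholders. The key can also be given as a full s3:// URL, in which case the bucket name is parsed from it:

```python
from airflow.hooks.S3_hook import S3Hook

hook = S3Hook(aws_conn_id='aws_default')  # placeholder connection id
hook.check_for_key('data/part-0000.csv', bucket_name='my-bucket')
hook.check_for_key('s3://my-bucket/data/part-0000.csv')
```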