def __init__(
    self,
    *,
    job_name: str,
    job_definition: str,
    job_queue: str,
    overrides: dict,
    array_properties: Optional[dict] = None,
    parameters: Optional[dict] = None,
    job_id: Optional[str] = None,
    waiters: Optional[Any] = None,
    max_retries: Optional[int] = None,
    status_retries: Optional[int] = None,
    aws_conn_id: Optional[str] = None,
    region_name: Optional[str] = None,
    tags: Optional[dict] = None,
    **kwargs,
):
    """Store the AWS Batch job settings and build the client hook.

    All optional mapping arguments are normalized to empty dicts so
    downstream code never has to handle ``None``.  Remaining keyword
    arguments are forwarded to ``BaseOperator``.
    """
    BaseOperator.__init__(self, **kwargs)
    self.job_name = job_name
    self.job_definition = job_definition
    self.job_queue = job_queue
    self.job_id = job_id
    # Normalize optional mappings to empty dicts.
    self.overrides = overrides or {}
    self.array_properties = array_properties or {}
    self.parameters = parameters or {}
    self.tags = tags or {}
    self.waiters = waiters
    # The hook owns the retry and connection configuration for the
    # underlying AWS Batch client.
    self.hook = BatchClientHook(
        max_retries=max_retries,
        status_retries=status_retries,
        aws_conn_id=aws_conn_id,
        region_name=region_name,
    )
def __init__(
    self,
    job_name,
    job_definition,
    job_queue,
    overrides,
    array_properties=None,
    parameters=None,
    job_id=None,
    waiters=None,
    max_retries=None,
    status_retries=None,
    aws_conn_id=None,
    region_name=None,
    **kwargs,
):  # pylint: disable=too-many-arguments
    """Initialize both parent classes, then record the Batch job settings."""
    # The two bases are initialized explicitly: BaseOperator consumes the
    # Airflow kwargs, AwsBatchClient receives the retry/connection settings.
    BaseOperator.__init__(self, **kwargs)
    AwsBatchClient.__init__(
        self,
        max_retries=max_retries,
        status_retries=status_retries,
        aws_conn_id=aws_conn_id,
        region_name=region_name,
    )
    self.job_name = job_name
    self.job_definition = job_definition
    self.job_queue = job_queue
    self.job_id = job_id
    # Only array_properties is normalized here; overrides/parameters are
    # stored exactly as given (a None value is preserved for the caller).
    self.overrides = overrides
    self.array_properties = array_properties or {}
    self.parameters = parameters
    self.waiters = waiters
def __init__(self, ssh_hook, lsf_script, bsub=DEFAULT_BSUB, bjobs=DEFAULT_BJOBS,
             queue_name=DEFAULT_QUEUE_NAME, bsub_args='', bkill=DEFAULT_BKILL,
             poke_interval=10, timeout=60 * 60, soft_fail=False, env=None,
             *args, **kwargs):
    """Configure an LSF job submission/monitoring operator.

    :param ssh_hook: hook used to run the LSF commands on the remote host
    :param lsf_script: script submitted via ``bsub``
    :param bsub: LSF submit command name
    :param bjobs: LSF status command name
    :param queue_name: LSF queue to submit to
    :param bsub_args: extra command-line arguments appended to ``bsub``
    :param bkill: LSF kill command name
    :param poke_interval: seconds between status polls
    :param timeout: overall wait limit in seconds (default: one hour)
    :param soft_fail: whether a timeout soft-fails instead of failing hard
    :param env: optional environment mapping for the remote commands
    """
    self.hook = ssh_hook
    self.lsf_script = lsf_script
    self.bsub = bsub
    self.bsub_args = bsub_args
    self.bjobs = bjobs
    # Bug fix: ``bkill`` was accepted but never stored, so the configured
    # kill command could not be used by the rest of the operator.
    self.bkill = bkill
    self.queue_name = queue_name
    self.poke_interval = poke_interval
    self.timeout = timeout
    self.soft_fail = soft_fail
    self.env = env
    # Populated after submission / during polling.
    self.jobid = None
    self.prevent_returncode = None
    BaseOperator.__init__(self, *args, **kwargs)
def __init__(self, project, control, namespace, kind, metadata_converter,
             task_id=None, filters=None, *args, **kwargs):
    """Configure a Datastore fetch operator.

    :param project: Datastore project to query
    :param control: control object handed through to the converter/operator
    :param namespace: Datastore namespace of the entity kind
    :param kind: Datastore entity kind to fetch
    :param metadata_converter: callable invoked with ``self``; its result is
        stored as ``self.metadata_converter``
    :param task_id: explicit task id; defaults to
        ``datastore_get_<namespace>_<kind>``
    :param filters: optional list of query filters (defaults to empty)
    """
    BaseOperator.__init__(
        self,
        task_id=task_id if task_id else 'datastore_get_{}_{}'.format(namespace, kind),
        *args,
        **kwargs,
    )
    self.project = project
    self.control = control
    self.namespace = namespace
    self.kind = kind
    # Bug fix: ``filters=[]`` was a mutable default argument, shared across
    # every instance created without an explicit value; a fresh list is now
    # created per instance.
    self.filters = filters if filters is not None else []
    self.metadata_converter = metadata_converter(self)
    # Filled in when the entity is fetched.
    self.entity = None
def __init__(self, project_id, dataset_id, table_id, field_id, field_type,
             format_string=None, timezone=None,
             bigquery_conn_id='bigquery_default', delegate_to=None,
             *args, **kwargs):
    """Prepare the SQL template parameters for querying a table's MAX(field)."""
    # Build the formatted MAX() expression once; it is interpolated into the
    # SQL template together with the fully qualified table reference.
    max_field = bigquery_singlevalue_formatter(
        aggregation_function='MAX',
        field_id=field_id,
        field_type=field_type,
        format_string=format_string,
        timezone=timezone,
    )
    self.sql_template_params = {
        'project_id': project_id,
        'dataset_id': dataset_id,
        'table_id': table_id,
        'max_field': max_field,
    }
    self.bigquery_conn_id = bigquery_conn_id
    self.delegate_to = delegate_to
    BaseOperator.__init__(self, *args, **kwargs)
def __init__(self, gcs_file_path, bing_maps_conn_id='bing_maps_default', *args, **kwargs):
    """Record the GCS file path and Bing Maps connection id."""
    self.gcs_file_path = gcs_file_path
    self.bing_maps_conn_id = bing_maps_conn_id
    # ``self.operation`` is expected to be provided as a class attribute by
    # the concrete subclass — it doubles as the Airflow task id.
    BaseOperator.__init__(self, task_id=self.operation, *args, **kwargs)
def __init__(self, phase, step, given_now=None, *args, **kwargs):
    """Derive the task id from step and phase; keep an optional clock override."""
    derived_task_id = '{}_{}'.format(step, phase)
    BaseOperator.__init__(self, task_id=derived_task_id, *args, **kwargs)
    self.phase = phase
    self.step = step
    # When set, used in place of the current time — TODO confirm against execute().
    self.given_now = given_now
def __init__(self, *args, **kwargs):
    """Stand-in initializer for operators rebuilt from serialized DAGs."""
    BaseOperator.__init__(self, *args, **kwargs)
    # The UI picks its rendering from ``task_type``; deserialized DAGs only
    # ever hand it a BaseOperator, so advertise exactly that.
    self._task_type = 'BaseOperator'
    # Copy the class-level presentation attributes onto this instance so
    # they travel with the object rather than the class.
    self.ui_color = BaseOperator.ui_color
    self.ui_fgcolor = BaseOperator.ui_fgcolor
    self.template_fields = BaseOperator.template_fields
def __init__(
    self,
    *,
    task_id: str,
    spell_owner: Optional[str] = None,
    spell_conn_id: Optional[str] = None,
    **kwargs,
):
    """Initialize both bases and stash the remaining kwargs for the Spell run.

    NOTE: ``kwargs`` are deliberately NOT forwarded to ``BaseOperator`` —
    they are kept on the instance for later use via ``self.kwargs``.
    """
    BaseOperator.__init__(self, task_id=task_id)
    SpellClient.__init__(self, spell_conn_id=spell_conn_id, spell_owner=spell_owner)
    self.kwargs = kwargs
def __init__(self, include_timestamp: bool = True,
             postgres_conn_id: str = "postgres_ods_dev",
             database: str = "ODS",
             data_key: str = "entity_snapshots",
             chunksize: int = 1000,
             *args, **kwargs):
    """Record the Postgres target settings for writing entity snapshots."""
    BaseOperator.__init__(self, *args, **kwargs)
    self.postgres_conn_id = postgres_conn_id
    self.database = database
    self.data_key = data_key
    # Rows are written in batches of this size.
    self.chunksize = chunksize
    self.include_timestamp = include_timestamp
def __init__(self, project_id, dataset_id, table_id,
             bigquery_conn_id='bigquery_default', *args, **kwargs):
    """Drop-table operator; the task id is derived from the dataset/table pair."""
    self.project_id = project_id
    self.dataset_id = dataset_id
    self.table_id = table_id
    self.bigquery_conn_id = bigquery_conn_id
    # Assigned first so it can be handed to BaseOperator as the task id.
    self.task_id = 'drop-table-{}.{}'.format(dataset_id, table_id)
    BaseOperator.__init__(self, task_id=self.task_id, *args, **kwargs)
def __init__(self, lookups=None, *args, **kwargs):
    """
    Operator to make accessible xcom params in `self.dag_params`.

    The `lookups` param maps how the xcom params are going to be stored.
    It uses the keys as param keys in self.dag_params and the values as
    the task ids. I.E::

        t = XComParams(lookups={'var1': 'taskid_1'})

    In function `execute` the `self.dag_params` var will be populated as
    a dictionary:

    >>> t.dag_params
    {'var1': 'what taskid_id task returned in `execute` function'}

    :param lookups: mapping of ``dag_params`` key -> task id to pull the
        XCom value from; defaults to an empty mapping
    """
    logger.info(kwargs)
    # Bug fix: ``lookups={}`` was a mutable default argument, shared across
    # every instance created without an explicit value; a fresh dict is now
    # created per instance.
    self.lookups = lookups if lookups is not None else {}
    BaseOperator.__init__(self, *args, **kwargs)
def __init__(self, source_folder, destination_folder, *args, **kwargs):
    """Remember the source and destination folders this operator works on."""
    BaseOperator.__init__(self, *args, **kwargs)
    self.source_folder = source_folder
    self.destination_folder = destination_folder