def azkaban_mongo_job():
    """Build the two Mongo data-upload Azkaban projects.

    The original version constructed both projects and then discarded them,
    so the function had no observable effect.  They are now returned so a
    caller can actually upload or inspect them.

    Returns:
        tuple: ``(stats_project, status_project)`` — the configured
        ``Project`` instances for 'LeadDataStats' and 'MongoDataUpload'.
    """
    stats_project = Project('LeadDataStats')
    stats_project.add_job(
        'UploadDataStats',
        Job(
            {
                'type': 'command',
                'command': 'python /home/msingh/Documents/'
                           'PycharmProjects/AzkabanTest/mongo/MongoDataPush.py',
            },
            # NOTE(review): depends on a 'MongoStart' job that is never added
            # in this function — presumably defined elsewhere; confirm.
            {'dependencies': 'MongoStart'},
        ),
    )
    status_project = Project('MongoDataUpload')
    status_project.add_job(
        'UploadDataStatus',
        Job(
            {
                'type': 'command',
                'command': 'echo "Data successfully uploaded"',
            },
            {'dependencies': 'UploadDataStats'},
        ),
    )
    return stats_project, status_project
def build_project(project_name, global_props, project_props, jobs, files, version):
    """Assemble an Azkaban ``Project`` from declarative pieces.

    Args:
        project_name: Name of the Azkaban project.
        global_props: Base properties shared across projects.  A copy is
            taken, so the caller's dict is never mutated.
        project_props: Project-specific properties merged over the globals.
        jobs: Mapping of job name -> job-definition dict.
        files: Iterable of ``(source_path, target_path)`` pairs to attach.
        version: Version string; logged and passed to ``Project``.

    Returns:
        The fully configured ``Project`` instance.
    """
    logger.info("Building workflow %s, version: %s.", project_name, version)
    project = Project(project_name, root=os.curdir, version=version)
    # Copy before merging: assigning global_props directly and then calling
    # .update() on it would mutate the caller's shared dict, leaking
    # project-specific keys into every subsequent build.
    project.properties = dict(global_props)
    project.properties.update(project_props)
    for job_name, job_definition in jobs.items():
        project.add_job(job_name, Job(job_definition))
    # 'src_path' instead of 'file' to avoid shadowing the builtin.
    for src_path, target_path in files:
        project.add_file(src_path, target_path)
    return project
#!/usr/bin/env python # encoding: utf-8 """Azkaban sample project configuration script. Let us assume we have a flow with pig scripts to run, which share many options. This example shows a way to concisely build the project. """ from azkaban import PigJob, Project from getpass import getuser PROJECT = Project('azkabancli_sample', root=__file__) # default options for all jobs DEFAULTS = { 'user.to.proxy': getuser(), 'param': { 'input_root': 'sample_dir/', 'n_reducers': 20, }, 'jvm.args.mapred': { 'max.split.size': 2684354560, 'min.split.size': 2684354560, }, } # list of pig job options OPTIONS = [ { 'pig.script': 'first.pig'
This example shows how to simply define a project with two configurations: production and test, without any job duplication. """ from azkaban import Job, Project from getpass import getuser # Production project # ------------------ # # This project is configured to run in a production environment (e.g. using a # headless user with permissions to write to a specific directory). PROJECT = Project('azkabancli_sample', root=__file__) PROJECT.properties = { 'user.to.proxy': 'production_user', 'hdfs.root': '/jobs/sample/' } # dictionary of jobs, keyed by job name JOBS = { 'gather_data': Job({ 'type': 'hadoopJava', 'job.class': 'sample.GatherData', 'path.output': '${hdfs.root}data.avro', # note the property use here }), # ...
# Self-referential example: the project archive ships this very script,
# plus one job whose only action is to print the script back out.
from azkaban import Job, Project

project = Project('foo')

# Bundle the configuration script itself into the uploaded archive.
project.add_file('./jobs.py', 'jobs.py')

# Single command-type job that cats the bundled script.
cat_job = Job({'type': 'command', 'command': 'cat jobs.py'})
project.add_job('bar', cat_job)
#!/usr/bin/env python # encoding: utf-8 """ Azkaban CLI syntax definition of the `basic_flow` project """ from azkaban import Job, Project PROJECT = Project('azkaban_examples') JOBS = { # `basic_flow` example 'basic_step_1.cmd': Job({ 'type': 'command', 'command': 'echo "job: basic_step_1.cmd"' }), 'basic_step_2.cmd': Job({ 'type': 'command', 'command': 'echo "job: basic_step_2.cmd"', 'dependencies': 'basic_step_1.cmd' }), 'basic_step_3.cmd': Job({ 'type': 'command', 'command': 'echo "job: basic_step_3.cmd"', 'dependencies': 'basic_step_2.cmd' }), 'basic_step_4.cmd': Job({
#!/usr/bin/env python # encoding: utf-8 """ Azkaban example projects configuration script. • Azkaban CLI syntax definition to configure all examples in this project """ from azkaban import Job, Project PROJECT = Project('azkaban_examples', root=__file__) # Project level properties declared here are visible to all jobs. PROJECT.properties = {'project_1': 'project-val1'} JOBS = { # `basic_flow` example 'basic_step_1.cmd': Job({ 'type': 'command', 'command': 'echo "job: basic_step_1.cmd"' }), 'basic_step_2.cmd': Job({ 'type': 'command', 'command': 'echo "job: basic_step_2.cmd"', 'dependencies': 'basic_step_1.cmd' }), 'basic_step_3.cmd': Job({ 'type': 'command', 'command': 'echo "job: basic_step_3.cmd"', 'dependencies': 'basic_step_2.cmd'