def post(self, job_id):
    """Manually starts a single job inside its pipeline.

    Args:
        job_id: Identifier of the job to run.

    Returns:
        The started job (marshalled by the framework).
    """
    job = Job.find(job_id)
    # Consistent with the delete handler: abort with a proper 404-style error
    # when the id is unknown, instead of crashing with AttributeError on
    # `job.pipeline` below (which would surface as a 500).
    abort_if_job_doesnt_exist(job, job_id)
    job.pipeline.start_single_job(job)
    tracker = insight.GAProvider()
    tracker.track_event(category='jobs', action='manual_run',
                        label=job.worker_class)
    return job
def post(self):
    """Creates a new pipeline from the parsed request arguments."""
    parsed = parser.parse_args()
    new_pipeline = Pipeline(name=parsed['name'])
    new_pipeline.assign_attributes(parsed)
    new_pipeline.save()
    # Relations (e.g. nested collections) are persisted after the base save.
    new_pipeline.save_relations(parsed)
    insight.GAProvider().track_event(category='pipelines', action='create')
    return new_pipeline, 201
def patch(self, pipeline_id):
    """Enables or disables scheduled execution for a pipeline."""
    target = Pipeline.find(pipeline_id)
    parsed = parser.parse_args()
    # NOTE(review): the flag arrives as a string; only the exact literal
    # 'True' enables scheduling — presumably what the client sends. Confirm
    # against the frontend before loosening this comparison.
    enable_schedule = parsed['run_on_schedule'] == 'True'
    target.update(run_on_schedule=enable_schedule)
    action = 'schedule' if enable_schedule else 'unschedule'
    insight.GAProvider().track_event(category='pipelines', action=action)
    return target
def delete(self, job_id):
    """Deletes a job unless its pipeline is currently active."""
    job = Job.find(job_id)
    abort_if_job_doesnt_exist(job, job_id)
    # Jobs belonging to an active (blocked) pipeline must not be removed.
    if job.pipeline.is_blocked():
        return {
            'message': 'Removing of job for active pipeline is unavailable'
        }, 422
    job.destroy()
    insight.GAProvider().track_event(category='jobs', action='delete')
    return {}, 204
def get(self):
    """Finds and enqueues pipelines scheduled to be executed now."""
    for candidate in Pipeline.where(run_on_schedule=True).all():
        logging.info('Checking schedules for pipeline %s', candidate.name)
        for schedule in candidate.schedules:
            logging.info('Checking schedule with cron string %s',
                         schedule.cron)
            if not self._its_time(schedule.cron):
                continue
            logging.info('Trying to start pipeline %s', candidate.name)
            candidate.start()
            insight.GAProvider().track_event(category='pipelines',
                                             action='scheduled_run')
            # One start per pipeline per poll: stop checking its
            # remaining schedules.
            break
    return 'OK', 200
def post(self):
    """Imports a pipeline from an uploaded JSON file."""
    insight.GAProvider().track_event(category='pipelines', action='import')
    upload = import_parser.parse_args()['upload_file']
    if not upload:
        # No file supplied: respond with an empty payload (default 200),
        # matching the original fallthrough behavior.
        return {}
    payload = json.loads(upload.read())
    imported = Pipeline(name=payload['name'])
    imported.save()
    imported.import_data(payload)
    return imported, 201
def post(self):
    """Creates a job attached to an existing pipeline."""
    parsed = parser.parse_args()
    owner = Pipeline.find(parsed['pipeline_id'])
    # New jobs cannot be added while the pipeline is active (blocked).
    if owner.is_blocked():
        return {
            'message': 'Creating new jobs for active pipeline is unavailable'
        }, 422
    new_job = Job(parsed['name'], parsed['worker_class'],
                  parsed['pipeline_id'])
    new_job.assign_attributes(parsed)
    new_job.save()
    new_job.save_relations(parsed)
    insight.GAProvider().track_event(category='jobs', action='create',
                                     label=parsed['worker_class'])
    return new_job, 201
def get(self, pipeline_id):
    """Exports a pipeline definition as a downloadable JSON attachment."""
    insight.GAProvider().track_event(category='pipelines', action='export')
    pipeline = Pipeline.find(pipeline_id)
    data = {
        'name': pipeline.name,
        'jobs': self.__get_jobs__(pipeline),
        'params': [
            {'name': param.name, 'value': param.value, 'type': param.type}
            for param in pipeline.params
        ],
        'schedules': [
            {'cron': schedule.cron} for schedule in pipeline.schedules
        ],
    }
    # Timestamped filename, e.g. "mypipeline-20240101120000.json".
    stamp = datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y%m%d%H%M%S')
    filename = '{}-{}.json'.format(pipeline.name.lower(), stamp)
    headers = {
        'Access-Control-Expose-Headers': 'Filename',
        'Content-Disposition': 'attachment; filename=' + filename,
        'Filename': filename,
        'Content-type': 'text/json',
    }
    return data, 200, headers
def post(self, pipeline_id):
    """Manually stops a pipeline."""
    target = Pipeline.find(pipeline_id)
    target.stop()
    insight.GAProvider().track_event(category='pipelines',
                                     action='manual_stop')
    return target
def get(self):
    """Returns every pipeline."""
    insight.GAProvider().track_event(category='pipelines', action='list')
    return Pipeline.all()