def update_task(self, environ, start_response, task_id):
    """ Update data configuration for an existing task.

    Responses: 400 invalid id/payload, 404 unknown task, 409 task locked
    or assignee changed, 500 stored data unreadable, 200 with new data.
    """
    if validators.validate_task_id(task_id) != task_id:
        self.log.error("Failed to pass validation: '%s'" % task_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_TASK_INVALID_ID)
    if not os.path.isdir(self._task_dir(task_id)):
        self.log.debug("Task not found '%s'" % task_id)
        return response.send_error(start_response, 404,
                                   constants.ERROR_TASK_NOT_FOUND)
    try:
        new_task_description = json.loads(request.read_request_data(environ))
    except ValueError:
        self.log.error("Decoding task data failed '%s'" % task_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_TASK_INVALID_PAYLOAD)
    if self.zknodes:
        lock = distlocks.ZooKeeperLock(self.zknodes,
                                       'task-lock-%s' % task_id)
        # fix: truth test instead of '!= True' comparison
        if not lock.try_lock():
            lock.close()
            self.log.debug("Task locked '%s'" % task_id)
            return response.send_response(start_response, 409,
                                          constants.ERROR_TASK_LOCKED)
    else:
        lock = None
    try:
        old_task_description = self._load_task_config(task_id)
    except Exception:
        # fix: bare 'except:' would also swallow SystemExit/KeyboardInterrupt
        if lock:
            lock.unlock()
            lock.close()
        self.log.error("Failed to read task data '%s'" % task_id)
        return response.send_response(start_response, 500)
    # fix: 'in' instead of deprecated dict.has_key()
    if ('assignee' in old_task_description and
            'assignee' in new_task_description and
            old_task_description['assignee'] !=
            new_task_description['assignee']):
        if lock:
            lock.unlock()
            lock.close()
        self.log.debug("Task assignment conflict '%s'" % task_id)
        return response.send_response(start_response, 409,
                                      constants.ERROR_TASK_WRONG_ACTOR)
    self._save_task_config(task_id, new_task_description)
    if lock:
        lock.unlock()
        lock.close()
    return response.send_response(start_response, 200,
                                  json.dumps({'id': task_id,
                                              'data': new_task_description}))
def update_workspace(self, environ, start_response, job_id, build_id):
    """ Store workspace archive.

    Streams the request body to the build's workspace file in 128 KiB
    chunks. Responses: 400 invalid ids, 404 unknown build, 500 write
    failure, 204 on success.
    """
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure, '%s'", job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        self.log.error("Build_id validation failure, '%s'", build_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    if not os.path.isdir(self._build_dir(job_id, build_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_BUILD_NOT_FOUND)
    ifh, data_len = request.get_request_data_handle_and_length(environ)
    try:
        ofh = open(self._build_workspace_file(job_id, build_id), 'wb')
    except IOError:
        return response.send_error(start_response, 500,
                                   constants.ERROR_BUILD_WRITE_FAILED)
    try:
        try:
            while data_len > 0:
                data = ifh.read(min(data_len, 1024 * 128))
                if not data:
                    # fix: a truncated request body used to spin forever
                    # because an empty read never decremented data_len
                    raise IOError('premature end of request data')
                ofh.write(data)
                data_len -= len(data)
        except IOError:
            return response.send_error(start_response, 500,
                                       constants.ERROR_BUILD_WRITE_FAILED)
    finally:
        ofh.close()  # fix: close the handle on every path
    return response.send_response(start_response, 204)
def handle_request(self, environ, start_response, method, job_id,
                   build_id, parts):
    """ Dispatch requests related to build artifacts.

    0 path parts: POST creates a new artifact. 1 part: GET/PUT/DELETE a
    specific artifact. 2 parts: GET only. Anything else is a 400.
    """
    # fix: 'is None' instead of '== None'
    if validators.validate_job_id(job_id) is None:
        self.log.error('Invalid job_id: %r' % job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        self.log.error("Build_id validation failure, '%s'", build_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    if len(parts) == 0:
        if method == 'POST':
            return self.create_or_update_artifact(environ, start_response,
                                                  job_id, build_id)
        else:
            return response.send_error(start_response, 400)
    elif len(parts) == 1:
        if method == 'GET':
            return self.get_artifact(environ, start_response, job_id,
                                     build_id, parts[0])
        elif method == 'PUT':
            return self.create_or_update_artifact(environ, start_response,
                                                  job_id, build_id, parts[0])
        elif method == 'DELETE':
            return self.delete_artifact(start_response, job_id, build_id,
                                        parts[0])
        else:
            return response.send_error(start_response, 400)
    elif len(parts) == 2:
        if method == 'GET':
            # NOTE(review): only parts[0] is used here; parts[1] is
            # ignored (presumably a filename suffix) -- confirm intent
            return self.get_artifact(environ, start_response, job_id,
                                     build_id, parts[0])
        else:
            return response.send_error(start_response, 400)
    return response.send_response(start_response, 400)
def get_build_state(self, start_response, job_id, build_id):
    """ Get the state of a specific build.

    Retries up to 10 times (0.1s apart) while the state file is being
    rewritten; returns 409 if it never becomes readable.
    """
    if validators.validate_job_id(job_id) != job_id:
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    if not os.path.isdir(self._build_dir(job_id, build_id)):
        # NOTE(review): reports ERROR_JOB_NOT_FOUND for a missing build
        # directory; kept for backward compatibility
        return response.send_error(start_response, 404,
                                   constants.ERROR_JOB_NOT_FOUND)
    build_data = None
    for _ in range(10):
        try:
            # fix: open() instead of Py2-only file(), and close the handle
            state_fh = open(self._build_state_file(job_id, build_id), 'rb')
            try:
                build_state = json.load(state_fh)
            finally:
                state_fh.close()
            build_data = json.dumps({'job_id': job_id,
                                     'build_number': build_id,
                                     'state': build_state})
            break
        except (OSError, IOError, ValueError):
            # fix: open() raises IOError on Python 2, which the original
            # except clause missed
            time.sleep(0.1)
    if not build_data:
        return response.send_error(start_response, 409,
                                   constants.ERROR_BUILD_LOCKED)
    return response.send_response(start_response, 200, build_data)
def handle_request(self, environ, start_response):
    """ Parse the top-level request path and dispatch to a sub-handler. """
    if 'PATH_INFO' not in environ:
        self.log.error('PATH_INFO not specified')
        return response.send_error(start_response, 500)
    method = environ['REQUEST_METHOD']
    segments = environ['PATH_INFO'].split('/')[1:]
    if not segments:
        self.log.error('Invalid PATH_INFO, (%r)', environ['PATH_INFO'])
        return response.send_error(start_response, 400)
    root = segments[0]
    # Bare root ("/") gets an empty 204 acknowledgement.
    if root == '':
        return response.send_response(start_response, 204)
    dispatch = {'jobs': self.jobs, 'tasks': self.tasks, 'ui': self.ui}
    handler = dispatch.get(root)
    if handler is not None:
        return handler.handle_request(environ, start_response, method,
                                      segments[1:])
    self.log.warn('Unknown command %r', root)
    return response.send_error(start_response, 400)
def delete_task(self, start_response, task_id):
    """ Delete the given task and all its data.

    Responses: 400 invalid id, 404 unknown (or concurrently removed)
    task, 204 on success.
    """
    if validators.validate_task_id(task_id) != task_id:
        return response.send_error(start_response, 400,
                                   constants.ERROR_TASK_INVALID_ID)
    if not os.path.isdir(self._task_dir(task_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_TASK_NOT_FOUND)
    try:
        shutil.rmtree(self._task_dir(task_id))
    except OSError:
        # fix: a concurrent delete used to escape as an unhandled OSError;
        # map it to 404 like delete_job/delete_build do
        return response.send_error(start_response, 404,
                                   constants.ERROR_TASK_NOT_FOUND)
    return response.send_response(start_response, 204)
def create_or_update_artifact(self, environ, start_response, job_id,
                              build_id, artifact_id_param=None):
    """ Create a new artifact (POST) or update an existing one (PUT).

    With artifact_id_param the artifact must already exist (404 if not);
    otherwise a fresh UUID is allocated. The request body is streamed to
    the artifact file in 128 KiB chunks. Returns 200 on update, 201 on
    create, 400 on validation/write failure.
    """
    if artifact_id_param is not None:
        artifact_id = artifact_id_param
        if validators.validate_artifact_id(artifact_id) != artifact_id:
            return response.send_error(start_response, 400,
                                       constants.ERROR_ARTIFACT_INVALID_ID)
        if not os.path.isfile(self._build_artifact_file(job_id, build_id,
                                                        artifact_id)):
            return response.send_error(start_response, 404,
                                       constants.ERROR_ARTIFACT_NOT_FOUND)
    else:
        artifact_id = str(uuid.uuid4())
    if not os.path.isdir(self._build_artifact_dir(job_id, build_id)):
        try:
            os.mkdir(self._build_artifact_dir(job_id, build_id))
        except OSError:
            # fix: os.mkdir raises OSError; the old 'except IOError'
            # never caught the failure
            return response.send_error(start_response, 400,
                                       constants.ERROR_ARTIFACT_WRITE_FAILED)
    ifh, data_len = request.get_request_data_handle_and_length(environ)
    try:
        ofh = open(self._build_artifact_file(job_id, build_id, artifact_id),
                   'wb')
    except IOError:
        return response.send_error(start_response, 400,
                                   constants.ERROR_ARTIFACT_WRITE_FAILED)
    try:
        try:
            while data_len > 0:
                data = ifh.read(min(data_len, 1024 * 128))
                if not data:
                    # fix: empty read (truncated body) used to loop forever
                    raise IOError('premature end of request data')
                ofh.write(data)
                data_len -= len(data)
        except IOError:
            return response.send_error(start_response, 400,
                                       constants.ERROR_ARTIFACT_WRITE_FAILED)
    finally:
        ofh.close()  # fix: close the handle on every path
    return response.send_response(
        start_response, 200 if artifact_id_param else 201,
        json.dumps({'job_id': job_id,
                    'build_number': int(build_id),
                    'artifact_id': artifact_id}))
def get_tasks(self, start_response):
    """ Return the ids of all open tasks. """
    # A task is "open" iff a directory for it exists under the data dir.
    open_task_ids = [entry for entry in os.listdir(self._data_dir())
                     if os.path.isdir(self._task_dir(entry))]
    return response.send_response(start_response, 200,
                                  json.dumps({'tasks': open_task_ids}))
def get_jobs(self, start_response):
    """ Return the ids of all known jobs. """
    # Only directory entries count as jobs; stray files are skipped.
    job_list = [entry for entry in os.listdir(self._data_dir())
                if os.path.isdir(self._job_dir(entry))]
    return response.send_response(start_response, 200,
                                  json.dumps({'jobs': job_list}))
def get_tasks(self, start_response):
    """ Return the ids of all open tasks. """
    listing = {'tasks': []}
    for entry in os.listdir(self._data_dir()):
        # Skip anything that is not a task directory.
        if os.path.isdir(self._task_dir(entry)):
            listing['tasks'].append(entry)
    return response.send_response(start_response, 200, json.dumps(listing))
def get_builds(self, start_response, job_id):
    """ Return all build numbers for a specific job. """
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure")
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    build_numbers = self._get_build_numbers(job_id)
    reply = {'builds': build_numbers}
    # last_build_number is present only when at least one build exists.
    if build_numbers:
        reply['last_build_number'] = max(build_numbers)
    return response.send_response(start_response, 200, json.dumps(reply))
def get_builds(self, start_response, job_id):
    """ Return all build numbers for a specific job. """
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure")
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    known_builds = self._get_build_numbers(job_id)
    payload = {'builds': known_builds}
    if known_builds:
        # Highest build number doubles as the "latest build" marker.
        payload['last_build_number'] = max(known_builds)
    return response.send_response(start_response, 200, json.dumps(payload))
def create_new_task(self, environ, start_response):
    """ Post a new task; returns 201 with the allocated task id. """
    try:
        task_description = json.loads(request.read_request_data(environ))
    except ValueError:
        self.log.debug('Failed to load task data')
        return response.send_error(start_response, 400,
                                   constants.ERROR_TASK_INVALID_PAYLOAD)
    # Allocate a fresh id and persist the task under its own directory.
    new_task_id = str(uuid.uuid4())
    os.mkdir(self._task_dir(new_task_id))
    self._save_task_config(new_task_id, task_description)
    reply = {'id': new_task_id, 'data': task_description}
    return response.send_response(start_response, 201, json.dumps(reply))
def delete_artifact(self, start_response, job_id, build_id, artifact_id):
    """ Delete an artifact.

    Responses: 400 invalid id or delete failure, 404 unknown artifact,
    204 on success.
    """
    if validators.validate_artifact_id(artifact_id) != artifact_id:
        return response.send_error(start_response, 400,
                                   constants.ERROR_ARTIFACT_INVALID_ID)
    if not os.path.isfile(self._build_artifact_file(job_id, build_id,
                                                    artifact_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_ARTIFACT_NOT_FOUND)
    try:
        os.unlink(self._build_artifact_file(job_id, build_id, artifact_id))
    except OSError:
        # fix: os.unlink raises OSError; the old 'except IOError' meant
        # a failed delete escaped as an unhandled exception
        return response.send_error(start_response, 400,
                                   constants.ERROR_ARTIFACT_WRITE_FAILED)
    return response.send_response(start_response, 204)
def update_task(self, environ, start_response, task_id):
    """ Update data configuration for an existing task.

    Responses: 400 invalid id/payload, 404 unknown task, 409 task locked
    or assignee changed, 500 stored data unreadable, 200 with new data.
    """
    if validators.validate_task_id(task_id) != task_id:
        self.log.error("Failed to pass validation: '%s'" % task_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_TASK_INVALID_ID)
    if not os.path.isdir(self._task_dir(task_id)):
        self.log.debug("Task not found '%s'" % task_id)
        return response.send_error(start_response, 404,
                                   constants.ERROR_TASK_NOT_FOUND)
    try:
        new_task_description = json.loads(request.read_request_data(environ))
    except ValueError:
        self.log.error("Decoding task data failed '%s'" % task_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_TASK_INVALID_PAYLOAD)
    if self.zknodes:
        lock = distlocks.ZooKeeperLock(self.zknodes,
                                       'task-lock-%s' % task_id)
        # fix: truth test instead of '!= True' comparison
        if not lock.try_lock():
            lock.close()
            self.log.debug("Task locked '%s'" % task_id)
            return response.send_response(start_response, 409,
                                          constants.ERROR_TASK_LOCKED)
    else:
        lock = None
    try:
        old_task_description = self._load_task_config(task_id)
    except Exception:
        # fix: bare 'except:' would also swallow SystemExit/KeyboardInterrupt
        if lock:
            lock.unlock()
            lock.close()
        self.log.error("Failed to read task data '%s'" % task_id)
        return response.send_response(start_response, 500)
    # fix: 'in' instead of deprecated dict.has_key()
    if ('assignee' in old_task_description and
            'assignee' in new_task_description and
            old_task_description['assignee'] !=
            new_task_description['assignee']):
        if lock:
            lock.unlock()
            lock.close()
        self.log.debug("Task assignment conflict '%s'" % task_id)
        return response.send_response(start_response, 409,
                                      constants.ERROR_TASK_WRONG_ACTOR)
    self._save_task_config(task_id, new_task_description)
    if lock:
        lock.unlock()
        lock.close()
    return response.send_response(start_response, 200,
                                  json.dumps({'id': task_id,
                                              'data': new_task_description}))
def delete_job(self, start_response, job_id):
    """ Delete a job and everything stored under it. """
    if validators.validate_job_id(job_id) is None:
        self.log.debug('Invalid job_id: %r' % job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if not os.path.isdir(self._job_dir(job_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_JOB_NOT_FOUND)
    self.log.debug("delete job %s" % job_id)
    try:
        shutil.rmtree(self._job_dir(job_id))
    except OSError:
        # Directory vanished (or became unreadable) mid-delete.
        return response.send_error(start_response, 404,
                                   constants.ERROR_JOB_NOT_FOUND)
    return response.send_response(start_response, 204)
def send_file(cls, start_response, filename):
    """ Send a packaged UI resource, with a content type guessed from
    the filename suffix. """
    if resource_exists is None or not resource_exists('distci.frontend',
                                                      filename):
        return response.send_error(start_response, 404)
    # Map known suffixes to MIME types; anything else is opaque binary.
    content_type = "application/octet-stream"
    for suffix, mime_type in (('.js', "application/javascript"),
                              ('.css', "text/css"),
                              ('.html', "text/html")):
        if filename.endswith(suffix):
            content_type = mime_type
            break
    data = resource_string('distci.frontend', filename)
    return response.send_response(start_response, 200, data, content_type)
def create_or_update_job(self, environ, start_response, job_id_param=None):
    """ Create a new job (no job_id_param) or update an existing one.

    The payload must carry a 'job_id' matching job_id_param when given.
    Returns 200 on update, 201 on create, 400 on validation/lock
    failure, 500 on write failure.
    """
    try:
        job_config = json.loads(request.read_request_data(environ))
    except ValueError:
        self.log.debug('Failed to load job config')
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_PAYLOAD)
    job_id = job_config.get('job_id')
    if job_id is None:
        self.log.debug('Missing job_id')
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_PAYLOAD)
    if job_id_param and job_id_param != job_id:
        self.log.debug('Job ID mismatch: %r vs %r' % (job_id, job_id_param))
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_job_id(job_id) is None:
        self.log.debug('Invalid job_id: %r' % job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if self.zknodes:
        lock = distlocks.ZooKeeperLock(self.zknodes, 'job-lock-%s' % job_id)
        # fix: truth test instead of '!= True' comparison
        if not lock.try_lock():
            lock.close()
            self.log.debug("Job locked '%s'" % job_id)
            return response.send_error(start_response, 400,
                                       constants.ERROR_JOB_LOCKED)
    else:
        lock = None
    if job_id_param is None:
        try:
            os.mkdir(self._job_dir(job_id))
        except OSError:
            if lock:
                lock.unlock()
                lock.close()
            return response.send_error(start_response, 500,
                                       constants.ERROR_JOB_CONFIG_WRITE_FAILED)
    try:
        # fix: close the config file handle deterministically instead of
        # relying on garbage collection (file(...).write(...))
        config_fh = open(self._job_config_file(job_id), 'wb')
        try:
            config_fh.write(json.dumps(job_config))
        finally:
            config_fh.close()
    except IOError:
        self.log.debug('Failed to write job config, job_id %s' % job_id)
        if lock:
            lock.unlock()
            lock.close()
        return response.send_error(start_response, 500,
                                   constants.ERROR_JOB_CONFIG_WRITE_FAILED)
    if lock:
        lock.unlock()
        lock.close()
    return response.send_response(start_response,
                                  200 if job_id_param else 201,
                                  json.dumps({'job_id': job_id,
                                              'config': job_config}))
def get_task(self, start_response, task_id):
    """ Return information on a specific task.

    Retries up to 10 times (0.1s apart) while the task data is being
    rewritten; returns 409 if it never becomes readable.
    """
    if validators.validate_task_id(task_id) != task_id:
        return response.send_error(start_response, 400,
                                   constants.ERROR_TASK_INVALID_ID)
    if not os.path.isdir(self._task_dir(task_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_TASK_NOT_FOUND)
    task_data = None
    for _ in range(10):
        try:
            task_data = json.dumps(self._prepare_task_data(task_id))
            break
        except Exception:
            # fix: bare 'except:' would also swallow SystemExit and
            # KeyboardInterrupt; retry only on ordinary failures
            time.sleep(0.1)
    if not task_data:
        return response.send_error(start_response, 409,
                                   constants.ERROR_TASK_LOCKED)
    return response.send_response(start_response, 200, task_data)
def delete_artifact(self, start_response, job_id, build_id, artifact_id):
    """ Delete an artifact.

    Responses: 400 invalid id or delete failure, 404 unknown artifact,
    204 on success.
    """
    if validators.validate_artifact_id(artifact_id) != artifact_id:
        return response.send_error(start_response, 400,
                                   constants.ERROR_ARTIFACT_INVALID_ID)
    if not os.path.isfile(self._build_artifact_file(job_id, build_id,
                                                    artifact_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_ARTIFACT_NOT_FOUND)
    try:
        os.unlink(self._build_artifact_file(job_id, build_id, artifact_id))
    except OSError:
        # fix: os.unlink raises OSError; the old 'except IOError' meant
        # a failed delete escaped as an unhandled exception
        return response.send_error(start_response, 400,
                                   constants.ERROR_ARTIFACT_WRITE_FAILED)
    return response.send_response(start_response, 204)
def delete_build(self, start_response, job_id, build_id):
    """ Delete a specific build and all data stored under it. """
    if validators.validate_job_id(job_id) is None:
        self.log.debug('Invalid job_id: %r' % job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        self.log.error("Build_id validation failure, '%s'", build_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    build_path = self._build_dir(job_id, build_id)
    if not os.path.isdir(build_path):
        return response.send_error(start_response, 404,
                                   constants.ERROR_BUILD_NOT_FOUND)
    self.log.debug("delete build %s/%s" % (job_id, build_id))
    try:
        shutil.rmtree(build_path)
    except OSError:
        # Directory disappeared (or became unreadable) mid-delete.
        return response.send_error(start_response, 404,
                                   constants.ERROR_BUILD_NOT_FOUND)
    return response.send_response(start_response, 204)
def update_console_log(self, environ, start_response, job_id, build_id):
    """ Append request body content to the build's console log.

    Write failures are intentionally ignored (best-effort logging);
    the response is always 204 once validation passes.
    """
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure, '%s'", job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        self.log.error("Build_id validation failure, '%s'", build_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    if not os.path.isdir(self._build_dir(job_id, build_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_BUILD_NOT_FOUND)
    try:
        # fix: close the log handle deterministically instead of relying
        # on garbage collection of a temporary file object
        log_fh = open(self._console_log_file(job_id, build_id), 'ab')
        try:
            log_fh.write(request.read_request_data(environ))
        finally:
            log_fh.close()
    except IOError:
        # fix wrong copy-pasted comment: append is best-effort, a failed
        # write is silently dropped
        pass
    return response.send_response(start_response, 204)
def delete_workspace(self, start_response, job_id, build_id):
    """ Delete a build's workspace archive.

    Responses: 400 invalid ids, 404 missing archive, 500 delete failure,
    204 on success.
    """
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure, '%s'", job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        self.log.error("Build_id validation failure, '%s'", build_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    if not os.path.isfile(self._build_workspace_file(job_id, build_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_BUILD_NOT_FOUND)
    try:
        os.unlink(self._build_workspace_file(job_id, build_id))
    except OSError:
        # fix: os.unlink raises OSError; the old 'except IOError' meant
        # a failed delete escaped as an unhandled exception
        return response.send_error(start_response, 500,
                                   constants.ERROR_BUILD_WRITE_FAILED)
    return response.send_response(start_response, 204)
def get_job_config(self, start_response, job_id):
    """ Get the stored configuration of a specific job.

    Retries up to 10 times (0.1s apart) while the config is being
    rewritten; replies 409 if it never becomes readable.
    """
    if validators.validate_job_id(job_id) != job_id:
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if not os.path.isdir(self._job_dir(job_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_JOB_NOT_FOUND)
    self.log.debug("get job config %s" % job_id)
    job_data = None
    for _ in range(10):
        try:
            config = self._load_job_config(job_id)
        except (OSError, ValueError):
            time.sleep(0.1)
            continue
        job_data = json.dumps({'job_id': job_id, 'config': config})
        break
    if not job_data:
        return response.send_error(start_response, 409,
                                   constants.ERROR_JOB_LOCKED)
    return response.send_response(start_response, 200, job_data)
def get_tasks(self, start_response):
    """ Return full information on all open tasks.

    Each task's data is read with up to 10 retries (0.1s apart);
    tasks whose data never becomes readable are silently omitted.
    """
    result = {'tasks': []}
    for task_id in os.listdir(self._data_dir()):
        if not os.path.isdir(self._task_dir(task_id)):
            continue
        task_data = None
        for _ in range(10):
            try:
                task_data = self._prepare_task_data(task_id)
                break
            except Exception:
                # fix: bare 'except:' would also swallow SystemExit and
                # KeyboardInterrupt; retry only on ordinary failures
                time.sleep(0.1)
        if task_data:
            result['tasks'].append(task_data)
    return response.send_response(start_response, 200, json.dumps(result))
def get_console_log(self, start_response, job_id, build_id):
    """ Return the contents of the build's console log.

    A missing/unreadable log yields an empty body, not an error.
    """
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure, '%s'", job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        self.log.error("Build_id validation failure, '%s'", build_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    if not os.path.isdir(self._build_dir(job_id, build_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_BUILD_NOT_FOUND)
    console_log = ''
    try:
        # fix: close the handle deterministically instead of relying on
        # garbage collection of a temporary file object
        log_fh = open(self._console_log_file(job_id, build_id), 'rb')
        try:
            console_log = log_fh.read()
        finally:
            log_fh.close()
    except IOError:
        # can be ignored, we just return an empty log
        pass
    return response.send_response(start_response, 200, console_log,
                                  content_type="text/plain")
def create_new_task(self, environ, start_response):
    """ Post a new task; replies 201 with the allocated task id. """
    try:
        task_description = json.loads(request.read_request_data(environ))
    except ValueError:
        self.log.debug('Failed to load task data')
        return response.send_error(start_response, 400,
                                   constants.ERROR_TASK_INVALID_PAYLOAD)
    # Fresh UUID names both the task and its on-disk directory.
    allocated_id = str(uuid.uuid4())
    os.mkdir(self._task_dir(allocated_id))
    self._save_task_config(allocated_id, task_description)
    return response.send_response(start_response, 201,
                                  json.dumps({'id': allocated_id,
                                              'data': task_description}))
def get_build_state(self, start_response, job_id, build_id):
    """ Get the state of a specific build.

    Retries up to 10 times (0.1s apart) while the state file is being
    rewritten; returns 409 if it never becomes readable.
    """
    if validators.validate_job_id(job_id) != job_id:
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    if not os.path.isdir(self._build_dir(job_id, build_id)):
        # NOTE(review): reports ERROR_JOB_NOT_FOUND for a missing build
        # directory; kept for backward compatibility
        return response.send_error(start_response, 404,
                                   constants.ERROR_JOB_NOT_FOUND)
    build_data = None
    for _ in range(10):
        try:
            # fix: open() instead of Py2-only file(), and close the handle
            state_fh = open(self._build_state_file(job_id, build_id), 'rb')
            try:
                build_state = json.load(state_fh)
            finally:
                state_fh.close()
            build_data = json.dumps({'job_id': job_id,
                                     'build_number': build_id,
                                     'state': build_state})
            break
        except (OSError, IOError, ValueError):
            # fix: open() raises IOError on Python 2, which the original
            # except clause missed
            time.sleep(0.1)
    if not build_data:
        return response.send_error(start_response, 409,
                                   constants.ERROR_BUILD_LOCKED)
    return response.send_response(start_response, 200, build_data)
def create_or_update_artifact(self, environ, start_response, job_id,
                              build_id, artifact_id_param=None):
    """ Create a new artifact (POST) or update an existing one (PUT).

    With artifact_id_param the artifact must already exist (404 if not);
    otherwise a fresh UUID is allocated. The request body is streamed to
    the artifact file in 128 KiB chunks. Returns 200 on update, 201 on
    create, 400 on validation/write failure.
    """
    if artifact_id_param is not None:
        artifact_id = artifact_id_param
        if validators.validate_artifact_id(artifact_id) != artifact_id:
            return response.send_error(start_response, 400,
                                       constants.ERROR_ARTIFACT_INVALID_ID)
        if not os.path.isfile(self._build_artifact_file(job_id, build_id,
                                                        artifact_id)):
            return response.send_error(start_response, 404,
                                       constants.ERROR_ARTIFACT_NOT_FOUND)
    else:
        artifact_id = str(uuid.uuid4())
    if not os.path.isdir(self._build_artifact_dir(job_id, build_id)):
        try:
            os.mkdir(self._build_artifact_dir(job_id, build_id))
        except OSError:
            # fix: os.mkdir raises OSError; the old 'except IOError'
            # never caught the failure
            return response.send_error(start_response, 400,
                                       constants.ERROR_ARTIFACT_WRITE_FAILED)
    ifh, data_len = request.get_request_data_handle_and_length(environ)
    try:
        ofh = open(self._build_artifact_file(job_id, build_id, artifact_id),
                   'wb')
    except IOError:
        return response.send_error(start_response, 400,
                                   constants.ERROR_ARTIFACT_WRITE_FAILED)
    try:
        try:
            while data_len > 0:
                data = ifh.read(min(data_len, 1024 * 128))
                if not data:
                    # fix: empty read (truncated body) used to loop forever
                    raise IOError('premature end of request data')
                ofh.write(data)
                data_len -= len(data)
        except IOError:
            return response.send_error(start_response, 400,
                                       constants.ERROR_ARTIFACT_WRITE_FAILED)
    finally:
        ofh.close()  # fix: close the handle on every path
    return response.send_response(
        start_response, 200 if artifact_id_param else 201,
        json.dumps({'job_id': job_id,
                    'build_number': int(build_id),
                    'artifact_id': artifact_id}))
def update_workspace(self, environ, start_response, job_id, build_id):
    """ Store workspace archive.

    Streams the request body to the build's workspace file in 128 KiB
    chunks. Responses: 400 invalid ids, 404 unknown build, 500 write
    failure, 204 on success.
    """
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure, '%s'", job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        self.log.error("Build_id validation failure, '%s'", build_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    if not os.path.isdir(self._build_dir(job_id, build_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_BUILD_NOT_FOUND)
    ifh, data_len = request.get_request_data_handle_and_length(environ)
    try:
        ofh = open(self._build_workspace_file(job_id, build_id), 'wb')
    except IOError:
        return response.send_error(start_response, 500,
                                   constants.ERROR_BUILD_WRITE_FAILED)
    try:
        try:
            while data_len > 0:
                data = ifh.read(min(data_len, 1024 * 128))
                if not data:
                    # fix: a truncated request body used to spin forever
                    # because an empty read never decremented data_len
                    raise IOError('premature end of request data')
                ofh.write(data)
                data_len -= len(data)
        except IOError:
            return response.send_error(start_response, 500,
                                       constants.ERROR_BUILD_WRITE_FAILED)
    finally:
        ofh.close()  # fix: close the handle on every path
    return response.send_response(start_response, 204)
def update_build_state(self, environ, start_response, job_id, build_id):
    """ Update build state from the JSON request body.

    Responses: 400 invalid ids/payload, 404 unknown build, 500 write
    failure, 200 with the stored state on success.
    """
    try:
        build_state = json.loads(request.read_request_data(environ))
    except ValueError:
        self.log.error('Failed to load build state')
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_PAYLOAD)
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure, '%s'", job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        self.log.error("Build_id validation failure, '%s'", build_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    if not os.path.isdir(self._build_dir(job_id, build_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_BUILD_NOT_FOUND)
    try:
        # fix: close the state file deterministically instead of relying
        # on garbage collection (file(...).write(...))
        state_fh = open(self._build_state_file(job_id, build_id), 'wb')
        try:
            state_fh.write(json.dumps(build_state))
        finally:
            state_fh.close()
    except IOError:
        self.log.debug('Failed to write build state, job_id %s, build %s',
                       job_id, build_id)
        return response.send_error(start_response, 500,
                                   constants.ERROR_BUILD_WRITE_FAILED)
    return response.send_response(start_response, 200,
                                  json.dumps({'job_id': job_id,
                                              'build_number': int(build_id),
                                              'state': build_state}))
def update_build_state(self, environ, start_response, job_id, build_id):
    """ Update build state from the JSON request body.

    Responses: 400 invalid ids/payload, 404 unknown build, 500 write
    failure, 200 with the stored state on success.
    """
    try:
        build_state = json.loads(request.read_request_data(environ))
    except ValueError:
        self.log.error('Failed to load build state')
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_PAYLOAD)
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure, '%s'", job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if validators.validate_build_id(build_id) != build_id:
        self.log.error("Build_id validation failure, '%s'", build_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_BUILD_INVALID_ID)
    if not os.path.isdir(self._build_dir(job_id, build_id)):
        return response.send_error(start_response, 404,
                                   constants.ERROR_BUILD_NOT_FOUND)
    try:
        # fix: close the state file deterministically instead of relying
        # on garbage collection (file(...).write(...))
        state_fh = open(self._build_state_file(job_id, build_id), 'wb')
        try:
            state_fh.write(json.dumps(build_state))
        finally:
            state_fh.close()
    except IOError:
        self.log.debug('Failed to write build state, job_id %s, build %s',
                       job_id, build_id)
        return response.send_error(start_response, 500,
                                   constants.ERROR_BUILD_WRITE_FAILED)
    return response.send_response(start_response, 200,
                                  json.dumps({'job_id': job_id,
                                              'build_number': int(build_id),
                                              'state': build_state}))
def trigger_build(self, start_response, job_id):
    """ Trigger a new build.

    Allocates the next build number, writes the initial 'preparing'
    state, and (when task frontends are configured) posts a
    build-control task, retrying creation/update up to 10 times.
    Responses: 400 invalid id or job locked, 500 on directory/state/task
    failure, 201 with the new build info on success.
    """
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure, '%s'", job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if self.zknodes:
        lock = distlocks.ZooKeeperLock(self.zknodes, 'job-lock-%s' % job_id)
        # fix: truth test instead of '!= True' comparison
        if not lock.try_lock():
            lock.close()
            self.log.debug("Job locked '%s'" % job_id)
            return response.send_error(start_response, 400,
                                       constants.ERROR_JOB_LOCKED)
    else:
        lock = None
    build_ids = self._get_build_numbers(job_id)
    if build_ids:
        new_build_number = str(max(build_ids) + 1)
    else:
        new_build_number = "1"
    try:
        os.mkdir(self._build_dir(job_id, new_build_number))
    except OSError:
        if lock:
            lock.unlock()
            lock.close()
        self.log.error("Build directory creation failed")
        return response.send_error(start_response, 500,
                                   constants.ERROR_BUILD_CREATE_FAILED)
    if lock:
        lock.unlock()
        lock.close()
    build_state = {"status": "preparing"}
    try:
        # fix: close the state file deterministically instead of relying
        # on garbage collection (file(...).write(...))
        state_fh = open(self._build_state_file(job_id, new_build_number),
                        'wb')
        try:
            state_fh.write(json.dumps(build_state))
        finally:
            state_fh.close()
    except IOError:
        self.log.debug('Failed to write build state, job_id %s, build %s',
                       job_id, new_build_number)
        return response.send_error(start_response, 500,
                                   constants.ERROR_BUILD_WRITE_FAILED)
    if self.config.get('task_frontends'):
        task_id = None
        for _ in range(10):
            task_id = self.distci_client.tasks.create()
            if task_id is not None:
                break
        if task_id is None:
            self.log.error('Failed to create a build task')
            return response.send_error(
                start_response, 500,
                constants.ERROR_BUILD_TASK_CREATION_FAILED)
        task_description = {'capabilities': ['build_control_v1'],
                            'job_id': job_id,
                            'build_number': new_build_number,
                            'status': 'pending',
                            'id': task_id}
        task_details = None
        for _ in range(10):
            task_details = self.distci_client.tasks.update(task_id,
                                                           task_description)
            if task_details is not None:
                break
        if task_details is None:
            self.log.error("Build task creation failed")
            return response.send_error(
                start_response, 500,
                constants.ERROR_BUILD_TASK_CREATION_FAILED)
    else:
        self.log.warn('No task frontends configured, unable to trigger '
                      'build control task')
    return response.send_response(
        start_response, 201,
        json.dumps({'job_id': job_id,
                    'build_number': int(new_build_number),
                    'state': build_state}))
def trigger_build(self, start_response, job_id):
    """ Trigger a new build.

    Allocates the next build number, writes the initial 'preparing'
    state, and (when task frontends are configured) posts a
    build-control task, retrying creation/update up to 10 times.
    Responses: 400 invalid id or job locked, 500 on directory/state/task
    failure, 201 with the new build info on success.
    """
    if validators.validate_job_id(job_id) is None:
        self.log.error("Job_id validation failure, '%s'", job_id)
        return response.send_error(start_response, 400,
                                   constants.ERROR_JOB_INVALID_ID)
    if self.zknodes:
        lock = distlocks.ZooKeeperLock(self.zknodes, 'job-lock-%s' % job_id)
        # fix: truth test instead of '!= True' comparison
        if not lock.try_lock():
            lock.close()
            self.log.debug("Job locked '%s'" % job_id)
            return response.send_error(start_response, 400,
                                       constants.ERROR_JOB_LOCKED)
    else:
        lock = None
    build_ids = self._get_build_numbers(job_id)
    if build_ids:
        new_build_number = str(max(build_ids) + 1)
    else:
        new_build_number = "1"
    try:
        os.mkdir(self._build_dir(job_id, new_build_number))
    except OSError:
        if lock:
            lock.unlock()
            lock.close()
        self.log.error("Build directory creation failed")
        return response.send_error(start_response, 500,
                                   constants.ERROR_BUILD_CREATE_FAILED)
    if lock:
        lock.unlock()
        lock.close()
    build_state = {"status": "preparing"}
    try:
        # fix: close the state file deterministically instead of relying
        # on garbage collection (file(...).write(...))
        state_fh = open(self._build_state_file(job_id, new_build_number),
                        'wb')
        try:
            state_fh.write(json.dumps(build_state))
        finally:
            state_fh.close()
    except IOError:
        self.log.debug('Failed to write build state, job_id %s, build %s',
                       job_id, new_build_number)
        return response.send_error(start_response, 500,
                                   constants.ERROR_BUILD_WRITE_FAILED)
    if self.config.get('task_frontends'):
        task_id = None
        for _ in range(10):
            task_id = self.distci_client.tasks.create()
            if task_id is not None:
                break
        if task_id is None:
            self.log.error('Failed to create a build task')
            return response.send_error(
                start_response, 500,
                constants.ERROR_BUILD_TASK_CREATION_FAILED)
        task_description = {'capabilities': ['build_control_v1'],
                            'job_id': job_id,
                            'build_number': new_build_number,
                            'status': 'pending',
                            'id': task_id}
        task_details = None
        for _ in range(10):
            task_details = self.distci_client.tasks.update(task_id,
                                                           task_description)
            if task_details is not None:
                break
        if task_details is None:
            self.log.error("Build task creation failed")
            return response.send_error(
                start_response, 500,
                constants.ERROR_BUILD_TASK_CREATION_FAILED)
    else:
        self.log.warn('No task frontends configured, unable to trigger '
                      'build control task')
    return response.send_response(
        start_response, 201,
        json.dumps({'job_id': job_id,
                    'build_number': int(new_build_number),
                    'state': build_state}))