def populate(task): from bkr.server.tasks import Tasks controller = Tasks() filename = os.path.join(controller.task_dir, task.rpm) if not os.path.exists(filename): print 'Skipping missing %s' % filename return raw_taskinfo = controller.read_taskinfo(filename) tinfo = testinfo.parse_string(raw_taskinfo['desc'], raise_errors=False) if tinfo.owner: task.owner = tinfo.owner.decode('utf8') if tinfo.priority: task.priority = tinfo.priority.decode('utf8') task.destructive = tinfo.destructive
def __init__(self, remote):
    """Prepare a sync helper pointed at the hub URL *remote*.

    Performs sanity checks first (logging, task-directory permissions),
    then builds the XML-RPC proxy and local task-library state.
    """
    self.task_dir = turbogears.config.get("basepath.rpms", "/var/www/beaker/rpms")
    # Logging first so _check_perms can report failures; then make sure
    # we are actually allowed to write into the task directory.
    self._setup_logging()
    self._check_perms()
    # Core attributes
    self.remote = remote
    self.proxy = {'remote': self._get_server_proxy(self.remote)}
    self.tasks_added = []   # RPM filenames fetched during this run
    self.t_downloaded = 0   # running count of downloads
    self.tasks = Tasks()
    # Load configuration data
    load_config()
def setUp(self):
    """Create three tasks (two excluding an arch, one excluding an osmajor),
    run them all in one completed job, and collect the resulting recipe
    tasks into self.recipe_tasks for the tests to filter against.
    """
    with session.begin():
        self.arch_one = u'i386'
        self.osmajor_one = u'testosmajor'
        self.task_one = data_setup.create_task(
            name=u'/a/a/a', exclude_arches=[self.arch_one])
        self.task_two = data_setup.create_task(
            name=u'/a/a/b', exclude_arches=[self.arch_one])
        self.task_three = data_setup.create_task(
            name=u'/a/a/c', exclude_osmajors=[self.osmajor_one])
        data_setup.create_completed_job(
            task_list=[self.task_one, self.task_two, self.task_three])
        # Look up the recipe tasks for each created task.  (The original
        # instantiated an unused Tasks() controller whose binding was then
        # clobbered by the comprehension variable, and shadowed the builtin
        # `id`; both removed.)
        self.recipe_tasks = []
        for task in [self.task_one, self.task_two, self.task_three]:
            self.recipe_tasks.extend(
                self.get_task_query({'task_id': task.id}, False).all())
class TaskLibrarySync:
    """Synchronise the local Beaker task library (RPM files in the task
    directory plus their database rows) with a remote Beaker hub.

    Typical flow: construct with the remote hub URL, compute new/common
    task name lists with get_tasks(), then call tasks_add() to download
    RPMs and register them in the local database.
    """

    def __init__(self, remote):
        """Set up logging, permission checks, the XML-RPC proxy and local
        task-library state for syncing from the hub URL *remote*."""
        # Setup, sanity checks: logging first so _check_perms can report.
        self.task_dir = turbogears.config.get("basepath.rpms", "/var/www/beaker/rpms")
        self._setup_logging()
        self._check_perms()
        # Initialize core attributes
        self.remote = remote
        remote_proxy = self._get_server_proxy(self.remote)
        self.proxy = {'remote': remote_proxy, }
        self.tasks_added = []   # RPM filenames downloaded this run
        self.t_downloaded = 0   # count of downloaded RPMs
        self.tasks = Tasks()    # local controller, used for direct DB access
        # load configuration data
        load_config()

    def _setup_logging(self):
        """Attach a timestamped stdout handler to the root logger and keep
        a reference to it in self.logger."""
        formatter = logging.Formatter('%(asctime)s - %(message)s')
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(formatter)
        self.logger = logging.getLogger("")
        self.logger.addHandler(stdout_handler)

    def _check_perms(self):
        """Exit (-1) unless the effective uid owns self.task_dir, since the
        sync writes RPMs into that directory."""
        # See if the euid is the same as that of self.task_dir
        task_dir_uid = os.stat(self.task_dir).st_uid
        if os.geteuid() != task_dir_uid:
            self.logger.critical('You should run this script as user: %s' %
                                 pwd.getpwuid(task_dir_uid).pw_name)
            sys.exit(-1)

    def _get_server_proxy(self, server):
        """Return a kobo HubProxy configured for the hub URL *server*."""
        kobo_conf = {}
        kobo_conf['HUB_URL'] = server
        hub = HubProxy(kobo_conf)
        return hub

    def get_tasks(self, server):
        """Return the names of all valid tasks known to *server*.

        'local' reads the database directly; any other value is used as a
        key into self.proxy and queried over XML-RPC.
        """
        if server == 'local':
            tasks = self.tasks.filter({'valid': 1})
        else:
            tasks = self.proxy[server].tasks.filter({'valid': 1})
        return [task['name'] for task in tasks]

    def _get_task_xml(self, server, task):
        """Return the XML description of *task* from *server*, or None if it
        could not be retrieved (errors are logged, not raised)."""
        # if local, directly read the database
        if server == 'local':
            try:
                self.logger.debug('Getting task XML for %s from local database' % task)
                return self.tasks.to_xml(task, False)
            except Exception:
                self.logger.error('Could not get task XML for %s from local Beaker DB. Continuing.' % task)
                return None
        try:
            self.logger.debug('Getting task XML for %s from %s' % (task, getattr(self, server)))
            return self.proxy[server].tasks.to_xml(task, False)
        except (xmlrpclib.Fault, xmlrpclib.ProtocolError) as e:
            # If something goes wrong with this task, for example:
            # https://bugzilla.redhat.com/show_bug.cgi?id=915549
            # we do our best to continue anyway...
            self.logger.error('Could not get task XML for %s from %s. Continuing.' % (task, server))
            # Bug fix: ProtocolError has no faultString attribute (only
            # Fault does), so fall back to the exception itself.
            self.logger.error('Error message: %s' % getattr(e, 'faultString', e))
            return None

    def update_db(self):
        """Register every RPM recorded in self.tasks_added in the Beaker
        database, removing superseded RPMs, then rebuild the task repo."""
        self.logger.info('Updating local Beaker database..')
        for task_rpm in self.tasks_added:
            self.logger.debug('Adding %s' % task_rpm)
            with open(os.path.join(self.task_dir, task_rpm)) as f:
                try:
                    session.begin()
                    task = self.tasks.process_taskinfo(self.tasks.read_taskinfo(f))
                    old_rpm = task.rpm
                    task.rpm = task_rpm
                    session.commit()
                except Exception:
                    session.rollback()
                    session.close()
                    self.logger.critical('Error adding task %s' % task_rpm)
                    # Bug fix: the RPM lives in self.task_dir; the original
                    # unlinked the bare filename relative to the CWD.
                    unlink_ignore(os.path.join(self.task_dir, task_rpm))
                else:
                    session.close()
                    self.logger.debug('Successfully added %s' % task.rpm)
                    # The new RPM replaced an older one; drop the old file.
                    if old_rpm:
                        unlink_ignore(os.path.join(self.task_dir, old_rpm))
        # Update task repo
        self.logger.info('Creating repodata..')
        Task.update_repo()
        return

    def _download(self, task_url):
        """Fetch *task_url* into self.task_dir (unless already present) and
        record the filename in self.tasks_added for update_db()."""
        task_rpm_name = os.path.split(task_url)[1]
        rpm_file = os.path.join(self.task_dir, task_rpm_name)
        if not os.path.exists(rpm_file):
            try:
                with atomically_replaced_file(rpm_file) as f:
                    siphon(urllib2.urlopen(task_url), f)
                    f.flush()
            except urllib2.HTTPError as err:
                # Presumably atomically_replaced_file discards the partial
                # file here — the original deliberately skipped the unlink.
                self.logger.critical(err)
            except Exception as err:
                self.logger.critical(err)
                unlink_ignore(rpm_file)
            else:
                self.logger.debug('Downloaded %s' % task_rpm_name)
                self.tasks_added.append(task_rpm_name)
                self.t_downloaded = self.t_downloaded + 1
        else:
            self.logger.debug('Already downloaded %s' % task_rpm_name)
            self.tasks_added.append(task_rpm_name)
            self.t_downloaded = self.t_downloaded + 1
        return

    def tasks_add(self, new_tasks, old_tasks):
        """Download the RPMs for *new_tasks* unconditionally and for
        *old_tasks* only when the remote version differs from the local one,
        then register everything via update_db()."""
        self.logger.info('Downloading %s new tasks' % len(new_tasks))
        # Get the task XMLs
        for task in new_tasks:
            task_xml = self._get_task_xml('remote', task)
            if task_xml is not None:
                task_url = find_task_version_url(task_xml)[1]
                self._download(task_url)
        # common tasks
        self.logger.info('Downloading %s common tasks' % len(old_tasks))
        # tasks which exist in both remote and local will be downloaded
        # only if remote_version != local_version
        for task in old_tasks:
            task_xml = self._get_task_xml('remote', task)
            # Bug fix: _get_task_xml returns None on failure; skip the task
            # instead of crashing in find_task_version_url.
            if task_xml is None:
                continue
            remote_task_version, remote_task_url = find_task_version_url(task_xml)
            task_xml = self._get_task_xml('local', task)
            if task_xml is None:
                continue
            local_task_version = find_task_version_url(task_xml)[0]
            if remote_task_version != local_task_version:
                self._download(remote_task_url)
        # Finished downloading tasks
        self.logger.info('Downloaded %d Tasks' % self.t_downloaded)
        # update Beaker's database
        self.update_db()
        return
def get_task_query(self, kw, filter_on_recipe_task_ids=True):
    """Run the Tasks search described by *kw* and return the matching
    tasks query.

    When *filter_on_recipe_task_ids* is true, *kw* is mutated in place:
    a 'recipe_task_id' entry restricting the search to this fixture's
    recipe tasks is added before searching.
    """
    controller = Tasks()
    if filter_on_recipe_task_ids:
        kw['recipe_task_id'] = [rt.id for rt in self.recipe_tasks]
    return controller._do_search({}, **kw)['tasks']