def run(self):
    """Run each active sync agent once.

    Iterates over all agents registered in the SyncManager, skipping
    inactive ones.  Each agent is invoked with the current UTC timestamp,
    a logger, a DB handle and this task.  On success the agent's work is
    acknowledged and committed; on any error the whole run is aborted.
    """
    sm = SyncManager.getDBInstance()
    logger = self.getLogger()

    # go over all the agents
    for agtName, agent in sm.getAllAgents().iteritems():
        # skip agents if they're not active
        if not agent.isActive():
            logger.warning("Agent '%s' is not active - skipping" % agtName)
            continue
        logger.info("Starting agent '%s'" % agtName)
        try:
            dbi = DBMgr.getInstance()
            # pass the current time and a logger
            result = agent.run(int_timestamp(nowutc()), logger=logger,
                               dbi=dbi, task=self)
        except Exception:
            # was a bare 'except:', which also swallowed SystemExit and
            # KeyboardInterrupt - narrowed to Exception
            logger.exception("Problem running agent '%s'" % agtName)
            return
        if result:
            logger.info("Acknowledged successful operation")
            agent.acknowledge()
            dbi.commit()
        else:
            logger.info("'Acknowledge' not done - no records?")
        logger.info("Agent '%s' finished" % agtName)
def _uploadBatch(self, batch):
    """ Uploads a batch to the server """
    log = self._logger
    log.debug('getting a batch')
    startedAt = time.time()

    # get a batch
    log.info('Generating metadata')
    payload = self._agent._getMetadata(batch, logger=log)
    log.info('Metadata ready ')
    metadataTime = time.time() - startedAt

    response = self._server.upload_marcxml(payload, "-ir").read()
    uploadTime = time.time() - (startedAt + metadataTime)

    if self._task:
        self._task.setOnRunningListSince(nowutc())

    log.debug('rec %s result: %s' % (batch, response))

    # anything not prefixed with '[INFO]' is treated as a failure
    if not response.startswith('[INFO]'):
        log.error('Records: %s output: %s' % (batch, response))
        raise Exception('upload failed')

    # the server reports the storage path as the last token of the reply
    storedPath = response.strip().split(' ')[-1]
    log.info('Batch of %d records stored in server (%s) '
             '[%f s %f s]' % (len(batch), storedPath,
                              metadataTime, uploadTime))
    return True
def run(self):
    """Run each active sync agent once.

    Iterates over all agents registered in the SyncManager, skipping
    inactive ones.  Each agent is invoked with the current UTC timestamp,
    a logger and a DB handle.  On success the agent's work is
    acknowledged and committed; on any error the whole run is aborted.
    """
    sm = SyncManager.getDBInstance()
    logger = self.getLogger()

    # go over all the agents
    for agtName, agent in sm.getAllAgents().iteritems():
        # skip agents if they're not active
        if not agent.isActive():
            logger.warning("Agent '%s' is not active - skipping" % agtName)
            continue
        logger.info("Starting agent '%s'" % agtName)
        try:
            dbi = DBMgr.getInstance()
            # pass the current time and a logger
            result = agent.run(int_timestamp(nowutc()), logger=logger,
                               dbi=dbi)
        except Exception:
            # was a bare 'except:', which also swallowed SystemExit and
            # KeyboardInterrupt - narrowed to Exception
            logger.exception("Problem running agent '%s'" % agtName)
            return
        if result:
            logger.info("Acknowledged successful operation")
            agent.acknowledge()
            dbi.commit()
        else:
            logger.info("'Acknowledge' not done - no records?")
        logger.info("Agent '%s' finished" % agtName)
def getVars(self):
    """Fill in the template variables (agents, track data, timestamps)."""
    wvars = WTemplated.getVars(self)
    manager = SyncManager.getDBInstance()

    trackData, lastAgentTS = self._calculateTrackData(manager)
    wvars["trackData"] = trackData
    wvars["lastAgentTS"] = lastAgentTS
    wvars["agents"] = manager.getAllAgents()
    wvars["currentTS"] = int_timestamp(nowutc())
    wvars["granularity"] = manager.getGranularity()

    return wvars
def getVars(self):
    """Fill in the template variables (agents, track data, timestamps)."""
    wvars = WTemplated.getVars(self)
    manager = SyncManager.getDBInstance()

    trackData, lastAgentTS = self._calculateTrackData(manager)
    wvars['trackData'] = trackData
    wvars['lastAgentTS'] = lastAgentTS
    wvars['agents'] = manager.getAllAgents()
    wvars['currentTS'] = int_timestamp(nowutc())
    wvars['granularity'] = manager.getGranularity()

    return wvars
def _updateStatistics(cls, cat, dbi, level=0, logger=None):
    """Recompute and store per-year statistics for a category tree.

    Aggregates event and contribution counts per year, plus the total
    number of resources, recursing into subcategories.  The result is
    stored on the category (``_statistics``) and committed.

    :param cat: category to process
    :param dbi: DB handle, used to commit after each category
    :param level: recursion depth (0 = top level)
    :param logger: optional logger; progress is only reported if set
    :return: the computed statistics dict
    """
    stats = cat.getStatistics()
    stats["events"] = {}
    stats["contributions"] = {}
    stats["resources"] = 0

    if cat.getSubCategoryList():
        for scat in cat.getSubCategoryList():
            # only at top level
            if level == 0 and logger:
                logger.info("Processing '%s' (%s)" %
                            (scat.getTitle(), scat.getId()))
            cls._updateStatistics(scat, dbi, level + 1, logger)
            # merge the subcategory's per-year counters into ours
            for year in scat._statistics["events"]:
                if year in stats["events"]:
                    stats["events"][year] += scat._statistics["events"][year]
                else:
                    stats["events"][year] = scat._statistics["events"][year]
            for year in scat._statistics["contributions"]:
                if year in stats["contributions"]:
                    stats["contributions"][year] += \
                        scat._statistics["contributions"][year]
                else:
                    stats["contributions"][year] = \
                        scat._statistics["contributions"][year]
            stats["resources"] += scat._statistics["resources"]
    elif cat.conferences:
        # leaf category: accumulate directly from its events
        for event in cat.conferences:
            cls._processEvent(dbi, event, stats)

    stats["updated"] = nowutc()
    cat._statistics = stats
    cat._p_changed = 1
    dbi.commit()

    # fix: guard against logger=None (previously raised AttributeError
    # at level 1 when no logger was supplied)
    if level == 1 and logger:
        logger.info("%s : %s" % (cat.getId(), cat._statistics))

    return stats
def _parseDateTime(cls, dateTime, allowNegativeOffset):
    """
    Accepted formats:
     * ISO 8601 subset - YYYY-MM-DD[THH:MM]
     * 'today', 'yesterday', 'tomorrow' and 'now'
     * days in the future/past: '[+/-]DdHHhMMm'

     'ctx' means that the date will change according to its function
     ('from' or 'to')
    """
    now = nowutc()

    # aliases resolve immediately
    if dateTime in cls._deltas:
        return ('ctx', now + cls._deltas[dateTime])
    if dateTime == 'now':
        return ('abs', now)
    if dateTime == 'today':
        return ('ctx', now)

    offsetMatch = re.match(
        r'^([+-])?(?:(\d{1,3})d)?(?:(\d{1,2})h)?(?:(\d{1,2})m)?$',
        dateTime)
    if offsetMatch:
        sign = -1 if offsetMatch.group(1) == '-' else 1
        if sign == -1 and not allowNegativeOffset:
            raise ArgumentParseError(
                'End date cannot be a negative offset')
        days, hours, minutes = [
            int(g) * sign if g is not None else 0
            for g in offsetMatch.groups()[1:]]
        if hours > 23 or minutes > 59:
            raise ArgumentParseError("Invalid time!")
        return ('ctx', timedelta(days=days, hours=hours, minutes=minutes))

    # iso 8601 subset
    try:
        return ('abs', datetime.strptime(dateTime, "%Y-%m-%dT%H:%M"))
    except ValueError:
        pass
    try:
        return ('ctx', datetime.strptime(dateTime, "%Y-%m-%d"))
    except ValueError:
        raise ArgumentParseError("Impossible to parse '%s'" % dateTime)
def _updateStatistics(cls, cat, dbi, level=0, logger=None):
    """Recompute and store per-year statistics for a category tree.

    Aggregates event and contribution counts per year, plus the total
    number of resources, recursing into subcategories.  The result is
    stored on the category (``_statistics``) and committed.

    :param cat: category to process
    :param dbi: DB handle, used to commit after each category
    :param level: recursion depth (0 = top level)
    :param logger: optional logger; progress is only reported if set
    :return: the computed statistics dict
    """
    stats = cat.getStatistics()
    stats["events"] = {}
    stats["contributions"] = {}
    stats["resources"] = 0

    if cat.getSubCategoryList():
        for scat in cat.getSubCategoryList():
            # only at top level
            if level == 0 and logger:
                logger.info("Processing '%s' (%s)" %
                            (scat.getTitle(), scat.getId()))
            cls._updateStatistics(scat, dbi, level + 1, logger)
            # merge the subcategory's per-year counters into ours
            for year in scat._statistics["events"]:
                if year in stats["events"]:
                    stats["events"][year] += scat._statistics["events"][year]
                else:
                    stats["events"][year] = scat._statistics["events"][year]
            for year in scat._statistics["contributions"]:
                if year in stats["contributions"]:
                    stats["contributions"][year] += \
                        scat._statistics["contributions"][year]
                else:
                    stats["contributions"][year] = \
                        scat._statistics["contributions"][year]
            stats["resources"] += scat._statistics["resources"]
    elif cat.conferences:
        # leaf category: accumulate directly from its events
        for event in cat.conferences:
            cls._processEvent(dbi, event, stats)

    stats["updated"] = nowutc()
    cat._statistics = stats
    cat._p_changed = 1
    dbi.commit()

    # fix: guard against logger=None (previously raised AttributeError
    # at level 1 when no logger was supplied)
    if level == 1 and logger:
        logger.info("%s : %s" % (cat.getId(), cat._statistics))

    return stats
def _parseDateTime(cls, dateTime, allowNegativeOffset):
    """
    Accepted formats:
     * ISO 8601 subset - YYYY-MM-DD[THH:MM]
     * 'today', 'yesterday', 'tomorrow' and 'now'
     * days in the future/past: '[+/-]DdHHhMMm'

     'ctx' means that the date will change according to its function
     ('from' or 'to')
    """
    # if it's an "alias", return immediately
    now = nowutc()
    if dateTime in cls._deltas:
        return ('ctx', now + cls._deltas[dateTime])
    elif dateTime == 'now':
        return ('abs', now)
    elif dateTime == 'today':
        return ('ctx', now)

    m = re.match(r'^([+-])?(?:(\d{1,3})d)?(?:(\d{1,2})h)?(?:(\d{1,2})m)?$',
                 dateTime)
    if m:
        mod = -1 if m.group(1) == '-' else 1
        if not allowNegativeOffset and mod == -1:
            raise ArgumentParseError('End date cannot be a negative offset')
        # fix: 'a is None' instead of 'a == None' (PEP 8; consistent
        # with the other copies of this parser in the file)
        atoms = list(0 if a is None else int(a) * mod
                     for a in m.groups()[1:])
        if atoms[1] > 23 or atoms[2] > 59:
            raise ArgumentParseError("Invalid time!")
        return ('ctx', timedelta(days=atoms[0], hours=atoms[1],
                                 minutes=atoms[2]))
    else:
        # iso 8601 subset
        try:
            return ('abs', datetime.strptime(dateTime, "%Y-%m-%dT%H:%M"))
        except ValueError:
            pass
        try:
            return ('ctx', datetime.strptime(dateTime, "%Y-%m-%d"))
        except ValueError:
            raise ArgumentParseError("Impossible to parse '%s'" % dateTime)
def requestFinished(self, obj, req):
    """Flush the per-request temporary action index into the MPT.

    Reads the actions collected during the request from the context
    manager and, for each tracked object that is not excluded, records
    an ActionWrapper in the SyncManager under the current timestamp.
    """
    sm = SyncManager.getDBInstance()
    cm = ContextManager.get('indico.ext.livesync:actions')
    cm_ids = ContextManager.get('indico.ext.livesync:ids')

    timestamp = int_timestamp(nowutc())

    # if the returned context is a dummy one, there's nothing to do
    if cm.__class__ == DummyDict:
        return

    # Insert the elements from the temporary index
    # into the permanent one (MPT)
    # fix: loop variable renamed - it used to shadow the 'obj' parameter
    for entry, actions in cm.iteritems():
        entryId = cm_ids[entry]
        for action in actions:
            Logger.get('ext.livesync').debug((entryId, action))
        # TODO: remove redundant items
        if not sm.objectExcluded(entry):
            sm.add(timestamp,
                   ActionWrapper(timestamp, entry, actions, entryId))
def _getAnswer(self):
    """Pre-activate the agent at the current (granularity-scaled) timestamp."""
    granularity = self._sm.getGranularity()
    currentTS = int_timestamp(nowutc())
    self._agent.preActivate(currentTS / granularity)
    return True
def _getAnswer(self):
    """Reset the failed task's start time to now and relaunch it."""
    startTime = date_time.nowutc()
    self._task.setStartOn(startTime)
    self._client.startFailedTask(self._task)
def _nextTS(self):
    """Wait one second, then return the current UTC time as an int timestamp."""
    time.sleep(1)
    currentTime = nowutc()
    return int_timestamp(currentTime)
class DataFetcher(object):
    """Base helper for fetching data over the HTTP API.

    Provides date/time argument parsing ('today', offsets, ISO 8601
    subset) and common sorting keys for event-like objects.
    """

    # relative-day aliases accepted by _parseDateTime
    _deltas = {'yesterday': timedelta(-1),
               'tomorrow': timedelta(1)}

    # sort-key extractors, selected by name (id/start/end/title)
    _sortingKeys = {'id': lambda x: x.getId(),
                    'start': lambda x: x.getStartDate(),
                    'end': lambda x: x.getEndDate(),
                    'title': lambda x: x.getTitle()}

    def __init__(self, aw, hook):
        # aw: access wrapper; hook: the API hook that created this
        # fetcher - NOTE(review): inferred from names, confirm at callers
        self._aw = aw
        self._hook = hook

    @classmethod
    def getAllowedFormats(cls):
        """Return all serialization formats the API can emit."""
        return Serializer.getAllFormats()

    @classmethod
    def _parseDateTime(cls, dateTime, allowNegativeOffset):
        """
        Accepted formats:
         * ISO 8601 subset - YYYY-MM-DD[THH:MM]
         * 'today', 'yesterday', 'tomorrow' and 'now'
         * days in the future/past: '[+/-]DdHHhMMm'

         'ctx' means that the date will change according to its function
         ('from' or 'to')
        """
        # if it's a an "alias", return immediately
        now = nowutc()
        if dateTime in cls._deltas:
            return ('ctx', now + cls._deltas[dateTime])
        elif dateTime == 'now':
            return ('abs', now)
        elif dateTime == 'today':
            return ('ctx', now)

        # relative offset: optional sign, then days/hours/minutes parts
        m = re.match(r'^([+-])?(?:(\d{1,3})d)?(?:(\d{1,2})h)?(?:(\d{1,2})m)?$',
                     dateTime)
        if m:
            mod = -1 if m.group(1) == '-' else 1
            if not allowNegativeOffset and mod == -1:
                raise ArgumentParseError(
                    'End date cannot be a negative offset')
            # missing groups count as 0; all atoms carry the sign
            atoms = list(0 if a is None else int(a) * mod
                         for a in m.groups()[1:])
            if atoms[1] > 23 or atoms[2] > 59:
                raise ArgumentParseError("Invalid time!")
            return ('ctx', timedelta(days=atoms[0], hours=atoms[1],
                                     minutes=atoms[2]))
        else:
            # iso 8601 subset
            try:
                return ('abs', datetime.strptime(dateTime, "%Y-%m-%dT%H:%M"))
            except ValueError:
                pass
            try:
                return ('ctx', datetime.strptime(dateTime, "%Y-%m-%d"))
            except ValueError:
                raise ArgumentParseError("Impossible to parse '%s'" % dateTime)

    @classmethod
    def _getDateTime(cls, ctx, dateTime, tz, aux=None):
        """Parse *dateTime* and return a tz-aware datetime.

        'ctx' is 'from' or 'to': context-dependent values snap to the
        start (00:00:00) or end (23:59:59) of the day respectively.
        Parse failures are reported as HTTP 400 errors.
        """
        try:
            rel, value = cls._parseDateTime(dateTime, ctx == 'from')
        except ArgumentParseError, e:
            raise HTTPAPIError(e.message, 400)

        if rel == 'abs':
            # absolute datetimes are localized only if naive
            return tz.localize(value) if not value.tzinfo else value
        elif rel == 'ctx' and type(value) == timedelta:
            # offsets are applied to the current UTC time
            value = nowutc() + value

        # from here on, 'value' has to be a datetime
        if ctx == 'from':
            return tz.localize(value.combine(value.date(), time(0, 0, 0)))
        else:
            return tz.localize(value.combine(value.date(), time(23, 59, 59)))
def getCurrentTime(self):
    """Return the current UTC time."""
    currentTime = nowutc()
    return currentTime
def _getAnswer(self):
    """Pre-activate the agent at the current UTC timestamp."""
    currentTS = int_timestamp(nowutc())
    self._agent.preActivate(currentTS)
    return True
def _getAnswer(self):
    """Pre-activate the agent at the current (granularity-scaled) timestamp."""
    ts = int_timestamp(nowutc())
    self._agent.preActivate(ts / self._sm.getGranularity())
    return True
class CERNSearchRecordUploader(RecordUploader):
    """ A worker that uploads data using HTTP """

    def __init__(self, logger, agent, url, username, password, task=None):
        super(CERNSearchRecordUploader, self).__init__(logger, agent)
        # base URL of the CERN Search service
        self._url = url
        # HTTP Basic Auth credentials
        self._username = username
        self._password = password
        # optional task whose 'running since' timestamp is refreshed
        # after each upload
        self._task = task

    def _postRequest(self, batch):
        # intentionally a no-op in this uploader
        pass

    def _uploadBatch(self, batch):
        """ Uploads a batch to the server """
        url = "%s/ImportXML" % self._url

        self._logger.debug('getting a batch')
        tstart = time.time()
        # get a batch
        self._logger.info('Generating metadata')
        data = self._agent._getMetadata(batch, logger=self._logger)
        self._logger.info('Metadata ready ')
        postData = {'xml': data}
        tgen = time.time() - tstart

        req = Request(url)
        # remove line break
        cred = base64.encodestring('%s:%s' % (self._username,
                                              self._password)).strip()
        req.add_header("Authorization", "Basic %s" % cred)

        try:
            result = urlopen(req, data=urlencode(postData))
        except HTTPError, e:
            # log the server's error body, then signal failure upstream
            self._logger.exception("Status %s: \n %s" % (e.code, e.read()))
            raise Exception('upload failed')

        result_data = result.read()
        tupload = time.time() - (tstart + tgen)

        self._logger.debug('rec %s result: %s' % (batch, result_data))

        # the response body is an XML document whose text content is a
        # boolean flag
        xmlDoc = etree.fromstring(result_data)
        # right now there is nothing else to pay attention to
        booleanResult = etree.tostring(xmlDoc, method="text")

        if self._task:
            self._task.setOnRunningListSince(nowutc())

        # success requires both HTTP 200 and a 'true' payload
        if result.code == 200 and booleanResult == 'true':
            self._logger.info('Batch of %d records stored in server'
                              ' [%f s %f s]' % \
                              (len(batch), tgen, tupload))
        else:
            self._logger.error('Records: %s output: %s '
                               '(HTTP code %s)' % (batch, result_data,
                                                   result.code))
            raise Exception('upload failed')

        return True