def checkLooseness(self, k, c, fix): orphan = False parent = getParent(c) if parent == None: print "[1st deg ORPHAN] %s\n" % prettyPrint(c) orphan = True elif hasattr(parent, 'getOwnerList') and parent.getOwnerList() == []: print "[2nd deg ORPHAN] %s %s\n" % (prettyPrint(c), parent) orphan = True if orphan and fix: DBMgr.getInstance().sync() print "Fixing %s..." % prettyPrint(c), if orphan: words = self.idx._words l = words[k] pos = l.index(c) print "del val %s\n" % l[pos] del l[pos] words[k] = l self.idx.setIndex(words) ids = self.idx._ids l = ids[k] print "del key %s\n" % l[pos] del l[pos] ids[k] = l DBMgr.getInstance().commit() print "Done!\n"
def _process(self):
    # Two-step maintenance packing: ask for confirmation first; once the
    # user confirmed (and did not cancel), pack the database and redirect
    # back to the maintenance page.
    if not self._cancel:
        if not self._confirmed:
            p = RHMaintenancePack(self)
            return p.display()
        DBMgr.getInstance().pack()
    self._redirect(urlHandlers.UHMaintenance.getURL())
def _process(self):
    # Same confirm-then-pack flow as the sibling handler: show the
    # confirmation page unless the action was confirmed, then pack the DB.
    if not self._cancel:
        if not self._confirmed:
            p = RHMaintenancePack(self)
            return p.display()
        DBMgr.getInstance().pack()
    self._redirect(urlHandlers.UHMaintenance.getURL())
def checkLooseness(self, k, c, fix): orphan = False parent = getParent(c) if parent == None: print "[1st deg ORPHAN] %s\n" % prettyPrint(c) orphan = True elif hasattr(parent, "getOwnerList") and parent.getOwnerList() == []: print "[2nd deg ORPHAN] %s %s\n" % (prettyPrint(c), parent) orphan = True if orphan and fix: DBMgr.getInstance().sync() print "Fixing %s..." % prettyPrint(c), if orphan: words = self.idx._words l = words[k] pos = l.index(c) print "del val %s\n" % l[pos] del l[pos] words[k] = l self.idx.setIndex(words) ids = self.idx._ids l = ids[k] print "del key %s\n" % l[pos] del l[pos] ids[k] = l DBMgr.getInstance().commit() print "Done!\n"
def rebuildRoomReservationsIndex(): from MaKaC.common.db import DBMgr from MaKaC.rb_location import CrossLocationDB from MaKaC.rb_room import RoomBase from MaKaC.plugins.RoomBooking.default.dalManager import DALManager from BTrees.OOBTree import OOBTree DBMgr.getInstance().startRequest() CrossLocationDB.connect() root = DALManager.root resvEx = ReservationBase() resvEx.isConfirmed = None allResvs = CrossLocationQueries.getReservations( resvExample = resvEx ) print "There are " + str( len( allResvs ) ) + " resvs and pre-resvs to index..." c = 0 root[_ROOM_RESERVATIONS_INDEX] = OOBTree() print "Room => Reservations Index branch created" for resv in allResvs: roomReservationsIndexBTree = root[_ROOM_RESERVATIONS_INDEX] resvs = roomReservationsIndexBTree.get( resv.room.id ) if resvs == None: resvs = [] # New list of reservations for this room roomReservationsIndexBTree.insert( resv.room.id, resvs ) resvs.append( resv ) roomReservationsIndexBTree[resv.room.id] = resvs c += 1 if c % 100 == 0: print c CrossLocationDB.commit() CrossLocationDB.disconnect() DBMgr.getInstance().endRequest()
def countIndex(idxName): DBMgr.getInstance().startRequest() idx = IndexesHolder().getIndex(idxName) print len(sum(idx._words.values(), [])) DBMgr.getInstance().endRequest()
def setUp(self):
    # Open a DB request and cache the two category-date indexes plus the
    # date range and category used by the test cases.
    DBMgr.getInstance().startRequest()
    self.oldIndex = IndexesHolder()._getIdx()["categoryDateLtd"]
    self.newIndex = IndexesHolder()._getIdx()["categoryDate"]
    self.startDate = datetime(2010, 5, 13, 10, 0, 0, tzinfo=timezone('UTC'))
    self.endDate = datetime(2010, 5, 14, 14, 0, 0, tzinfo=timezone('UTC'))
    self.ch = ConferenceHolder()
    self.categId = '0'
def remoteSynchronize(self, raiseExceptionOnSyncFail=False): """ Calls the webcast synchronization URL Will perform an intermediate commit before calling the URL or the remote server will query a non-updated state of the indico DB. raiseExceptionOnSyncFail: if True (default), we will raise a MaKaCError if calling the webcast synchronization URL had a problem """ url = str(self.getWebcastSynchronizationURL()).strip() if url: try: Logger.get('webcast').info("Doing an intermediate commit...") DBMgr.getInstance().commit() Logger.get('webcast').info("Commit done.") Logger.get('webcast').info( "Calling the webcast synchronization URL: " + url) answer = urlopen(url, timeout=10).read(100000).strip() Logger.get('webcast').info("Got answer: " + answer) return answer except HTTPError, e: code = e.code shortMessage = BaseHTTPRequestHandler.responses[code][0] longMessage = BaseHTTPRequestHandler.responses[code][1] Logger.get('webcast').error( """Calling the webcast synchronization URL: [%s] triggered HTTPError: %s (code = %s, shortMessage = '%s', longMessage = '%s'""" % (str(url), str(e), code, shortMessage, longMessage)) if raiseExceptionOnSyncFail: if str(code) == '404': raise MaKaCError( 'Could not find the server at ' + str(url) + "(HTTP error 404)", 'webcast') elif str(code) == '500': raise MaKaCError( "The server at" + str(url) + " has an internal problem (HTTP error 500)", 'webcast') else: raise MaKaCError( "Problem contacting the webcast synchronization server. 
Reason: HTTPError: %s (code = %s, shortMessage = '%s', longMessage = '%s', url = '%s'" "" % (str(e), code, shortMessage, longMessage, str(url)), 'webcast') except URLError, e: Logger.get('webcast').error( """Calling the webcast synchronization URL: [%s] triggered exception: %s""" % (str(url), str(e))) if raiseExceptionOnSyncFail: if str(e.reason).strip() == 'timed out': raise MaKaCError( "The webcast synchronization URL is not responding", 'webcast') raise MaKaCError( """URLError when contacting the webcast synchronization URL: [%s]. Reason=[%s]""" % (str(url), str(e.reason)), 'webcast')
def searchForId(idxName, id): DBMgr.getInstance().startRequest() idx = IndexesHolder().getIndex(idxName) for k, l in idx._words.iteritems(): for v in l[:]: if v.id == id: print "%s" % k DBMgr.getInstance().endRequest()
def buildCategoryDateIndexLtd():
    """ Builds limited version of CategoryDateIndex.
        Can take a long time """
    DBMgr.getInstance().startRequest()
    idx = CategoryDateIndexLtd()
    idx.buildIndex()
    IndexesHolder()._getIdx()["categoryDateLtd"] = idx
    DBMgr.getInstance().endRequest()
def register(interval=15):
    # Enqueue the Outlook calendar-notification task to run every
    # ``interval`` minutes via the Indico scheduler.
    from indico.modules.scheduler import Client
    from dateutil.rrule import MINUTELY
    from MaKaC.common import DBMgr

    DBMgr.getInstance().startRequest()
    task = OutlookUpdateCalendarNotificationTask(MINUTELY, interval=interval)
    Client().enqueue(task)
    DBMgr.getInstance().endRequest()
def main(**kwargs):
    # (Re-)initialize the room booking DB for the given location,
    # defaulting to 'Universe'; force=True wipes any existing data.
    location = kwargs.get('location', 'Universe')
    from MaKaC.rb_factory import Factory
    from MaKaC.common.db import DBMgr
    DBMgr.getInstance().startRequest()
    Factory.getDALManager().connect()
    initializeRoomBookingDB(location, force=True)
    Factory.getDALManager().disconnect()
    DBMgr.getInstance().endRequest()
def main(**kwargs):
    # Initialize the room booking database (force mode) for the requested
    # location; falls back to 'Universe' when none is supplied.
    from MaKaC.rb_factory import Factory
    from MaKaC.common.db import DBMgr
    location = kwargs.get('location', 'Universe')
    DBMgr.getInstance().startRequest()
    Factory.getDALManager().connect()
    initializeRoomBookingDB(location, force=True)
    Factory.getDALManager().disconnect()
    DBMgr.getInstance().endRequest()
def main(): DBMgr.getInstance().startRequest() print _("Is working day: ") print str( HolidaysHolder.isWorkingDay( date( 2007, 5, 1 ) ) ) print str( HolidaysHolder.isWorkingDay( date( 2007, 5, 2 ) ) ) print str( HolidaysHolder.isWorkingDay( date( 2007, 9, 5 ) ) ) h = HolidaysHolder.getHolidays() for day in h: if day.year == 2007: print day DBMgr.getInstance().endRequest()
def inverseCheckIndex(idxName): DBMgr.getInstance().startRequest() idx = IndexesHolder().getIndex(idxName) if idxName == 'OAIConferenceModificationDate': OAIConferenceModificationDate(idx).inverseCheck() elif idxName == 'OAIPrivateConferenceModificationDate': OAIPrivateConferenceModificationDate(idx).inverseCheck() else: print "No inverse checking procedures defined for %s" % idxName sys.exit(-1) DBMgr.getInstance().endRequest()
def setUp(self):
    # Prepare test fixtures: DB request, old/new category-date indexes,
    # a UTC date window and the root category id.
    DBMgr.getInstance().startRequest()
    indexes = IndexesHolder()._getIdx()
    self.oldIndex = indexes["categoryDateLtd"]
    self.newIndex = indexes["categoryDate"]
    self.startDate = datetime(2010, 5, 13, 10, 0, 0, tzinfo=timezone('UTC'))
    self.endDate = datetime(2010, 5, 14, 14, 0, 0, tzinfo=timezone('UTC'))
    self.ch = ConferenceHolder()
    self.categId = '0'
def tmp():
    # Scratch/debug helper: connects, builds an empty OOBTree and aborts by
    # raising its dir() listing. NOTE(review): ``raise str(...)`` is a
    # string exception left as-is; code after it is unreachable.
    from MaKaC.rb_factory import Factory
    from MaKaC.rb_room import RoomBase
    from MaKaC.common.db import DBMgr
    from BTrees.OOBTree import OOBTree

    DBMgr.getInstance().startRequest()
    Factory.getDALManager().connect()
    dayReservationsIndexBTree = OOBTree()
    raise str(dir(dayReservationsIndexBTree))
    Factory.getDALManager().disconnect()
    DBMgr.getInstance().endRequest()
def inverseCheckIndex(idxName): DBMgr.getInstance().startRequest() idx = IndexesHolder().getIndex(idxName) if idxName == "OAIConferenceModificationDate": OAIConferenceModificationDate(idx).inverseCheck() elif idxName == "OAIPrivateConferenceModificationDate": OAIPrivateConferenceModificationDate(idx).inverseCheck() else: print "No inverse checking procedures defined for %s" % idxName sys.exit(-1) DBMgr.getInstance().endRequest()
def connect(self):
    # Open a connection (and cache its root) unless already connected;
    # reuses the main Indico DB connection when both share one database.
    if not self.isConnected():
        if DALManager.usesMainIndicoDB():
            self.connection = DBMgr.getInstance().getDBConnection()
        else:
            self.connection = self.db.open()
        self.root = self.connection.root()
def connect():
    # Static variant: connect the DALManager singleton, reusing the main
    # Indico DB connection when configured to share it.
    if not DALManager.isConnected():
        if DALManager.usesMainIndicoDB():
            DALManager.connection = DBMgr.getInstance().getDBConnection()
        else:
            DALManager.connection = DALManager.theInstance().db.open()
        DALManager.root = DALManager.connection.root()
def __init__(self, rh):
    # Page base constructor: set up the asset environment, debug flag and
    # CSS selection, degrading gracefully when the DB is not connected.
    config = Config.getInstance()
    db_connected = DBMgr.getInstance().isConnected()

    self._rh = rh
    self._locTZ = ""

    self._asset_env = Environment(config.getHtdocsDir(), '')
    if db_connected:
        debug = HelperMaKaCInfo.getMaKaCInfoInstance().isDebugActive()
    else:
        debug = False
    # This is done in order to avoid the problem sending the error report because the DB is not connected.
    if db_connected:
        info = HelperMaKaCInfo.getMaKaCInfoInstance()
        self._asset_env.debug = info.isDebugActive()

    self._dir = config.getTPLDir()
    self._asset_env.debug = debug

    if db_connected:
        css_file = config.getCssStylesheetName()
    else:
        css_file = 'Default.css'

    # register existing assets
    assets.register_all_js(self._asset_env)
    assets.register_all_css(self._asset_env, css_file)

    # store page specific CSS and JS
    self._extraCSS = []
    self._extraJS = []
def findCamouflaged(fix=False, fromDate=None): table = { 0: [Conference], 1: [Contribution, AcceptedContribution], 2: [SubContribution] } dbi = DBMgr.getInstance() dbi.startRequest() camouflaged = [] doh = DeletedObjectHolder() for obj in doh.getList(): types = table[obj.getId().count(":")] if not obj._objClass in types: camouflaged.append(obj) print "-- CAMOUFLAGED %s (%s) should be in %s" % ( prettyPrint(obj), obj._objClass, types) if fix: for c in camouflaged: dbi.sync() doh.remove(c) dbi.commit() print "-- FIXED %s " % prettyPrint(c) dbi.endRequest() print "\n Total of %s camouflaged conferences found" % len(camouflaged) return camouflaged
def main(argv): DBMgr.getInstance().startRequest() print "Req start at " + str(datetime.now()) if "migrate" in argv: migrateCategoryDateIndex() if "switch" in argv: switchIndex() if "removeBackup" in argv: deleteBackup() if "display" in argv: displayIndexes() print "Req ends at " + str(datetime.now()) DBMgr.getInstance().endRequest()
def findCamouflaged(fix=False,fromDate=None): table = { 0: [Conference], 1: [Contribution, AcceptedContribution], 2: [SubContribution] } dbi = DBMgr.getInstance() dbi.startRequest() camouflaged = [] doh = DeletedObjectHolder() for obj in doh.getList(): types = table[obj.getId().count(":")] if not obj._objClass in types: camouflaged.append(obj) print "-- CAMOUFLAGED %s (%s) should be in %s" % (prettyPrint(obj), obj._objClass, types) if fix: for c in camouflaged: dbi.sync() doh.remove(c) dbi.commit() print "-- FIXED %s " % prettyPrint(c) dbi.endRequest() print "\n Total of %s camouflaged conferences found" % len(camouflaged) return camouflaged
def getEntry(idxName, entry, ids): DBMgr.getInstance().startRequest() idx = IndexesHolder().getIndex(idxName) if ids: if idx._ids.has_key(entry): print entry, idx._ids[entry] else: print "Key '%s' was not found" % entry else: if idx._words.has_key(entry): print entry, map(prettyPrint, idx._words[entry]) else: print "Key '%s' was not found" % entry DBMgr.getInstance().endRequest()
def _process(self):
    # Gather maintenance statistics (temp dir usage, DB size, number of
    # web sessions) and render the maintenance page.
    s = MaintenanceMng.getStat(Config.getInstance().getTempDir())
    dbSize = MaintenanceMng.humanReadableSize(
        DBMgr.getInstance().getDBSize(), 'm')
    nWebsession = MaintenanceMng.getWebsessionNum()
    p = adminPages.WPMaintenance(self, s, dbSize, nWebsession)
    return p.display()
def __call__(self, aw, req):
    """Perform the actual exporting"""
    # Enforce the HTTP method the hook declares
    if self.HTTP_POST != (req.method == 'POST'):
        raise HTTPAPIError('This action requires %s' % ('POST' if self.HTTP_POST else 'GET'),
                           apache.HTTP_METHOD_NOT_ALLOWED)
    self._getParams()
    # Access control: guests and unauthorized users are rejected up front
    if not self.GUEST_ALLOWED and not aw.getUser():
        raise HTTPAPIError('Guest access to this hook is forbidden.', apache.HTTP_FORBIDDEN)
    if not self._hasAccess(aw):
        raise HTTPAPIError('Access to this hook is restricted.', apache.HTTP_FORBIDDEN)

    func = getattr(self, self.PREFIX + '_' + self._type, None)
    if not func:
        raise NotImplementedError(self.PREFIX + '_' + self._type)

    if not self.COMMIT:
        # Just execute the function, we'll never have to repeat it
        resultList, complete = self._performCall(func, aw)
    else:
        # Try it a few times until commit succeeds
        dbi = DBMgr.getInstance()
        for _retry in xrange(10):
            dbi.sync()
            resultList, complete = self._performCall(func, aw)
            try:
                dbi.commit()
            except ConflictError:
                pass  # retry
            else:
                break
        else:
            raise HTTPAPIError('An unresolvable database conflict has occured',
                               apache.HTTP_INTERNAL_SERVER_ERROR)

    extraFunc = getattr(self, self.PREFIX + '_' + self._type + '_extra', None)
    extra = extraFunc(aw, resultList) if extraFunc else None
    return resultList, extra, complete, self.SERIALIZER_TYPE_MAP
def play(): from MaKaC.rb_location import CrossLocationDB from MaKaC.rb_room import RoomBase from MaKaC.common.db import DBMgr DBMgr.getInstance().startRequest() CrossLocationDB.connect() roomEx = RoomBase() roomEx.isActive = False rooms = CrossLocationQueries.getRooms( roomExample = roomEx ) for r in rooms: print r CrossLocationDB.commit() CrossLocationDB.disconnect() DBMgr.getInstance().endRequest()
def getWebcastManagerInstance(cls):
    # Fetch the singleton WebcastManager stored at the DB root, creating
    # and persisting it on first access.
    dbmgr = DBMgr.getInstance()
    root = dbmgr.getDBConnection().root()
    try:
        wm = root["WebcastManager"]
    except KeyError:
        wm = WebcastManager()
        root["WebcastManager"] = wm
    return wm
def _clearAvatarConferenceStorage(self, keysToDelete):
    # Remove the given keys from the avatar/conference storage, committing
    # in batches of 1000 to keep transactions small.
    dbi = DBMgr.getInstance()
    dbi.commit()  # Ensure that the status 'request_sent' is kept and requests are not sent twice
    storage = getAvatarConferenceStorage()
    for i, key in enumerate(keysToDelete):
        del storage[key]
        if i % 1000 == 999:
            dbi.commit()
    dbi.commit()
def checkIndex(idxName, dfrom, fix, showProgress): DBMgr.getInstance().startRequest() idx = IndexesHolder().getIndex(idxName) if idxName == "OAIConferenceModificationDate": OAIConferenceModificationDate(idx).check(dfrom, fix, showProgress) elif idxName == "OAIPrivateConferenceModificationDate": OAIPrivateConferenceModificationDate(idx).check(dfrom, fix, showProgress) elif idxName == "OAIContributionModificationDate": OAIContributionModificationDate(idx).check(dfrom, fix, showProgress) elif idxName == "OAIPrivateContributionModificationDate": OAIPrivateContributionModificationDate(idx).check(dfrom, fix, showProgress) else: print "No checking procedures defined for %s" % idxName sys.exit(-1) DBMgr.getInstance().endRequest()
def reindex(fix=False, fromDate=None): """ Recreate deleted obj indices, from the DOH """ dbi = DBMgr.getInstance() dbi.startRequest() pubConfIdx = IndexesHolder().getIndex( 'OAIDeletedConferenceModificationDate') prvConfIdx = IndexesHolder().getIndex( 'OAIDeletedPrivateConferenceModificationDate') pubContIdx = IndexesHolder().getIndex( 'OAIDeletedContributionModificationDate') prvContIdx = IndexesHolder().getIndex( 'OAIDeletedPrivateContributionModificationDate') doh = DeletedObjectHolder() pubConfIdx.initIndex() pubContIdx.initIndex() prvConfIdx.initIndex() prvContIdx.initIndex() fromDateParsed_tz = datetime.datetime( *time.strptime(fromDate, '%Y-%m-%d')[:6], **{'tzinfo': timezone('UTC')}) fromDateParsed_naive = datetime.datetime( *time.strptime(fromDate, '%Y-%m-%d')[:6]) for obj in doh.getList(): if fromDate: if obj.getOAIModificationDate().tzinfo: fromDateParsed = fromDateParsed_tz else: fromDateParsed = fromDateParsed_naive if obj.getOAIModificationDate() < fromDateParsed: continue if not hasattr(obj, 'protected'): print "NO DATA FOR %s (%s)" % (obj.getId(), obj.getOAIModificationDate()) continue print "indexing %s (%s)" % (prettyPrint(obj), obj.getOAIModificationDate()) if obj._objClass == Conference: if obj.protected: prvConfIdx.indexConference(obj) else: pubConfIdx.indexConference(obj) elif obj._objClass == Contribution or obj._objClass == AcceptedContribution or obj._objClass == SubContribution: if obj.protected: prvContIdx.indexContribution(obj) else: pubContIdx.indexContribution(obj) dbi.commit() dbi.endRequest()
def checkIndex(idxName, dfrom, fix, showProgress): DBMgr.getInstance().startRequest() idx = IndexesHolder().getIndex(idxName) if idxName == 'OAIConferenceModificationDate': OAIConferenceModificationDate(idx).check(dfrom, fix, showProgress) elif idxName == 'OAIPrivateConferenceModificationDate': OAIPrivateConferenceModificationDate(idx).check( dfrom, fix, showProgress) elif idxName == 'OAIContributionModificationDate': OAIContributionModificationDate(idx).check(dfrom, fix, showProgress) elif idxName == 'OAIPrivateContributionModificationDate': OAIPrivateContributionModificationDate(idx).check( dfrom, fix, showProgress) else: print "No checking procedures defined for %s" % idxName sys.exit(-1) DBMgr.getInstance().endRequest()
def remoteSynchronize(self, raiseExceptionOnSyncFail = False): """ Calls the webcast synchronization URL Will perform an intermediate commit before calling the URL or the remote server will query a non-updated state of the indico DB. raiseExceptionOnSyncFail: if True (default), we will raise a MaKaCError if calling the webcast synchronization URL had a problem """ url = str(self.getWebcastSynchronizationURL()).strip() if url: try: Logger.get('webcast').info("Doing an intermediate commit...") DBMgr.getInstance().commit() Logger.get('webcast').info("Commit done.") Logger.get('webcast').info("Calling the webcast synchronization URL: " + url) answer = urlopen(url , timeout=10).read(100000).strip() Logger.get('webcast').info("Got answer: " + answer) return answer except HTTPError, e: code = e.code shortMessage = BaseHTTPRequestHandler.responses[code][0] longMessage = BaseHTTPRequestHandler.responses[code][1] Logger.get('webcast').error("""Calling the webcast synchronization URL: [%s] triggered HTTPError: %s (code = %s, shortMessage = '%s', longMessage = '%s'""" % (str(url), str(e), code, shortMessage, longMessage)) if raiseExceptionOnSyncFail: if str(code) == '404': raise MaKaCError('Could not find the server at ' + str(url) + "(HTTP error 404)", 'webcast') elif str(code) == '500': raise MaKaCError("The server at" + str(url) + " has an internal problem (HTTP error 500)", 'webcast') else: raise MaKaCError("Problem contacting the webcast synchronization server. 
Reason: HTTPError: %s (code = %s, shortMessage = '%s', longMessage = '%s', url = '%s'""" % (str(e), code, shortMessage, longMessage, str(url)), 'webcast') except URLError, e: Logger.get('webcast').error("""Calling the webcast synchronization URL: [%s] triggered exception: %s""" % (str(url), str(e))) if raiseExceptionOnSyncFail: if str(e.reason).strip() == 'timed out': raise MaKaCError("The webcast synchronization URL is not responding", 'webcast') raise MaKaCError("""URLError when contacting the webcast synchronization URL: [%s]. Reason=[%s]"""%(str(url), str(e.reason)), 'webcast')
def main():
    # Indico shell entry point: optionally attach a console log handler,
    # then either run the standalone web server or drop into an
    # interactive (IPython if available) shell with a live DB request.
    formatter = logging.Formatter("%(asctime)s %(name)-16s: %(levelname)-8s - %(message)s")

    parser = argparse.ArgumentParser(description='Process some integers.')
    parser.add_argument('--logging', action='store',
                        help='display logging messages for specified level')
    parser.add_argument('--web-server', action='store_true',
                        help='run a standalone WSGI web server with Indico')
    parser.add_argument('--with-ipv6', action='store_true',
                        help='enable ipv6 support for web server')
    args, remainingArgs = parser.parse_known_args()

    if 'logging' in args and args.logging:
        logger = Logger.get()
        handler = logging.StreamHandler()
        handler.setLevel(getattr(logging, args.logging))
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    if 'web_server' in args and args.web_server:
        config = Config.getInstance()
        refserver = RefServer(config.getHostNameURL(), int(config.getPortURL()),
                              enable_ipv6=args.with_ipv6)
        refserver.run()
    else:
        dbi = DBMgr.getInstance()
        dbi.startRequest()
        namespace = setupNamespace(dbi)
        if HAS_IPYTHON:
            if OLD_IPYTHON:
                ipshell = IPShellEmbed(remainingArgs, banner=SHELL_BANNER,
                                       exit_msg='Good luck', user_ns=namespace)
            else:
                config = IPConfig()
                ipshell = InteractiveShellEmbed(config=config,
                                                banner1=SHELL_BANNER,
                                                exit_msg='Good luck',
                                                user_ns=namespace)
            ipshell()
        else:
            console = code.InteractiveConsole(namespace)
            console.interact(SHELL_BANNER)
        # interactive session: discard any uncommitted changes
        dbi.abort()
        dbi.endRequest()
def getRepositoryFromDB(cls):
    # Look up (or lazily create) this repository class' singleton inside
    # the DB root's "local_repositories" mapping.
    from MaKaC.common.db import DBMgr
    dbRoot = DBMgr.getInstance().getDBConnection().root()
    try:
        fr = dbRoot["local_repositories"][cls._repo_name]
    except KeyError:
        fr = cls()
        if not "local_repositories" in dbRoot:
            dbRoot["local_repositories"] = OOBTree()
        dbRoot["local_repositories"][cls._repo_name] = fr
    return fr
def indexByDay():
    # Skeleton for re-indexing reservations by day; the actual indexing
    # loop is currently disabled (left commented out below).
    from MaKaC.rb_location import CrossLocationDB
    from MaKaC.rb_room import RoomBase
    from MaKaC.common.db import DBMgr

    DBMgr.getInstance().startRequest()
    CrossLocationDB.connect()
#    resvEx = ReservationBase()
#    resvEx.isConfirmed = None
#    resvs = CrossLocationQueries.getReservations( resvExample = resvEx )
#    print "There are " + str( len( resvs ) ) + " resvs to index..."
#    c = 0
#    for resv in resvs:
#        resv._addToDayReservationsIndex()
#        c += 1
#        if c % 100 == 0:
#            print c
    CrossLocationDB.commit()
    CrossLocationDB.disconnect()
    DBMgr.getInstance().endRequest()
def start(self, obj):
    # Test-feature setup: attach a DBMgr to the fixture object and
    # initialize a fresh DB home, retrying on ZODB conflicts.
    super(Database_Feature, self).start(obj)
    obj._dbmgr = DBMgr.getInstance()
    retries = 10
    # quite prone to DB conflicts
    while retries:
        try:
            with obj._context('database', sync=True) as conn:
                obj._home = default_actions.initialize_new_db(conn.root())
            break
        except ConflictError:
            retries -= 1
def reindex(fix=False,fromDate=None): """ Recreate deleted obj indices, from the DOH """ dbi = DBMgr.getInstance() dbi.startRequest() pubConfIdx = IndexesHolder().getIndex('OAIDeletedConferenceModificationDate') prvConfIdx = IndexesHolder().getIndex('OAIDeletedPrivateConferenceModificationDate') pubContIdx = IndexesHolder().getIndex('OAIDeletedContributionModificationDate') prvContIdx = IndexesHolder().getIndex('OAIDeletedPrivateContributionModificationDate') doh = DeletedObjectHolder() pubConfIdx.initIndex() pubContIdx.initIndex() prvConfIdx.initIndex() prvContIdx.initIndex() fromDateParsed_tz = datetime.datetime(*time.strptime(fromDate,'%Y-%m-%d')[:6],**{'tzinfo':timezone('UTC')}) fromDateParsed_naive = datetime.datetime(*time.strptime(fromDate,'%Y-%m-%d')[:6]) for obj in doh.getList(): if fromDate: if obj.getOAIModificationDate().tzinfo: fromDateParsed = fromDateParsed_tz else: fromDateParsed = fromDateParsed_naive if obj.getOAIModificationDate() < fromDateParsed: continue if not hasattr(obj,'protected'): print "NO DATA FOR %s (%s)" % (obj.getId(), obj.getOAIModificationDate()) continue print "indexing %s (%s)" % (prettyPrint(obj), obj.getOAIModificationDate()) if obj._objClass == Conference: if obj.protected: prvConfIdx.indexConference(obj) else: pubConfIdx.indexConference(obj) elif obj._objClass == Contribution or obj._objClass == AcceptedContribution or obj._objClass == SubContribution: if obj.protected: prvContIdx.indexContribution(obj) else: pubContIdx.indexContribution(obj) dbi.commit() dbi.endRequest()
def fetchOAI(private, fromDate, untilDate):
    # Walk a full OAI ListRecords harvest (marcxml), following resumption
    # tokens until the repository reports no more pages.
    dbi = DBMgr.getInstance()
    dbi.startRequest()
    response = FakeOAIResponse('host', '/oai.py', private)
    response.OAIListRecords(fromDate, untilDate, None, 'marcxml', None)
    token = response.getToken()
    while not token == None:
        response.OAIListRecords(fromDate, untilDate, None, 'marcxml', token)
        token = response.getToken()
    dbi.endRequest()
def __call__(self, aw, req):
    """Perform the actual exporting"""
    # Method check: POST-only hooks reject GET and vice versa
    if self.HTTP_POST != (req.method == 'POST'):
        raise HTTPAPIError(
            'This action requires %s' % ('POST' if self.HTTP_POST else 'GET'),
            apache.HTTP_METHOD_NOT_ALLOWED)
    self._req = req
    self._getParams()
    req = self._req
    # Access control
    if not self.GUEST_ALLOWED and not aw.getUser():
        raise HTTPAPIError('Guest access to this resource is forbidden.',
                           apache.HTTP_FORBIDDEN)
    if not self._hasAccess(aw):
        raise HTTPAPIError('Access to this resource is restricted.',
                           apache.HTTP_FORBIDDEN)

    func = getattr(self, self.PREFIX + '_' + self._type, None)
    if not func:
        raise NotImplementedError(self.PREFIX + '_' + self._type)

    if not self.COMMIT:
        # Just execute the function, we'll never have to repeat it
        resultList, complete = self._performCall(func, aw)
    else:
        # Try it a few times until commit succeeds
        dbi = DBMgr.getInstance()
        for _retry in xrange(10):
            dbi.sync()
            resultList, complete = self._performCall(func, aw)
            try:
                dbi.commit()
            except ConflictError:
                pass  # retry
            else:
                break
        else:
            raise HTTPAPIError(
                'An unresolvable database conflict has occured',
                apache.HTTP_INTERNAL_SERVER_ERROR)

    extraFunc = getattr(self, self.PREFIX + '_' + self._type + '_extra', None)
    extra = extraFunc(aw, resultList) if extraFunc else None
    return resultList, extra, complete, self.SERIALIZER_TYPE_MAP
def __init__(self, rh):
    # Page base constructor: set up the asset environment and, when the DB
    # is reachable, mirror the instance-wide debug flag.
    config = Config.getInstance()
    self._rh = rh
    self._locTZ = ""

    self._asset_env = Environment(config.getHtdocsDir(), '/')
    # This is done in order to avoid the problem sending the error report because the DB is not connected.
    if DBMgr.getInstance().isConnected():
        info = HelperMaKaCInfo.getMaKaCInfoInstance()
        self._asset_env.debug = info.isDebugActive()

    # register existing assets
    assets.register_all_js(self._asset_env)

    #store page specific CSS and JS
    self._extraCSS = []
    self._extraJS = []
def guessProtection(fix=False, fromDate=None): """ Recreate deleted obj indices, from the DOH, guessing the protection, using the parent category""" dbi = DBMgr.getInstance() dbi.startRequest() doh = DeletedObjectHolder() if fromDate: fromDateParsed_tz = datetime.datetime( *time.strptime(fromDate, '%Y-%m-%d')[:6], **{'tzinfo': timezone('UTC')}) fromDateParsed_naive = datetime.datetime( *time.strptime(fromDate, '%Y-%m-%d')[:6]) for obj in doh.getList(): if fromDate: if obj.getOAIModificationDate().tzinfo: fromDateParsed = fromDateParsed_tz else: fromDateParsed = fromDateParsed_naive if obj.getOAIModificationDate() < fromDateParsed: continue if not hasattr(obj, 'protected'): try: categ = CategoryManager().getById(obj.getCategoryPath()[-1]) obj.protected = categ.hasAnyProtection() print "-- protection for %s set as %s" % (prettyPrint(obj), obj.protected) except KeyError: print ">> CATEGORY %s for %s no longer exists - assuming TRUE" % ( obj.getCategoryPath()[-1], prettyPrint(obj)) obj.protected = True dbi.commit() else: print "** protection for %s was already at %s" % (prettyPrint(obj), obj.protected) dbi.endRequest()
def listIndex(idxName, ids): DBMgr.getInstance().startRequest() idx = IndexesHolder().getIndex(idxName) for k, v in idx._words.iteritems(): if ids: f = lambda x: str(x.getId()) else: f = prettyPrint print k, map(f, v) DBMgr.getInstance().endRequest() ## def fixOrphans(idxName): ## DBMgr.getInstance().startRequest() ## idx = IndexesHolder().getIndex(idxName) ## for k,l in idx._words.iteritems(): ## for v in l[:]: ## if type(v) != MaKaC.conference.DeletedObject: ## parent = getParent(v) ## orphan = False ## if parent == None: ## print "[1st deg ORPHAN] %s" % prettyPrint(v) ## orphan = True ## elif hasattr(parent, 'getOwnerList') and parent.getOwnerList() == []: ## print "[2nd deg ORPHAN] %s %s" % (prettyPrint(v), parent) ## orphan = True ## if orphan: ## words = idx._words ## l = words[k] ## l.remove(v) ## words[k] = l ## idx.setIndex(words) ## ids = idx._ids ## l = words[k] ## l.remove(v.id) ## ids[k] = l DBMgr.getInstance().endRequest()