def FetchByCode(code):
    """
    Fetches an individual shop record from the database using the
    specified shop code.

    Args:
        code: string, the shop's code.

    Returns:
        A dict containing all the shop fields: id, region_id, code, name
        (None when the lookup fails).
    """
    result = None
    # Bind the query before the try-block so the except handler can always
    # log it (previously 'query' could be unbound if Log.info raised first).
    query = "SELECT * FROM shop WHERE code = %s;"
    try:
        Log.info(
            ('SHOPS-Fetch-Code:', 'Trying to grab data from table using Code'))
        db.cursor.execute(query, (code, ))
        result = db.cursor.fetchone()
        Log.info(('SHOPS-Fetch-Code:', 'Successfully grabbed data'))
    except Error as e:
        Log.error(('SHOPS-Fetch-Code:', e))
        Log.info(('SHOPS-Fetch-Code:Query:', query))
        Log.info(('SHOPS-Fetch-Code:', 'Failed to grab data'))
    return result
def getStringArray(path):
    """Collect every <array> element from an Android-style XML resource file.

    Each array's "name" attribute becomes a key; its value is the
    concatenation of all child <item> texts, each followed by '|'.
    Returns a (keys, values) tuple, or None when path is None.
    """
    if path is None:
        Log.error('file path is None')
        return
    document = xml.dom.minidom.parse(path)
    arrays = document.documentElement.getElementsByTagName('array')
    keys = []
    values = []
    for node in arrays:
        name = node.getAttribute("name")
        joined = ''
        for child in node.getElementsByTagName('item'):
            joined += child.firstChild.data + '|'
        keys.append(name)
        values.append(joined)
    return (keys, values)
def writeToFile(keys, values, directory, additional):
    """Write key/value pairs out as an Android strings.xml resource file.

    Empty/None values are skipped and logged. *additional* raw XML, when
    not None, is appended before the closing </resources> tag.
    """
    if not os.path.exists(directory):
        os.makedirs(directory)
    Log.info("Creating android file:" + directory + "/strings.xml")
    # 'with' guarantees the handle is closed even if a write raises
    # (the original leaked it in that case).
    # NOTE(review): "wb" + str writes are Python 2 semantics; under
    # Python 3 this would need mode "w" or encoded bytes.
    with open(directory + "/strings.xml", "wb") as fo:
        fo.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n")
        for x in range(len(keys)):
            if values[x] is None or values[x] == '':
                Log.error("Key:" + keys[x] + "\'s value is None. Index:" + str(x + 1))
                continue
            key = keys[x]
            value = values[x]
            fo.write(" <string name=\"" + key + "\">" + value + "</string>\n")
        if additional is not None:
            fo.write(additional)
        fo.write("</resources>")
def write(fName, pCollection):
    """Dispatch to the writer matching the file suffix (CSV or vCard)."""
    if fName.endswith(CSV.SUFFIX):
        CSV.write(fName, pCollection)
        return
    if fName.endswith(VCARD.SUFFIX):
        VCARD.write(fName, pCollection)
        return
    Log.error(__class__, "write() - File format not supported!")
def FetchById(id):
    """
    Fetches an individual shop record from the database using the
    specified identifier.

    Args:
        id: int, the unique identifier of the shop.

    Returns:
        A dict containing all the shop fields: id, region_id, code, name
        (None when the lookup fails).
    """
    result = None
    # Bind the query before the try-block so the except handler can always
    # log a meaningful context even if Log.info raises first.
    query = "SELECT * FROM shop WHERE id = %s;"
    try:
        Log.info(
            ('SHOPS-Fetch-Id:', 'Trying to grab data from table using Id'))
        db.cursor.execute(query, (id, ))
        result = db.cursor.fetchone()
        Log.info(('SHOPS-Fetch-Id:', 'Successfully grabbed data'))
    except Error as e:
        Log.error(('SHOPS-Fetch-Id:', e))
        Log.info(('SHOPS-Fetch-Id:', query))
        Log.info(('SHOPS-Fetch-Id:', 'Failed to grab data'))
    return result
def convertiOSAndAndroidFile(table, targetFloderPath, iOSAdditional, androidAdditional):
    """Dump the first two columns of a spreadsheet table to output.txt.

    Each output line is "<col0> <col1>"; rows with an empty first or
    second cell are skipped and logged. *table* is expected to expose
    xlrd-style row_values/col_values accessors.
    """
    keys = table.col_values(0)  # all values of column 0
    Log.info("targetFloderPath: " + targetFloderPath)
    if not os.path.exists(targetFloderPath):
        os.makedirs(targetFloderPath)
    Log.info("open file " + targetFloderPath + "/output.txt")
    # 'with' closes the file even if a row raises (the original leaked the
    # handle on error); the unused 'firstRow' local was dropped.
    with open(targetFloderPath + "/output.txt", "wb") as fo:
        for x in range(len(keys)):
            row = table.row_values(x)  # all values of row x
            if row[0] is None or row[0] == '' or row[1] is None or row[1] == '':
                Log.error("Key:" + keys[x] + "\'s value is None. Index:" + str(x + 1))
                continue
            content = row[0] + " " + row[1] + "\n"
            Log.info("wcontent is " + content)
            fo.write(content)
    Log.info("will close")
def writeToFile(filePath, xlsName, titles, values):
    """Write a column-oriented value table into an .xlsx workbook.

    *titles*, when given, become a bold green header row; *values* is a
    list of columns (each a list of cells). Returns None when filePath
    is None.
    """
    if filePath is None:
        Log.error('write file path is None')
        return
    if not os.path.exists(filePath):
        os.makedirs(filePath)
    workbook = xlsxwriter.Workbook(filePath + "/" + xlsName)
    headerFormat = workbook.add_format({
        'bold': True,
        'border': 1,
        'align': 'left',
        'valign': 'vcenter',
        'fg_color': '#00FF00',
    })
    worksheet = workbook.add_worksheet()
    rowOffset = 0
    if titles:
        rowOffset = 1
        for col, title in enumerate(titles):
            worksheet.write(0, col, title, headerFormat)
    for col, column in enumerate(values):
        if isinstance(column, list):
            for row, cell in enumerate(column):
                worksheet.write(row + rowOffset, col, cell)
    workbook.close()
def QueryUnread(self,index, boardname):
    """Return True when post id *index* on *boardname* is unread.

    Scans this user's board-read-cache entry list; 0 terminates the
    list. Ids appear to be stored newest-first (inferred from the
    'index > cur' early exit below) -- confirm against the writer side.
    """
    # Log.debug("QueryUnread: %s %d" % (boardname, index))
    board = BoardManager.BoardManager.GetBoard(boardname)
    if (board == None):
        Log.error("Fail to load board %s for unread?" % boardname)
        return False
    entry = self.FindCacheEntry(board)
    if (entry == -1):
        Log.warn("cannot find cache entry for unread? %s" % boardname)
        return False
    for j in range(0, BRC_MAXNUM):
        cur = self._cache[entry].list[j]
        # Log.debug("read: %d" % cur)
        if (cur == 0):
            if (j == 0):
                # empty brc cache: nothing recorded, so it is unread
                return True
            # reached the 0 terminator without a match: treated as read
            return False
        if (index > cur):
            # index is larger than the current recorded id: unread
            return True
        elif (index == cur):
            # index is recorded as read
            return False
    return False
class Upgrade:
    """Database schema upgrade helper.

    Python 2 code (note the 'except MySQLdb.Error, e' syntax).
    NOTE(review): this class is truncated in the visible source -- the
    body of the trailing 'finally:' (presumably cursor/db cleanup and
    the return of 'version') is missing.
    """
    #---------------------------------------------------------------------------#
    # Constructor
    #---------------------------------------------------------------------------#
    def __init__(self):
        self.log = Log()          # project logger instance
        self.error = 0            # sticky flag set when a query fails
        self.Version = "v0.1.6"   # version this upgrader targets
    #---------------------------------------------------------------------------#
    # Get current version
    #---------------------------------------------------------------------------#
    def GetCurrentVersion(self):
        # Default when the settings row cannot be read.
        version = "v0.0"
        #Connect to MySQL
        db = MySQLdb.connect(Config.DbHost, Config.DbUser, Config.DbPassword, Config.DbName)
        cursor = db.cursor()
        try:
            #Execute SQL-Query
            cursor.execute("SELECT SettingValue FROM ha_settings WHERE SettingName='Version'")
            result = cursor.fetchone()
            version = result[0]
        except MySQLdb.Error, e:
            #Log exceptions; e.args may not always have two elements
            try:
                self.error = 1
                self.log.error('Server', 'MySQL Error [%d]: %s' % (e.args[0], e.args[1]))
            except IndexError:
                self.error = 1
                self.log.error('Server', 'MySQL Error: %s' % str(e))
        finally:
            # NOTE(review): finally-body missing from this view.
def FetchByCode(code):
    """
    Fetches an individual shop record from the database using the
    specified shop code.

    Args:
        code: string, the shop's code.

    Returns:
        A dict containing all the shop fields: id, region_id, code, name
        (None when the lookup fails).
    """
    result = None
    # Bind the query before the try-block so the except handler can always
    # log it (previously 'query' could be unbound if Log.info raised first).
    query = "SELECT * FROM shop WHERE code = %s;"
    try:
        Log.info(('SHOPS-Fetch-Code:', 'Trying to grab data from table using Code'))
        db.cursor.execute(query, (code, ))
        result = db.cursor.fetchone()
        Log.info(('SHOPS-Fetch-Code:', 'Successfully grabbed data'))
    except Error as e:
        Log.error(('SHOPS-Fetch-Code:', e))
        Log.info(('SHOPS-Fetch-Code:Query:', query))
        Log.info(('SHOPS-Fetch-Code:', 'Failed to grab data'))
    return result
def FetchById(id):
    """
    Fetches an individual category record from the database using the
    specified identifier.

    Args:
        id: int, the unique identifier of the category.

    Returns:
        A dict containing all the category fields: id, parent_id, name,
        code (None when the lookup fails).
    """
    result = None
    # Bind the query before the try-block so the except handler can always
    # log it (previously 'query' could be unbound if Log.info raised first).
    query = "SELECT * FROM category WHERE id = %s;"
    try:
        Log.info(('CATEGORIES-Fetch-Id:', 'Trying to grab data from table using Id'))
        db.cursor.execute(query, (id, ))
        result = db.cursor.fetchone()
        Log.info(('CATEGORIES-Fetch-Id:', 'Successfully grabbed data'))
    except Error as e:
        Log.error(('CATEGORIES-Fetch-Id:', e))
        Log.info(('CATEGORIES-Fetch-Id:Query:', query))
        Log.info(('CATEGORIES-Fetch-Id:', 'Failed to grab data'))
    return result
def FetchByRegionCode(code):
    """
    Fetch all the shop records that belong to the region specified by
    the region code. This is a common use-case internally within Oxfam.

    Args:
        code: string, the region code

    Returns:
        A list of dicts containing all the shop records that belong to
        the specified region (area), or None when the region is unknown
        or the query fails.
    """
    result = None
    # While we could just do this in one SQL statement, we're going to use
    # the Regions module so that the code is more robust. This can be
    # changed if required when we look at optimization.
    region = Regions.FetchByCode(code)
    if region is not None:
        # Bound before the try-block so the except handler can always log it.
        query = "SELECT * FROM shop WHERE region_id = %s;"
        try:
            Log.info(('SHOPS-Fetch-RegionCode:', 'Trying to grab data using regionCode/Id '))
            db.cursor.execute(query, (region["id"], ))
            result = db.cursor.fetchall()
            Log.info(('SHOPS-Fetch-RegionCode:', 'Successfully grabbed data'))
        except Error as e:
            Log.error(('SHOPS-Fetch-RegionCode', e))
            Log.info(('SHOPS-Fetch-RegionCode:Querry:', query))
            Log.info(('SHOPS-Fetch-RegionCode:', 'Failed to grab data'))
    return result
def write(fName, pCollection):
    """Write *pCollection* as a quoted CSV file.

    The header row comes from pCollection.getHeaderNames(); every person
    contributes one row, with each cell wrapped in double quotes.
    Returns True on success, None on IOError.
    """
    Log.trace(__class__, "write()")
    try:
        SEP = ","
        EMB = "\""
        # 'with' guarantees the handle is closed even when a write raises
        # (the original leaked it on any error after open()).
        with open(fName, 'w') as out:
            # write headerNames first
            sep = ""
            for col in pCollection.getHeaderNames():
                out.write(sep + EMB + col + EMB)
                if sep != SEP:
                    sep = SEP
            out.write("\n")
            # write persons
            for person in pCollection.getPersons():
                sep = ""
                for col in pCollection.getHeaderNames():
                    out.write(sep + EMB + person.getAttribute(col) + EMB)
                    if sep != SEP:
                        sep = SEP
                out.write("\n")
        return True
    except IOError:
        Log.error(__class__, "IOError with file > " + fName)
def getKeysAndValues(path):
    """Read an Android strings.xml and return (keys, values, keyValues).

    Entries marked translatable="false" are excluded; entries with no
    text node are logged and skipped. Returns None when path is None.
    """
    if path is None:
        Log.error('file path is None')
        return
    document = xml.dom.minidom.parse(path)
    entries = document.documentElement.getElementsByTagName('string')
    keys = []
    values = []
    keyValues = {}
    for entry in entries:
        name = entry.getAttribute("name")
        try:
            text = entry.firstChild.data
        except:
            Log.error('file=' + path + ', key=' + name + "has not data")
            continue
        if entry.getAttribute("translatable") != "false":
            keys.append(name)
            values.append(text)
            keyValues[name] = text
    return (keys, values, keyValues)
def read(fName):
    """Read a quoted CSV file into a PersonCollection.

    The first line is treated as the header row; each subsequent line
    becomes a Person whose attributes are keyed by the header names.
    Returns the collection, or None on IOError.
    """
    Log.trace(__class__, "read()")
    pCollection = PersonCollection()
    try:
        file = open(fName, 'r')
        isHeader = True
        #SEP = ',*'
        # separator after each quoted cell: any run of commas/whitespace
        SEP = "[\,,\s]*"
        EMB = "\""
        # one quoted cell: "...", followed by the separator
        regEx = re.compile(EMB + '([^' + EMB + ']*)' + EMB + SEP)
        for line in file:
            i = 0
            person = Person()
            for col in regEx.findall(line):
                if(isHeader):
                    pCollection.addHeader(col)
                    #self._headerNames.append(col)
                else:
                    person.setAttribute(pCollection.getHeaderNames()[i], col)
                    i += 1
            if(isHeader):
                isHeader = False
            else:
                pCollection.addPerson(person)
        file.close()
        return pCollection
    except IOError:
        Log.error(__class__, "IOError with file > " + fName)
        return None
def notify_session(self, jid, session, type = None):
    """Broadcast a presence update for *session* (owned by *jid*) to
    every roster watching that jid.

    When *type* is None a full presence stanza (status, priority and
    optionally show) is built; otherwise a bare presence of the given
    type is sent instead.
    """
    # notify session changed (online/state change)
    for hisjid in self._rosters:
        roster = self._rosters[hisjid]
        if (jid in roster.watching()):
            him = self.get_user(hisjid)
            # he can't see you! (CanSee visibility check)
            if (not him.CanSee(session._userinfo)):
                continue
            # you are watching me, so I'll notify you
            Log.debug("notify %s about %s" % (hisjid, session.get_fulljid()))
            elem = None
            if (type == None):
                show = session.get_show(self.get_user(hisjid))
                elem = self.E.presence(
                    {'from' : session.get_fulljid(), 'to' : hisjid},
                    self.E.status(session.get_status()),
                    self.E.priority(session.get_priority()))
                if (show != None):
                    elem.append(self.E.show(show))
            else:
                elem = self.E.presence(
                    {'from' : session.get_fulljid(), 'to' : hisjid,
                     'type' : type})
            try:
                self.transmit(hisjid, elem)
            except Exception as e:
                # delivery failure must not abort notifying other watchers
                Log.error("notify error: %r" % e)
                Log.error(traceback.format_exc())
def fuzzyReplaceLocalizableFile(keys, values, directory, additional):
    """Fuzzy-match new localization values against the existing
    Localizable.strings, write an old-key,new-key map to KeyMap.txt,
    then rewrite the strings file with the new content.

    A .bak copy of the original Localizable.strings is kept.
    """
    if not os.path.exists(directory):
        return
    Log.info("Open iOS file:" + directory + "Localizable.strings")
    fn = directory + "Localizable.strings"
    shutil.copyfile(fn, fn + ".bak")
    fo = open(directory + "KeyMap.txt", "wb")
    (okeys, ovalues) = LocalizableStringsFileUtil.getKeysAndValues(fn)
    for x in range(len(values)):
        if values[x] is None or values[x] == '':
            Log.error("Key:" + keys[x] + "\'s value is None. Index:" + str(x + 1))
            continue
        key = keys[x]
        value = values[x]
        # best fuzzy match of the new value among the old values
        (ovalue, ratio) = process.extractOne(value, ovalues)
        if ratio > 80:  # similarity threshold, percent
            oindex = ovalues.index(ovalue)
            Log.info("found a string '" + ovalue + "' with key '" + key + "' index: " + str(oindex))
            content = okeys[oindex] + "," + key + "\n"
            fo.write(content)
    # BUG FIX: the original said 'fo.close' (no parentheses), which never
    # actually closed KeyMap.txt.
    fo.close()
    LocalizableStringsFileUtil.writeToFile(keys, values, directory, additional)
def QueryUnread(self,index, boardname):
    """Return True when post id *index* on *boardname* is unread.

    Refreshes the user's board-read-cache entry, then scans its id list;
    0 terminates the list. Ids appear stored newest-first (inferred from
    the 'index > cur' early exit) -- confirm against the writer side.
    """
    # Log.debug("QueryUnread: %s %d" % (boardname, index))
    board = BoardManager.BoardManager.GetBoard(boardname)
    if (board == None):
        Log.error("Fail to load board %s for unread?" % boardname)
        return False
    entry = self.FindCacheEntry(board)
    if (entry == -1):
        # NOTE(review): boardname is passed as a positional argument here
        # ('%s' is not interpolated), unlike the '%' formatting used
        # elsewhere -- confirm Log.warn supports lazy formatting.
        Log.warn("cannot find cache entry for unread? %s", boardname)
        return False
    # refresh the cache entry from its backing store before reading it
    self._cache[entry].Update(True)
    for j in range(0, BRC_MAXNUM):
        cur = self._cache[entry]._list[j]
        # Log.debug("read: %d" % cur)
        if (cur == 0):
            if (j == 0):
                # empty brc cache: nothing recorded, so it is unread
                return True
            # hit the 0 terminator without a match: treated as read
            return False
        if (index > cur):
            # index newer than the current recorded id: unread
            return True
        elif (index == cur):
            # index is recorded as read
            return False
    return False
def Init(self): if self._cache_map == None: cachepath = User.User.CacheFile(self._userid, '') try: os.mkdir(cachepath, 0700) except: pass entrypath = User.User.CacheFile(self._userid, 'entry') try: os.stat(entrypath) except: # Log.debug("no brc cache file for %s, creating" % self._userid) brc = '\0' * BRC_CACHE_NUM * BrcCacheEntry.Size() fbrc = os.open(entrypath, os.O_RDWR | os.O_CREAT, 0600) os.write(fbrc, brc) os.close(fbrc) fbrc = open(entrypath, "r+b") if (fbrc == None): Log.error("cannot init brc cache for %s" % self._userid) return False self._cache_map = mmap.mmap(fbrc.fileno(), BRC_CACHE_NUM * BrcCacheEntry.Size(), prot = mmap.PROT_READ | mmap.PROT_WRITE, flags = mmap.MAP_SHARED) fbrc.close() if (self._cache_map == None): Log.error("failed to mmap cache file for %s" % self.userid) return False self._cache = [0] * BRC_CACHE_NUM for i in range(0, BRC_CACHE_NUM): self._cache[i] = BrcCacheEntry(self, i) self._cache[i].Load(self._cache_map[BrcCacheEntry.Size() * i:BrcCacheEntry.Size() * (i+1)]) return True
def FetchByRegionId(region_id):
    """
    Fetch all the shop records that have the specified region_id (i.e.
    those that belong to the specified area).

    Args:
        region_id: int, the unique identifier of the region this shop
            belongs to.

    Returns:
        A list of dicts containing all the shop records that belong to
        the specified region (area), or None when the query fails.
    """
    result = None
    # Bound before the try-block so the except handler can always log it.
    query = "SELECT * FROM shop WHERE region_id = %s;"
    try:
        Log.info(('SHOPS-Fetch-RegionId:', 'Trying to grab data from table using RegionId'))
        db.cursor.execute(query, (region_id, ))
        result = db.cursor.fetchall()
        Log.info(('SHOPS-Fetch-RegionId:', 'Successfully grabbed data'))
    except Error as e:
        Log.error(('SHOPS-Fetch-RegionId:', e))
        Log.info(('SHOPS-Fetch-RegionId:Query:', query))
        Log.info(('SHOPS-Fetch-RegionId:', 'Failed to grab data'))
    return result
def AddLogInfo(filepath, user, session, anony, has_sig):
    """Append the BBS origin trailer ("※ 来源: ...") to a post file.

    The origin shows an anonymous placeholder when *anony* is set,
    otherwise the session's source IP. Python 2 code: a unicode literal
    is encoded as GBK into a binary-append handle.
    """
    # ANSI color (31..37) derived from the user's login count
    color = (user.userec.numlogins % 7) + 31
    if (anony):
        from_str = Config.Config.GetString("NAME_ANONYMOUS_FROM", "Anonymous")
    else:
        from_str = session._fromip
    try:
        with open(filepath, "ab") as fp:
            if (has_sig):
                fp.write('\n')
            else:
                # no signature present: add the conventional "--" separator
                fp.write('\n--\n')
            lastline = u'\n\033[m\033[1;%2dm※ 来源:·%s %s·[FROM: %s]\033[m\n' % (
                color, Config.Config.GetString("BBS_FULL_NAME", "Python BBS"),
                Config.Config.GetString("NAME_BBS_ENGLISH", "PyBBS"), from_str)
            fp.write(lastline.encode('gbk'))
    except IOError:
        Log.error("Post.AddLogInfo: IOError on %s" % filepath)
        pass
def FetchByRegionCode(code):
    """
    Fetch all the shop records that belong to the region specified by
    the region code. This is a common use-case internally within Oxfam.

    Args:
        code: string, the region code

    Returns:
        A list of dicts containing all the shop records that belong to
        the specified region (area), or None when the region is unknown
        or the query fails.
    """
    result = None
    # While we could just do this in one SQL statement, we're going to use
    # the Regions module so that the code is more robust. This can be
    # changed if required when we look at optimization.
    region = Regions.FetchByCode(code)
    if region is not None:
        # Bound before the try-block so the except handler can always log it.
        query = "SELECT * FROM shop WHERE region_id = %s;"
        try:
            Log.info(('SHOPS-Fetch-RegionCode:', 'Trying to grab data using regionCode/Id '))
            db.cursor.execute(query, (region["id"], ))
            result = db.cursor.fetchall()
            Log.info(('SHOPS-Fetch-RegionCode:', 'Successfully grabbed data'))
        except Error as e:
            Log.error(('SHOPS-Fetch-RegionCode', e))
            Log.info(('SHOPS-Fetch-RegionCode:Querry:', query))
            Log.info(('SHOPS-Fetch-RegionCode:', 'Failed to grab data'))
    return result
def writeToFile(keys, values, directory, filename, additional):
    """Write key/value pairs as an Android strings.xml style resource.

    iOS positional specifiers ('%1$@') are converted to Android's
    '%1$s'. Empty/None values are skipped and logged; *additional* raw
    XML, when given, is appended before the closing tag.
    """
    if not os.path.exists(directory):
        os.makedirs(directory)
    # 'with' closes the handle even if a write raises (the original
    # leaked it on error). NOTE(review): "wb" + str writes are Python 2
    # semantics.
    with open(directory + "/" + filename, "wb") as fo:
        fo.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n")
        for x in range(len(keys)):
            if values[x] is None or values[x] == '':
                Log.error("Key:" + keys[x] + "\'s value is None. Index:" + str(x + 1))
                continue
            key = keys[x].strip()
            # translate iOS positional specifiers (%1$@) to Android (%1$s)
            value = re.sub(r'(%\d\$)(@)', r'\1s', values[x])
            fo.write(" <string name=\"" + key + "\">" + value + "</string>\n")
        if additional is not None:
            fo.write(additional)
        fo.write("</resources>")
def getIOSKeysAndValues(path):
    """Parse an iOS Localizable.strings file into parallel key/value lists.

    Lines of the form '"key" = "value";' are matched; other lines are
    ignored. Returns (keys, values); None when path is None. On a
    UnicodeDecodeError the lines read so far are kept.
    """
    if path is None:
        Log.error('file path is None')
        return
    tuples = []
    # Compile the patterns once instead of per line (the original rebuilt
    # the first regex on every iteration).
    line_pattern = re.compile('\".*\";')
    pair_pattern = re.compile(r"\"(.*)\"\s*=\s*\"(.*)\";")
    try:
        # Context manager guarantees the handle is closed even when
        # decoding fails (the original leaked it on UnicodeDecodeError).
        with codecs.open(path, 'r', encoding='utf-8') as file:
            for line in file:
                value = line_pattern.search(line)
                if value is not None:
                    result = pair_pattern.findall(value.string)
                    if len(result) > 0:
                        tuples.append(result[0])
    except UnicodeDecodeError:
        print("got unicode error with utf-8 , trying different encoding")
    keys = []
    values = []
    for x in tuples:
        if len(x) >= 2:
            keys.append(x[0])
            values.append(x[1])
    return keys, values
def getKeysAndValues(path):
    """Parse an iOS Localizable.strings file by naive string splitting.

    Entries are split on the closing '";' of each definition, then on
    ' = '. Returns (keys, values); None when path is None.
    """
    if path is None:
        Log.error('file path is None')
        return
    # 1. Read the whole Localizable.strings file ('with' guarantees the
    #    handle is closed even if read() raises).
    with codecs.open(path, 'r', 'utf-8') as strings_file:
        string = strings_file.read()
    # 2. Split by '";'
    localStringList = string.split('\";')
    # renamed from 'list' so the builtin is not shadowed
    pairs = [x.split(' = ') for x in localStringList]
    # 3. Get keys & values
    keys = []
    values = []
    for keyValue in pairs:
        if len(keyValue) > 1:
            key = keyValue[0].split('\"')[1]
            value = keyValue[1][1:]  # drop the value's leading quote
            keys.append(key)
            values.append(value)
    return (keys, values)
def Init():
    """Attach the public shared-memory segment exactly once.

    Raises when the segment does not exist (the time daemon creates it).
    """
    if CommonData.publicshm is not None:
        return
    try:
        CommonData.publicshm = SharedMemory(Config.PUBLIC_SHMKEY,
                                            size=PUBLICSHM_SIZE)
    except ExistentialError:
        Log.error("time daemon not started")
        raise Exception("Initialization failed: publicshm not created")
def Init(self):
    """Create/attach this user's brc cache file, mmap it read-write, and
    construct the per-board cache entry objects.

    Returns True on success, False when the backing file cannot be
    opened or mapped. (Python 2 code: octal literals 0700/0600.)
    """
    if self._cache_map == None:
        cachepath = User.User.CacheFile(self._userid, '')
        try:
            os.mkdir(cachepath, 0700)
        except:
            pass  # best-effort: the directory may already exist
        entrypath = User.User.CacheFile(self._userid, 'entry')
        try:
            os.stat(entrypath)
        except:
            # Log.debug("no brc cache file for %s, creating" % self._userid)
            # no cache file yet: create it zero-filled at full size
            brc = '\0' * BRC_CACHE_NUM * BrcCacheEntry.size
            fbrc = os.open(entrypath, os.O_RDWR | os.O_CREAT, 0600)
            os.write(fbrc, brc)
            os.close(fbrc)
        fbrc = open(entrypath, "r+b")
        if (fbrc == None):
            # NOTE(review): open() raises on failure rather than returning
            # None, so this branch is likely dead.
            Log.error("cannot init brc cache for %s" % self._userid)
            return False
        self._cache_map = mmap.mmap(fbrc.fileno(),
                                    BRC_CACHE_NUM * BrcCacheEntry.size,
                                    prot = mmap.PROT_READ | mmap.PROT_WRITE,
                                    flags = mmap.MAP_SHARED)
        fbrc.close()
        if (self._cache_map == None):
            Log.error("failed to mmap cache file for %s" % self._userid)
            return False
        self._cache = [0] * BRC_CACHE_NUM
        for i in range(0, BRC_CACHE_NUM):
            self._cache[i] = BrcCacheEntry(self, i)
    return True
def Init():
    """Initialize the board cache: mmap the boards file read-only and
    attach (or create and populate) the shared-memory status segment.

    NOTE(review): the middle of this function is corrupted in the source
    ('... " curruser: "******"Creating BCache shared memory")') -- an
    'except ExistentialError:' header and the start of a Log.info call
    appear to have been lost. Code kept byte-for-byte; do not run as-is.
    """
    Log.info("Initializing BCache")
    if (BCache.bcache == None):
        boardf = open(Config.Config.GetBoardsFile(), 'r+b')
        if (boardf == None):
            Log.error("Cannot open boards file")
            raise ServerError("fail to open boards file")
        try:
            BCache.bcache = mmap.mmap(boardf.fileno(),
                                      Config.MAXBOARD * BoardHeader.size,
                                      flags=mmap.MAP_SHARED,
                                      prot=mmap.PROT_READ)
            if (BCache.bcache == None):
                Log.error("Cannot mmap boards file")
                raise ServerError("fail to mmap boards file")
        finally:
            boardf.close()
    Log.info("Got boards list")
    if (BCache.brdshm == None):
        try:
            Log.info("Attaching to BCache shared memory")
            BCache.brdshm = SharedMemory(Config.Config.GetInt(
                "BCACHE_SHMKEY", 3693), size=BCache.BRDSHM_SIZE)
            # print "Got SHM"
            # for i in range(0, Config.MAXBOARD):
            #     bh = BoardHeader(i)
            #     if (bh.filename != ''):
            #         bs = BoardStatus(i)
            #         print "Board: ", bh.filename, " lastpost: ", bs.lastpost, " total: ", bs.total, " curruser: "******"Creating BCache shared memory")
            # NOTE(review): the corrupted span above ends mid-expression;
            # the following creation path presumably lived in an
            # 'except ExistentialError:' branch.
            BCache.brdshm = SharedMemory(Config.Config.GetInt(
                "BCACHE_SHMKEY", 3693), size=BCache.BRDSHM_SIZE,
                flags=IPC_CREAT, mode=0660)
            fd = BCache.Lock()
            try:
                maxbrd = -1
                for i in range(0, Config.MAXBOARD):
                    bh = BoardHeader(i)
                    if (bh.filename != ''):
                        bs = BoardStatus(i)
                        board = Board(bh, bs, i)
                        board.UpdateLastPost()
                        maxbrd = i
                if (maxbrd != -1):
                    BCache.SetBoardCount(maxbrd + 1)
            finally:
                BCache.Unlock(fd)
    Log.info("BCache initialized")
    return
def read(fName):
    """Dispatch to the reader matching the file suffix (CSV or vCard).

    Returns the parsed collection, or None for unsupported formats.
    """
    if fName.endswith(CSV.SUFFIX):
        return CSV.read(fName)
    if fName.endswith(VCARD.SUFFIX):
        return VCARD.read(fName)
    Log.error(__class__, "read() - File format not supported!")
    return None
def transmit(self, to, elem):
    """Deliver *elem* on every route registered for *to*, logging (and
    surviving) individual delivery failures."""
    for fulljid, route in self.routes(to):
        Log.debug("sending to %s" % fulljid)
        try:
            route.handle(elem)
        except Exception as exc:
            Log.error("send error: %r" % exc)
            Log.error(traceback.format_exc())
def test_Log(self):
    """Smoke-test the Log facade: one call per severity level, then a
    raw SQL write through Log.SQLQuery (an upsert into ha_data)."""
    # Log
    log = Log()
    log.debug("Test", "Debug")
    log.info("Test", "Info")
    log.warning("Test", "Warning")
    log.error("Test", "Error")
    log.SQLQuery("INSERT INTO ha_data (DataId, DataName, DataText, DataStatus, DataLastUpdated) VALUES (9999, 'Test', 'Test', 200, NOW()) ON DUPLICATE KEY UPDATE DataText = VALUES(DataText), DataStatus = VALUES(DataStatus), DataLastUpdated = VALUES(DataLastUpdated)")
def run(self):
    """Background loop: refresh XMPP sessions every update interval
    until the updater is stopped; errors are logged, never fatal."""
    while self._running:
        time.sleep(Config.XMPP_UPDATE_TIME_INTERVAL)
        try:
            self.update_sessions()
        except Exception as exc:
            Log.error("Exception caught in rosters.updater: %r" % exc)
            Log.error(traceback.format_exc())
def Init():
    """Attach the commondata shared-memory segment if not yet attached.

    Raises when the segment does not exist (the time daemon creates it).
    """
    Log.info("Attaching commondata shm")
    if CommonData.publicshm is not None:
        return
    try:
        CommonData.publicshm = SharedMemory(Config.PUBLIC_SHMKEY,
                                            size=PUBLICSHM_SIZE)
    except ExistentialError:
        Log.error("time daemon not started")
        raise Exception("Initialization failed: publicshm not created")
def run(self):
    """Background loop: refresh sessions and friends lists every update
    interval until stopped; errors are logged, never fatal."""
    while self._running:
        time.sleep(Config.XMPP_UPDATE_TIME_INTERVAL)
        try:
            self.update_sessions()
            self.update_friends()
        except Exception as exc:
            Log.error("Exception caught in rosters.updater: %r" % exc)
            Log.error(traceback.format_exc())
def index_boards(self):
    """ Index all the boards, one at a time; a failure on one board is
    logged and does not stop the rest. """
    for board in BoardManager.BoardManager.boards.keys():
        try:
            self.index_board(board)
        except Exception as exc:
            Log.error("Exception caught when indexing %s: %r" % (board, exc))
def removePacketFromWarehouse(self, landingSpeed, takeoffSpeed, packet):
    """Land, pick up *packet*, and take off again.

    Returns 0 on success, -1 when either the landing or the takeoff
    fails (a failed landing is also logged).
    """
    # land to retrieve the packet
    if self.land(landingSpeed) != 0:
        l.error(TAG, "Impossible to land to retrieve the packet")
        return -1
    self.packet = packet
    # takeoff again
    if self.takeoff(takeoffSpeed) != 0:
        return -1
    # Explicit success code: the original fell off the end and returned
    # None, inconsistent with the -1 failure convention used above.
    return 0
def run(self):
    """Ping every connected XMPP client once per ping interval while the
    roster service is running; errors are logged, never fatal."""
    while self._rosters._running:
        try:
            time.sleep(Config.XMPP_PING_TIME_INTERVAL)
            for connection in self._rosters._conns.values():
                connection.ping_client()
        except Exception as exc:
            Log.error("Exception caught in rosters.pinger: %r" % exc)
            Log.error(traceback.format_exc())
def Init():
    """Initialize the board cache: mmap the boards file read-only and
    attach (or create and populate) the shared-memory status segment.

    NOTE(review): the middle of this function is corrupted in the source
    ('... " curruser: "******"Creating BCache shared memory")') -- an
    'except ExistentialError:' header and the start of a Log.info call
    appear to have been lost. Code kept byte-for-byte; do not run as-is.
    """
    Log.info("Initializing BCache")
    if BCache.bcache == None:
        boardf = open(Config.Config.GetBoardsFile(), "r+b")
        if boardf == None:
            Log.error("Cannot open boards file")
            raise ServerError("fail to open boards file")
        try:
            BCache.bcache = mmap.mmap(
                boardf.fileno(), Config.MAXBOARD * BoardHeader.size,
                flags=mmap.MAP_SHARED, prot=mmap.PROT_READ
            )
            if BCache.bcache == None:
                Log.error("Cannot mmap boards file")
                raise ServerError("fail to mmap boards file")
        finally:
            boardf.close()
    Log.info("Got boards list")
    if BCache.brdshm == None:
        try:
            Log.info("Attaching to BCache shared memory")
            BCache.brdshm = SharedMemory(Config.Config.GetInt("BCACHE_SHMKEY", 3693), size=BCache.BRDSHM_SIZE)
            # print "Got SHM"
            # for i in range(0, Config.MAXBOARD):
            #     bh = BoardHeader(i)
            #     if (bh.filename != ''):
            #         bs = BoardStatus(i)
            #         print "Board: ", bh.filename, " lastpost: ", bs.lastpost, " total: ", bs.total, " curruser: "******"Creating BCache shared memory")
            # NOTE(review): the corrupted span above ends mid-expression;
            # the creation path below presumably lived in an
            # 'except ExistentialError:' branch.
            BCache.brdshm = SharedMemory(
                Config.Config.GetInt("BCACHE_SHMKEY", 3693),
                size=BCache.BRDSHM_SIZE, flags=IPC_CREAT, mode=0660
            )
            fd = BCache.Lock()
            try:
                maxbrd = -1
                for i in range(0, Config.MAXBOARD):
                    bh = BoardHeader(i)
                    if bh.filename != "":
                        bs = BoardStatus(i)
                        board = Board(bh, bs, i)
                        board.UpdateLastPost()
                        maxbrd = i
                if maxbrd != -1:
                    BCache.SetBoardCount(maxbrd + 1)
            finally:
                BCache.Unlock(fd)
    Log.info("BCache initialized")
    return
def broadcast(self, conn, elem):
    """Send presence information to everyone subscribed to this account.
    We do not need to consider the people logined through term"""
    roster = self._get(conn)
    subscribers = roster.presence(conn.authJID, elem).subscribers()
    for jid in subscribers:
        try:
            conn.send(jid, elem)
        except Exception:
            Log.error("Exception caught when broadcasting from %r to %r..." % (conn.authJID, jid))
            Log.error(traceback.format_exc())
def run(self):
    """Periodically let one connection per user steal queued messages;
    each uid is served at most once per sweep. Errors are logged."""
    while self.rosters._running:
        time.sleep(STEALER_INTERVAL)
        try:
            served_uids = set()
            for loginind, conn in self.rosters._conns.items():
                uid = conn.get_uid()
                if uid not in served_uids:
                    conn.steal_msg()
                    served_uids.add(uid)
        except Exception as exc:
            Log.error("Exception caught in rosters.msg_stealer: %r" % exc)
            Log.error(traceback.format_exc())
def do_OPTIONS(self):
    """Answer a CORS preflight request with a permissive, empty 200 reply."""
    try:
        self.send_response(200)
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Methods', 'GET, POST')
        self.send_header('Access-Control-Max-Age', '86400')
        self.send_header('Content-Type', 'text/html; charset=UTF-8')
        self.send_header('Content-Length', '0')
        self.end_headers()
        self.wfile.flush()
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; client I/O failures are merely logged.
        Log.error("Error handling OPTIONS")
def run(self):
    """Indexer thread main loop: reindex all boards each interval until
    self.stopped is set, then close the DB connection."""
    Log.info("FastIndexer start")
    self.init_conn()
    while not self.stopped:
        try:
            self.index_boards()
        except Exception as exc:
            Log.error("Exception caught in FastIndexer: %r" % exc)
        time.sleep(INDEX_INTERVAL)
    self.close_conn()
def GetNewUserId(username):
    """Ask the local miscd daemon (port 60001) for a fresh user id.

    Sends "NEW <username>" and expects a 4-byte packed int back.
    Returns the new id, or -1 on any failure.
    """
    sock = None
    try:
        sock = socket.create_connection(('127.0.0.1', 60001))
        sock.sendall("NEW %s" % username)
        ret = sock.recv(4)
        if len(ret) == 4:
            newid = struct.unpack('=i', ret)[0]
            Log.debug("new user id: %d" % newid)
            return newid
        Log.error("invalid response from miscd for newuser")
        return -1
    except Exception as exc:
        Log.error("fail to get new user id: %r" % exc)
        return -1
    finally:
        # The original leaked the socket on every path; always close it.
        if sock is not None:
            sock.close()
def probe(self, conn):
    """Ask everybody this account is subscribed to for a status update.
    This is used when a client first connects.
    Also fake responses from TERM users
    (Indentation below is reconstructed from a collapsed source line --
    confirm whether the session loop is guarded by the roster check.)
    """
    Log.debug("probing friends from %s" % conn.authJID.full)
    roster = self._get(conn)
    # outgoing probe stanza (Python 2: 'unicode' builtin)
    elem = conn.E.presence({'from': unicode(conn.authJID), 'type': 'probe'})
    sender = UserManager.UserManager.LoadUser(conn._userid)
    for jid in roster.watching():
        if (jid in self._rosters):
            # real XMPP contact: send an actual probe stanza
            try:
                conn.send(jid, elem)
            except Exception as e:
                Log.error("Exception caught when probing XMPP user %r: %r" % (jid, e))
                Log.error(traceback.format_exc())
        # if (jid != conn.authJID.bare): # bug somewhere, if they are equal..
        # BBS/term sessions cannot answer probes -- synthesize a presence
        # reply for each of the contact's live BBS sessions instead.
        for session_info in self.get_bbs_online(jid):
            if (not sender.CanSee(session_info._userinfo)):
                continue
            show = session_info.get_show(self.get_user(conn.authJID.bare))
            # NOTE(review): this rebinds 'elem', so later iterations of the
            # outer loop send the last synthesized stanza instead of the
            # original probe -- looks unintended; confirm.
            elem = conn.E.presence(
                {'from' : '%s/%s' % (jid, session_info.get_res()),
                 'to' : conn.authJID.bare},
                conn.E.status(session_info.get_status()),
                conn.E.priority(session_info.get_priority()))
            if (show != None):
                elem.append(conn.E.show(show))
            try:
                conn.send(conn.authJID, elem)
            except Exception as e:
                Log.error("Exception caught when faking response from %s/%s to %r" % (jid, session_info.get_res(), conn.authJID.bare))
                Log.error(traceback.format_exc())
    Log.debug("probed friends from %s" % conn.authJID.full)
def Clear(self, boardname):
    """Mark every post on *boardname* as read for this user.

    Resets the brc cache entry: slot 0 is set to the board's current max
    post id (nowid) and slot 1 is zeroed (0 terminates the read-id list
    elsewhere in this cache), then the entry is committed back.
    Returns True on success, False when the board or entry is missing.
    """
    board = BoardManager.BoardManager.GetBoard(boardname)
    if (board == None):
        Log.error("Fail to load board %s for clear" % boardname)
        return False
    entry = self.FindCacheEntry(board)
    if (entry == -1):
        return False
    bh = BCache.BCache.GetBoardHeader(boardname)
    self._cache[entry]._list[0] = bh.nowid
    self._cache[entry]._list[1] = 0
    self._cache[entry]._changed = 1   # mark dirty so Commit persists it
    self._cache[entry].Commit()
    return True
def Lock():
    """Acquire the global UTMP lock by flock()ing BBS_ROOT/UTMP
    exclusively.

    Returns the open lock fd, which the caller must keep for the later
    unlock/close. Raises when the lock file cannot be opened.
    (Python 2 code: octal literal 0600.)
    """
    #try:
    #SemLock.Lock(Config.UCACHE_SEMLOCK, timeout = 10);
    #return 0;
    #except BusyError:
    #return -1;
    # Log.debug("Utmp.Lock enter()")
    lockf = os.open(Config.BBS_ROOT + "UTMP", os.O_RDWR | os.O_CREAT, 0600)
    if (lockf < 0):
        # NOTE(review): os.open raises OSError on failure rather than
        # returning a negative fd, so this branch is likely dead.
        Log.error("Fail to open lock file!")
        raise Exception("fail to lock!")
    Util.FLock(lockf, shared = False)
    # Log.debug("Utmp.Lock succ()")
    return lockf
def index_board(self, board):
    """ Index one board (name: board)

    Re-reads the board's directory file and rebuilds its search index
    rows when the file changed since the last indexing run.
    (Python 2 code: xrange and integer '/' division below.)
    """
    boardobj = BoardManager.BoardManager.GetBoard(board)
    if not boardobj:
        Log.error("Error loading board %s" % board)
        return
    if board in self.board_info:
        idx_obj = self.board_info[board]
    else:
        idx_obj = IndexBoardInfo(board, 0)
        self.board_info[board] = idx_obj
    bdir_path = boardobj.GetDirPath()
    with open(bdir_path, 'rb') as bdir:
        # shared flock: readers may scan the directory file concurrently
        Util.FLock(bdir, shared=True)
        try:
            if not board in self.state.locks:
                self.state.locks[board] = threading.Lock()
            status = os.stat(bdir_path)
            if status.st_mtime <= idx_obj.last_idx:
                # why <? anyway...
                # unchanged since the last run: nothing to do
                return
            Log.debug("Board %s updated. Indexing..." % board)
            # index into buffer table, one fixed-size PostEntry at a time
            self.init_buf(board)
            for idx in xrange(status.st_size / PostEntry.PostEntry.size):
                post_entry = PostEntry.PostEntry(
                    bdir.read(PostEntry.PostEntry.size))
                self.insert_entry(board, post_entry, idx)
            self.conn.commit()
            # commit buffer table under the per-board lock so readers
            # never observe a half-swapped index
            self.state.locks[board].acquire()
            try:
                self.remove_idx_status(idx_obj)
                self.commit_buf(board)
                self.create_db_index(board)
                idx_obj.last_idx = status.st_mtime
                self.insert_idx_status(idx_obj)
            finally:
                self.state.locks[board].release()
            Log.debug("Board %s indexed." % board)
        finally:
            Util.FUnlock(bdir)
def main():
    """Entry point: drop privileges to the 'bbs' user, then serve the
    data service on port 8080 forever. (Python 2: print statement.)"""
    try:
        userinfo = getpwnam('bbs')
        os.setuid(userinfo[2])   # userinfo[2] is pw_uid
    except:
        Log.error("Failed to find user 'bbs'!")
        sys.exit(1)
    port = 8080
    server = MyServer(('', port), DataService)
    print 'Starting at port %d...' % port
    try:
        server.serve_forever()
    except:
        # swallow interrupts so shutdown (e.g. Ctrl-C) is quiet
        pass
def _read(self, file):
    """Load a three-column quoted mapping file into self._mapping and
    self._defaultValues.

    Each matching line holds '"source","target","default"'; source maps
    to target, and target maps to its default value. Returns True on
    success, None on IOError.
    """
    try:
        # Use a separate name for the handle so the original path string
        # stays available for the error message below (the original
        # rebound 'file' and would concatenate a file object on a late
        # IOError).
        handle = open(file, 'r')
        #SEP = ","
        SEP = "[\,,\s]"
        EMB = "\""
        regEx = re.compile(EMB + '([^' + EMB + ']+)' + EMB + SEP +
                           EMB + '([^' + EMB + ']+)' + EMB + SEP +
                           EMB + '([^' + EMB + ']*)' + EMB)
        for line in handle:
            cols = regEx.match(line)
            if cols != None:
                #TODO check the group count (was: Gruppengroesze pruefen)
                self._mapping[cols.group(1)] = cols.group(2)
                self._defaultValues[cols.group(2)] = cols.group(3)
        handle.close()
        return True
    except IOError:
        Log.error(self.__class__, "IOError with file > " + file)