def create_doi_file(node):
    """
    @param node
    Creates and returns the path to the 'doi file' needed to register the
    doi with datacite via api.

    The file contains the node's doi and its public URL; an already existing
    file is reused.  Raises Exception if the node has no 'doi' attribute.
    """
    if 'doi' not in node.attrs:
        raise Exception('doi not set')
    else:
        tmp = config.get('paths.tempdir')
        host = config.get('host.name')
        filename = 'doi_file_{}.txt'.format(node.id)
        path = os.path.join(tmp, filename)
        if os.path.exists(path):
            # file was already generated earlier; reuse it
            pass
        else:
            try:
                with codecs.open(path, 'w', encoding='utf8') as f:
                    f.write('doi={}\n'.format(node.get('doi')))
                    # FIX: use the configured host name instead of the
                    # hard-coded domain 'mediatum.ub.tum.de' -- `host` was
                    # fetched above but never used, which strongly suggests
                    # the hard-coded value was an oversight
                    f.write('url={}{}{}{}'.format('http://', host, '/?id=', node.id))
            except IOError:
                logg.exception('Error creating %s', path)
        return path
def runAction(self, node, op=""):
    """Prepare mail data (from, to, subject, text) on *node* for a later send.

    Builds TAL-evaluated mail fields and stores them in the node's
    "system.mailtmp.*" attributes.  On any TAL error the exception text is
    stored in 'mailtmp.talerror' instead and the step aborts.  If editing is
    not allowed ("allowedit" starts with "n"), the mail is sent immediately
    and the node is forwarded.
    """
    # private link (with access key) and public link to the node
    link = "https://%s/pnode?id=%s&key=%s" % (config.get("host.name"), node.id, node.get("key"))
    link2 = "https://%s/node?id=%s" % (config.get("host.name"), node.id)
    attrs = {"node": node, "link": link, "publiclink": link2}
    try:
        # "from" may be a literal address or the name of a node attribute
        # that holds one -- recognized by containing "@"
        if "@" in self.get('from'):
            node.set("system.mailtmp.from", getTALtext(self.get("from"), attrs))
        elif "@" in node.get(self.get('from')):
            node.set("system.mailtmp.from", getTALtext(node.get(self.get("from")), attrs))
        # recipients: semicolon-separated list of addresses or attribute names
        _mails = []
        for m in self.get('email').split(";"):
            if "@" in m:
                _mails.append(getTALtext(m, attrs))
            elif "@" in node.get(m):
                _mails.append(getTALtext(node.get(m), attrs))
        node.set("system.mailtmp.to", ";".join(_mails))
        node.set("system.mailtmp.subject", getTALtext(self.get("subject"), attrs))
        node.set("system.mailtmp.text", getTALtext(self.get("text"), attrs))
        db.session.commit()
    except:
        # record the TAL error on the node; deliberately broad so a broken
        # template never crashes the workflow
        node.system_attrs['mailtmp.talerror'] = formatException()
        db.session.commit()
        return
    if self.get("allowedit").lower().startswith("n"):
        # no manual editing allowed: send directly and forward on success
        if(self.sendOut(node)):
            self.forward(node, True)
def execute(self, sql, obj=None):
    """Execute *sql* (with optional parameter tuple *obj*) against self.db.

    Serialized via the module-level sqlite_lock.  Every statement is appended
    to a sqlite.log file in the configured tempdir.  Returns all result rows;
    re-raises sqlite.OperationalError after logging it.
    """
    sqlite_lock.acquire()
    try:
        tempdir = config.get("paths.tempdir")
        # FIX: previously this created os.path.dirname(tempdir) -- the
        # *parent* of the temp directory -- so the tempdir itself could
        # still be missing when the log file was opened
        if not os.path.exists(tempdir):
            os.makedirs(tempdir)
        # FIX: join instead of plain concatenation, which silently produced
        # "<tempdir>sqlite.log" when the configured path had no trailing slash
        with open(os.path.join(tempdir, "sqlite.log"), "ab+") as fi:
            fi.write(sql + "\n")
        con = sqlite.connect(self.db, check_same_thread=True)
        con.text_factory = type("")
        try:
            cur = con.cursor()
            if obj:
                res = cur.execute(sql, obj)
            else:
                res = cur.execute(sql)
            s = res.fetchall()
            cur.close()
            con.commit()
        finally:
            # FIX: close the connection even when execute() raises
            con.close()
        return s
    except sqlite.OperationalError:
        logg.error("execute() failed for statement '%s'", sql, exc_info=1)
        raise
    finally:
        sqlite_lock.release()
def rdf(self):
    """Render the ReDIF archive description for the active collection."""
    if self.status_code != httpstatus.HTTP_OK:
        return ""
    node = self.active_collection.node
    owner = self._get_node_owner(node)
    domain = config.get("host.name")
    if node.unicode_name:
        display_name = node.unicode_name
    else:
        display_name = "Unknown name"
    data = {
        "Handle": "RePEc:%s" % node["repec.code"],
        "URL": "%s/repec/%s/" % (self._get_root_url(), node["repec.code"]),
        "Name": display_name,
        "Maintainer-Name": "Unknown",
        "Maintainer-Email": "nomail@%s" % domain,
        "Restriction": None,
    }
    if owner:
        data["Maintainer-Name"] = owner.unicode_name
        data["Maintainer-Email"] = owner["email"]
    # config values may override Name and Maintainer info
    for key in ("Name", "Maintainer-Name", "Maintainer-Email"):
        config_key = "repec.{}".format(key.lower().replace("-", "_"))
        data[key] = config.get(config_key, data[key])
    return redif_encode_archive(data)
def build_transferzip(node):
    """Build a zip archive with the files of all active children of *node*.

    Returns a tuple (zipfilepath, files_written).  Only files of the listed
    transfer-relevant types are included; directory entries are added
    recursively with paths relative to the directory root.
    """
    nid = node.id
    zipfilepath = join_paths(config.get("paths.tempdir"), nid + "_transfer.zip")
    # avoid clobbering a zip from a concurrent/earlier request: salt the name
    if os.path.exists(zipfilepath):
        zipfilepath = join_paths(config.get("paths.tempdir"), nid + "_" + str(random.random()) + "_transfer.zip")
    zip = zipfile.ZipFile(zipfilepath, "w", zipfile.ZIP_DEFLATED)
    files_written = 0
    for n in node.getAllChildren():
        if n.isActiveVersion():
            for fn in n.getFiles():
                if fn.getType() in ['doc', 'document', 'zip', 'attachment', 'other']:
                    fullpath = fn.retrieveFile()
                    # regular file: store under its basename
                    if os.path.isfile(fullpath) and os.path.exists(fullpath):
                        dirname, filename = os.path.split(fullpath)
                        print "adding to zip: ", fullpath, "as", filename
                        zip.write(fullpath, filename)
                        files_written += 1
                    # directory: add every contained file, path made relative
                    if os.path.isdir(fullpath):
                        for f in get_all_file_paths(fullpath):
                            newpath = f.replace(fullpath, "")
                            print "adding from ", fullpath, "to zip: ", f, "as", newpath
                            zip.write(f, newpath)
                            files_written += 1
    zip.close()
    return zipfilepath, files_written
def getLinks(self):
    """Build the sub-header navigation link list for the current user/area.

    Starts with a logout link, replaces it by a login link for guests, then
    appends frontend/edit/admin/workflow/password links depending on the
    user's roles and options.
    """
    l = [Link("http://" + config.get("host.name") + "/logout", t(self.language, "sub_header_logout_title"),
              t(self.language, "sub_header_logout"), icon="/img/logout.gif")]
    if config.get("user.guestuser") == self.user.getName():
        # guest user: show a login link instead of logout; use https when
        # the installation is configured for ssl ("config.ssh")
        if config.get("config.ssh") == "yes":
            l = [Link("https://" + config.get("host.name") + "/login", t(self.language, "sub_header_login_title"),
                      t(self.language, "sub_header_login"), icon="/img/login.gif")]
        else:
            l = [Link("/login", t(self.language, "sub_header_login_title"),
                      t(self.language, "sub_header_login"), icon="/img/login.gif")]
    if self.area != "":
        # inside an admin/edit area: offer a link back to the frontend
        l += [Link("/", t(self.language, "sub_header_frontend_title"),
                   t(self.language, "sub_header_frontend"), icon="/img/frontend.gif")]
    if self.user.isEditor():
        idstr = ""
        if self.id:
            idstr = "?id=" + str(self.id)
        l += [Link("/edit" + idstr, t(self.language, "sub_header_edit_title"),
                   t(self.language, "sub_header_edit"), icon="/img/edit.gif")]
    if self.user.isAdmin():
        l += [Link("/admin", t(self.language, "sub_header_administration_title"),
                   t(self.language, "sub_header_administration"), icon="/img/admin.gif")]
    if self.user.isWorkflowEditor() and self.area != "publish":
        l += [Link("/publish/", t(self.language, "sub_header_workflow_title"),
                   t(self.language, "sub_header_workflow"), icon="/img/workflow.gif")]
    # option "c" marks users that may change their password
    if config.get("user.guestuser") != self.user.getName() and "c" in self.user.getOption():
        l += [Link("/pwdchange", t(self.language, "sub_header_changepwd_title"),
                   t(self.language, "sub_header_changepwd"), "_parent", icon="/img/changepwd.gif")]
    return l
def _handle_login_submit(req):
    """Process a submitted login form.

    Returns 1 on failure (empty username or bad credentials).  On success,
    stores the user id in the session, sets the redirect Location on *req*
    and returns None.
    """
    # FIX: default to "" -- req.form.get("user") returned None when the field
    # was absent, and None.strip() below raised AttributeError before the
    # '"user" in req.form' guard was ever evaluated
    login_name = req.form.get("user", "")
    password = req.form.get("password", "")
    if not login_name.strip() and "user" in req.form:
        # empty username
        return 1
    user = auth.authenticate_user_credentials(login_name, password, req)
    if user:
        # stop caching
        req.setCookie("nocache", "1", path="/")
        if "contentarea" in req.session:
            del req.session["contentarea"]
        req.session["user_id"] = user.id
        logg.info("%s logged in", user.login_name)
        # redirect priority: explicit return URL, then https root (ssl
        # installations), then plain collection root
        if req.session.get('return_after_login'):
            req['Location'] = req.session['return_after_login']
        elif config.get("config.ssh", "") == "yes":
            req["Location"] = ''.join(["https://", config.get("host.name"), _make_collection_root_link()])
        else:
            req["Location"] = _make_collection_root_link()
        # stores the date/time when a user logs in except in read-only mode
        if not config.getboolean("config.readonly", False):
            user.last_login = datetime.now()
            db.session.commit()
    else:
        return 1
def post_file(file_type, file_location):
    """
    @param file_type is either 'doi' or 'metadata'
    @param file_location is the path to the metadata or doi file

    Posts the given file via datacite api to their servers and returns the
    response status and content.
    """
    if file_type not in ('doi', 'metadata'):
        raise Exception('file_type needs to be either "doi" or "metadata"')
    endpoint = 'https://mds.datacite.org/' + file_type
    # FIX: b64encode instead of encodestring -- encodestring appends a
    # trailing newline, which ends up inside the Authorization header and
    # breaks HTTP basic auth
    auth = base64.b64encode(config.get('doi.username') + ':' + config.get('doi.password'))
    header = {'Content-Type': '', 'Authorization': 'Basic ' + auth}
    if file_type == 'doi':
        header['Content-Type'] = 'text/plain;charset=UTF-8'
    if file_type == 'metadata':
        header['Content-Type'] = 'application/xml;charset=UTF-8'
    # FIX: close the file handle deterministically
    with codecs.open(file_location, 'r', encoding='UTF-8') as f:
        msg = f.read()
    h = httplib2.Http()
    response, content = h.request(endpoint, 'POST', body=msg.encode('utf-8'), headers=header)
    return response.status, content.encode('utf-8')
def main():
    """
    create or append a logfile with name yyyy-mm.log as an excerpt of mediatum.log
    of lines beginning with period and containing the string 'INFO'
    and containing one of the strings: 'GET', 'POST' or 'HEAD' are excerpted
    usage:
    find /home/congkhacdung/logrotated/ -type f -iname 'mediatum.*.log' | sort | xargs cat | python bin/stats.py --skip-ip 127.0.0.1 --skip-ip 129.187.87.37 2018 2
    """
    parser = argparse.ArgumentParser(description='Extract info needed for statistics.')
    parser.add_argument('--skip-ip', dest='skip_ip', action='append', default=[], help='ip to skip')
    parser.add_argument('year', type=int, help='year')
    parser.add_argument('month', type=int, help='month')
    args = parser.parse_args()
    period = "{:4}-{:0>2}".format(args.year, args.month)
    skip_ip = args.skip_ip
    outdir = os.path.join(config.get("logging.save", config.get("logging.path", "/tmp")))
    # keep only INFO lines of the requested period that contain an HTTP method;
    # groups: (timestamp prefix, "INFO", rest of line with the request)
    match = re.compile('^({period}.{{17}}).*(INFO).{{2}}(.*(?:GET|POST|HEAD).*)'.format(period=period)).match
    lines = sys.stdin
    lines = imap(match, lines)
    lines = ifilter(None, lines)
    lines = imap(operator.methodcaller('groups'), lines)
    # drop lines whose request part mentions one of the skipped IPs
    # (delimited so e.g. 1.2.3.4 does not match 11.2.3.45)
    skip_ip_pattern = map("([^0-9.]{}[^0-9.])".format, skip_ip)
    skip_ip_pattern = '|'.join(skip_ip_pattern)
    match = re.compile(skip_ip_pattern).match
    lines = ifilter(lambda g: not match(g[2]), lines)
    # re-join the groups and terminate each record with a newline
    lines = imap(operator.concat, lines, repeat(("\n",)))
    lines = imap("".join, lines)
    with tempfile.NamedTemporaryFile(dir=outdir) as tmpfile:
        tmpfile.writelines(lines)
        tmpfile.flush()
        init.full_init()
        buildStatAll([], period, tmpfile.name)
def forwardAndShow(self, node, op, req, link=None, data=None):
    """Forward *node* in the workflow, then redirect the browser to it.

    If forwarding fails (returns None), renders the workflow_forward macro
    instead.  *data* may carry extra query parameters for the /mask link;
    keys clashing with the reserved ones ("id", "obj") are ignored with a
    warning.
    """
    newnode = self.forward(node, op)
    if newnode is None:
        return req.getTAL("workflow/workflow.html", {"node": node}, macro="workflow_forward")
    if link is None:
        context = {"id": newnode.id, "obj": node.id}
        if data and isinstance(data, type({})):
            for k in data:
                if k not in context:
                    context[k] = data[k]
                else:
                    logg.warning("workflow '%s', step '%s', node %s: ignored data key '%s' (value='%s')",
                                 getNodeWorkflow(node).name, getNodeWorkflowStep(node).name, node.id, k, data[k])
        newloc = req.makeLink("/mask", context)
    else:
        newloc = link
    # NOTE(review): redirect is hard-wired to 1, so the redirect == 0 branch
    # (TAL confirmation page) is unreachable -- looks like a debugging toggle
    # that was left in; confirm whether it can be removed
    redirect = 1
    if redirect == 0:
        return req.getTAL(
            "workflow/workflow.html",
            {"newnodename": newnode.name,
             "email": config.get("email.workflow"),
             "csrf": req.csrf_token.current_token},
            macro="workflow_forward2")
    else:
        # on ssl installations force the redirect target onto https
        if config.get("config.ssh", "") == "yes":
            if not newloc.lower().startswith("https:"):
                newloc = "https://" + config.get("host.name") + newloc.replace("http://" + config.get("host.name"), "")
        return '<script language="javascript">document.location.href = "%s";</script>' % newloc
def reopen_log(log_filepath=None, log_filename=None):
    """Close the root logger's file handler and reopen it (e.g. after logrotate).

    The target path is, in order of precedence: the *log_filepath* argument,
    the 'logging.file' config entry, or 'logging.dir' combined with
    *log_filename* (defaulting to '<startscript>.log').
    """
    if log_filepath is None:
        log_filepath = config.get('logging.file', None)
    if log_filepath is None:
        # FIX: log_dir was pre-assigned with a stray trailing semicolon at
        # function level; it is only needed inside this branch
        log_dir = config.get("logging.dir", None)
        if log_dir:
            if not log_filename:
                # use name of start script as log file name
                log_filename = os.path.basename(os.path.splitext(sys.argv[0])[0]) + ".log"
            log_filepath = os.path.join(log_dir, log_filename)
    root_logger = logging.getLogger()
    for handler in root_logger.handlers:
        # exact type match kept on purpose: subclasses like
        # RotatingFileHandler manage their own files and must not be replaced
        if type(handler) == logging.FileHandler:
            handler.flush()
            handler.close()
            root_logger.removeHandler(handler)
            break
    # NOTE(review): if neither config entry is set, log_filepath is still
    # None here and logging.FileHandler(None) will fail -- confirm config
    # always provides one of them
    file_handler = logging.FileHandler(log_filepath)
    file_handler.setFormatter(logging.Formatter(ROOT_FILE_LOGFORMAT))
    root_logger.addHandler(file_handler)
def Identify(req):
    """OAI-PMH 'Identify' verb: write the repository self-description XML.

    The repository name comes from config ('config.oaibasename') or, if
    unset, from the tree root node's name.
    """
    if not checkParams(req, ["verb"]):
        return writeError(req, "badArgument")
    if config.get("config.oaibasename") == "":
        root = tree.getRoot()
        name = root.getName()
    else:
        name = config.get("config.oaibasename")
    req.write("""
 <Identify>
  <repositoryName>%s</repositoryName>
  <baseURL>%s</baseURL>
  <protocolVersion>2.0</protocolVersion>
  <adminEmail>%s</adminEmail>
  <earliestDatestamp>%s-01-01T12:00:00Z</earliestDatestamp>
  <deletedRecord>no</deletedRecord>
  <granularity>YYYY-MM-DDThh:mm:ssZ</granularity>
  <description>
   <oai-identifier xmlns="http://www.openarchives.org/OAI/2.0/oai-identifier" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai-identifier http://www.openarchives.org/OAI/2.0/oai-identifier.xsd">
    <scheme>oai</scheme>
    <repositoryIdentifier>%s</repositoryIdentifier>
    <delimiter>:</delimiter>
    <sampleIdentifier>%s</sampleIdentifier>
   </oai-identifier>
  </description>
 </Identify>""" % (name, mklink(req), config.get("email.admin"), str(EARLIEST_YEAR - 1),
                   config.get("host.name", socket.gethostname()), SAMPLE_IDENTIFIER))
    if DEBUG:
        timetable_update(req, "leaving Identify")
def buildHelpPDF(path, language):
    """Render the help page at *path* as a PDF.

    Returns the built PDF, or None when reportlab is not available.
    """
    if not reportlab:
        return None
    host = config.get('host.name')
    # strip the absolute help-URL prefix to get the internal page path
    relative = path.replace("http://" + host + '/help', '')
    body = getHelpFileContent(relative, language)
    # make help-internal links absolute so they work inside the PDF
    body = body.replace('"/help/', '"http://' + host + '/help/')
    return HelpPdf(path, language, content=body).build()
def __init__(self):
    # maps manager name -> full path spec, parsed from the semicolon-
    # separated "archive.class" config entry; only filled when archiving
    # is activated in the config
    self.manager = {}
    if config.get("archive.activate", "").lower() == "true":
        print "Initializing archive manager:",
        for paths in config.get("archive.class").split(";"):
            path, manager = splitpath(paths)
            self.manager[manager] = paths
        print "archivemanager init done", len(self.manager)
def hash_password(nick, password):
    """Hash *password* for *nick* using the configured algorithm and salt.

    Returns the password unchanged when no hash algorithm is configured.
    """
    algo = config.get("Services/ff_NickServ/Passwords/Hash")
    salt = config.get("Services/ff_NickServ/Passwords/Salt")
    # a salt value of True means "use the lowercased nick as the salt"
    if salt is True:
        salt = nick.lower()
    if algo is None:
        return password
    digest = hashlib.new(algo)
    digest.update(password)
    digest.update(salt)
    return digest.hexdigest()
def get_transfer_url(n):
    """get transfer url for oai format xmetadissplus

    Single-file nodes are delivered as a PDF, multi-file nodes as a zip.
    """
    host = config.get("host.name")
    if len(build_filelist(n)) < 2:
        transfer_filename = n.id + ".pdf"
        return "http://" + host + "/doc/" + n.id + "/" + transfer_filename
    transfer_filename = n.id + "_transfer.zip"
    return "http://" + host + "/file/" + transfer_filename
def getHelp(req):
    """Deliver a help page for the requested path (HTML, or PDF on export).

    Uses module-level caches (menustructure, items, paths, all_paths, index)
    that can be rebuilt via the 'refresh' action.  Falls back to the
    requested path's first available language, or a 404 page.
    """
    global menustructure, items, paths, all_paths, index
    v = {'user': getUserFromRequest(req)}
    if "language_change" in req.params:  # change language
        req.session["language"] = req.params.get('language_change')
    language = translation.lang(req)
    if "edit.x" in req.params:  # edit content
        print "edit page"
    if "refresh.x" in req.params:  # refresh content: drop all caches and re-init
        menustructure = []
        index = {}
        items = {}
        paths = {}
        all_paths = {}
        initHelp()
    # normalize trailing slash
    if req.path[-1] == "/":
        req.path = req.path[:-1]
    # the all_paths keys are dot-separated paths without a leading dot
    if re.sub('^\.', '', req.path.replace("/", ".")) in all_paths:
        pathlangs = all_paths[re.sub('^\.', '', req.path.replace("/", "."))]
        if language not in pathlangs:
            # page not available in the current language: use first available
            content = getHelpFileContent(req.path, pathlangs[0])
        else:
            content = getHelpFileContent(req.path, language)
    else:  # page not found 404
        req.setStatus(httpstatus.HTTP_NOT_FOUND)
        content = req.getTAL(theme.getTemplate("help.html"), {}, macro='notfound')
    if "export" in req.params:
        if req.params.get('export') == "pdf":
            print "deliver pdf"
            req.reply_headers['Content-Type'] = "application/pdf; charset=utf-8"
            # absolutize help links before rendering the PDF
            content = content.replace('"/help/', '"http://' + config.get('host.name') + '/help/')
            req.write(buildHelpPDF(req.params.get('url'), language))
            return
    # lazily build the menu item cache for this language
    if language not in menustructure:
        menustructure.append(language)
        for path in all_paths:
            addExtItem(language, path, items[language])
    v['content'] = content
    v['languages'] = config.get('i18n.languages').split(',')
    v['curlang'] = translation.lang(req)
    v['items'] = items[translation.lang(req)]
    v['path'] = req.path.split("/")[1:]
    v['url'] = req.path
    v['indexvalues'] = index[language]
    # distinct uppercase first letters of all index terms
    indexchars = sorted(set([i[0].upper() for i in index[language].keys()]))
    v['indexchars'] = indexchars
    req.writeTAL(theme.getTemplate("help.html"), v, macro='help')
def __init__(self, db=None):
    # default: sqlite file under <datadir>/db/, creating the db directory
    # on demand; an explicit *db* path is used unchanged
    if db is None:
        if not os.path.exists(config.get("paths.datadir") + "db/imagearch.db"):
            try:
                # dirname of ".../db/" is ".../db" -- creates the db directory
                os.makedirs(os.path.dirname(config.get("paths.datadir") + "db/"))
            except OSError:
                # directory already exists (or cannot be created); best-effort
                pass
        self.db = config.get("paths.datadir") + "db/imagearch.db"
        self.isInitialized()
    else:
        self.db = db
def handle_cmd_register(source, command, c_text):
    """NickServ REGISTER command: register *source*'s nick.

    c_text holds "<password> [email]".  Depending on config, an email address
    may be mandatory, and email confirmation and/or oper activation may be
    required before the nick can be used.  Returns False on database errors.
    """
    global db_cursor, nickserv
    c_params = c_text.split()
    if(len(c_params) == 0):
        nickserv.sendMsg(source, "The \x02register\x02 command required at least one argument.")
        return
    if(len(c_params) == 1 and config.get("Services/ff_NickServ/Registration/RequireEmail")):
        nickserv.sendMsg(source, "A valid email address is required to register your nickname.")
        return
    try:
        #db_cursor.execute("select count(`nick`) from `ff_nickserv_core` where `nick` like %s", (source))
        #existing_nick_count=db_cursor.fetchone()[0]
        #if(existing_nick_count>0):
        #    nickserv.sendMsg(source, "The nick \x02%s\x02 is already registered.", source)
        #    return
        # will return true if an error is encountered, to prevent
        # registration of the same nick twice
        if(nick_is_registered(source)):
            nickserv.sendMsg(source, "The nick \x02%s\x02 is already registered.", source)
            return
        # random confirmation code for the activation email
        conf_code = hashlib.md5(str(random.random()) + str(time.time())).hexdigest()
        db_cursor.execute("""insert into `ff_nickserv_core` (`nick`,`password`,`email`,`time_registered`,`time_last_seen`,`email_confirmed`,`activated`,`disabled`,`group`,`confirmation_code`) values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)""", (
            source,
            hash_password(source, c_params[0]),
            c_params[1] if len(c_params) > 1 else None,
            long(time.time()),
            long(time.time()),
            # email_confirmed / activated start at 0 when confirmation or
            # oper activation is required, otherwise 1
            0 if config.get("Services/ff_NickServ/Registration/RequireEmail Confirmation") else 1,
            0 if config.get("Services/ff_NickServ/Registration/RequireOperActivation") else 1,
            0,
            None,
            conf_code
        ))
        if(config.get("Services/ff_NickServ/Registration/RequireEmail Confirmation")):
            # todo: send email
            # if email fails, delete the nick and display an error
            nickserv.sendMsg(source, "An activation email has been sent to \x02%s\x02 with a confirmation code. When you have recieved the email, you will have to enter the command \x02/msg NickServ confirm \x1fconfirmationcode\x1f\x02. \nUntil you do so, you will not be able to identify with this nickname.", c_params[1])
        if(config.get("Services/ff_NickServ/Registration/RequireOperActivation")):
            nickserv.sendMsg(source, "You will not be able to identify using this nickname until an IRC operator has activated your account.")
        nickserv.sendMsg(source, "The nickname \x02%s\x02 has been registered using the password \x02%s\x02 - please memorize your password or keep it in a safe place, as it may be impossible to retrieve it.", source, c_params[0])
        log.info("NickServ: Registering new nick and creating group for '%s' (email: %s)", source, c_params[1] if len(c_params) > 1 else "none")
    except Exception as e:
        nickserv.sendMsg(source, "There was a problem registering your nickname.")
        log.error("Can't register nick %s: %s", source, str(e))
        return False
def checkLogin(name, pwd, req=None):
    """Authenticate *name*/*pwd*.

    Order: internal user password (md5), then the configured masterpassword
    (not valid for the admin user), then external authentication.  Returns
    the authenticated user object, or None.
    """
    user = getUser(name)
    digest1 = hashlib.md5(pwd).hexdigest()
    if user and user.getUserType() == "users":
        if digest1 == user.getPassword():
            return user
        # test masterpassword
        if config.get("user.masterpassword") != "" and name != config.get("user.adminuser") and pwd == config.get("user.masterpassword"):
            logging.getLogger('usertracing').info(user.name + " logged in with masterpassword")
            return user
    auth = doExternalAuthentification(name, pwd, req=req)
    # if doExternalAuthentification(name, pwd):
    # if an external authenticator was able to log this
    # user in, store the user name and hashed password
    # in our database, so we recognize this person
    # from now on (and can display him in the admin
    # area).
    # potential security problem: if a local user has
    # the same name as some other external
    # user, that external user can log in using his own
    # password (and overwrite the internal password).
    # This only happens if the names (user ids) are not
    # the email addresses, however.
    if auth:
        return auth
    else:
        return None
    # NOTE(review): everything below is unreachable -- both branches above
    # return.  It appears to be the external-user bookkeeping described by
    # the comment block; confirm whether it should be re-enabled or deleted.
    if auth[0]:
        if user:
            # overwrite password by the one used for
            # the external authentication, so the next
            # login is faster.
            user.set("password", hashlib.md5(pwd).hexdigest())
        else:
            extusers = getExternalUserFolder()
            user = tree.Node(name=name, type="user")
            if '@' in name:
                user.set("email", name)
            user.set("password", hashlib.md5(pwd).hexdigest())
            user.set("opts", '')
            extuser_lock.acquire()
            try:
                if not extusers.hasChild(name):
                    extusers.addChild(user)
            finally:
                extuser_lock.release()
        return 1
def generate_doi_live(node):
    """
    @param node
    Returns a doi for the given node.

    Format: 10.<prefix>/<year><publisher><type><id>/<suffix>, with a trailing
    '/' stripped when the suffix is empty.
    """
    prefix = config.get('doi.prefix_live')
    suffix = config.get('doi.suffix')
    # strips suffix if not declared or set empty
    if suffix is None:
        suffix = ''
    year = ''
    # first attribute (in priority order) that yields a date wins;
    # only the first four characters (the year) are used
    possible_year_fields = [
        'year',
        'year-accepted',
        'sv-year',
        'event-date',
        'creationtime',
        'date-start',
        'time-created',
        'pdf_creationdate',
        'date-end',
        'ingested',
        'updatetime',
        'updatesearchindex',
    ]
    for field in possible_year_fields:
        if field in node.attributes:
            year = node.get(field)[:4]
            break
    if node.getContentType() not in ('document', 'image'):
        raise Exception('document type not document or image but rather {}'.format(node.type))
    doi = '10.{}/{}{}{}{}/{}'.format(prefix, year, config.get('doi.publisher'),
                                     node.type, node.id, suffix)
    return doi.rstrip('/')
def do_net_send_auth(event):
    """Send the PASS / PROTOCTL / SERVER handshake to the uplink server."""
    Network.sendMsg(IRCMessage(None, None, 'pass', config.get("Network/Password")))
    proto_opts = list(ffservices.protoctl)
    proto_opts.append("NICKCHARS=%s" % ",".join(ffservices.pro_nickchars))
    Network.sendMsg(IRCMessage(None, None, 'protoctl', *proto_opts))
    server_info = "U%d-%s-%d %s" % (
        ffservices.unrealProtocol,
        "".join(ffservices.flags),
        config.get("Server/Numeric"),
        config.get("Server/Description"),
    )
    Network.sendMsg(IRCMessage(None, None, 'server', config.get("Server/Name"), '1', server_info))
def sendmailUser_mask(req, id, err=0):
    """Admin mask: prepare (or, for id 'execute'/'execu', actually send) a
    password mail to a user.

    On execute: resets the user's password to a random one, substitutes it
    into the mail text and sends it.  Otherwise renders the mail form
    prefilled with user data and readable/writable collections.
    """
    v = getAdminStdVars(req)
    v["path"] = req.path[1:]
    if id in["execute", "execu"]:
        userid = req.params.get("userid")
        user = getUser(userid)
        if not user:
            # fall back to external user folder named by the last path part
            path = req.path[1:].split("/")
            user = getExternalUser(userid, path[-1])
        password = makeRandomPassword()
        user.resetPassword(password)
        text = req.params.get("text")
        # the German placeholder "[wird eingesetzt]" marks where the new
        # password is inserted into the mail body
        text = text.replace("[wird eingesetzt]", password)
        try:
            mail.sendmail(req.params.get("from"), req.params.get("email"), req.params.get("subject"), text)
        except mail.SocketError:
            print "Socket error while sending mail"
            req.setStatus(httpstatus.HTTP_INTERNAL_SERVER_ERROR)
            return req.getTAL("web/admin/modules/user.html", v, macro="sendmailerror")
        return req.getTAL("web/admin/modules/user.html", v, macro="sendmaildone")
    user = getUser(id)
    if not user:
        path = req.path[1:].split("/")
        user = getExternalUser(id, path[-1])
    # collect collections the user can read/write, for display in the mail
    collections = []
    seen = {}
    access = acl.AccessData(user=user)
    for node in getAllCollections():
        if access.hasReadAccess(node):
            if access.hasWriteAccess(node):
                collections.append(node.name + " (lesen/schreiben)")
                seen[node.id] = None
    for node in tree.getRoot("collections").getChildren():
        if access.hasReadAccess(node) and node.id not in seen:
            collections.append(node.name + " (nur lesen)")
    x = {}
    x["name"] = "%s %s" % (user.getFirstName(), user.getLastName())
    if(x["name"] == ""):
        x["name"] = user.getName()
    x["host"] = config.get("host.name")
    x["login"] = user.getName()
    x["isEditor"] = user.isEditor()
    x["collections"] = list()
    x["groups"] = user.getGroups()
    x["groups"].sort()
    x["language"] = lang(req)
    x["collections"] = collections
    x["collections"].sort()
    v["mailtext"] = req.getTAL("web/admin/modules/user.html", x, macro="emailtext").strip()
    v["email"] = user.getEmail()
    v["userid"] = user.getName()
    return req.getTAL("web/admin/modules/user.html", v, macro="sendmail")
def get_archived(req):
    """Restore a node from the archive and block until it is available.

    Spawns the matching archive manager script as an external process, then
    polls the node's 'archive_state' attribute once per second until the
    process marks it as "2" (restored).
    """
    print "send archived"
    id, filename = splitpath(req.path)
    node = tree.getNode(id)
    # state "1": restore requested
    node.set("archive_state", "1")
    if not archivemanager:
        req.write("-no archive module loaded-")
        return
    archiveclass = ""
    # pick the archive manager script matching the node's archive_type
    for item in config.get("archive.class").split(";"):
        if item.endswith(node.get("archive_type")):
            archiveclass = item + ".py"
            break
    if archiveclass:  # start process from archive
        os.chdir(config.basedir)
        # NOTE(review): shell command built by string interpolation; node.id
        # comes from the tree so this is likely safe, but subprocess with an
        # argument list would be more robust
        os.system("python %s %s" % (archiveclass, node.id))
    st = ""
    while True:  # poll until the external process sets archive_state to "2"
        attrs = tree.db.getAttributes(id)
        if "archive_state" in attrs.keys():
            st = attrs['archive_state']
        time.sleep(1)
        if st == "2":
            break
    # drop stale cached children so the restored files are picked up
    for n in node.getAllChildren():
        tree.remove_from_nodecaches(n)
    req.write('done')
def create_meta_file(node):
    """
    @param node
    Creates and returns the path to the 'metadata file' needed to register
    the doi with datacite via api.

    The XML body is produced by the schema's 'doi' mask.  If that mask is
    missing, the node's doi attribute is removed again and an error logged.
    """
    if 'doi' not in node.attributes:
        raise Exception('doi not set')
    else:
        tmp = config.get('paths.tempdir')
        filename = 'meta_file_{}.txt'.format(node.id)
        path = os.path.join(tmp, filename)
        if os.path.exists(path):
            # file was already generated earlier; reuse it
            pass
        else:
            try:
                with codecs.open(path, 'w', encoding='utf8') as f:
                    mask = q(Metadatatype).filter_by(name=node.schema).scalar().get_mask('doi')
                    # .get_mask returns None for a missing mask, so the call
                    # below raises AttributeError -- handled further down
                    xml = mask.getViewHTML([node], flags=8)
                    f.write(xml)
            except AttributeError:
                logg.error(
                    'Doi was not successfully registered: Doi-mask for Schema %s is missing and should be created',
                    node.schema)
                # roll back: without a mask the doi cannot be registered
                del node.attrs['doi']
            except IOError:
                logg.exception('Error creating %s', path)
        return path
def archive_thread(self):
    """Background loop: periodically collect nodes due for archiving and
    hand them to their archive manager.

    State "3" nodes are archived unconditionally; state "2" nodes only once
    their 'archive_date' grace period has passed.
    """
    if not time:
        # interpreter shutdown: module globals may already be gone
        return
    while True:
        time.sleep(int(config.get("archive.interval", 60)))
        archive_nodes_3 = db.getNodeIdByAttribute("archive_state", "3")
        archive_nodes_2 = []
        # NOTE(review): "yyymmddhhmmss" (3 y's) looks like a typo for
        # "yyyymmddhhmmss"; it is at least used consistently for both sides
        # of the comparison below -- confirm against format_date()
        date_now = format_date(now(), "yyymmddhhmmss")
        for manager in self.manager:
            # search for nodes to archive after access over period (state 2)
            for n in db.getNodeIdByAttribute("archive_state", "2"):
                try:
                    node = tree.getNode(n)
                    if node.get("archive_date"):
                        date_archive = format_date(parse_date(node.get("archive_date"), "%Y-%m-%dT%H:%M:%S"), "yyymmddhhmmss")
                        if date_now >= date_archive:
                            archive_nodes_2.append(long(node.id))
                except:
                    # best-effort: a broken node must not stop the loop
                    pass
            # union to get all nodes with state 3 and 2 with over period
            archive_nodes = union((archive_nodes_3, archive_nodes_2))
            nodes = intersection((db.getNodeIdByAttribute("archive_type", str(manager)), archive_nodes))
            # run action defined in manager
            try:
                self.manager[manager].actionArchive(nodes)
            except:
                # keep the daemon alive even if one manager fails
                pass
def create_user(name, email, groups, pwd="", lastname="", firstname="", telephone="",
                comment="", option="", organisation="", identificator="", type="intern"):
    """Create a user node, set its attributes and attach it to its groups
    and the matching user folder.  Returns the new user node."""
    if not pwd:
        pwd = config.get("user.passwd")
    if type == "intern":
        folder = tree.getRoot("users")
    else:
        folder = getExternalUserFolder(type)
    user = tree.Node(name=name, type="user")
    # passwords are stored as md5 hex digests
    attributes = (
        ("email", email),
        ("password", hashlib.md5(pwd).hexdigest()),
        ("opts", option),
        ("lastname", lastname),
        ("firstname", firstname),
        ("telephone", telephone),
        ("comment", comment),
        ("organisation", organisation),
    )
    for key, value in attributes:
        user.set(key, value)
    if identificator != "":
        user.set("identificator", identificator)
    for group_name in groups.split(","):
        usergroups.getGroup(group_name).addChild(user)
    folder.addChild(user)
    return user
def getStatFile(col_id, timestamp, type, period=period):
    """Return a writable stat-XML file handle for the collection, or None.

    For the current month (or the explicitly given *period*) an existing
    statistic file is removed and recreated with a fresh XML header; for old
    months nothing is done.  New file names are recorded in
    col_id.statfiles together with the path of the replaced file.
    """
    f = None
    node = col_id.collection
    orig_file = None
    for file in node.getFiles():
        if file.getType() == u"statistic":
            try:
                if file.getName() == u"stat_{}_{}_{}.xml".format(node.id, timestamp, type):
                    if timestamp == format_date(now(), "yyyy-mm") or timestamp == period:
                        # update current month or given period: drop the old
                        # file so it can be rebuilt from scratch
                        # orig_file = file.retrieveFile()
                        if os.path.exists(file.retrieveFile()):
                            print 'removing %s' % file.retrieveFile()
                            os.remove(file.retrieveFile())
                        orig_file = file.retrieveFile()
                        # node.files.remove(file)
                        f = None
                        break
                    else:
                        # old month, do nothing
                        print 'old file doing nothing'
                        return None
            except:
                # best-effort: unreadable statistic file -> skip generation
                return None
    if not f:
        # create new file in the tempdir and write the XML header
        f_name = config.get("paths.tempdir") + u"stat_{}_{}_{}.xml".format(node.id, timestamp, type)
        print 'creating writing headers %s' % f_name
        f = codecs.open(f_name, "w", encoding='utf8')
        f.write('<?xml version="1.0" encoding="utf-8" ?>\n')
        f.write('<nodelist created="' + format_date(now(), "yyyy-mm-dd HH:MM:SS") + '">\n')
        if f_name not in col_id.statfiles:
            col_id.statfiles.append((f_name, orig_file))
    return f
def feedback(self, req):
    """Collect per-request frame state: user links, navigation portlets and
    language info, stored in self.params for later rendering."""
    user = users.getUserFromRequest(req)
    userlinks = UserLinks(user, area=req.session.get("area"))
    userlinks.feedback(req)
    # tabs
    navigation = {}
    # collection portlet (selected collection drives the search portlet)
    collection_portlet = self.collection_portlet
    collection_portlet.feedback(req)
    col_selected = collection_portlet.collection
    navigation["collection"] = collection_portlet
    # search
    search_portlet = self.cmap.getSearch(col_selected)
    search_portlet.feedback(req)
    navigation["search"] = search_portlet
    # languages: all configured languages plus the active one
    front_lang = {}
    front_lang["name"] = config.get("i18n.languages").split(",")
    front_lang["actlang"] = lang(req)
    self.params = {"show_navbar": True, "user": user, "userlinks": userlinks,
                   "navigation": navigation, "language": front_lang}
def showEditor(node, hiddenvalues=None, allowedFields=None):
    """Render an HTML metadata editor fragment (table rows) for *node*.

    @param node          node whose metafields are edited
    @param hiddenvalues  optional dict rendered as hidden form inputs
    @param allowedFields optional whitelist of field names to show
    """
    # FIX: mutable default argument ({}) replaced by the None sentinel
    if hiddenvalues is None:
        hiddenvalues = {}
    result = ""
    fields = node.getType().getMetaFields()
    for field in fields:
        name = field.getName()
        langNames = None
        # multilingual text fields get one sub-value per configured language
        if field.get("text"):
            langNames = [lang + name for lang in config.get("i18n.languages").split(",")]
        if allowedFields and name not in allowedFields:
            continue
        value = ""
        if langNames:
            for langName in langNames:
                value += langName + "\n" + node.get(langName + "__" + name) + "\n"
        else:
            value = node.get(name)
        lock = 0
        # required fields are marked with a red asterisk
        if (field.getRequired() > 0):
            result += ('<tr><td align="left">' + field.getLabel() + ': <span class="required">*</span></td>')
        else:
            result += '<tr><td align="left">%s:</td>' % (field.getLabel())
        result += '<td align="left">%s</td></tr>' % (field.getEditorHTML(value, 400, lock))
    # footer: required-field legend (German UI text) and marker input
    result += ('<tr><td> </td><td align="left"><small>(<span class="required">*</span> Pflichtfeld, darf nicht leer sein)</small></td></tr>')
    result += ('<input type="hidden" name="metaDataEditor" value="metaDataEditor">')
    for k, v in hiddenvalues.items():
        result += ("""<input type="hidden" name="%s" value="%s">\n""" % (k, v))
    return result
def ranking_by_gold(connection, limit): close = connection is None try: if connection is None: connection = mysql_connection.get_conn() accounts = [] sql = config.get("sql", "sql_ranking_by_gold") % limit with connection.cursor() as cursor: cursor.execute(sql) result = cursor.fetchall() for r in result: a = Account() a.id = r["id"] a.account_name = r["account_name"] a.nick_name = r["nick_name"] a.sex = r["sex"] a.pswd = r["pswd"] a.head_url = r["head_url"] a.create_time = r["create_time"] a.last_time = r["last_time"] a.last_address = r["last_address"] a.account_state = r["account_state"] a.gold = r["gold"] a.integral = r["integral"] a.bank_pswd = r["bank_pswd"] a.bank_gold = r["bank_gold"] a.bank_integral = r["bank_integral"] a.authority = r["authority"] a.total_count = r["total_count"] a.introduce = r["introduce"] a.phone = r["phone"] a.level = r["level"] a.experience = r["experience"] a.device = r["device"] accounts.append(a) return accounts except: print traceback.print_exc() finally: if close and connection is not None: connection.close() return None
def query_account_by_ids(connection, ids): close = connection is None accounts = {} try: if connection is None: connection = mysql_connection.get_conn() in_p = ', '.join((map(lambda x: '%s', ids))) sql = config.get("sql", "sql_query_account_by_ids") % in_p with connection.cursor() as cursor: cursor.execute(sql, ids) r = cursor.fetchall() for result in r: a = Account() a.id = result["id"] a.account_name = result["account_name"] a.nick_name = result["nick_name"] a.sex = result["sex"] a.pswd = result["pswd"] a.head_url = result["head_url"] a.create_time = result["create_time"] a.last_time = result["last_time"] a.last_address = result["last_address"] a.account_state = result["account_state"] a.gold = result["gold"] a.integral = result["integral"] a.bank_pswd = result["bank_pswd"] a.bank_gold = result["bank_gold"] a.bank_integral = result["bank_integral"] a.authority = result["authority"] a.total_count = result["total_count"] a.introduce = result["introduce"] a.phone = result["phone"] a.level = result["level"] a.experience = result["experience"] a.device = result["device"] accounts[a.id] = a except: print traceback.print_exc() finally: if close and connection is not None: connection.close() return accounts
def login(req):
    """Show the login page, or process a submitted login form.

    Returns HTTP_MOVED_TEMPORARILY after a successful login (Location set by
    _handle_login_submit), otherwise renders the login form -- with an error
    flag when the submit failed.
    """
    if "LoginSubmit" in req.form:
        error = _handle_login_submit(req)
        if not error:
            return httpstatus.HTTP_MOVED_TEMPORARILY
    else:
        error = None
        # remember where to send the user after a successful login
        _set_return_after_login(req)
    # show login form
    user = users.user_from_session(req.session)
    ctx = {"error": error, "user": user, "email": config.get("email.support")}
    login_html = webconfig.theme.render_macro("login.j2.jade", "login", ctx)
    # following import is also needed for pytest monkeypatch for render_page
    from web.frontend.frame import render_page
    html = render_page(req, None, login_html)
    req.write(html)
    return httpstatus.HTTP_OK
def query_room_by_room_no(room_no):
    """Look up a single room by its room number.

    @param room_no: room number interpolated into the configured SQL
    @return: Room instance, or None when not found or on error
    """
    connection = None
    try:
        connection = mysql_connection.get_conn()
        sql = config.get("sql", "sql_query_room_by_room_no") % room_no
        with connection.cursor() as cursor:
            cursor.execute(sql)
            result = cursor.fetchone()
        if result is not None:
            room = Room()
            room.room_no = result["room_no"]
            room.room_type = result["room_type"]
            room.room_status = result["room_status"]
            return room
    except Exception:
        # was ``print traceback.print_exc()`` -- printed a spurious "None"
        traceback.print_exc()
    finally:
        if connection is not None:
            connection.close()
    return None
def createUrn(node, namespace, urn_type):
    """
    @param node for which the URN should be created
    @param namespace of the urn; list of the namespaces can be found here:
        http://www.iana.org/assignments/urn-namespaces/urn-namespaces.xml
    @param urn_type e.q. diss, epub, etc
    """
    if node.get('urn') and (node.get('urn').strip() != ''):
        # keep the existing urn, if there is one
        logging.getLogger('everything').info('urn already exists for node %s', node.id)
    else:
        try:
            d = date.parse_date(node.get('date-accepted'))
        except Exception:
            # deliberate best-effort: fall back to "now" when the accepted
            # date is missing or unparsable (was a bare ``except:`` which
            # would also swallow KeyboardInterrupt/SystemExit)
            d = date.now()
        niss = '%s-%s-%s-0' % (urn_type, date.format_date(d, '%Y%m%d'), node.id)
        node.set('urn', urn.buildNBN(namespace, config.get('urn.institutionid'), niss))
def init_theme():
    """Activate the theme named in the config, falling back to the default."""
    theme_name = config.get("config.theme", "")
    if theme_name:
        theme_basepath = find_plugin_with_theme(theme_name)
        if theme_basepath is not None:
            theme_dir = os.path.join(theme_basepath, "themes", theme_name)
            logg.info("Loading theme '%s' from '%s'", theme_name, theme_dir)
            custom = CustomTheme(theme_name, theme_dir + "/")
            custom.activate()
            return
        logg.warn("theme from config file with name '%s' not found, maybe a plugin is missing?", theme_name)
    # no theme configured, or the configured one is missing
    fallback = DefaultTheme()
    fallback.activate()
    logg.warn("using (broken) standard theme, you should create your own theme :)", trace=False)
def show_node_big(self, req, template="", macro=""):
    """Render the 'big' view of this node: startpage content plus an
    optional language-specific sidebar column.

    @param req: the current request (used for language, params and TAL)
    @param template, macro: accepted for interface compatibility; unused here
    @return: HTML string (possibly empty)
    """
    content = ""
    link = "node?id=" + self.id + "&files=1"
    sidebar = ""
    pages = self.getStartpageDict()
    # "system.sidebar" holds ";"-separated "<lang>:<filename>" entries;
    # pick the file matching the request language and expand its modules
    if self.get("system.sidebar") != "":
        for sb in self.get("system.sidebar").split(";"):
            if sb != "":
                l, fn = sb.split(":")
                if l == lang(req):
                    for f in self.getFiles():
                        if fn.endswith(f.getName()):
                            sidebar = includetemplate(self, f.retrieveFile(), {})
                            sidebar = replaceModules(self, req, sidebar).strip()
    if sidebar != "":
        # wrap the sidebar in the extra-column template
        sidebar = req.getTAL("contenttypes/directory.html", {"content": sidebar}, macro="addcolumn")
    else:
        sidebar = ""
    if "item" in req.params:
        # explicit item request: serve a static html file from the datadir
        # NOTE(review): req.params["item"] is joined into the path unchecked --
        # looks vulnerable to "../" traversal; confirm upstream sanitization
        fpath = config.get("paths.datadir") + "html/" + req.params.get("item")
        if os.path.isfile(fpath):
            c = open(fpath, "r")
            content = c.read()
            c.close()
            if sidebar != "":
                return '<div id="portal-column-one">' + content + '</div>' + sidebar
            return content
    # otherwise render the configured startpage file for the request language
    spn = self.getStartpageFileNode(lang(req))
    if spn:
        long_path = spn.retrieveFile()
        if os.path.isfile(long_path) and fileIsNotEmpty(long_path):
            content = includetemplate(self, long_path, {'${next}': link})
            content = replaceModules(self, req, content)
        if content:
            if sidebar != "":
                return '<div id="portal-column-one">' + content + '</div>' + sidebar
            return content
    return content + sidebar
def pwdchange(req, error=0):
    """Render and process the password-change form.

    @param req: current request
    @param error: pre-set error code to display (0 = none)
    @return: an HTTP status code; redirects on success or for guest users
    """
    if len(req.params) > 2 and "password_old" not in req.params:
        # user changed to browsing
        return buildURL(req)
    user = users.getUserFromRequest(req)
    if not user.canChangePWD() and not user.isAdmin():
        error = 4  # no rights
    elif "ChangeSubmit" in req.params:
        if user.getName() == config.get("user.guestuser"):
            # guests may not change a password: bounce to the collections root
            req.request["Location"] = req.makeLink(
                "node", {"id": tree.getRoot("collections").id})
            return httpstatus.HTTP_MOVED_TEMPORARILY
        else:
            if not users.checkLogin(user.getName(), req.params.get("password_old")):
                error = 1  # old pwd does not match
            elif req.params.get("password_new1") != req.params.get("password_new2"):
                error = 2  # new pwds do not match
            else:
                # all checks passed: store the new password and redirect
                user.setPassword(req.params.get("password_new2"))
                req.request["Location"] = req.makeLink(
                    "node", {"id": tree.getRoot("collections").id})
                return httpstatus.HTTP_MOVED_TEMPORARILY
    navframe = frame.getNavigationFrame(req)
    navframe.feedback(req)
    contentHTML = req.getTAL(theme.getTemplate("login.html"), {
        "error": error,
        "user": user
    }, macro="change_pwd")
    navframe.write(req, contentHTML)
    return httpstatus.HTTP_OK
def handle(self, queue):
    """Consume batches of game-server messages and dispatch them by opcode
    until close is requested.

    @param queue: blocking message queue; ``getall(20, True, 20)`` drains up
        to 20 raw protobuf messages, waiting at most 20s
    """
    while not self.__close:
        try:
            netMessage = NetMessage()
            messages = queue.getall(20, True, 20)
            for message in messages:
                netMessage.ParseFromString(message)
                gl.get_v("serverlogger").logger.info('''收到游戏服消息%d''' % netMessage.opcode)
                if netMessage.opcode == REGISTER_SERVICE:
                    # a game server announces itself; only accept with the
                    # configured shared password
                    reqRegisterGame = ReqRegisterGame()
                    reqRegisterGame.ParseFromString(netMessage.data)
                    if reqRegisterGame.password == config.get(
                            "coordinate", "game_connect_pwd"):
                        gl.get_v("games").append(
                            Game(reqRegisterGame.alloc_id,
                                 reqRegisterGame.name, netMessage.id))
                elif netMessage.opcode == CHANGE_SERVICE_STATE:
                    reqServiceState = ReqServiceState()
                    # BUG FIX: the payload was never parsed, so the default
                    # (zero) state was applied regardless of the message
                    reqServiceState.ParseFromString(netMessage.data)
                    self.changeServerState(netMessage.id, reqServiceState.state)
                elif netMessage.opcode == EXIT_GAME:
                    userExit = UserExit()
                    userExit.ParseFromString(netMessage.data)
                    self.update_currency(userExit.playerId)
                    self.send_to_gateway(EXIT_GAME, None, userExit.playerId)
                elif netMessage.opcode == APPLY_CHANGE_MATCH:
                    userExit = UserExit()
                    userExit.ParseFromString(netMessage.data)
                    self.update_currency(userExit.playerId)
                    recApplyChangeMatch = RecApplyChangeMatch()
                    recApplyChangeMatch.gameId = userExit.roomNo
                    recApplyChangeMatch.level = userExit.level
                    self.send_to_gateway(APPLY_CHANGE_MATCH,
                                         recApplyChangeMatch, userExit.playerId)
        except Empty:
            gl.get_v("serverlogger").logger.info("Received timeout")
        except Exception:
            # was ``print traceback.print_exc()`` -- printed a spurious "None"
            traceback.print_exc()
def makePresentationFormat(image, thumb):
    """Create a 320px JPEG presentation thumbnail of ``image`` at ``thumb``.

    Does nothing if ``thumb`` is already newer than ``image``. CMYK JPEGs and
    palette/greyscale images are first flattened via ImageMagick ``convert``.

    @raise OperationException: if the image data cannot be loaded
    """
    if isnewer(thumb, image):
        return
    pic = PILImage.open(image)
    tmpjpg = config.get("paths.datadir") + "tmp/img" + str(
        random.random()) + ".jpg"
    if pic.mode == "CMYK" and (image.endswith("jpg") or image.endswith("jpeg")) or pic.mode in ["P", "L"]:
        # NOTE(review): paths are interpolated unquoted into a shell command;
        # filenames with spaces or shell metacharacters will break this.
        os.system("convert -quality 100 -draw \"rectangle 0,0 1,1\" %s %s" % (image, tmpjpg))
        pic = PILImage.open(tmpjpg)
    try:
        pic.load()
    except IOError as e:
        raise OperationException("error:" + str(e))
    width, height = pic.size
    # scale so the longer edge becomes 320px, keeping the aspect ratio
    # (removed the always-true ``resize = 1`` flag and the dead ``pic = None``
    # store before the raise above)
    if width > height:
        newwidth = 320
        newheight = height * newwidth // width
    else:
        newheight = 320
        newwidth = width * newheight // height
    pic = pic.resize((newwidth, newheight), PILImage.ANTIALIAS)
    try:
        pic.save(thumb, "jpeg")
    except IOError:
        # some modes (e.g. RGBA) cannot be written as JPEG directly
        pic.convert('RGB').save(thumb, "jpeg")
    if os.path.exists(tmpjpg):
        os.unlink(tmpjpg)
def serve_file(req, filepath):
    """Serve ``filepath`` from the datadir tmp directory.

    @param req: current request; an optional 'mimetype' param overrides detection
    @param filepath: path relative to the tmp dir
    @return: (status, filesize, abspath) -- 200 on success, 404 otherwise
    """
    if 'mimetype' in req.params:
        mimetype = req.params.get('mimetype')
    elif filepath.lower().endswith(('.html', '.htm')):
        mimetype = 'text/html'
    else:
        mimetype = getMimeType(filepath)

    req.reply_headers['Content-Type'] = mimetype

    tmppath = config.get("paths.datadir") + "tmp/"
    abspath = os.path.join(tmppath, filepath)
    # SECURITY FIX: reject paths that escape the tmp dir (e.g. "../etc/passwd"
    # or an absolute filepath, which os.path.join would honor verbatim)
    if not os.path.abspath(abspath).startswith(os.path.abspath(tmppath) + os.sep):
        return 404, 0, abspath  # not found
    if os.path.isfile(abspath):
        filesize = os.path.getsize(abspath)
        req.sendFile(abspath, mimetype, force=1)
        return 200, filesize, abspath  # ok
    else:
        return 404, 0, abspath  # not found
def _generate_zoom_archive(self, files=None):
    """Regenerate the zoom-tile zip archive for this image.

    Removes any previously attached zoom archives, builds a fresh one from
    the processing file, and attaches it to this node.
    """
    if files is None:
        files = self.files.all()
    image_file = self._find_processing_file(files)
    zip_filename = get_zoom_zip_filename(self.id)
    zip_filepath = os.path.join(config.get("paths.zoomdir"), zip_filename)

    # drop stale zoom archives before writing the replacement
    stale_zoom_files = [f for f in files if f.filetype == u"zoom"]
    for stale in stale_zoom_files:
        self.files.remove(stale)
        stale.unlink()

    _create_zoom_archive(Image.ZOOM_TILESIZE, image_file.abspath, zip_filepath)
    new_file = File(path=zip_filepath, filetype=u"zoom", mimetype=u"application/zip")
    self.files.append(new_file)
def sendZipFile(req, path):
    """Zip the directory tree at ``path`` into a temp file and stream it as
    'shoppingbag.zip'; the temp file is unlinked afterwards on Unix.

    @param req: current request
    @param path: root directory to archive
    """
    tempfile = os.path.join(config.get("paths.tempdir"), unicode(random.random())) + ".zip"
    archive = zipfile.ZipFile(tempfile, "w")  # renamed: ``zip`` shadowed the builtin
    archive.debug = 3

    def r(p):
        # depth-first walk; files are stored under their tree-relative name
        if os.path.isdir(os.path.join(path, p)):
            for entry in os.listdir(os.path.join(path, p)):
                r(os.path.join(p, entry))
        else:
            while len(p) > 0 and p[0] == "/":
                p = p[1:]
            # best-effort: skip unreadable entries instead of aborting
            with suppress(Exception, warn=False):
                archive.write(os.path.join(path, p), p)

    try:
        r("/")
    finally:
        # close even if the walk raises, so the handle is never leaked
        archive.close()
    req.reply_headers['Content-Disposition'] = "attachment; filename=shoppingbag.zip"
    _sendFile(req, tempfile, "application/zip", nginx_x_accel_redirect_enabled=False)
    if os.sep == '/':  # Unix?
        os.unlink(tempfile)  # unlinking files while still reading them only works on Unix/Linux
def create_golds(type, updates):
    """Insert one gold-change record per update, committing them as a batch.

    @param type: record type interpolated into the configured SQL
    @param updates: objects carrying user_id, roomNo and gold delta
    """
    from data.database import data_account
    connection = None
    try:
        connection = mysql_connection.get_conn()
        with connection.cursor() as cursor:
            for update in updates:
                # re-read the account so the logged balance is current
                account = data_account.query_account_by_id(
                    connection, update.user_id)
                sql = config.get("sql", "sql_create_gold") % (
                    type, update.roomNo, update.user_id, update.gold,
                    account.gold, int(time.time()))
                cursor.execute(sql)
        connection.commit()
    except Exception:
        # was ``print traceback.print_exc()`` -- printed a spurious "None"
        traceback.print_exc()
        if connection is not None:
            connection.rollback()
    finally:
        if connection is not None:
            connection.close()
def getStatFile(node, timestamp, type, period=period):
    """Return an open XML stat file for (node, timestamp, type).

    If a matching "statistic" file is attached to the node for the current
    month (or the given period), the old file is removed and a fresh one is
    created in the temp dir; for older months nothing is done and None is
    returned. New files get the XML header written and are registered in the
    module-level ``statfiles`` list.

    @param node: node whose statistic files are inspected
    @param timestamp: "yyyy-mm" month string the stats belong to
    @param type: stat type used in the file name
    @param period: period that, like the current month, forces a rebuild
    @return: open file object (append or write mode), or None
    """
    f = None
    for file in node.getFiles():
        if file.getType() == "statistic":
            try:
                if file.getName() == "stat_{}_{}_{}.xml".format(
                        node.id, timestamp, type):
                    if timestamp == str(
                            format_date(now(), "yyyy-mm")
                    ) or timestamp == period:  # update current month or given period
                        if os.path.exists(file.retrieveFile()):
                            print 'removing %s' % file.retrieveFile()
                            os.remove(file.retrieveFile())
                        node.removeFile(file)  # remove old file and create new
                        f = None
                        break
                    else:  # old month, do nothing
                        print 'old file doing nothing'
                        return None
            except:
                # deliberate best-effort: any failure while inspecting or
                # removing the old file aborts the whole operation
                return None
    if not f:  # create new file
        f_name = config.get("paths.tempdir") + "stat_{}_{}_{}.xml".format(
            node.id, timestamp, type)
        if os.path.exists(f_name):
            # temp file already started in this run: append to it
            f = open(f_name, "a")
        else:
            # create new file and write header:
            print 'creating writing headers %s' % f_name
            f = open(f_name, "w")
            f.write('<?xml version="1.0" encoding="utf-8" ?>\n')
            f.write('<nodelist created="' +
                    str(format_date(now(), "yyyy-mm-dd HH:MM:SS")) + '">\n')
        if f_name not in statfiles:
            statfiles.append(f_name)
    return f
def query_account_by_id(connection, id):
    """Fetch a single account by id.

    @param connection: open DB connection, or None to open (and close) one here
    @param id: account id interpolated into the configured SQL
    @return: Account instance, or None when not found or on error
    """
    close = connection is None
    try:
        if connection is None:
            connection = mysql_connection.get_conn()
        sql = config.get("sql", "sql_query_account_by_id") % id
        with connection.cursor() as cursor:
            cursor.execute(sql)
            result = cursor.fetchone()
        if result is not None:
            a = Account()
            fields = ("id", "account_name", "nick_name", "sex", "pswd",
                      "head_url", "create_time", "last_time", "last_address",
                      "account_state", "gold", "integral", "bank_pswd",
                      "bank_gold", "bank_integral", "authority", "total_count",
                      "introduce", "phone", "level", "experience", "device")
            for field in fields:
                setattr(a, field, result[field])
            return a
    except Exception:
        # was ``print traceback.print_exc()`` -- printed a spurious "None"
        traceback.print_exc()
    finally:
        if close and connection is not None:
            connection.close()
    return None
def file_to_node(file_node, upload_dir):
    '''
    Converts the FileNode object in the upload_dir into a Node with the
    FileNode as an attachment.
    @param file_node: FileNode
    @param upload_dir: Node
    @return: Node if one was created, otherwise None
    '''
    home_dir = upload_dir.getParents()[0]
    file_type = file_node.getType()
    # 'other' and 'zip' uploads are not turned into nodes
    if file_type == 'other' or file_type == 'zip':
        return
    # strip the "ftp_" prefix from the file name and rename on disk
    path = file_node.retrieveFile().split('/')
    new_name = path.pop().replace('ftp_', '', 1)
    path.append(new_name)
    new_path = '/'.join(path)
    try:
        os.rename(file_node.retrieveFile(), new_path)
    except:
        # best-effort: if the rename fails, keep using the original path
        new_path = file_node.retrieveFile()
    # schema per file type is configured on the home dir, e.g. "system.ftp.image"
    schema = home_dir.get('system.ftp.{}'.format(file_type)).lstrip('/')
    if not schema:
        schema = 'file'
    new_node = tree.Node(utf8_decode_escape(new_name),
                         type='/'.join([file_node.getType(), schema]))
    upload_dir.removeFile(file_node)
    # rewrite the stored path: make it datadir-relative and point it at the
    # renamed file name
    file_node._path = file_node._path.replace(config.get('paths.datadir'), '')
    file_node._path = file_node._path.replace(
        file_node._path.split('/')[-1], new_node.getName())
    new_node.addFile(file_node)
    new_node.event_files_changed()
    upload_dir.addChild(new_node)
    return new_node
def getStartpageFileNode(self, language, verbose=False):
    """Return the file node serving as this node's startpage for ``language``.

    Uses the startpage dict when present; without one, falls back to the
    last attached html content file. Returns None when nothing matches.
    """
    basedir = config.get("paths.datadir")
    startpages = self.getStartpageDict()
    found = None

    if startpages and (language in startpages):
        wanted_shortpath = startpages[language]
        if wanted_shortpath:
            for candidate in self.getFiles():
                try:
                    shortpath = candidate.retrieveFile().replace(basedir, "")
                except IOError:
                    continue
                if wanted_shortpath == shortpath:
                    found = candidate

    if not startpages:
        # no explicit mapping: take the last html content file
        for candidate in self.getFiles():
            # call kept for parity with the mapped branch (may raise IOError)
            candidate.retrieveFile().replace(basedir, "")
            if candidate.getType() == 'content' and candidate.mimetype == 'text/html':
                found = candidate

    return found
def query_rooms():
    """Fetch all rooms.

    @return: list of Room objects; empty on error
    """
    connection = None
    rooms = []
    try:
        connection = mysql_connection.get_conn()
        sql = config.get("sql", "sql_query_rooms")
        with connection.cursor() as cursor:
            cursor.execute(sql)
            r = cursor.fetchall()
        if r is not None:
            for result in r:
                room = Room()
                room.room_no = result["room_no"]
                room.room_type = result["room_type"]
                room.room_status = result["room_status"]
                rooms.append(room)
    except Exception:
        # was ``print traceback.print_exc()`` -- printed a spurious "None"
        traceback.print_exc()
    finally:
        if connection is not None:
            connection.close()
    return rooms
def getStatFile(col_id, timestamp, type, period=period):
    """Return an open XML stat file for the collection behind ``col_id``.

    Like the node-based variant: for the current month (or the given period)
    an existing matching statistic file is removed and a fresh temp file with
    the XML header is returned; for older months None is returned. The file
    name (and the path of the removed original, if any) is recorded in
    ``col_id.statfiles``.

    @param col_id: object carrying .collection and .statfiles
    @param timestamp: "yyyy-mm" month string the stats belong to
    @param type: stat type used in the file name
    @param period: period that, like the current month, forces a rebuild
    @return: open utf8 file object in write mode, or None
    """
    f = None
    node = col_id.collection
    orig_file = None
    for file in node.getFiles():
        if file.getType() == u"statistic":
            try:
                if file.getName() == u"stat_{}_{}_{}.xml".format(
                        node.id, timestamp, type):
                    if timestamp == format_date(
                            now(), "yyyy-mm"
                    ) or timestamp == period:  # update current month or given period
                        # orig_file = file.retrieveFile()
                        if os.path.exists(file.retrieveFile()):
                            print 'removing %s' % file.retrieveFile()
                            os.remove(file.retrieveFile())
                        orig_file = file.retrieveFile()
                        # node.files.remove(file)
                        f = None
                        break
                    else:  # old month, do nothing
                        print 'old file doing nothing'
                        return None
            except:
                # deliberate best-effort: any failure while inspecting or
                # removing the old file aborts the whole operation
                return None
    if not f:  # create new file
        f_name = config.get("paths.tempdir") + u"stat_{}_{}_{}.xml".format(
            node.id, timestamp, type)
        # create new file and write header:
        print 'creating writing headers %s' % f_name
        f = codecs.open(f_name, "w", encoding='utf8')
        f.write('<?xml version="1.0" encoding="utf-8" ?>\n')
        f.write('<nodelist created="' +
                format_date(now(), "yyyy-mm-dd HH:MM:SS") + '">\n')
        if f_name not in col_id.statfiles:
            col_id.statfiles.append((f_name, orig_file))
    return f
def export(req):
    """
    export definition: url contains /[type]/[id]

    Admin-only: looks up the export module for path[1], writes its export of
    path[2] to a temp file and streams it as XML.
    """
    if not current_user.is_admin:
        return httpstatus.HTTP_FORBIDDEN
    path = req.path[1:].split("/")
    try:
        module = findmodule(path[1])
        tempfile = join_paths(config.get("paths.tempdir"), str(random.random()))
        with codecs.open(tempfile, "w", encoding='utf8') as f:
            try:
                f.write(module.export(req, path[2]))
            except UnicodeDecodeError:
                # some modules return raw utf-8 bytes instead of unicode
                f.write(module.export(req, path[2]).decode('utf-8'))
        req.sendFile(tempfile, u"application/xml")
        if os.sep == '/':  # Unix?
            os.unlink(tempfile)  # unlinking files while still reading them only works on Unix/Linux
    except Exception:
        # was a bare ``except:`` logging a fixed info message, which
        # misreported every failure (IOError, bad path, ...) as a missing
        # export method; log the actual traceback instead
        logg.exception("module has no export method")
def loadThemes(): def manageThemes(themepath, type): name = config.get("config.theme", "") if os.path.exists(config.basedir + "/" + themepath + "themes/" + name + "/"): athana.addFileStore("/theme/", themepath + "themes/" + name + "/") athana.addFileStorePath("/css/", themepath + "themes/" + name + "/css/") athana.addFileStorePath("/img/", themepath + "themes/" + name + "/img/") athana.addFileStorePath("/js/", themepath + "themes/" + name + "/js/") theme.update(name, themepath + "themes/" + name + "/", type) print "Loading theme '%s' (%s)" % (name, type) if config.get("config.theme", "") != "": manageThemes("web/", "intern") # internal theme for k, v in config.getsubset("plugins").items(): # themes from plugins manageThemes(v, "extern") else: print "Loading default theme"
def makeOriginalFormat(image, thumb):
    """Save a PNG copy of ``image`` at ``thumb`` (full size, no scaling).

    CMYK JPEGs and palette/greyscale images are first flattened via
    ImageMagick ``convert``.

    @raise OperationException: if the image data cannot be loaded
    """
    tmpjpg = config.get("paths.datadir") + "tmp/img" + str(
        random.random()) + ".jpg"
    pic = PILImage.open(image)
    if pic.mode == "CMYK" and (image.endswith("jpg") or image.endswith("jpeg")) or pic.mode in ["P", "L"]:
        # NOTE(review): paths are interpolated unquoted into a shell command;
        # filenames with spaces or shell metacharacters will break this.
        os.system("convert -quality 100 -draw \"rectangle 0,0 1,1\" %s %s" % (image, tmpjpg))
        pic = PILImage.open(tmpjpg)
    try:
        pic.load()
    except IOError as e:
        # (removed the dead ``pic = None`` store before this raise)
        raise OperationException("error:" + str(e))
    pic.save(thumb, "png")
    if os.path.exists(tmpjpg):
        os.unlink(tmpjpg)
def query_by_id(id):
    """Fetch a withdrawal record by id.

    @param id: withdrawal id interpolated into the configured SQL
    @return: Withdrawal instance, or None when not found or on error
    """
    connection = None
    try:
        connection = mysql_connection.get_conn()
        sql = config.get("sql", "sql_query_withdrawal_by_id") % id
        with connection.cursor() as cursor:
            cursor.execute(sql)
            result = cursor.fetchone()
        if result is not None:
            withdrawal = Withdrawal()
            withdrawal.id = result["id"]
            withdrawal.time = result["time"]
            withdrawal.user_id = result["user_id"]
            withdrawal.state = result["state"]
            withdrawal.money = result["money"]
            withdrawal.type = result["type"]
            return withdrawal
    except Exception:
        # was ``print traceback.print_exc()`` -- printed a spurious "None"
        traceback.print_exc()
    finally:
        if connection is not None:
            connection.close()
    return None
def save_import_file(filename):
    """Archive a bibtex import file by copying it into the temp dir.

    @param filename: path of the import file to preserve
    @raise IOError: when the copy fails
    """
    import shutil
    import core.config as config
    temppath = config.get("paths.tempdir")
    # strip both separators: on Windows a "/"-separated path is possible too
    _filename_only = filename.split(os.path.sep)[-1].split("/")[-1]
    destname = os.path.join(
        temppath, "bibtex_import_saved_" + getNow() + "_" + _filename_only)
    msg = "bibtex import: going to copy/save import file %r -> %r" % (filename, destname)
    logger.info(msg)
    try:
        # shutil.copy is portable and handles paths with spaces, unlike the
        # former os.system("cp ...") / "copy" shell commands
        shutil.copy(filename, destname)
    except (IOError, OSError) as e:
        raise IOError("Couldn't copy %s to %s (error: %s)" % (filename, destname, str(e)))
    return
def query_by_id(userid):
    """Fetch a user's bank card record.

    @param userid: user id interpolated into the configured SQL
    @return: Bankcard instance, or None when not found or on error
    """
    connection = None
    try:
        connection = mysql_connection.get_conn()
        sql = config.get("sql", "sql_query_bankcard_by_id") % userid
        with connection.cursor() as cursor:
            cursor.execute(sql)
            result = cursor.fetchone()
        if result is not None:
            bankcard = Bankcard()
            bankcard.user_id = userid
            bankcard.rel_name = result["rel_name"]
            bankcard.bank_name = result["bank_name"]
            bankcard.bank_address = result["bank_address"]
            bankcard.phone_num = result["phone_num"]
            bankcard.bank_card_num = result["bank_card_num"]
            return bankcard
    except Exception:
        # was ``print traceback.print_exc()`` -- printed a spurious "None"
        traceback.print_exc()
    finally:
        if connection is not None:
            connection.close()
    return None
def export_shoppingbag_bibtex(req):
    """
    Export the metadata of selected nodes in a BibTeX-format
    """
    from web.frontend.streams import sendBibFile
    from schema.schema import getMetaType
    import core.config as config
    import random
    import os

    # ids of the nodes to export arrive as "select_<id>" request params
    items = [key[7:] for key in req.params.keys() if key.startswith("select_")]

    dest = config.get("paths.tempdir") + str(random.random()) + ".bib"
    with open(dest, "a") as f:
        for item in items:
            node = tree.getNode(item)
            mask = getMetaType(node.getSchema()).getMask("bibtex")
            if mask is not None:
                f.write(mask.getViewHTML([node], flags=8))  # flags =8 -> export type
            else:
                f.write("The selected document type doesn't have any bibtex export mask")
            f.write("\n")

    if len(items) > 0:
        sendBibFile(req, dest)

    # BUG FIX: ``dest`` is a regular file, so the old os.walk()-based cleanup
    # never visited it and the temp .bib file leaked on every export
    if os.path.isfile(dest):
        os.remove(dest)
def execute(userId, message, messageHandle):
    """Handle a bai-ren 'apply for banker' request from ``userId``.

    Validates the request under the room lock, queues the player as a
    banker candidate and, when only players may bank and the game is
    waiting, starts the game.
    """
    redis = gl.get_v("redis")
    if not redis.exists(str(userId) + "_room"):
        return
    roomNo = redis.get(str(userId) + "_room")
    score = BaiRenScore()
    score.ParseFromString(message.data)
    onlyPlayerBanker = config.get("hongbao", "onlyPlayerBanker") == "True"
    redis.lock("lockroom_" + str(roomNo))
    try:
        room = redis.getobj("room_" + str(roomNo))
        seat = room.getWatchSeatByUserId(userId)
        # the acting banker and already-queued candidates may not apply again
        if seat is None or room.banker == userId or userId in room.bankerList:
            return
        if 1 == len(score.score):
            seat.shangzhuangScore = score.score[0]
            room.bankerList.append(userId)
            room.updateBankerList(messageHandle, 0)
            if onlyPlayerBanker and room.gameStatus == GameStatus.WAITING and 1 == room.banker:
                gamestart_cmd.execute(room, messageHandle)
            room.save(redis)
    except Exception:
        # was ``print traceback.print_exc()`` -- printed a spurious "None"
        traceback.print_exc()
    finally:
        # FIX: release the lock on every path; the old code unlocked
        # separately on the early-return branch and after the except block
        redis.unlock("lockroom_" + str(roomNo))
def export(req): """ export definition: url contains /[type]/[id] """ user = users.getUserFromRequest(req) if not user.isAdmin(): return httpstatus.HTTP_FORBIDDEN path = req.path[1:].split("/") try: module = findmodule(path[1]) tempfile = join_paths(config.get("paths.tempdir"), str(random.random())) file = open(tempfile, "w") file.write(module.export(req, path[2])) file.close() req.sendFile(tempfile, "application/xml") if os.sep == '/': # Unix? os.unlink( tempfile ) # unlinking files while still reading them only works on Unix/Linux except: print "module has no export method"