def parse(columns, metadata, lines):
    """Yield edge documents parsed from tab-delimited network lines.

    Each line carries: source, target, one score per column, and a
    trailing composite score. Lines with an unexpected field count are
    skipped; per-column scores that fail float conversion are ignored.
    Increments ``metadata[column]['count']`` for every edge yielded.
    """
    status = Status('networks', logger=log).n(len(lines)).start()
    expected = len(columns) + 3  # source + target + per-column scores + composite
    for line_no, raw in enumerate(lines):
        status.log(line_no)
        fields = raw.split('\t')
        if len(fields) != expected:
            continue
        src, dst = fields[0], fields[1]
        # fields[-1] is the composite score and is intentionally skipped.
        for col, field in itertools.izip(columns, fields[2:-1]):
            try:
                value = float(field)
            except ValueError:
                continue
            metadata[col]['count'] += 1
            yield {
                'source': src,
                'target': dst,
                'score': value,
                'meta': metadata[col]['_id']
            }
    status.stop()
def filter_status(query, status):
    """Narrow *query* to files whose status matches *status*.

    *status* may be ``None`` (no filtering), a single status name, or a
    list of status names.
    """
    if status is None:
        return query
    if isinstance(status, list):
        ids = [Status.name_to_id(name) for name in status]
        return query.filter(File.status_id.in_(ids))
    return query.filter(File.status_id == Status.name_to_id(status))
def get(self):
    """Return enterprises matching the optional ``name`` query arg.

    With a name, performs a regex substring search against the local
    database; without one, returns the first 50 enterprises.
    """
    name = request.args.get('name')
    if name:
        # Fetch matching records from the local database.
        raw_query = {'entname': {"$regex": name.encode('utf-8')}}
        enterprises = Enterprise.objects(__raw__=raw_query).only(
            'entname', "lcid", "regcap", "esdate", "address", "regcapcur")
    else:
        enterprises = Enterprise.objects[:50]
    if len(enterprises):
        return Status(200, 'success', json.loads(enterprises.to_json())).result
    return Status(404, 'fail', {'title': 'Not Found'}).result
def get(self, lcid):
    """Return an enterprise plus its relations and patents as one payload."""
    enterprise = Enterprise.objects(lcid=lcid).first()
    payload = {
        "enterprise": self.load_json(enterprise),
        "entprerelations": self.load_json(Entprerelation.objects(enttarget=lcid)),
        "entsrelations": self.load_json(Entsrelation.objects(entsource=lcid)),
        "entpatents": self.load_json(Entpatent.objects(lcid=lcid)),
    }
    if enterprise:
        return Status(200, 'success', payload).result
    return Status(404, 'fail', {'title': 'Not Found'}).result
def get(self):
    """Return words, optionally filtered by the ``keyword`` query arg."""
    keyword = request.args.get('keyword')
    queryset = Word.objects(keyword=keyword) if keyword else Word.objects()
    return Status(200, 'success', convert2json(queryset)).result
def _get_build_status(self, build):
    """Summarize the first result of the build's XML report as a Status."""
    report = self._get_build_xml(build)
    first = report.children[0].results.children[0]
    succeeded = 'Successful' in first['state']
    finished = 'Finished' in first['lifeCycleState']
    return Status(
        name=first.plan['shortName'],
        status='success' if succeeded else 'error',
        in_progress=not finished)
def load_entrez_identifiers():
    """Rebuild the ``identifiers.genemania_entrez`` Mongo collection.

    Streams the GeneMania Homo sapiens identifier-mapping file, keeps
    only rows whose source is 'Entrez Gene ID', bulk-inserts them in
    batches of 1000, then creates lookup indexes.
    """
    db = pymongo.MongoClient().identifiers
    # Drop and repopulate from scratch on every run.
    db.genemania_entrez.drop()
    collection = db.genemania_entrez
    url = 'http://genemania.org/data/current/Homo_sapiens/identifier_mappings.txt'
    status = Status('loading genemania identifiers from ' + url, logger=log).start()
    r = requests.get(url)
    lines = r.iter_lines()
    lines.next()  # ignore header row

    def parse(lines):
        # Yield one document per well-formed 3-column Entrez row;
        # malformed rows are logged and skipped.
        for line in lines:
            try:
                preferred, name, source = line.split('\t')
                if (source == 'Entrez Gene ID'):
                    yield {
                        'preferred': preferred,
                        'name': name,
                        'NAME': name.upper(),  # indexed to support case-insensitive queries
                        'source': source
                    }
            except Exception as e:
                log.warn(e.message)

    count = 0
    iterator = parse(lines)
    while True:
        # Drain the generator 1000 records at a time to bound memory use.
        records = [record for record in islice(iterator, 1000)]
        if len(records) > 0:
            count += len(collection.insert_many(records).inserted_ids)
            log.debug('inserted %d identifiers (%d total)', len(records), count)
        else:
            break
    log.info('creating NAME and preferred indexes')
    collection.create_index([("NAME", pymongo.ASCENDING)])
    collection.create_index([("preferred", pymongo.ASCENDING)])
    status.stop()
def post(self):
    """Persist a search word built from the posted main/keyword/word fields."""
    payload = request.json
    main = payload.get('main')
    keyword = payload.get('keyword')
    word = payload.get('word')
    combined = '\"%s %s %s\"' % (main, keyword, word)
    saved = Searchword(main=main, keyword=keyword, word=word, kw=combined).save()
    return Status(200, 'success', convert2json(saved)).result
def _get_deployment_status(self, deployment):
    """Summarize the first environment status of a deployment as a Status."""
    report = self._get_deployment_json(deployment)
    first = report[0]['environmentStatuses'][0]
    result = first['deploymentResult']
    succeeded = 'SUCCESS' in result['deploymentState']
    finished = 'FINISHED' in result['lifeCycleState']
    return Status(
        name='Deploy %s' % (first['environment']['name']),
        status='success' if succeeded else 'unknown',
        in_progress=not finished)
def _get_build_status(self, build):
    """Summarize the most recent build record as a Status."""
    report = self._get_build_json(build)
    first = report['data'][0]
    text = first['status_text']
    # 'success' wins over 'error' if both substrings appear.
    if 'success' in text:
        outcome = 'success'
    elif 'error' in text:
        outcome = 'error'
    else:
        outcome = 'unknown'
    return Status(
        name='Android' if 'android' in first['stack_identifier'] else 'iOS',
        status=outcome,
        in_progress='in-progress' in text)
def load_identifiers():
    """Rebuild the ``identifiers.genemania`` Mongo collection.

    Streams the GeneMania Danio rerio identifier-mapping file, keeps
    every well-formed row, bulk-inserts in batches of 1000, then
    creates lookup indexes.
    """
    db = pymongo.MongoClient().identifiers
    # Drop and repopulate from scratch on every run.
    db.genemania.drop()
    collection = db.genemania
    url = 'http://genemania.org/data/current/Danio_rerio/identifier_mappings.txt'
    status = Status('loading genemania identifiers from ' + url, logger=log).start()
    r = requests.get(url)
    lines = r.iter_lines()
    lines.next()  # ignore header row

    def parse(lines):
        # Yield one document per well-formed 3-column row;
        # malformed rows are logged and skipped.
        for line in lines:
            try:
                preferred, name, source = line.split('\t')
                yield {
                    'preferred': preferred,
                    'name': name,
                    'NAME': name.upper(),  # indexed to support case-insensitive queries
                    'source': source
                }
            except Exception as e:
                log.warn(e.message)

    count = 0
    iterator = parse(lines)
    while True:
        # Drain the generator 1000 records at a time to bound memory use.
        records = [record for record in islice(iterator, 1000)]
        if len(records) > 0:
            count += len(collection.insert_many(records).inserted_ids)
            log.debug('inserted %d identifiers (%d total)', len(records), count)
        else:
            break
    log.info('creating NAME and preferred indexes')
    collection.create_indexes([
        pymongo.IndexModel([('NAME', pymongo.ASCENDING)]),
        pymongo.IndexModel([('preferred', pymongo.ASCENDING)])
    ])
    status.stop()
def get(self, lcid):
    """Return the relation tree rooted at enterprise *lcid*.

    Collects the related enterprises of the current company, converts
    them to plain dicts, and folds them into a tree via coverToTree.
    """
    related = []
    for ent in self.getRelEnts(lcid):
        for e in ent:
            related.append(json.loads(e.to_json()))
    # Root node: the enterprise itself, as a JSON object.
    # (A dead ``data = {}`` that was immediately overwritten was removed.)
    data = json.loads(
        Enterprise.objects(lcid=lcid)
        .only("lcid", "entname", "esdate")
        .first().to_json())
    result = self.coverToTree(lcid, related, data)
    return Status(200, 'success', result).result
def get(self, lcid):
    """Build a node/link graph for *lcid* from the raw relation data."""
    self.getRelEnts(lcid)
    self.addDeep(lcid, self.rawdata, None)
    nodes = []
    links = []
    for entry in self.rawdata:
        if entry["parent"] is None:
            continue
        node = entry["parent"]
        node["investment"] = entry["investment"]
        nodes.append(node)
        links.extend(entry["children"])
    self.results['nodes'] = nodes
    self.results['links'] = links
    return Status(200, 'success', self.results).result
def marshal(self):
    """Serialize this download record to a JSON-friendly dict."""
    status_name = Status.id_to_name(self.status_id)
    type_name = Type.id_to_name(self.type_id)
    return {
        "id": self.id,
        "name": self.name,
        "url": self.url,
        "directory": self.directory,
        "status": status_name,
        "type": type_name,
        "percent": self.percent,
        "size": self.size,
        "speed": self.speed,
        "timeRemaining": self.time_remaining,
    }
def post(self):
    """Authenticate a user by email/password and return an auth token.

    Returns 404 when arguments are missing or the user is unknown,
    401 when the password does not verify.
    """
    email = request.json.get('email')
    password = request.json.get('password')
    if email is None or password is None:
        return make_error(404, 'missing arguments!')
    user = User.objects(email=email).first()
    if user is None:
        # Message fixed (was the garbled 'user is not exit!').
        return make_error(404, 'user does not exist!')
    # Leftover debug ``print user.verify_password(password)`` removed:
    # it leaked auth results to stdout and verified the password twice.
    if user.verify_password(password):
        token = user.generate_auth_token()
        return Status(200, 'success', {'token': token}).result
    return make_error(401, 'Unauthorized Access!')
def post(self):
    """Complete registration: verify captcha, set password, activate user.

    Returns 400 when required fields are missing, when no pending user
    exists for the email, or when the captcha does not match.
    """
    email = request.json.get('email')
    username = request.json.get('username')
    password = request.json.get('password')
    captcha = request.json.get('captcha')
    if email is None or username is None or password is None:
        return make_error(400, 'missing arguments!')
    user = User.objects(email=email).first()
    # Guard added: previously an unknown email crashed with
    # AttributeError on ``user["captcha"]`` instead of a clean 400.
    # (Leftover debug ``print`` of the comparison removed as well.)
    if user is None or not captcha == user["captcha"]:
        return make_error(400, 'captcha is not correct!')
    user.hash_password(password)
    token = user.generate_auth_token()
    user.update(username=username, set__is_active=True)
    user.save()
    return Status(201, 'success', {'token': token}).result
def post(self):
    """Email a verification code to a new address and create the user.

    Rejects addresses that already have an account; on mail failure
    returns a 400 without creating the user.
    """
    email = request.json.get('email')
    captcha = generate_verification_code()
    msg = Message('主题', sender=app.config['MAIL_USERNAME'],
                  recipients=[email])
    msg.body = '文本 body'
    msg.html = '验证码是:%s' % captcha
    if User.objects(email=email).first() is not None:
        return make_error(400, 'exiting user!')
    try:
        mail.send(msg)
    except Exception as e:
        # TODO: how to format e detail message to string
        return make_error(400, 'Mailbox not found or access denied')
    else:
        # Only persist the pending user once the mail went out.
        User(email=email, captcha=captcha).save()
        return Status(200, 'success', 'send captcha to your email').result
def get(self):
    """Proxy an enterprise-name search to the configured remote service."""
    entname = request.args.get('entname')
    self.data["keyword"] = entname.encode('utf-8')
    response = self.post_fnc(self.url, self.data, self.headers)
    return Status(200, 'success', response["data"]["list"]).result
def get(self, kw):
    """Return all search documents recorded for keyword *kw*."""
    docs = Searchdoc.objects(kw=kw)
    return Status(200, 'success', convert2json(docs)).result
def post(self):
    """Queue a crawl task for every entry in the posted download list."""
    tasks = json.loads(request.data)["downloadlist"]
    for task in tasks:
        grab_from_qy.delay(task["lcid"])
    return Status(200, 'success', None).result
def get(self):
    """Return every stored search word."""
    return Status(200, 'success', convert2json(Searchword.objects())).result
def put(self, oid):
    """Replace the word list of the Word document with id *oid*."""
    new_words = request.json.get('words')
    Word.objects.get(id=oid).update(words=new_words)
    return Status(200, 'success', None).result
def get(self, oid):
    """Return the Word document with id *oid*."""
    document = Word.objects.get(id=oid)
    return Status(200, 'success', convert2json(document)).result
def post(self):
    """Create a Word document from the posted keyword and word list."""
    payload = request.json
    created = Word(keyword=payload.get('keyword'),
                   words=payload.get('words')).save()
    return Status(200, 'success', convert2json(created)).result
def get(self):
    """Kick off the asynchronous name-based crawl task."""
    grab_with_names.delay()
    return Status(200, 'success', None).result
def main():
    """Load GeneMania Homo sapiens networks and edges into MongoDB.

    Flags:
        --id         load identifier mappings only, skip network data
        --batch      edge insert batch size
        --warmstart  skip identifier load and any network already
                     recorded with status 'success'
    Returns 0 on completion.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--id', action='store_true', help='load identifiers only')
    parser.add_argument('--batch', type=int, default=10000, help='insert records batch size')
    parser.add_argument('--warmstart', action='store_true', help='warmstart')
    args = parser.parse_args()
    if not args.warmstart:
        load_identifiers()
    if not args.id:
        client = pymongo.MongoClient()
        db = client.networks
        # collection stores metadata about source networks
        meta = db.meta
        # collection stores edge data
        edges = db.edges
        create_edges_index()
        url = 'http://genemania.org/data/current/Homo_sapiens/networks.txt'
        log.info('reading network list from %s', url)
        r = requests.get(url)
        lines = list(r.iter_lines())[1:]  # ignore header line
        status = Status('networks', logger=log).n(len(lines)).start()
        for idx, line in enumerate(lines):
            status.log(idx)
            file_name, network_group_name, network_name, source, pubmed_id = line.split('\t')
            metadata = {
                'collection': 'genemania',
                'type': network_group_name.lower(),
                'source': source,
                'name': network_name,
                'pubmed': int(pubmed_id) if not pubmed_id == '' else 0
            }
            # Python 2 idiom: dict(items + items) merges the success filter
            # into a copy of metadata for the warmstart lookup.
            if not args.warmstart or meta.find_one(dict(metadata.items() + [('status', 'success')])) is None:
                # old metadata records and their associated edges will be dropped after the new network is finished processing
                _ids = [result['_id'] for result in meta.find(metadata)]
                log.info('found %d matching network(s) that will be replaced: %s', len(_ids), ', '.join([str(_id) for _id in _ids]))
                set_status(metadata, 'parsing')
                _id = meta.insert_one(metadata).inserted_id
                metadata['count'] = load_network('http://genemania.org/data/current/Homo_sapiens/' + file_name, _id, args.batch)
                log.info('%s %s %s network has %d edges', metadata['source'], metadata['name'], metadata['type'], metadata['count'])
                set_status(metadata, 'success')
                meta.save(metadata)
                if len(_ids) > 0:
                    log.info('dropping old network metadata')
                    meta.delete_many({'_id': {'$in': _ids}})
        # Remove edges whose parent network metadata no longer exists.
        cleanup_edges()
        status.stop()
    return 0
def main():
    """Load GeneMania Homo sapiens networks into the 'identifiers' set.

    Flags:
        --warmstart  skip identifier load and any network already
                     recorded with status 'success'
    Returns 0 on completion.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--warmstart', action='store_true', help='warmstart')
    args = parser.parse_args()
    if not args.warmstart:
        load_identifiers()
    client = pymongo.MongoClient()
    db = client.networks
    # collection stores metadata about source networks
    meta = db.meta
    # collection stores edge data
    edges = db.edges
    create_edges_index()
    url = 'http://genemania.org/data/current/Homo_sapiens/networks.txt'
    log.info('reading network list from %s', url)
    r = requests.get(url)
    lines = list(r.iter_lines())[1:]  # ignore header line
    status = Status('networks', logger=log).n(len(lines)).start()
    for idx, line in enumerate(lines):
        status.log(idx)
        file_name, network_group_name, network_name, source, pubmed_id = line.split('\t')
        metadata = {
            'collection': 'identifiers',
            'type': network_group_name.lower(),
            'source': source,
            'name': network_name,
            'pubmed': int(pubmed_id) if not pubmed_id == '' else 0
        }
        # Python 2 idiom: dict(items + items) merges the success filter
        # into a copy of metadata for the warmstart lookup.
        if not args.warmstart or meta.find_one(dict(metadata.items() + [('status', 'success')])) is None:
            # old metadata records and their associated edges will be dropped after the new network is finished processing
            _ids = [result['_id'] for result in meta.find(metadata)]
            log.info('found %d matching network(s) that will be replaced: %s', len(_ids), ', '.join([str(_id) for _id in _ids]))
            set_status(metadata, 'parsing')
            _id = meta.insert_one(metadata).inserted_id
            metadata['count'] = load_network('http://genemania.org/data/current/Homo_sapiens/' + file_name, _id)
            log.info('%s %s %s network has %d edges', metadata['source'], metadata['name'], metadata['type'], metadata['count'])
            set_status(metadata, 'success')
            meta.save(metadata)
            if len(_ids) > 0:
                log.info('dropping old network metadata')
                meta.delete_many({'_id': {'$in': _ids}})
    # Remove edges whose parent network metadata no longer exists.
    log.info('dropping old edge data')
    edges.delete_many({'meta': {'$nin': [it['_id'] for it in meta.find()]}})
    status.stop()
    return 0
# Script startup: validate numeric options, then launch the status,
# broadcaster, HTTP, and WebSocket services on background threads.
# NOTE(review): assumes ``options``, ``op``, ``args``, and the service
# classes are defined earlier in this file — confirm against full source.
try:
    options.port = int(options.port)
    options.wsport = int(options.wsport)
except ValueError:
    logging.error("Port must be numeric")
    op.print_help()
    sys.exit(1)
try:
    options.stream_timeout = int(options.stream_timeout)
except ValueError:
    logging.error("Stream timeout must be numeric")
    op.print_help()
    sys.exit(1)
# Instantiate the Status singleton, then run it on a daemon thread.
Status()
statusThread = threading.Thread(target=Status._instance.run)
statusThread.daemon = True
statusThread.start()
# Source stream broadcaster; args[0] is the stream source.
broadcaster = Broadcaster(args[0], options.stream_timeout)
broadcaster.start()
# Plain HTTP clients.
requestHandler = HTTPRequestHandler(options.port)
requestHandler.start()
# WebSocket clients, served on a daemon thread so the process can exit.
s = SimpleWebSocketServer('', options.wsport, WebSocketStreamingClient)
webSocketHandlerThread = threading.Thread(target=s.serveforever)
webSocketHandlerThread.daemon = True
webSocketHandlerThread.start()
def get(self):
    """Return a fresh auth token for the already-authenticated user."""
    token = g.user.generate_auth_token()
    if not token:
        return make_error(401, 'Unauthorized Access!')
    return Status(200, 'success', {'token': token}).result
def get(self, lcid):
    """Queue an asynchronous crawl for enterprise *lcid*."""
    grab_from_qy.delay(lcid)
    return Status(200, 'success', None).result
def get(self):
    """Health-check style endpoint: always succeeds with an empty payload."""
    response = Status(200, 'success', None)
    return response.result