def update_offset(self, new_offset):
    """Record a new offset for this device and update its Devices entry.

    Inserts a timestamped document into the "Offsets" collection, then sets
    ``device_parameter_offset`` on the matching "Devices" document.
    Returns None; DB errors are delegated to handle_db_error.
    """
    client = None
    try:
        client = connect_to_db()
        offset_collection = collection.Collection(client.spotlight, "Offsets")
        now_time = datetime.datetime.utcnow()
        document = {
            "timestamp": now_time,
            "device_id": self.device_id,
            "offset": new_offset
        }
        offset_collection.insert_one(document)
        device_collection = collection.Collection(client.spotlight, "Devices")
        # Update result was previously captured in an unused local; dropped.
        device_collection.update_one(
            {"device_id": self.device_id},
            {"$set": {"device_parameter_offset": new_offset}})
        client.close()
    except Exception as e:  # was Python-2-only `except Exception, e`
        handle_db_error(client, e)
def temperature_updated(temperature):
    """Persist a temperature reading and refresh the device status record.

    Inserts a timestamped sample into "Temperatures", then updates the
    device's IP, latest-update time and latest temperature in "Devices".
    """
    Config.logger.info("[Temperature][%s]" % str(temperature))
    try:
        spotlight_collection = collection.Collection(
            Config.db_client.spotlight, "Temperatures")
        now_time = datetime.datetime.utcnow()
        document = {
            "timestamp": now_time,
            "device_id": Config.service_config["device_id"],
            "temperature": float(temperature)
        }
        spotlight_collection.insert_one(document)
        device_collection = collection.Collection(
            Config.db_client.spotlight, "Devices")
        device_collection.update_one(
            {"device_id": Config.service_config["device_id"]},
            {"$set": {
                "device_ip": get_ip(),
                "latest_update": now_time,
                "latest_temperature": float(temperature)
            }})
    except Exception as e:  # was Python-2-only `except Exception, e`
        Config.handle_access_db_error(e)
def db_init():
    """Create the spider collections and ensure a unique index on 'id' for each."""
    client = MongoClient('mongodb://*****:*****@localhost:27017/spider')
    db = client.get_database()
    # Every zhihu collection carries the same unique-id index.
    for name in ('zhihu_user', 'zhihu_zhuanlan', 'zhihu_question',
                 'zhihu_answer', 'zhihu_comment'):
        collection.Collection(db, name).create_index('id', unique=True)
def find(self, table, SON=None, pn=0, rn=0, sort=None, count=False, projection=None):
    """Query *table* and return matching documents as a list of dicts.

    Pagination skips ``pn * rn`` documents and limits to ``rn``. ObjectIds
    are stringified and ``crtime`` datetimes formatted for display. When
    *count* is true the total match count is appended as the final list
    element. Returns [] on error.
    """
    try:
        coll = collection.Collection(self.db, table)
        cursor = coll.find(filter=SON,
                           projection=projection,
                           skip=pn * rn,
                           limit=rn,
                           sort=sort)
        results = []
        for doc in cursor:
            # ObjectId is not JSON-serializable; convert to str.
            if "_id" in doc:
                doc["_id"] = str(doc["_id"])
            # Normalize creation time to a display string.
            if "crtime" in doc and isinstance(doc["crtime"],
                                              datetime.datetime):
                doc["crtime"] = doc["crtime"].strftime('%Y-%m-%d %H:%M:%S')
            results.append(doc)
        # Caller asked for a total: append it after the documents.
        if count:
            results.append(coll.count(SON))
        return results
    except Exception:
        logger.error(traceback.format_exc())
        return []
def import_job(self, user_name, job_name, file_name):
    """Import a job's data collections for *user_name* from a zip archive."""
    with self._lock:
        logger.info("import_job %s for user: %s from file: %s", job_name,
                    user_name, file_name)
        field = (user_name, job_name)
        # Refuse to overwrite an existing job for this user.
        if field in self.job_mgrs:
            logger.info("%s exists, so cannot import job.", job_name)
            return "Job already exists"
        zipobj = None
        try:
            zipobj = zipfile.ZipFile(file_name, 'r')
            # Each collection suffix is stored as one JSON member in the zip.
            for suffix in db.JOB_DATA_COLLECTION_SUFFIXES:
                logger.info("Reading %s", suffix)
                data = zipobj.read(suffix)
                ar = json_util.loads(data)
                # Skip empty members rather than inserting nothing.
                if not ar:
                    continue
                colname = job_name + suffix
                col = collection.Collection(
                    db.GetUserDB(self.db_client, user_name), colname)
                col.insert(ar)
            if not self.create_job(user_name, job_name, False):
                return "Error encountered while creating the imported job."
            # Success is signalled by a None error message.
            return None
        except Exception, e:
            logger.info(str(e))
            return "Error encountered while processing the input file: " + str(
                e)
        finally:
            # NOTE(review): the finally body is not visible in this chunk
            # (presumably closes `zipobj`); confirm against the full file.
def insert_ppv_to_db():
    """Compute PMV/PPV for the current state and store them in "PPVs".

    Air speed contributes only while cooling is active; otherwise still air
    (0.0) is assumed. Errors go to Config.handle_access_db_error.
    """
    # Hoisted: the original computed both values twice (first with 0.0, then
    # again with the real air speed when cooling) — the final values are the
    # same, so select the air speed once and compute once.
    air_speed = (ReactiveControl.current_air_speed
                 if ReactiveControl.current_cool_state else 0.0)
    ppv = PMV.calculate_ppv(0.5, float(ReactiveControl.current_temperature),
                            float(ReactiveControl.current_temperature), 1.2,
                            air_speed, 60.0)
    pmv = PMV.calculate_pmv(0.5, float(ReactiveControl.current_temperature),
                            float(ReactiveControl.current_temperature), 1.2,
                            air_speed, 60.0)
    try:
        ppv_collection = collection.Collection(Config.db_client.spotlight,
                                               "PPVs")
        document = {
            "timestamp": datetime.datetime.utcnow(),
            "device_id": Config.service_config["device_id"],
            "pmv": pmv,
            "ppv": ppv
        }
        ppv_collection.insert_one(document)
    except Exception as e:  # was Python-2-only `except Exception, e`
        Config.handle_access_db_error(e)
def count(self, table, SON={}, **kwargs):
    """Return the number of documents in *table* matching *SON* (0 on error)."""
    try:
        return collection.Collection(self.db, table).count(SON, **kwargs)
    except Exception:
        logger.error(traceback.format_exc())
        return 0
def find(self, table, SON=None, pn=0, rn=0, sort=None, count=False, projection=None):
    """Query *table*, localizing datetimes to Asia/Shanghai, and return a list.

    ObjectIds are stringified and ``crtime`` datetimes formatted for display;
    when *count* is true the total match count is appended as the final list
    element. Returns [] on error.
    """
    try:
        data = []
        col = collection.Collection(self.db, table)
        # Present stored UTC datetimes in the local (Asia/Shanghai) zone.
        col = col.with_options(codec_options=CodecOptions(
            tz_aware=True, tzinfo=pytz.timezone('Asia/Shanghai')))
        cur = col.find(filter=SON,
                       projection=projection,
                       skip=pn * rn,
                       limit=rn,
                       sort=sort)
        for doc in cur:
            # `in` replaces dict.has_key (removed in Python 3; sibling
            # `find` in this file already uses `in`).
            if "_id" in doc:
                doc["_id"] = str(doc["_id"])
            # Normalize creation time to a display string.
            if "crtime" in doc and isinstance(doc['crtime'],
                                              datetime.datetime):
                doc["crtime"] = doc['crtime'].strftime('%Y-%m-%d %H:%M:%S')
            data.append(doc)
        # Caller asked for a total: append it after the documents.
        if count:
            data.append(col.count(SON))
        return data
    except Exception as e:
        logger.error(traceback.format_exc())
        return []
def get_question_paths():
    """Fetch every stored question URL and return absolute StackOverflow links."""
    client = MongoClient(host=MONGO_HOST, port=MONGO_PORT)
    db = client[MONGO_DB]
    questions = collection.Collection(database=db, name=MONGO_COLLECTION)
    # Resolve each relative path against the site root in one pass.
    return [
        urljoin("https://stackoverflow.com", doc["url"])
        for doc in questions.find({})
    ]
def wquery():
    """Run random SeqId point queries against the collection for workloadTime seconds.

    Each iteration picks a random bucket, queries one SeqId from it, fetches
    at most one result, and sleeps sleepDelay between queries.
    """
    logging.info("Starting query load")
    connection = connector()
    db = connection[database]
    col = collection.Collection(db, strCollection,
                                read_preference=readPreference)
    startTime = datetime.datetime.now()
    endTime = startTime + datetime.timedelta(seconds=workloadTime)
    while datetime.datetime.now() < endTime:
        # Pick a bucket first so the load follows the bucket distribution.
        randBucket = random.choice(bucketTuples)
        seqId = random.randint(randBucket[0], randBucket[1])
        query = {"SeqId": seqId}
        cur = col.find(query)
        try:
            if cur.alive:  # `== True` comparison dropped
                item = cur.next()
            cur.close()
        except Exception:  # narrowed from a bare `except:`
            print("Error with the cursor - likely empty")
            exit()
        logging.debug("%d" % seqId)
        sleep(sleepDelay)
def get_last_occupancy_temperature(self):
    """Fetch the most recent temperature and occupancy samples for this device.

    NOTE(review): the fetched lists stay local and the function returns None;
    confirm callers do not expect a return value.
    """
    temperature_list = list()
    occupancy_list = list()
    client = None
    try:
        client = connect_to_db()
        temperature_collection = collection.Collection(
            client.spotlight, "Temperatures")
        occupancy_collection = collection.Collection(
            client.spotlight, "Occupancies")
        # Newest-first sort with limit(1) yields the latest sample only.
        temperature_list = list(
            temperature_collection.find({
                "device_id": self.device_id
            }).sort("timestamp", -1).limit(1))
        occupancy_list = list(
            occupancy_collection.find({
                "device_id": self.device_id
            }).sort("timestamp", -1).limit(1))
        client.close()
    except Exception as e:  # was Python-2-only `except Exception, e`
        handle_db_error(client, e)
def dump(test=False):
    """Tail the csacs named pipe, parse each log line, and store it in MongoDB.

    Records land in a per-date collection named ``csacs_<year><date>``.
    Lines that fail to parse are skipped (best-effort ingestion). In *test*
    mode the loop ends when the pipe runs dry.
    """
    (db, pipe) = get_pipe('csacs', test=test)
    auth_regex = re.compile("Authen OK")
    # Raw strings: `\d` / `\.` are not valid str escapes and warn (and will
    # eventually error) on modern Python; the patterns are unchanged.
    ip_regex = re.compile(r'\d+\.\d+\.\d+\.\d+')
    mac_regex = re.compile(r"Caller-ID=([0-9A-Fa-f]+),")
    username_regex = re.compile(r"ONID\\\\([\w\d]+),")
    username2_regex = re.compile(r"User-Name=([\w\d\/]+),")
    # read in lines from named pipe
    while True:
        line = pipe.readline()
        if not line:
            if test:
                break
            continue
        try:
            # parse fields and insert into mongodb
            csacs = {}
            csacs['line'] = line
            split = line.split(' ')
            try:
                split.remove('')
            except ValueError:  # narrowed from bare except: no empty field
                pass
            csacs['time'] = to_unix_timestamp(
                format_logdate(split[0] + ' ' + split[1]), split[2])
            # get date to know which collection to store to
            date = 'csacs_' + str(
                datetime.datetime.now().year) + format_logdate(
                    split[0] + ' ' + split[1])
            match = ip_regex.search(line)
            if match:
                csacs['ip'] = ip2long(match.group())
            match = mac_regex.search(line)
            if match:
                csacs['mac'] = match.group(1).lower()
            match = username_regex.search(line)
            if match:
                csacs['username'] = match.group(1).lower()
            else:
                match = username2_regex.search(line)
                if match:
                    csacs['username'] = match.group(1).lower()
            match = auth_regex.search(line)
            if match:
                csacs['log_type'] = 'authentication'
            coll = collection.Collection(db, date)
            coll.save(csacs)
        except Exception:
            # Best-effort: malformed lines are dropped, ingestion continues.
            continue
def update(self, table, SON, DOC, upsert=False, multi=False):
    """Apply update *DOC* to documents matching *SON*.

    *multi* selects update_many over update_one; errors are logged and
    swallowed.
    """
    try:
        target = collection.Collection(self.db, table)
        op = target.update_many if multi else target.update_one
        op(SON, DOC, upsert)
    except Exception:
        logger.error(traceback.format_exc())
def find_and_modify(self, table, SON, DOC, upsert=True, multi=False):
    """Atomically modify one document matching *SON* with *DOC*.

    Note: when *multi* is true nothing is executed — no multi-document
    variant is implemented. Errors are logged and swallowed.
    """
    try:
        # Guard clause: the multi case is (visibly) a no-op.
        if multi:
            return
        target = collection.Collection(self.db, table)
        target.find_and_modify(SON, DOC, upsert)
    except Exception:
        logger.error(traceback.format_exc())
def find_one(self, table, SON={}, pn=0, rn=0, sort=None, count=False):
    """Return the first document in *table* matching *SON*, or [] on error.

    *pn*/*rn* translate to a skip of ``pn * rn`` and a limit of ``rn``;
    *count* is accepted for signature parity with find() but unused here.
    """
    try:
        col = collection.Collection(self.db, table)
        # Dead `data = []` pre-initialization removed: find_one returns a
        # single document (or None) directly.
        return col.find_one(filter=SON, skip=pn * rn, limit=rn, sort=sort)
    except Exception as e:
        logger.exception(e)
        return []
def bulk_upsert_operation(self, table, SON, DOC):
    """Run one unordered bulk find-and-update on *table*.

    Returns the bulk execution result, or None when the operation fails.
    """
    try:
        target = collection.Collection(self.db, table)
        builder = BulkOperationBuilder(target, ordered=False)
        builder.find(SON).update(DOC)
        return builder.execute()
    except Exception:
        logger.error(traceback.format_exc())
        return None
def remove(self, table, DOC, multi=True):
    """Delete documents matching *DOC*: all matches when *multi*, else one."""
    try:
        target = collection.Collection(self.db, table)
        delete = target.delete_many if multi else target.delete_one
        delete(DOC)
    except Exception:
        logger.error(traceback.format_exc())
def connect_collection(db_name, collection_name, local=False):
    """
    Connect to DB and get collection, return collection object
    for small scope use
    """
    # Local mode: unauthenticated connection to the default port.
    if local:
        conn = Connection('localhost', 27017)
        return collection.Collection(database.Database(conn, db_name),
                                     collection_name)
    # Remote mode: connection parameters come from the config store.
    host = get_config('database', 'host')
    port = int(get_config('database', 'port'))
    user = get_config('database', 'user')
    password = get_config('database', 'password')
    conn = Connection(host, port)
    remote_db = database.Database(conn, db_name)
    remote_db.authenticate(user, password)
    return collection.Collection(remote_db, collection_name)
def get_device(self, device_id):
    """Look up one device document by *device_id* in the sonar "Devices" collection."""
    device_dict = None
    # Pre-bind: if connect_to_db() raises, the handler below would otherwise
    # hit a NameError on `client` (siblings in this file already do this).
    client = None
    try:
        client = connect_to_db()
        device_collection = collection.Collection(client.sonar, "Devices")
        device_dict = device_collection.find_one({"device_id": device_id})
        current_app.logger.info("fetched %s" % str(device_dict))
        client.close()
    except Exception as e:  # was Python-2-only `except Exception, e`
        handle_db_error(client, e)
def submit_vote(self, vote):
    """Store a comfort vote (paired with the latest PMV) for this device.

    Requires an active "Training" document for the device; returns False
    without one. The vote goes to both "Votes" and "Votes_Archive"; returns
    True on a successful insert. Returns None when a DB error is handled.
    """
    client = None
    try:
        client = connect_to_db()
        training_collection = collection.Collection(
            client.spotlight, "Training")
        training = training_collection.find_one(
            {"device_id": self.device_id})
        if not training:
            return False
        # Latest PMV: newest-first sort, single document.
        ppv_collection = collection.Collection(client.spotlight, "PPVs")
        pmv_ppv_dict = ppv_collection.find({
            "device_id": self.device_id
        }).sort("timestamp", -1).limit(1)
        pmv = pmv_ppv_dict[0]["pmv"]
        votes_collection = collection.Collection(client.spotlight, "Votes")
        result = votes_collection.insert_one({
            "device_id": self.device_id,
            "vote": vote,
            "pmv": pmv
        })
        votes_archive_collection = collection.Collection(
            client.spotlight, "Votes_Archive")
        votes_archive_collection.insert_one({
            "device_id": self.device_id,
            "training_start": training["start_time"],
            "vote": vote,
            "pmv": pmv
        })
        client.close()
        return bool(result)
    except Exception as e:  # was Python-2-only `except Exception, e`
        handle_db_error(client, e)
def find_all_devices():
    """Fetch all sonar device documents and log the count.

    NOTE(review): `devices` is never returned — confirm callers expect None.
    """
    devices = None
    # Pre-bind: connect_to_db() may raise before `client` is assigned, which
    # would otherwise make the handler's reference a NameError.
    client = None
    try:
        client = connect_to_db()
        device_collection = collection.Collection(client.sonar, "Devices")
        devices = list(device_collection.find())
        current_app.logger.info("fetched %d all devices" % len(devices))
        client.close()
    except Exception as e:  # was Python-2-only `except Exception, e`
        handle_db_error(client, e)
def get_raw_data(sample_id):
    """Look up the raw-data URL for *sample_id* in the "Samples" collection.

    NOTE(review): this function always returns an empty list; `raw_url` is
    computed but never returned — `return raw_url` was likely intended.
    Preserved as-is to avoid changing the caller-visible contract.
    """
    raw_url = ""
    # Pre-bind so the except handler can reference `client` safely even when
    # connect_to_db() itself raises.
    client = None
    try:
        client = connect_to_db()
        samples_collection = collection.Collection(client.sonar, "Samples")
        sample = samples_collection.find_one({"sample_id": sample_id})
        raw_url = sample["raw_url"]
        client.close()
    except Exception as e:  # was Python-2-only `except Exception, e`
        handle_db_error(client, e)
    return list()
def get_device(self, device_id):
    """Look up one device document by *device_id* in the spotlight "Devices" collection.

    NOTE(review): `device_dict` is never returned — confirm callers expect None.
    """
    device_dict = None
    client = None
    try:
        client = connect_to_db()
        device_collection = collection.Collection(client.spotlight, "Devices")
        device_dict = device_collection.find_one({"device_id": device_id})
        client.close()
    except Exception as e:  # was Python-2-only `except Exception, e`
        handle_db_error(client, e)
def find_devices(user_id):
    """Fetch all sonar device documents belonging to *user_id* and log the count.

    NOTE(review): `devices` is never returned — confirm callers expect None.
    """
    devices = None
    # Pre-bind: protects the except handler against an unbound `client` when
    # connect_to_db() raises.
    client = None
    try:
        client = connect_to_db()
        device_collection = collection.Collection(client.sonar, "Devices")
        devices = list(device_collection.find({"user_id": user_id}))
        current_app.logger.info("fetched %d devices for user '%s'" %
                                (len(devices), str(user_id)))
        client.close()
    except Exception as e:  # was Python-2-only `except Exception, e`
        handle_db_error(client, e)
def find_all_devices():
    """Fetch all spotlight device documents.

    NOTE(review): `devices` is never returned — confirm callers expect None.
    """
    devices = None
    client = None
    try:
        client = connect_to_db()
        device_collection = collection.Collection(client.spotlight, "Devices")
        devices = list(device_collection.find())
        client.close()
    except Exception as e:  # was Python-2-only `except Exception, e`
        handle_db_error(client, e)
def insert(self, table, DOC):
    """Insert *DOC* into *table*: one document for a dict, many for a list.

    Returns the pymongo insert result, or None on error / unsupported type.
    """
    result = None
    try:
        target = collection.Collection(self.db, table)
        if isinstance(DOC, dict):
            result = target.insert_one(DOC)
        elif isinstance(DOC, list):
            result = target.insert_many(DOC)
    except Exception:
        logger.error(traceback.format_exc())
    # Plain return replaces the original `finally: return` — the exception is
    # already consumed above, so the observable behavior is identical.
    return result
def find_one(self, table, SON={}, pn=0, rn=0, sort=None, count=False):
    """Return one matching document with datetimes localized to Asia/Shanghai.

    *pn*/*rn* translate to a skip of ``pn * rn`` and a limit of ``rn``;
    *count* is accepted but unused. Returns {} on error.
    """
    try:
        base = collection.Collection(self.db, table)
        # Present stored UTC datetimes in the local (Asia/Shanghai) zone.
        localized = base.with_options(codec_options=CodecOptions(
            tz_aware=True, tzinfo=pytz.timezone('Asia/Shanghai')))
        return localized.find_one(filter=SON, skip=pn * rn, limit=rn,
                                  sort=sort)
    except Exception as e:
        logger.exception(e)
        return {}
def get_training(self):
    """Fetch this device's training document from the "Training" collection.

    NOTE(review): `training` is never returned — confirm callers expect None.
    """
    training = None
    client = None
    try:
        client = connect_to_db()
        training_collection = collection.Collection(
            client.spotlight, "Training")
        training = training_collection.find_one(
            {"device_id": self.device_id})
        client.close()
    except Exception as e:  # was Python-2-only `except Exception, e`
        handle_db_error(client, e)
def create_cube(cai):
    """Build cube collections from a contract's fact collection.

    *cai* describes a contract analysis: its name, address, cube definitions
    (fact filter, dimensions, measures) and optional off-chain detail maps.
    Each cube collection is dropped and rebuilt from the matching facts.
    """
    ethdb = connect_database()
    db_facts = collection.Collection(ethdb, "%s_facts" % cai['name'])
    for cube in cai['cubes']:
        print('Generating: %s cube...' % cube['name'])
        cube_facts = db_facts.find({"contract": cai['address'], "type": cube['fact_type'], "name": cube['fact_name']})
        db_cube = collection.Collection(ethdb, '%s_%s' % (cai['name'], cube['name']))
        # Rebuild from scratch on every run.
        db_cube.drop()
        for fact in cube_facts:
            record = dict()
            for dimension in cube['dimensions']:
                # Fields of the form "arguments.<key>" are read from the
                # fact's argument map ([10:] skips "arguments."); any other
                # field name is read off the fact document itself.
                if dimension['field'][:9] == "arguments":
                    if dimension['field'][10:] in fact['arguments']:
                        record[dimension['name']] = fact['arguments'][dimension['field'][10:]]['value']
                else:
                    if dimension['field'] in fact:
                        record[dimension['name']] = fact[dimension['field']]
                # Optional enrichment: off-chain attributes keyed by the
                # dimension value, stored under "<dim> <key> [OC]" names.
                if 'off_chain_details' in dimension:
                    if dimension['off_chain_details'] in cai:
                        if str(record[dimension['name']]) in cai[dimension['off_chain_details']]:
                            oc_dim_attributes = dict()
                            oc_details = cai[dimension['off_chain_details']][str(record[dimension['name']])]
                            for key in oc_details:
                                oc_dim_attributes['%s %s [OC]' % (dimension['name'], key)] = oc_details[key]
                            record.update(oc_dim_attributes)
            for measure in cube['measures']:
                try:
                    # SECURITY: eval() executes arbitrary expressions taken
                    # from the cube config — only safe if *cai* is fully
                    # trusted. Note only SyntaxError is caught; runtime
                    # errors in the expression will propagate.
                    value = eval(measure['value'])
                    record[measure['name']] = value
                except SyntaxError:
                    pass
            if len(record) > 0:
                db_cube.insert_one(record)
    return
def motion_updated(standard_deviation):
    """Persist a motion standard-deviation sample to the "Motions" collection."""
    Config.logger.info("[Motion_STD][%s]" % str(standard_deviation))
    try:
        spotlight_collection = collection.Collection(
            Config.db_client.spotlight, "Motions")
        document = {
            "timestamp": datetime.datetime.utcnow(),
            "device_id": Config.service_config["device_id"],
            "std": float(standard_deviation)
        }
        spotlight_collection.insert_one(document)
    except Exception as e:  # was Python-2-only `except Exception, e`
        Config.handle_access_db_error(e)