def separate(self, channels):
    try:
        storage = redis_storage.RedisStorage(config.storage["host"],
                                             config.storage["port"],
                                             config.storage["chunk_db"])
        # Number of chunks needed so each chunk's payload stays close to size_stream.
        count = (self.t2 - self.t1) * self.density * len(channels) // (
            self.size_stream * self.delta)
        logging.debug("chunk count: %s", count)
        if count == 0:
            count = 1
        deltaT = (self.t2 - self.t1) // count
        array = []
        t = self.t1
        while t < self.t2:
            # Clamp the last chunk so it never extends past t2.
            chunk = Chunk(t, min(t + deltaT, self.t2), channels)
            array.append(chunk)
            t += deltaT
        # Link the chunks into a doubly linked list.
        for i, c in enumerate(array[1:]):
            array[i].next = c
            c.prev = array[i]
        for i in array:
            storage.put_chunk(i)
        return array
    except AttributeError:
        logging.error("attribute error")
        raise
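# NOTE: a minimal sketch of the Chunk object that separate() builds, links and
# hands to storage.put_chunk(). The real class lives elsewhere in this repo, so
# the id field and the exact attribute names are assumptions inferred from how
# chunks are created and chained above.
import uuid

class Chunk:
    def __init__(self, t1, t2, channels):
        self.id = str(uuid.uuid4())   # key under which the chunk is stored
        self.t1 = t1                  # chunk start, seconds since epoch
        self.t2 = t2                  # chunk end, seconds since epoch
        self.channels = channels      # channel ids covered by this chunk
        self.next = None              # neighbouring Chunk set by separate()
        self.prev = None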
def chunk_compress(chunk_id):
    if request.method == 'POST':
        try:
            request_data = request.get_json()
            storage = redis_storage.RedisStorage(config.storage["host"],
                                                 config.storage["port"],
                                                 config.storage["chunk_db"])
            current_chunk = storage.get_chunk(chunk_id)
            if current_chunk is None:
                abort(404)
            data = data_base.get_data(
                datetime.datetime.fromtimestamp(current_chunk["t1"]),
                datetime.datetime.fromtimestamp(current_chunk["t2"]),
                current_chunk["channels"],
                request_data["level"],
                request_data["fields"])
            if data is None:
                abort(404)
            if current_chunk["next"]:
                data.update({"next": current_chunk["next"]})
            js = json.dumps(data)
            app.logger.debug(current_chunk)
            return js
        except ValueError as ex:
            app.logger.error(str(ex))
            abort(400)
    else:
        abort(400)
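# NOTE: a hedged client-side sketch of how the handler above is meant to be
# called. The URL path "/chunk_compress/<chunk_id>" is an assumption (the route
# registration is not shown here); the JSON keys "level" and "fields" mirror
# the request_data lookups in the handler.
import requests

def fetch_compressed_chunk(base_url, chunk_id, level, fields):
    # POST the compression parameters; the handler answers with a JSON object
    # plus an optional "next" pointer to the following chunk.
    resp = requests.post(
        "{}/chunk_compress/{}".format(base_url, chunk_id),
        json={"level": level, "fields": fields})
    resp.raise_for_status()
    return resp.json()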
def chunk(chunk_id):
    if request.method == 'GET':
        try:
            storage = redis_storage.RedisStorage()
            current_chunk = storage.get_chunk(chunk_id)
            if current_chunk is None:
                abort(404)
            data = data_base.GetData(
                current_chunk["channels"],
                datetime.datetime.fromtimestamp(current_chunk["t1"]),
                datetime.datetime.fromtimestamp(current_chunk["t2"]))
            if data is None:
                abort(404)
            # Group samples by channel id (second element of each row).
            d = defaultdict(list)
            for i in data:
                d[i[1]].append({"time": i[0].timestamp(), "value": i[2]})
            if current_chunk["next"]:
                d.update({"next": current_chunk["next"]})
            js = json.dumps(d)
            app.logger.debug(current_chunk)
            return js
        except (TypeError, ValueError) as ex:
            app.logger.error(str(ex))
            abort(400)
    else:
        abort(400)
def chunk(chunk_id):
    if request.method == 'GET':
        try:
            storage = redis_storage.RedisStorage(config.storage["host"],
                                                 config.storage["port"],
                                                 config.storage["chunk_db"])
            current_chunk = storage.get_chunk(chunk_id)
            if current_chunk is None:
                abort(404)
            data = data_base.get_data(
                datetime.datetime.fromtimestamp(current_chunk["t1"]),
                datetime.datetime.fromtimestamp(current_chunk["t2"]),
                current_chunk["channels"],
                "raw")
            if data is None:
                abort(404)
            # Group raw samples by channel id.
            d = defaultdict(list)
            for i in data:
                d[i.ch_id].append({
                    "time": i.time.timestamp(),
                    "value": i.value
                })
            if current_chunk["next"]:
                d.update({"next": current_chunk["next"]})
            js = json.dumps(d)
            app.logger.debug(current_chunk)
            return js
        except ValueError as ex:
            app.logger.error(str(ex))
            abort(400)
    else:
        abort(400)
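# NOTE: a hedged client-side sketch for the raw-data handler above. The URL
# path "/chunk/<chunk_id>" is an assumption; the response shape
# ({channel_id: [{"time": ..., "value": ...}, ...], "next": ...}) follows
# directly from the defaultdict built in the handler.
import requests

def fetch_raw_chunk(base_url, chunk_id):
    resp = requests.get("{}/chunk/{}".format(base_url, chunk_id))
    resp.raise_for_status()
    return resp.json()

def fetch_all_chunks(base_url, first_chunk_id):
    # Walk the linked list of chunks by following the "next" field until a
    # chunk without a successor is reached.
    chunk_id = first_chunk_id
    while chunk_id:
        payload = fetch_raw_chunk(base_url, chunk_id)
        yield payload
        chunk_id = payload.pop("next", None)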
def Separate(self, channels):
    try:
        storage = redis_storage.RedisStorage()
        count = (self.t2 - self.t1) * self.density * len(channels) // self.size_stream
        if count == 0:
            count = 1  # guard against a zero divisor for very short ranges
        deltaT = (self.t2 - self.t1) // count
        array = []
        t = self.t1
        while t < self.t2:
            chunk = Chunk(t, t + deltaT, channels)
            array.append(chunk)
            t += deltaT
        # Link the chunks into a doubly linked list.
        for i, c in enumerate(array[1:]):
            array[i].next = c
            c.prev = array[i]
        for i in array:
            storage.put_chunk(i, 1000)
        return array
    except AttributeError:
        logging.error("attribute error")
        raise
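# NOTE: a minimal sketch of the RedisStorage interface both Separate variants
# rely on, assuming chunks are serialized as JSON under their id and that the
# second put_chunk argument is a TTL in seconds. The real implementation lives
# in redis_storage.py, so the key scheme and serialization are assumptions.
import json
import redis

class RedisStorage:
    def __init__(self, host="localhost", port=6379, db=0):
        self._redis = redis.StrictRedis(host=host, port=port, db=db)

    def put_chunk(self, chunk, ttl=None):
        # Persist neighbours by id so get_chunk() can return plain JSON.
        payload = json.dumps({
            "t1": chunk.t1,
            "t2": chunk.t2,
            "channels": chunk.channels,
            "next": chunk.next.id if chunk.next else None,
            "prev": chunk.prev.id if chunk.prev else None})
        self._redis.set(chunk.id, payload, ex=ttl)
        return chunk.id

    def get_chunk(self, chunk_id):
        raw = self._redis.get(chunk_id)
        return json.loads(raw) if raw is not None else None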