def fetchHistory(store, p_id, s_id):
    """Fetch the price/inventory history for a product at a store and persist it.

    Looks up the product↔supplier mapping, searches stocktrack.ca for the
    product's SKU, downloads its history series, and writes one row per data
    point into the PS table.

    :param store: stocktrack.ca store slug used in the request URLs.
    :param p_id: product id.
    :param s_id: supplier id.
    """
    sku = db.query(db.PSM).select().eql(p_id, s_id).where("p_id", "s_id").eval()
    # TODO(review): search term is hard-coded for debugging; the intended
    # query is presumably the product's uuid — confirm before re-enabling:
    # mydict = {'q': sku[0].get('uuid')}
    mydict = {'q': 'lg tv'}
    response = requests.get(
        "https://stocktrack.ca/%s/search.php?%s" % (store, urlencode(mydict)))
    res = response.json()
    sku = res['products'][0]['sku']
    response = requests.get(
        "https://stocktrack.ca/%s/hist_data.php?sku=%s" % (store, sku))
    series = response.json()
    # BUG FIX: the write previously ran once after the loop, persisting only
    # the final point's date/price/inventory. Persist every point instead.
    for point in series['1P']:
        date = getDate(point)
        price = point['y']
        inv = getInventory(point)
        db.query(db.PS).write(
            p_id,
            s_id,
            date,
            float(inv),
            float(price),
        ).into("p_id", "s_id", "date", "inventory", "price").eval()
def get(self):
    """Return all matching products as JSON, sorted by the requested column.

    Query-string args are parsed by Product.get_parser; 'col' and 'ord'
    control ordering, the remaining args become equality filters.
    """
    parsed = Product.get_parser.parse_args()
    sort_col = parsed.pop('col')
    sort_dir = parsed.pop('ord')
    query = db.query(db.P).select()
    columns, values = utils.marshal(parsed)
    query.eql(*values).where(*columns)
    query.order_by(sort_col, sort_dir)
    return utils.jsonify(query.eval())
def post(self):
    """Create a product row from the parsed POST body and return it as JSON."""
    # Removed leftover debug prints of request.json / request.form / args —
    # they clutter logs and echo raw request bodies.
    args = Product.post_parser.parse_args()
    rows = db.query(db.P)
    cols, vals = utils.marshal(args)
    rows.write(*vals).into(*cols)
    return utils.jsonify(rows.eval())
def get(self):
    """Return matching suppliers as JSON, dropping the placeholder entry.

    Only the first supplier with s_id == -1 is removed, matching the
    original remove-then-break behaviour.
    """
    parsed = Supplier.get_parser.parse_args()
    query = db.query(db.S).select()
    columns, values = utils.marshal(parsed)
    query.eql(*values).where(*columns)
    result = utils.jsonify(query.eval())
    placeholder = next(
        (i for i, sup in enumerate(result) if sup['s_id'] == -1), None)
    if placeholder is not None:
        del result[placeholder]
    return result
def get(self):
    """Return product↔supplier mappings joined with their supplier and
    product rows, filtered by the parsed query-string args."""
    args = ProductSupplierMap.get_parser.parse_args()
    rows = (db.query(db.PSM).select()
            .join(db.S).on("s_id")
            .join(db.P).on("p_id"))
    cols, vals = utils.marshal(args)
    rows.eql(*vals).where(*cols)
    suppliers = utils.jsonify(rows.eval())
    # TODO(review): filter out placeholder rows (id == -1) before returning,
    # preferably keyed on the id column.
    return suppliers
def run(self):
    """Consume price messages from Kafka and persist each one to the PS table.

    Runs until stop_event is set or a message keyed "kill" arrives. Each
    message's offset is committed individually after its DB write succeeds
    (at-least-once delivery).
    """
    consumer = KafkaConsumer(
        bootstrap_servers=os.getenv('KAFKA_SERVER'),
        consumer_timeout_ms=1000,
        group_id='group',
        value_deserializer=json.loads,
    )
    consumer.subscribe(['test4', 'progress'])
    # BUG FIX: print() took the format string and uuid as two arguments,
    # so the %s was never substituted.
    print('Consumer %s Listening' % self.uuid)
    while not self.stop_event.is_set():
        for msg in consumer:
            print(msg.value)
            echo = msg.value['echo']
            s_id = echo['s_id']
            p_id = echo['p_id']
            price = msg.value['data'][0]['price']
            rows = db.query(db.PS)
            rows.write(s_id, p_id, price, 'NOW').into(
                's_id', 'p_id', 'price', 'date')
            print(rows.eval())
            tp = TopicPartition(msg.topic, msg.partition)
            # BUG FIX: the committed offset is the offset of the NEXT message
            # to consume; committing msg.offset itself re-delivers this
            # message after a rebalance or restart.
            offsets = {tp: OffsetAndMetadata(msg.offset + 1, None)}
            consumer.commit(offsets=offsets)
            # NOTE(review): kafka-python delivers keys as bytes unless a
            # key_deserializer is set — confirm the "kill" comparison matches.
            if self.stop_event.is_set() or msg.key == "kill":
                break
    print("Consumer Closing")
    # BUG FIX: was consumer.unsuscribe() — AttributeError at shutdown.
    consumer.unsubscribe()
    consumer.close()
def post(self):
    """Insert a product↔supplier mapping from the parsed POST body and
    return the result as JSON."""
    parsed = ProductSupplierMap.post_parser.parse_args()
    columns, values = utils.marshal(parsed)
    query = db.query(db.PSM)
    query.write(*values).into(*columns)
    return utils.jsonify(query.eval())
# NOTE(review): this chunk starts mid-way through a constructor of what is
# presumably the `demand` class (instantiated below) — the class header and
# the start of __init__ are outside this view; indentation reconstructed.
        # Split an "YYYY.M" end date into numeric year and month.
        ey, em = end_year_month.split(".")
        self.year_end, self.month_end = int(ey), int(em)

    def getDemand(self):
        """Fetch daily Google-Trends data for self.keyword over the
        configured year/month range and return it (a pandas DataFrame,
        per the .iterrows() usage below)."""
        try:
            # NOTE(review): `pytrends` is created but never used here —
            # dailydata.get_daily_data presumably builds its own session;
            # confirm before removing.
            pytrends = TrendReq(hl='en-US', tz=360)
            res = dailydata.get_daily_data(self.keyword, start_year=self.year_start, start_mon=self.month_start, stop_year=self.year_end, stop_mon=self.month_end, geo='', wait_time = 10)
            return res
        finally:
            # Cleanup intentionally disabled for now.
            # os.remove(demand.temp_dir)
            pass

# --- module-level driver script ---
product_id = 9
# TODO(review): keyword is hard-coded for debugging; the commented lines
# below look like the intended lookup of the product's keyword.
# p = db.query(db.P).select().eql(product_id).where("p_id").eval()
# print(p[0]['keyword'])
# keyword = p[0]['keyword']
keyword = "lg tv"
d = demand(keyword, "2020.2", "2020.3")
df = d.getDemand()
for i, row in df.iterrows():
    # i is a pandas Timestamp index; persist one demand row per day.
    print('index: ', str(i.to_pydatetime().date()), 'col g:', row[keyword])
    db.query(db.PD).write(
        product_id,
        str(i.to_pydatetime().date()),
        float(row[keyword]),
    ).into(
        *db.Cols[db.PD]
    ).eval()
    print('inserted')