Example #1
def start_saving(dest_url, db_url):
    item_list = get_all_file_folders(dest_url)
    db_item_list = db.get_all_from_db(db_url)
    save_list, update_list = ali_utils.get_changed_list(item_list,
                                                        db_item_list)
    db.save_to_db(save_list)
    db.update_to_db(update_list)
Example #2
 def post(self):
     # Testing Cmd: curl -X POST -d "id=a03" -d "t=23.5" -d "h=98.5"  http://%IP%:%Port%/logs
     args = parser.parse_args()
     while not lock(locker_key, locker_token, locker_expire):
         pass
     (Devices, Logs) = load_from_db('Devices', 'Logs')
     if args['id'] not in Devices.keys():
         return "Device %s is not available!" % args['id'], 406
     if args['id'] not in Logs.keys():
         Logs[args['id']] = []
     ts = int(time.time())
     # ts = str(datetime.utcnow())     # datetime.strptime("2019-10-01 14:08:08.774648","%Y-%m-%d %H:%M:%S.%f")
     # if ts in Logs[args['id']].keys():
     #     return "Timestamp is duplicated!", 406
     tmp = Logs[args['id']]
     tmp = tmp[-4:] if len(tmp) >= 4 else tmp
     try:
         tmp = tmp + \
             [{'ts': ts, 't': float(args['t']), 'h': float(args['h'])}, ]
         Logs[args['id']] = tmp
         save_to_db(Logs=Logs)
         unlock(locker_key, locker_token)
         return Logs[args['id']], 201
     except ValueError as err:
         unlock(locker_key, locker_token)
         return {'message': '%r' % err}, 500
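The handlers in this listing repeatedly spin on lock(locker_key, locker_token, locker_expire) before touching the shared store and release it with unlock(locker_key, locker_token). Those helpers are never shown here; a minimal sketch of what they could look like on top of Redis (the connection r, the decode_responses flag and the compare-and-delete release are assumptions, not part of the original code):

import redis

r = redis.Redis(decode_responses=True)  # assumed shared Redis connection

def lock(key, token, expire):
    # SET key token NX EX expire: succeeds only while no one else holds the lock
    return r.set(key, token, nx=True, ex=expire)

def unlock(key, token):
    # best-effort release: delete only if we still own the lock
    # (a production version would do this atomically, e.g. via a Lua script)
    if r.get(key) == token:
        r.delete(key)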
Example #3
 def delete(self, device_id):
     # Testing Cmd: curl -X DELETE http://%IP%:%Port%/devices/a03
     while not lock(locker_key, locker_token, locker_expire):
         pass
     (Devices, ) = load_from_db('Devices')
     abort_if_device_doesnt_exist(device_id)
     del Devices[device_id]
     save_to_db(Devices=Devices)
     unlock(locker_key, locker_token)
     return '', 204
Example #4
 def put(self, device_id):
     # Testing Cmd: curl -X PUT -d "lat=23.0" -d "lng=121.0" http://%IP%:%Port%/devices/a03
     args = parser.parse_args()
     while not lock(locker_key, locker_token, locker_expire):
         pass
     (Devices, ) = load_from_db('Devices')
     Devices[device_id] = {'lat': args['lat'], 'lng': args['lng']}
     save_to_db(Devices=Devices)
     unlock(locker_key, locker_token)
     return Devices[device_id], 201
Example #5
 def post(self):
     # Testing Cmd: curl -X POST -d "id=a04" -d "lat=23.5" -d "lng=121.5"  http://%IP%:%Port%/devices
     args = parser.parse_args()
     while not lock(locker_key, locker_token, locker_expire):
         pass
     (Devices, ) = load_from_db('Devices')
     if args['id'] in Devices.keys():
         return "ID is duplicated!", 406
     Devices[args['id']] = {'lat': args['lat'], 'lng': args['lng']}
     save_to_db(Devices=Devices)
     unlock(locker_key, locker_token)
     return Devices[args['id']], 201
Example #6
def main_pipeline():
    '''
    Base logic here
    '''
    new_files = tools.check_input_dir()
    for file_name in new_files:
        p = Parser(dirs.INPUT_DIR + file_name)
        try:
            json_data = p.make_json()
            tools.save_json(json_data, file_name)
            save_to_db(json_data)
            tools.move_input_file(file_name, dirs.OK_DIR)
        except tools.MyLocalException:
            tools.move_input_file(file_name, dirs.ERR_DIR)
Example #7
    def patch(self, device_id):
        # Testing Cmd: curl -X PATCH -d "lat=23.5" http://%IP%:%Port%/devices/a03
        while not lock(locker_key, locker_token, locker_expire):
            pass
        (Devices, ) = load_from_db('Devices')
        abort_if_device_doesnt_exist(device_id)
        info = Devices[device_id]
        args = parser.parse_args()
        if args['lat'] is not None:
            info['lat'] = float(args['lat'])
        if args['lng'] is not None:
            info['lng'] = float(args['lng'])

        Devices[device_id] = info
        save_to_db(Devices=Devices)
        unlock(locker_key, locker_token)
        return Devices[device_id], 201
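The get/post/put/patch/delete handlers in Examples #2-#5 and #7 read like Flask-RESTful Resource methods: they call a module-level reqparse parser and return (body, status) tuples. A minimal sketch of how such a resource might be declared and routed (the class name Device, the argument list and the URL rule are assumptions, not taken from the listing):

from flask import Flask
from flask_restful import Api, Resource, reqparse

app = Flask(__name__)
api = Api(app)

parser = reqparse.RequestParser()
parser.add_argument('id')
parser.add_argument('lat')
parser.add_argument('lng')

class Device(Resource):
    # put/patch/delete bodies like the ones shown above would go here
    def patch(self, device_id):
        ...

api.add_resource(Device, '/devices/<device_id>')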
Example #8
def populate_historical_market_data():
    '''
    1. You have to gather 100 items from the API and store them in the database.
    '''
    d = datetime.today()
    d_100 = d - timedelta(days=100)

    res = n.Markets.get_market_cap_history(start=d_100.isoformat("T") + "Z",
                                           end=d.isoformat("T") + "Z")

    res = res[::-1]
    i = 1

    for r in res:
        market_cap = r['market_cap']
        time_stamp = dateutil.parser.parse(r['timestamp']).strftime('%d-%m-%Y')
        data = (time_stamp, market_cap)
        print(
            f"Saving total market cap data to db from {time_stamp} ({i}/100)")
        i += 1
        save_to_db(data, 'bonus')
Example #9
def upload_file():
    if request.method == 'POST':
        # Check whether the POST request contains the uploaded file
        if 'file' not in request.files:
            print('no file')
            return redirect(request.url)
        file = request.files['file']
        # if the user does not select a file, the browser stays
        # on the main page
        if file.filename == '':
            print('no filename')
            return redirect(request.url)
        else:
            new_file = mapbox_API(file)  # call script_API
            filename = "Resultado_final.xlsx"
            new_file.save(os.path.join(settings.UPLOAD_FOLDER, filename))
            print("saved file successfully")
            # call the function that saves the data to the db
            save_to_db()

            # send the file name as a parameter for the download
            return redirect('/downloadfile/' + filename)
    return render_template('index.html')
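upload_file in Example #9 uses request, redirect and render_template, so it is presumably a Flask view serving both GET and POST. A minimal sketch of the wiring it assumes (the app object and the URL rule are guesses):

from flask import Flask, request, redirect, render_template

app = Flask(__name__)

@app.route('/', methods=['GET', 'POST'])
def upload_file():
    ...  # body as in Example #9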
Example #10
def on_message(client, userdata, msg):
    print(msg.topic+" "+str(msg.payload))
    data = msg.payload.decode('utf-8')
    args = json.loads(data)
    while not lock(locker_key, locker_token, locker_expire):
        pass
    (Devices, Logs) = load_from_db('Devices', 'Logs')
    if args['id'] not in Devices.keys():
        print("Device %s is not available!" % args['id'])
        return
    if args['id'] not in Logs.keys():
        Logs[args['id']] = []
    ts = int(time.time())
    tmp = Logs[args['id']]
    tmp = tmp[-4:] if len(tmp) >= 4 else tmp
    try:
        tmp = tmp+[{'ts': ts, 't': float(args['t']), 'h': float(args['h'])}, ]
        Logs[args['id']] = tmp
        save_to_db(Logs=Logs)
        unlock(locker_key, locker_token)
        print('Device %s added a log.' % args['id'])
    except ValueError as err:
        unlock(locker_key, locker_token)
        print('Error: %r \n when device %s adding a log.' % (err, args['id']))
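The on_message(client, userdata, msg) signature above matches a paho-mqtt callback. A minimal sketch of how the handler might be attached to a client (broker address and topic are assumptions):

import paho.mqtt.client as mqtt

client = mqtt.Client()
client.on_message = on_message      # the callback from Example #10
client.connect('localhost', 1883)   # assumed broker host and port
client.subscribe('devices/logs')    # assumed topic
client.loop_forever()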
Example #11
def get_historical_data():
    '''
    Return historical market (CoinGecko) / news (NewsAPI) data for coins in coin_list
    '''
    starting_day = '18-03-2020'

    i = 1
    for coin in coin_list:
        print(
            f"Fetching 20 days worth of data for {coin} ({i}/{len(coin_list)} coins)..."
        )
        for day in get_last_20_days(starting_day):
            res = cg.get_coin_history_by_id(coin, day)

            # Market Data (CoinGeckoAPI)
            market_data = res['market_data']
            price = str(round(market_data['current_price']['usd'], 2))
            market_cap = str(round(market_data['market_cap']['usd'], 2))
            total_volume = str(round(market_data['total_volume']['usd'], 2))

            # Reddit Data (CoinGeckoAPI)
            community_data = res['community_data']
            reddit_posts_48h = str(community_data['reddit_average_posts_48h'])
            reddit_comments_48h = str(
                community_data['reddit_average_comments_48h'])

            # News Data (NewsAPI)
            sentiment = str(get_sentiment(coin, day))

            # Save to DB
            market_data_to_save = (day, coin, price, market_cap, total_volume)
            reddit_data_to_save = (day, coin, reddit_posts_48h,
                                   reddit_comments_48h)
            sentiment_data_to_save = (day, coin, sentiment)

            save_to_db(market_data_to_save, 'market')
            save_to_db(reddit_data_to_save, 'reddit')
            save_to_db(sentiment_data_to_save, 'sentiment')

        i += 1
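Example #11 calls cg.get_coin_history_by_id(coin, day), which matches the pycoingecko client; get_last_20_days and get_sentiment are project helpers not shown in the listing. A minimal sketch of the setup the snippet assumes (the coin ids are illustrative guesses):

from pycoingecko import CoinGeckoAPI

cg = CoinGeckoAPI()                  # client used as `cg` above
coin_list = ['bitcoin', 'ethereum']  # assumed contents of coin_list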
Example #12
def fetch_and_save_data():
    '''
    Remember: Must limit data from each API to 20 or fewer items

    Populates DB with following data from yesterday's date (most recent market close):
    CoinGecko API: 10 items (5 rows for market_data, 5 rows for reddit_data)
    News API: 5 items (5 rows for sentiment_data)
    '''
    yesterdays_date = (datetime.today() -
                       timedelta(days=1)).strftime('%d-%m-%Y')

    for coin in coin_list:
        print(
            f'Fetching {coin}\'s market, reddit and sentiment data for {yesterdays_date}'
        )
        market_data = fetch_market_data(yesterdays_date, coin)
        reddit_data = fetch_reddit_data(yesterdays_date, coin)
        sentiment_data = fetch_sentiment_data(yesterdays_date, coin)

        save_to_db(market_data, 'market')
        save_to_db(reddit_data, 'reddit')
        save_to_db(sentiment_data, 'sentiment')
        print('\n')
Example #13
def init_db():
    while not lock(locker_key, locker_token, locker_expire):
        pass
    save_to_db(Devices={}, Logs={})
    unlock(locker_key, locker_token)
    return "Database is initialized!"
Example #14
def on_message(mqttc, userdata, msg):
    print('message received...')
    print('topic: ' + msg.topic + ', qos: ' + str(msg.qos) + ', message: ' +
          str(msg.payload))
    save_to_db(msg)
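Every example above ultimately calls save_to_db, but the listing never defines it, and its signature differs between projects: positional save_to_db(data, table) in Examples #8, #11 and #12 versus keyword save_to_db(Devices=..., Logs=...) in the device/log snippets. For the keyword-style variant, a minimal sketch of what save_to_db and its counterpart load_from_db could look like over Redis with JSON values (key layout and connection are assumptions):

import json
import redis

r = redis.Redis(decode_responses=True)  # assumed shared connection

def save_to_db(**tables):
    # store each named table (e.g. Devices, Logs) as a JSON string
    for name, value in tables.items():
        r.set(name, json.dumps(value))

def load_from_db(*names):
    # return the requested tables as a tuple, e.g. (Devices, Logs)
    return tuple(json.loads(r.get(name) or '{}') for name in names)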