import json
import re
import time
import requests
import feedparser
import io
from lxml import html
from urllib.parse import urljoin
import telepot
from telepot.loop import MessageLoop
from dotenv import DotEnv
from pymongo import MongoClient

# Constants
DOTENV = DotEnv('../.env')
CLIENT = MongoClient(DOTENV.get('MongoDbUri', 'mongodb://localhost:27017'))
DB = CLIENT[DOTENV.get('MongoDbName', 'rsscrawler')]  # which database
CRAWLERS = DB.crawlers  # which collection
USERS = DB.users  # which collection

TOKEN = DOTENV.get('TelegramToken', '')  # alternatively sys.argv[1] to read the token from the command line

SITES_DIRECTORY = '../sites/'
DEFAULTS_DIRECTORY = '../defaults/'


# Object Creator functions
def create_user_object(who, date):
    """Create a user object from the empty user.json template."""
    # The original fragment ends mid-function; the body below is an assumed
    # completion: load the template and stamp it with the caller's data.
    with open(DEFAULTS_DIRECTORY + 'user.json') as empty_user_json:
        user = json.load(empty_user_json)
    user['id'] = who        # assumed key names; the template schema is not shown
    user['joined'] = date
    return user
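
# A minimal sketch of how such a bot is typically wired up with telepot's
# MessageLoop (the fragment ends before the loop is started); handle() and
# its reply behavior are assumptions, not the original handler.
def handle(msg):
    content_type, chat_type, chat_id = telepot.glance(msg)
    if content_type == 'text':
        bot.sendMessage(chat_id, 'received: ' + msg['text'])

bot = telepot.Bot(TOKEN)
MessageLoop(bot, handle).run_as_thread()
while True:
    time.sleep(10)  # keep the main thread alive for the message loop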
Example #2
# IBM i program to upload sensitive info
import sqlalchemy as sa
from dotenv import DotEnv
import sys
import os

os.chdir('C:\\Users\\WRA1523\\Dropbox\\Yellow Folders\\Python')

dotenv = DotEnv()

# print(f"{dotenv.has('ibm_profile')}")
# sys.exit()

# ibm_db_sa connection string: ibmi://user:password@host/database
# (credentials masked in the original)
engine = sa.create_engine(
    "ibmi://*****:*****@WRSERV/rdbname[S785b980]")

cnxn = engine.connect()
metadata = sa.MetaData()
table = sa.Table('BU100', metadata, autoload=True, autoload_with=engine)

query = sa.select([table])

result = cnxn.execute(query)
result = result.fetchall()

# print first entry
print(result[0])
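
# Since the script loads a DotEnv (and the commented-out check above hints
# that the credentials live there), here is a sketch of building the
# connection string from the .env file; the key names 'ibm_user' and
# 'ibm_password' are hypothetical, as the real .env schema is not shown.
user = dotenv.get('ibm_user', '')
password = dotenv.get('ibm_password', '')
engine = sa.create_engine(f"ibmi://{user}:{password}@WRSERV/rdbname")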
Example #3
import csv
import logging
from datetime import datetime

from dotenv import DotEnv

logging.basicConfig(level=logging.INFO)  # assumed; the fragment logs via logging.info

# The fragment begins mid-function; `main` and the opening of the CSV writer
# below are assumed reconstructions so the code runs end to end.
def main(environment):
    github_issues = []  # placeholder: the original populates this list upstream
    with open('issues.csv', 'w', newline='') as issues_csv:
        fields = [
            'repo_owner_profile_url',
            'title',
            'bodyHTML',
            'url',
            'state',
            'createdAt',
            'lastEditedAt',
            'publishedAt',
            'updatedAt',
            'labels',
            'is_locked',
            'total_participants',
        ]
        writer = csv.DictWriter(issues_csv, fieldnames=fields)
        writer.writeheader()
        writer.writerows(github_issues)

if __name__ == '__main__':
    environment = DotEnv()

    start_time = datetime.now()
    logging.info(f'Script started: {start_time}')
    
    main(environment)
    
    finish_time = datetime.now()
    delta = finish_time - start_time
    
    logging.info(f'Script finished: {finish_time}')
    logging.info(f'Script took {delta.seconds} seconds')
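
# For reference, each element of github_issues must be a dict keyed by the
# fieldnames above; a sketch of one such row, with all values invented
# purely for illustration:
example_issue = {
    'repo_owner_profile_url': 'https://github.com/octocat',
    'title': 'Example issue title',
    'bodyHTML': '<p>Example body</p>',
    'url': 'https://github.com/octocat/hello-world/issues/1',
    'state': 'OPEN',
    'createdAt': '2020-01-01T00:00:00Z',
    'lastEditedAt': None,
    'publishedAt': '2020-01-01T00:00:00Z',
    'updatedAt': '2020-01-02T00:00:00Z',
    'labels': 'bug;help wanted',
    'is_locked': False,
    'total_participants': 3,
}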
Example #4
import asyncio
import base64
import binascii
import hashlib
import hmac
import json
import time

import requests
from dotenv import DotEnv
from requests import Request, Session
from web3 import HTTPProvider, Web3

env = DotEnv('.env')

BASE_URL = env.get('BASE_URL')  # The Ocean X staging environment
WEB3_URL = env.get('WEB3_URL')  # This is the default for Parity

RESERVE_MARKET_ORDER = BASE_URL + '/market_order/reserve'
PLACE_MARKET_ORDER = BASE_URL + '/market_order/place'
USER_HISTORY = BASE_URL + '/user_history'

API_KEY = env.get('API_KEY')
API_SECRET = env.get('API_SECRET')
ETHEREUM_ADDRESS = env.get('ETHEREUM_ADDRESS')

print(API_KEY, API_SECRET, ETHEREUM_ADDRESS)  # debug print of the loaded credentials

web3 = Web3(HTTPProvider(WEB3_URL))
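
# The hmac/hashlib/base64/time imports suggest HMAC-signed requests; below is
# a sketch of one common signing scheme (HMAC-SHA256 over a timestamped
# payload, base64-encoded). The header names and exact message format are
# assumptions; the exchange's real scheme may differ.
def signed_headers(payload):
    timestamp = str(int(time.time() * 1000))
    digest = hmac.new((API_SECRET or '').encode(),
                      (timestamp + payload).encode(),
                      hashlib.sha256).digest()
    return {
        'API-KEY': API_KEY,
        'API-SIGNATURE': base64.b64encode(digest).decode(),
        'API-TIMESTAMP': timestamp,
    }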

Example #5
# Imports assumed from the usage below; the fragment omits its import block.
import json
import logging

from dotenv import DotEnv
from graphqlclient import GraphQLClient
from progress.spinner import Spinner

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

from utils import (
    get_data_query,
    parse_data,
    get_rate_limit,
    handle_rate_limit,
    write_issues_csv,
    write_repos_csv,
    write_stars_forks,
    clear
)

environment = DotEnv('config/.env')
github_token = environment.get('GITHUB_TOKEN')
client = GraphQLClient('https://api.github.com/graphql')
client.inject_token(f'token {github_token}')
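
# get_rate_limit is imported from utils and not shown; here is a hypothetical
# stand-in that queries GitHub's GraphQL rateLimit object, assuming the
# graphqlclient package's execute(), which returns the response as JSON text.
def get_rate_limit_sketch(client):
    response = client.execute('query { rateLimit { remaining resetAt } }')
    return json.loads(response)['data']['rateLimit']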

def get_data(query, variables):
    has_next = True
    cursor = None
    entities = []

    spinner = Spinner('Fetching Github Data')
    while has_next:
        spinner.next()
        variables['cursor'] = cursor

        rate_limit = get_rate_limit(client)