# Example #1
# 0
#   ibm i pgm to upload sensi info
import sqlalchemy as sa
from dotenv import DotEnv
import sys
import os

# Pin the working directory so relative paths resolve against the script's
# data folder. NOTE(review): hard-coded user path — breaks on any other machine.
os.chdir('C:\\Users\\WRA1523\\Dropbox\\Yellow Folders\\Python')

# Load environment variables from a .env file in the working directory.
dotenv = DotEnv()

# print(f"{dotenv.has('ibm_profile')}")
# sys.exit()

# Connect to an IBM i (DB2) database through the "ibmi" SQLAlchemy dialect.
# NOTE(review): user/password appear inline as ***** placeholders — they
# should be read from dotenv rather than embedded in the URL.
engine = sa.create_engine(
    "ibmi://*****:*****@WRSERV/rdbname[S785b980]")

cnxn = engine.connect()
metadata = sa.MetaData()
# Reflect the existing BU100 table definition from the live database.
# NOTE(review): autoload=True is the pre-1.4 SQLAlchemy reflection API;
# newer versions use autoload_with only.
table = sa.Table('BU100', metadata, autoload=True, autoload_with=engine)

# SELECT * FROM BU100 (legacy list-style select; removed in SQLAlchemy 2.0).
query = sa.select([table])

result = cnxn.execute(query)
result = result.fetchall()

# print first entry
print(result[0])
import json
import re
import time
import requests
import feedparser
import io
from lxml import html
from urllib.parse import urljoin
import telepot
from telepot.loop import MessageLoop
from dotenv import DotEnv
from pymongo import MongoClient

# Constants
# Load configuration from the project-root .env file.
DOTENV = DotEnv('../.env')
# MongoDB connection; falls back to a local instance when MongoDbUri is unset.
CLIENT = MongoClient(DOTENV.get('MongoDbUri', 'mongodb://localhost:27017'))
DB = CLIENT[DOTENV.get('MongoDbName', 'rsscrawler')]  # which database
CRAWLERS = DB.crawlers  # which collection
USERS = DB.users  # which collection

# Telegram bot token; the empty-string default means the bot cannot
# authenticate when TelegramToken is missing from the environment.
TOKEN = DOTENV.get('TelegramToken',
                   '')  #sys.argv[1]  # get token from command-line

# Relative directories holding per-site configs and default JSON templates.
SITES_DIRECTORY = '../sites/'
DEFAULTS_DIRECTORY = '../defaults/'


# Object Creator functions
def create_user_object(who, date):
    """ creating user object from empty user.json """
    # NOTE(review): the function body is truncated in this excerpt — only the
    # opening of the user.json template read is visible.
    with open(DEFAULTS_DIRECTORY + 'user.json') as empty_user_json:
import asyncio
import base64
import binascii
import hashlib
import hmac
import json
import time

import requests
from dotenv import DotEnv
from requests import Request, Session
from web3 import HTTPProvider, Web3

# Load API configuration from the local .env file.
env = DotEnv('.env')

BASE_URL = env.get('BASE_URL')  # The Ocean X staging environment
WEB3_URL = env.get('WEB3_URL')  # This is the default for Parity

# REST endpoints, built from the configured base URL.
RESERVE_MARKET_ORDER = BASE_URL + '/market_order/reserve'
PLACE_MARKET_ORDER = BASE_URL + '/market_order/place'
USER_HISTORY = BASE_URL + '/user_history'

API_KEY = env.get('API_KEY')
API_SECRET = env.get('API_SECRET')
ETHEREUM_ADDRESS = env.get('ETHEREUM_ADDRESS')

# NOTE(review): this prints the API secret to stdout — remove before running
# anywhere output is captured or shared.
print(API_KEY, API_SECRET, ETHEREUM_ADDRESS)

# JSON-RPC connection to the configured Ethereum node.
web3 = Web3(HTTPProvider(WEB3_URL))

""" Crawling url links """
import json
import os
import requests
import feedparser
from lxml import html
from pymongo import MongoClient
from dotenv import DotEnv
from util.bcolors import Bcolors

### Global Variables
# Load configuration from the project-root .env file.
DOTENV = DotEnv('../.env')
# MongoDB connection; falls back to a local instance when MongoDbUri is unset.
CLIENT = MongoClient(DOTENV.get('MongoDbUri', 'mongodb://localhost:27017'))
DB = CLIENT[DOTENV.get('MongoDbName', 'rsscrawler')]  # which database
CRAWLERS = DB.crawlers  # which collection
SITES_DIRECTORY = '../sites/'  # relative directory holding per-site configs


def push_redis(data):
    """Stub for pushing *data* to redis; currently just echoes it back."""
    payload = data
    return payload


def write_file(data):
    """Stub for persisting *data* to a file; currently just echoes it back."""
    contents = data
    return contents


def insert_mongo_db(isinit, data, siteurl):
    """ inserting data to mongodb """
    # Skip the insert entirely when no data was scraped.
    # NOTE(review): the function body is truncated in this excerpt — only the
    # empty-dict guard is visible.
    if not data == {}:
# Example #5
# 0
from bs4 import BeautifulSoup
from dotenv import DotEnv
import requests
import spotipy
from spotipy.oauth2 import SpotifyOAuth

# Read Spotify API credentials from the local .env file.
dotenv = DotEnv()

SPOTIFY_ID=dotenv.get('CLIENT_ID')
SPOTIFY_SECRET=dotenv.get('CLIENT_SECRET')

# Scrape the Billboard Hot 100 page for the requested date and extract the
# song titles from the chart markup.
date = input("Which date do you want the machine to get music from? YYYY-MM-DD: ")
response = requests.get("https://www.billboard.com/charts/hot-100/" + date)
soup = BeautifulSoup(response.text, 'html.parser')
song_names_spans = soup.find_all("span", class_="chart-element__information__song")
song_names = [song.getText() for song in song_names_spans]


# Authenticate with Spotify via OAuth; the token is cached in token.txt so the
# browser consent dialog is only needed on the first run.
sp = spotipy.Spotify(
    auth_manager=SpotifyOAuth(
        scope="playlist-modify-private",
        redirect_uri="http://example.com",
        client_id=SPOTIFY_ID,
        client_secret=SPOTIFY_SECRET,
        show_dialog=True,
        cache_path="token.txt"
    )
)
# Spotify id of the authenticated user (used later to create playlists).
user_id = sp.current_user()["id"]

            'repo_owner_profile_url',
            'title',
            'bodyHTML',
            'url',
            'state',
            'createdAt',
            'lastEditedAt',
            'publishedAt',
            'updatedAt',
            'labels',
            'is_locked',
            'total_participants',
        ]
        writer = csv.DictWriter(issues_csv, fieldnames=fields)
        writer.writeheader()
        writer.writerows(github_issues)

if __name__ == '__main__':
    # Load environment configuration for this run.
    environment = DotEnv()

    # Time the whole run so the duration can be logged at the end.
    start_time = datetime.now()
    logging.info(f'Script started: {start_time}')

    main(environment)

    finish_time = datetime.now()
    delta = finish_time - start_time

    logging.info(f'Script finished: {finish_time}')
    logging.info(f'Script took {delta.seconds} seconds')
  
# Module-level logger for this script.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

from utils import (
    get_data_query,
    parse_data,
    get_rate_limit,
    handle_rate_limit,
    write_issues_csv,
    write_repos_csv,
    write_stars_forks,
    clear
)

# GitHub GraphQL client, authenticated with the token read from config/.env.
environment = DotEnv('config/.env')
github_token = environment.get('GITHUB_TOKEN')
client = GraphQLClient('https://api.github.com/graphql')
client.inject_token(f'token {github_token}')

def get_data(query, variables):
    has_next = True
    cursor = None
    entities = []

    spinner = Spinner('Fetching Github Data')
    while has_next:
        spinner.next()
        variables['cursor'] = cursor

        rate_limit = get_rate_limit(client)