def main():
    """Entry point: refresh nixpkgs/fc-nixos checkouts and update pinnings.

    Looks for a .env file (expected to contain GITHUB_TOKEN) in the
    current directory first, then next to this script; the first one
    found wins.
    """
    # .env should contain GITHUB_TOKEN
    for d in ['.', p.dirname(__file__)]:
        try:
            read_dotenv(p.join(d, '.env'))
            break
        except FileNotFoundError:
            continue

    a = argparse.ArgumentParser()
    # Fix: a positional argument only honours `default` when nargs='?' is
    # given — without it argparse made `workdir` mandatory and the '.'
    # default was dead code.
    a.add_argument('workdir', nargs='?', default='.',
                   help='directory to hold nixpkgs and fc-nixos checkouts')
    a.add_argument(
        '-n', '--no-push', '--dry-run', default=False, action='store_true',
        help="commit changes locally, but don't push to origin and don't "
        "create pull requests")
    a.add_argument('-v', '--verbose', action='store_true', help='more output')
    args = a.parse_args()

    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

    # All checkouts live under the (possibly fresh) work directory.
    os.makedirs(args.workdir, exist_ok=True)
    os.chdir(args.workdir)

    nixpkgs = Nixpkgs('nixpkgs', NIXPKGS_URL)
    nixpkgs.ensure()
    nixpkgs.track_upstream()

    fc_nixos = FcNixOS('fc-nixos', FC_NIXOS_URL)
    fc_nixos.ensure()
    fc_nixos.update_pinnings(nixpkgs)

    # Dry-run mode stops after the local commits.
    if not args.no_push:
        nixpkgs.push()
        fc_nixos.create_pr()
def get_max_count_NFTs_watcher(self):
    """Return MAX_COUNT_NFTS_WATCHER from .env as an int, or None if unset."""
    read_dotenv(join('.env'))
    raw_value = os.getenv('MAX_COUNT_NFTS_WATCHER')
    if not raw_value:
        return None
    return int(raw_value)
import os
from py_dotenv import read_dotenv
from multiprocessing import cpu_count

# Environment file sits next to this module; note the leading slash is
# joined by plain string concatenation below.
dotenv_path = '/info.env'
base_path = os.path.abspath(os.path.join(os.path.dirname(__file__)))
read_dotenv(base_path + dotenv_path)

# ODBC DSN names differ between Windows ('nt') and Linux ('posix'), so
# pick the matching set of environment variables.
if 'nt' in os.name:
    DSN_SRV3_PRD = os.getenv('DSN_SRV3_Prd')
    DSN_MLG_PRD = os.getenv('DSN_MLG_Prd')
    DSN_MLG_DEV = os.getenv('DSN_MLG_Dev')
elif 'posix' in os.name:
    # NOTE(review): DSN_MLG_DEV is never assigned on posix — any importer
    # referencing it on Linux will hit a NameError; confirm intended.
    DSN_SRV3_PRD = os.getenv('DSN_SRV3_Prd_Linux')
    DSN_MLG_PRD = os.getenv('DSN_MLG_Prd_Linux')

# Database credentials, also sourced from info.env.
UID = os.getenv('UID')
PWD = os.getenv('PWD')

project_id = 2844
update_frequency_days = 0

# Source database/table for this extraction job.
sql_info = {
    'database_source': 'BI_MLG',
    'source_table': 'VHE_Fact_PA_PSE_Info'
}

# Local CSV scratch files (raw and grouped).
temp_file_loc = 'dbs/cr_vhe_pse_visits.csv'
temp_file_grouped_loc = 'dbs/cr_vhe_pse_visits_grouped.csv'
import os
from discordbot import newBot
from py_dotenv import read_dotenv
import mysql.connector

# Load DB and Discord credentials from the .env file next to this script.
dotenv_path = os.path.join(os.path.dirname(__file__), '.env')
read_dotenv(dotenv_path)

# Open the MySQL connection the bot will use; all credentials come from
# the environment so nothing secret is committed.
mydb = mysql.connector.connect(host=os.getenv('DB_HOST'),
                               user=os.getenv('DB_USER'),
                               passwd=os.getenv('DB_PASSWORD'),
                               database=os.getenv('DB_DATABASE'))
connection = mydb.cursor()

# Start the Discord bot, handing it the cursor, the raw connection and
# the channel used for activation messages.
newBot(os.getenv('DISCORD_TOKEN'), connection, mydb,
       os.getenv('ACTIVATION_CHANNEL'))
# Python 2/3 compatible import of the urlparse module.
try:
    import urllib.parse as urlparse
except ImportError:
    import urlparse

import requests
from bs4 import BeautifulSoup
from tweepy import OAuthHandler, Stream, TweepError
from tweepy import API

from logs import *
from twitterlistener import API_RETRY_DELAY_S, API_RETRY_COUNT, API_RETRY_ERRORS, TwitterListener

# Read API keys
# NOTE(review): `read_dotenv`, `os` and `getenv` are not imported in this
# chunk — presumably they arrive via `from logs import *` or elsewhere in
# the file; confirm.
try:
    read_dotenv(os.path.join(os.path.dirname(__file__), '.env'))
except FileNotFoundError:
    # Missing .env is fatal: every API call below needs these keys.
    print(
        "\n%s '.env' does not exist. Please create the file & add the necessary API keys to it."
        % ERROR)
    exit(1)

# The keys for the Twitter app we're using for API requests
# (https://apps.twitter.com/app/13239588). Read from environment variables.
TWITTER_CONSUMER_KEY = getenv('TWITTER_CONSUMER_KEY')
TWITTER_CONSUMER_SECRET = getenv('TWITTER_CONSUMER_SECRET')

# The keys for the Twitter account we're using for API requests.
# Read from environment variables.
TWITTER_ACCESS_TOKEN = getenv('TWITTER_ACCESS_TOKEN')
TWITTER_ACCESS_TOKEN_SECRET = getenv('TWITTER_ACCESS_TOKEN_SECRET')
import os
from py_dotenv import read_dotenv

# Load environment variables from ./.env, resolved against the current
# working directory.
path = os.path.abspath('.env')
read_dotenv(path)

# NOTE(review): str() turns a missing variable (None) into the literal
# string 'None' instead of failing fast — confirm that is intended.
DB_NAME = str(os.getenv('DBNAME'))
USER = str(os.getenv('USER'))
PASSWD = str(os.getenv('PASSWD'))


class Configuration(object):
    """Flask/SQLAlchemy settings sourced from environment variables."""

    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_DATABASE_URI = str(os.getenv('DATABASE_URI'))
import logging
import os

from py_dotenv import read_dotenv
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, CallbackQueryHandler

from bot_handlers import start, good_echo, button, empty_message, college_updates

if __name__ == '__main__':
    # Load SECRET_TOKEN from the local .env; a missing or unreadable file
    # is reported but does not abort startup.
    try:
        read_dotenv(".env")
    except Exception as exc:
        print("Error while reading env file - {}".format(exc))

    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.INFO)

    updater = Updater(os.getenv("SECRET_TOKEN"))

    # Register all bot handlers: commands, inline-button callbacks, plain
    # text echo and status-update cleanup.
    handlers = [
        CommandHandler('start', start),
        CommandHandler('collegeupdates', college_updates),
        CallbackQueryHandler(button),
        MessageHandler(Filters.text & (~Filters.command), good_echo),
        MessageHandler(Filters.status_update, empty_message),
    ]
    for handler in handlers:
        updater.dispatcher.add_handler(handler)

    # Block until interrupted.
    updater.start_polling()
    updater.idle()
from argparse import ArgumentParser
import csv
from datetime import datetime, timedelta
import logging
import os
from py_dotenv import read_dotenv
import re
import requests
import sys
import time
import urllib3

# The PAN-OS API is commonly reached over self-signed TLS; silence the
# resulting InsecureRequestWarning noise from urllib3.
urllib3.disable_warnings()
import xml.etree.ElementTree as ET

# API key and firewall hostname are read from ./.paenv in the working dir.
envfile = os.path.join('./.paenv')
read_dotenv(envfile)
api_key = os.getenv('API_KEY')
server = os.getenv('SERVER')

tstamp = datetime.now()
# NOTE(review): the default str() of a datetime contains spaces and
# colons, which is not a valid filename on Windows — confirm this script
# only targets POSIX hosts.
newfile = f"PAVPN-log-{tstamp}.csv"


def get_system_log(server=server, api_key=api_key):
    # Build a PAN-OS XML API request for GlobalProtect gateway
    # config-succ / config-release system log events (up to 5000 rows).
    # NOTE(review): `query_time` is not defined anywhere visible in this
    # chunk — the f-string below raises NameError unless it is a
    # module-level name defined elsewhere in the file; confirm.
    payload = {
        'type': 'log',
        'log-type': 'system',
        'query': f'( subtype eq globalprotect ) and ( eventid eq globalprotectgateway-config-succ ) or ( eventid eq globalprotectgateway-config-release ) and ( {query_time} )',
        'nlogs': '5000'
    }
    # Authenticate via the PAN-OS API-key header.
    headers = {
        'X-PAN-KEY': api_key
    }
def get_save_images(self):
    """Return whether image saving is enabled via the SAVE_IMAGES env var.

    Unset/empty values and the common "off" spellings ('false', '0',
    'no', 'off', case-insensitive) disable saving; anything else
    enables it.
    """
    dotenv_path = join('.env')
    read_dotenv(dotenv_path)
    save_images = os.getenv('SAVE_IMAGES')
    # Bug fix: the old `True if save_images else False` returned True for
    # SAVE_IMAGES=False or SAVE_IMAGES=0, because any non-empty string is
    # truthy. Parse the value instead of testing truthiness.
    if not save_images:
        return False
    return save_images.strip().lower() not in ('false', '0', 'no', 'off')
def get_ipfs_url(self):
    """Return the IPFS gateway URL from the IPFS_URL env var (None if unset)."""
    read_dotenv(join('.env'))
    return os.getenv('IPFS_URL')
def get_web3_provider(self):
    """Return the web3 provider URL from the WEB_3_PROVIDER env var (None if unset)."""
    read_dotenv(join('.env'))
    return os.getenv('WEB_3_PROVIDER')