Example #1
    def __init__(self):
        # self.connection = psycopg2.connect(host='localhost', database='seoscraperdb', user='******')
        env.read_envfile()
        self.connection = psycopg2.connect(host=env.str('PG_HOST'), database=env.str('PG_DATABASE'), user=env.str('PG_USER'))
        self.api_key = env.str('GOOGLE_PAGESPEED_API_KEY')
        delete_table('pagespeed')
        logging.debug('__init__:' + self.PAGESPEED_URL + self.api_key)
Example #2
def get_connection_string(env_file='config/.env'):
    """Constructs the connection string for the DB with values from env file

    """
    env.read_envfile(env_file)

    connection_string = Template("Driver={$SQLDriver};"
                                 "Server=$server,$port;"
                                 "Database=$db_name;"
                                 "Uid=$db_user;"
                                 "Pwd=$db_password;"
                                 "Encrypt=$encrypt;"
                                 "TrustServerCertificate=$certificate;"
                                 "Connection Timeout=$timeout;")

    return connection_string.substitute(
        SQLDriver=env.str('Driver'),
        server=env.str('Server'),
        port=env.str('Port'),
        db_name=env.str('Database'),
        db_user=env.str('User'),
        db_password=env.str('Password'),
        encrypt=env.str('Encrypt'),
        certificate=env.str('TrustServerCertificate'),
        timeout=env.str('ConnectionTimeout')
    )
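
A minimal usage sketch for the helper above, assuming a config/.env file that defines Driver, Server, Port, Database, User, Password, Encrypt, TrustServerCertificate and ConnectionTimeout; the resulting string is handed straight to pyodbc.connect():

import pyodbc

# Hypothetical caller: build the ODBC connection string from config/.env and
# open a connection with it.
connection = pyodbc.connect(get_connection_string('config/.env'))
cursor = connection.cursor()
cursor.execute('SELECT 1')
print(cursor.fetchone())
connection.close()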
Example #3
def get_connection_string():
    """Constructs the connection string for the DB with values from env file

    """

    connection_string = Template(
        "Driver={$SQLDriver};"
        "Server=$server,$port;"
        "Database=$db_name;"
        "Uid=$db_user;"
        "Pwd=$db_password;"
        "Encrypt=$encrypt;"
        "TrustServerCertificate=$certificate;"
        "Connection Timeout=$timeout;"
    )

    return connection_string.substitute(
        SQLDriver=env.str("Driver"),
        server=env.str("Server"),
        port=env.str("Port"),
        db_name=env.str("Database"),
        db_user=env.str("User"),
        db_password=env.str("Password"),
        encrypt=env.str("Encrypt"),
        certificate=env.str("TrustServerCertificate"),
        timeout=env.str("ConnectionTimeout"),
    )
Example #4
def insert_values(cursor, values_dict):
    """Executes an insert command on the db using the values

     provided by the values_dict in which the keys represent
     table columns and the dict values are the values to be
     inserted
    """
    insert_command = Template('insert into $tableName($columns)'
                              ' values($values)')

    try:
        cursor.execute(insert_command.substitute(
            tableName=env.str('TableName'),
            columns=', '.join(values_dict.keys()),
            values=', '.join("'" + item + "'" for item in values_dict.values())
        ))
    except pyodbc.DataError as data_error:
        print(dir(data_error))
        if data_error.args[0] == '22001':
            logger.error('Value to be inserted exceeds column size limit')
            wrong_value = compare_lengths(cursor, values_dict)
            logger.error('Max size for column %s is %i',
                         wrong_value['columnName'], wrong_value['columnSize'])
            logger.error('Actual size for column %s is %i',
                         wrong_value['columnName'], wrong_value['actualSize'])
        else:
            logger.error('Database insertion error', exc_info=True)

        logger.info('Terminating execution')
        sys.exit(0)
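
A hedged call-site sketch for insert_values(), assuming the TableName environment variable is set and that the get_connection_string() helper from Example #2 is available; the column names and values below are made up for illustration:

import pyodbc

# Hypothetical usage: every dictionary key must be an existing column of the
# table named by the TableName env var; all values are inserted as strings.
connection = pyodbc.connect(get_connection_string())
cursor = connection.cursor()
insert_values(cursor, {'TestName': 'lis-network', 'TestResult': 'Passed'})
connection.commit()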
Example #5
File: sql_utils.py Project: LIS/lis-test
def check_column_exists(cursor, column_name):
    check_column = Template("select * from sys.columns where Name = N'$columnName' and "
                            "Object_ID = Object_ID(N'$tableName')")

    table_name = '"' + env.str('TableName') + '"'
    result = cursor.execute(check_column.substitute(tableName=table_name, columnName=column_name))
    print('checking column {} existence: {}'.format(column_name, result))
    if list(result):
        return True
    return False
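
A short sketch of how check_column_exists() might gate an ALTER TABLE when a column is missing; the column name and its type are illustrative only:

# Hypothetical guard: add the column only when the sys.columns lookup finds nothing.
if not check_column_exists(cursor, 'KernelVersion'):
    cursor.execute('alter table ' + env.str('TableName') + ' add KernelVersion nvarchar(100)')
    cursor.commit()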
Example #6
def get_columns_limit(cursor):
    rows = cursor.execute(
        "select column_name, data_type, character_maximum_length "
        "from information_schema.columns "
        "where table_name = '" + env.str("TableName") + "'"
    )

    columns_list = list()
    for row in rows:
        if row[1] == "nchar":
            columns_list.append((str(row[0]), int(row[2])))

    return columns_list
Example #7
def select_row(cursor, row_dict):
    select_cmd_template = Template("select id from $tableName where ($filters)")

    filters = ""

    for col_name, col_value in row_dict.items():
        if isinstance(col_value, str):
            filters = " ".join([filters, col_name, "=", "'" + col_value + "'"])
        else:
            filters = " ".join([filters, col_name, "=", str(col_value)])
        filters = " ".join([filters, "AND"])

    table_name = '"' + env.str("TableName") + '"'
    return cursor.execute(select_cmd_template.substitute(tableName=table_name, filters=filters[:-3]))
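
A hedged example of combining select_row() with insert_values() to avoid inserting a duplicate row; the filter columns are hypothetical:

# Hypothetical duplicate check: select_row returns the cursor, so fetchone()
# yields the matching id, or None when no row satisfies the filters.
existing = select_row(cursor, {'TestName': 'lis-network', 'TestResult': 'Passed'}).fetchone()
if existing is None:
    insert_values(cursor, {'TestName': 'lis-network', 'TestResult': 'Passed'})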
Example #8
    def get_jwt(self, scope):
        if self.str_jwt is None:
            # Load the service-account key file referenced by GOOGLE_API_PRIVATE_FILE.
            with open(env.str('GOOGLE_API_PRIVATE_FILE')) as key_file:
                json_file = json.load(key_file)

            self.issued_at = timegm(datetime.datetime.utcnow().utctimetuple())
            self.expire = self.issued_at + 3600
        
            payload = { 'iss' : json_file['client_email'], 
                'scope' : scope, 
                'aud' : self.audience, 
                'exp' : self.expire, 
                'iat' : self.issued_at }

            self.str_jwt = str(jwt.encode(payload, json_file['private_key'], algorithm='RS256'), 'utf-8')
        
        return self.str_jwt
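
For context, a sketch (not part of the original class) of how a signed service-account JWT is typically exchanged for an OAuth 2.0 access token at Google's token endpoint, assuming the audience above points at that endpoint and the requests library is available:

import requests

# Hypothetical helper: POST the signed assertion to the OAuth 2.0 token
# endpoint and return the short-lived access token from the JSON response.
def exchange_jwt_for_token(str_jwt, token_url='https://oauth2.googleapis.com/token'):
    response = requests.post(token_url, data={
        'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
        'assertion': str_jwt,
    })
    response.raise_for_status()
    return response.json()['access_token']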
Example #9
def insert_values(cursor, values_dict):
    """Creates an insert command from a template and calls the pyodbc method

     Given a dictionary whose keys match the table's column names and whose
     values are the items to be inserted, the function composes the SQL
     command from a template and has pyodbc execute it.
    """
    insert_command_template = Template(
        'insert into $tableName($columns) values($values)'
    )
    logger.debug('Line to be inserted %s', values_dict)
    values = ''
    table_name = '"' + env.str('TableName') + '"'
    for item in values_dict.values():
        if type(item) == str:
            values = ', '.join([values, "'" + item + "'"])
        else:
            values = ', '.join([values, str(item)])

    insert_command = insert_command_template.substitute(
        tableName=table_name,
        columns=', '.join(values_dict.keys()),
        values=values[1:]
    )

    logger.debug('Insert command that will be executed:')
    logger.debug(insert_command)

    try:
        cursor.execute(insert_command)
    except pyodbc.DataError as data_error:
        print(dir(data_error))
        if data_error.args[0] == '22001':
            logger.error('Value to be inserted exceeds column size limit')
            wrong_value = compare_lengths(cursor, values_dict)
            logger.error('Max size for column %s is %i',
                         wrong_value['columnName'], wrong_value['columnSize'])
            logger.error('Actual size for column %s is %i',
                         wrong_value['columnName'], wrong_value['actualSize'])
        else:
            logger.error('Database insertion error', exc_info=True)

        logger.info('Terminating execution')
        sys.exit(0)
Example #10
def select_row(cursor, row_dict):
    select_cmd_template = Template('select id from $tableName where ($filters)')

    filters = ''

    for col_name, col_value in row_dict.items():
        if isinstance(col_value, str):
            filters = ' '.join([filters, col_name, '=', "'" + col_value + "'"])
        else:
            filters = ' '.join([filters, col_name, '=', str(col_value)])
        filters = ' '.join([filters, 'AND'])

    table_name = '"' + env.str('TableName') + '"'
    return cursor.execute(
        select_cmd_template.substitute(
            tableName=table_name,
            filters=filters[:-3]
            )
        )
Example #11
from envparse import env

TELEGRAM_TOKEN = env.str("TELEGRAM_TOKEN")

print(TELEGRAM_TOKEN)
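
This snippet reads TELEGRAM_TOKEN straight from the process environment; envparse only consults a .env file when read_envfile() is called. A small variant, guarded the same way as the settings examples below (the default value is a placeholder):

from os.path import isfile
from envparse import env

# Load a local .env file when one exists, then read the token with a
# placeholder default so a missing variable does not raise at import time.
if isfile('.env'):
    env.read_envfile('.env')

TELEGRAM_TOKEN = env.str('TELEGRAM_TOKEN', default='changeme')

print(TELEGRAM_TOKEN)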
Example #12
    def __init__(self):
        # self.connection = psycopg2.connect(host='localhost', database='seoscraperdb', user='******')
        env.read_envfile()
        self.connection = psycopg2.connect(host=env.str('PG_HOST'), database=env.str('PG_DATABASE'), user=env.str('PG_USER'))
Example #13
File: base.py Project: natoinet/seoscraper
from envparse import env
import psycopg2

env.read_envfile()
PG_HOST = env.str('PG_HOST')
PG_DB = env.str('PG_DATABASE')
PG_USER = env.str('PG_USER')

def delete_table(table):
    # Delete all data in the tables
    with psycopg2.connect(host=PG_HOST, database=PG_DB, user=PG_USER) as connection:
        cur = connection.cursor()
        cur.execute( "delete from " + table)
        #cur.execute( """delete from %s""", (table,) )
        #cur.execute("delete from urljson")
        #cur.execute("delete from pagemap")

def db_to_csv(query, tofile):
    with psycopg2.connect(host=PG_HOST, database=PG_DB, user=PG_USER) as connection:
        cur = connection.cursor()
        outputquery = "COPY ({0}) TO STDOUT WITH CSV HEADER".format(query)

        with open(tofile, 'w') as f: #"/Users/antoinebrunel/Downloads/res_url_list_links_cirh.csv"
            cur.copy_expert(outputquery, f)
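
A brief usage sketch for the two helpers above; the query columns and the output path are illustrative:

# Hypothetical call sites: clear the staging table, then export a query to CSV.
delete_table('pagespeed')
db_to_csv('select url, status_code from pagespeed', '/tmp/pagespeed.csv')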
Example #14
File: settings.py Project: zhwei820/myhug
from os.path import isfile
from envparse import env

if isfile('.env'):
    env.read_envfile('.env')

DEBUG = env.bool('DEBUG', default=False)

TOKEN_SECRET_KEY = env.str('TOKEN_SECRET_KEY')

SITE_HOST = env.str('HOST', default='127.0.0.1')
SITE_PORT = env.int('PORT', default=8701)

DATABASE_URL = env.str('DATABASE_URL', default='mysql://*****:*****@localhost:3306/pinax_mysite')
DATABASE_URL_r = env.str('DATABASE_URL_r', default='mysql://*****:*****@localhost:3306/pinax_mysite')  # read-only database

REDIS_URL = env.str('REDIS_URL', default=False)  # cluster
# REDIS_URL_1 = env.str('REDIS_URL_1', default=False)
# REDIS_URL_2 = env.str('REDIS_URL_2', default=False)
# REDIS_URL_3 = env.str('REDIS_URL_3', default=False)

# REDIS_URL = env.str('REDIS_URL', default=False)  # cluster

VERIFY_SMS_CHANNEL = env.json('VERIFY_SMS_CHANNEL')  # help='determines the push-center product id from the package name'

STATUS = {
	'OK': 1,
	'ERROR': 2,
	'INFO': 3,
	'UPDATE_USERS': 4
}
Example #15
from envparse import env


# Number of threads in executor
# https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor
THREAD_POOL_SIZE = env.int('RAGE_SRV_THREAD_POOL_SIZE', default=4)


# Redis connection settings
REDIS_HOST = env.str('RAGE_SRV_REDIS_HOST', default='127.0.0.1')
REDIS_PORT = env.int('RAGE_SRV_REDIS_PORT', default=6379)
REDIS_DB = env.int('RAGE_SRV_REDIS_DB', default=1)


# Cover-Rage binding address settings. `SRV_SCHEME` could be one of: 'http', 'https'
SRV_SCHEME = env.str('RAGE_SRV_SCHEME', default='https')
SRV_HOST = env.str('RAGE_SRV_HOST')


# Cover-Rage API urls
SRV_API_SEND_RESULTS_URL = '/api/results/{public_token}/'
SRV_API_GITHUB_WEB_HOOK_URL = '/api/github/{public_token}/'

# TODO: change url from API end point to regular view
SRV_STATUS_URL = '/api/status/{public_token}/{sha}/'


# Minimal good coverage percentage (used for the badge background color).
# If the project's coverage is less than the value of this setting, the badge background color will be red.
# If it is greater than or equal to it, the badge background color will be green.
# If it is zero (undefined), the badge background color will be orange.
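
The comment block above describes a three-way colour rule, but the snippet is cut off before the actual setting; a hypothetical helper that follows the rule could look like this (the RAGE_SRV_MIN_GOOD_COVERAGE name is an assumption modelled on the other variables):

MIN_GOOD_COVERAGE = env.int('RAGE_SRV_MIN_GOOD_COVERAGE', default=80)

# Hypothetical helper mirroring the rule above: orange when coverage is zero
# or undefined, red below the threshold, green at or above it.
def badge_color(coverage):
    if not coverage:
        return 'orange'
    return 'green' if coverage >= MIN_GOOD_COVERAGE else 'red'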
Example #16
from os.path import isfile
from envparse import env
import logging


log = logging.getLogger('app')
log.setLevel(logging.DEBUG)

f = logging.Formatter('[L:%(lineno)d]# %(levelname)-8s [%(asctime)s]  %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(f)
log.addHandler(ch)

if isfile('.env'):
    env.read_envfile('.env')

DEBUG = env.bool('DEBUG', default=False)

SITE_HOST = env.str('HOST')
SITE_PORT = env.int('PORT')
SECRET_KEY = env.str('SECRET_KEY')
MONGO_HOST = env.str('MONGO_HOST')
MONGO_DB_NAME = env.str('MONGO_DB_NAME')

MESSAGE_COLLECTION = 'messages'
USER_COLLECTION = 'users'
Example #17
import logging
from os.path import isfile

from envparse import env


log = logging.getLogger('app')
log.setLevel(logging.DEBUG)

f = logging.Formatter('[L:%(lineno)d]# %(levelname)-8s [%(asctime)s]  %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(f)
log.addHandler(ch)

if isfile('.env'):
    env.read_envfile('.env')

DEBUG = env.bool('DEBUG', default=False)

SITE_HOST = env.str('HOST')
SITE_PORT = env.int('PORT')
SECRET_KEY = env.str('SECRET_KEY')
MYSQL_HOST = env.str('MYSQL_HOST')
MYSQL_DB_NAME = env.str('MYSQL_DB_NAME')
MYSQL_USER = env.str('MYSQL_USER')
MYSQL_PASSWORD = env.str('MYSQL_PASSWORD')

PLAYERS_IN_GAME = 2

STATUS = {
	'OK': 1,
	'ERROR': 2,
	'INFO': 3,
	'UPDATE_USERS': 4
}
Example #18
def test_str():
    expected = str(env_vars['STR'])
    assert_type_value(str, expected, env('STR'))
    assert_type_value(str, expected, env.str('STR'))
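
The test relies on an env_vars fixture and an assert_type_value helper that are not shown here; a minimal sketch of such a helper, purely as an assumption about the test suite:

# Hypothetical helper: assert both the type and the value of what env() returned.
def assert_type_value(expected_type, expected_value, actual):
    assert isinstance(actual, expected_type)
    assert actual == expected_value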
Example #19
File: settings.py Project: sloria/sir
from envparse import env

env.read_envfile()  # Read .env

ENV = env.str('NODE_ENV', default='production')
DEBUG = ENV != 'production'

CACHE = {
    'STRATEGY': 'redis',
    'PARAMS': {
        'host': env('REDIS_HOST', default='localhost'),
        'port': env.int('REDIS_PORT', default=6379),
    }
}

ROUTES = {
    'URL_PREFIX': '/v1/'
}

GITHUB = {
    'CLIENT_ID': env('SIR_GITHUB_CLIENT_ID'),
    'CLIENT_SECRET': env('SIR_GITHUB_CLIENT_SECRET'),
}
Example #20
    def __init__(self):
        env.read_envfile()
        self._aws_region = env.str("AWS_REGION")
        self._aws_access_key_id = env.str("SSM_AWS_ACCESS_KEY_ID")
        self._aws_secret_access_key = env.str("SSM_AWS_SECRET_ACCESS_KEY")
        self._kms_id = env.str("KMS_ID")
Example #21
from envparse import env

DB_HOST = env.str('DB_HOST', default='db.note-whisperer.com')
DB_USER = env.str('DB_USER', default='root')
DB_PASSWD = env.str('DB_PASSWD', default='notewhisperer')

KEY_DB_NAME = env.str('KEY_DB_NAME', default='keys_db')
MSG_DB_NAME = env.str('MSG_DB_NAME', default='msg_db')
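
A hedged sketch of how these constants might be used to open the two databases, assuming a MySQL server and the pymysql driver (neither is named in the snippet):

import pymysql

# Hypothetical connections to the keys and messages databases, using the
# defaults defined above; pymysql is an assumed driver choice.
keys_db = pymysql.connect(host=DB_HOST, user=DB_USER, password=DB_PASSWD, database=KEY_DB_NAME)
msg_db = pymysql.connect(host=DB_HOST, user=DB_USER, password=DB_PASSWD, database=MSG_DB_NAME)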
Example #22
File: parser.py Project: LIS/lis-test
def print_rows(cursor):
    table_name = '"' + env.str('TableName') + '"'
    cursor.execute("SELECT * FROM linux_containers_windows")
    for row in cursor.fetchall():
        print(row)