Example No. 1
def discord_notification(msg, thumbnail_url):
    # notify on discord
    discord_url = get_discord_url()
    payload = r"""{{ "embeds": [{{"title": "Mindfulness notification", 
    "thumbnail": {{"url": "{thumbnail}"}}, "description": "{content}"}}] }}""". \
        format(content=msg.replace("'", '"'), thumbnail=thumbnail_url)
    cmd = r"""curl -X POST -H "Content-Type: application/json" --data '{}' {}""".format(
        payload, discord_url)
    logger.info(cmd)

    if "None" != discord_url:
        for i in range(utils.get_config_parser().getint('discord',
                                                        'retries',
                                                        fallback=2)):
            try:
                subprocess.call([
                    'curl', '-X', 'POST', '-H',
                    '"Content-Type: application/json"', '--data', payload,
                    discord_url
                ])
            except Exception as ex:
                logger.info("Discord notifier failed: %s" % ex)
            else:
                logger.info("Discord notifier succeeded")
                break
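
Every example here calls utils.get_config_parser(). Its implementation is not shown; a minimal sketch of such a helper, assuming an INI-style file named config.txt next to the module (both assumptions), could look like this:

# hypothetical sketch; the "config.txt" name and location are assumptions
import configparser
import os


def get_config_parser(filename="config.txt"):
    """Read the INI-style configuration file and return the parser."""
    parser = configparser.ConfigParser()
    parser.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), filename))
    return parser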
Example No. 2
 def test_password_file(self):
     connection = utils.get_oracle_connection_obj()
     config = utils.get_config_parser()
     query = f"SELECT PASSWORD_FILE FROM {config.get('Database', 'connection_tablename')}"
     pwrd_df = pd.read_sql(query, con=connection)
     for index, row in pwrd_df.iterrows():
         exists = os.path.isfile(row['PASSWORD_FILE'])
         assert exists, f"{row['PASSWORD_FILE']} file does not exist"
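
Several of these tests also rely on utils.get_oracle_connection_obj(), called both without arguments (the metadata database) and with explicit credentials. A rough sketch under assumed config key names (the [Database] username/password/connection_string keys are guesses) might be:

import cx_Oracle


def get_oracle_connection_obj(username=None, password=None, connection_string=None):
    """Return a cx_Oracle connection, falling back to configured defaults."""
    if username is None:
        # the key names below are assumptions about the config layout
        config = get_config_parser()
        username = config.get('Database', 'username')
        password = config.get('Database', 'password')
        connection_string = config.get('Database', 'connection_string')
    return cx_Oracle.connect(username, password, connection_string)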
Example No. 3
 def test_encrypt_decrypt(self):
     config = utils.get_config_parser()
     enc = AESCipher(config.get('AES', 'key'))
     with open("password.txt", "w+") as f:
         f.write("Password")
     enc.encrypt_file("password.txt", out_filename="password.enc")
     password = enc.decrypt_file("password.enc")
     assert password == "Password", "Failed to decrypt file"
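
The AESCipher class used above is not shown in these examples. As a purely illustrative stand-in (not the project's actual implementation), a file cipher exposing the same encrypt_file/decrypt_file methods can be built on the cryptography package's Fernet, deriving a key from the configured passphrase:

import base64
import hashlib

from cryptography.fernet import Fernet


class SimpleFileCipher:
    """Illustrative stand-in for AESCipher; not the project's real class."""

    def __init__(self, passphrase):
        # derive a 32-byte urlsafe-base64 key from the passphrase
        digest = hashlib.sha256(passphrase.encode()).digest()
        self._fernet = Fernet(base64.urlsafe_b64encode(digest))

    def encrypt_file(self, in_filename, out_filename):
        with open(in_filename, "rb") as fh:
            token = self._fernet.encrypt(fh.read())
        with open(out_filename, "wb") as fh:
            fh.write(token)

    def decrypt_file(self, in_filename):
        with open(in_filename, "rb") as fh:
            return self._fernet.decrypt(fh.read()).decode()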
Example No. 4
 def setUp(self):
     # setUp instantiates the Selenium webdriver and loads the browser specified in the config settings.
     self.config = get_config_parser("config.txt")
     if self.config.get("default", "browsertype") == "Chrome":
         self.selenium = webdriver.Chrome(self.config.get("default", "Chrome"))
     else:
         self.selenium = webdriver.Firefox()
    	
     self.selenium.maximize_window()
     super(LoginTestCase, self).setUp()
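
For this setUp to work, config.txt needs a [default] section with a browsertype key and, for Chrome, a driver path under the Chrome key. A quick sanity check of that assumed shape (the driver path below is a placeholder):

import configparser

sample = """
[default]
browsertype = Chrome
Chrome = /usr/local/bin/chromedriver
"""

config = configparser.ConfigParser()
config.read_string(sample)
assert config.get("default", "browsertype") == "Chrome"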
Example No. 5
    def test_oracle_table(self):
        connection = utils.get_oracle_connection_obj()
        config = utils.get_config_parser()

        tbl_list = (config.get('Database', 'extraction_tablename'),
                    config.get('Database', 'connection_tablename'))
        dbcur = connection.cursor()
        try:
            for tbl in tbl_list:
                dbcur.execute("SELECT * FROM {}".format(tbl))
        except cx_Oracle.DatabaseError as ex:
            err, = ex.args
            pytest.fail(err.message)
Example No. 6
 def test_source_db_connection(self):
     connection = utils.get_oracle_connection_obj()
     config = utils.get_config_parser()
     enc = AESCipher(
         '[EX\xc8\xd5\xbfI{\xa2$\x05(\xd5\x18\xbf\xc0\x85)\x10nc\x94\x02)j\xdf\xcb\xc4\x94\x9d(\x9e'
     )
     query = f"SELECT CONNECTION_STRING,USERNAME,PASSWORD_FILE FROM {config.get('Database', 'connection_tablename')}"
     source_db_df = pd.read_sql(query, con=connection)
     for index, row in source_db_df.iterrows():
         password = enc.decrypt_file(row['PASSWORD_FILE'])
         src_connection = utils.get_oracle_connection_obj(
             row['USERNAME'], password, row['CONNECTION_STRING'])
         assert isinstance(
             src_connection, cx_Oracle.Connection
         ), "Username, password, or connection string is incorrect"
Example No. 7
 def test_source_tables(self):
     connection = utils.get_oracle_connection_obj()
     config = utils.get_config_parser()
     enc = AESCipher(
         '[EX\xc8\xd5\xbfI{\xa2$\x05(\xd5\x18\xbf\xc0\x85)\x10nc\x94\x02)j\xdf\xcb\xc4\x94\x9d(\x9e'
     )
     query = f"SELECT CONNECTION_STRING,USERNAME,PASSWORD_FILE,DATABASE_NAME,TABLE_NAME FROM " \
         f"{config.get('Database', 'extraction_tablename')} E JOIN " \
         f"{config.get('Database', 'connection_tablename')} C ON E.CONNECTION_ID=C.CONNECTION_ID"
     join_df = pd.read_sql(query, con=connection)
     for index, row in join_df.iterrows():
         password = enc.decrypt_file(row['PASSWORD_FILE'])
         src_connection = utils.get_oracle_connection_obj(
             row['USERNAME'], password, row['CONNECTION_STRING'])
         dbcur = src_connection.cursor()
         try:
             dbcur.execute(
                 f"SELECT * FROM {row['DATABASE_NAME']}.{row['TABLE_NAME']}"
             )
         except cx_Oracle.DatabaseError as ex:
             err, = ex.args
             pytest.fail(err.message)
"""

# standard library
import os
import sys
import datetime
import csv

# third party
import stackexchange

# local
import utils

# authenticate to the API
config_parser = utils.get_config_parser()
if not config_parser.has_section('stackoverflow'):
    sys.stderr.write(utils.yellow(
        "stackoverflow is not configured; skipping...\n"
    ))
    sys.exit(0)

# authenticate. if this gives problems exceeding request limits,
# you'll need to obtain an API key
# https://github.com/lucjon/Py-StackExchange/tree/updating-2.0#api-keys
so = stackexchange.Site(stackexchange.StackOverflow)
so.impose_throttling = True

user = so.user(config_parser.get('stackoverflow', 'user_id'))
timeline = user.timeline.fetch()
# timeline = user.timeline.fetch( # i think this is the format
Example No. 9
def extract_source_data(extraction_id, enc):
    """
    Extacts source and source database connection details from
    extraction_details and connection_detalis tables respectively
    and extracts source data and store it in provided delimited file.
    :param extraction_id: source extraction_id
    :param enc : AESCipher object
    """
    try:
        logging.getLogger().setLevel(logging.INFO)
        connection = utils.get_oracle_connection_obj()

        config = utils.get_config_parser()
        extraction_query = get_select_query(
            config.get('Database', 'extraction_columnlist'),
            config.get('Database', 'tablespace'),
            config.get('Database', 'extraction_tablename'),
            "EXTRACTION_ID=" + extraction_id.__str__())

        logging.info("Extraction_details query = " + extraction_query)

        extraction_details_df = pd.read_sql(extraction_query, con=connection)

        # Get connection details for given connection_id corresponding to extraction id
        connection_id = extraction_details_df.iloc[0]['CONNECTION_ID']
        connection_query = get_select_query(
            config.get('Database', 'connection_columnlist'),
            config.get('Database', 'tablespace'),
            config.get('Database', 'connection_tablename'),
            "CONNECTION_ID=" + connection_id.__str__())

        logging.info("Connection_details query = " + connection_query)
        connection_details_df = pd.read_sql(connection_query, con=connection)

        # create connection object for extraction table db

        password_file = connection_details_df.iloc[0]['PASSWORD_FILE']
        password = enc.decrypt_file(password_file)
        connection_src_extraction = utils.get_oracle_connection_obj(
            connection_details_df.iloc[0]['USERNAME'], password,
            connection_details_df.iloc[0]['CONNECTION_STRING'])

        # Get current incremental column's max value from source data
        max_value = get_max(connection_src_extraction, extraction_details_df)

        source_extraction_query = get_select_query(
            extraction_details_df.iloc[0]['COLUMN_LIST'],
            extraction_details_df.iloc[0]['DATABASE_NAME'],
            extraction_details_df.iloc[0]['TABLE_NAME'],
            extraction_details_df.iloc[0]['INCREMENTAL_COLUMN'] + " > " +
            str(extraction_details_df.iloc[0]['INCREMENTAL_VALUE']) +
            " AND " + extraction_details_df.iloc[0]['INCREMENTAL_COLUMN'] +
            " <= " + str(max_value))

        logging.info("Source extraction query = " + source_extraction_query)
        source_extraction_df = pd.read_sql(source_extraction_query,
                                           con=connection_src_extraction)

        logging.info("copying " + extraction_details_df.iloc[0]['FILENAME'] +
                     " file to " + config.get('Directory', 'inbound') +
                     " path")

        source_extraction_df.to_csv(
            config.get('Directory', 'inbound') +
            extraction_details_df.iloc[0]['FILENAME'],
            index=False,
            sep=extraction_details_df.iloc[0]['FILE_DELIMITER'])

        logging.info("Updating File_details oracle table")
        today = date.today().strftime("%Y-%m-%d")
        insert_file_details(
            connection, config.get('Database', 'tablespace'),
            config.get('Database', 'file_details_tablename'),
            f"0,'{connection_details_df.iloc[0]['SYSTEM']}', "
            f"'{extraction_details_df.iloc[0]['DATABASE_NAME']}', "
            f"'{extraction_details_df.iloc[0]['TABLE_NAME']}', "
            f"'{extraction_details_df.iloc[0]['FILENAME']}', "
            f"TO_DATE('{datetime.now().strftime('%Y%m%d%H%M%S')}','yyyymmddhh24miss'),"
            f"{source_extraction_df.shape[0]}, 'False', 'False', 'None', date'{today}',"
            f" '{getpass.getuser()}',"
            f" date'{today}', '{getpass.getuser()}'")

        logging.info("Successfully extracted data for " +
                     extraction_details_df.iloc[0]['TABLE_NAME'] + " table.")

        logging.info(
            "Updating incremental column's value in extraction_details")

        # ##### Uncomment the code below to update the incremental_value column in the extraction_details table #####
        # update_extraction_details(connection, config.get('Database', 'tablespace'),
        #                  config.get('Database', 'extraction_tablename'),
        #                  extraction_id, max_value)

        logging.info(
            "####################### JOB COMPLETED SUCCESSFULLY ######################"
        )

    except Exception as ex:
        # err, = ex.args
        error.Errors(ex).errorrun()
        # logging.error("Error code    = ", err.code)
        # logging.error("Error Message = ", err.message)
        connection.close()
        connection_src_extraction.close()
        os._exit(1)
    connection.close()
    connection_src_extraction.close()
    return 0
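
get_select_query() is not shown here; judging from how it is called (column list, schema, table, WHERE condition), a minimal version might simply assemble the statement:

def get_select_query(column_list, schema, table, condition):
    """Hypothetical helper: build a SELECT statement from its four parts."""
    return f"SELECT {column_list} FROM {schema}.{table} WHERE {condition}"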
Example No. 10
def setup_django(do_rsync=True):
    """setup django"""

    # http://stackoverflow.com/a/19536667/564709
    if isinstance(do_rsync, (str, unicode,)):
        do_rsync = bool(strtobool(do_rsync))
        
    with vagrant_settings(env.host_string):

        # open up and listen to port 8000
        sudo("iptables -A INPUT -p tcp --dport 8000 -j ACCEPT")

        # extract necessary configuration variables from INI file
        parser = utils.get_config_parser()
        mysql_root_password = parser.get('mysql', 'root_password')
        django_username = parser.get('mysql', 'django_root_username')
        django_password = parser.get('mysql', 'django_root_password')
        django_db = parser.get('mysql', 'django_database')

        # setup mysql
        fabtools.require.mysql.server(password=mysql_root_password)
        with settings(mysql_user='******', mysql_password=mysql_root_password):
            fabtools.require.mysql.user(django_username, django_password)
            fabtools.require.mysql.database(django_db, owner=django_username)


        # collect the static files
        with cd("/vagrant/Web"):
            run("./manage.py collectstatic --noinput")

        # rsync directory to get all models, views, etc into the
        # /srv/www directory.
        #
        # TODO: Use a soft link to the figures/templates directory to
        # avoid unnecessary rsyncing of data from analysis?
        site_name = "movievsmovie.datasco.pe"
        web_dir = "Web"
        site_root = os.path.join("/srv", "www", site_name, web_dir)
        fabtools.require.directory(site_root, owner="www-data", use_sudo=True)
        if do_rsync:
            sudo("rsync -avC --exclude='*.hg' /vagrant/%s %s" % (
                web_dir, os.path.dirname(site_root)
            ))

        # write the local django settings. since local.py is listed in
        # the .hgignore, the -C option to rsync must ignore it. this
        # needs to go AFTER rsyncing
        for root_dir in ["/vagrant/" + web_dir, site_root]:
            # make sure the dir exists (for the site_root one)
            target_dir = root_dir+"/Web/settings/"
            fabtools.require.directory(target_dir, owner="www-data", use_sudo=True)
            # use_sudo is necessary (for the site_root one)
            fabtools.require.files.template_file(
                path=root_dir+"/Web/settings/local.py",
                template_source=os.path.join(
                    utils.fabfile_templates_root(), "django_settings.py"
                ),
                context={
                    "django_db": django_db,
                    "django_username": django_username,
                    "django_password": django_password,
                },
                use_sudo=True,
            )

        # make sure permissions are set up properly
        #sudo("chmod -R a+w %s" % site_root)
        sudo("chmod -R g+w %s" % site_root)
        sudo("chgrp -R www-data %s" % site_root)
            
        # make sure database is up and running
        with cd("/vagrant/Web"):
            run("./manage.py syncdb --noinput")
            run("./manage.py migrate")


        # setup apache
        # fabtools.require.apache.module_enabled("mod_wsgi") # __future__
        config_filename = os.path.join(
            utils.fabfile_templates_root(), 
            "apache.conf",
        )
        fabtools.require.apache.site(
            'movie.vs.movie.datasco.pe',
            template_source=config_filename,
            wsgi_application_group=r"%{GLOBAL}",
            site_name=site_name,
            site_root=site_root,
        )
        fabtools.require.apache.disabled('default')
Example No. 11
    # The access tokens can be found on your applications's Details
    # page located at https://dev.twitter.com/apps (located 
    # under "Your access token")
    access_token = config_parser.get('twitter', 'access_token')
    access_token_secret = config_parser.get('twitter', 'access_token_secret')
    
    # authenticate and use the api object
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)

    api = tweepy.API(auth)
    return api

# if the authentication was successful, the api object can now be used
# to fetch the account's timeline
api = authenticate(utils.get_config_parser())

# print out the tweet timestamps and some basic statistics about those
# tweets
writer = csv.writer(sys.stdout)
writer.writerow(["datetime", "retweets", "favorites"])
for status in tweepy.Cursor(api.user_timeline).items():
    writer.writerow([
        status.created_at, status.retweet_count, status.favorite_count
    ])

sys.stderr.write(utils.green(
    "twitter complete!\n"
))
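
The beginning of authenticate() is not included in this snippet; by symmetry with the access-token lines, it presumably reads the app's consumer credentials from the same [twitter] section (the key names below are assumptions):

def authenticate(config_parser):
    # assumed opening of the function; the rest is the snippet above
    consumer_key = config_parser.get('twitter', 'consumer_key')
    consumer_secret = config_parser.get('twitter', 'consumer_secret')
    # ... continues with the access-token lines shown above ...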
Example No. 12
def setup_django(do_rsync=True):
    """setup django"""
        
    with vagrant_settings(env.host_string):

        # extract necessary configuration variables from INI file
        parser = utils.get_config_parser()
        mysql_root_password = parser.get('mysql', 'root_password')
        django_username = parser.get('mysql', 'django_root_username')
        django_password = parser.get('mysql', 'django_root_password')
        django_db = parser.get('mysql', 'django_database')
        facebook_id = parser.get('social', 'FACEBOOK_APP_ID')

        # setup mysql
        fabtools.require.mysql.server(password=mysql_root_password)
        with settings(mysql_user='******', mysql_password=mysql_root_password):
            fabtools.require.mysql.user(django_username, django_password)
            fabtools.require.mysql.database(django_db, owner=django_username)

        # write the local django settings. since local.py is listed in
        # the .hgignore, the -C option to rsync must ignore it. this
        # needs to go AFTER rsyncing

        # rsync directory to get all models, views, etc into the
        # /srv/www directory.
        #
        # TODO: Use a soft link to the figures/templates directory to
        # avoid unnecessary rsyncing of data from analysis?
        site_name = "chicagoenergy.datascopeanalytics.com"
        web_dir = "Map"
        site_root = os.path.join("/srv", "www", site_name, web_dir)
        fabtools.require.directory(site_root, owner="www-data", use_sudo=True)
        if do_rsync:
            sudo("rsync -avC --exclude='*.hg' /vagrant/%s %s" % (
                web_dir, os.path.dirname(site_root)
            ))


        for root_dir in ["/vagrant/" + web_dir, site_root]:
            # make sure the dir exists (for the site_root one)
            target_dir = root_dir+"/Map/settings/"
            fabtools.require.directory(target_dir, owner="www-data", use_sudo=True)
            # use_sudo is necessary (for the site_root one)
            fabtools.require.files.template_file(
                path=root_dir+"/Map/settings/local.py",
                template_source=os.path.join(
                    utils.fabfile_templates_root(), "django_settings.py"
                ),
                context={
                    "django_db": django_db,
                    "django_username": django_username,
                    "django_password": django_password,
                    "FACEBOOK_APP_ID": facebook_id,
                },
                use_sudo=True,
            )

        # collect the static files
        with cd("/vagrant/Map"):
            run("./manage.py collectstatic --noinput")

        # make sure permissions are set up properly
        #sudo("chmod -R a+w %s" % site_root)
        sudo("chmod -R g+w %s" % site_root)
        sudo("chgrp -R www-data %s" % site_root)

        # make sure database is up and running
        with cd("/vagrant/Map"):
            run("./manage.py syncdb --noinput")
            run("./manage.py migrate")

        # setup apache
        # fabtools.require.apache.module_enabled("mod_wsgi") # __future__
        config_filename = os.path.join(
            utils.fabfile_templates_root(), 
            "apache.conf",
        )
        fabtools.require.apache.site(
            'chicagoenergy.datascopeanalytics.com',
            template_source=config_filename,
            wsgi_application_group=r"%{GLOBAL}",
            site_name=site_name,
            site_root=site_root,
        )
        fabtools.require.apache.disabled('default')
Example No. 13
import time
from shutil import copyfile
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import logging
import errors as error
import utils

config = utils.get_config_parser()
logging.getLogger().setLevel(logging.INFO)


class FileHandler(FileSystemEventHandler):
    def on_created(self, event):

        logging.info(
            f'event type: {event.event_type}  path : {event.src_path}')
        try:

            copyfile(
                event.src_path,
                config.get('Directory', 'temp') +
                os.path.basename(event.src_path))
            connection = utils.get_oracle_connection_obj()
            update_file_details(
                True, connection, config.get('Database', 'tablespace'),
                config.get('Database', 'file_details_tablename'),
                os.path.basename(event.src_path))
        except Exception as ex:
            error.Errors(ex).errorrun()
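
The snippet defines the handler but does not show how it is attached to a watchdog observer. A typical wiring, assuming the watched directory comes from the same config (the 'inbound' key name is an assumption), looks roughly like this:

if __name__ == "__main__":
    # watch the inbound directory and dispatch created-file events to FileHandler
    watch_path = config.get('Directory', 'inbound')  # key name assumed
    observer = Observer()
    observer.schedule(FileHandler(), watch_path, recursive=False)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()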