Example No. 1
def start_command(message: telebot.types.Message) -> None:
    """Handle the /start command"""

    database.create_database()

    _send_message(message.chat.id, database.select_notes(),
                  keyboard.delete_key())
Example No. 2
def monitoring_UI(sec):
    try:
        db.create_database()
        comp = False
        clist_start = []
        clist_stop = []
        prev_date = 0
        while True:
            proclist, date = pr.proc_list()

            if comp:
                #Compare and analyze
                prev_list = db.load_data_by_date(prev_date)
                clist_start, clist_stop = co.compare_process(
                    proclist, prev_list)

                #Print Status Log
                if clist_start: fp.print_list(clist_start)
                if clist_stop: fp.print_list(clist_stop)

                if not clist_start: comp = False

            else:
                comp = True
            #Insert all running processes into the database and sleep
            db.insert_data(proclist)
            prev_date = date
            time.sleep(sec)

    except KeyboardInterrupt:
        sys.exit(0)
Example No. 3
 def create_parser(self):
     super(ReaderFileManager, self).create_parser()
     self.parser['Settings'] = {
         'date filter': '0',
         'tags sort': '0',
         'tag filter': '0'
     }
     self.parser['Flags'] = {
         'has attachments': '0',
         'has parent': '0',
         'has children': '0'
     }
     self.parser['Strings'] = {
         'tags': '()',
         'body': ''
     }
     self.parser['Dates'] = {
         'low year': str(get_oldest_date().year),
         'high year': str(get_newest_date().year),
         'low month': '1',
         'high month': '12',
         'low day': '01',
         'high day': '31',
         'low hour': '00',
         'high hour': '23',
         'low minute': '00',
         'high minute': '59',
         'low weekday': '0',
         'high weekday': '6'
     }
     if not self.database:
         db = join(getcwd(), 'jurnl.sqlite')
         if not exists(db):
             create_database(db)
         self.database = db
Example No. 4
def configure_database(app):
    create_database()
    Migrate(app, db)

    @app.teardown_request
    def shutdown_session(exception=None):
        db.session.remove()
Example No. 5
def request_handler(request):
	last_section = False
	if not request["method"] == "POST":
		return "MUST BE POST REQUEST"
	if not request["is_json"]:
		return "MUST BE JSON"
	data = json.loads(request["data"])
	new_photo = bool(int(data["new"]))
	photo_text = data["photo"]

	if new_photo:
		db.delete_all_rows(photo_database,table)
	#print(photo_text)
	# "255,217" is the JPEG end-of-image marker (0xFF, 0xD9)
	if "255,217" in photo_text:
		final_text_index = photo_text.find("255,217")
		last_section = True
		photo_text = photo_text[:final_text_index+8]

	values = list(map(int,photo_text.split(",")[:-1]))
	if not last_section:
		db.create_database(photo_database,table,"(data int)")
		for val in values:
			db.insert_into_database(photo_database,table,(val,))
	if last_section:
		rows = db.lookup_database(photo_database,table)
		img = [row[0] for row in rows] + values
		#change back to
		path = "__HOME__/final_project/.gitignore/tmp.jpg"
		tmpfile = open(path, "wb")
		#tmpfile = open("tmp.jpg", "wb")
		for i in img:
		    tmpfile.write(struct.pack("B",i))
		tmpfile.close()

	return "Completed"
Example No. 6
def main():
    api_key = config.get(USER_CFG_SECTION, 'api_key')
    api_secret_key = config.get(USER_CFG_SECTION, 'api_secret_key')
    tld = config.get(USER_CFG_SECTION, 'tld')

    client = BinanceAPIManager(api_key, api_secret_key, tld, logger)

    logger.info("Creating database schema if it doesn't already exist")
    create_database()

    set_coins(supported_coin_list)

    migrate_old_state()

    initialize_trade_thresholds(client)

    initialize_current_coin(client)

    schedule = SafeScheduler(logger)
    schedule.every(SCOUT_SLEEP_TIME).seconds.do(
        scout,
        client=client,
        transaction_fee=SCOUT_TRANSACTION_FEE,
        multiplier=SCOUT_MULTIPLIER).tag("scouting")
    schedule.every(1).minutes.do(update_values,
                                 client=client).tag("updating value history")
    schedule.every(1).minutes.do(
        prune_scout_history,
        hours=SCOUT_HISTORY_PRUNE_TIME).tag("pruning scout history")
    schedule.every(1).hours.do(prune_value_history).tag(
        "pruning value history")

    while True:
        schedule.run_pending()
        time.sleep(1)
Example No. 7
    def setUp(self):
        if glob.glob('*.db'):
            raise Exception(".db files exist on setUp and shouldn't!")
        
        database.create_database(TEST_DB)

        self.parser = phonebook.parse()
Example No. 8
 def open_spider(self, spider):
     print('open spider was called. Initializing database')
     self.context = mysql.connector.connect(user=self.user,
                                            passwd=self.passwd,
                                            host=self.host,
                                            charset='utf8mb4',
                                            use_unicode=True)
     create_database(self.context, self.db)
Example No. 9
def scrape_group():
    global _base_dir, _db_path

    # Define the loggers
    _define_loggers()

    # Select the working directory
    _base_dir = _select_work_dir()

    # Build the paths
    people_save_dir = os.path.join(_base_dir, 'people')
    tg_save_images_dir = os.path.join(_base_dir, 'telegram_images')
    _db_path = os.path.join(_base_dir, 'session.sqlite')

    # Create the folders
    if not os.path.exists(people_save_dir): os.makedirs(people_save_dir)
    if not os.path.exists(tg_save_images_dir): os.makedirs(tg_save_images_dir)

    # Create the database
    if not os.path.exists(_db_path): database.create_database(_db_path)
    else:
        print(
            colored('[ERROR]', 'red') +
            ' You have selected the working dir of an active scraping project, please select another directory'
        )
        return

    # Select the group to analyze
    _scrape_logger.info('New session creation')
    target_group = _select_telegram_group()

    # Fetch and save the Telegram group members as 'person' entities
    people_profiles = _select_and_save_group_members(target_group,
                                                     people_save_dir)
    database.add_new_people(_db_path, people_profiles)

    # Sort people by their identifiability index (descending), keeping
    # only those with a chance of being identified
    people_profiles = [
        p for p in people_profiles
        if p.get_identifiability() > _MIN_IDENTIFIABILITY_THREESHOLD
    ]
    people_profiles.sort(key=lambda p: p.get_identifiability(), reverse=True)

    # Download the Telegram users' profile photos
    _scrape_telegram(people_profiles, people_save_dir, tg_save_images_dir)

    # Process the Facebook and Instagram profiles
    _scrape_facebook_instagram(people_profiles, people_save_dir)

    # Process the Twitter profiles
    _scrape_twitter(people_profiles, people_save_dir)

    print(
        colored('[SYSTEM]', 'green') +
        ' Social profile search completed, press a button to terminate the application'
    )
    _scrape_logger.info('Social profile search finished')
Example No. 10
def initialize_app(flask_app):
    blueprint = Blueprint('geolocation', __name__, url_prefix='/geolocation')
    api.init_app(blueprint)
    api.add_namespace(location_namespace)
    flask_app.register_blueprint(blueprint)

    db.init_app(flask_app)

    from database import create_database
    create_database(app=flask_app)
Example No. 11
def initialize_app(flask_app):
    blueprint = Blueprint('weather', __name__, url_prefix='/weather')
    api.init_app(blueprint)
    api.add_namespace(protected_namespace)
    api.add_namespace(public_namespace)
    flask_app.register_blueprint(blueprint)

    db.init_app(flask_app)

    from database import create_database
    create_database(app=flask_app)
Example No. 12
def collect(data, addr, conn):
    # Strip the trailing '#' marker and the leading "b'" bytes-literal prefix
    data = data.split("#")[0]
    data = data.split("b'")[1]
    database.create_database()
    # Payload format: IP,auth_type,mac_addr,ID,hello_timeout
    IP = data.split(',')[0]
    auth_type = data.split(',')[1]
    mac_addr = data.split(',')[2]
    ID = data.split(',')[3]
    hello_timeout = data.split(',')[4]
    _data = (IP, auth_type, mac_addr, ID, hello_timeout)
    database.update_database(_data)
Example No. 13
def main():
    api_key = config.get(USER_CFG_SECTION, 'api_key')
    api_secret_key = config.get(USER_CFG_SECTION, 'api_secret_key')
    tld = config.get(USER_CFG_SECTION,
                     'tld') or 'com'  # Default Top-level domain is 'com'

    client = BinanceAPIManager(api_key, api_secret_key, tld, logger)

    logger.info("Creating database schema if it doesn't already exist")
    create_database()

    set_coins(supported_coin_list)

    migrate_old_state()

    initialize_trade_thresholds(client)

    if get_current_coin() is None:
        current_coin_symbol = config.get(USER_CFG_SECTION, 'current_coin')
        if not current_coin_symbol:
            current_coin_symbol = random.choice(supported_coin_list)

        logger.info("Setting initial coin to {0}".format(current_coin_symbol))

        if current_coin_symbol not in supported_coin_list:
            exit(
                "***\nERROR!\nSince there is no backup file, a proper coin name must be provided at init\n***"
            )
        set_current_coin(current_coin_symbol)

        if config.get(USER_CFG_SECTION, 'current_coin') == '':
            current_coin = get_current_coin()
            logger.info("Purchasing {0} to begin trading".format(current_coin))
            all_tickers = client.get_all_market_tickers()
            client.buy_alt(current_coin, BRIDGE, all_tickers)
            logger.info("Ready to start trading")

    schedule = SafeScheduler(logger)
    schedule.every(SCOUT_SLEEP_TIME).seconds.do(
        scout,
        client=client,
        transaction_fee=SCOUT_TRANSACTION_FEE,
        multiplier=SCOUT_MULTIPLIER).tag("scouting")
    schedule.every(1).minutes.do(update_values,
                                 client=client).tag("updating value history")
    schedule.every(1).minutes.do(
        prune_scout_history,
        hours=SCOUT_HISTORY_PRUNE_TIME).tag("pruning scout history")
    schedule.every(1).hours.do(prune_value_history).tag(
        "pruning value history")

    while True:
        schedule.run_pending()
        time.sleep(1)
Example No. 14
    def setUp(self):
        if glob.glob('*.db'):
            raise Exception(".db files exist on setUp!")

        # Create test database and table
        database.create_database(TEST_DB)
        database.create_table(TEST_PB, TEST_DB)

        # Add some test records
        add_records()

        self.parser = phonebook.parse()
Example No. 15
 def create_parser(self):
     super(WriterFileManager, self).create_parser()
     self.parser.set('Attributes', 'body', '')
     self.parser.set('Attributes', 'date', 'None')
     self.parser.set('Attributes', 'tags', '()')
     self.parser.set('Attributes', 'attachments', '()')
     self.parser.set('Attributes', 'parent', 'None')
     if not self.database:
         db = join(getcwd(), 'jurnl.sqlite')
         if not exists(db):
             create_database(db)
         self.database = db
Example No. 16
    def setUp(self):
        self.tree = FakeTree()
        self.tree._path_exists[('new', 'db')] = False

        self.db = database.create_database(self.tree, ('new', 'db'))
        self.dbopener = FakeDbOpener()
        self.db._set_dbfileopener(self.dbopener)
Example No. 17
def main():
    database = mysql.connector.connect(host="localhost",
                                       user="******",
                                       passwd="")

    cursor = database.cursor()

    database.create_database()

    cursor.execute("USE me_database")

    etl(cursor)

    create_reports(cursor)

    cursor.close()
Example No. 18
def main():
    # setup
    aba_data = retrieve_aba_data()
    database = create_database()
    amw = dict()
    # compute
    for checklist in database.values():
        for species in checklist.species_list:
            if species not in amw:
                if species not in aba_data.keys():
                    continue
                aba_species_data = aba_data[species]
                amw[species] = AMWSpecies(species,
                                          aba_species_data['aba_code'],
                                          aba_species_data['exotic'],
                                          checklist)
            else:
                amw[species].add_data(checklist)
    for species in amw.values():
        species.final_calc()

    # output
    # amw_sorted = amw  # todo
    amw_out = open('amw.csv', 'w')
    amw_out.write(
        'Species,AMW,ABA Code,Observations,First Seen,Last Seen,Years Since,Loc Code\n'
    )
    for a in amw.values():
        amw_row = a.species + ',' + str(a.amw) + ',' + str(a.aba_code) + ',' + str(a.num_obs) + ',' + str(a.first_date)\
                  + ',' + str(a.last_date) + ',' + str(a.years_since) + ',' + str(a.loc_code) + '\n'
        amw_out.write(amw_row)
    amw_out.close()
Example No. 19
    def monitoring_UI(self, sec):
        #Monitoring Function
        import proc as pr
        import compare as co
        import fprint as fp
        import time
        import database as db
        #Create the MySQL databases if they do not exist
        db.create_database()
        db.create_database_start()
        db.create_database_stop()
        comp = False
        clist_start = []
        clist_stop = []
        prev_date = 0
        self.loop = True
        while self.loop:
            #List all running process
            proclist, date = pr.proc_list()

            if comp:
                #Compare and analyze
                prev_list = db.load_data_by_date(prev_date)
                clist_start, clist_stop = co.compare_process(
                    proclist, prev_list)

                #Print Status Log - to the screen and to StatusLog.txt
                if clist_start:
                    fp.print_list(clist_start)
                    db.insert_data_start(clist_start)
                    '''for item in clist_start[:-2]:
                        self.status.addItem(str(item))'''

                if clist_stop:
                    fp.print_list(clist_stop)
                    db.insert_data_stop(clist_stop)
                    '''for item in clist_stop[:-2]:
                        self.status.addItem(str(item))'''

                if not clist_start: comp = False

            else:
                comp = True
            # Insert All Running Process to database and sleep
            db.insert_data(proclist)
            prev_date = date
            time.sleep(sec)
Example No. 20
def start(path: str,
          database: str = "mydatabase",
          user: str = "postgres",
          password: str = "12345",
          host: str = "127.0.0.1",
          port: str = "5432",
          n: int = 0) -> None:
    """
    Entry point of the service: takes a file path and optional connection
    parameters, creates a PostgreSQL connection and the target database,
    then reads the .csv or .xlsx file and derives column names and types
    from it. If a table with that name already exists, the data is appended;
    otherwise the table is created and the data is inserted.
    :param path: the name of the file, including its path
    :param database: name of the database
    :param user: name of the PostgreSQL user
    :param password: password of the PostgreSQL user
    :param host: host
    :param port: port
    :param n: index of the row containing the headers
    """
    register_adapter(np.int64, psycopg2._psycopg.AsIs)

    connection = create_connection("postgres", user, password, host, port)
    create_database_query = "CREATE DATABASE " + database
    create_database(connection, create_database_query)
    connection = create_connection(database, user, password, host, port)

    table, table_name = read_file(path, n)

    cursor = connection.cursor()
    cursor.execute(
        "select * from information_schema.tables where table_name=%s",
        (table_name, ))
    columns, data, types = preprocess(table)

    if bool(cursor.rowcount):
        insert(columns, data, table_name, connection)
        connection.commit()
    else:
        create_table(types, table_name, connection)
        insert(columns, data, table_name, connection)
        connection.commit()
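
A minimal usage sketch for the start() service above; the file name sales.csv, the database name salesdb, and the connection values are illustrative assumptions, not taken from the original project.

# Hypothetical invocation (all values assumed): load sales.csv into a local
# PostgreSQL database named "salesdb", using row 0 as the header row.
start("sales.csv",
      database="salesdb",
      user="postgres",
      password="12345",
      host="127.0.0.1",
      port="5432",
      n=0)
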
Example No. 21
def choose_option(option):
	if option == 1:
		database.create_database()
	elif option == 2:
		database.show_databases()
	elif option == 3:
		database.used_database()
	elif option == 4:
		database.create_table()
	elif option == 5:
		database.add_column()
	elif option == 6:
		database.show_tables()
	elif option == 9:
		print("Thanks for using BYE!")
		sys.exit()
	else:
		print("Choose number from 1 to 9")
Example No. 22
    def load_all(self):
        #Load all rows from the SQLite process database
        try:
            conn = sqlite3.connect('processList.db')
            cursor = conn.cursor()
            cursor = conn.execute("SELECT * from process")
            self.showDB.setRowCount(0)
            for row_number, row_data in enumerate(cursor):
                self.showDB.insertRow(row_number)
                for column_number, data in enumerate(row_data):
                    self.showDB.setItem(row_number, column_number,
                                        QtWidgets.QTableWidgetItem(str(data)))

            conn.close()
        except db.Error as e:
            conn.close()
            db.create_database()
            print(e)
Example No. 23
def main():
    api_key = config.get(USER_CFG_SECTION, 'api_key')
    api_secret_key = config.get(USER_CFG_SECTION, 'api_secret_key')
    tld = config.get(USER_CFG_SECTION,
                     'tld') or 'com'  # Default Top-level domain is 'com'

    client = Client(api_key, api_secret_key, tld=tld)

    if not os.path.isfile('data/crypto_trading.db'):
        logger.info("Creating database schema")
        create_database()

    set_coins(supported_coin_list)

    migrate_old_state()

    initialize_trade_thresholds(client)

    if get_current_coin() is None:
        current_coin_symbol = config.get(USER_CFG_SECTION, 'current_coin')
        if not current_coin_symbol:
            current_coin_symbol = random.choice(supported_coin_list)

        logger.info("Setting initial coin to {0}".format(current_coin_symbol))

        if current_coin_symbol not in supported_coin_list:
            exit(
                "***\nERROR!\nSince there is no backup file, a proper coin name must be provided at init\n***"
            )
        set_current_coin(current_coin_symbol)

        if config.get(USER_CFG_SECTION, 'current_coin') == '':
            current_coin = get_current_coin()
            logger.info("Purchasing {0} to begin trading".format(current_coin))
            buy_alt(client, current_coin, BRIDGE)
            logger.info("Ready to start trading")

    while True:
        try:
            time.sleep(5)
            scout(client)
        except Exception as e:
            logger.info('Error while scouting...\n{}\n'.format(
                traceback.format_exc()))
Example No. 24
    def __init__(self, driver, user, password, dbname, host,
                 admin_user, admin_password, url_list, report_filename,
                 make_report, be_quiet, web_user, web_password):

        self.mail_parser = MailArchiveAnalyzer()

        self.db = create_database(driver=driver)
        self.db.name = dbname
        self.db.user = user
        self.db.password = password
        self.db.host = host
        self.db.admin_user = admin_user
        self.db.admin_password = admin_password

        # Connect to database if exists, otherwise create it and connect
        self.db.connect()

        # Username and password used to log in when the archives are private
        self.web_user = web_user
        self.web_password = web_password

        # Don't show messages when retrieving and analyzing files
        self.be_quiet = be_quiet

        # URLs to be analyzed
        self.url_list = url_list

        self.__check_mlstats_dirs()

        total_messages = 0
        stored_messages = 0
        non_parsed = 0
        for url in url_list:
            t,s,np = self.__analyze_url(url)

            total_messages += t
            stored_messages += s
            non_parsed += np

        self.__print_output("%d messages analyzed" % total_messages)
        self.__print_output("%d messages stored in database %s" % (stored_messages,self.db.name))
        self.__print_output("%d messages ignored by the parser" % non_parsed)

        difference = total_messages - stored_messages
        if difference == 0 and non_parsed == 0:
            self.__print_output("INFO: Everything seems to be ok.")

        if difference > 0:
            self.__print_output("WARNING: Some messages were parsed but not stored")

        if non_parsed > 0:
            self.__print_output("WARNING: Some messages were ignored by the parser (probably because they were ill-formed messages)")

        if make_report:
            self.__print_brief_report(report_filename)
Example No. 25
def default_values():

    db.create_database()
    clas = [
        c.clas('Guerreiro', 7, 4, 0, 1),
        c.clas('Arqueiro', 4, 3, 0, 7),
        c.clas('Feiticeiro', 4, 2, 7, 2)
    ]

    for cl in clas:
        db.create_class(cl)

    race = [c.race('Human', 5, 5, 5, 5)]

    for ra in race:
        db.create_race(ra)

    player = [c.player('Rubens', 1, 0, 1, 1)]

    for pl in player:
        db.create_player(pl)

    iten = [
        c.item('Machado de Assis',
               'Um machado feito pelos deuses da literatura brasileira', 3, 3,
               0, 0),
        c.item('Espada de São Darwin',
               'Espada feita do primeiro minério descoberto', 3, 3, 0, 0),
        c.item('Cajado de Flamel', 'Cajado abençoado por Aristóteles', 1, 2, 4,
               0),
        c.item(
            'Arco de Sagitário',
            'Signo não influenciam, mas um disparo no peito muda o destino de alguém.',
            1, 2, 0, 3),
        c.item(
            'Crucifixo da Madeira da Cruz de Cristo',
            'Adquirido em uma liquidação papal de indulgências, Lutero condena isso.',
            0, 2, 4, 0)
    ]

    for it in iten:
        db.create_item(it)
Example No. 26
def run():
    response = menu()
    if response == 0:
        events = database.load_csv_data()
        database.create_database()
        database.load_database(events)
    elif response == 1:
        database.display_presenters()
    elif response == 2:
        database.display_events()
    elif response == 3:
        print("Please enter event id")
        event_id = int(input())
        database.display_presenters_for_event(event_id)
    elif response == 4:
        print("Please enter presenter id")
        presenter_id = int(input())
        database.display_events_for_presenter(presenter_id)
    else:
        print("Invalid selection")
Example No. 27
def main():
    token = os.environ.get('SLACK_TOKEN')
    port = os.environ.get('PORT', 8080)
    if not token:
        log.error('Please provide slack token to use this bot')
        exit(1)
    debug = os.environ.get('DEBUG', False)
    if isinstance(debug, str):
        if debug.isdigit():
            debug = debug == '1'
        else:
            debug = debug.lower() != 'false'
    mode = 'development' if debug else 'production'
    log.info('Launching application in %s mode.', mode)

    SlackBot.init(token)
    create_database()
    app = create_web_app(debug)

    log.info('Starting application on %s', port)
    web.run_app(app, port=port)
Example No. 28
    def load_data_by_dateA(self, date):
        #Load all data for a specific date (left pane)
        try:
            conn = sqlite3.connect('processList.db')
            cursor = conn.cursor()
            cursor = conn.execute(
                "SELECT * from process WHERE date==? ORDER BY pid ASC",
                (date, ))
            self.listAwidget.setRowCount(0)
            for row_number, row_data in enumerate(cursor):
                self.listAwidget.insertRow(row_number)
                for column_number, data in enumerate(row_data):
                    self.listAwidget.setItem(
                        row_number, column_number,
                        QtWidgets.QTableWidgetItem(str(data)))

            conn.close()
        except db.Error as e:
            conn.close()
            db.create_database()
            print(e)
Example No. 29
def create_file(database: str = None):
    """Creates the config file for the application. Creates a database named 'jurnl.sqlite' if it does not exist

    :param database: a str path pointing to the database that the config file is initially built for
    """
    if not database:
        database = join(getcwd(), 'jurnl.sqlite')
        if not exists(database):
            create_database(database)
    database = abspath(database)
    name = basename(database)
    if exists(abspath(database)):
        parser = ConfigParser()
        parser['Backup'] = {
            'enabled': 'yes',
            'last backup': 'Never',
            'backup interval': '72',
            'number of backups': '3'
        }
        parser['Filesystem'] = {
            'default database': database,
            'backup location': join(getcwd(), '.backup'),
            'imports': join(getcwd(), 'Imports'),
            'autodelete imports': 'False',
            'exports': join(getcwd(), 'Exports')
        }
        parser['Databases'] = {name.replace('.sqlite', ''): database}
        parser['Notebook'] = {'pages': '[]', 'current': ''}
        parser['Visual'] = {
            'theme': '(dark, green)',
            'dimensions': '(1500, 600)'
        }
        # TODO add option for obscuring system files (read and write in bytes instead of str)
        with open('settings.config', 'w') as f:
            parser.write(f)
            f.close()
    else:
        raise FileNotFoundError(
            'The provided database \'{}\' does not exist.'.format(name))
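
A minimal usage sketch for create_file() above; this call is illustrative and not part of the original project.

# Hypothetical usage (assumed context): with no argument, create_file() falls
# back to ./jurnl.sqlite, creating that database if needed, and then writes
# settings.config in the current working directory.
create_file()
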
Example No. 30
def main():
    database = create_database()
    day_lists = dict()

    for checklist in database.values():
        date = format_date(checklist.date)
        if date in day_lists.keys():
            day_lists[date] = set().union(
                *[day_lists[date], checklist.species_list])
        else:
            day_lists[date] = checklist.species_list

    for day in sorted(day_lists.keys()):
        print(day + '\t' + str(len(day_lists[day])))
Example No. 31
def generate_list(start_date, end_date, list_type='first', regions=None):
    database = create_database(start_date, end_date, regions)
    species_list = {}
    for checklist in database.values():
        for species in checklist.species_list:
            if species not in species_list:
                species_list[species] = ListItem(checklist)
                continue
            if list_type == 'first':
                if checklist.date < species_list[species].date:
                    species_list[species] = ListItem(checklist)
            if list_type == 'last':
                if checklist.date > species_list[species].date:
                    species_list[species] = ListItem(checklist)
    return species_list
Example No. 32
def main():
    config = config_from_xml_file(CONFIG_FILE)

    mysql_config = copy.deepcopy(config['mysql_config'])
    del mysql_config[
        'database']  # databases can't be checked or created if a database is specified
    database_connection = create_database_connection(mysql_config)
    database_cursor = create_database_cursor(database_connection)

    if database_exists(config, database_cursor):
        print("'{}' database already exists.".format(config['database_name']))
    else:
        create_database(config, database_cursor)
    print()

    database_cursor.stop_cursor()
    database_connection.disconnect()

    database_connection = create_database_connection(config['mysql_config'])
    database_cursor = create_database_cursor(database_connection)

    for table in config['tables'].keys():
        if table_exists(table, database_cursor):
            print("'{}' table already exists.".format(table))
        else:
            create_mysql_table(config, table, database_connection,
                               database_cursor)
            print("'{}' table has been created.".format(table))
            print("Populating '{}' table with data......".format(table))
            load_data_into_mysql_table(config, table, database_connection,
                                       database_cursor)
            print("Population of '{}' table has been completed".format(table))
            print()

    database_cursor.stop_cursor()
    database_connection.disconnect()
Example No. 33
def compute_species_values(aba_data):
    database = create_database()
    species_values = dict()
    max_years_since = 0
    # aggregate data
    for checklist in database.values():
        for species in checklist.species_list:
            if species not in species_values:
                if species not in aba_data.keys():
                    continue
                aba_species_data = aba_data[species]
                species_values[species] = Species(species,
                                                  aba_species_data['aba_code'],
                                                  aba_species_data['exotic'],
                                                  checklist)
            else:
                species_values[species].add_data(checklist)
    # compute years since
    for species in species_values.values():
        max_years_since = max(max_years_since, species.compute_years_since())
    for species in species_values.values():
        species.compute_value()
    return species_values, max_years_since
Example No. 34
def main():
    cleanup_data.main()
    database = create_database()
    threshold = 550
    interval = 365
    start_date = datetime.datetime(2013, 1, 1)
    end_date = datetime.datetime.today() - datetime.timedelta(days=interval)
    date = start_date
    while date < end_date:
        interval_list = []
        for sid in database.keys():
            checklist = database[sid]
            if checklist.state[:2] != 'US' and checklist.state[:2] != 'CA':
                continue
            if date <= checklist.date < (date +
                                         datetime.timedelta(days=interval)):
                for species in checklist.species_list:
                    if species not in interval_list:
                        interval_list.append(species)

        if len(interval_list) >= threshold:
            print(date, len(interval_list))

        date += datetime.timedelta(days=1)
Example No. 35
            self.email = email
        if password is not None:
            self.password = password
        try:
            with open('Tableau.db'):
                con = lite.connect('Tableau.db')
                cursor = con.cursor()
                cursor.execute('''UPDATE users SET fname=?,lname=?,bday=?,bmonth=?,byear=?,email=?,password=? WHERE username=?''',
                               (self.fname,self.lname,self.bday,self.bmonth,self.byear,self.email,self.password,self.username))
                con.commit()
        except IOError:
            users[self.username] = self
            print("file does not exist yet")

if not os.path.exists('Tableau.db'):
    database.create_database()
    create_user("Timothy","Dawborn",15,5,1997,"*****@*****.**","timothy","timothydawborn")
         
if __name__ == "__main__":
    try:
        with open('Tableau.db'):
            con = lite.connect('Tableau.db')
            cursor = con.cursor()
            cursor.execute('''DELETE FROM users''')
            con.commit()
    except IOError:
        print("file does not exist yet")

    print("------------------------testing create user-----------------")
    create_user("lochlann","andrews",15,5,1997,"*****@*****.**","lochlanna","lochlannpword")
    print("------------------------should print a dict containing user-----------------")
Example No. 36
 def make_db(self):
     if not os.path.isfile(self.game_database):
         db.create_database(self.game_database)
Example No. 37
exitProgram = False
if len(sys.argv) != 2 :
    print("Please enter database that you would like to open as a " \
          "console argument")
    exitProgram = True
else :
    tmpStr = sys.argv[1].split("/")
    dirStr = ""
    for index in range(0,len(tmpStr)-1) :
        dirStr += tmpStr[index] + "/"
    if os.path.isdir(dirStr) == False :
        print("Path to database doesn't exist")
        exitProgram = True
    else :
        if os.path.isfile(sys.argv[1]) == False :
            database.create_database(sys.argv[1])
        conn = sqlite3.connect(sys.argv[1])
        cur = conn.cursor()

while exitProgram == False :
    print(menuCommands.introString)
    userInput = input("Selection: ")
    if userInput == "1" :
        # Execute code for adding a new ticker
        print("\n")
        print("Adding new stock for tracking")
        foundStock = False
        while foundStock == False :
            print("Enter 'quit' to exit to main menu")
            userInput = input("New stock: ")
            if userInput == "quit" :
Example No. 38
base = '/data/alstottj/Langley/'
from numpy import asarray, isnan, median, unique
from scipy.stats import skew, kruskal, ks_2samp

#from rpy2 import robjects
from rpy2.robjects.packages import importr
from rpy2.robjects.vectors import FloatVector
stats = importr('stats')

import database as db
from Helix_database import Session, database_url
session = Session()
db.create_database(database_url)

import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
plots = PdfPages(base+'Langley_distributions.pdf')

import powerlaw

dependents = ['Number_of_Children', 'Parent_Child_Registration_Interval_Corrected', 'Distance_from_Parent', 'Has_Children', 'Has_Parent']
independents = ['Age', 'Gender', 'Relationship_with_Parent', 'Heard_Through_Medium', 'Same_Age_as_Parent',\
        'Same_City_as_Parent', 'Same_Country_as_Parent', 'Same_Gender_as_Parent', 'Same_Relationship_to_Parent_as_They_Had_to_Their_Parent',\
        'Heard_Through_Same_Medium_as_Parent', 'Has_Parent']


#robjects.r("data<-read.table('%s')"%(base+'LangleyRtable'))
#robjects.r("attach(data)")

for d in dependents:
    print d
Example No. 39
def handle_mssg(mssg):

    # Unexpected bit: 
    # When the bot sends a message, this function is called. If the sender is 
    # itself, quit, to avoid an infinite loop.
    if mssg['sender_email'] == client.email:
        return

    print 'Message received!'

    content = mssg['content'].strip()

    first = content.split()[0]
    strip_len = len(first)

    while first.endswith(':'):
        strip_len += 1
        first = first[:-1]

    content = mssg['content'][strip_len:]
    content = content.lstrip()

    # First, assume the message is a "ssh" request.
    shh = first
    if shh.lower() == 'shh' or shh.lower() == 'ssh': # Allow for the ssh pun...because it's fun
        anon_shh(stream)
        return

    # Next, assume first is the recipient of an anonymous message
    recipient_email = first
    if recipient_email in emails:
        sender_email = mssg['sender_email']

        # Generate code, send it to person
        code = gen_new_relay_code()

        if not database.database_exists(filename):
            print 'Database %s doesn\'t yet exist. Creating it now...'%(filename)
            database.create_database(filename)
        if not database.table_exists(table, filename):
            # TODO: Put this block of code into the database module as the create_table function
            # The current problem is that I need a way of passing a tuple of strings, and
            # I'm not sure how to securely do this...
            # database.create_table(cols, table, database) where cols = (col1, col2)
            with sqlite3.connect(filename) as con:
                c = con.cursor()
                c.execute("CREATE TABLE %s (code TEXT, email TEXT)"%(table))

        database.add_record((code, sender_email), table, filename)

        end_content = '\n\nThis sender\'s key is %s. To reply, send a message in the following format:\n' %(code)
        end_content += '%s: <your_message>' %(code)
        content += end_content
        send_anon_pm(recipient_email, content)
        send_anon_pm(sender_email, 'Anon message sent to %s' %(recipient_email))
        return

    # Next, assume first is the code of a relay message
    code = first
    email_match = None
    if database.database_exists(filename):
        email_match = search_for_code(code)
    if email_match:
        sender_email = mssg['sender_email']
        content = 'Response from %s:\n'%(sender_email) + content
        end_content = '\nTo reply anonymously, send a message in the following format:\n'
        end_content += '"%s: <your_message>" (without the quotes)' %(sender_email)
        content += end_content
        send_anon_pm(email_match, content)
        send_anon_pm(sender_email, 'Your reply was sent to %s.' %(code))
        return

    # All assumptions exhausted, reply with help message.
    sender_email = mssg['sender_email']
    send_anon_pm(sender_email, get_help_response())