def upload_data(ui):
    """Upload the currently selected dataset CSV to the S3 dataset bucket.

    Keeps the machine awake with caffeine for the duration, avoids key
    collisions by appending a numeric suffix to the file name, and on
    success records the resulting S3 path on ``ui.dataset_path``.
    Returns the status string from the upload helper ('failure' on error).
    """
    caffeine.on(display=False)
    dataset_type = ui.current_dataset_type
    selected_csv = ui.selected_csv[dataset_type]
    s3_resource = ui.session.resource('s3')

    existing_keys, status = list_objects_in_dataset_bucket(
        ui.session, ui.region, ui.s3_bucket_name, ui.dataset_group_name)
    if status == 'failure':
        caffeine.off()
        return status

    # NOTE(review): this prefix starts with '/', while the upload key built
    # below does not — confirm list_objects_in_dataset_bucket returns keys
    # in the same form before relying on the collision check.
    prefix = "/" + ui.dataset_group_name + "/datasets/" + dataset_type.lower() + '_'
    base_name = os.path.basename(selected_csv)
    candidate = base_name
    suffix = 0
    # Bump a numeric suffix until the key is absent from the bucket listing.
    while prefix + candidate in existing_keys:
        suffix += 1
        candidate = os.path.splitext(base_name)[0] + '_' + str(suffix) + '.csv'

    status = multi_part_upload_with_s3(
        s3_resource, ui.s3_bucket_name,
        ui.dataset_group_name + '/' + "datasets/" + dataset_type.lower() +
        '_' + candidate,
        os.path.join(ui.tmp_dir_name, "tmp_dataset.csv"),
        ui.upload_progress_bar)
    if status != 'failure':
        ui.dataset_path[dataset_type] = ui.s3_bucket_path + prefix + candidate
    caffeine.off()
    return status
Example #2
0
 def run(self):
     """Run the full workout: greet the user, show the plan, then execute
     each section in order, resting (untimed) before every section.

     The machine (and display) are kept awake for the whole run.
     """
     caffeine.on(display=True)
     self.welcome()
     self.print_info()
     for workout_section in self.sections:
         untimed_rest()
         workout_section.run()
     caffeine.off()
Example #3
0
File: __init__.py  Project: jvansan/nb_util
def do_parallel(fn, loop, n_jobs=-1, progress=True, keep_alive=True):
    """Apply *fn* to every item of *loop* in parallel with joblib.

    Args:
        fn: callable applied to each item.
        loop: iterable of items.
        n_jobs: joblib worker count (-1 = all cores).
        progress: wrap the iterable in a tqdm progress bar.
        keep_alive: if possible, prevent the machine from sleeping
            (caffeine) while the jobs run.

    Returns:
        List of results in input order.
    """
    if progress:
        loop = tqdm(loop)
    # Bug fix: the original hard-coded n_jobs=-1 here, silently ignoring
    # the caller's n_jobs argument.
    with joblib.Parallel(n_jobs=n_jobs) as par:
        if keep_alive and can_caffeinate():
            caffeine.on(display=True)
            try:
                res = par(joblib.delayed(fn)(l) for l in loop)
            finally:
                # Always restore normal sleep behaviour, even on error.
                caffeine.off()
        else:
            res = par(joblib.delayed(fn)(l) for l in loop)
    return res
Example #4
0
    def osx_standby(self, system=True, display=True):
        """Toggle macOS sleep prevention via the optional caffeine library.

        system=True restores normal standby (caffeine off); system=False
        keeps the machine awake, with *display* controlling whether the
        screen is kept on as well.  Failures are logged, never raised.
        """
        try:
            if system:
                caffeine.off()
            else:
                caffeine.on(display)

        # caffeine may not have been imported at all.
        except NameError:
            self.log_warning(_("Unable to change power state"), _("caffeine lib not found"))

        # Py3-compatible exception syntax (was the Py2-only `except Exception, e`),
        # matching the other copy of this method in the codebase.
        except Exception as e:
            self.log_warning(_("Unable to change power state"), e)
Example #5
0
    def osx_standby(self, system=True, display=True):
        """Enable or release macOS sleep prevention (caffeine library).

        With system=True the sleep lock is released; with system=False the
        machine is kept awake and *display* decides whether the screen
        stays on too.  Any failure is only logged as a warning.
        """
        try:
            caffeine.off() if system else caffeine.on(display)

        except NameError:
            self.log_warning(_("Unable to change power state"),
                             _("caffeine lib not found"))

        except Exception as e:
            self.log_warning(_("Unable to change power state"), e)
Example #6
0
     # NOTE(review): fragment of a larger send-scheduling loop — the
     # enclosing `while` and the `if` that this `break`/`else` belong to
     # are outside this excerpt.
     # Breaks while loop
     break
 # Else statement for when time does not match desired send time
 else:
     # Checks "currentTime" to see if it's safe to set the amount of time to sleep to 1 hour
     if currentTime.hour < (sendHr - 1) or currentTime.hour > sendHr:
         # Checks that platform is Darwin-based since caffeine is a Mac command
         if platform.system() == "Darwin":
             print(
                 "Running 'caffeine' to prevent the system from sleeping.")
             # Adds 1 to "caffeineHr" every time caffeine runs before time.sleep(3600)
             caffeineHr += 1
             logging.debug("Caffeine has run " + str(caffeineHr) +
                           " times.")
             # Keeps computer awake but allows display to sleep
             caffeine.on(display=False)
         time.sleep(3600)
     # Program sleeps only for 1 minute per loop if time before "currentTime" ≤1hr
     else:
         if platform.system() == "Darwin":
             print(
                 "Running 'caffeine' to prevent the system from sleeping.")
             # Adds 1 to "caffeineMin" every time caffeine runs before time.sleep(60)
             caffeineMin += 1
             logging.debug("Caffeine has run " + str(caffeineMin) +
                           " times.")
             # Keeps computer awake but allows display to sleep
             caffeine.on(display=False)
         time.sleep(60)
     # Restarts while loop
     continue
#required installs (i.e. pip3 install in terminal): pandas, selenium, bs4, and possibly chromedriver(it may come with selenium)
#Download Chromedriver from: https://chromedriver.chromium.org/downloads
#To see what version to install: Go to chrome --> on top right click three dot icon --> help --> about Google Chrome
#Move the chrome driver to (/usr/local/bin) -- open finder -> Command+Shift+G -> search /usr/local/bin -> move from downloads

from selenium import webdriver
from bs4 import BeautifulSoup as bs
import time
from datetime import datetime
import pandas as pd
import random
import openpyxl
import caffeine
from openpyxl import load_workbook
caffeine.on(display=True)  # keep the Mac (and display) awake for the whole session
import tkinter as tk
import threading

# In[2]:

# Main application window for the login form
root = tk.Tk()

# Tk string variables backing the form entry fields defined later
tk_page = tk.StringVar()
tk_username = tk.StringVar()
tk_password = tk.StringVar()

# Fixed-size canvas giving the window its size and green background
canvas = tk.Canvas(root, height=300, width=400, bg='#49694b')
canvas.pack()
Example #8
0
def _poll_until_done(describe, **kwargs):
    """Poll *describe*(**kwargs) every 10 s until the resource leaves the
    creating state; return the final 'Status' ('ACTIVE' or 'CREATE_FAILED')."""
    response = describe(**kwargs)
    while response['Status'] not in ('ACTIVE', 'CREATE_FAILED'):
        time.sleep(10)
        response = describe(**kwargs)
    return response['Status']


def create_predictor_and_forecast(ui):
    """Create an Amazon Forecast predictor, forecast and forecast export,
    then register the exported data with AWS Glue (database + crawler).

    All configuration is read from the *ui* object.  Returns 'success' or
    'failure'; the machine is kept awake (caffeine) for the full run and
    any unexpected exception is reported via error() rather than raised.
    """
    try:
        # Prevent the machine from sleeping during the long-running jobs.
        caffeine.on(display=False)

        # One text field supplies all three resource names.
        forecast_export_name = forecast_name = predictor_name = \
            ui.new_forecast_line_edit.text()
        forecast_horizon = ui.predictor_forecast_horizon_spinbox.value()
        forecast_client = ui.forecast_client
        dataset_group_arn = ui.dataset_group_arn
        auto_ml = ui.auto_ml_checkbox.isChecked()

        # Optional supplementary feature: the selected country's holidays.
        holidays = None
        if ui.country_checkbox.isChecked():
            holidays = selected_country(ui)

        # Whether 'location' is a dimension in the datasets.
        location_in_datasets, status = is_location_in_datasets(
            forecast_client, dataset_group_arn)
        if status == 'failure':
            return status

        # An explicit algorithm is only used when AutoML is off.
        algorithm = None
        if not auto_ml:
            algorithm = ui.algorithms[ui.algorithm_combobox.currentText()]
            print(algorithm)

        forecast_frequency = combobox_to_freq(ui, 'FORECAST')

        # --- Predictor --------------------------------------------------
        predictor_arn, status = create_predictor(
            forecast_client, predictor_name, algorithm, auto_ml,
            forecast_horizon, forecast_frequency, dataset_group_arn,
            location_in_datasets, holidays)
        if status == 'failure':
            return status
        if _poll_until_done(forecast_client.describe_predictor,
                            PredictorArn=predictor_arn) == 'CREATE_FAILED':
            return 'failure'

        # --- Forecast ---------------------------------------------------
        forecast_arn, status = create_forecast(forecast_client, forecast_name,
                                               predictor_arn)
        if status == 'failure':
            return status
        if _poll_until_done(forecast_client.describe_forecast,
                            ForecastArn=forecast_arn) == 'CREATE_FAILED':
            return 'failure'

        s3_path = ui.s3_bucket_path + "/" + ui.dataset_group_name + "/forecast/" + forecast_name + "/"
        s3_role_arn = ui.s3_role_arn

        # --- Forecast export --------------------------------------------
        forecast_export_arn, status = create_forecast_export(
            ui.forecast_client, forecast_export_name, forecast_arn, s3_path,
            s3_role_arn)
        if status == 'failure':
            return status
        # Bug fix: the original re-used the *forecast* describe response
        # here (already 'ACTIVE'), so it never actually waited for the
        # export job to finish.
        if _poll_until_done(
                forecast_client.describe_forecast_export_job,
                ForecastExportJobArn=forecast_export_arn) == 'CREATE_FAILED':
            return 'failure'

        # Give S3 a moment so the exported files are visible (usually immediate).
        time.sleep(30)

        # --- Glue: database, crawler, crawl -----------------------------
        status = create_database(ui.glue_client)
        if status == 'failure':
            return status

        status = create_crawler(ui.glue_client, s3_role_arn, s3_path,
                                forecast_name)
        if status == 'failure':
            return status

        status = run_crawler(forecast_name, ui.glue_client)
        if status == 'failure':
            return status

        return 'success'
    except Exception as e:
        error(str(e))
        return 'failure'
    finally:
        # Always restore normal sleep behaviour, whatever the outcome.
        caffeine.off()
Example #9
0
def main():
    """Ansible dynamic inventory backed by an Odoo server.

    Emits either the full group inventory (--list) or per-host variables
    (--host) as JSON on stdout.  Connection details can be given on the
    command line, saved (--save) or loaded from a named session (--load).
    """
    # Best effort: keep the machine awake while talking to Odoo.  The
    # caffeine lib is optional (macOS-only), so import failures are ignored.
    try:
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            import caffeine
            caffeine.on(display=False)
    except (ImportError, OSError):
        pass

    parser = argparse.ArgumentParser()
    ansible_inventory_args = parser.add_argument_group(
        'Ansible Inventory args')
    ansible_inventory_args.add_argument(
        '--list',
        action='store_true',
        help="output a JSON encoded hash/dictionary of all the groups to be "
        "managed to stdout")
    ansible_inventory_args.add_argument(
        '--host',
        help="hash/dictionary of variables to make available to templates "
        "and playbooks")
    odoo_args = parser.add_argument_group('Odoo connection args')
    odoo_args.add_argument('--save',
                           help='save the login details for further usage')
    odoo_args.add_argument('--load',
                           default='apertoso',
                           help='Load the login details from earlier save')
    odoo_args.add_argument('--username')
    odoo_args.add_argument('--password')
    odoo_args.add_argument('--hostname')
    odoo_args.add_argument('--database')
    odoo_args.add_argument('--protocol',
                           choices=('jsonrpc', 'jsonrpc+ssl'),
                           default='jsonrpc+ssl')
    odoo_args.add_argument('--port', default='8069')
    odoo_args.add_argument('--list-odoo', help='show saved sessions')
    args, options = parser.parse_known_args()

    if args.list_odoo:
        odooconnector.OdooConnector.list()
        sys.exit(1)

    odoo = odooconnector.OdooConnector()

    # Explicit credentials on the command line win over a saved session.
    if all((args.username, args.password, args.database, args.hostname)):
        odoo.connect(args.username, args.password, args.database,
                     args.hostname, args.protocol, args.port)

    if args.save:
        odoo.save_login_session(args.save)
    elif args.load:
        odoo.connect_saved(args.load)

    if args.host:
        instance_data = odoo.search_and_get_data([('fqdn', '=', args.host)],
                                                 limit=1)
        instance_data = instance_data[0]
        remove_null_values(instance_data)
        ansible_vars = get_host_vars(instance_data)
        # print() call form: identical output on Python 2, valid on Python 3
        # (the original used the Py2-only print statement).
        print(json.dumps(
            ansible_vars,
            sort_keys=True,
            indent=4,
            separators=(',', ': '),
        ))

    else:
        instance_datas = odoo.search_and_get_data()
        ansible_inventory = create_ansible_inventory(instance_datas)
        # Explicit loop instead of map(): map() is lazy on Python 3 and
        # would never run remove_null_values there.
        for data in instance_datas:
            remove_null_values(data)
        print(json.dumps(
            ansible_inventory,
            sort_keys=True,
            indent=4,
            separators=(',', ': '),
        ))
Example #10
0
from functions import *
import sys
from agent.agent import Agent
from action import Action
import caffeine

if len(sys.argv) != 4:
    print('Usage: python3 train.py [stock] [window] [episodes]')
    exit()

caffeine.on(display=False)  # keep the computer from sleeping during training
ticker, window_size, episode_count = sys.argv[1], int(sys.argv[2]), int(
    sys.argv[3])
init_cash = 1000000  # starting cash for the trading simulation
# paths for the training checkpoint and the saved model
c_path = "models/{}/training.ckpt".format(ticker)
m_path = "models/{}/model.h5".format(ticker)
# fetch historical price data
df = pdr.DataReader('{}'.format(ticker),
                    'yahoo',
                    start='2018-1-1',
                    end='2019-1-1')
unit = get_unit(df['Close'].mean(), init_cash)  # currently always trades a fixed unit size
trading = Action(unit)
# consolidate / transform the raw data
data = init_data(df)
# input dimensions for initialising the agent
input_shape, neurons = get_shape(data[:window_size + 1], window_size)
agent = Agent(ticker, input_shape, neurons, c_path)

l = len(data) - 1
Example #11
0
        # NOTE(review): fragment of a larger method — it starts mid-body and
        # is cut off on the final `except:` clause below.
        with open(namesPath, 'r', encoding="utf-8"
                  ) as data_file:  # Prepare the names list for a run
            csv_reader = csv.reader(data_file)
            names = []
            for line in csv_reader:
                try:
                    name = line[0]
                    names.append(name)
                # NOTE(review): bare except silently drops malformed/empty
                # rows — consider catching IndexError explicitly.
                except:
                    pass

        time.sleep(4)

        count = 0

        caffeine.on(display=True)  # Prevents the screen from sleeping on macOS

        for name in names:  # Send the message to everyone in the names list

            firstName = name.split(' ', 1)[0]
            if (msgBase.find('%s') != -1):  # Personalise the template with the first name
                msg = msgBase % firstName
            else:
                msg = msgBase

            try:  # Try sending the message
                sendMsg(msg, name, driver)  # Send message
                count = count + 1
            except common.exceptions.NoSuchElementException as e:  # Didn't find the "Back" button.
                pass
            except:  # Click the "Back" button.