Example No. 1
def send_sftp():

    connect = create_connection()

    # date = dt.datetime.now()
    # date_str = "{}_{}_{}_{}_{}_balance.csv".format(date.hour, date.minute, date.day, date.month, date.year)
    time_sent = int(time.time())
    date_str = "balance_{}.csv".format(time_sent)

    localpath = 'res/balance.csv'
    remotepath = './uploads/{}'.format(date_str)

    try:
        connect.put(localpath, remotepath=remotepath)
    except Exception as e:
        logger.info(e)
        print(e)
        logger.warning('error in sending file over sftp.')
        logger.info('exiting. retry next balance.')
        sys.exit()

    connect.close()
    logger.info('file sent.')
    #SAVE TIME SENT FOR CHECKING STATUS
    sftp_time = {
        'time_sent': time_sent,
        'filename': date_str,
        'processed': False
    }
    write_json('res/sftp_time.json', sftp_time)
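
Both create_connection and write_json are project helpers that this listing never shows. A minimal sketch, assuming pysftp for the SFTP session and a plain json.dump for write_json; the host and credentials are placeholders, not the project's real values:

import json
import pysftp

def create_connection():
    # Placeholder host/credentials; the real values presumably come from config.
    return pysftp.Connection('sftp.example.com', username='user', password='secret')

def write_json(filepath, obj):
    # Persist a dict as JSON; keyword arguments also work, so the
    # obj=/filepath= call style in later examples fits this signature too.
    with open(filepath, 'w') as f:
        json.dump(obj, f, indent=4)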
Example No. 2
def launch_driver():

	#INIT DRIVER
	driver = get_driver()

	login(driver)

	#LOAD DATA FROM BALANCE
	beam_balance_data = json.load(open('res/beam_balance_data.json'))
	beam_map = json.load(open('res/beam_map.json'))

	#NEEDED FOR SEARCHING FOR CASE

	case_numbers = {}

	count = 0

	curr_time = int(time.time())

	for beam in beam_balance_data:
		count += 1

		#ONLY OPEN CASES FOR BEAMS WITH LOGOFFS OR REDIRECTS
		if beam_balance_data[beam]['logoffs'] or beam_balance_data[beam]['redirects']:
			
			goto_search(driver)
			logger.info(beam)
			search(driver, beam_map[beam]['viasat'])
			case = open_case(driver, beam, beam_balance_data[beam])
			logger.info(case)
			case_numbers[case] = {
				'beam': beam,
				'status': 'new',
				'timestamp': curr_time,
				'check': False,
				'terminals': get_terminal_list(beam_balance_data[beam])
			}
			submit_case(driver)

	write_json('res/open_cases.json', case_numbers)


	driver.quit()
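
get_driver and login are likewise unshown. A rough sketch of get_driver, assuming Selenium with headless Chrome; the options are illustrative, not the project's actual setup:

from selenium import webdriver
from selenium.webdriver.chrome.options import Options

def get_driver():
    # Headless Chrome keeps the case-opening run non-interactive.
    options = Options()
    options.add_argument('--headless')
    return webdriver.Chrome(options=options)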
Example No. 3
    def create_single_calculation_job(self,
                                      individual: List[float],
                                      generation: int = None) -> None:
        """

        :param individual:
        :param generation:
        :return:
        """

        optimization_task = self.query_current_optimization_task()

        calculation_data = self.apply_individual(individual)

        calculation_id = self.create_unique_id()

        # Hash the payload so identical calculation inputs share a hash_id
        hash_id = md5(json.dumps(calculation_data).encode("utf-8")).hexdigest()

        calculation_data_filepath = create_input_and_output_filepath(
            folder=Path(OPTIMIZATION_DATA, self.current_optimization_id),
            task_id=hash_id,
            file_types=[CALC_INPUT_EXT])[0]  # helper returns a list; take the single path, as in Example No. 6

        new_calc_task = self.current_ct(
            author=optimization_task.author,
            project=optimization_task.project,
            optimization_id=optimization_task.optimization_id,
            calculation_id=calculation_id,
            hash_id=hash_id,
            calculation_type=optimization_task.optimization_type,
            calculation_state=CALCULATION_START,  # Set state to start
            generation=generation,
            calculation_data_filepath=calculation_data_filepath)

        write_json(obj=calculation_data, filepath=calculation_data_filepath)

        self.session.add(new_calc_task)
        self.session.commit()
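
create_input_and_output_filepath reappears in Example No. 6, where its result is indexed with [0] and the comment says it creates a folder named after the task id. A sketch under those assumptions; the exact layout is a guess:

from pathlib import Path
from typing import List

def create_input_and_output_filepath(folder, task_id, file_types) -> List[str]:
    # Create a folder named after task_id inside `folder`, then return one
    # filepath per requested extension.
    task_folder = Path(folder, task_id)
    task_folder.mkdir(parents=True, exist_ok=True)
    return [str(Path(task_folder, task_id + file_type)) for file_type in file_types]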
Example No. 4
def send_email():

    #SAVE TIME SENT FOR CHECKING STATUS
    email_time = {'time_sent': int(time.time())}
    write_json('res/email_time.json', email_time)

    #LOGIN / PASSWORD
    email_config = get_email_config()

    #CONSTRUCT EMAIL TO SEND
    msg = MIMEMultipart()
    msg['Subject'] = "Load Balancing"
    msg['From'] = email_config['username']
    msg['To'] = '*****@*****.**'

    logger.info("preparing email to {0}".format(msg['To']))

    #CREATE CSV FROM BALANCE DATA
    create_csv()

    #ADD CSV AS ATTACHMENT
    part = MIMEBase('application', "octet-stream")
    with open("res/balance.csv", "rb") as csv_file:
        part.set_payload(csv_file.read())
    encoders.encode_base64(part)  # encode for safe transport; needs: from email import encoders
    part.add_header('Content-Disposition',
                    'attachment; filename="Load_Balance.csv"')

    msg.attach(part)

    #CONNECT TO SERVER AND SEND
    server = smtplib.SMTP('smtp-mail.outlook.com', 587)
    server.starttls()
    server.login(email_config['username'], email_config['password'])
    text = msg.as_string()
    server.sendmail(msg['From'], msg['To'], text)
    server.quit()
    logger.info("email sent.")
Example No. 5
def get_all_beam_info(beam_balance_data, iterations):

    iterations['iterations'] += 1

    for beam in beam_balance_data:

        if 'terminal_status' not in beam_balance_data[beam]:
            terminals = get_terminal_list(beam_balance_data[beam])
            terminal_status = check_status(terminals)
            beam_balance_data[beam]['terminal_status'] = terminal_status

            logger.info(beam)
            logger.info("Requesting RL")
            actual_rl = request_beam_capacity(beam, 'RL')
            logger.info("Requesting FL")
            actual_fl = request_beam_capacity(beam, 'FL')

            beam_balance_data[beam]['actual_rl'] = actual_rl
            beam_balance_data[beam]['actual_fl'] = actual_fl

        beam_balance_data[beam]['iterations'] = iterations['iterations']

    allot_limits = json.load(open('config/allot_limits.json'))

    #RAW CAPACITY DOES NOT REFLECT ALLOT BANDWIDTH; CONVERT TO PERCENT OF THE ALLOT LIMIT
    for beam in allot_limits:
        if "RL" in allot_limits[beam]:
            beam_balance_data[beam]['actual_rl'] = (
                beam_balance_data[beam]['actual_rl'] /
                allot_limits[beam]["RL"]) * 100
        if "FL" in allot_limits[beam]:
            beam_balance_data[beam]['actual_fl'] = (
                beam_balance_data[beam]['actual_fl'] /
                allot_limits[beam]["FL"]) * 100

    write_json('res/iteration.json', iterations)
    write_json('data/balance_data.json', beam_balance_data)
    write_json('res/beam_balance_data.json', beam_balance_data)

    script_config = json.load(open('config/script_config.json'))

    if script_config['write_to_db']:
        logger.info('writing data.')
        write_data()
    else:
        logger.info('configured to skip writing data.')
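
The normalization above assumes config/allot_limits.json maps beam names to per-direction limits; a hypothetical shape, with invented beam names and values:

# Hypothetical contents of config/allot_limits.json, shown as a Python dict:
allot_limits = {
    "beam_east_1": {"RL": 250.0, "FL": 500.0},  # same units as request_beam_capacity
    "beam_west_2": {"RL": 120.0},               # no FL key, so FL is left as-is
}
# A beam reporting actual_rl == 125.0 against an RL limit of 250.0 is
# rewritten as 50.0, i.e. percent of the Allot limit.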
Example No. 6
def upload_file():
    # This is our most important path
    if request.method == 'POST':
        # https://stackoverflow.com/questions/46136478/flask-upload-how-to-get-file-name
        # See if there's a file in our selection field
        if not request.files.get('file', None):
            return render_template('upload.html', error="No file selected!")

        file_upload = request.files["file"]
        request_data = json.load(file_upload)

        schema_upload = load_json(JSON_SCHEMA_MODFLOW_OPTIMIZATION)

        try:
            validate(instance=request_data,
                     schema=schema_upload)

        except ValidationError as e:
            error = {
                "message": f"Validation failed. {e.message}",
                "code": e.validator,
                "schemapath": e.schema_path
            }

            return render_template('upload.html', error=error)

        except SchemaError as e:
            return render_template('upload.html', error=str(e))

        author = request_data.get("author", "unknown")
        project = request_data.get("project", "unknown")
        optimization_id = request_data["optimization_id"]
        optimization_state = request_data["type"]

        method = request_data["optimization"]["parameters"]["method"]
        population_size = request_data["optimization"]["parameters"]["pop_size"]
        total_generation = request_data["optimization"]["parameters"]["ngen"]

        # Create folder named after task_id in optimization_data folder
        data_filepath = create_input_and_output_filepath(folder=OPTIMIZATION_DATA,
                                                         task_id=optimization_id,
                                                         file_types=[DATA_FILE])[0]

        optimizationtask = OptimizationTask(
                                author=author,
                                project=project,
                                optimization_id=optimization_id,
                                optimization_type=method,
                                optimization_state=optimization_state,  # Input: "optimization_start"
                                total_population=population_size,
                                total_generation=total_generation,
                                solution=dict(),
                                data_filepath=data_filepath
                            )

        try:
            write_json(obj=request_data,
                       filepath=data_filepath)

            Session.add(optimizationtask)

            Session.commit()
        except (UnicodeDecodeError, IOError):
            rmtree(Path(OPTIMIZATION_DATA, optimization_id))
            # Path(opt_filepath).unlink()
            Path(data_filepath).unlink()

            Session.rollback()

            return abort(400, "Error: task couldn't be created!")

        return redirect(f"/optimization")  # /{optimization_id}

    if request.method == 'GET':
        if request.content_type == "application/json":
            return json.dumps({
                'message': "test"
            })
        return render_template('upload.html')
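
The validation step above relies on jsonschema. A self-contained illustration of the same pattern, with a schema and payload invented for the example:

from jsonschema import validate, ValidationError

schema = {
    "type": "object",
    "properties": {
        "optimization_id": {"type": "string"},
        "type": {"type": "string"},
    },
    "required": ["optimization_id", "type"],
}

try:
    validate(instance={"optimization_id": "abc123"}, schema=schema)
except ValidationError as e:
    print(f"Validation failed. {e.message}")  # 'type' is a required property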
Example No. 7
def launch_balance_status_sftp():

    #TIMESTAMP OF TIME BALANCE FILE WAS SENT TO SERVER
    sftp_time = json.load(open('res/sftp_time.json'))

    #IF 10 MINUTES HAS PASSED CHECK THE STATUS
    curr_time = int(time.time())
    if not sftp_time['processed']:

        # #CHECKS IF FILE HAS BEEN PROCESSED
        # processed_files = ls_sftp('processed/')
        # if sftp_time['filename'] in processed_files:
        # 	logger.info('csv file has been processed.')
        # 	sftp_time['processed'] = True
        # 	sftp_time['time_processed'] = curr_time
        # 	write_json('res/sftp_time.json', sftp_time)
        # else:
        # 	logger.info('csv file not yet processed.')
        # 	sys.exit()

        #CHECKS LOG FILE FOR PROCESS
        get_sftp()
        with open('logs/processor.log') as processor_log:
            for line in processor_log:
                if sftp_time['filename'] in line and "processed" in line:
                    logger.info('csv file has been processed.')
                    sftp_time['processed'] = True
                    sftp_time['time_processed'] = curr_time
                    write_json('res/sftp_time.json', sftp_time)
                    break

        if not sftp_time['processed']:
            logger.warning('csv file not yet processed.')
            sys.exit()
            # #ONLY WAIT UP TO 25 MINUTES FOR FILE TO PROCESS
            # if curr_time - sftp_time['time_sent'] > (25*60)-15:
            # 	logger.info('max wait time exceeded')

            # 	if exists_sftp('uploads/{}'.format(sftp_time['filename'])):
            # 		logger.info('{} exists.'.format(sftp_time['filename']))
            # 		logger.info('removing file to perform new balance.')
            # 		rm_sftp('uploads/{}'.format(sftp_time['filename']))
            # 	else:
            # 		logger.info('{} does not exist.'.format(sftp_time['filename']))

            # 	write_json('res/sftp_time.json', {})
            # 	logger.info('exiting')
            # 	sys.exit()
            # else:
            # 	logger.info('waiting for {} to be processed.'.format(sftp_time['filename']))

    if (curr_time - sftp_time['time_processed']) >= (60 * 10) - 15:
        sftp_time['time_checked'] = curr_time
        write_json('res/sftp_time.json', sftp_time)

        iterations = json.load(open('res/iteration.json'))
        beam_balance_data = json.load(open('res/beam_balance_data.json'))

        get_all_beam_info(beam_balance_data, iterations)

        write_json('res/sftp_time.json', {})

    else:
        logger.info('waiting to check balance status. {}'.format(
            (int(time.time()) - sftp_time['time_processed'])))

    sys.exit()
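
get_sftp (and the commented-out ls_sftp, exists_sftp and rm_sftp) are thin wrappers around the SFTP session. A sketch of get_sftp under the same pysftp assumption as in Example No. 1:

def get_sftp():
    # Pull the remote processor log down so it can be scanned locally.
    connect = create_connection()
    connect.get('logs/processor.log', localpath='logs/processor.log')
    connect.close()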
Example No. 8
def launch_case_status():

    #NOTE: THE START OF THIS EXAMPLE WAS TRUNCATED; THE NAME AND SETUP BELOW ARE
    #INFERRED FROM HOW THE SURVIVING BODY USES driver, case_info, beam_balance_data
    driver = get_driver()
    login(driver)

    case_info = json.load(open('res/open_cases.json'))
    beam_balance_data = json.load(open('res/beam_balance_data.json'))

    curr_time = int(time.time())

    for case in case_info:

        #IF CLOSED BUT NOT YET CHECKED, PULL FRESH BEAM INFO AFTER ~10 MINUTES
        if case_info[case]['status'] == 'closed':
            if not case_info[case]['check']:
                if curr_time - case_info[case]['timestamp'] >= 599:
                    beam_balance_data = get_beam_info(beam_balance_data,
                                                      case_info, case)
                    case_info[case]['check'] = True
                else:
                    logger.info("{} curr time: {}".format(
                        case, (curr_time - case_info[case]['timestamp'])))
        #IF NOT MARKED CLOSED, CHECK WHETHER IT HAS CLOSED
        else:
            goto_cases(driver)

            if is_closed(driver, case):
                case_info[case]['status'] = 'closed'
                case_info[case]['timestamp'] = curr_time

    write_json('res/beam_balance_data.json', beam_balance_data)
    write_json('res/open_cases.json', case_info)

    for case in case_info:
        logger.info('case for: {} status: {}'.format(
            case_info[case]['beam'], case_info[case]['status']))

        #IF A CASE IS CLOSED BUT THE STATUS HASN'T BEEN CHECKED WE WANT TO WAIT
        if case_info[case]['status'] == 'closed':
            if not case_info[case]['check']:
                logger.info('waiting for case to be checked')
                driver.close()
                sys.exit()

    #IF WE HAVE WAITED LONGER THAN 45 MINUTES COMMENT AND MOVE ON
    for case in case_info: