Example #1
    def __init__(
        self,
        redis_host,
        redis_port,
        redis_db,
        redis_password,
        db_host='localhost',
        db_port=5432,
        db_name='policy_db',
        db_user='******',
        db_password='******',
        clean_frequency_seconds=4,
    ):
        super().__init__(redis_host, redis_port, redis_db, redis_password)

        duallog.setup('logs', f'monitor-{self.id}-')

        self.handler.register(EpisodeMessage, self.episode)
        self.handler.register(StartMonitoringMessage, self.start)
        self.tb_step = 0
        self.cleaner = clean_frequency_seconds
        self.cleaner_process = TensorBoardCleaner(
            db_host=db_host,
            db_port=db_port,
            db_name=db_name,
            db_user=db_user,
            db_password=db_password,
            clean_frequency_seconds=clean_frequency_seconds)

        # Resume: restore the TensorBoard writer and step counter from the latest run in the policy DB.
        self.db = PolicyDB(db_host=db_host,
                           db_port=db_port,
                           db_name=db_name,
                           db_user=db_user,
                           db_password=db_password)
        run = self.db.get_latest()
        if run is not None:
            rundir = 'runs/' + run.run
            Path(rundir).mkdir(parents=True, exist_ok=True)
            self.tb = tensorboardX.SummaryWriter(rundir)
            self.tb_step = RedisStep(self.r)
        self.cleaner_process.start()

        logger.info('Init Complete')
Example #2
args = arg_parser.parse_args()

if args.adapter:
    config.set('monitor', 'adapter', args.adapter)
if args.debug:
    config.set('monitor', 'debug', '1')

# Set up logging
if config.getboolean('monitor', 'debug', fallback=False):
    print("Debug enabled")
    level = logging.DEBUG
else:
    level = logging.INFO
duallog.setup('solar-monitor',
              minLevel=level,
              fileLevel=level,
              rotation='daily',
              keep=30)

# Set up data logging
# datalogger = None
datalogger = DataLogger(config)

# Set up device manager and adapter
device_manager = SolarDeviceManager(adapter_name=config['monitor']['adapter'])
logging.info("Adapter status - Powered: {}".format(
    device_manager.is_adapter_powered))
if not device_manager.is_adapter_powered:
    logging.info("Powering on the adapter ...")
    device_manager.is_adapter_powered = True
    logging.info("Powered on")
Example #3
""" Import Lighthouse """
from lighthouse import bot
import slack

# Import the duallog package to set up simultaneous
# logging to screen and logfile.
import duallog

# Import the logging package to generate log messages.
import logging

# Set up dual logging and tell duallog where to store the logfiles.
duallog.setup("Lighthouse")

# The bot runs and waits on 1-second intervals.
# It parses and runs commands based on the messages
# received from the Slack RTM stream.
listener = bot.Bot()


@slack.RTMClient.run_on(event="message")
def wait_for_event(**event):
    """
    Entry point for commands from Slack.
    Reads from Slack and handles splitting of arguments.
    Submission of commands is handed off to mcp for handling.
    """
    logging.debug("Event received")
    message = event["data"]

    logging.debug(message)
Example #4
# Huge thanks to ALIILAPRO for making the script.
# Modified for logging and single-line execution by dhlalit11.
import urllib.request
import json
import datetime
import random
import string
import time
import os
import sys
import duallog 
import logging
os.system("title WARP-PLUS-CLOUDFLARE By ALIILAPRO")
# Set up the save location for the log files
duallog.setup('/path/to/log/folder')
os.system('cls' if os.name == 'nt' else 'clear')
print('      _______ _      __________________       _______ _______ _______ _______\n'
'     (  ___  | \     \__   __|__   __( \     (  ___  |  ____ |  ____ |  ___  )\n'
'     | (   ) | (        ) (     ) (  | (     | (   ) | (    )| (    )| (   ) |\n'
'     | (___) | |        | |     | |  | |     | (___) | (____)| (____)| |   | |\n'
'     |  ___  | |        | |     | |  | |     |  ___  |  _____)     __) |   | |\n'
'     | (   ) | |        | |     | |  | |     | (   ) | (     | (\ (  | |   | |\n'
'     | )   ( | (____/\__) (_____) (__| (____/\ )   ( | )     | ) \ \_| (___) |\n'
'     |/     \(_______|_______|_______(_______//     \|/      |/   \__(_______)\n')
print ("[+] ABOUT SCRIPT:")
print ("[-] With this script, you can getting unlimited GB on Warp+.")
print ("[-] Version: 4.0.0")
print ("--------")
print ("[+] THIS SCRIPT CODDED BY ALIILAPRO") 
print ("[-] SITE: aliilapro.github.io") 
print ("[-] TELEGRAM: aliilapro")
Example #5
File: gui.py  Project: DuaneNielsen/ppo
import multiprocessing
from rollout import single_episode
from policy_db import PolicyDB
import gym
import time
from importlib import import_module

# This design pattern simulates button callbacks
# Note that callbacks are NOT a part of the package's interface to the
# caller intentionally.  The underlying implementation actually does use
# tkinter callbacks.  They are simply hidden from the user.

import logging
import duallog

duallog.setup('logs', 'gui')

rollout_time = None


# todo fix status bar performance
# todo type inference of config
# todo add widget for number of demos


class MicroServiceBuffer:
    def __init__(self):
        self.services = {}
        self.timeout = 10

    def __setitem__(self, key, service):
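        # Hypothetical body, shown for illustration only (the original file
        # continues beyond this excerpt): store the service under its key.
        self.services[key] = service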
Example #6
    except Exception as e:
        logging.error('Failed to retrieve weather information: {}.'.format(e))

    logging.info('Weather information updated.')


if __name__ == '__main__':
    """Display and continuously update weather station image on screen.
    """

    # Import required packages.
    import duallog
    import pygame

    # Set up logging.
    duallog.setup('log/frogweather/frogweather')

    # Define the size of the weather station window.
    screensize = (240, 480)

    # Start the pygame engine and create a window.
    pygame.init()
    pygame.display.set_caption('Frogweather')
    screen = pygame.display.set_mode(screensize)

    # Initialize the weather station.
    init()

    # Start the game loop.
    while True:
        # Process game events.
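        # Sketch of a typical pygame event-handling body, added for
        # illustration (assumes the app should simply quit when the window
        # is closed; not the project's original code):
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                raise SystemExit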
Example #7
def main(argv):
    # Setup logging
    duallog.setup(Path(FLAGS.data_directory) / 'logs')
    logging.set_verbosity(
        FLAGS.logging_verbosity
    )  # Must be called after duallog.setup() to function properly

    # Configure GDAL
    gdal.SetCacheMax(8 * 1000000000)

    # Create absolute paths (either use full path provided as argument or use data dir in the project folder)
    data_dir = Path(FLAGS.data_directory) if os.path.isabs(
        FLAGS.data_directory) else Path.cwd() / FLAGS.data_directory

    # Ensure filename on geojson file
    geojson_path = FLAGS.geojson if FLAGS.geojson.endswith(
        '.geojson') else FLAGS.geojson + '.geojson'

    # If no order_id from previous order is provided, then download the data requested for this order
    order_id = FLAGS.order_id
    if order_id == 'Empty':
        order_id = 'order_' + datetime.datetime.today().strftime(
            '%Y%m%d-%H%M%S')

        logging.info("####################################")
        logging.info("# Initializing Sentinel downloader #")
        logging.info("####################################")
        logging.info("Order id: " + order_id)
        downloader = Downloader(username=FLAGS.username,
                                password=FLAGS.password,
                                satellite=FLAGS.satellite,
                                order_id=order_id,
                                directory=data_dir)

        # Load the geojson file (check whether the filename was included in the provided name)
        if 'denmark_without_bornholm' in str(geojson_path):
            # Load the default geojson (denmark_without_bornholm), which is included in the project code
            footprint = geojson_to_wkt(
                read_geojson(
                    Path('data') / 'geojson' /
                    'denmark_without_bornholm.geojson'))
        else:
            # Load the provided geojson file from the data directory
            footprint = geojson_to_wkt(
                read_geojson(data_dir / 'geojson' /
                             geojson_path))  # Load from data directory

        # Query the data (multiple footprints can be used, but it is recommended to stick to a single footprint)
        downloader.query(footprint, FLAGS.startdate, FLAGS.enddate)

        # Following code can be used if several geojson files are to be queried
        # footprint = geojson_to_wkt(read_geojson('data/geojson/bornholm.geojson'))
        # downloader.query(footprint, FLAGS.startdate, FLAGS.enddate)

        # Print the number of products and size of all products to be downloaded
        downloader.print_num_and_size_of_products()
        downloader.save_queried_products(
        )  # Save a geojson containing all products to be downloaded
        logging.info("")

        if FLAGS.download:
            logging.info("####################")
            logging.info("# Downloading data #")
            logging.info("####################")
            downloader.download_zipfiles()
            logging.info("")

    if FLAGS.process_tiles:
        # Load products to be processed (always load from file to ensure modularity for the downloader and processor)
        queried_products_path = (data_dir / 'orders' /
                                 order_id).with_suffix('.pkl')
        products_df = pd.read_pickle(queried_products_path)

        logging.info("###################")
        logging.info("# Processing data #")
        logging.info("###################")
        processpipeliner = ProcessPipeliner(products_df=products_df,
                                            directory=data_dir)
        processpipeliner.process_products()
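

# The FLAGS/set_verbosity pattern above suggests absl-style flags; under that
# assumption, the usual entry point would look like the following sketch
# (requires `from absl import app`, which is not shown in the excerpt):
if __name__ == '__main__':
    app.run(main)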
Example #8
date_ = datetime.now().strftime('%d_%m_%Y__%H:%M')
# archive filepath
arcdir = "/rundeck/storage/vmax_luns_data/archives/"
if not os.path.exists(arcdir):
    os.makedirs(arcdir)

# Git filepath
gitpath = "/git/cps_storage_scripts/"

# report filepath
reppath = "/rundeck/storage/vmax_hlu_luns_data/"
if not os.path.exists(reppath):
    os.makedirs(reppath)

# Set up dual logging and tell duallog where to store the logfiles.
duallog.setup('{}vmax_hlu2lunmap_{}'.format(reppath, date_))

# Get DC value from the Rundeck option parameter


def check_arg(args=None):
    """Argparse to take DC as parameter."""
    parser = argparse.ArgumentParser(
        description='Script to generate and send VMAX Capacity Report ')

    parser.add_argument('-dc',
                        '--datacenter',
                        help='Datacenter parameter required',
                        required=True)
    results = parser.parse_args(args)
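    # The original continues beyond this excerpt; a typical continuation,
    # sketched here for illustration, returns the parsed datacenter value:
    return results.datacenter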
Example #9
""" Import Lighthouse """
from lighthouse import bot
import slack

# Import the duallog package to set up simultaneous
# logging to screen and logfile.
import duallog

# Import the logging package to generate log messages.
import logging

# Set up dual logging and tell duallog where to store the logfiles.
duallog.setup("logs")

# The bot runs and waits on 1-second intervals.
# It parses and runs commands based on the messages
# received from the Slack RTM stream.
listener = bot.Bot()


@slack.RTMClient.run_on(event="message")
def wait_for_event(**event):
    """
    Entry point for commands from Slack.
    Reads from Slack and handles splitting of arguments.
    Submission of commands is handed off to mcp for handling.
    """
    logging.debug("Event received")
    data = event["data"]

    if "subtype" in data and data["subtype"] == "message_changed":
Example #10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test script for the duallog module.

Checks whether log rotation works properly.
"""

# Import required packages.
import logging
import duallog

# Write many log messages to file to test log rotation.
duallog.setup('logtest')

for n in range(1, 100000):
    logging.error('This is test log message no. {:06d}.'.format(n))
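
# To inspect the effect of log rotation, one can list the files that duallog
# wrote (a sketch; assumes duallog stored its rotated logfiles in the
# 'logtest' directory passed to setup above):
import os
print(sorted(os.listdir('logtest')))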