# Example no. 1 (score: 0)
def main():
    """Run one ingestion cycle: restore prior state, fold in the current
    input batch, then persist trades and the updated state to disk."""
    cli_args, input_path = helper.parse_args()

    # Configure logging before anything else so every phase is traced.
    helper.setup_logging(cli_args.verbose)
    logging.info("Logging is now setup")

    logging.info('-------------------- READING HISTORY START --------------------')
    # Restore prior state when available; a clean state otherwise.
    scores, alias_map = read_history()
    helper.log_state('history read-in', scores, alias_map)
    logging.info('-------------------- READING HISTORY END --------------------')

    logging.info('-------------------- INPUT FILE START --------------------')
    # Merge the current ingestion batch from the input file into the state.
    scores, alias_map = parse_input(scores, alias_map, input_path)
    helper.log_state('input file read-in', scores, alias_map)
    logging.info('-------------------- INPUT FILE END --------------------')

    logging.info('-------------------- WRITING FILE OUTPUT START --------------------')
    # Persist any trades, then the SA state itself, for the next run.
    write_trades(scores)
    write_history(scores, alias_map)
    logging.info('-------------------- WRITING FILE OUTPUT END --------------------')
def main():
    """Entry point: load the pickled MEM classifier, restore prior state,
    ingest the input batch, then persist trades and state to disk."""
    args, file_name = helper.parse_args()

    # Load the trained classifier once at startup and publish it globally.
    # NOTE(review): unpickling can execute arbitrary code — MEM.pickle must
    # come from a trusted source.
    global MEM_classifier
    with open("MEM.pickle", "rb") as classifier_f:
        # Context manager guarantees the handle is closed even on error.
        MEM_classifier = pickle.load(classifier_f, encoding='latin1')

    # Initial setup
    helper.setup_logging(args.verbose)
    logging.info("Logging is now setup")

    logging.info('-------------------- READING HISTORY START --------------------')
    # Gets prior history if available, clean state if not
    print('read history ')
    viability_scores, aliases = read_history()

    # Debugging
    helper.log_state('history read-in', viability_scores, aliases)
    logging.info('-------------------- READING HISTORY END --------------------')

    logging.info('-------------------- INPUT FILE START --------------------')
    # Gets the current ingestion batch from an input file
    viability_scores, aliases = parse_input(viability_scores, aliases, file_name)
    # (removed a copy-pasted "read history" debug print that was misleading
    # here — this phase reads the input file, not the history)

    # Debugging
    helper.log_state('input file read-in', viability_scores, aliases)
    logging.info('-------------------- INPUT FILE END --------------------')

    logging.info('-------------------- WRITING FILE OUTPUT START --------------------')
    # Writes any trades to disk
    write_trades(viability_scores)

    # Writes the SA state to history
    write_history(viability_scores, aliases)
    logging.info('-------------------- WRITING FILE OUTPUT END --------------------')
def main():
    """Continuously read PMS5003 and BME680 sensor data and print each
    merged sample until interrupted with Ctrl-C or an error occurs."""

    # setup logger
    helper.setup_logging()
    logger = logging.getLogger(__name__)

    while True:
        # A fresh sensor object is created per iteration, preserving the
        # original behavior (presumably re-opens the device each read —
        # TODO confirm whether a single instance would suffice).
        sensor = pms5003()
        try:
            bme680 = helper.bme680_data()
            sensor.read_frame()
            # Merge both sensors' readings into a single record.
            print({**bme680, **sensor.data})
        except KeyboardInterrupt:
            print("\nTerminating data collection")
            break
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed; logged at error level (with traceback) so
            # failures are visible without debug logging enabled.
            logger.exception("error")
            break
# Example no. 4 (score: 0)
    def __init__(self, config_file, kafka_config_file, verbose):
        """Wire up logging, the database connection, and config sections.

        The Kafka consumer is not created here; the attribute is only
        initialised to None for later setup.
        """
        self.logger = helper.setup_logging(self.__class__.__name__, verbose)

        self.db_connection = DBConnection(verbose)
        self.kafka_consumer = None

        # Read each configuration section (same order as before).
        kafka_params = helper.config(kafka_config_file)
        db_params = helper.config(config_file, "db")
        general_params = helper.config(config_file)
        self.kafka_params = kafka_params
        self.db_params = db_params
        self.params = general_params
# Example no. 5 (score: 0)
 def __init__(self, verbose=False):
     """Create a per-class logger; *verbose* toggles log verbosity."""
     self.logger = helper.setup_logging(self.__class__.__name__, verbose)
# Example no. 6 (score: 0)
def main():
    """Ingestion engine entry point.

    Streams tweets from the users in USERS, batches them to an input file,
    and hands each batch to the background processing engine, polling for
    completion every POLLING_TIMEOUT seconds.
    """
    # Initial setup
    args = helper.parse_args()
    helper.setup_logging(args.verbose)
    logging.info("Polling is set to: " + str(int(POLLING_TIMEOUT)) +
                 " seconds")

    def _open_stream():
        # Build an authenticated stream and block on it, following USERS.
        # Extracted because the original duplicated this auth/stream code.
        auth = OAuthHandler(secrets.consumer_key,
                            secrets.consumer_secret)
        auth.set_access_token(secrets.access_token,
                              secrets.access_token_secret)
        stream = tweepy.Stream(auth, SListener())
        stream.filter(follow=USERS)
        return stream

    # Make call to twitter's streaming API to gather tweets
    twitter_stream = None
    while True:
        print('Gathering tweets from twitter\n')
        try:
            try:
                twitter_stream = _open_stream()
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt now
                # reaches the outer clean-exit handler. The disconnect is
                # guarded: the original could hit an unbound name when auth
                # construction itself raised.
                print("Authentication error")
                if twitter_stream is not None:
                    twitter_stream.disconnect()

            # Writes the processing input file
            generate_input_file()

            # Starts the background process
            background_process = Popen(["python", "process.py", INPUT_FILE],
                                       stdout=PIPE)

            while background_process.poll() is None:
                print("[SA engine]\t\tStatus: Currently processing a batch.")

                # Re-arm the alarm so the blocking stream is interrupted
                # periodically to re-check whether the batch has finished.
                signal.signal(signal.SIGALRM, handler)
                signal.alarm(POLLING_TIMEOUT)

                try:
                    twitter_stream = _open_stream()
                except ValueError:
                    # Presumably raised by the alarm handler to break out of
                    # the stream — TODO confirm against handler().
                    print("Checking if backend if free for the next batch")
                except KeyboardInterrupt:
                    print("\nCleaning up and exiting the ingestion engine")
                    if os.path.isfile(INPUT_FILE):
                        os.remove(INPUT_FILE)
                    sys.exit(0)
                except Exception:
                    # Narrowed from a bare `except:`; see note above.
                    print("Authentication error")
                    if twitter_stream is not None:
                        twitter_stream.disconnect()

            # At this point, the last batch is complete
            print("The last batch is now complete, processing next batch.")
            print("--------------------")
        except KeyboardInterrupt:
            print("\nCleaning up and exiting the ingestion engine")
            if os.path.isfile(INPUT_FILE):
                os.remove(INPUT_FILE)
            sys.exit(0)
# Example no. 7 (score: 0)
 def __init__(self, verbose=False):
     """Set up a per-class logger; connection and table are bound later."""
     self.logger = helper.setup_logging(self.__class__.__name__, verbose)
     # Populated later by the caller/subclass before use.
     self.db_connection = None
     self.table_name = None
# Example no. 8 (score: 0)
from flask import Flask
import json
import dataprep
import helper

app = Flask(__name__)

# Load the list of data sources once at import time; get_data() reuses it
# as its default argument.
with open("./resources/data_sources.json") as f:
    data_list = json.load(f)


@app.route('/')
def get_data(data_list: list = data_list):
    """Instantiate the Data class and pipe impression stats into Flask."""
    return dataprep.Data().get_impression_stats(data_list)


if __name__ == "__main__":
    # Dev entry point: configure logging and serve on port 8000.
    # NOTE(review): logging is only configured when run directly, not when
    # the app is served via a WSGI server — confirm whether that is intended.
    logger = helper.setup_logging(__name__, log_level="INFO")
    app.run(debug=True, port=8000)
# Example no. 9 (score: 0)
def process_host(host, zabbix_sender):
    """Takes in host, and then process the domains on that host.

    Discovers libvirt domains on *host*, registers/enables them in Zabbix,
    pushes their metrics through *zabbix_sender*, and returns the list of
    discovered domains (or None if the host could not be reached).
    """
    print("Processing Host: " + host)
    # One log file per host so concurrent workers do not interleave output.
    logger = setup_logging(__name__ + host, LOG_DIR + "/" + host)

    with ZabbixConnection(USER, "https://" + ZABBIX_SERVER, PASSWORD) as zabbix_api:

        openstack_group_id = zabbix_api.get_group_id(GROUP_NAME)
        templateid = zabbix_api.get_template_id(TEMPLATE_NAME)

        logger.info("Starting to process host: %s", host)
        uri = "qemu+ssh://root@" + host + "/system?keyfile=" + KEY_FILE

        try:
            libvirt_connection = LibvirtConnection(uri)
        except LibvirtConnectionError as error:
            # Log the failure to connect to a host, but continue processing
            # other hosts.
            # BUG FIX: print() does not apply %-formatting the way logging
            # does — the original printed the format string and host as two
            # separate values. Interpolate explicitly.
            print("Host %s errored out" % host)
            logger.exception(error)
            return None

        domains = libvirt_connection.discover_domains()
        for domain in domains:
            try:
                instance_attributes = libvirt_connection.get_misc_attributes(
                    domain)
                project_uuid = instance_attributes["project_uuid"]
                project_name = instance_attributes["project_name"]

                project_uuid_group_id = zabbix_api.get_group_id(project_uuid)
                project_name_group_id = zabbix_api.get_group_id(project_name)

                # Create the per-project host groups on first sight.
                if project_uuid_group_id is None:
                    project_uuid_group_id = zabbix_api.create_hostgroup(
                        project_uuid)

                if project_name_group_id is None:
                    project_name_group_id = zabbix_api.create_hostgroup(
                        project_name)

                groupids = [openstack_group_id,
                            project_uuid_group_id, project_name_group_id]

                if zabbix_api.get_host_id(domain) is None:
                    logger.info("Creating new instance: %s", domain)
                    zabbix_api.create_host(
                        domain, groupids, templateid, PSK_IDENTITY, PSK)
                elif zabbix_api.get_host_status(domain) == DISABLE_HOST:
                    # Re-enable a previously disabled host that reappeared.
                    host_id = zabbix_api.get_host_id(domain)
                    zabbix_api.set_hosts_status([host_id], ENABLE_HOST)

                # Since we decided to update the host groups of all the VMs,
                # I did this. Leaving it here for now in case we decide to do
                # it again.
                # zabbix_api.update_host_groups(domain, groupids)

                metrics = get_instance_metrics(domain, libvirt_connection)
                zabbix_sender.send(metrics)
                logger.info("Domain %s is updated", domain)

            except DomainNotFoundError as error:
                # This may happen if a domain is deleted after we discover
                # it. In that case we log the error and move on.
                logger.error("Domain %s not found", domain)
                logger.exception(error)
            except ZabbixAPIException as error:
                logger.error("Zabbix API error")
                logger.exception(error)
    print("Finished Processing: " + host)
    return domains
# Example no. 10 (score: 0)
            hosts_to_be_deleted = [result["host_id"]
                                   for result in results if result["action"] == "delete"]
            hosts_to_be_disabled = [result["host_id"]
                                    for result in results if result["action"] == "disable"]
            if hosts_to_be_disabled != []:
                zapi.set_hosts_status(hosts_to_be_disabled, DISABLE_HOST)
            if hosts_to_be_deleted != []:
                zapi.delete_hosts(hosts_to_be_deleted)
            print("Hosts not in openstack:" + str(len(hosts_not_in_openstack)))
            print("hosts_disabled:" + str(len(hosts_to_be_disabled)))
            print("hosts_deleted:" + str(len(hosts_to_be_deleted)))
        finally:
            os.remove(lockfile)


if __name__ == "__main__":
    load_config()
    # Zabbix connection / auth settings read from the loaded config.
    USER = config['general']['API_USER']
    PASSWORD = config['general']['PASSWORD']
    ZABBIX_SERVER = config['general']['ZABBIX_SERVER']
    LOG_DIR = config['general']['LOG_DIR']
    # Pre-shared key pair, presumably for encrypted Zabbix agent
    # connections — confirm against create_host usage.
    PSK = config['general']['PSK']
    PSK_IDENTITY = config['general']['PSK_IDENTITY']
    HOSTS_FILE = config['general']['HOSTS_FILE']
    KEY_FILE = config['general']['KEY_FILE']
    # Fixed deployment constants (not configurable via the config file).
    GROUP_NAME = "openstack-instances"
    TEMPLATE_NAME = "moc_libvirt_single"
    MAX_PROCESSES = 64
    main_logger = setup_logging(__name__, LOG_DIR + "/main.log")
    main()