Example #1
 def __init__(self, kafka_configfile, s3_configfile):
     """
     class constructor that initializes the instance according to the configurations
     of the S3 bucket and Kafka
     :type kafka_configfile: str     path to kafka config file
     :type s3_configfile   : str     path to S3 config file
     """
     self.kafka_config = helper.parse_config(kafka_configfile)
     self.s3_config = helper.parse_config(s3_configfile)
     self.producer = KafkaProducer(
         bootstrap_servers=self.kafka_config["BROKERS_IP"])
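Judging by Example #4 below, helper.parse_config deserializes a JSON file, so the Kafka config read here is presumably a JSON document exposing at least the "BROKERS_IP" key used above. A minimal sketch of such a file (the broker address is a placeholder; any additional keys are assumptions):

{
    "BROKERS_IP": ["localhost:9092"]
}

kafka-python's KafkaProducer accepts either a single "host:port" string or a list of them for bootstrap_servers, so a JSON list maps onto the call directly.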
Example #2
 def __init__(self, s3_configfile, psql_configfile):
     """
     class constructor that initializes the Spark job according to the configurations of
     the S3 bucket, PostgreSQL connection and UDF.
     :type s3_configfile:     str  path to S3 config file
     :type psql_configfile:   str  path to psql config file
     """
     self.s3_config = helper.parse_config(s3_configfile)
     self.psql_config = helper.parse_config(psql_configfile)
     self.conf = SparkConf()
     self.sc = SparkContext(conf=self.conf)
     self.spark = SparkSession.builder.config(conf=self.conf).getOrCreate()
     self.sc.setLogLevel("ERROR")
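The PostgreSQL config is parsed the same way, but none of its keys appear in this snippet. For a Spark JDBC sink it would plausibly hold connection settings along these lines; every key name below is an assumption, not the project's actual schema:

{
    "url": "jdbc:postgresql://localhost:5432/mydb",
    "properties": {
        "user": "postgres",
        "password": "secret",
        "driver": "org.postgresql.Driver"
    }
}

Note that SparkSession.builder.config(conf=self.conf).getOrCreate() reuses the SparkContext created on the previous line rather than starting a second one, since only one context may be active per JVM.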
Example #3
 def __init__(self, kafka_configfile, stream_configfile, psql_configfile):
     """
     class constructor that initializes the instance according to the configurations
     of Kafka (brokers, topic, offsets), data schema and batch interval for streaming
     :type kafka_configfile:  str        path to kafka config file
     :type stream_configfile: str        path to stream config file
     :type psql_configfile:   str        path to psql config file
     """
     self.kafka_config = helper.parse_config(kafka_configfile)
     self.stream_config = helper.parse_config(stream_configfile)
     self.psql_config = helper.parse_config(psql_configfile)
     self.conf = SparkConf()
     self.sc = SparkContext.getOrCreate(conf=self.conf)
     self.spark = SparkSession.builder.config(conf=self.conf).getOrCreate()
     self.ssc = StreamingContext(self.sc, self.stream_config["INTERVAL"])
     self.sc.setLogLevel("ERROR")
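The only stream-config key visible here is "INTERVAL", the batch duration in seconds handed to StreamingContext. A minimal stream config consistent with this constructor (the value is a placeholder; any other keys are assumptions):

{
    "INTERVAL": 10
}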
Example #4
    def test_parse_config(self):
        # test if correctly parses the config file
        conf = {"field1": "val1", "field2": {"subfield1": 2, "subfield2": "3"}}

        with patch("__builtin__.open",
                   mock_open(read_data=json.dumps(conf))) as mock_file:

            self.assertEqual(conf, helper.parse_config(mock_file),
                             "fail to properly read config from file")
Example #5
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.firefox.options import Options
import datetime
import time

import helper

url = "https://www.hvv.de/en/meinhvv#/login"
uname = helper.parse_config('HVV', 'HVV_LOGIN_USER')
pw = helper.parse_config('HVV', 'HVV_LOGIN_PW')

def fetch_bill(driver):
    # Login
    print("Opening login page")
    driver.get(url)
    mainWindow = driver.current_window_handle
    time.sleep(1)
    print("Performing login")
    driver.find_elements_by_xpath('//*[@id="username"]')[0].send_keys(uname)
    driver.find_elements_by_xpath('//*[@id="password"]')[0].send_keys(pw)
    time.sleep(1)
    submitButton = driver.find_element(By.XPATH, '//button[@name="button" and @type="submit"]')
    driver.execute_script("arguments[0].scrollIntoView();", submitButton)
    submitButton.click()
    # Open tickets
    time.sleep(1)
    print("Opening ticket history")
    ticketHistory = driver.find_element(By.XPATH, '//*[text()="History of orders at the Online Shop"]')
    driver.execute_script("arguments[0].scrollIntoView();", ticketHistory)
    ticketHistory.click()
    time.sleep(1)
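The Options import above goes unused in this excerpt, so the driver is presumably constructed elsewhere in the script. A minimal sketch of that setup, assuming a headless Firefox session (the headless flag and the call site are assumptions, not shown code):

options = Options()
options.add_argument("-headless")
driver = webdriver.Firefox(options=options)
fetch_bill(driver)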
Example #6

import argparse
import json

import helper

parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input_izh', nargs='+', type=str)
parser.add_argument('-g', '--groups_izh', nargs='+', type=str)
parser.add_argument('-r', '--conn_rand', nargs='+', type=str)
parser.add_argument('-k', '--groups_rand', nargs='+', type=str)
parser.add_argument('-s', '--conn_rand_EE', nargs='+', type=str)
parser.add_argument('-t', '--groups_rand_EE', nargs='+', type=str)
parser.add_argument('-o', '--output', type=str)
parser.add_argument(
    '-c', '--config',
    type=str)  # Experiment file defining the network structure and dynamics
parser.add_argument('-e', '--EE', type=int)  # EE synapses only

args = parser.parse_args()

# load config file
cfg = helper.parse_config(args.config)
Wmax = cfg["network-params"]["plasticity"]["Wmax"]

ratios_izh = []
num_groups_izh = []


def analyzeForRatio(conn_fn, group_fn, Wmax, EE):
    # calculate ratio of strong synapses
    # and number of groups

    # load data from experiments
    with open(conn_fn, "r") as f:
        conns = json.load(f)

    with open(group_fn, "r") as f:
        # the excerpt is truncated here; assumed to parallel the conns load above
        groups = json.load(f)
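The nested lookup cfg["network-params"]["plasticity"]["Wmax"] above fixes the shape of the experiment config. A minimal sketch consistent with that access path (the value is a placeholder; sibling keys describing the network structure and dynamics mentioned in the --config help are omitted):

{
    "network-params": {
        "plasticity": {
            "Wmax": 10.0
        }
    }
}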
Example #7
### =================================================== ###

### Python module imports
import numpy as np
from sys import argv

### Local file imports
import helper

### Parse input arguments and initialize directories
dataset = argv[1].lower()
loader_module = __import__("data_" + dataset)

helper.init_direc('Datasets/' + dataset)
params = helper.parse_config(dataset)

### Generate desired datasets
for seed in params['seeds']:
    seed = int(seed)
    params['seed'] = seed

    if dataset == 'artificial':
        params['n_ol'] = 0
        loader = loader_module.Loader(params)
        loader.generate()
        loader.make_partitions(n_ol=0)
        loader.save_data(overwrite=params['overwrite'])
        for ol in params['overlaps']:
            params['n_ol'] = ol
            loader.make_partitions(n_ol=ol)
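The calls in this loop pin down the interface every data_<dataset> module must expose through its Loader class. A skeleton of that contract, inferred purely from this snippet (method bodies and any further members are unknown):

class Loader:
    """Dataset loader constructed from the parsed config params."""

    def __init__(self, params):
        self.params = params

    def generate(self):
        # synthesize the raw dataset (used by the 'artificial' branch)
        pass

    def make_partitions(self, n_ol):
        # partition the data with the requested overlap n_ol
        pass

    def save_data(self, overwrite=False):
        # persist the generated data, optionally overwriting existing files
        pass

The script selects the module dynamically: running it with artificial as the first argument imports data_artificial via __import__("data_" + dataset).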