Example #1
    def process(self):

        f = utility.Utility()
        #while True:
        ip = f.ip()
        hostname = f.host()
        BROKER = f.broker()
        subT = f.subtopic()
        while True:
            broker = BROKER
            client = mqtt.Client(ip)
            client.connect(broker)
            client.loop_start()
            if (subT == "help"):
                client.publish(f"workstation/{hostname}/n/reboot/help",
                               str(Reboot_Help), 2, False)
                quit()
            elif (subT == ""):
                client.publish(f"workstation/{hostname}/n/reboot",
                               f"Rebooting system {hostname}", 2, False)
                setattr(f, "logText", "Rebooting System...")
                f.log()
                time.sleep(2)
                os.system("reboot")
                time.sleep(0.5)
                client.disconnect()
                quit()
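
The reboot, hostname, shutdown, message and list plugins shown in these examples all call the same helper object. A minimal sketch of the utility.Utility interface they assume is given below; the method names come from the calls in the examples, while the return values are illustrative placeholders, not the original implementation.

# Hedged sketch of the helper interface assumed by the MQTT plugin examples.
class Utility:
    logText = ""

    def ip(self):               # unique client id passed to mqtt.Client()
        return "192.168.0.10"

    def host(self):             # hostname used to build the topic path
        return "workstation-01"

    def broker(self):           # broker address read from setup.config
        return "192.168.0.2"

    def subtopic(self):         # sub-topic extracted from the incoming topic name
        return ""

    def DynamicVariable(self):  # payload value (new hostname, message text, ...)
        return ""

    def log(self):              # writes self.logText to the plugin log
        print(self.logText)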
Example #2
 def __init__(self):
     rospy.init_node('server')
     self.confirmation = "0"
     rospy.on_shutdown(self.cleanup)
     rospy.loginfo("Starting server node...")
     #read the parameter
     self.RPCSERVERIP = rospy.get_param("RPCSERVERIP", "192.168.178.38")
     self.RPCSERVERPORT = rospy.get_param("RPCSERVERPORT", "8000")
     #Publisher
     self.pub = rospy.Publisher('~status', String, queue_size=1000)
     self.pub1 = rospy.Publisher('~recognition', String, queue_size=1000)
     #rpc server
     rospy.loginfo("Starting rpc server ...")
     #make sure the right IP address is stored in the launch file, otherwise the server will fail to start
     self.server = SimpleXMLRPCServer(
         (self.RPCSERVERIP, int(self.RPCSERVERPORT)),
         requestHandler=RequestHandler)
     #register server services
     self.server.register_function(self.notify)
     self.server.register_function(self.on_word_recognized)
     rospy.loginfo("Starting rpc server AGGGGGGGGG..." +
                   self.on_word_recognized.func_name)
     rospy.loginfo('NOTIFICATION OF WORK DONE no..............')
     #create a utility
     config = rospy.get_param("FOLDER", "")
     self.ut = u.Utility(config + '/pepper_iai.corpus')
     rospy.loginfo('NOTIFICATION OF WORK DONE........yes......')
     self.ut.parse()
     rospy.loginfo('NOTIFICATION OF WORK DONE........yes......')
     rospy.set_param('ORDER', 1)
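
Because notify and on_word_recognized are registered on a SimpleXMLRPCServer, a remote process can call them by name over XML-RPC. A minimal client sketch using the standard library is shown below; the address, port and argument values are assumptions for illustration.

# Hedged XML-RPC client sketch; the server address and arguments are assumed.
import xmlrpc.client

proxy = xmlrpc.client.ServerProxy("http://192.168.178.38:8000")
proxy.notify("done")               # invokes the registered notify() on the server
proxy.on_word_recognized("hello")  # invokes the registered on_word_recognized()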
Example #3
 def process(self):
     f = utility.Utility()
     ip = f.ip()
     hostname = f.host()
     BROKER = f.broker()
     DV = f.DynamicVariable()
     subT = f.subtopic()
     
     while True:
             
         broker = BROKER
         client = mqtt.Client(ip)
         client.connect(broker)
         client.loop_start()
         if(subT == "help"):
             client.publish(f"workstation/{hostname}/n/hostname/help", str(Hostname_Help), 2, False)
             quit()
         elif(subT == ""):
             os.system(f"hostnamectl set-hostname {DV} --static")
             client.publish(f"workstation/{hostname}/n/hostname", f"Hostname changed successfully to {DV} ", 2, False)
             setattr(f, "logText", f"Hostname changed to {DV}")
             f.log()
             time.sleep(0.5)
             client.disconnect()
             quit()
Example #4
    def __init__(self, model, arg_dict):
        '''file_list:[/axial/T1.h5, ...,]
        num_of_input_branches: 2
        
        '''
        self.args = arg_dict
        self.num_ip = arg_dict['num_of_input_branches']
        self.file_list = arg_dict['file_list']
        self.batch_size = arg_dict['batch_size']
        self.model = model
        self.test_dataset = None
        self.T1 = None
        self.T2 = None
        self.X = None
        self.Y_true = None
        self.score_file = None
        self.Y_pred = None
        self.shape = None
        self.break_points = []
        #SCORE LOG OBJECTS
        self.jd_scores = []
        self.dc_scores = []

        self.abs_sum = None
        self.union = None
        self.intersection = None

        self.batch_abs_sum = None
        self.batch_union = None
        self.batch_intersection = None

        #OTHER THINGS
        self.epsilon = K.epsilon()
        #        self.load_input_data(arg_dict['file_list'])
        self.u_obj = utl.Utility()
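
Based on the docstring and the attributes read above, the arg_dict passed to this constructor would look roughly like the following; the file names and batch size are placeholders, only the keys are taken from the code.

# Illustrative arg_dict for the evaluator constructor above (values are placeholders).
arg_dict = {
    'file_list': ['/axial/T1.h5', '/axial/T2.h5'],
    'num_of_input_branches': 2,
    'batch_size': 8,
}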
Example #5
    def process(self):
                
        f = utility.Utility()
        ip = f.ip()
        hostname = f.host()
        BROKER = f.broker()
        subtT = f.subtopic()

        while True:
            
            broker = BROKER
            client = mqtt.Client(ip)
            client.connect(broker)
            client.loop_start()
            if(subtT == "help"):
                client.publish(f"workstation/{hostname}/n/shutdown/help", str(Shutdown_Help), 2, False)
                quit()
            elif(subtT == ""):
                client.publish(f"workstation/{hostname}/n/shutdown", f"shutting down {hostname}", 2, False)
                setattr(f, "logText", f"Shutting down pc {hostname}")
                
                time.sleep(2)
                f.log()
                os.system("shutdown -h now")
                time.sleep(0.5)
                client.disconnect()
                quit()
Example #6
 def __init__(self):
     self.user_interact_obj = userinteract.UserInteract()
     self.utility_obj = utility.Utility()
     self.action_obj = Action()
     self.chef_deploy_mode = '2'
     self.hostIP = ""
     self.chef_self_signed_crt = ""
     self.chef_ssldir = ""
Example #7
 def _init_utility(self):
     self._utility_list = []
     # name, position, uid, estate_value, status, street_id
     for u in self._utility_list_data["data"]:
         new_block = utility.Utility(u['name'], u['block_id'],
                                     u['position'], u['uid'],
                                     u['estate_value'], u['status'])
         self._utility_list.append(new_block)
         self._block_list[new_block.position] = new_block
         self._epic_bank.add_asset(new_block)
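
Each entry of _utility_list_data["data"] must therefore provide at least the six keys read in the loop above. An illustrative record is sketched below; only the key names come from the code, the values are placeholders.

# Hypothetical utility record consumed by _init_utility().
utility_record = {
    'name': 'Electric Company',
    'block_id': 12,
    'position': 12,
    'uid': None,
    'estate_value': 150,
    'status': 0,
}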
Example #8
    def process(self):
        #instantiate the utility file
        f = utility.Utility()
        ip = f.ip()
        hostname = f.host()
        #variable extracted from topic name
        subT = f.subtopic()
        #broker ip set in setup.config file
        BROKER = f.broker()
        DV = f.DynamicVariable()

        while True:
            #set broker address
            broker = BROKER
            #set client, should be unique
            client = mqtt.Client(ip)
            #connect to broker
            client.connect(broker)
            #begin client loop
            client.loop_start()
            #publish information to sub-topic
            title = "Message"

            #getting the date and time
            date = datetime.now()
            dt_string = date.strftime("%d/%m/%Y %H:%M")

            #command = f"zenity --info --title={title} --text='Sent from {sender} on {dt_string}\n\nMessage: {DV}' --width=300 --height=150"
            #icon = "/home/ninja/Documents/Work/mqtta/bell_ring_outline_icon_139893.ico"
            if (subT == ""):
                #message1 = DV.split("/")[-1]
                #sender1 = DV.split("/")[0]

                #os.system(f"Command=$(zenity --info --window-icon={icon} --title={title} --text='Sent from {sender} on {dt_string}\n\nMessage: {DV}' --width=300 --height=150); echo $Command")

                msg_result = subprocess.run([DV],
                                            stdout=subprocess.PIPE,
                                            shell=True)
                rt = msg_result.stdout.decode('utf-8')
                #os.system(f"{DV}")
                #r = subprocess.check_output([DV], shell=True)

                client.publish(f"workstation/{hostname}/n/message", str(rt), 2,
                               False)
                #set log file contents
                #setattr(f, "logText", f"from {sender1} message: {message1} at {dt_string}")
                #f.log()
                #sleep for 0.5 seconds
                time.sleep(0.5)
                #disconnect client
                client.disconnect()
                #quit loop
                exit()
Example #9
	def Extract_Stock_Price(self): # Converting dates of form 1/##/** to 01/##/** and using kibot api for obtaining historical stock prices
		try:
			date_split = self.date.split('/')
			if int(date_split[0]) < 10:
				date_split[0] = '0' + date_split[0]
			if int(date_split[1]) < 10:
				date_split[1] = '0' + date_split[1]
			self.date = date_split[0] + '/' + date_split[1] + '/' + date_split[2]
			Utility1 = utility.Utility()
			Utility1.Make_Stock_Dictionary(self.ticker, self.dict, self.date)
			self.cmp = Utility1.stock_price
		except Exception as e:
			print(str(e))
Example #10
 def __init__(self):
     self.action_obj = action.Action()
     self.utility_obj = utility.Utility()
     self.docker_obj = docker_ao.DockerAO()
     self.CWD = os.getcwd() + "/"
     self.APPORBIT_COMPOSE = self.CWD + "docker-compose"
     self.DEFAULT_EXTRACT_PATH = "/var/apporbit-offline/"
     self.AO_DOWNLOADS_PATH = ""
     self.AO_RESOURCE_PATH = ""
     self.resource_fetcher = resourcefetcher.ResourceFetcher()
     self.compose_file = "provider-compose.yml"
     self.docker_registry_url = ""
     self.host = ""
Example #11
    def get_portfolio_raw_data_by_spaceType_and_utilityType(
            self, space_type, utility_type):
        # Save the raw utility data in dictionaries
        dict_raw_utility = {}
        dict_raw_fossil_fuel = {}
        df_temp_meta = self.df_meta
        # Warning: duplicate building IDs will be dropped!
        df_temp_meta = df_temp_meta.drop_duplicates('building_ID')
        df_temp_meta = df_temp_meta[
            (df_temp_meta['building_space_type_1st'] == space_type)
            & (df_temp_meta['building_ID'].notnull()) &
            (df_temp_meta['building_address'].notnull()) &
            (df_temp_meta['building_area'].notnull())]

        # Add raw utility date into the dictionaries
        for i in df_temp_meta['building_ID']:
            if (utility_type == 1):
                df_temp_detail_utility = (
                    self.get_utility_by_building_id_and_energy_type(i, 1))
                utility_temp = utility.Utility('electricity',
                                               df_temp_detail_utility)
            else:
                df_temp_detail_utility = (
                    self.get_utility_by_building_id_and_energy_type(i, 2))
                utility_temp = utility.Utility('fossil fuel',
                                               df_temp_detail_utility)

            index = df_temp_meta[df_temp_meta['building_ID'] ==
                                 i].index.tolist()[0]

            dict_temp_utility = {
                i: (df_temp_meta.iloc[index]['building_address'],
                    df_temp_meta.iloc[index]['building_area'], space_type,
                    df_temp_meta.iloc[index]['currency'], utility_type,
                    utility_temp)
            }
            dict_raw_utility.update(dict_temp_utility)
        return (dict_raw_utility)
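
The returned dictionary maps each building_ID to a tuple of (address, area, space type, currency, utility type, Utility instance), so a caller might consume it as sketched below; the variable p stands for a portfolio instance and the argument values are assumptions.

# Hedged usage sketch of the method above.
raw = p.get_portfolio_raw_data_by_spaceType_and_utilityType('Office', 1)
for bldg_id, (address, area, space_type, currency, u_type, u_obj) in raw.items():
    print(bldg_id, address, area, space_type, currency, u_type)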
Example #12
    def __init__(self):
        self.action_obj = action.Action()
        self.config_obj = config.Config()
        self.utility_obj = utility.Utility()
        self.docker_obj = docker_ao.DockerAO()
        self.emailid = "*****@*****.**"
        self.host = ""
        self.repohost = ""
        self.internal_repo = ""
        self.internal_docker_reg = ""
        self.internal_gems_repo = ""
        self.CWD = os.getcwd() + "/"
        self.TMPDIR = "/var/apporbit-offline/"
        self.AO_PACKAGES_PATH = ""
        self.EXTRACTED_PACKAGES = self.TMPDIR + "appOrbitPackages/"

        self.chef_port = 9443
Example #13
File: se.py  Project: jorginho84/DDCM-NH
    def binding(self, psi):
        """
		1. Computes the binding function for a given psi (structural parameter)
		2. Computes the X vector
		"""

        #Calling parameters instance based on betas
        param0 = self.betas_struct(psi)

        #The model instance
        N = self.output_ins.__dict__['N']
        x_w = self.output_ins.__dict__['x_w']
        x_m = self.output_ins.__dict__['x_m']
        x_k = self.output_ins.__dict__['x_k']
        passign = self.output_ins.__dict__['passign']
        nkids0 = self.output_ins.__dict__['nkids0']
        married0 = self.output_ins.__dict__['married0']
        hours = np.zeros(N)
        childcare = np.zeros(N)
        agech0 = self.output_ins.__dict__['agech0']
        hours_p = self.output_ins.__dict__['hours_p']
        hours_f = self.output_ins.__dict__['hours_f']
        wr = self.output_ins.__dict__['wr']
        cs = self.output_ins.__dict__['cs']
        ws = self.output_ins.__dict__['ws']

        model = util.Utility(param0, N, x_w, x_m, x_k, passign, nkids0,
                             married0, hours, childcare, agech0, hours_p,
                             hours_f, wr, cs, ws)

        #Computing the emax
        emax_instance = self.emax(param0, model)

        #Computing M samples
        samples = self.choices(param0, emax_instance, model)

        #Computing aux model (binding function)

        betas = self.sim_moments(samples)

        x_vector = self.obj_fn(betas)['x_vector']

        return {'betas': betas, 'x_vector': x_vector}
Example #14
    def process(self):

        utilities = utility.Utility()
        ipAddress = utilities.ip()
        DV = utilities.DynamicVariable()
        hostname = utilities.host()
        BROKER = utilities.broker()

        while True:
            broker = BROKER
            client = mqtt.Client(ipAddress)
            client.connect(broker)
            client.loop_start()
            client.publish("workstation/list",
                           str(hostname + ": " + ipAddress), 2, False)

            time.sleep(10)
            client.disconnect()
            quit()
Example #15
def init_game():
    """
    Initialize the game with map, players and bank
    1. generate a map
    2. initialize players
    3. initialize bank
    all of the data
    :return: map, players list and bank
    """
    # generate a map
    parent_addr = os.path.abspath(os.pardir)
    block_list_data = json_reader(
        os.path.join(parent_addr, 'Data/block_data.json'))
    station_list_data = json_reader(
        os.path.join(parent_addr, 'Data/station_data.json'))
    utility_list_data = json_reader(
        os.path.join(parent_addr, 'Data/utility_data.json'))
    estate_list_data = json_reader(
        os.path.join(parent_addr, 'Data/estate_data.json'))
    chest_list_data = json_reader(
        os.path.join(parent_addr, 'Data/chest_data.json'))
    chance_list_data = json_reader(
        os.path.join(parent_addr, 'Data/chance_data.json'))
    block_list = [0 for x in range(40)]
    station_list = []
    utility_list = []
    estate_list = []
    corner_list = []
    chest_block_list = []
    chance_block_list = []
    tax_list = []

    # initialize bank
    epic_bank = bank.Bank('99', 'EpicBank', 32, 12)
    json_writer(os.path.join(parent_addr, 'Data/bank_data.json'), {
        "house_number": epic_bank.cur_house,
        "hotel_number": epic_bank.cur_hotel
    })

    for b in block_list_data["data"]:
        if b['block_type'] == 0:
            # ["Go", "Go to Jail", "In Jail", "Free Parking"]
            if b['name'] == "Go":
                corner_block = block.Go(b['name'], b['block_id'],
                                        b['position'])
            elif b['name'] == "Go to Jail":
                corner_block = block.Go_To_Jail(b['name'], b['block_id'],
                                                b['position'])
            elif b['name'] == "In Jail":
                corner_block = block.In_Jail(b['name'], b['block_id'],
                                             b['position'])
            elif b['name'] == "Free Parking":
                corner_block = block.Free_Parking(b['name'], b['block_id'],
                                                  b['position'])
            else:
                pass
            block_list[corner_block.position] = corner_block
            corner_list.append(corner_block)
        elif b['name'] == "Community Chest":
            # "Community Chest"
            new_block = cardpile.Community_Chest(b['name'], b['block_id'],
                                                 b['position'])
            block_list[new_block.position] = new_block
            chest_block_list.append(new_block)
        elif b['name'] == "Chance":  # "Chance"
            new_block = cardpile.Chance(b['name'], b['block_id'],
                                        b['position'])
            block_list[new_block.position] = new_block
            chance_block_list.append(new_block)
        elif b['block_type'] == 3:
            # ["Income Tax", "Super Tax"]
            if b['name'] == "Income Tax":
                new_block = tax.Income_Tax(b['name'], b['block_id'],
                                           b['position'], 0.10)
            elif b['name'] == "Super Tax":
                new_block = tax.Super_Tax(b['name'], b['block_id'],
                                          b['position'], 0.10)
            else:
                pass
            block_list[new_block.position] = new_block
            tax_list.append(new_block)
    # name, position, uid, estate_value, status, street_id
    for s in station_list_data["data"]:
        new_block = station.Station(s['name'], s['block_id'], s['position'],
                                    s['uid'], s['estate_value'], s['status'])
        station_list.append(new_block)
        block_list[new_block.position] = new_block
        epic_bank.add_asset(new_block)
    # name, position, uid, estate_value, status, street_id
    for u in utility_list_data["data"]:
        new_block = utility.Utility(u['name'], u['block_id'], u['position'],
                                    u['uid'], u['estate_value'], u['status'])
        utility_list.append(new_block)
        block_list[new_block.position] = new_block
        epic_bank.add_asset(new_block)
    for e in estate_list_data["data"]:
        new_block = estate.Estate(e['name'], e['block_id'], e['position'],
                                  e['uid'], e['estate_value'], e['status'],
                                  e['street_id'], e['house_value'])
        estate_list.append(new_block)
        block_list[new_block.position] = new_block
        epic_bank.add_asset(new_block)

    # initialize players
    player_dict_data = json_reader(
        os.path.join(parent_addr, 'Data/player_list.json'))
    player_dict = {}
    player_dict_data = player_dict_data["data"]
    for i in range(len(player_dict_data)):
        p = player.Player(player_dict_data[i]['id'],
                          player_dict_data[i]['name'],
                          player_dict_data[i]['cash'],
                          player_dict_data[i]['alliance'])
        out_put_line = "%d %d %s %s" % (p.cash, p.id, p.name, p.alliance)
        operation.push2all(out_put_line)
        player_dict[player_dict_data[i]['id']] = p

    # initialize chest cards
    chest_list = []
    for chest in chest_list_data["data"]:
        # 0: Collection, 1: Collect_from_players
        if chest['card_type'] == 0 or chest['card_type'] == 1:
            chest_list.append(
                card.CollectCard(chest['card_id'], chest['card_type'],
                                 chest['description'], chest['amount']))
        elif chest['card_type'] == 2 or chest[
                'card_type'] == 3:  # 2: Pay, 3: Pay_for_repair
            # or chest['card_type'] == 8 8: Pay_to_players
            chest_list.append(
                card.PayCard(chest['card_id'], chest['card_type'],
                             chest['description'], chest['amount']))
        # elif chest['card_type'] == 4 or chest['card_type'] == 6:  # 4: Move_indicate_position, 6: Move_nearby
        #     chest_list.append(card.MoveCard(chest['card_id'], chest['card_type'], chest['description'],
        #                                     chest['block_id']))
        # elif chest['card_type'] == 7:  # Move
        #     chest_list.append(card.MoveCard(chest['card_id'], chest['card_type'], chest['description'],
        #                                     chest['steps']))
        elif chest['card_type'] == 5:  # Bailcard
            chest_list.append(
                card.BailCard(chest['card_id'], chest['card_type'],
                              chest['description']))

    # initialize chance cards
    chance_list = []
    for chance in chance_list_data["data"]:
        if chance['card_type'] == 0:  # 0: Collection
            # or chance['card_type'] == 1, 1: Collect_from_players
            chance_list.append(
                card.CollectCard(chance['card_id'], chance['card_type'],
                                 chance['description'], chance['amount']))
        elif chance['card_type'] == 2 or chance['card_type'] == 3 or chance[
                'card_type'] == 8:  # 2: Pay,
            # 3: Pay_for_repair
            # 8: Pay_to_players
            chance_list.append(
                card.PayCard(chance['card_id'], chance['card_type'],
                             chance['description'], chance['amount']))
        # 4: Move_indicate_position, 6: Move_nearby
        elif chance['card_type'] == 4 or chance['card_type'] == 6:
            chance_list.append(
                card.MoveCard(chance['card_id'], chance['card_type'],
                              chance['description'], chance['block_id']))
        elif chance['card_type'] == 7:  # Move
            chance_list.append(
                card.MoveCard(chance['card_id'], chance['card_type'],
                              chance['description'], chance['steps']))
        elif chance['card_type'] == 5:  # Bailcard
            chance_list.append(
                card.BailCard(chance['card_id'], chance['card_type'],
                              chance['description']))

    # initialize chess board
    two_block_street = []
    three_block_street = []
    for e in estate_list:
        if e.street_id == 1 or e.street_id == 8:
            two_block_street.append(e)
        else:
            three_block_street.append(e)
    chess_board_object = board.Board(two_block_street, three_block_street,
                                     station_list, utility_list, block_list,
                                     corner_list, chest_block_list,
                                     chance_block_list, tax_list)

    global data
    data['chess_board'] = block_list
    data['player_dict'] = player_dict
    data['epic_bank'] = epic_bank
    data['chest_list'] = chest_list
    data['chance_list'] = chance_list
    data['station_list'] = station_list
    data['utility_list'] = utility_list
    data['estate_list'] = estate_list
    data['corner_list'] = corner_list
    data['chest_block_list'] = chest_block_list
    data['chance_block_list'] = chance_block_list
    data['tax_list'] = tax_list
    return data
Example #16
 def __init__(self):
     self.utility_obj = utility.Utility()
     self.action_obj = Action()
Example #17
 def __init__(self):
     self.utilityobj = utility.Utility()
     return
Example #18
    progress = [14515, 47831, 96266, 99914, 360892, 138769, 776654, 210929]

    pol = [
        progress[0] / dolar, progress[1] / dolar, progress[2] / dolar,
        progress[3] / dolar, progress[4] / dolar, progress[5] / dolar,
        progress[6] / dolar, progress[7] / dolar
    ]

    pri = [47872, 113561]
    priori = [pri[0] / dolar, pri[1] / dolar]

    param0 = parameters.Parameters(alphas, betas, gammas, hw, porc, pro, pol,
                                   AEP, priori)

    model = util.Utility(param0, N, p1_0, p2_0, years, treatment, typeSchool,
                         HOURS, p1, p2, catPort, catPrueba, TrameI, priotity,
                         rural_rbd, locality)

    # SIMDATA SIMULATION

    simce_sims = np.zeros((N, n_sim))
    income_sims = np.zeros((N, n_sim))
    baseline_sims = np.zeros((N, n_sim, 2))

    for j in range(n_sim):
        modelSD = sd.SimData(N, model)
        opt = modelSD.choice()
        simce_sims[:, j] = opt['Opt Simce']
        income_sims[:, j] = opt['Opt Income'][1 - x]
        baseline_sims[:, j, 0] = opt['Potential scores'][0]
        baseline_sims[:, j, 1] = opt['Potential scores'][1]
Example #19
choices_list = []
cost_list = []
contribution_list = []
sd_matrix_list = []

for j in range(len(models_list)):
	output_ins=estimate.Estimate(nperiods,param0,x_w,x_m,x_k,x_wmk,passign,
		agech0,nkids0,married0,D,dict_grid,M,N,moments_vector,var_cov,hours_p,hours_f,
		models_list[j][0],models_list[j][1],models_list[j][2])

	hours = np.zeros(N) #arbitrary to initialize model instance
	childcare  = np.zeros(N)

	#obtaining values to normalize theta
	model = util.Utility(param0,N,x_w,x_m,x_k,passign,nkids0,married0,hours,
		childcare,agech0,
		hours_p,hours_f,models_list[j][0],models_list[j][1],models_list[j][2])

	np.random.seed(1)
	emax_instance = output_ins.emax(param0,model)
	choices = output_ins.samples(param0,emax_instance,model)
		
	#SD matrix
	ltheta = np.log(choices['theta_matrix'])
	
	sd_matrix = np.zeros((nperiods,M))
	for jj in range (M):
		for t in range(nperiods):
			sd_matrix[t,jj] = np.std(ltheta[:,t,jj],axis=0)
	
	sd_matrix_list.append(sd_matrix)
Example #20
import MODEL_Y1 as net  # Choose model architecture

#%%OTHER MODULES
from default import *
import matplotlib.pyplot as plt
from keras.callbacks import CSVLogger,ModelCheckpoint, EarlyStopping,Callback
from keras.preprocessing.image import (ImageDataGenerator, apply_transform,
                                       transform_matrix_offset_center,
                                       flip_axis)
import utility as utl

#Create net dependent Utility Object
# It will also be used during testing, as keras load_model() function expects the
# definition of custom functions used during compilation to be passed again
# during loading the saved model.
util_obj=utl.Utility({'load_model':net.load_keras_model})



#class to aid saving models at given interval.
class CheckPointManager(Callback):
    def __init__(self,model_folder,model_filename,interval=5):
        assert(model_filename[len(model_filename)-3:]=='.h5')
        self.file_loc=model_folder+"/"+model_filename[:-3]#
        self.interval=interval
        super(Callback, self).__init__()
    def on_epoch_end(self, epoch, logs={}):
        if((epoch+1)%self.interval==0):
            m_loc=self.file_loc+"_epoch"+str(epoch+1)+".h5"
            print("="*10,"Saving checkpoint:",m_loc,"="*10)
            self.model.save(m_loc)
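
The comment above refers to the standard Keras requirement that any custom functions used at compile time must be passed to load_model() again via its custom_objects argument. A hedged sketch of what a net.load_keras_model wrapper might look like is given below; my_custom_loss is a hypothetical stand-in for the real custom objects defined in the net module.

# Sketch only: the actual custom objects live in the net module (MODEL_Y1).
from keras.models import load_model
import keras.backend as K

def my_custom_loss(y_true, y_pred):
    # hypothetical custom loss registered at compile time
    return K.mean(K.square(y_true - y_pred))

def load_keras_model(path):
    # custom functions must be supplied again when loading a saved model
    return load_model(path, custom_objects={'my_custom_loss': my_custom_loss})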
Example #21
def elast_gen(bs, shocks):

    eta = bs[0]
    alphap = bs[1]
    alphaf = bs[2]

    #wage process
    wagep_betas = np.array([bs[3], bs[4], bs[5], bs[6], bs[7]]).reshape((5, 1))

    income_male_betas = np.array([bs[8], bs[9], bs[10]]).reshape((3, 1))
    c_emp_spouse = bs[11]

    #Production function [young[cc0,cc1],old]
    gamma1 = bs[12]
    gamma2 = bs[13]
    gamma3 = bs[14]
    tfp = bs[15]
    sigma2theta = 1

    kappas = [bs[16], bs[17]]

    sigma_z = [1, bs[18]]

    rho_theta_epsilon = bs[19]

    lambdas = [1, 1]

    #Re-defines the instance with parameters
    param = util.Parameters(alphap, alphaf, mu_c, eta, gamma1, gamma2, gamma3,
                            tfp, sigma2theta, rho_theta_epsilon, wagep_betas,
                            income_male_betas, c_emp_spouse, marriagep_betas,
                            kidsp_betas, eitc_list, afdc_list, snap_list, cpi,
                            fpl_list, lambdas, kappas, pafdc, psnap, mup,
                            sigma_z)

    #The estimate class
    output_ins = estimate.Estimate(nperiods, param, x_w, x_m, x_k, x_wmk,
                                   passign, agech0, nkids0, married0, D,
                                   dict_grid, M, N, moments_vector, var_cov,
                                   hours_p, hours_f, wr, cs, ws)

    hours = np.zeros(N)
    childcare = np.zeros(N)

    model_orig = util.Utility(param, N, x_w, x_m, x_k, passign, nkids0,
                              married0, hours, childcare, agech0, hours_p,
                              hours_f, wr, cs, ws)

    #Obtaining emax instance: this is fixed throughout the exercise
    emax_instance = output_ins.emax(param, model_orig)

    choices_c = {}
    models = []
    for j in range(2):
        np.save(
            '/home/jrodriguez/NH_HC/results/model_v2/experiments/NH/shock.npy',
            shocks[j])
        models.append(
            Shock(param, N, x_w, x_m, x_k, passign, nkids0, married0, hours,
                  childcare, agech0, hours_p, hours_f, wr, cs, ws))
        choices_c['Choice_' + str(j)] = output_ins.samples(
            param, emax_instance, models[j])

    #Computing changes in % employment for control group
    h_sim_matrix = []
    employment = []
    wages = []
    full = []
    for j in range(2):
        h_sim_matrix.append(choices_c['Choice_' + str(j)]['hours_matrix'])
        employment.append(choices_c['Choice_' + str(j)]['hours_matrix'] > 0)
        full.append(choices_c['Choice_' + str(j)]['hours_matrix'] == hours_f)
        wages.append(choices_c['Choice_' + str(j)]['wage_matrix'])

    #Extensive margin
    elast_extensive = np.zeros(M)
    for j in range(M):
        elast_periods = np.zeros(nperiods)

        for t in range(nperiods):

            elast_periods[t] = (np.mean(employment[1][:, t, j], axis=0) -
                                np.mean(employment[0][:, t, j], axis=0)) / (
                                    shocks[1] * np.mean(
                                        (employment[0][:, t, j]), axis=0))

        elast_extensive[j] = np.mean(elast_periods)

    #Intensive margin
    elast_intensive = np.zeros(M)
    for j in range(M):
        elast_periods = np.zeros(nperiods)

        for t in range(nperiods):
            sample = (employment[0][:, t, j] == 1)
            elast_periods[t] = np.mean(
                (h_sim_matrix[1][sample, t, j] -
                 h_sim_matrix[0][sample, t, j]),
                axis=0) / (shocks[1] *
                           np.mean(h_sim_matrix[0][sample, t, j], axis=0))

        elast_intensive[j] = np.mean(elast_periods)

    return {'Extensive': elast_extensive, 'Intensive': elast_intensive}
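
For reference, the extensive-margin elasticity computed in the first loop averages, over periods t, the quantity (mean(employment_shocked[:, t, j]) - mean(employment_base[:, t, j])) / (shock * mean(employment_base[:, t, j])); the intensive-margin elasticity applies the same ratio to hours, restricted to observations employed in the baseline simulation.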
Example #22
wr = 1
cs = 1
ws = 1

#The estimate class
output_ins = estimate.Estimate(nperiods, param0, x_w, x_m, x_k, x_wmk, passign,
                               agech0_a, agech0_b, d_childa, d_childb, nkids0,
                               married0, D, dict_grid, M, N, moments_vector,
                               var_cov, hours_p, hours_f, wr, cs, ws)

#The model (utility instance)
hours = np.zeros(N)
childcare = np.zeros(N)

model = util.Utility(param0, N, x_w, x_m, x_k, passign, nkids0, married0,
                     hours, childcare, childcare, agech0_a, agech0_b, d_childa,
                     d_childb, hours_p, hours_f, wr, cs, ws)

#Obtaining emax instance: this is fixed throughout the exercise
emax_instance = output_ins.emax(param0, model)

#Original choices (dictionary)
choices = output_ins.samples(param0, emax_instance, model)

#The E[Log] of consumption, leisure, and child care to normalize E[log theta]=0
ec = np.mean(np.mean(np.log(choices['consumption_matrix']), axis=2), axis=0)
hours_m = choices['hours_matrix'].copy()
boo_p = hours_m == hours_p
boo_f = hours_m == hours_f
boo_u = hours_m == 0
cc_a = choices['childcare_a_matrix'].copy()
Example #23
    def __init__(self):
        self.docker_obj = docker_ao.DockerAO()
        self.utility_obj = utility.Utility()
        self.config_obj = config.Config()
        self.action_obj = action.Action()
        self.internal_registry = ""
        self.reposync_file = 'reposync.conf'
        self.apporbit_repo = "http://repos.gsintlab.com/"
        self.ao_noarch = self.apporbit_repo + "release/noarch/"
        self.mist_url = self.apporbit_repo + "release/mist/master/run.jar"
        self.chef_url = self.apporbit_repo +\
            "1.5.1/chef-12.6.0-1.el7.x86_64.rpm"
        self.CWD = os.getcwd() + "/"

        self.PACKAGESDIR = self.CWD + "appOrbitPackages/"
        self.INFRADIR = self.CWD + "infra_images/"
        self.RPMSDIR = self.CWD + "appOrbitRPMs/"
        self.GEMDIR = self.CWD + "appOrbitGems/"
        self.NOARCHDIR = self.RPMSDIR + "noarch/"
        self.NOARCH507DIR = self.NOARCHDIR + "5.0.7/"
        self.MIST = self.RPMSDIR + "mist/"
        self.MISTMASTER = self.MIST + "master/"
        self.vmImportUrl = "http://repos.gsintlab.com/release/images/AO-PROXY.ova"
        self.VMIMPORT = self.RPMSDIR + "vmsvc/"

        self.AO_RESOURCE_TAR = 'appOrbitResources.tar'
        self.AO_PACKAGES_TAR = 'appOrbitPackages.tar.gz'
        self.AO_GEMS_TAR = 'appOrbitGems.tar.gz'
        self.AO_RPMS_TAR = 'appOrbitRPMs.tar.gz'

        self.apporbit_apps = {
            'moneyball-exports': 'moneyball-exports',
            'moneyball-api': 'moneyball-api',
            'moneyball-router': 'moneyball-router',
            'grafana-app': 'apporbit-grafana-app',
            'prometheus-app': 'apporbit-prometheus-app',
            'swagger-ui': 'apporbit-swagger-ui',
            'apporbit/hypervisor': 'apporbit/hypervisor',
            'apporbit/vmsvc/vdiskimport': 'apporbit/vmsvc/vdiskimport',
            'apporbit/vmsvc/vdiskproxy': 'apporbit/vmsvc/vdiskproxy'
        }

        self.support_packages = {
            self.ao_noarch + "nginx-1.6.3.tar.gz": self.NOARCHDIR,
            self.ao_noarch + "passenger-5.0.10.tar.gz": self.NOARCHDIR,
            self.ao_noarch + "5.0.7/agent-x86_64-linux.tar.gz":
            self.NOARCH507DIR,
            self.ao_noarch + "5.0.7/nginx-1.6.3-x86_64-linux.tar.gz":
            self.NOARCH507DIR,
            self.chef_url: self.RPMSDIR,
            self.mist_url: self.MISTMASTER,
            self.vmImportUrl: self.VMIMPORT
        }

        self.apps_insecure_reg = "apporbit-apps.apporbit.io:5000"

        self.hub_images = {
            'centos': 'centos:centos7.0.1406',
            'mysql': 'mysql:5.6.24',
            'registry': 'registry:2',
            'cadvisor': 'google/cadvisor:v0.23.2',
            'node-exporter': 'prom/node-exporter:0.12.0',
            'postgres': 'postgres',
            'redis': 'redis'
        }

        self.apporbit_images = {
            'apporbit-services': 'apporbit-services',
            'apporbit-controller': 'apporbit-controller',
            'apporbit-rmq': 'apporbit-rmq',
            'apporbit-docs': 'apporbit-docs',
            'apporbit-chef': 'apporbit-chef:2.0',
            'apporbit-consul': 'consul',
            'apporbit-locator': 'locator',
            'apporbit-svcd': 'svcd',
            'apporbit-captain': 'captain'
        }

        self.infra_containers = {
            'dnsmasq': 'dnsmasq:1.1',
            'kubedns': 'kubedns-amd64:1.5',
            'exechealthz': 'exechealthz-amd64:1.0',
            'kubernetes-dashboard': 'kubernetes-dashboard-amd64:v1.1.0',
            'pause': 'pause-amd64:3.0'
        }
Example #24
def main():
    CONF_FILE='setup.conf'
    if os.geteuid() != 0:
        sys.exit("You need to have root privileges to run this script.\nPlease try again, this time using 'sudo'. Exiting.")

    if not os.path.exists("/var/log/apporbit"):
        os.makedirs("/var/log/apporbit")

    logging.basicConfig(filename='/var/log/apporbit/apporbit-server.log', level=logging.DEBUG,
                         format='[ %(asctime)s  %(levelname)s ] %(message)s', datefmt='%Y-%m-%dT%H:%M:%S')

    # arguments parser
    parser = argparse.ArgumentParser(description='apporbit-server to manage apporbit server')
    parser.add_argument("-d","--deploychef",action='store_true', help='Deploy chef enable flag')
    parser.add_argument("-c","--consul", action='store_true', help='Deploy consul')
    parser.add_argument("--setuponly", action='store_true', help='Setup appOrbit Server')
    parser.add_argument("--start", action='store_true', help='Start appOrbit Server')
    parser.add_argument("--stop", action='store_true', help='Stop appOrbit Server')
    parser.add_argument("--kill", action='store_true', help='Forcefully stop appOrbit Server')
    parser.add_argument("--restart", action='store_true', help='Restart appOrbit Server')
    parser.add_argument("--pullimages", action='store_true', help='Pull new versions of appOrbit Server images')
    parser.add_argument("--removedata", action='store_true', help='Remove Data in appOrbit Server')
    parser.add_argument("--removeconfig", action='store_true', help='Remove Config in appOrbit Server')
    parser.add_argument("--removeall", action='store_true', help='Remove Data, Config and Keys in appOrbit Server')
    parser.add_argument("--upgrade", action='store_true', help='Upgrade Setup')
    parser.add_argument("--buildpackages", action='store_true', help='Fetch resources for offline installation')
    parser.add_argument("--setupprovider", action='store_true', help='Set up provider machine for offline installation')
    parser.add_argument("--deployoffline", action='store_true', help='Deploy apporbit on an offline host')
    parser.add_argument("--offline", action='store_true', help='Install apporbit host offline (guided)')
    parser.add_argument("--status", action='store_true', help='Show status of appOrbit Server')
    parser.add_argument("list",  nargs='*', help='List of components')
    args = parser.parse_args()
    if args.deploychef:
        chef_dep_obj = action.DeployChef()
        chef_dep_obj.deploy_chef()
        print "Chef Deployed"
        sys.exit(0)
    if args.consul:
        consul_obj = action.DeployConsul()
        consul_obj.deploy_consul()
        print "Consul Deployed"
        sys.exit(0)

    config_obj = config.Config()
    userinteract_obj = userinteract.UserInteract()
    utility_obj = utility.Utility()
    action_obj = action.Action()

    setupDone = False
    try:
        if (os.path.isfile(config_obj.apporbit_serverconf)
                and os.path.isfile(config_obj.composeFile)
                and utility_obj.isPreviousInstallSuccess()):
            setupRequired = False
            logging.info("Setup not required. Loading existing setup config")
            config_obj.loadConfig(config_obj.apporbit_serverconf)
        else:
            # This is the only visual clue that the product is not installed.
            print ("appOrbit server is not installed.")
            setupRequired = True
            if os.path.isfile(CONF_FILE):
                logging.info('Using ' + CONF_FILE + ' file for deployment')
                config_obj.loadConfig(CONF_FILE)
    except:
        #setupRequired = True
        raise
    skipSetup = False
    if args.upgrade and not setupRequired:
        logging.info("Requesting to upgrade configuration")
        if action_obj.showStatus(config_obj,show=False):
            logging.info("Stopping Server")
            action_obj.removeCompose(config_obj, show=True)
            logging.info("Stopped Server")
        logging.warning("Backing  appOrbit server setup configuration.")
        action_obj.backupSetupConfig(config_obj)
        action_obj.removeSetupConfig(config_obj)
        config_obj.upgrade = True
        setupRequired = True
    elif not args.setuponly and (args.stop or args.kill or args.status or args.removedata or args.removeconfig or args.removeall or args.buildpackages or args.setupprovider or args.deployoffline or args.offline):
        skipSetup = True

    if  args.setuponly or (setupRequired and not skipSetup):
        print ("apporbit-server will install/upgrade the appOrbit server in this machine")
        print ("Installation log will be in : /var/log/apporbit/apporbit-server.log")
        logging.info("Starting appOrbit Installation/upgrade")

        # Check all the system requirements.
        # Fail and exit if non-fixable requirements (e.g. hardware
        # requirements) are not satisfied.
        # Fail but do not exit for fixable requirements.
        utility_obj.loadTempFile(config_obj)
        if not config_obj.systemreqs:
            print "Verifying system information."
            with utility.DotProgress("Verify"):
                utility_obj.progressBar(0)
                utility_obj.preSysRequirements(config_obj)
                utility_obj.verifySystemInfo()
                logging.info("System info verification is completed!")

                # Fix all the fixable software requirements,
                # e.g. Docker startup and SELinux settings.
                if not utility_obj.fixSysRequirements():
                    logging.error("Unable to auto fix System Requirements.")
                    print "Unable to auto fix system Requirements. Check Log for details and fix it"
                    sys.exit(1)
                utility_obj.progressBar(20)
            logging.info("fix System Requirements is completed!")
            #config_obj.systemreqs = True
            print "   -- [Done]"
            utility_obj.createTempFile(config_obj)


        # If CONF_FILE is available it has already been loaded above;
        # otherwise proceed with the customer deployment flow.
        # In a regular customer deployment no config file is provided.

        if not os.path.isfile(CONF_FILE):
            logging.info("Starting to get user configuration.")
            # Get User Configuration for Customer Deployment
            # and write to a config file apporbit_deploy.conf
            userinteract_obj.getUserConfigInfo(config_obj, utility_obj)
            utility_obj.createTempFile(config_obj)

        # Validate that the apporbit_domain is not a hostname
        if config_obj.apporbit_domain and not utility_obj.validateDomain(config_obj.apporbit_domain):
            print "ERROR: Apporbit Domain cannot be ip or hostname"
            sys.exit(1)

        # Validate that the apporbit_host chosen during configuration belongs to the current host machine.
        if not utility_obj.validateHostIP(config_obj.apporbit_host):
            print "WARNING: Given Name/IP is not accessible publicly or on private network"
            if os.path.isfile(CONF_FILE):
                print "apporbit-server will proceed in 5 seconds with that Name/IP.. Break CTRL-C to stop"
                time.sleep(5)
            else:
                user_input = raw_input("Do you want to Abort(a) or continue(c) installation[c]:") or 'c'
                if user_input == "a" or user_input != "c":
                    sys.exit(1)
            print "Continuing .."

        # Setup configuration files
        print "\nConfiguring appOrbit setup"
        max_api_users = action_obj.calcMaxPhusionProcess()
        config_obj.setupConfig(utility_obj, max_api_users)

        print "Preparing and removing old containers for appOrbit server."
        with utility.DotProgress("Prepare"):
            utility_obj.progressBar(0)
            action_obj.predeployAppOrbit(config_obj)
            utility_obj.progressBar(20)
        print "   -- [Done]"

        if config_obj.remove_data:
            logging.warning("Removing old data")
            print "Removing old data for appOrbit server."
            action_obj.removeData(config_obj)

        try:
            if os.stat(config_obj.APPORBIT_DATA) and os.listdir(config_obj.APPORBIT_DATA):
                config_obj.initial_install = False
            else:
                config_obj.initial_install = True
        except:
            config_obj.initial_install = True

        if args.setuponly:
            utility_obj.removeTempFile()
            print "Requested setup only."
            print "Use apporbit-server --pullimages to pull images."
            print "Then use apporbit-server --start to start appOrbit server."
            return

        print "Download appOrbit Server container images"
        logging.info("Updating Images")
        with utility.DotProgress("Pull"):
            utility_obj.progressBar(0)
            action_obj.pullImages(config_obj)
            utility_obj.progressBar(20)
        print "   -- [Done]"


        print "Deploying appOrbit server."
        if 'update' in args.list or 'upgrade' in args.list:
            config_obj.upgrade = True
        with utility.DotProgress("Deploy"):
            utility_obj.progressBar(0)
            action_obj.deployAppOrbitCompose(config_obj)
            utility_obj.progressBar(20)
            utility_obj.removeTempFile()
        print "   -- [Done]"

        print "Waiting for appOrbit server to be active"
        with utility.DotProgress("Waiting"):
            utility_obj.progressBar(0)
            action_obj.waitForDeployment(config_obj,utility_obj)
            utility_obj.progressBar(20)
        print "   -- [Done]"

        print "Now login to the appOrbit server using"
        print "https://" + config_obj.apporbit_host 
        if args.upgrade:
            print "Please login using existing credentials"
        else:
            print "Login: "******"and default password 'admin1234'"
        logging.info("END OF DEPLOYMENT")

        logtimestamp = str(datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
        shutil.move('/var/log/apporbit/apporbit-server.log', '/var/log/apporbit/apporbit-server-' + logtimestamp + '.log')
        print "Server logs moved to /var/log/apporbit/apporbit-server-" + logtimestamp + '.log'

    elif args.pullimages:
        print "Update  appOrbit Server images"
        logging.info("Updating Images")
        with utility.DotProgress("PullImages"):
            utility_obj.progressBar(0)
            action_obj.pullImages(config_obj)
            utility_obj.progressBar(20)
        print "   -- [Done]"
    elif args.start: 
        print "Start appOrbit Server containers"
        if 'update' in args.list or 'upgrade' in args.list:
            config_obj.upgrade = True
        logging.info("Starting Server")
        action_obj.deployAppOrbitCompose(config_obj, show=True)
        print "[Done]"
        print "Waiting for appOrbit server to be active"
        with utility.DotProgress("Waiting"):
            utility_obj.progressBar(0)
            action_obj.waitForDeployment(config_obj,utility_obj)
            utility_obj.progressBar(20)
        print "   -- [Done]"
        print "Now login to the appOrbit server using"
        print "https://" + config_obj.apporbit_host 
    elif args.stop:
        print "Stop appOrbit Server containers"
        logging.info("Stopping Server")
        action_obj.removeCompose(config_obj, show=True)
        logging.info("Stopped Server")
        print " [Done]"
    elif args.kill:
        print "Stop and kill appOrbit Server containers"
        logging.info("Stopping and killing Server")
        action_obj.removeRunningContainers(config_obj, show=True)
        logging.info("Killed appOrbit Server")
        print "   -- [Done]"
    elif args.restart:
        complist = ' '.join(args.list)
        print "Restart appOrbit Server components:", complist
        logging.info("Restarting appOrbit Server components: %s" %complist)
        action_obj.restartAppOrbitCompose(config_obj, complist, show=True)
        logging.info("Restarted appOrbit Server")
        print " [Done]"
    elif args.removedata:
        logging.info("Requesting to remove data")
        if action_obj.showStatus(config_obj,show=False):
            print "Run apporbit-server --stop to stop containers before deleting data."
            logging.error("appOrbit server is running. Cannot delete data")
            return False
        else:
            logging.warning("REMOVING appOrbit server Volume data.")
            action_obj.removeData(config_obj)
            print "Removing appOrbit server data."
    elif args.removeconfig:
        logging.info("Requesting to remove setup configuration")
        if action_obj.showStatus(config_obj,show=False):
            print "Run apporbit-server --stop to stop containers before deleting setup configuration."
            logging.error("appOrbit server is running. Cannot delete setup configuration")
            return False
        else:
            logging.warning("REMOVING appOrbit server setup configuration.")
            action_obj.removeSetupConfig(config_obj)
            print "Removing appOrbit server setup configuration."
    elif args.removeall:
        logging.info("Requesting to remove all data, configuration, keys")
        if action_obj.showStatus(config_obj,show=False):
            print "Run apporbit-server --stop to stop containers before deleting setup configuration."
            logging.error("appOrbit server is running. Cannot delete all data, keys and configuration")
            return False
        else:
            logging.warning("REMOVING appOrbit server setup configuration, data and keys.")
            action_obj.removeSetupConfig(config_obj)
            action_obj.removeData(config_obj)
            action_obj.removeKeys(config_obj)
            print "Removing appOrbit server setup configuration, data and keys."
    elif args.status:
        # If product is not installed it will show above that it is not installed.
        # If it is installed then the next block will show the status of containers
        if os.path.isfile(config_obj.apporbit_serverconf):
            print "Showing status of appOrbit Server"
            action_obj.showStatus(config_obj, show=True)
    elif args.buildpackages:
        import resourcefetcher
        rf = resourcefetcher.ResourceFetcher()
        rf.fetch_resources()
    elif args.setupprovider:
        import provider
        ps = provider.Provider()
        ps.setup_provider()
        finalMessage = ("Copy apporbit-server and appOrbitPackages.tar.gz to "
              "appOrbit host\nand execute ./apporbit-server --deploy-offline")
        print finalMessage
    elif args.deployoffline:
        import offlinedeploy
        od = offlinedeploy.OfflineDeploy()
        od.deploy_apporbit()
    elif args.offline:
        import resourcefetcher, provider, offlinedeploy
        opt = raw_input("Are the resources fetched [y]/n : ") or "y"
        if str(opt).lower() in ['n', 'no']:
            rf = resourcefetcher.ResourceFetcher()
            rf.fetch_resources()
            sys.exit(1)
        opt = raw_input("Is the provider set up done [y]/n : ") or "y"
        if str(opt).lower() in ['n', 'no']:
            ps = provider.Provider()
            ps.setup_provider()
        opt = raw_input("Do you want to install appOrbit on this host [y]/n : ") or "y"
        if str(opt).lower() in ['y', 'yes']:
            od = offlinedeploy.OfflineDeploy()
            od.deploy_apporbit()
    elif args.list:
        # if I am here I have not given any valid option but did enter a list argument
        complist = ' '.join(args.list)
        print 'Invalid arguments: ' + complist + '\n\n'
        parser.print_help()
    else:
        # No options and no list and product is already installed. Just show the status and help command
        print "appOrbit server is already configured.\n"
        print "Run the installer again with --upgrade if you wish to upgrade.\n"
        print "For example:\n" 
        print "bash <(curl -s  http://repos.apporbit.com/install" +\
	    "/appOrbitKit/install.sh) --upgrade OR"
        print "/opt/apporbit/bin/apporbit-server --upgrade\n"
        action_obj.showStatus(config_obj,show=True)
        print ""
        parser.print_help()

    return
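
Given the argparse options defined in main(), and the root check at the top, typical invocations would look like the following (the script name matches the one used in the help strings; the flag combinations are illustrative):

    sudo ./apporbit-server --setuponly
    sudo ./apporbit-server --pullimages
    sudo ./apporbit-server --start
    sudo ./apporbit-server --status
    sudo ./apporbit-server --stop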
Example #25
    def process(self):
        #instantiate the utility file
        f = utility.Utility()
        ip = f.ip()
        hostname = f.host()
        #variable extracted from topic name
        subT = f.subtopic()
        #broker ip set in setup.config file
        BROKER = f.broker()
        DV = f.DynamicVariable()

        Message_Help = (
            "\nHOW TO USE THE MESSAGING PLUGIN"
            "\n"
            "\nThis plugin will send a message to target PC"
            "\n"
            "\nExample Topic: workstation/hostname or ip/parameter/message/"
            "\nExample message: Your Name / Message"
            "\n"
            "\nJohn Doe / We have a meeting at 10AM. Don't be late."
            "\n"
            "\nMary / Hi John, need you to work over time tomorrow.")

        while True:
            #set broker address
            broker = BROKER
            #set client, should be unique
            client = mqtt.Client(ip)
            #connect to broker
            client.connect(broker)
            #begin client loop
            client.loop_start()
            #publish information to sub-topic
            title = "Message"
            message1 = DV.split("/")[-1]
            sender1 = DV.split("/")[0]
            #getting the date and time
            date = datetime.now()
            dt_string = date.strftime("%d/%m/%Y %H:%M")

            if (subT == "help"):
                client.publish(f"workstation/{hostname}/n/message/help",
                               str(Message_Help), 2, False)

            elif (subT == ""):

                #os.system(f"Command=$(zenity --info --window-icon={icon} --title={title} --text='Sent from {sender} on {dt_string}\n\nMessage: {DV}' --width=300 --height=150); echo $Command")
                cmd = (
                    f"Command=$(zenity --display=:0.0 --info --title={title} --text='Sent from {sender1} on {dt_string}\n\nMessage: {message1}' --width=300 --height=150); echo $Command"
                )
                subprocess.check_output([cmd], shell=True)

                client.publish(f"workstation/{hostname}/n/message",
                               str("Okayy"), 2, False)
                #set log file contents
            setattr(f, "logText",
                    f"from {sender1} message: {message1} at {dt_string}")
            f.log()
            #sleep for 0.5 seconds
            time.sleep(0.5)
            #disconnect client
            client.disconnect()
            #quit loop
            exit()
Example #26
def run_single(bldg_id=1,
               saving_target=2,
               anio=3,
               r=4,
               space_type='Office',
               cached_weather=True,
               write_fim=True,
               write_model=True,
               return_data=False,
               use_default_benchmark_data=True,
               df_user_bench_stats_e=None,
               df_user_bench_stats_f=None):
    # Set paths
    s_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    data_path = s_path + '/Data/'
    report_path = s_path + '/outputs/'

    # Create an outputs directory if there isn't one.
    if not os.path.exists(report_path): os.makedirs(report_path)

    # Initialize a portfolio instance
    p = portfolio.Portfolio('Test')
    # p.read_raw_data_from_xlsx(data_path + 'portfolio.xlsx')
    p.read_raw_data_from_xlsx(data_path + 'portfolio.xlsx')

    # Get building data from the portfolio
    building_id = bldg_id
    building_info = p.get_building_info_by_id(building_id)
    if (building_info == None):
        return False, None
    else:
        building_test = building.Building(building_id, *building_info,
                                          saving_target)
        # Get utility data from portfolio
        df_raw_electricity = p.get_utility_by_building_id_and_energy_type(
            building_ID=building_id, energy_type=1, anio=anio)
        df_raw_fossil_fuel = p.get_utility_by_building_id_and_energy_type(
            building_ID=building_id, energy_type=2, anio=anio)
        df_raw_utility_e = df_raw_electricity
        df_raw_utility_f = df_raw_fossil_fuel
        utility_test_e = utility.Utility('electricity', df_raw_utility_e)
        utility_test_f = utility.Utility('fossil fuel', df_raw_utility_f)
        building_test.add_utility(utility_test_e, utility_test_f)
        weather_test_e = weather.Weather(building_test.coord)
        weather_test_f = weather.Weather(building_test.coord)
        building_test.add_weather(cached_weather, weather_test_e,
                                  weather_test_f)

        # Fit inverse model and benchmark
        has_fit = building_test.fit_inverse_model()
        # Continue only if there is at least one change-point model fit.
        if has_fit:
            if (use_default_benchmark_data):
                building_test.benchmark()
                building_test.ee_assess()
            else:
                # Note: the benchmark data sets are generated from the portfolio spreadsheet.
                # 1 ~ electricity; 2 ~ fossil fuel
                dict_raw_electricity = p.get_portfolio_raw_data_by_spaceType_and_utilityType(
                    space_type, utility_type=1)
                dict_raw_fossil_fuel = p.get_portfolio_raw_data_by_spaceType_and_utilityType(
                    space_type, utility_type=2)

                # Generate the benchmark stats from the user provided data in the portfolio spreadsheet
                if df_user_bench_stats_e is None:
                    df_user_bench_stats_e = p.generate_benchmark_stats_wrapper(
                        dict_raw_electricity, cached_weather)
                if df_user_bench_stats_f is None:
                    df_user_bench_stats_f = p.generate_benchmark_stats_wrapper(
                        dict_raw_fossil_fuel, cached_weather)

                building_test.benchmark(
                    use_default=False,
                    df_benchmark_stats_electricity=df_user_bench_stats_e,
                    df_benchmark_stats_fossil_fuel=df_user_bench_stats_f)
                building_test.ee_assess(
                    use_default=False,
                    df_benchmark_stats_electricity=df_user_bench_stats_e,
                    df_benchmark_stats_fossil_fuel=df_user_bench_stats_f)

            building_test.calculate_savings()
            building_test.plot_savings()
            building_test.disaggregate_consumption_wrapper()

            # Output to files
            # Save FIM to csv
            #if (hasattr(building_test, 'FIM_table_e')):
            #if write_model: building_test.coeff_out_e.to_csv(report_path + 'bldg_' + str(building_test.bldg_id) + "_Electricity Coeffs_out.csv")
            #if write_fim: building_test.FIM_table_e.to_csv(report_path + 'bldg_' + str(building_test.bldg_id) + "_Electricity FIM_recommendations.csv")
            #if (hasattr(building_test, 'FIM_table_f')):
            #if write_model: building_test.coeff_out_f.to_csv(report_path + 'bldg_' + str(building_test.bldg_id) + "_Fossil Fuel Coeffs_out.csv")
            #if write_fim: building_test.FIM_table_f.to_csv(report_path + 'bldg_' + str(building_test.bldg_id) + "_Fossil Fuel FIM_recommendations.csv")
            if r == 1:
                # Generate static HTML report
                report_building = report.Report(building=building_test)
                report_building.generate_building_report_beta(report_path)

            return True, building_test
        else:
            # No change-point model was fit; return the same failure signature as above
            return False, None
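Example #27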
else:
    from game import GuessGame

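# Seed every RNG source (PyTorch CPU and CUDA, NumPy, Python) so training runs are reproducible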
torch.manual_seed(args['seed'])
torch.cuda.manual_seed(args['seed'])
np.random.seed(args['seed'])
random.seed(args['seed'])

torch.backends.cudnn.deterministic = True

# @title Train
team = popGuessGame(args)
# get data
data = Dataset(args['numColors'], args['numShapes'], args['attrSize'])
train_np = data.getTrain()
util = utility.Utility(args, data)

sloss_l = np.zeros(args['trainIters'])
pop_rloss_l = np.zeros(args['trainIters'])  # record mean rloss
pop_acc_l = np.zeros(
    args['trainIters'])  # use mean rewards to calculate accuracy

entropy_l = np.zeros(args['trainIters'])

# easy-to-teach evaluation
evalAcc_l = np.zeros(
    (args['resetNum'] // 10, args['deterResetNums'], args['deterResetIter']))

dTopo = np.zeros(args['resetNum'] + 1)
dEntropy = np.zeros(args['resetNum'] + 1)
Example #28
    def ll(self, beta):
        """
		Takes structural parameters and computes the objective function for optimization 
		
		"""
        start_time = time.time()
        print('')
        print('')
        print('Beginning sample generator')

        def sym(a):
            return ((1 / (1 + np.exp(-a))) - 0.5) * 2

        #updating beta->parameters instance to compute likelihood.
        self.param0.eta = beta[0]
        self.param0.alphap = beta[1]
        self.param0.alphaf = beta[2]
        self.param0.betaw[0] = beta[3]
        self.param0.betaw[1] = beta[4]
        self.param0.betaw[2] = beta[5]
        self.param0.betaw[3] = np.exp(beta[6])
        self.param0.betaw[4] = beta[7]
        self.param0.beta_spouse[0] = beta[8]
        self.param0.beta_spouse[1] = beta[9]
        self.param0.beta_spouse[2] = np.exp(beta[10])
        self.param0.c_emp_spouse = beta[11]
        self.param0.gamma1 = beta[12]
        self.param0.gamma2 = beta[13]
        self.param0.gamma3 = beta[14]
        self.param0.tfp = beta[15]
        self.param0.kappas[0][0] = beta[16]
        self.param0.kappas[0][1] = beta[17]
        self.param0.kappas[0][2] = beta[18]
        self.param0.kappas[0][3] = beta[19]
        self.param0.kappas[1][0] = beta[20]
        self.param0.kappas[1][1] = beta[21]
        self.param0.kappas[1][2] = beta[22]
        self.param0.kappas[1][3] = beta[23]
        self.param0.rho_theta_epsilon = sym(beta[24])
        self.param0.rho_theta_ab = sym(beta[25])

        #The model (utility instance)
        hours = np.zeros(self.N)
        childcare = np.zeros(self.N)

        model = util.Utility(self.param0, self.N, self.x_w, self.x_m, self.x_k,
                             self.passign, self.nkids0, self.married0, hours,
                             childcare, childcare, self.agech0_a,
                             self.agech0_b, self.d_childa, self.d_childb,
                             self.hours_p, self.hours_f, self.wr, self.cs,
                             self.ws)

        ##obtaining emax instance##
        emax_instance = self.emax(self.param0, model)

        ##obtaining samples##

        choices = self.samples(self.param0, emax_instance, model)

        ###########################################################################
        ##Getting the betas of the auxiliary model#################################
        ###########################################################################
        dic_betas = self.aux_model(choices)

        time_opt = time.time() - start_time
        print('')
        print('')
        print('Done sample generation in')
        print("--- %s seconds ---" % (time_opt))
        print('')
        print('')
        start_time = time.time()
        print('Beginning aux model generator')

        #utility_aux: average the auxiliary-model betas across simulation draws
        beta_childcare = np.mean(dic_betas['beta_childcare'], axis=0)  #1x1
        beta_hours1 = np.mean(dic_betas['beta_hours1'], axis=0)  #1x1
        beta_hours2 = np.mean(dic_betas['beta_hours2'], axis=0)  #1x1
        beta_wagep = np.mean(dic_betas['beta_wagep'], axis=1)  # 6 x 1
        beta_kappas_t2 = np.mean(dic_betas['beta_kappas_t2'], axis=1)  #4 x 1
        beta_kappas_t5 = np.mean(dic_betas['beta_kappas_t5'], axis=1)  #4 x 1
        beta_inputs = np.mean(dic_betas['beta_inputs'], axis=1)  #4 x 1
        betas_init_prod = np.mean(dic_betas['betas_init_prod'], axis=1)  #1 x 1
        beta_wage_spouse = np.mean(dic_betas['beta_wage_spouse'], axis=1)
        beta_emp_spouse = np.mean(dic_betas['beta_emp_spouse'], axis=0)
        beta_theta_corr = np.mean(dic_betas['beta_theta_corr'], axis=0)

        ###########################################################################
        ####Forming the likelihood#################################################
        ###########################################################################

        #Number of moments to match
        num_par = (beta_childcare.size + beta_hours1.size + beta_hours2.size +
                   beta_wagep.size + beta_wage_spouse.size + beta_emp_spouse.size +
                   beta_kappas_t2.size + beta_kappas_t5.size + beta_inputs.size +
                   betas_init_prod.size + beta_theta_corr.size)

        #Stack the deviations (simulated auxiliary betas minus data moments) into a column vector
        x_vector = np.zeros((num_par, 1))

        x_vector[0:beta_childcare.size,
                 0] = beta_childcare - self.moments_vector[0, 0]

        ind = beta_childcare.size
        x_vector[ind:ind + beta_hours1.size,
                 0] = beta_hours1 - self.moments_vector[ind, 0]

        ind = ind + beta_hours1.size
        x_vector[ind:ind + beta_hours2.size,
                 0] = beta_hours2 - self.moments_vector[ind, 0]

        ind = ind + beta_hours2.size
        x_vector[ind:ind + beta_wagep.size,
                 0] = beta_wagep - self.moments_vector[ind:ind +
                                                       beta_wagep.size, 0]

        ind = ind + beta_wagep.size
        x_vector[ind:ind + beta_wage_spouse.size,
                 0] = beta_wage_spouse - self.moments_vector[
                     ind:ind + beta_wage_spouse.size, 0]

        ind = ind + beta_wage_spouse.size
        x_vector[ind:ind + beta_emp_spouse.size,
                 0] = beta_emp_spouse - self.moments_vector[
                     ind:ind + beta_emp_spouse.size, 0]

        ind = ind + beta_emp_spouse.size
        x_vector[ind:ind + beta_kappas_t2.size,
                 0] = beta_kappas_t2 - self.moments_vector[ind:ind +
                                                           beta_kappas_t2.size,
                                                           0]

        ind = ind + beta_kappas_t2.size
        x_vector[ind:ind + beta_kappas_t5.size,
                 0] = beta_kappas_t5 - self.moments_vector[ind:ind +
                                                           beta_kappas_t5.size,
                                                           0]

        ind = ind + beta_kappas_t5.size
        x_vector[ind:ind + beta_inputs.size,
                 0] = beta_inputs - self.moments_vector[ind:ind +
                                                        beta_inputs.size, 0]

        ind = ind + beta_inputs.size
        x_vector[ind:ind + betas_init_prod.size,
                 0] = betas_init_prod - self.moments_vector[
                     ind:ind + betas_init_prod.size, 0]

        ind = ind + betas_init_prod.size
        x_vector[ind:ind + beta_theta_corr.size,
                 0] = beta_theta_corr - self.moments_vector[
                     ind:ind + beta_theta_corr.size, 0]

        #The Q metric: quadratic form x'Wx of the moment deviations
        q_w = np.dot(np.dot(np.transpose(x_vector), self.w_matrix), x_vector)
        print('')
        print('The objective function value equals ', q_w)
        print('')

        time_opt = time.time() - start_time
        print('Done aux model generation in')
        print("--- %s seconds ---" % (time_opt))

        return q_w
Example #29
    def process(self):
        #instantiate the utility file
        f = utility.Utility()
        #get system information
        system = f.system()
        #system individual parts
        ip = f.ip()
        memory = f.ram()
        storage = f.storage()
        uptime = f.uptime()
        cpu = f.cpu()
        hostname = f.host()
        
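        #help text published on the "help" sub-topic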
        Report_Help = (
            "\nHOW TO USE THE REPORT PLUGIN"
            "\n"
            "\nThis plugin will retrieve status information of target PC"
            "\n"
            "\ntopic = workstation/hostname-or-ip/parameter/report/   <--- This returns all info"
            "\n"
            "\nIf you want individual values, simply add it after the slash, like this"
            "\n"
            "\nworkstation/hostname/r/report/cpu"
            "\nOR workstation/ip/r/report/uptime"
            "\n"
            "\nYou can use these options: cpu, memory, storage, uptime, hostname and ip"
        )
        
        #variable extracted from topic name
        subT = f.subtopic()
        #broker ip set in setup.config file
        BROKER = f.broker()

        while True:
            #set broker address    
            broker = BROKER
            #set client, should be unique
            client = mqtt.Client(ip)
            #connect to broker
            client.connect(broker)
            #begin client loop
            client.loop_start()
            #publish information to sub-topic
            if(subT == "ip"):
                client.publish(f"workstation/{hostname}/n/report/ip", str(ip), 2, False)

            elif(subT == "memory"):
                client.publish(f"workstation/{hostname}/n/report/memory", str(memory), 2, False)

            elif(subT == "storage"):
                client.publish(f"workstation/{hostname}/n/report/storage", str(storage), 2, False)

            elif(subT == "uptime"):
                client.publish(f"workstation/{hostname}/n/report/uptime", str(uptime), 2, False)

            elif(subT == "cpu"):
                client.publish(f"workstation/{hostname}/n/report/cpu", str(cpu), 2, False)

            elif(subT == "hostname"):
                client.publish(f"workstation/{hostname}/n/report/hostname", str(hostname), 2, False)

            elif(subT == ""):
                client.publish(f"workstation/{hostname}/n/report/system", str(system), 2, False)
            
            elif(subT == "help"):
                client.publish(f"workstation/{hostname}/n/report/help", str(Report_Help), 2, False)
            #set log file contents
            setattr(f, "logText", str(system))
            f.log()
            #sleep for 0.5 seconds
            time.sleep(0.5)
            #disconnect client
            client.disconnect()
            #exit the plugin process
            quit()
Example #30
betasw = [betas_opt[6],betas_opt[7]]
sigma2w_reg = 0.34
betastd = betas_opt[9]


T          = (24-8)*20  #monthly waking hours
Lc         = 8*20       #monthly cc hours
w_matrix   = np.identity(10)   #identity weighting matrix used in the estimation
times = 50
times_boot = 1000 



#------------ CALL CLASSES, ESTIMATION SIM & BOOTSTRAP ------------#
param0 = parameters.Parameters(betasw, betastd, betasn, sigma2n, sigma2w_reg, meanshocks, covshocks, T, Lc, alpha, gamma, times)
model     = util.Utility(param0, N, data)
model_sim = simdata.SimData(N, model)
model_boot = bstr.bootstrap(N, data)

moments_boot = model_boot.boostr(times_boot)
model_est = est.estimate(N, data, param0, moments_boot, w_matrix)


results_estimate = model_est.simulation(model_sim)


#------------ EXCEL TABLE ------------#

workbook = xlsxwriter.Workbook('data/labor_choice.xlsx')
worksheet = workbook.add_worksheet()