Example #1
  def __init__(self,appdir): 
    self.resolution=width,height=1024,768

    pygame.init()
#    pygame.mouse.set_visible(0)
    self.screen=pygame.display.set_mode(self.resolution,pygame.FULLSCREEN)
#    self.screen=pygame.display.set_mode(self.resolution)
  
    self.snd_startup=Tools.load_sound(appdir,"startup.ogg")
    self.snd_right=Tools.load_sound(appdir,"ok.ogg")
    self.snd_wrong=Tools.load_sound(appdir,"risa.ogg")
    self.snd_click=Tools.load_sound(appdir,"click.ogg")

    self.fnt_title = pygame.font.Font(None, 80)
    self.fnt_question = pygame.font.Font(None, 45)
    self.fnt_answer = pygame.font.Font(None, 35)
    self.fnt_score = pygame.font.Font(None, 32)

    self.color1=(50,90,90)
    self.color2=(255,218,70)
    self.color3=(250,250,255)
    self.color_right=(10,220,10)
    self.color_wrong=(220,10,10)

    self.background,bg_rect = Tools.load_image(appdir,"background.jpg")
    self.scoreboard_area=bg_rect
    self.scoreboard_area.top=700
      
    self.rect_a1=Rect(30,200,600,125)
    self.rect_a2=Rect(30,350,600,125)
    self.rect_a3=Rect(30,500,600,125)
Example #2
File: sml.py Project: Sreevis/sofa
def insertVisual(parentNode, solid, color):
    node = parentNode.createChild("node_"+solid.name)
    translation=solid.position[:3]
    rotation = Quaternion.to_euler(solid.position[3:])  * 180.0 / math.pi
    for m in solid.mesh:
        Tools.meshLoader(node, m.source, name="loader_"+m.id)
        node.createObject("OglModel",src="@loader_"+m.id, translation=concat(translation),rotation=concat(rotation), color=color)
Example #3
 def __init__(self, name, node1, node2, mask, compliance=0, index1=0, index2=0, isCompliance=True):
     self.node = node1.createChild(name)
     self.mask = mask
     self.dofs = self.node.createObject(
         "MechanicalObject", template="Vec6" + template_suffix, name="dofs", position="0 0 0 0 0 0"
     )
     input = []  # @internal
     input.append("@" + Tools.node_path_rel(self.node, node1) + "/dofs")
     if not node2 is None:
         input.append("@" + Tools.node_path_rel(self.node, node2) + "/dofs")
         self.mapping = self.node.createObject(
             "RigidJointMultiMapping",
             name="mapping",
             input=concat(input),
             output="@dofs",
             pairs=str(index1) + " " + str(index2),
             geometricStiffness=geometric_stiffness,
         )
         node2.addChild(self.node)
     else:
         self.mapping = self.node.createObject(
             "RigidJointMapping",
             name="mapping",
             input=concat(input),
             output="@dofs",
             pairs=str(index1) + " " + str(index2),
             geometricStiffness=geometric_stiffness,
         )
     self.constraint = GenericRigidJoint.Constraint(self.node, mask, compliance, isCompliance)
Example #4
def run_trials_for_patterns_per_output_on_subsets_sequential(patterns_per_output, distortion_P):
    # 20 trials per set size, 10 patterns per chaotically recalled output:
    for round_ctr in range(20):
        for set_size_ctr in range(2, 6):
            init_str = 'Performing perfect neocortical memory consolidation according to proposed distortion scheme ' \
                       'for SUBSETS, i.e. with catastrophic interference. ' + \
                       'Suite round#'+str(round_ctr)+'. Set size ='+str(set_size_ctr)+'.'
            print init_str
            Tools.append_line_to_log(init_str)

            ann.reset()
            training_set_10 = generate_training_set(
                set_size_ctr, training_patterns_associative, patterns_per_output=patterns_per_output,
                distortion_P=distortion_P)
            for subset_ctr in range(5):
                training_subset_10 = training_set_10[subset_ctr * set_size_ctr * patterns_per_output:
                    (subset_ctr + 1) * set_size_ctr * patterns_per_output]
                for i in range(15):  # training iterations
                    ann.train(training_subset_10)
            g_10 = NeocorticalMemoryConsolidation. \
                evaluate_goodness_of_fit(ann, training_patterns_associative[:2 * set_size_ctr])
            res_10_str = str(i+1) + ' training iterations, ' + str(patterns_per_output) + \
                         ' patterns per output, P='+str(distortion_P)+', goodness of fit, g=' + str(g_10)
            print res_10_str
            Tools.append_line_to_log(res_10_str)
Example #5
    def upgrade_hotel(self):
        os.system("clear") 
        print "Enter the corresponding number for the hotel you want to upgrade"
        i = 1
        for hotel in self.hc.hotels:
            print "Enter %d for %s" %(i,hotel.name)
            i += 1
        hotel = self.hc.hotels[int(Tools.get_option('Your choice: ',Tools.produce_hotel_option(self.hc)))-1]
        os.system("clear") 
        print "You chose to upgrade %s." %hotel.name
        print "Your %s is a %s now" %(hotel.name,hotel.level) 
        upgrade_options = Tools.upgrade_option(hotel)
        i = 0
        choice = []
        while i< len(upgrade_options):
            if upgrade_options[i] is None:
                print "Can't upgrade anymore."
                break
            else :
                print "Press %d to upgrade %s to a %s. " %(i+1,hotel.name,upgrade_options[i])
                choice.append(i+1)
                i += 1
        option = int(Tools.get_option("Enter your choice:",choice))-1
        cost_after_upgrade = Hotel(self.env,'test',upgrade_options[option],hotel.simpy_rooms['Queen Standard'].capacity,\
            hotel.simpy_rooms['King Standard'].capacity,hotel.simpy_rooms['Queen Deluxe'].capacity,\
            hotel.simpy_rooms['King Deluxe'].capacity).initial_cost()

        if self.player.buy_property(cost_after_upgrade - hotel.initial_cost(),"You don't have enough money to upgrade your hotel "):
            hotel.level = upgrade_options[option]
            print "Successfully upgraded your hotel to %s" %hotel.level
        yield self.env.timeout(0)  
Example #6
    def getAdjacentNodes(self, curnode, dest):
        """MUST BE IMPLEMENTED"""
        result = []
       
        cl = curnode.location
        dl = dest
        
        #Decompose the value of wall
        wall_decompose = Tools.sum_pow2(self.m[(cl.x,cl.y)]['wall'])
        list_wall = [1,2,4,8]
        for i in wall_decompose:
            list_wall.remove(i)
        #Test for each value if a wall exist
        if Tools.search_in_liste(list_wall,2):
            n = self._handleNode(cl.x+1,cl.y,curnode,dl.x,dl.y)
            result.append(n)

        if Tools.search_in_liste(list_wall,8):
            n = self._handleNode(cl.x-1,cl.y,curnode,dl.x,dl.y)
            result.append(n)
            
        if Tools.search_in_liste(list_wall,4):
            n = self._handleNode(cl.x,cl.y-1,curnode,dl.x,dl.y)
            result.append(n)
            
        if Tools.search_in_liste(list_wall,1):
            n = self._handleNode(cl.x,cl.y+1,curnode,dl.x,dl.y)
            result.append(n)

        return result
Example #7
def get_pos_dict(data):
    d = {}

    for sent in data:
        for word in sent:
            Tools.add_count_to_dict(d, word[1])
    return d
Example #8
  def compile(self, targetPath):
    # Collect the source and header files
    sources = []
    for item in self.config.get("sources", []):
      sources.append(self.collectItem(os.path.join(targetPath, "src"), item))

    headers = []
    for item in self.config.get("headers", []):
      if not isinstance(item, Config.Group):
        headers.append(self.collectItem(os.path.join(targetPath, "include"), item))

    # Prepare the code generator and its namespace
    namespace = {
      "targetName":  self.targetName,
      "projectName": self.name,
      "config":      self.config,
      "sources":     sources,
      "headers":     headers,
    }

    def generate(templatePath, outputPath):
      Generator.generate(templates = [Resource.getPath(*templatePath)],
                         namespace = namespace,
                         outputFile = open(os.path.join(*outputPath), "w"))

    # Create build files
    Tools.makePath(os.path.join(targetPath, "jni"))
    generate(["templates", "project", "android", "Android.mk.tmpl"],
             [targetPath, "jni", "Android.mk"])
    generate(["templates", "project", "android", "default.properties.tmpl"],
             [targetPath, "default.properties"])
Example #9
    def build_hotel_rooms(self):
        os.system("clear") 
        print "Enter the corresponding number for the hotel you want to build more rooms"
        i = 1
        for hotel in self.hc.hotels:
            print "Enter %d for %s" %(i,hotel.name)
            i += 1
        hotel = self.hc.hotels[int(Tools.get_option('Your choice: ',Tools.produce_hotel_option(self.hc)))-1]
        os.system("clear") 
        print "You chose to build more rooms for %s." %hotel.name
        print "%s now has %d Queen Standard rooms, %d King Standard rooms, \
        %d Queen Deluxe rooms and %d King Deluxe rooms" %(hotel.name,hotel.init_room_number['Queen Standard'],\
        hotel.init_room_number['King Standard'],hotel.init_room_number['Queen Deluxe'],\
        hotel.init_room_number['King Deluxe'])

        roomtype = hotel.ROOM_TYPES[int(Tools.get_option("""Which type of rooms would you like to build more? 
        Enter 1 for Queen Standard,
        Enter 2 for King Standard,
        Enter 3 for Queen Deluxe,
        Enter 4 for King Deluxe """,[1,2,3,4]))-1]
        
        m = "Sorry, you do not have enough money to build this many rooms." + '\n' + "With all your money,\
            you can only build %d rooms of this type" %(self.player.checking_account.balance/hotel.room_cost[roomtype])
        roomnumber = hotel.simpy_rooms[roomtype].capacity + 1
        upgrade_cost = self.player.checking_account.balance + 1
        while True :
            while roomnumber > (hotel.simpy_rooms[roomtype].capacity - hotel.init_room_number[roomtype]):
                message = "How many more rooms would you want to build ? Can only buy %d more" %(hotel.simpy_rooms[roomtype].capacity - hotel.init_room_number[roomtype])
                roomnumber = Tools.check_positive_valid_input(message)
            upgrade_cost = hotel.room_cost[roomtype]*roomnumber
            if self.player.buy_property(upgrade_cost,m):
                break   
        #change the record of room number for that type of room 
        hotel.init_room_number[roomtype] += roomnumber 
        yield hotel.simpy_rooms[roomtype].put(roomnumber)  #actually build the rooms
        print "Successfully built %d more %s rooms for your %s" %(roomnumber,roomtype,hotel.name)
Example #10
 def buyStock(self):
     os.system("clear")
     #could have a input box for user to enter 
     print "The stocks you can buy are :"
     print self.STOCKS.keys()
     print "Enter the ticker name to buy the stock you want"
     stock_name = Tools.get_option("Enter the name below",self.STOCKS.keys())
     print "You chose %s's stock" %stock_name
     current_date = Tools.current_date(self.start_date,self.env)
     stock = self.STOCKS[stock_name]
     check_history = Tools.check_confirm("Enter Y to check transaction history for this stock. Enter N to ignore and continue.")
     if check_history:
         stock.check_transaction()
     stock_price = float(stock.get_historical_prices(current_date,current_date))
     print "The current price of the stock is %f dollars per share. " %stock_price
     wanna_buy = Tools.check_confirm("Enter Y to continue buying this stock. Enter N to exit.")
     if wanna_buy:
         print "Please enter the number of shares you want to buy."
         amount =Tools.check_positive_valid_input("Enter your number below:")
         cost = amount*stock_price
         print "The cost for buying %d shares of %s stock is %f dollars" %(amount,stock_name,cost)
         print "The commission fee is 30 dollars." #also deduct the commission fee
         if self.player.buy_property(cost+30,"Sorry, you don't have enough money for buying the stock."):
             stock.buy_record(stock_price,amount)   #record this transaction
             stock.calculate_money(cost,True)   #calculate how much money we have spent on buying this stock so far
             stock.amount += amount #record how much stock you have bought
     else :
         return
Example #11
    def setOutputs(self, f):
        t = Tools.getTime()
        # control of tape change
        tape_input = self.inputs["Tape"].getValue(f)
        if tape_input != self.tapeName:
            self.index = 0
            self.t0 = Tools.getTime()
            self.outputs["finished"].setValue(0, f)
            self.number = self.number + 1
            self.tapeName = tape_input
            self.tape = []
            r = csv.DictReader(open(self.tapeName, "r"), delimiter=";")
            for row in r:
                self.tape.append(row)
        # control of outputs
        duration = float(self.tape[self.index]["Duration"])
        if (t - self.t0) >= duration and self.index <= len(self.tape) - 2:
            self.index = self.index + 1
            self.t0 = t
            self.number = self.number + 1
            print self.index
        elif (t - self.t0) >= duration and self.index >= len(self.tape) - 1:
            self.outputs["finished"].setValue(1, f)

        for name in self.tape[self.index]:

            self.outputs[name].setValue(self.tape[self.index][name], f)
        self.outputs["Number"].setValue(str(self.number), f)
        return f
Example #12
    def reset_hpc_module(self):
        dims = self.dims
        # ============== WEIGHT MATRICES ===================
        input_ec_weights = Tools.binomial_f(dims[0], dims[1], self.connection_rate_input_ec)
        self.update_input_ec_weights(input_ec_weights)

        # randomly assign all weights between the EC and CA3
        ec_ca3_weights = np.random.normal(0.5, np.sqrt(0.25), (dims[1], dims[3])).astype(np.float32)

        # randomly assign about 25 % of the weights to a random connection weight
        ec_dg_weights = Tools.binomial_f(dims[1], dims[2], self.PP) * np.random.normal(0.5, 0.5, (dims[1], dims[2])).astype(np.float32)

        # randomly assign about 4 % of the weights to random connection weights
        dg_ca3_weights = Tools.binomial_f(dims[2], dims[3], self.MF) * \
                         np.random.normal(0.9, np.sqrt(0.01), (dims[2], dims[3])).astype(np.float32)  # elemwise

        # randomly assign 100 % of the weights between CA3 and CA3
        ca3_ca3_weights = np.random.normal(0.5, np.sqrt(0.25), (dims[3], dims[3])).astype(np.float32)

        # random weight assignment, full connection rate CA3-out
        ca3_output_weights = np.random.normal(0., np.sqrt(0.5), (dims[3], dims[4])).astype(np.float32)

        self.update_ec_dg_Ws(ec_dg_weights)
        self.update_ec_ca3_Ws(ec_ca3_weights)
        self.update_dg_ca3_Ws(dg_ca3_weights)
        self.update_ca3_ca3_Ws(ca3_ca3_weights)
        self.update_ca3_out_Ws(ca3_output_weights)

        self.reset_eta_and_zeta_values()
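The binomial masking used above (Tools.binomial_f presumably returns a 0/1 matrix drawn at the given connection rate) can be reproduced with plain NumPy. A minimal sketch with made-up layer sizes, not the project's actual dimensions:

import numpy as np

def binomial_mask(rows, cols, connection_rate):
    # 0/1 mask: each entry is 1 with probability connection_rate
    return np.random.binomial(1, connection_rate, (rows, cols)).astype(np.float32)

# e.g. roughly 25 % of EC-DG weights non-zero, drawn from N(0.5, 0.5**2)
ec_size, dg_size, PP = 240, 1600, 0.25  # illustrative sizes only
ec_dg_weights = binomial_mask(ec_size, dg_size, PP) * \
                np.random.normal(0.5, 0.5, (ec_size, dg_size)).astype(np.float32)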
Example #13
 def WeeklyReport(self):
     while G.reported != round(self.env.now/7) :
         G.reported = round(self.env.now/7) 
         yield self.env.timeout(0)
         yield self.env.process(Tools.weekly_report_notice(self.env))
         print "Week %d has passed." %math.ceil(G.reported)  #announce the current week
         #hand control back to simpy
         self.revenue_report()
         #check if we need to pay back loan
         print G.tillPay
         print self.player.loan
         if G.tillPay == 0  and self.player.loan != 0:
             self.bc.pay_loan() #has to pay loan before doing other stuff
         if G.tillWithdraw !=0 :
             G.tillWithdraw -= 1 
         if G.tillPay != 0:
             G.tillPay -= 1   #loan payment countdown
         while True:
             choice = int(Tools.get_option("Enter 1 for hotel upgrade. Enter 2 to check out bank. Enter 3 to check out stock. Enter 4 to skip and continue.",[1,2,3,4]))
             os.system("clear") #clear screen
             if choice == 1:
                 yield self.env.process(self.hotel_upgrade())
             if choice == 2:
                 #ToDo : add in the bank part 
                 yield self.env.process(self.bank_business())
             if choice == 3:
                 yield self.env.process(self.stock_business())
             if choice == 4 :
                 break
         
         print "Done weekly report." +'\n' + "Starting week %d " %(G.reported+1)
         Tools.Continue()
Example #14
 def __del__(self):
     Project.Project.__del__(self)
     
     if self.success:
         Tools.deleteIfExists( self.tmpFolder )
     
     Tools.banner( 'Releasing finished (%s)' % ( 'Ok' if self.success else 'Fails' ) )
Example #15
    def convolution_Spiketrain_basisfunctions(self, spks, T, dt):
        
        """
        Filter spike train spks with the set of rectangular basis functions defining the Filter.
        Since all the basis functions have the same width, the calculation can be made efficient by filtering just once and shifting.
        """
        
        T_i     = int(T/dt)
        
        bins_i  = Tools.timeToIndex(self.bins, dt)      
        spks_i  = Tools.timeToIndex(spks, dt)   
        nb_bins = self.getNbOfBasisFunctions()
        
        X = np.zeros( (T_i, nb_bins) )
        
        # Filter the spike train with the first rectangular function (for the others, simply shift the solution)
        tmp = np.zeros( T_i + bins_i[-1] + 1)
            
        for s in spks_i :
            lb = s + bins_i[0]
            ub = s + bins_i[1]
            tmp[lb:ub] += 1
          
        tmp = tmp[:T_i]   

        # Fill the matrix by shifting the vector tmp
        for l in np.arange(nb_bins) :
            tmp_shifted = np.concatenate( ( np.zeros( int(bins_i[l]) ), tmp) )
            X[:,l] = tmp_shifted[:T_i]
                    
        return X
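The filter-once-and-shift trick described in the docstring above can be checked in isolation. A small self-contained sketch with a toy spike train and equally wide rectangular bins (all values are illustrative):

import numpy as np

dt, T = 1.0, 20.0                    # ms
T_i = int(T / dt)
bins_i = np.array([0, 3, 6, 9])      # rectangle edges in samples, equal widths
spks_i = np.array([2, 7, 12])        # spike indices
nb_bins = len(bins_i) - 1

# filter the spike train with the first rectangle only ...
tmp = np.zeros(T_i + bins_i[-1] + 1)
for s in spks_i:
    tmp[s + bins_i[0]: s + bins_i[1]] += 1
tmp = tmp[:T_i]

# ... and obtain the other basis-function columns by shifting
X = np.zeros((T_i, nb_bins))
for l in range(nb_bins):
    X[:, l] = np.concatenate((np.zeros(bins_i[l]), tmp))[:T_i]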
Example #16
    def convolution_Spiketrain_basisfunctions(self, spks, T, dt):
        
        T_i     = int(T/dt)
        
        bins_i  = Tools.timeToIndex(self.bins, dt)      
        spks_i  = Tools.timeToIndex(spks, dt)   
        nb_bins = self.getNbOfBasisFunctions()
        
        X = np.zeros( (T_i, nb_bins) )
        
        # Filter the spike train with the first rectangular function (for the others, simply shift the solution)
        tmp = np.zeros( T_i + bins_i[-1] + 1)
            
        for s in spks_i :
            lb = s + bins_i[0]
            ub = s + bins_i[1]
            tmp[lb:ub] += 1
          
        tmp = tmp[:T_i]   

        # Fill the matrix by shifting the vector tmp
        for l in np.arange(nb_bins) :
            tmp_shifted = np.concatenate( ( np.zeros( int(bins_i[l]) ), tmp) )
            X[:,l] = tmp_shifted[:T_i]
                    
        return X
Example #17
    def convolution_Spiketrain_basisfunctions(self, spks, T, dt):
        
        """
        Filter spike train spks with the set of rectangular basis functions defining the Filter.
        """
        
        T_i     = int(T/dt)
                       
        bins_i = Tools.timeToIndex(self.bins, dt)
        spks_i = Tools.timeToIndex(spks, dt)
 
        nb_bins = self.getNbOfBasisFunctions()
        
        X = np.zeros( (T_i, nb_bins) )
        
        # Fill matrix
        for l in np.arange(nb_bins) :
                        
            tmp = np.zeros( T_i + bins_i[-1] + 1 )
            
            for s in spks_i :
                lb = s + bins_i[l]
                ub = s + bins_i[l+1]
                tmp[lb:ub] += 1
            
            X[:,l] = tmp[:T_i]
        
        
        return X
Example #18
def iterate_over_experiments_suite_span_output_demo_local(start_index, stop_index):
    ann = NeocorticalNetwork(49, 30, 49, 0.01, 0.9)

    for exp_index in range(start_index, stop_index):
        current_chaotic_patterns, current_pseudopatterns = \
            Tools.retrieve_patterns_for_consolidation(exp_index, exp_index%4 + 2)  # 2-5 looped
        training_set = []

        t0 = time.time()
        ann.reset()
        for cp_subset in current_chaotic_patterns:
            training_subset = []
            for cp in cp_subset:
                training_subset.append([cp[1], cp[1]])
            for i in range(15):
                ann.train(training_subset)

        results_line = 'Neocortical module consolidation. Output as IO. Exp#'+str(exp_index)+\
                       '\n'+str(i+1)+' iters: g='+str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index%4+2)))

        ann.reset()
        for cp_subset in current_chaotic_patterns:
            training_subset = []
            for cp in cp_subset:
                training_subset.append([cp[1], cp[1]])
            for i in range(200):
                ann.train(training_subset)

        results_line += '\n'+str(i+1)+' iters: g=' + str(evaluate_goodness_of_fit(ann, get_target_patterns(exp_index % 4 + 2)))
        t1 = time.time()
        print 'Trained and evaluated performance in '+'{:8.3f}'.format(t1-t0), 'seconds'
        print results_line
        Tools.append_line_to_log(results_line)

    return ann
Example #19
    def displayMap(self, map):
        """ Display the map
        :param map: Map() object to be displayed
        """
        # Display point cloud
        points = Tools.rotate(map.cloud, map.bot_ori[-1])
        points = Tools.translate(points, map.bot_pos[-1])
        self.displayCloud(points, '.b')

        #Display beacon
        b = map.BEACON
        self.displayCloud(b, 'Hr')

        #Display robot

        bot = Tools.rotate(self.ROBOT_SHAPE, map.bot_ori[-1]) + numpy.array([map.bot_pos[-1][0], map.bot_pos[-1][1]])

        plt.plot(bot.astype(int).transpose()[0], bot.astype(int).transpose()[1])
        plt.plot(map.bot_pos[-1][0], map.bot_pos[-1][1], '.b')

        #Display obstacles
        obs = [map.obstacles[i].center for i in range(len(map.obstacles))]
        obs = Tools.rotate(obs,  map.bot_ori[-1])
        obs = Tools.translate(obs, map.bot_pos[-1])
        self.displayCloud(obs, '*k')
Example #20
 def neuronal_turnover_helper_ec_dg(self, column_index):
     # DG neuron connections are rewired.
     # for every neuron in ec, rewire its weights to this neuron - that means ONE row in the weights matrix!
     weights_row_connection_rate_factor = Tools.binomial_f(1, self.dims[1], self.PP)
     # multiply with random weights:
     # weights_vector = uniform_f(1, self.dims[1]) * weights_row_connection_rate_factor
     weights_vector = Tools.random_f(1, self.dims[1]) * weights_row_connection_rate_factor
     self.update_ec_dg_weights_column(column_index, weights_vector[0])
Example #21
 def __init__(self, node, name, node1, node2, compliances=[0,0,0,0,0,0], index1=0, index2=0):
         self.node = node.createChild( name )
         self.dofs = self.node.createObject('MechanicalObject', template='Vec6d', name='dofs', position='0 0 0 0 0 0' )
         input = [] # @internal
         input.append( '@' + Tools.node_path_rel(self.node,node1) + '/dofs' )
         input.append( '@' + Tools.node_path_rel(self.node,node2) + '/dofs' )
         self.mapping = self.node.createObject('RigidJointMultiMapping', template='Rigid,Vec6d', name='mapping', input=concat(input), output='@dofs', pairs=str(index1)+" "+str(index2))
         self.constraint = CompleteRigidJoint.Constraint( self.node, compliances ) # the constraint compliance cannot be in the same branch as eventual limits...
Example #22
def get_unigram_dict(data):
    d = {}

    for sent in data:
        for word in sent:
            Tools.add_count_to_dict(d, word[0])

    return d
Example #23
 def __init__(self, node, name, node1, node2, compliance=0, index1=0, index2=0, rest_lenght=-1 ):
     self.node = node.createChild( name )
     self.dofs = self.node.createObject('MechanicalObject', template='Rigid', name='dofs' )
     input = [] # @internal
     input.append( '@' + Tools.node_path_rel(self.node,node1) + '/dofs' )
     input.append( '@' + Tools.node_path_rel(self.node,node2) + '/dofs' )
     self.mapping = self.node.createObject('SubsetMultiMapping', template='Rigid,Rigid', name='mapping', input = concat(input), output = '@dofs', indexPairs="0 "+str(index1)+" 1 "+str(index2) )
     self.constraint = DistanceRigidJoint.Constraint(self.node, compliance, rest_lenght)
Example #24
 def neuronal_turnover_helper_dg_ca3(self, row_index):
     # DG neuron connections are rewired.
     # for every neuron in dg, rewire its weights to all neurons of ca3
     weights_row_connection_rate_factor = Tools.binomial_f(1, self.dims[3], self.MF)
     # multiply with random weights:
     # weights_vector = uniform_f(1, self.dims[3]) * weights_row_connection_rate_factor
     weights_vector = Tools.random_f(1, self.dims[3]) * weights_row_connection_rate_factor
     self.update_dg_ca3_weights_row(row_index, weights_vector[0])
Example #25
def init(img_off, img_on, mask):
    global g_img_buffer
    global g_mask

    g_img_buffer["off"] = Tools.encode_img(img_off)
    g_img_buffer["on"] = Tools.encode_img(img_on)

    g_mask = mask
Example #26
 def __init__(self, node, name, node1, node2, mask, compliance=0, index1=0, index2=0):
         self.node = node.createChild( name )
         self.dofs = self.node.createObject('MechanicalObject', template = 'Vec6d', name = 'dofs', position = '0 0 0 0 0 0' )
         input = [] # @internal
         input.append( '@' + Tools.node_path_rel(self.node,node1) + '/dofs' )
         input.append( '@' + Tools.node_path_rel(self.node,node2) + '/dofs' )
         self.mapping = self.node.createObject('RigidJointMultiMapping', template = 'Rigid,Vec6d', name = 'mapping', input = concat(input), output = '@dofs', pairs = str(index1)+" "+str(index2))
         self.constraint = GenericRigidJoint.Constraint( self.node, mask, compliance )
Example #27
 def __init__(self, node, name, node1, node2, stiffnesses=[0,0,0,0,0,0], index1=0, index2=0):
         self.node = node.createChild( name )
         self.dofs = self.node.createObject('MechanicalObject', template = 'Vec6d', name = 'dofs', position = '0 0 0 0 0 0' )
         input = [] # @internal
         input.append( '@' + Tools.node_path_rel(self.node,node1) + '/dofs' )
         input.append( '@' + Tools.node_path_rel(self.node,node2) + '/dofs' )
         self.mapping = self.node.createObject('RigidJointMultiMapping', template = 'Rigid,Vec6d', name = 'mapping', input = concat(input), output = '@dofs', pairs = str(index1)+" "+str(index2))
         compliances = vec.inv(stiffnesses);
         self.compliance = self.node.createObject('DiagonalCompliance', template="Vec6d", name='compliance', compliance=concat(compliances), isCompliance=0)
Example #28
File: sml.py Project: Sreevis/sofa
def parseData(xmlData):
    """ return the list of data in xmlData
    """
    if xmlData.attrib["type"]=="float":
        return Tools.strToListFloat(xmlData.text)
    elif xmlData.attrib["type"]=="int":
        return Tools.strToListInt(xmlData.text)
    elif xmlData.attrib["type"]=="string":
        return xmlData.text.split()
Example #29
    def plotAverageModel(cls, iGIFs):


        """
        Average model parameters and plot summary data.
        """

        GIF.plotAverageModel(iGIFs)


        # NONLINEAR THRESHOLD COUPLING
        #######################################################################################################
        plt.subplot(2,4,4)
                    
        K_all = []
        
        plt.plot([-80, -20],[-80,-20], ls='--', color='black', lw=2, zorder=100)   
        
        for iGIF in iGIFs :
                
            (K_support, K) = iGIF.getNonlinearCoupling()
       
            plt.plot(K_support, K, color='0.3', lw=1, zorder=5)
            
            K_all.append(K)

        K_mean = np.mean(K_all, axis=0)
        K_std  = np.std(K_all, axis=0)
        
        plt.fill_between(K_support, K_mean+K_std,y2=K_mean-K_std, color='gray', zorder=0)
        plt.plot(K_support, np.mean(K_all, axis=0), color='red', lw=2, zorder=10)  
            
           
        plt.xlim([-80,-20])
        plt.ylim([-65,-20])
        Tools.removeAxis(plt.gca(), ['top', 'right'])
        plt.xlabel('Membrane potential (mV)')
        plt.ylabel('Threshold coupling (mV)')  
 

        # tau_theta
        #######################################################################################################
        plt.subplot(4,6,12+4)
 
        p_all = []
        for iGIF in iGIFs :
                
            p = iGIF.theta_tau
            p_all.append(p)
            
        plt.hist(p_all, histtype='bar', color='red', ec='white', lw=2)
        plt.xlabel('tau theta (ms)')        
        Tools.removeAxis(plt.gca(), ['top', 'left', 'right'])
        plt.yticks([])     

        plt.show()
Example #30
 def __init__(self, name, node1, node2, compliance=0, index1=0, index2=0 ):
     self.node = node1.createChild( name )
     self.dofs = self.node.createObject('MechanicalObject', template = 'Vec6'+template_suffix, name = 'dofs', position = '0 0 0 0 0 0' )
     input = [] # @internal
     input.append( '@' + Tools.node_path_rel(self.node,node1) + '/dofs' )
     input.append( '@' + Tools.node_path_rel(self.node,node2) + '/dofs' )
     self.mapping = self.node.createObject('RigidJointMultiMapping', name = 'mapping', input = concat(input), output = '@dofs', pairs = str(index1)+" "+str(index2),
                                           geometricStiffness = geometric_stiffness)
     self.compliance = self.node.createObject('UniformCompliance', name='compliance', compliance=compliance)
     node2.addChild( self.node )
Example #31
	def __init__(self, settings, modelParms):
		self.aSet = settings
		self.logger = Tools.loggedPrint.instance()
		self.modelParms = modelParms
		self.dataDir = settings.fetch("datadir") + '/' + settings.fetch("modeldata")
		self.wrfDir = settings.fetch("wrfdir")
		self.startTime = settings.fetch("starttime")
		# Copy important files to the directory
		Tools.popen(self.aSet, "cp " + settings.fetch("headdir") + "run_files/* " + self.wrfDir + '/' + self.startTime[0:8] + "/output")
		# Copy required WRF files
		Tools.popen(self.aSet, "cp " + self.aSet.fetch("wrfrunfiles") + "* " + self.wrfDir + '/' + self.startTime[0:8] + "/output")
		# Move the generated files to the run directory		
		Tools.popen(self.aSet, "mv namelist.input " + self.wrfDir + '/' + self.startTime[0:8] + "/output")	
Example #32
    def basePartial( self, xdata, params, parlist=None ):
        """
        Returns the partials at the input values.

        Parameters
        ----------
        xdata : array_like
            values at which to calculate the result
        params : array_like
            values for the parameters.
        parlist : array_like
            list of indices active parameters (or None for all)

        """
        np = self.npbase if parlist is None else len( parlist )
        partial = numpy.ndarray( ( Tools.length( xdata ), np ) )

        x = math.pi * ( xdata[:,0] * params[2] + params[3] + params[4] * xdata[:,1] )
        sx = numpy.sin( x )
        s2 = sx * sx

        p1 = params[1] * params[1]
        dd = 1.0 / ( 1 + p1 * s2 )
#        dd = 1.0 / ( 1 + params[1] * s2 )

        d2 = dd * dd
        p3 = - 2 * math.pi * params[0] * p1 * sx * numpy.cos( x ) * d2
#        p3 = - 2 * math.pi * params[0] * params[1] * sx * numpy.cos( x ) * d2

        parts = { 0 : ( lambda: dd ),
                  1 : ( lambda: - 2 * params[0] * params[1] * s2 * d2 ),
#                  1 : ( lambda: - params[0] * s2 * d2 ),
                  3 : ( lambda: p3 ),
                  2 : ( lambda: xdata[:,0] * p3 ),
                  4 : ( lambda: xdata[:,1] * p3 ) }

        if parlist is None :
            parlist = range( self.npmax )

        for k,kp in enumerate( parlist ) :
            partial[:,k] = parts[kp]()

        return partial
Example #33
def generate_rigid(filename, density = 1000.0, scale=[1,1,1], rotation=[0,0,0], rigidFilename=None):

        # TODO bind GenerateRigid
        # - faster than writing in a file
        # - more robust (if several processes try to work in the same file)

        if rigidFilename is None:
            tmpfilename = Tools.path( __file__ ) +"/tmp.rigid"
        else:
            tmpfilename = rigidFilename

        cmdRel = [ 'GenerateRigid', filename, tmpfilename, str(density), str(scale[0]), str(scale[1]), str(scale[2]), str(rotation[0]), str(rotation[1]), str(rotation[2]) ]
        cmd = list(cmdRel)
        cmd[0] = Sofa.build_dir() + '/bin/' + cmd[0]
        #print cmd
                         
        try:
            output = Popen(cmd, stdout=PIPE)
        except OSError:
            # try the debug version
            cmd[0] += 'd'
            try:
                output = Popen(cmd, stdout=PIPE)
            except OSError:
                try:
                    # try if it is accessible from PATH
                    output = Popen(cmdRel, stdout=PIPE)
                except OSError:
                    # try the debug version
                    cmdRel[0] += 'd'
                    try:
                        output = Popen(cmdRel, stdout=PIPE)
                    except OSError:
                        print 'error when calling GenerateRigid, do you have GenerateRigid built in SOFA?'
                        raise

        output.communicate() # wait until Popen command is finished!!!
        return read_rigid(tmpfilename)
Example #34
    def result( self, xdata, *fitpar ) :
        """
        Result method to make connection to the scipy optimizers

        Parameters
        ----------
        xdata : array_like
            input data
        fitpar : tuple of float
            parameters for the model
        """
        params = Tools.toArray( fitpar, dtype=float, ndim=1 )
        params = self.insertParameters( params )
        res = self.model.result( xdata, params )
        if hasattr( self, "normdfdp" ) :
            extra = numpy.inner( self.normdfdp, params )
            res = numpy.append( res, [extra] )

        return res
Example #35
def generateBenchmark(config,format):
    NBSAMPLES = 256
    data1=np.random.randn(NBSAMPLES)
    data2=np.random.randn(NBSAMPLES)
    
    data1 = Tools.normalize(data1)
    data2 = np.abs(data1)

    if format==31:
       data1=floatRound(data1,31)

    if format==15:
       data1=floatRound(data1,15)

    if format==7:
       data1=floatRound(data1,7)

    config.writeInput(1, data1,"InputBench")
    config.writeInput(2, data2,"InputBench")
Example #36
 def convolution_SpikeTrain(self, spks, T, dt):
     
     """
     Compute and return the convolutional integral between a spiking input spks of duration T and the Filter.
     spks  : in ms, spike times
     T     : in ms, duration of the experiment
      dt    : in ms, time step
     """
     
     spks_i = Tools.timeToIndex(spks, dt)        
     (t,F) = self.getInterpolatedFilter(dt)
     F_length = len(F)
     
     filtered_spks = np.zeros(int(T/dt) + 5*F_length )
     
     for s in spks_i :
         filtered_spks[s : s + F_length] += F
     
     return filtered_spks[:int(T/dt)]
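The convolution described in the docstring (adding the interpolated filter F at every spike index) can be reproduced standalone. A short sketch with a toy exponential filter; all values are illustrative:

import numpy as np

dt, T = 1.0, 50.0                            # ms
F = np.exp(-np.arange(0.0, 10.0, dt) / 3.0)  # toy filter, 10 ms long
spks_i = np.array([5, 20, 21])               # spike indices

filtered_spks = np.zeros(int(T / dt) + 5 * len(F))
for s in spks_i:
    filtered_spks[s: s + len(F)] += F
filtered_spks = filtered_spks[:int(T / dt)]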
Example #37
    def baseDerivative(self, xdata, params):
        """
        Return the derivative df/dx at each xdata (=x).

        Parameters
        ----------
        xdata : array_like
            value at which to calculate the partials
        params : array_like
            parameters to the model

        """
        xd = numpy.where(xdata == self.knots[-1],
                         (1.0 - self.eps) * self.knots[-1], xdata)

        partial = Tools.toArray(self.Bspline.collmat(xd, deriv_order=1),
                                ndim=2)

        return numpy.inner(params, partial)
Example #38
    def derivative(self, xdata, param, useNum=False):
        """
        Return the derivatives (df/dx) of the model at the inputs

        Parameters
        ----------
        xdata : array_like
            an input vector or array
        param : array_like
            parameters for the model
        useNum : bool
            if true, numeric derivatives are used.

        """
        result = None
        df = numpy.zeros_like(xdata)
        xdata = Tools.toArray(xdata)
        df = self._recursiveDerivative(xdata, param, result, df, useNum=useNum)
        return df
Example #39
 def configStart(config):
     configself=config['self']
     sshConfig=configself['sshConfig']
     dbConfig=configself['dbConfig']
     return ServerConfig(
         SSH_Config(
             sshConfig['hostname'],
             sshConfig['port'],
             sshConfig['username'],
             sshConfig['password'],
             sshConfig['pfile']
         ),
         DbConfig(
             dbConfig['dbName'],
             dbConfig['username'],
             dbConfig['password']
             ),
         Tools.FileOperator(config)
         )
Example #40
def order_food(data):
    username = data[0]
    admin_id = int(data[1])
    info = data[2]
    info = ConvertData().convert_from_json(info)
    now_date = datetime.datetime.now()

    # credit = Tools.get_user_price_by_card_id(username)

    price = 0

    for index, value in enumerate(info):
        db = database()
        query = "select price from tbl_facility where id=%d limit 1" % (
            value[0])
        result = db.select(query)
        current_price = result[0][0]

        current_price *= int(value[1])
        price += current_price
        db.close()

    # if credit < price:
    #     return 'no credit'

    result = Tools.update_user_price_by_card_id(username, price)

    if result is False:
        return False

    for index, value in enumerate(info):
        query = """insert into tbl_order (id_user, id_facility, count, active, is_delivered, date, id_admin)
                   values
                   ((select id from tbl_user where username='%s' limit 1), %d, %d, 0, 0, '%s', %d)""" % \
                (username, int(value[0]), int(value[1]), now_date, admin_id)
        db = database()
        result = db.insert(query)
        if result is False:
            return False
        db.close()
    return True
Example #41
    def dealContent(self,contentUrl,titleXpath,postDate,source,contextXpath):
        print("download:   ",contentUrl)
        doc = {}
        doc['_id'] = uuid.uuid4();

        context = self.downloade(contentUrl)
        page = etree.HTML(context)
        title = page.xpath(titleXpath)
        source = source
        contents = page.xpath(contextXpath)
        contextClean = Tools.clearContent(contents)

        doc['title'] = title[0].strip()
        doc['postdate'] = postDate
        doc['mediaType'] = '新闻'  # i.e. "news"
        doc['source'] = source
        doc['context'] = contextClean

        self.mongo.saveDoc(doc)
Example #42
        def parseXml(self, meshXml):
            parseIdName(self, meshXml)
            self.format = meshXml.find("source").attrib["format"]
            self.source = meshXml.find("source").text

            for g in meshXml.findall("group"):
                self.group[g.attrib["id"]] = Model.Mesh.Group()

                if not g.find("index").text:
                    Sofa.msg_warning(
                        "SofaPython.sml", "Group: group '" + g.attrib["id"] +
                        "' of mesh '" + self.name + "' is empty")
                else:
                    self.group[g.attrib["id"]].index = Tools.strToListInt(
                        g.find("index").text)
                    for d in g.findall("data"):
                        self.group[g.attrib["id"]].data[
                            d.attrib["name"]] = parseData(d)
                    parseTag(self.group[g.attrib["id"]], g)
Example #43
 def Run ( self ):
   
   Tools.CreateAndMoveIn( self.Name, 'Force' ) 
   #self.Commit()
   Begin = '\033[94m MuonCalibrationFit ----> '
   End = ' \033[0m'
   print Begin + ' Executing Job ' + self.Name + End
   self.Execute()
   print Begin + ' Saving Job ' + self.Name + End
   self.Save()
   print Begin + ' Producing Plots for Job ' + self.Name + End
   self.PublishPlots()
   print Begin + ' Sending Notification Mail for Job ' + self.Name + End
   self.SendMail()
   print Begin + ' (Re)Saving Job ' + self.Name + End
   self.Save()
   #===#
   if not 'Check' in self.Name and self.DoCheck:
     self.RunCheck()
Example #44
def putFile(content, path, useCompression=False, username=None):
    """
	Writes a file to docroot/path
	
	@since: 1.0

	@type  content: utf-8 encoded (or buffer) string, encoded in mime64
	@param content: the content of the file
	@type  path: string
	@param path: a complete path, with filename and extension, relative to the document root.
	@type  useCompression: bool
	@param useCompression: (since 1.3) if set to True, the content is gzipped before being mime64-encoded.
	@type  username: string
	@param username: (since 1.7) the committer/writer
	
	@rtype: bool
	@returns: True if OK, False otherwise
	"""
    getLogger().info(">> putFile(%s, %s)" % (path, useCompression))
    if not path.startswith('/'):
        path = '/' + path

    res = False
    try:
        content = base64.decodestring(content)
        if useCompression:
            content = zlib.decompress(content)
        revision = FileSystemManager.instance().write(path,
                                                      content,
                                                      username=username)
        # No revision handling for now
        # We should return the new filepath in case of a success
        # /repository/samples/[email protected]
        # etc
        res = True
    except Exception as e:
        e = Exception("Unable to perform operation: %s\n%s" %
                      (str(e), Tools.getBacktrace()))
        getLogger().info("<< putFile(...): Fault:\n" + str(e))
        raise (e)

    getLogger().info("<< putFile(): %s" % str(res))
    return res
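Judging from the docstring and the decode path above, a caller gzips (optionally) and then mime64-encodes the payload itself before calling putFile. A hedged client-side sketch; encode_for_putfile and the sample path are illustrative, not part of the original API:

import base64
import zlib

def encode_for_putfile(raw_bytes, use_compression=False):
    # mirror of what putFile() expects: optional zlib compression, then base64
    payload = zlib.compress(raw_bytes) if use_compression else raw_bytes
    return base64.b64encode(payload)

content = encode_for_putfile(b"hello world", use_compression=True)
# putFile(content, "/repository/samples/hello.txt", useCompression=True, username="alice")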
Example #45
	def weight_distribution(self,filepath):
		connectCntTuple = {}
		for n,nbrsdict in self.WG.adjacency():
			for nbr,keydict in nbrsdict.items():
				for key,eattr in keydict.items():
					if 'weight' in eattr:
						if n+' '+nbr in connectCntTuple.keys():
							connectCntTuple[n+' '+nbr] += eattr['weight']
						else:
							connectCntTuple[n+' '+nbr] = eattr['weight']
		connectCntList = Tools.TupleSort(connectCntTuple)
		x = np.arange(1,len(connectCntList)+1,1)
		y = np.zeros(len(connectCntList))
		for n in range(len(connectCntList)):
			y[n] = connectCntList[n][1]
			# print connectCntList[n][1]
		title = 'weight_distribution'
		# Tools.plot(x,y,title)
		return curveFitting.powerLawFitting(x,y,title,filepath)
Example #46
    def explode(self):
        '''
        if not self.gemsAdded:
            for gem in range(self.amount_of_gems):
                self.containmentList.append(spawnRandomGems())
        '''
        if not self.gemsAdded:
            for gem in self.containmentList:
                rngX = random.randint(-100, 100)
                rngY = random.randint(-200, 0)
                print('appending gem here')
                Stages.stageHandler.consumablesList.append(
                    gem(self, self.x + rngX, self.y + rngY))
            self.gemsAdded = True

        self.iterationList = Images.chestExplosion
        self.broken = Tools.animateMe(self, False, True)
        if self.broken:
            Stages.stageHandler.currentStage.matterList.remove(self)
Example #47
 def UpdatePredictionDisplay(self, predList):
     """ Updates GUI display after signal emitted by ClassifyRealTime thread indicating that a gesture has been predicted """
     if predList[0] != "rest":
         self.restFlag = False
         if len(predList) == 1:
             # Only contains predicted label and no debug statistics
             self.plainTextEditClassRT.moveCursor(QtGui.QTextCursor.End)
             self.plainTextEditClassRT.insertPlainText(predList[0])
             self.plainTextEditClassRT.moveCursor(QtGui.QTextCursor.End)
             #self.plainTextEditClassRT.appendPlainText(predList[0])
         else:
             self.plainTextEditClassRT.appendPlainText(
                 predList[0] + Tools.ListToCSstr(predList[1]))
     else:
         if self.restFlag != True:
             self.plainTextEditClassRT.appendPlainText("")
             self.restFlag = True
     # Update log file
     self.LogSession(predList)
Example #48
    def __init__(self, action_dim):

        self.count = 0

        self.action_dim = action_dim


        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

        self.memory = Tools.Memory_Replay(MEMORY_SIZE)

        self.online = Net.Net(self.action_dim).to(self.device)

        self.target = Net.Net(self.action_dim).to(self.device)

        self.optimizer = torch.optim.Adam(self.online.parameters(), LR)

        self.loss_func = nn.SmoothL1Loss()
        self.steps_done = 0
Example #49
def main():
    logging.getLogger('requests').setLevel(logging.CRITICAL)
    logging.basicConfig(
        format=
        '[%(asctime)s] %(filename)s:%(lineno)d %(levelname)s - %(message)s',
        level=logging.INFO,
        filename=Tools.FILE_LOG,
        datefmt='%d.%m.%Y %H:%M:%S')
    try:
        # get_proxy()
        pass
    except Exception as e:
        logging.error(
            f'Exception of type {type(e).__name__!s} in get_proxy(): {e}')
    if not Tools.SINGLE_RUN:
        while True:
            Tools.clear_log()
            Tools.create_db_if_notexist()
            avt = Avito.Avito()
            avt.check_new_posts_avito()
            you = Youla.Youla()
            you.check_new_posts_youla()
            logging.info('[App] Script went to sleep.')
            time.sleep(60 * 10)
    else:
        Tools.clear_log()
        Tools.create_db_if_notexist()
        try:
            avt = Avito.Avito()
            avt.check_new_posts_avito()
        except Exception as ex:
            logging.error(
                f'Exception of type {type(ex).__name__!s} in main(): {ex}')
        try:
            you = Youla.Youla()
            you.check_new_posts_youla()
        except Exception as ex:
            logging.error(
                f'Exception of type {type(ex).__name__!s} in main(): {ex}')
    logging.info('[App] Script exited.\n')
Example #50
    def __init__(self, name='mymodel', eventsfile=None, catalog=None, out=None, roi=None, frame='fk5', unit='degree', allsky=False, model_type='likelihood'):
        """Model class containing methods and attributes for building models for use with Fermitools

        Parameters
        ----------
        name : str (Optional)
            Name of the model. Default is 'mymodel'.
        eventsfile : str (Optional)
            Path to file containing events data. This is only used for region of interest information
        catalog : str (Optional)
            Path to source catalog.
        out : str (Optional)
            Name of the output model files.
        roi : tuple (Optional)
            Tuple defining ROI (horizontal center, vertical center, radius).
        frame : str (Optional)
            Coordinate frame to use for region of interest argument, e.g. galactic, icrs. Any astropy frame is allowed
        unit : str (Optional)
            Units for region of interest argument. Any astropy unit is allowed.
        allsky : bool (Optional)
            Flag to set region of interest to the entire sky. By default this sets the region of interest center to the galactic center (l=0, b=0).

        """
        self.name = name
        self.srcs = catalog

        if out is None:
            self.out = os.path.join(os.getcwd(), self.name + '.xml')
        else:
            self.out = out
        
        if (roi is not None) or allsky:
            self.setROI(roi=roi, frame=frame, unit=unit, allsky=allsky)
        elif eventsfile is not None:
            self.roi = Tools.getPos(eventsfile)
        else:
            self.roi = None
        
        if not model_type in ['likelihood', 'simulation']:
            raise IOError("Model type must be 'likelihood' or 'simulation'.")
        else:
            self.model_type = model_type
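A minimal construction sketch based on the docstring above; the name, catalog path and ROI numbers are placeholders, not taken from the original project:

# region-of-interest model centred on (RA, Dec) = (83.6, 22.0) with a 10 degree radius
model = Model(name='crab_region',
              catalog='my_source_catalog.fit',  # placeholder path
              roi=(83.6, 22.0, 10.0),
              frame='fk5',
              unit='degree',
              model_type='likelihood')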
Example #51
 def deploy(self, show = False):
     self.buildDependencies( show )
     
     # files to relink
     binaries = self.binaries()
     
     for binary in binaries:
         if not self.relinkBinary( binary ):
             return False
     
     # files to copy and relink
     qtPluginsPath = self.qt.pluginsFilePath( os = 'macos' )
     start = len( qtPluginsPath )
     bundleFrameworksPath = self.frameworksFilePath()
     bundleQtPluginsPath = self.qtPluginsFilePath()
     binaries = self.dependenciesList
     
     for binary in binaries:
         target = ''
         
         # qt plugin
         if binary.startswith( qtPluginsPath ):
             target = '%s/%s' % ( bundleQtPluginsPath, binary[ start : ] )
         # lib/framework
         else:
             target = '%s/%s' % ( bundleFrameworksPath, os.path.basename( binary ) )
         
         target = os.path.normpath( target )
         
         # copy dependency
         if not os.path.exists( target ):
             print 'Copying dependency %s to %s...' % ( os.path.basename( binary ), target )
             
             if not Tools.copy( binary, target ):
                 return False
         
         # relink dependency
         if not self.relinkBinary( target ):
             return False
     
     # all is fine !
     return True
Example #52
    def partial(self, xdata, params, parlist=None):
        np = self.npbase if parlist is None else len(parlist)
        partial = numpy.ndarray((Tools.length(xdata[:, 0]), np))
        x = (xdata[:, 0] - params[1])
        y = (xdata[:, 1] - params[2])
        c = math.cos(params[5])
        s = math.sin(params[5])
        u = (x * c - y * s) / params[3]
        v = (x * s + y * c) / params[4]
        r2 = u * u + v * v
        r = numpy.sqrt(r2)
        p0dkdr = params[0] * self.kernel.partial(r)
        drdu = -u / r
        drdv = -v / r

        parts = {
            0: (lambda: self.kernel.resultsq(r2)),
            #  dfdp1 = p0 * dkdr * ( drdu * dudx + drdv * dvdx ) * dxdp1
            1: (lambda: p0dkdr *
                (drdu * c / params[3] + drdv * s / params[4])),

            #  dfdp2 = p0 * dkdr * ( drdu * dudy + drdv * dvdy ) * dydp2
            2: (lambda: p0dkdr *
                (-drdu * s / params[3] + drdv * c / params[4])),

            #  dfdp3 = p0 * dkdr * drdu * dudp3
            3: (lambda: p0dkdr * drdu * u / params[3]),
            4: (lambda: p0dkdr * drdv * v / params[4]),

            #  dfdp5 = p0 * dkdr * ( drdu * ( dudc * dcdp5 + duds * dsdp5 ) +
            #                        drdv * ( dvdc * dcdp5 + dvds * dsdp5 ) )
            5: (lambda: -p0dkdr * (drdu * (-x * s - y * c) / params[3] + drdv *
                                   (-y * s + x * c) / params[4]))
        }

        if parlist is None:
            parlist = range(np)

        for k, kp in enumerate(parlist):
            partial[:, k] = parts[kp]()

        return partial
Example #53
def testAllSong(path, one_mode=None, data_fi=None):
    data_file = []
    test_mo = [
        'happy',
    ]
    data_file.append(data_fi)

    if one_mode is None:
        print 'error mode'
        return
    extra.initSoft()
    if one_mode == 'emotion':
        test_data, train_target = extra.readSelectedTrainData(
            data_file, test_mo)
    else:
        test_data, train_target = extra.readSelectedTrainData(
            data_file, test_mo)

    files = tool.getAllFileWithoutPath(path)
    return tat.testAlldefualtData(test_data, one_mode, files, path)
Example #54
    def accept(self):
        if self.tank is None:
            return False

        mw = self.getMainWindow()
        form = mw.findChild(QtGui.QWidget, "TaskPanel")
        form.points = self.widget(QtGui.QSpinBox, "Points")
        n = form.points.value()

        points = Tools.tankCapacityCurve(self.tank, n)
        l = []
        z = []
        v = []
        for p in points:
            l.append(p[0] * 100)
            z.append(p[1].getValueAs("m").Value)
            v.append(p[2].getValueAs("m^3").Value)

        PlotAux.Plot(l, z, v, self.tank)
        return True
Example #55
	def _doStop(self, component, probeFunction, timeout = 5.0):
		pid = probeFunction()
		if pid > 0:
			self.notify("Stopping %s..." % component)
			os.kill(pid, signal.SIGINT)
			start = time.time()
			while self.tsProbe() > 0 and time.time() < start + timeout:
				time.sleep(0.5)
			if self.tsProbe() > 0:
				self.notify("Unable to stop %s gracefully after %s seconds, killing it..." % (component, timeout))
				for p in Tools.getChildrenPids(pid):
					try:
						os.kill(p, signal.SIGKILL)
					except Exception, e:
						self.notify("WARNING: unable to kill process %s (%s)" % (p, str(e)))
				# But we assume we killed it...
				return 0
			else:
				self.notify("Stopped.")
				return 0
Example #56
 def sax(self, ps: pd.Series):
     '''
     refine power series to sax description
     :param ps: pd.Series index:timestamp value: power readings
     :param max_T: max-time between samples
     :return: list[tuples(delta_value,total_value,start_time,delta_time)]
     '''
     # list of tuples (starttime(pd.timestamp),endtime(pd.timestamp), cluster center value)
     naive_description = Tools.ps2description(
         ps=ps, centers=Parameters().sax_steps)
     result = []
     for i, thenaive in enumerate(naive_description):
         if (i == 0):
             result.append((thenaive[2], thenaive[2], thenaive[0],
                            thenaive[1] - thenaive[0]))
         else:
             result.append(
                 (thenaive[2] - naive_description[i - 1][2], thenaive[2],
                  thenaive[0], thenaive[1] - thenaive[0]))
     return result
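The delta bookkeeping in the loop above can be illustrated on a toy naive_description; the segment values below are made up:

# (start_time, end_time, cluster_center_value) segments
naive_description = [(0, 10, 100.0), (10, 25, 250.0), (25, 30, 80.0)]

result = []
for i, seg in enumerate(naive_description):
    prev_value = naive_description[i - 1][2] if i > 0 else 0.0
    # (delta_value, total_value, start_time, delta_time)
    result.append((seg[2] - prev_value, seg[2], seg[0], seg[1] - seg[0]))

# result == [(100.0, 100.0, 0, 10), (150.0, 250.0, 10, 15), (-170.0, 80.0, 25, 5)]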
Example #57
    def get_action(self, episode: Episode) -> int:
        model_input = None
        if self.nb_frames > 1:
            frames = []
            for i in range(max(0,
                               episode.current_length() - self.nb_frames),
                           episode.current_length()):
                frame = episode.steps[i].state.screen
                if self.preprocess:
                    frame = Tools.preprocess_frame(frame)
                if self.normalize_to_unit_interval:
                    frame = frame.astype(
                        NORMALISED_SCREEN_DATA_TYPE) / MAX_PIXEL_VALUE
                frames.append(frame)

            model_input = FrameStacker.stack_last_frame(
                numpy.array(frames), self.nb_frames, self.stack_in_colors)
        else:
            model_input = episode.current_state().screen
        return self.screen_to_actions_function(model_input)
Example #58
def MapCube(spectype, emin, emax, efile, directory, **spectrumargs):
    """Observation Simulation MapCube flux definition"""
    xmldoc_out = minidom.getDOMImplementation().createDocument(
        None, None, None)
    try:
        flux = spectrumargs['flux']
    except KeyError:
        try:
            flux = Tools.getFlux(spectype, emin, emax, **spectrumargs)
        except GetFluxError as e:
            raise SpectrumError(e)

    spectrumClass = xmldoc_out.createElement('SpectrumClass')
    spectrumClass.setAttribute('name', "MapCube")
    spectrumClass.setAttribute('params', "{0},{1}".format(flux, efile))

    use_spectrum = xmldoc_out.createElement('use_spectrum')
    use_spectrum.setAttribute('frame', "galaxy")

    return spectrumClass, use_spectrum
Example #59
 def sendSign(self, cookie):
     """
     Sign in.
     :return:
     """
     self.httpClint.set_cookies(**Tools.StrToDict(cookie))
     user_id_re = re.compile("USERID=(\S+); ")
     self.user_id = re.search(user_id_re, cookie).group(1)
     data = {
          "channel": 'app',
          "captcha_code": "",
          "captcha_hash": "",
          "source": "main",
          "latitude": 114.06031036376953, "longitude": 22.570497512817383
     }
     signInUrls = copy.copy(urls["sign_in"])
     signInUrls["req_url"] = signInUrls["req_url"].format(self.user_id)
     signRsp = self.httpClint.send(urls=signInUrls, data=data)
     logger.info(signRsp)
     self.daily()
Example #60
def getSkillIdList():
    files = Tools.list_all_files(Tools.ICONS_PATH)
    skIdList = []
    for file in files:
        list = readPic(file)
        if (list[0] == list[1]):
            skIdList.append(list[0])

        elif (list[0] == list[2]):
            skIdList.append(list[0])

        elif (list[1] == list[2]):
            skIdList.append(list[1])

        else:
            # print(list, file)
            # none of the 3 hash checks matched; fall back to the histogram (RGB) comparison
            dict = {}
            for id in list:
                # print(id)
                pic = Tools.SKILL_IMAGE_PATH + id + '.png'
                try:
                    img1 = cv2.imread(file)

                    img2 = cv2.imread(pic)

                    n = RGB(img1, img2)

                    if (n == 1.0):
                        dict[id] = n
                    else:
                        dict[id] = float(n[0])
                except:
                    continue
            skdict = {}

            for k, v in dict.items():
                skdict[k] = v
            sk = max(skdict, key=lambda k: skdict[k])
            skIdList.append(sk)
    return skIdList