Example #1
def main():
    curr_full_path = FileUtils.getCurrentDirFullPath()
    config_fn = 'portal_activity_job_config.yaml'
    cI = ConfigUtils(curr_full_path + "/configs/", config_fn)
    configItems = cI.getConfigs()
    configItems[
        'config_dir'] = curr_full_path + "/" + configItems['config_dir']
    configItems['curr_full_path'] = curr_full_path
    db_ini = configItems['config_dir'] + configItems['database_config']
    conn_alq, meta_alq = PostgresStuff.connect_alq(db_ini)
    conn = PostgresStuff.connect(db_ini)
    db_tbl = configItems['activity_table']
    first_run = getFirstRun(conn)
    if first_run == 0:
        print("****First RUN! No new created datasets in the past " +
              configItems['activity']['create']['time_interval'] + "*****")
        exit(0)
    insert_created = updateCreatedDatasets(
        conn, configItems['activity']['create']['time_interval'])
    #print insert_created
    created_datasets = MonitorPortal.generateActivityReport(
        conn_alq, configItems, 'create')
    if (not (created_datasets)):
        print("**** No new created datasets in the past " +
              configItems['activity']['create']['time_interval'] + "*****")
        exit(0)
    datasetid_notified = MonitorPortal.generateEmail(conn_alq, configItems,
                                                     'create',
                                                     created_datasets)
    updated_notified_cnt = MonitorPortal.updateNotifiedDatasetIds(
        conn, configItems, 'create', datasetid_notified)
    print("******Notified that " + str(updated_notified_cnt) +
          " datasets were created****")
    print("******Updated " + str(updated_notified_cnt) +
          " rows in the created_dataset table****")
Example #2
 def train(self):
     for e in xrange(self.starting_epoch, self.max_epochs):
         # training phase
         self.__reset_context()
         self.training_set.seek(0)
         previous_word = 0
         for current_word in self.training_set:
             current_word = int(current_word)
             self.__feedforward(previous_word)
             self.__backpropagate(previous_word, current_word) 
             self.neu_context = np.copy(self.neu_hidden)
             previous_word = current_word
             # reset context at the end of each sentence
             if(self.reset_context_each_sentence and current_word==0):
                 self.__reset_context()
         # validation phase
         self.__reset_context()
         self.validation_set.seek(0)
         previous_word = 0
         logp = 0
         word_counter = 0
         for current_word in self.validation_set:
             current_word = int(current_word)
             self.__feedforward(previous_word)
             # word==-1 are not in vocabulary
             if(current_word!=-1):
                 word_counter += 1
                 logp += np.log2(self.neu_output[current_word])
             self.neu_context = np.copy(self.neu_hidden)
             previous_word = current_word
             # reset context at the end of each sentence
             if(self.reset_context_each_sentence and current_word==0):
                 self.__reset_context()
         # print progress
         print("*******************")
         print("{}".format(time.strftime("%Y-%m-%d %H:%M:%S")))
         print("Epoch {}".format(e))
         print("Learning rate {}".format(self.learning_rate))
         print("Validation log probability {}".format(logp))
         print("Validation words counter {}".format(word_counter))
         if(word_counter>0):
             print("Validation PPL {}".format(np.power(2.0, -logp / word_counter)))
         # check improvement
         if(logp < self.logp_previous):
             self.__restore_weights()
         else:
             self.__save_weights()
         if(logp*self.min_improvement < self.logp_previous):
             if (not self.learning_rate_divide): 
                 self.learning_rate_divide = True
             else:
                 break
         if (self.learning_rate_divide):
             self.learning_rate /= 2
         self.logp_previous = logp
         # log last epoch for recovery
         cu.log_current_epoch(e+1, self.learning_rate, self.logp_previous, self.learning_rate_divide, self.recovery_config_file)
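The validation phase above accumulates base-2 log probabilities and reports perplexity as 2^(-logp / word_counter). The same computation in isolation, with made-up per-word probabilities:

import numpy as np

# Probabilities the model assigned to each validation word (made up here).
probs = [0.2, 0.05, 0.5, 0.1]
logp = sum(np.log2(p) for p in probs)
word_counter = len(probs)
# Same formula as the train() loop above.
print("Validation PPL {}".format(np.power(2.0, -logp / word_counter)))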
Example #3
def get_filter_list():
    
    global saved_filters

    if len(saved_filters) == 0:
        ConfigUtils.read_config( saved_filters, './config/savedfilters.txt' )
    
    filters = saved_filters.keys()
    filters.sort()
    return filters
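Note that get_filter_list relies on Python 2, where dict.keys() returns a list that can be sorted in place; a Python 3 equivalent would be a sketch like:

# Python 3: dict views are not sortable in place, so build a sorted list.
filters = sorted(saved_filters)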
Example #4
def TrainModel(model, session, mnist, cfg):
    data_tr = mnist.train
    data_val = mnist.validation
    loss_history = []
    logs = []
    acc_val_best_so_far = 0
    current_step = session.run(model.global_step)
    lg = ConfigUtils.LogGenerator()
    for iteration in range(current_step, cfg.max_batch+1):
        batch = data_tr.next_batch(cfg.batch_size)
        learn_rate = cfg.lr * cfg.lr_mapper.GetFactor(iteration)
        feed_dict = {model.pl_images:_SubtractByMean(batch[0]), model.pl_labels:batch[1], model.pl_dropout_rate:0.5, model.pl_phase:1, model.pl_lr:learn_rate}
        loss, _ = session.run([model.loss, model.op_train], feed_dict=feed_dict)
        loss_history.append(loss)
        
        if cfg.is_save and iteration % cfg.save_every==0:
            model.saver.save(session, cfg.save_dir, iteration)
        if iteration % cfg.print_every != 0:
            continue
        feed_dict[model.pl_dropout_rate] = 0
        feed_dict[model.pl_phase] = 0
        acc_tr, loss_softmax_tr, loss_re_tr = session.run([model.accuracy, model.loss_softmax, model.loss_reg], feed_dict=feed_dict)
        acc_val, loss_softmax_val, loss_re_val = EvaluateModel(model, session, data_val, cfg)
        
        acc_val_best_so_far = acc_val if acc_val > acc_val_best_so_far else acc_val_best_so_far
        log = lg.GetContent()
        log.iteration = iteration
        log.acc_tr = acc_tr
        log.loss_softmax_tr = loss_softmax_tr
        log.loss_re_tr = loss_re_tr
        log.loss_tr = loss_softmax_tr + loss_re_tr
        log.acc_val = acc_val
        log.loss_softmax_val = loss_softmax_val
        log.loss_re_val = loss_re_val
        log.loss_val = loss_softmax_val + loss_re_val
        logs.append(log)
        if cfg.verbose:
            print('iteration: {0}\t acc_tr: {1:4.3f}\t acc_val: {2:4.3f}\t loss_tr: {3:4.4f}\t loss_val: {4:4.4f}\t log_lr: {5:4.2f}'.format(iteration, acc_tr, acc_val, log.loss_tr, log.loss_val, np.log(learn_rate)))
            
    acc_val, loss_softmax_val, loss_re_val = EvaluateModel(model, session, data_val, cfg) 
    rg = ConfigUtils.ReportGenerator()
    report = rg.GetContent()
    report.acc = acc_val
    report.best_acc = acc_val_best_so_far
    report.loss_softmax = loss_softmax_val
    report.loss_re = loss_re_val
    report.loss = loss_softmax_val + loss_re_val
    report.traing_loss_hist = loss_history
    report.distribution = model.cfg.distribution
    report.reg = model.cfg.reg
    report.lr = cfg.lr
    return report, logs
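TrainModel feeds each batch through a _SubtractByMean helper that is not shown; presumably it centers the images. A plausible sketch (the exact normalization used in the original is an assumption):

import numpy as np

def _SubtractByMean(images):
    # center the batch around zero; the original may instead subtract a
    # precomputed dataset-wide pixel mean
    return images - np.mean(images, axis=0)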
Example #5
def delete_filter(filter_name):
    
    global saved_filters

    if len(saved_filters) == 0:
        ConfigUtils.read_config( saved_filters, './config/savedfilters.txt' )
            
    name = filter_name.title()
    try:
        del saved_filters[name]
        ConfigUtils.write_config( saved_filters, './config/savedfilters.txt' )
    except KeyError:
        pass
Example #6
def main():
    fieldConfigFile, config_inputdir, jobType = parse_opts()
    cI = ConfigUtils(config_inputdir, fieldConfigFile)
    configItems = cI.getConfigs()
    configItems['dataset_name'] = jobType
    lg = pyLogger(configItems)
    logger = lg.setConfig()
    logger.info("****************JOB START******************")
    sc = SocrataClient(config_inputdir, configItems, logger)
    client = sc.connectToSocrata()
    clientItems = sc.connectToSocrataConfigItems()
    scrud = SocrataCRUD(client, clientItems, configItems, logger)
    sQobj = SocrataQueries(clientItems, configItems, logger)

    mmdd_fbf = configItems['dd']['master_dd']['fbf']
    field_profiles_fbf = configItems['dd']['field_profiles']['fbf']
    base_url = configItems['baseUrl']

    field_type_fbf = configItems['dd']['field_type']['fbf']

    load_mm_dd = ProfileFields.getBaseDatasetJson(sQobj, configItems, mmdd_fbf)
    #print load_mm_dd
    #load_mm_dd  = True
    current_field_profiles = ProfileFields.getCurrentFieldProfiles(
        sQobj, base_url, field_profiles_fbf)

    if load_mm_dd:
        master_dfList = ProfileFields.get_dataset_as_dfList(
            configItems['pickle_data_dir'], configItems['mm_dd_json_fn'],
            base_url)
        ProfileFields.removeDeletedFields(scrud, field_profiles_fbf,
                                          master_dfList,
                                          current_field_profiles)
        current_field_profiles = ProfileFields.getCurrentFieldProfiles(
            sQobj, base_url, field_profiles_fbf)

        dataset_info = ProfileFields.buildInsertFieldProfiles(
            sQobj, scrud, configItems, master_dfList, current_field_profiles)
        print dataset_info
        dsse = JobStatusEmailerComposer(configItems, logger, jobType)
        if dataset_info['DatasetRecordsCnt'] > 1:
            dsse.sendJobStatusEmail([dataset_info])
        else:
            dataset_info = {
                'Socrata Dataset Name': configItems['dataset_name'],
                'SrcRecordsCnt': 0,
                'DatasetRecordsCnt': 0,
                'fourXFour': "Nothing to Insert"
            }
            dataset_info['isLoaded'] = 'success'
            dsse.sendJobStatusEmail([dataset_info])
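Nearly every job in these examples starts by instantiating ConfigUtils(config_dir, config_fn) and calling getConfigs() to obtain a dict of config items. A minimal sketch of what such a loader could look like, assuming a YAML config file as in Example #1 (only the constructor signature and getConfigs() come from the examples; the rest is an assumption):

import yaml

class ConfigUtils(object):
    """Hypothetical minimal config loader matching the calls above."""

    def __init__(self, config_dir, config_fn):
        self.config_path = config_dir + config_fn

    def getConfigs(self):
        # parse the YAML config file into a plain dict of config items
        with open(self.config_path) as f:
            return yaml.safe_load(f)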
Example #7
def save_filter(filter_name, filter_str):
    
    global saved_filters
    
    if len(saved_filters) == 0:
        ConfigUtils.read_config( saved_filters, './config/savedfilters.txt' )
            
    name = filter_name.title()
    name = name.replace(' ', '_')
    saved_filters[name] = filter_str
    
    ConfigUtils.write_config( saved_filters, './config/savedfilters.txt' )
    
    return
Example #8
def get_saved_filter( filter_name ):
    global saved_filters
    
    if len(saved_filters) == 0:
        ConfigUtils.read_config( saved_filters, './config/savedfilters.txt' )
        
    filter_list = []
    try:
        name = filter_name.title()
        filter_str = saved_filters[name]
        filter_list = filter_str.split('+')
    except:
        pass
    
    return filter_list
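The filter helpers above all funnel through ConfigUtils.read_config and ConfigUtils.write_config, which persist a dict to ./config/savedfilters.txt. A plausible sketch of that pair, assuming a simple name=value line format (only the function names and (dict, path) signatures come from the examples):

def read_config(config_dict, path):
    # hypothetical format: one "name=value" pair per line
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line and not line.startswith('#'):
                key, _, value = line.partition('=')
                config_dict[key.strip()] = value.strip()

def write_config(config_dict, path):
    with open(path, 'w') as f:
        for key, value in sorted(config_dict.items()):
            f.write('%s=%s\n' % (key, value))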
Example #9
def Tune():
    '''
    for small net
    re_max = -4
    re_min = -14
    lr_max = -6
    lr_min = -10
    '''
    re_max = -4
    re_min = -14
    lr_max = -6
    lr_min = -10
    distribution = np.array(['uniform', 'normal'])
    iteration = 1
    max_batch = 1000
    verbose = True
    
    #list_lr = np.exp(np.linspace(start=-9, stop=-13, num=iteration))
    list_lr = np.exp(np.random.uniform(low=lr_min, high=lr_max, size=iteration))
    list_re = np.exp(np.random.uniform(low=re_min, high=re_max, size=iteration))
    mask_distribution = np.random.randint(low=0, high=len(distribution), size=iteration)
    list_distribution = distribution[mask_distribution]

    
    _Notice()
    list_report = []
    list_logs = []
    mcg = ConfigUtils.ModelCfgGenerator()
    ccg = ConfigUtils.TrainCfgGenerator()
    
    for i in range(iteration):
        print('number of hyperparam setting tried: {}/{}'.format(i+1, iteration))
        cfg_model = mcg.GetContent(distribution=list_distribution[i], reg=list_re[i])
        cfg_train = ccg.GetContent(lr=list_lr[i], max_batch=max_batch, verbose=verbose)
        mnist = input_data.read_data_sets(cfg_train.data_dir, one_hot=False, seed=231)
        report, logs = Run(cfg_model, cfg_train, mnist)
        log_df = Logs2Df(logs)
        list_report.append(report)
        list_logs.append(log_df)
      
    df = Reports2Df(list_report)
    return df, list_logs
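Tune() is a random hyperparameter search: learning rate and regularization strength are sampled log-uniformly by drawing exponents from a uniform range and then exponentiating. The sampling idea in isolation:

import numpy as np

# Five learning rates sampled log-uniformly between e**-10 and e**-6,
# mirroring the np.exp(np.random.uniform(...)) pattern in Tune().
list_lr = np.exp(np.random.uniform(low=-10, high=-6, size=5))
print(list_lr)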
Example #10
def get_saved_filter_json( global_config, filter_name, store_data_to_file=False ):
    global saved_filters
    
    competition = global_config['this_competition'] + global_config['this_season']

    if len(saved_filters) == 0:
        ConfigUtils.read_config( saved_filters, './config/savedfilters.txt' )
        
    result = []
    result.append('{ "filters": [\n')
    
    if filter_name is not None:
        try:
            filter_name = filter_name.title()
            filter_str = saved_filters[filter_name]
            result.append('   { "name": "%s", "filter_str": "%s" }\n' % (filter_name, filter_str))
        except KeyError:
            pass
    else:
        for name, filter_str in saved_filters.iteritems():
            result.append('   { "name": "%s", "filter_str": "%s" }' % (name,filter_str))
            result.append(',\n')
            
        if len(saved_filters) > 0:
            result = result[:-1]
    
    result.append('] }\n')
    
    json_str = ''.join(result)
    
    if store_data_to_file:
        try:
            if filter_name is None:
                file_name = 'attrfilters.json'
            else:
                file_name = 'attrfilter_%s.json' % filter_name
            FileSync.put( global_config, '%s/EventData/%s' % (competition,file_name), 'text', json_str)                
        except:
            raise

    return json_str
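get_saved_filter_json assembles its JSON by hand; the same payload could be built with the json module, which also handles escaping. A sketch of the equivalent construction:

import json

payload = {"filters": [{"name": name, "filter_str": filter_str}
                       for name, filter_str in saved_filters.items()]}
json_str = json.dumps(payload, indent=1)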
Example #11
  def runWebTask(configItems):
    cItemsWebtask =  ConfigUtils(configItems['inputConfigDir'], configItems['webtask_config_fn'] )
    configItemsWebTasks = cItemsWebtask.getConfigs()
    url = configItemsWebTasks['webtask_url']
    attempts = 0
    while attempts < 3:
      try:
        response = urllib2.urlopen(url, timeout = 15)
        content = response.read()
        if(content):
          f = open( configItems['log_dir'] + "algolia-sync-results.json", 'w' )
          f.write( content )
          f.close()
          return True

      except urllib2.URLError as e:
        attempts += 1
        print type(e)
    return False
Example #12
def main():
    curr_full_path = FileUtils.getCurrentDirFullPath()
    config_fn = 'portal_activity_job_config.yaml'
    cI = ConfigUtils(curr_full_path + "/configs/", config_fn)
    configItems = cI.getConfigs()
    configItems[
        'config_dir'] = curr_full_path + "/" + configItems['config_dir']
    configItems['curr_full_path'] = curr_full_path
    db_ini = configItems['config_dir'] + configItems['database_config']
    conn_alq, meta_alq = PostgresStuff.connect_alq(db_ini)
    conn = PostgresStuff.connect(db_ini)
    db_tbl = configItems['activity_table']
    rotate_items = rotateActivityData(conn)
    activity = "rotate_portal_activity"
    subject_line = configItems['activity'][activity]['email_msg'][
        'subject_line']
    msg_body = configItems['email_msg_template']['header'] + configItems[
        'activity'][activity]['email_msg']['msg']
    msg_body = msg_body + configItems['email_msg_template']['footer']
    em = Emailer(configItems)
    em.sendEmails(subject_line, msg_body.encode('utf-8').strip())
Example #13
def main():
    curr_full_path = FileUtils.getCurrentDirFullPath()
    config_fn = 'portal_activity_job_config.yaml'
    cI = ConfigUtils(curr_full_path + "/configs/", config_fn)
    configItems = cI.getConfigs()
    configItems[
        'config_dir'] = curr_full_path + "/" + configItems['config_dir']
    configItems['curr_full_path'] = curr_full_path
    db_ini = configItems['config_dir'] + configItems['database_config']
    conn_alq, meta_alq = PostgresStuff.connect_alq(db_ini)
    conn = PostgresStuff.connect(db_ini)
    db_tbl = configItems['activity_table']
    digest_items = digestStaleDelayedDatasets(conn_alq, configItems,
                                              'stale_delayed_digest')
    if (not (digest_items)):
        print("**** No digest items " +
              configItems['activity']['update']['time_interval'] + "*****")
        exit(0)
    datasetid_notified = MonitorPortal.generateEmail(conn_alq, configItems,
                                                     'stale_delayed_digest',
                                                     digest_items)
Example #14
def main():
  curr_full_path = FileUtils.getCurrentDirFullPath()
  config_fn = 'portal_activity_job_config.yaml'
  cI = ConfigUtils(curr_full_path + "/configs/", config_fn)
  configItems = cI.getConfigs()
  configItems['config_dir'] = curr_full_path + "/" + configItems['config_dir']
  configItems['curr_full_path'] = curr_full_path
  db_ini = configItems['config_dir'] + configItems['database_config']
  conn_alq, meta_alq = PostgresStuff.connect_alq(db_ini)
  conn = PostgresStuff.connect(db_ini)
  db_tbl = configItems['activity_table']
  insert_late_updated = updateStaleDelayedDatasets(conn, configItems['activity']['update']['time_interval'])
  print(insert_late_updated)
  stale_late_datasets = MonitorPortal.generateActivityReport(conn_alq, configItems, 'update')
  print(stale_late_datasets)
  if (not (stale_late_datasets)):
    print("**** No changes for stale or delayed datasets " + configItems['activity']['update']['time_interval'] + "*****")
    exit(0)
  datasetid_notified = MonitorPortal.generateEmail(conn_alq, configItems, 'update', stale_late_datasets)
  updated_notified_cnt = MonitorPortal.updateNotifiedDatasetIds(conn, configItems, 'update', datasetid_notified)
  print("******Notified that " + str(updated_notified_cnt) + " datasets are late or stale****")
  print("******Updated " + str(updated_notified_cnt) + " rows in the late_updated_dataset table****")
Example #15
    def emit(self, record):
        msg = self.format(record)
        _filePath = datetime.datetime.now().strftime(self.filefmt)
        filename = self.formatter.filename
        suffix = '/' + filename + '_'
        newpath = _filePath.replace('/', suffix)
        _dir = os.path.dirname(newpath)
        try:
            if not os.path.exists(_dir):
                os.makedirs(_dir)
        except Exception:
            print "can not make dirs"
            print "filepath is " + newpath
            pass
        try:
            global newfilepath
            if hasattr(self, 'newfile'):
                newpath = self.newfile
            with open(newpath, 'a') as _fobj:
                filesize = self.get_FileSize(newpath)
                logFileSize = ConfigUtils.getWebPorperties('LogFileSize')
                if filesize < logFileSize:
                    _fobj.write(str(msg))
                    _fobj.write("\n")
                    _fobj.flush()
                else:
                    newFilFmt = os.path.join("logs", "%Y%m%d%H%M%S.log")
                    filePath = datetime.datetime.now().strftime(newFilFmt)
                    suffix_new = '/' + filename + '_'
                    self.newfile = filePath.replace('/', suffix_new)
                    with open(self.newfile, 'a') as _fobj:
                        _fobj.write(str(msg))
                        _fobj.write("\n")
                        _fobj.flush()

                # self.queryFileInDir(str(msg),filename)

        except Exception, e:
            print "can not write to file"
            print "filepath is " + newpath
            print e.message
Example #16
 def __init__(self):
     self.window = Tk()
     self.window.title("Dimens Convert Tool")
     windowW = 420
     if (platform.system() == "Windows"):
         windowW = 420
     else:
         windowW = 490
     self.window.minsize(windowW, 355)
     self.center_wind(self.window, windowW, 355)
     frame = Frame(self.window)
     frame.pack()
     config_path = os.path.split(
         os.path.realpath(__file__))[0] + "/" + "configs.ini"
     self.rootDir = os.path.split(os.path.realpath(__file__))[0]
     self.config_path = config_path
     conf = ConfigUtils.Config(config_path)
     self.conf = conf
     fontSize = 35
     menubar = Menu(self.window)
     helpmenu = Menu(menubar, tearoff=0)
     helpmenu.add_command(label="使用方法", command=self.help)
     helpmenu.add_separator()
     helpmenu.add_command(label="关于", command=self.about)
     menubar.add_cascade(label="帮助", menu=helpmenu)
     self.window.config(menu=menubar)
     index = 0
     designDevice = DesignDevice.Design(conf, index, frame, fontSize,
                                        self.rootDir)
     index = designDevice.layout()
     self.designDevice = designDevice
     targetDevice = TargetDevice.Design(conf, index, frame, fontSize)
     index = targetDevice.layout()
     self.targetDevice = targetDevice
     btApply = Button(frame, text="生成", command=self.processButtonGeneral)
     btApply.grid(row=index + 1, column=1, columnspan=2)
     btCancel = Button(frame, text="取消", command=self.processButtonCancel)
     btCancel.grid(row=index + 1, column=2, columnspan=2)
     # run the event loop until the window is closed
     self.window.mainloop()
Example #17
def _Test():
    import ConfigUtils
    import tensorflow as tf  # needed for InteractiveSession below
    cfg = ConfigUtils.ModelCfg()
    model = BigModel(cfg)
    session = tf.InteractiveSession()
    session.run(model.op_init)
Example #18
        )
        parser.add_option("-v", "--verbose", action="store_true", dest="verbose", help="Be verbose.")
        parser.add_option(
            "-c", "--config", action="store", dest="configFile", default="config.json", help="Config file (json)"
        )

        options, args = parser.parse_args()

    except Exception, e:
        print e
        print "For help use --help"
        sys.exit(2)

    ble = BleAutomator(options.interface, options.verbose)

    addresses = ConfigUtils.readAddresses(options.configFile)
    if not addresses:
        sys.exit(1)
    address_ind = 0

    if len(addresses) == 1:
        cycle = 0
        ble.connect(addresses[0])
    else:
        cycle = 1

    # Endless loop:
    while True:
        # Connect to peer device.
        if cycle == 1:
            ble.connect(addresses[address_ind])
Example #19
            default=None,
            help='Target address. (Can be found by running "hcitool lescan")')

        options, args = parser.parse_args()

    except Exception, e:
        print e
        print "For help use --help"
        sys.exit(2)

    ble = BleAutomator(options.interface, options.verbose)

    if (options.address):
        addresses = [options.address]
    else:
        addresses = ConfigUtils.readAddresses(options.configFile)
        if (not addresses):
            sys.exit(1)
    address_ind = 0

    switch = 1
    if (len(addresses) == 1):
        if not ble.connect(addresses[0]):
            exit(1)
        # Subscribe for notifications
        handle = ble.getHandle(CHAR_POWER_SAMPLES) + 2
        if (not ble.writeCharacteristicHandle(
                handle, Conversion.uint16_to_uint8_array(1))):
            exit(1)
        while (True):
            getSamples(addresses[0])
Example #20
def main():
#	instantiate the global variables
	global counter
	global shutdown
	global fanPort
	global minFanUpTime
	global refreshRate
	global maxTemp
	global minTemp
	global channel_id
	global write_key
	global tskrefresh
	global lastUpdate
	global channel
	global isRelay
	global onValue
	global offValue
	global useSocCmd
	
#	load the configuration settings
	(fanPort,minFanUpTime,refreshRate,maxTemp,minTemp,channel_id,write_key,tskrefresh,isRelay,useSocCmd) = configs.loadConfig()
	
	if(isRelay):
		onValue = 0
		offValue = 1
	else:
		onValue = 1
		offValue = 0
	
	if(channel_id != -1):
		channel = thingspeak.Channel(id=channel_id,write_key=write_key)
	
	parser = optparse.OptionParser()
	
	parser.add_option("-v", "--version", action="store_true", dest="version",
                  help="Show the software version.", default = False)
				  
	parser.add_option("-c", "--config", action="store_true", dest="config",
                  help="Generates a default config file. WARNING: this can overwrite existing settings file.", default = False)
	
	parser.add_option("--reloadConf", action="store_true", dest="reload",
                  help="Reloads the config file to apply changes.", default = False)
	
	group = optparse.OptionGroup(parser, "Controll Options")
	
	group.add_option("-f", "--force", type="string", nargs = 1, dest="force",
                  help="force the fan to be always ON/OFF", default = "null")
	
	group.add_option("-r", "--restore", action="store_true", dest="restore",
                  help="restore the fan to auto mode", default = False)
	
	parser.add_option_group(group)
	
	group = optparse.OptionGroup(parser, "Status Options")
	  
	group.add_option("-a", "--appear", action="store_true", dest="appear",
                  help="force run even if a process is already running", default=False)
	
	group.add_option("-s", "--status", action="store_true", dest="fanstatus",
                  help="show if the fan is ON/OFF", default=False)
				  
	group.add_option("-t", "--temp", action="store_true", dest="temp",
                  help="shows the current temperature", default=False)
	
	parser.add_option_group(group)
	
	group = optparse.OptionGroup(parser, "Installation Options")
				  
	group.add_option("--install", action="store_true", dest="install",
                  help="makes 'fan' command available to bash command line", default=False)
	
	group.add_option("--uninstall", action="store_true", dest="uninstall",
                  help="uninstall 'fan' command from bash command line", default=False)
				  
	group.add_option("--autoinit", type="string", nargs = 1, dest="autoinit",
                  help="sets the auto init true/false, if true the process will start at boot up", default = "null")
				  
	parser.add_option_group(group)
	
	group = optparse.OptionGroup(parser, "Dangerous Options","Use this options with caution.")
				  
	group.add_option("--clear", action="store_true", dest="clear",
                  help="free shared memory, this may cause some stability issues, try using '--restore' after using this option.", default = False)
				  
	parser.add_option_group(group)
				  
	(options, args) = parser.parse_args()
	
#	stop the process if the computer begins to shut down
	signal.signal(signal.SIGTERM, stop)
	
	if(options.reload):
		
		try:
			configReload = sysv_ipc.SharedMemory(19021999) # look up the shared memory segment
			utils.write_to_memory(configReload,"reload")
			print("The configuration was reloaded.")

		except:
			print("Fail to reload the configuration.")
				
		sys.exit()
		
	elif(options.config):
		try:
			configs.createConfig()
			print('Config file created.')
		
		except:
			print('Fail to create the config file.')
			
		sys.exit()
		
	elif(options.version):
		print("Fan version: %s" % (version))
		sys.exit()
		
	elif(options.clear):
		try:
			setGPIO()
			fanForce = sysv_ipc.SharedMemory(22061995)
			sysv_ipc.remove_shared_memory(fanForce.id)
			print("Memory Cleared.")
		except:
			print("There was no memory to clear.")
		

		GPIO.cleanup(fanPort)
		print("GPIO Cleared.")
			
		sys.exit()
	
	elif(options.install):		
		try:
			installFan()
			print("'fan' was installed to the command line.")
		except:
			print("Fail to install 'fan' to the command line.")
		
		sys.exit()
		
	elif(options.uninstall):		
		try:
			os.remove("/usr/local/bin/fan")
			print("'fan' was removed from command line.")
		except:
			print("Fail to remove 'fan' from command line.")
		
		sys.exit()
		
	elif(options.autoinit == "true"):
		try:
			installAutoInit()
			print("Autoinit set to True, restart your system apply changes.")
		except:
			print("Fail to set Autoinit to True.")
		
		sys.exit()

	elif(options.autoinit == "false"):
		try:
			uninstallAutoInit()
			print("Autoinit set to False")
		except:
			print("Fail to set Autoinit to False.")
		
		sys.exit()
			
	elif(options.autoinit != "null"):
		print("Invalid parameter.")
		print("")
		print("Usage: --autoinit [true/false]")
		sys.exit()
		
#	if the -a option is given, force a new instance to open
	elif (options.appear):
		setGPIO()
#		turn the fan off
		GPIO.output(fanPort,offValue)
	
	elif(options.force == "on"):
		try:
			fanForce = sysv_ipc.SharedMemory(22061995) # look up the shared memory segment
			utils.write_to_memory(fanForce,"on")
			setGPIO()
			GPIO.output(fanPort,onValue)
			print("The Fan was forced to be on.")
		except:
			print("Fail forcing fan to be on.")
		
		sys.exit()
		
	elif(options.force == "off"):
		try:
			fanForce = sysv_ipc.SharedMemory(22061995) # look up the shared memory segment
			utils.write_to_memory(fanForce,"off")
			setGPIO()
			GPIO.output(fanPort,offValue)
			print("The Fan was forced to be off. WARNING: The fan will not auto turn on anymore.")
		except:
			print("Fail forcing fan to be off.")
		
		sys.exit()
	
	elif(options.force != "null"):
		print("Invalid parameter.")
		print("")
		print("Usage: --force [on/off]")
		sys.exit()
	
	elif(options.restore):

		try:
			fanForce = sysv_ipc.SharedMemory(22061995) # look up the shared memory segment
			utils.write_to_memory(fanForce,"default")
			print("The Fan was restored to auto-mode.")

		except:
			print("The Fan was restored to auto-mode.")
		
		sys.exit()
		
	elif(options.fanstatus):
		setGPIO()
		if(GPIO.input(fanPort) == offValue):
			print("The fan is inactive.")
		else:
			print("The fan is active.")
		sys.exit()
	
	elif(options.temp):
		print ("Temperature: %0.2f 'C" % (getTemp()))
		sys.exit()
		
	else:
		parser.error("fan requires an argument.")
		quit()
	
	try:
		fanForce = sysv_ipc.SharedMemory(22061995,sysv_ipc.IPC_CREX) # create the shared memory segment
		configReload = sysv_ipc.SharedMemory(19021999,sysv_ipc.IPC_CREX) # create the shared memory segment
	except:
		fanForce = sysv_ipc.SharedMemory(22061995) # attach to the existing shared memory segment
		configReload = sysv_ipc.SharedMemory(19021999) # attach to the existing shared memory segment
		
	utils.write_to_memory(fanForce,"default")
	utils.write_to_memory(configReload,"default")
	
	try:
		while (shutdown == False):
#			find out whether the fan is on
			status = GPIO.input(fanPort)
#			if a force command was received from the console, skip the auto control
			if(utils.read_from_memory(fanForce) == "default"):
#				if the temperature is at or above maxTemp degrees, turn the fan on
				if (getTemp() >= maxTemp):
					if(status == offValue):
#						turn the fan on
						GPIO.output(fanPort,onValue)
#						send the status to ThingSpeak
						
						k=0
						
#						wait out the fan's minimum run time
						while(k < minFanUpTime and shutdown == False):	
#							if a fan control command arrives, leave the wait loop
							if(utils.read_from_memory(fanForce) == "default"):
#								send the status to ThingSpeak
								updateThingspeak()
								
								if(utils.read_from_memory(configReload) == "reload"):
									reloadConfigs()
									utils.write_to_memory(configReload,"default")
									
								time.sleep(1)
								k+=1
							else:
								break
						
						counter=0
					else:
						updateThingspeak()
						
						if(utils.read_from_memory(configReload) == "reload"):
									reloadConfigs()
									utils.write_to_memory(configReload,"default")
								
						time.sleep(refreshRate) # after the minimum run time, wait the configured refresh interval
						counter=0
						
#				near the transition threshold, wait 2x the refresh rate before turning the fan off, avoiding rapid on/off cycling
				elif (counter < 2):
					counter += 1
					time.sleep(refreshRate)
				
				elif (minTemp < getTemp()):
					counter = 0
					updateThingspeak()
					
					if(utils.read_from_memory(configReload) == "reload"):
									reloadConfigs()
									utils.write_to_memory(configReload,"default")
				
				else:
					if(status == onValue):
						GPIO.output(fanPort,offValue)
						
					updateThingspeak()
					if(utils.read_from_memory(configReload) == "reload"):
									reloadConfigs()
									utils.write_to_memory(configReload,"default")
									
					time.sleep(refreshRate)
			else:
				updateThingspeak()
				if(utils.read_from_memory(configReload) == "reload"):
									reloadConfigs()
									utils.write_to_memory(configReload,"default")
				time.sleep(refreshRate)
				
	except KeyboardInterrupt:
		pass
	
	finally:
		GPIO.cleanup(fanPort)
		sysv_ipc.remove_shared_memory(fanForce.id)
		sysv_ipc.remove_shared_memory(configReload.id)
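The fan controller coordinates through two System V shared-memory segments via utils.write_to_memory and utils.read_from_memory, which are not shown. A plausible sketch of that pair on top of sysv_ipc (the NUL-termination convention is an assumption):

import sysv_ipc

def write_to_memory(memory, value):
    # NUL-terminate so leftover bytes from a longer previous value are ignored
    memory.write(value.encode() + b'\x00')

def read_from_memory(memory):
    raw = memory.read()
    return raw.decode(errors='ignore').split('\x00', 1)[0]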
Example #21
	import ConfigUtils as cu
except:
	printException('Failed to load support scripts.',sys.exc_info())
#Ref0002 End

#########################################################################################################
#########################################################################################################
# Gets configuration data from the repository
#########################################################################################################
#########################################################################################################
True=1
False=0

#Ref0002 Begin
try: 
	cu.printMsg('Begin run.',False)
except:
	print 'ERROR, cannot continue.  Failed to load support scripts.  Run wsadmin -f ConfigDump.py from the same directory it exists in.'
	sys.exit(1)

excludeTypeList = []  #Ref0044
regexList=[]  #Ref0038
if len(sys.argv) > 0:
	if " ".join(sys.argv).find("=") == -1: #Ref0038 
		cu.setRepositoryConfigFileName(sys.argv[0])
		#Ref0002 End
		if len(sys.argv) > 1:              #Ref0037
			cu.setCellSuffix(sys.argv[1])  #Ref0037
	#Ref0038 Begin
	else:
		for arg in sys.argv:
Example #22
if __name__ == "__main__":

   # Decode the input augument
   options, args = decodeCommandLine()
   print "Input filelist    : %s" % options.inList
   print "Histogram list    : %s" % options.histoList
   print "Output directory  : %s" % options.outDir
   if options.selectionList:
      print "Selection criteria: %s" % options.selectionList
   else:
      print "No selection criterion is applied."

   # Read in the input filelist
   sList = {}
   nSources, sList = ConfigUtils.getFileList( options.inList )

   # Read in the histogram list
   Histos = {}
   tree, Histos = ConfigUtils.getHistoNames( options.histoList )

   # Create the output directory
   createDir()
   # Copy the input file list to the output directory
   os.system('cp %s %s' % ( options.inList, options.outDir ) )

   # Read in the selection criteria
   Selection = {}
   if options.selectionList:
      Selection = ConfigUtils.getSelection( options.selectionList )
Example #23
 def __init__(self, vocabulary_filename, training_set_size, training_set_filename, validation_set_filename, test_set_filename, config_file):
     # load configuration file
     options = cu.init_configuration(config_file)
     self.recovery_config_file = options['recovery_config_file']
     self.recovery_path = options['recovery_path']
     # load dataset files
     self.training_set = open(training_set_filename, 'r')
     self.validation_set = open(validation_set_filename, 'r')
     self.test_set = open(test_set_filename, 'r')
     # load vocabulary
     v = open(vocabulary_filename, 'r')
     vocabulary = json.load(v)
     v.close()
     ss_occurrences = vocabulary.pop("</s>")
     vocabulary = OrderedDict({"</s>": ss_occurrences}.items() + OrderedDict(sorted(vocabulary.items(), key=lambda x: (-x[1], x[0]))).items())
     self.vocabulary_size = len(vocabulary)
     # smoothing factor 
     self.occurrences_smoothing = options['occurrences_smoothing']
     if(self.occurrences_smoothing):
         occurrences = vocabulary.values()
         log_training_set_size = np.log(training_set_size)
         self.smoothing_factors = []
         for i in xrange(self.vocabulary_size): 
             self.smoothing_factors.insert(i, log_training_set_size - np.log(occurrences[i]))
         # normalize between 1.0 and 2.0
         old_min = min(self.smoothing_factors)
         old_max = max(self.smoothing_factors)
         new_min = 1.0
         new_max = 2.0
         norm_range = (new_max-new_min)/(old_max-old_min) 
         for i in xrange(self.vocabulary_size):
             factor = new_min + (self.smoothing_factors[i] - old_min) * norm_range
             self.smoothing_factors[i] = factor
     # net parameters
     self.learning_rate = options['learning_rate']
     self.starting_epoch = options['starting_epoch']
     self.max_epochs = options['max_epochs']
     self.min_improvement = options['min_validation_logp_improvement']
     self.logp_previous = options['logp_previous']
     self.learning_rate_divide = options['learning_rate_divide']
     self.reset_context_each_sentence = options['reset_context_each_sentence']
     # init neurons
     self.hidden_layer_size = options['hidden_layer_size']
     self.neu_input_index = -1
     self.neu_context = [0.1] * self.hidden_layer_size
     self.neu_hidden = np.zeros(self.hidden_layer_size)
     self.neu_output = np.zeros(self.vocabulary_size)
     # init synapses
     try:
         self.__restore_weights()
     except IOError:
         # weights between input and hidden layer
         self.syn_input = get_random_matrix(
                 self.vocabulary_size + self.hidden_layer_size, 
                 self.hidden_layer_size,
                 options['weight_min_value'],
                 options['weight_max_value']) 
         # weights between hidden and output layer
         self.syn_hidden = get_random_matrix(
                 self.hidden_layer_size, 
                 self.vocabulary_size,
                 options['weight_min_value'],
                 options['weight_max_value'])
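The IOError fallback above calls a get_random_matrix helper that is not shown; from the call sites it takes (rows, cols, min_value, max_value) and returns a uniformly initialized weight matrix. A plausible sketch:

import numpy as np

def get_random_matrix(rows, cols, min_value, max_value):
    # uniform init in [min_value, max_value), shaped (rows, cols)
    return np.random.uniform(min_value, max_value, size=(rows, cols))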
Example #24
import ConfigUtils

logs_file_name_prefix = "/home/tomek/logs/logs_" + ConfigUtils.get_datetime_prefix() + "_"
logs_file_append_pid = True

projects_db_path = '/home/tomek/workspace/project_db.json'

# autoscan
autoscan_server_runnable_files = ['app_server.sh', 'standalone.sh']
autoscan_folders_to_skip = ['target', 'tests', 'standalone', 'glassfish', 'ACA']

actions = {
    'clean-build': ('cb', 'mvn clean install'),
    'clean-build2': ('cb2', 'mvn2 clean install'),
    'clean-build3': ('cb3', 'mvn3 clean install'),
    'mvn': ('m', 'mvn {0} install', ['']),

    'reset-hard': ('rh', 'git reset --hard && git checkout {0} && git fetch && git reset --hard origin/{0}',
                   ['master']),

    'apply-new-code': ('anc', 'git add -A && git stash && git fetch && git reset --hard origin/{0} && git stash apply',
                       ['master']),

    'run': ('r', '{0}', []),
    'deploy-core-full': ('dcf', '{0} undeploy && {0} stop && {0} clean && {0} deploy', []),

}

continue_on_fail = False
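Each actions entry maps a long name to an (alias, command template, default args) tuple; a consumer would presumably expand the template with str.format. For instance, under that assumption:

# Hypothetical dispatch of one entry (the consuming code is not shown above).
alias, template, defaults = actions['reset-hard']
command = template.format(*defaults)
# -> git reset --hard && git checkout master && git fetch && git reset --hard origin/master
print(command)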
Example #25
def reloadConfigs():
	global fanPort
	global minFanUpTime
	global refreshRate
	global maxTemp
	global minTemp
	global channel_id
	global write_key
	global tskrefresh
	global channel
	global isRelay
	global onValue
	global offValue
	global useSocCmd
	
	lastFanPort = fanPort
	
	(fanPort,minFanUpTime,refreshRate,maxTemp,minTemp,channel_id,write_key,tskrefresh,isRelay,useSocCmd) = configs.loadConfig()
	
	if(channel_id != -1):
		channel = thingspeak.Channel(id=channel_id,write_key=write_key)
	
	if(isRelay):
		onValue = 0
		offValue = 1
	else:
		onValue = 1
		offValue = 0
	
	GPIO.cleanup(lastFanPort)
	setGPIO()
Example #26
def main():
    fieldConfigFile, config_inputdir, jobType, hourly = parse_opts()
    cI = ConfigUtils(config_inputdir, fieldConfigFile)
    configItems = cI.getConfigs()
    configItems['dataset_name'] = jobType
    lg = pyLogger(configItems)
    logger = lg.setConfig()
    logger.info("****************JOB START******************")
    sc = SocrataClient(config_inputdir, configItems, logger)
    client = sc.connectToSocrata()
    clientItems = sc.connectToSocrataConfigItems()
    scrud = SocrataCRUD(client, clientItems, configItems, logger)
    sQobj = SocrataQueries(clientItems, configItems, logger)

    mmdd_fbf = configItems['dd']['master_dd']['fbf']
    ds_profiles_fbf = configItems['dd']['ds_profiles']['fbf']
    base_url = configItems['baseUrl']
    field_type_fbf = configItems['dd']['field_type']['fbf']
    asset_inventory_fbf = configItems['dd']['asset_inventory']['fbf']

    ds_profiles = ProfileDatasets.getCurrentDatasetProfiles(
        sQobj, base_url, ds_profiles_fbf)
    update_counter = 0
    updated_datasets = []

    #if int(hourly) == 1:
    #  print "****hourly update****"
    for datasetid, last_updt in ds_profiles.iteritems():
        mm_profiles_to_updt = ProfileDatasets.getViewsLastUpdatedAt(
            datasetid, last_updt, clientItems)
        #print datasetid
        #print mm_profiles_to_updt
        if 'cols' in mm_profiles_to_updt:
            dataset_info_mm = {
                'Socrata Dataset Name': configItems['dataset_name'],
                'SrcRecordsCnt': 0,
                'DatasetRecordsCnt': 0,
                'fourXFour': mmdd_fbf,
                'row_id': 'columnid'
            }
            dataset_info_mm['DatasetRecordsCnt'] = 0
            dataset_info_mm['SrcRecordsCnt'] = len(mm_profiles_to_updt['cols'])
            #print mm_profiles_to_updt.keys()
            print "***************"
            print "Updating " + mm_profiles_to_updt['dataset_name']
            print "*************"
            print "**************"

            dataset_info_mm = scrud.postDataToSocrata(
                dataset_info_mm, mm_profiles_to_updt['cols'])
            update_counter = update_counter + 1
            updated_datasets.append(mm_profiles_to_updt['dataset_name'])

    datasets = ProfileDatasets.getBaseDatasets(sQobj, base_url, mmdd_fbf)
    asset_inventory_dict = ProfileDatasets.getAssetInventoryInfo(
        sQobj, base_url, asset_inventory_fbf)
    #delete datasets from the profile that no longer exist
    ProfileDatasets.removeDeletedDatasets(scrud, ds_profiles_fbf,
                                          asset_inventory_dict, ds_profiles,
                                          datasets)

    ds_profiles = ProfileDatasets.getCurrentDatasetProfiles(
        sQobj, base_url, ds_profiles_fbf)

    field_types = ProfileDatasets.getFieldTypes(sQobj, base_url,
                                                field_type_fbf)

    dataset_info = ProfileDatasets.buildInsertDatasetProfiles(
        sQobj, scrud, configItems, datasets, ds_profiles, field_types,
        asset_inventory_dict)

    ## triggers a webtask to update the algolia index
    updated_algolia = False
    if (WebTasks.runWebTask(configItems)):
        updated_algolia = True
    print "******Ran algolia update script**** " + str(updated_algolia)
    print dataset_info
    dsse = JobStatusEmailerComposer(configItems, logger, jobType)
    if dataset_info['DatasetRecordsCnt'] > 1 and updated_algolia:
        print "update complete"
        dsse.sendJobStatusEmail([dataset_info])
    else:
        dataset_info = {
            'Socrata Dataset Name': configItems['dataset_name'],
            'SrcRecordsCnt': 0,
            'DatasetRecordsCnt': 0,
            'fourXFour': "Nothing to Insert"
        }
        dataset_info['isLoaded'] = 'success'
        dsse.sendJobStatusEmail([dataset_info])
Example #27
    # command line options handling
    parser = OptionParser()
    
    parser.add_option(
        "-l","--processloop",dest="processloop", default='0',
        help='Process Team Files')
    parser.add_option(    
        "-a","--aliases",dest="comp_alias_file",default='ScoutingAppEventAliases.txt',
        help="Competition Alias Configuration File")
    
    # Parse the command line arguments
    (options,args) = parser.parse_args()

    global_config = {}
    
    ConfigUtils.read_config(global_config, './config/ScoutingAppConfig.txt')

    logger = Logger.get_logger('./config', 'logging.conf', 'scouting.fileproc')
    global_config['logger'] = logger

    # load the competition alias file if one is specified
    if options.comp_alias_file != '':
        comp_alias_file = './config/' + options.comp_alias_file        
        logger.debug('Loading Competition Alias file: %s' % comp_alias_file)
        CompAlias.read_comp_alias_config(comp_alias_file)

    session = DbSession.open_db_session((global_config['db_name'] + global_config['this_season']), DataModel)

    counter = 0
    done = False
Example #28
True = 1

#########################################################################################################
#########################################################################################################
# Execution logic begins here
#########################################################################################################
#########################################################################################################
if len(sys.argv) < 3:  #Ref0036
    print 'Syntax:'
    print '  python ' + sys.argv[0] + ' ReportFileName FileList1 FileList2 ...'
    print '    ReportFileName is the name of output report (fully qualified or relative)'
    print '    FileList parameters are lists of files generated by CollectFileData.sh'  #Ref0036
    #	print '    LibertyTarX is a tar file created by ConfigDumpLiberty.sh'
    sys.exit(0)

cu.printMsg('Begin run.', False)

fileIdx = 1
if sys.argv[0].find('.py') > 0:
    fileIdx = 2

ReportFileName = sys.argv[1]
cu.printMsg('Report file name: %s' % ReportFileName, False)

#Get information from input files
FileNames = sys.argv[fileIdx:]
for idx in range(len(FileNames)):
    FileNames[idx] = FileNames[idx].strip()
ServerDataList = cu.createServerFileDataList(FileNames)  #Ref0030

#Run reports
Example #29
                               ["category=", "origin=", "config="])
except getopt.GetoptError:
    print('Usage: ' + progname + ' -c <category> -o <origin> -f <configfile>')
    sys.exit(2)
for opt, arg in opts:
    if opt == '-h':
        print(progname + ' -c <category> -o <origin> -f <configfile>')
        sys.exit()
    elif opt in ("-c", "--category"):
        category = arg
    elif opt in ("-o", "--origin"):
        origin = arg
    elif opt in ("-f", "--config"):
        configfile = arg

ConfigUtils.init(configfile)

localSyncFolderConfig = SyncFolderConfig('local', category, 'out')
cloudSyncFolderConfig = SyncFolderConfig('cloud', category, origin)

logger = LoggerFactory.createLogger(
    "upload", os.path.join(localSyncFolderConfig.subfolder, "upload.log"))

if __name__ == "__main__":
    path = localSyncFolderConfig.folder
    event_handler = EventHandler()
    observer = Observer()
    observer.schedule(event_handler, path, recursive=True)
    observer.start()
    UploadFolder.upload(localSyncFolderConfig, cloudSyncFolderConfig)
    try: