Example #1
    def __init__(self, subject, *args):
        """Set up a TASK child class environment.

        Initialise the Global Configuration, the Logger and the system load routines.
        Define a list of dependencies that are prerequisites to run this task.
        Define, create and alias a working directory for the task.

        If more arguments have been supplied to the generic task, GenericTask will create an alias
        for each additional arg, adding the suffix Dir to the name provided
        in the first optional arg passed to __init__.

        """

        self.__order = self.initializeTasksOrder()
        self.__name = self.__class__.__name__.lower()
        self.__moduleName = self.__class__.__module__.split(".")[-1]
        self.__cleanupBeforeImplement = True
        self.config = subject.getConfig()
        self.subject = subject
        self.subjectDir = self.subject.getDir()
        self.toadDir = self.config.get('arguments', 'toad_dir')
        self.workingDir = os.path.join(self.subjectDir, self.__moduleName)
        self.logDir = os.path.join(self.subjectDir, self.get('dir', 'log'))
        self.qaDir = os.path.join(self.subjectDir, '00-qa')
        self.tasksAsReferences = None
        Logger.__init__(self, subject.getLogDir())
        Load.__init__(self, self.config)
        Qa.__init__(self)
        self.dependencies = []
        self.__dependenciesDirNames = {}
        for arg in args:
            self.dependencies.append(arg)
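The docstring above describes the *args behaviour: each extra positional argument becomes a task dependency and, per the description, is also exposed through a Dir-suffixed alias. A minimal sketch of a hypothetical subclass, assuming the GenericTask base class whose __init__ is shown here (the task names 'preparation' and 'denoising' are purely illustrative):

class Tractography(GenericTask):
    def __init__(self, subject):
        # 'preparation' and 'denoising' are hypothetical prerequisite task names; per the
        # docstring they would also become preparationDir / denoisingDir aliases.
        GenericTask.__init__(self, subject, 'preparation', 'denoising')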
Example #2
def etl_fact_macro_details(source_engine, target_engine):
    """fact_macro_details的etl主函数

    从235 tag_detail表etl到240 fact_macro_details表
    :param source_engine: 源数据库引擎
    :param target_engine: 目标数据库引擎
    """
    extract = Extract(source_engine, target_engine)
    transform = Transform()
    load = Load(target_engine)
    record = Record(table='fact_macro_detail', record_path='rec.cfg')

    start_params = record.get_record()
    divisions = extract.std_divisions()

    for i in range(start_params['rounds']):
        start_id = start_params['update_id'] + i * start_params['chunksize'] + 1
        end_id = start_params['update_id'] + (
            i + 1) * start_params['chunksize'] + 1

        tag_details = extract.tag_details(start_id, end_id)
        if len(tag_details) == 0:
            continue
        macro_details = transform.compile_datasets(tag_details, divisions)
        load.loading(macro_details)
        max_id = tag_details['id'].max()
        update_id = max_id if max_id else start_params['update_id']
        record.update_record(update_id)
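A small sketch of the chunk-window arithmetic used in the loop above, with an illustrative start_params dict (the real values come from record.get_record()):

# Illustrative checkpoint values; in the ETL they come from record.get_record().
start_params = {'update_id': 1000, 'chunksize': 500, 'rounds': 3}

for i in range(start_params['rounds']):
    start_id = start_params['update_id'] + i * start_params['chunksize'] + 1
    end_id = start_params['update_id'] + (i + 1) * start_params['chunksize'] + 1
    # Rounds yield (1001, 1501), (1501, 2001), (2001, 2501); each round presumably
    # covers ids start_id .. end_id - 1, so the next round starts where this one ended.
    print(start_id, end_id)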
Example #3
def etl_fact_draw_main(engine_source, engine_target, chunksize=5000, record_file='etl_fact_draw.record'):
    """ETL for the draw fact table.

    :param engine_source: source database engine
    :param engine_target: target database engine
    """
    extract = Extract(engine_source, chunksize, record_file)
    transform = Transform()
    load = Load(engine_target)
    # Extract the datasets
    df_industry, df_draw_gen = extract.extract_main()
    logging.info('Extract datasets completed.')

    for k, df_draw in enumerate(df_draw_gen, 1):
        logging.info('Round %d, from obs. %d to obs. %d, start.' %
                     (k, (k - 1) * chunksize, k * chunksize))
        # Clean and transform the data
        df_clean = transform.transform_main(df_industry, df_draw)
        logging.info('Round %d, data cleaning completed.' % k)

        try:
            load.load_main(df_clean)
            logging.info('Round %d, loading %d obs. succeeded.' % (k, len(df_clean)))
            with open(record_file, 'w') as f:
                f.write(str(max(df_draw['id'])))
        except Exception as e:
            df_clean[['drawGuid', 'marketGuid']].to_csv('unsecceed_samples.csv', mode='a', index=False)
            logging.error('Round %d, %s' % (k, e))
            raise
Example #4
 def __init__(self, address):
     self.address = address
     self.loadList = []
     with open(self.configFile) as data:
         map = json.load(data)
     common_house_address = map['mainStation'][address]['house']
     common_solar_address = map['mainStation'][address]['solar']
     self.loadList.append(Load(common_house_address, self.commonHouseProfit))
     self.loadList.append(Load(common_solar_address, self.commonSolarCost))
Example #5
    def __init__(self, P_Builder):
        self.Builder = P_Builder
        self.Load = Load()
        self.File = File()
        self.Save = Save(self.File)
        self.Kanban = Kanban()
        self.Graphical_Kanban = None

        self.action_flag = None
        self.Temp_Widget_Reference = None
Example #6
def etl(game, extract_date, data_dir=DATA_DIR, db=load.DB_FILENAME):
    logger.info('Start ETL for game {0}'.format(game))
    load_date = datetime.today()
    data_dir = os.path.join(data_dir, game)
    if game == 'hb':
        trans_fun = Transform.hb_transform
    elif game == 'wwc':
        trans_fun = Transform.wwc_transform
    else:
        # Avoid an UnboundLocalError later when the game code is unknown.
        raise ValueError('Unknown game: {0}'.format(game))

    data = Extract.extract(data_dir, extract_date)
    data = Transform.transform(data, trans_fun, extract_date, load_date)
    Load.load(data, game, db=db)
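The if/elif above selects the per-game transform; a possible alternative is a dict-based dispatch over the same Transform callables, which keeps the mapping in one place and makes the unknown-game case explicit. This is only a sketch; any name other than Transform.hb_transform / Transform.wwc_transform is an assumption.

# Map game codes to transform callables (same functions as in the example above).
TRANS_FUNS = {
    'hb': Transform.hb_transform,
    'wwc': Transform.wwc_transform,
}

def pick_transform(game):
    # Fail loudly for games without a registered transform.
    try:
        return TRANS_FUNS[game]
    except KeyError:
        raise ValueError('Unknown game: {0}'.format(game))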
Example #7
def etl_fact_market(*args):
    """Main ETL function for the fact_market table.

    :param args: positional args engine_zone_macro, engine_draw, engine_target
    """
    engine_zone_macro, engine_draw, engine_target = args
    # Initialize the extract, transform and load objects
    extract = Extract(engine_zone_macro, engine_draw, engine_target)
    transform = Transform()
    load = Load(engine_target)

    # Extract the markets that have already been through ETL
    done_market = extract.done_market()
    df_tag_counts = extract.tag_counts()
    df_industry = extract.industry()
    has_dealed = []

    for i, sample_tag_counts in df_tag_counts.iterrows():

        grandParentId = sample_tag_counts['grandParentId']
        if len(grandParentId) != 36:  # check that grandParentId is valid
            logging.warning('Round %d, %s is invalid, skipped.' %
                            (i, grandParentId))
            continue

        elif grandParentId in done_market:  # this market has already been through ETL
            logging.warning('Round %d, %s etl before' % (i, grandParentId))
            continue

        if grandParentId in has_dealed:
            logging.warning('Round %d, %s etl before' % (i, grandParentId))
            continue
        else:
            has_dealed.append(grandParentId)

        # Extract data
        zone_grandparent = extract.zone_grandparent(grandParentId)
        if len(zone_grandparent) == 0:
            logging.warning('Round %d, has no draw samples' % i)
            continue
        rent = extract.rent_details(grandParentId)
        industry_tmp = df_industry[df_industry['grandParentId'] ==
                                   grandParentId]
        # Transform data
        rent = transform.rent_calculate(rent)
        industry_dict = transform.reshape_industry(industry_tmp)
        # Combine data
        clean = transform.compile_dfs(sample_tag_counts, rent, industry_dict,
                                      zone_grandparent)
        try:
            load.loading(clean)
            logging.info('Round %d, %s etl succeeded' % (i, grandParentId))
        except Exception as e:
            logging.error('Round %d, %s' % (i, e))
Example #8
def etl_dimension_time(target_engine):
    """时间维度表主函数

    :param target_engine: 目标数据库引擎
    """
    extract = Extract()
    transform = Transform()
    load = Load(target_engine)

    full_time = extract.gen_full_time()
    time_table = transform.gen_date(full_time)
    load.loading(time_table)
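A minimal usage sketch for etl_dimension_time, assuming the target engine is a SQLAlchemy engine (the docstring only says "target database engine", so the connection URL below is illustrative):

from sqlalchemy import create_engine

# Illustrative DSN; replace with the real data-warehouse connection string.
target_engine = create_engine('postgresql://user:password@localhost:5432/warehouse')
etl_dimension_time(target_engine)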
Example #9
class GameFSM(FSM):
    def enterStartMenu(self):
        self.menu = StartMenu()

    def exitStartMenu(self):
        self.menu.destroy()
        del self.menu

    def enterTrain(self):
        self.train = TrainingMode()

    def exitTrain(self):
        self.train.destroy()
        del self.train

    def enterBattle(self):
        self.battle = BattleMode()

    def exitBattle(self):
        if self.battle.popupText:
            self.battle.popupText.detachNode()
            self.battle.popupText = None
        self.battle.destroy()
        del self.battle

    def enterControls(self):
        self.controls = Controls()

    def exitControls(self):
        self.controls.destroy()
        del self.controls

    def enterSave(self):
        self.save = Save()

    def exitSave(self):
        self.save.destroy()
        del self.save

    def enterLoad(self):
        self.load = Load()

    def exitLoad(self):
        self.load.destroy()
        del self.load

    def enterStats(self):
        self.stats = Stats()

    def exitStats(self):
        self.stats.destroy()
        del self.stats
Example #10
def etl_demension_division(target_engine):
    """division表的etl主函数

    从统计局爬取的标准csv表中抽取数据,载入到数据仓库
    :param target_engine:目标数据库引擎
    """
    extract = Extract()
    transform = Transform()
    load = Load(target_engine)
    logging.info('Initialize three instances')

    division_datasets = extract.std_divisions()
    std_districts = transform.std_districts(division_datasets)
    load.loading(std_districts)
Example #11
 def resume(self):
     self.resume_check = True
     self.main.activeObj = set()
     print("LOADING THE GAME")
     load_game = Load(self.main)
     load_game.load()
     print(self.resume_check)
     #need to get values for these to actually work
     self.main.color = load_game.getColor()
     self.main.numPlayers = load_game.getNumPlayers()
     self.main.activeObj = set()
     self.main.board = Board(self.main)
     self.main.game = Game(self.main)
     self.main.deck = Deck(self.main)
     self.main.deck.start_deck()
     load_game.set_values()
     self.main.game.playing.playerInfoList = self.main.game.playing.getPlayerInfoList(
         self.main.game.playerNum)
     self.main.game.playing.relaxedButton.visible = False
     # print("6")
     # self.main.save = Save(self.main)
     # print("7")
     # self.main.save.save()
     print("DONE LOADING")
     self.main.gameStarted = True
Example #12
    def test_load(self):
        data = [{'accountid':'1', 'gender':'female', 'age':42, 'country':'Germany', 
                 'extract_date':'2018-06-28', 'load_date':'2019-03-07', 'game':'foo'},
                {'accountid':'2', 'gender':'male', 'age':38, 'country':'United States', 
                 'extract_date':'2018-06-28', 'load_date':'2019-03-07', 'game':'bar'}]
        Load.load(data, 'test', TEST_DB)

        expected = [('foo', '1', 'female', 42, 'Germany', '2018-06-28', '2019-03-07'), 
                    ('bar', '2', 'male', 38, 'United States', '2018-06-28', '2019-03-07')]
        with sqlite3.connect(TEST_DB) as conn:
            c = conn.cursor()
            sql = '''SELECT * FROM test_accounts'''
            c.execute(sql)
            result = c.fetchall()
            self.assertEqual(result, expected)
Example #13
def Load_Cfagent(defaults):
    with Load(defaults["load_name"], num=defaults['num']) as load:
        collector, env, mover, teleporter, CFagent = load.items(Collector, Game, Mover, Teleporter, CFAgent)
        buffer = ReplayBuffer(**defaults)
        CFbuffer = CFReplayBuffer(**defaults)

        with Save(env, collector, mover, teleporter, CFagent, **defaults) as save:
            intervention_idx, modified_board = teleporter.pre_process(env)
            dones = CFagent.pre_process(env)
            CF_dones, cfs = None, None
            CFagent.CF_count = 0
            for frame in loop(env, collector, save, teleporter):
                CFagent.counterfact(env, dones, teleporter, CF_dones, cfs)
                modified_board = teleporter.interveen(env.board, intervention_idx, modified_board)
                actions = mover(modified_board)
                observations, rewards, dones, info = env.step(actions)
                modified_board, modified_rewards, modified_dones, teleport_rewards, intervention_idx = teleporter.modify(observations, rewards, dones, info)
                buffer.teleporter_save_data(teleporter.boards, observations, teleporter.interventions, teleport_rewards, dones, intervention_idx)
                mover.learn(modified_board, actions, modified_rewards, modified_dones)
                board_before, board_after, intervention, tele_rewards, tele_dones = buffer.sample_data()
                teleporter.learn(board_after, intervention, tele_rewards, tele_dones, board_before)
                collector.collect([rewards, modified_rewards, teleport_rewards], [dones, modified_dones])
                CF_dones, cfs = CFagent.counterfact_check(dones, env, **defaults)
                CFbuffer.CF_save_data(CFagent.boards, observations, CFagent.counterfactuals, rewards, dones, CF_dones)
                CFboard, CFobs, cf, CFrewards, CFdones1 = CFbuffer.sample_data()
                CFagent.learn(CFobs, cf, CFrewards, CFdones1, CFboard)
Example #14
def run_etl(filename):
    logger.info("application ran")
    start = time.time()
    app = Extract()
    # Command to extract data from csv via s3 bucket:
    raw_data_list = app.get_data_from_bucket(filename)
    # Commands to load data from RDS:
    # raw_data_list = app.load_yesterdays_data() # extract output from yesterday
    # raw_data_list = app.load_all_data()  # extract output from all time
    end_extract = time.time()
    extract_time = round(end_extract - start, 4)
    print(f"Extract time: {extract_time}")
    logger.info(f"Extract time: {extract_time}")
    apple = Transform()
    transformed_data, new_drinks, new_locations, basket = apple.transform(raw_data_list) # raw data into transform returns transformed data and drinks dic
    # transformed_data, basket = apple.transform(raw_data_list) # raw data into transform returns transformed data and drinks dic

    end_transform = time.time()
    transform_time = round(end_transform - end_extract,4)
    logger.info(f"Transform time: {transform_time}")
    print(f"Transform time: {transform_time}")
    appley = Load()

    # appley.save_transaction(transformed_data) # populate RDS instance with cleaned data.
    # appley.save_drink_menu(new_drinks) # generate drinks menu
    # appley.save_location_menu(new_locations) # generate locations menu
    # appley.save_basket(basket) # generate drinks menu


    end_load = time.time()
    load_time = round(end_load - end_transform, 4)
    logger.info(f"Loading time: {load_time}")
    total_time = extract_time + transform_time + load_time
    logger.info(f"total time: {total_time}")
    print(f"Load time: {load_time}\nTotal time: {total_time}")
Example #15
    def cache_update(cmode, ignore_error):

        dotrc = Glo.dot_buildc_rc_path()
        if not os.path.exists(dotrc):
            print 'Can not found ' + dotrc
            print 'Please run buildc init and then config .buildc.rc!'
            sys.exit(Errors.conf_file_not_found)

        dotrepository = Glo.dot_buildc_repository_path()

        buildc_rc = Load.load_dot_buildc_rc(dotrc)
        cache_svn_tree = CacheSvnTree(buildc_rc.external_repositories)
        cache_svn_tree.import_format_tree_from_file(dotrepository)

        result = cache_svn_tree.check_tree_consistency()
        if result == False:
            return False

        result = cache_svn_tree.is_new_tree(None)
        if result == True:
            print "Warning: local cache does not need to be updated."
            return False

        cache_svn_tree.update_tree(None, cmode, ignore_error)

        cache_svn_tree.export_format_tree_to_file(dotrepository)
Example #16
def main(argv):
    message = OrderedDict({
        "Network": FLAGS.network,
        "data": FLAGS.data,
        "epoch": FLAGS.n_epoch,
        "batch_size": FLAGS.batch_size,
        "Optimizer": FLAGS.opt,
        "learning_rate": FLAGS.lr,
        "Denoising": FLAGS.denoise,
        "l2_norm": FLAGS.l2_norm,
        "Augmentation": FLAGS.aug
    })

    ## load dataset
    data = Load(FLAGS.data)

    ## setting models
    encode, decode = set_model(outdim=40, size=data.size, channel=data.channel)
    model = eval(FLAGS.network)(encode=encode,
                                decode=decode,
                                denoise=FLAGS.denoise,
                                size=data.size,
                                channel=data.channel,
                                name=FLAGS.network,
                                out_dim=data.output_dim,
                                lr=FLAGS.lr,
                                opt=FLAGS.opt,
                                trainable=True)

    #training
    trainer = AETrainer(FLAGS, message, data, model, FLAGS.network)
    trainer.train()
    return
Example #17
def objective(trial):
    tf.reset_default_graph()
    param = {
        'opt' : trial.suggest_categorical('opt', ['SGD','Momentum','Adadelta','Adagrad','Adam','RMSProp']),
        'lr' : trial.suggest_loguniform('lr', 8e-5, 8e-2),
        'batch_size' : trial.suggest_categorical('batch_size', [64, 96 ,128]),
        'aug': trial.suggest_categorical('aug', ['None','shift','mirror','rotate','shift_rotate','cutout']),
        'l2': trial.suggest_categorical('l2', ['True','False'])
    }

    FLAGS.aug = param['aug']
    FLAGS.l2_norm = param['l2']
    FLAGS.batch_size = param['batch_size']

    # prepare training
    ## load dataset
    data = Load(FLAGS.data)

    ## setting models
    model_set = set_model(data.output_dim)
    model = eval(FLAGS.network)(model=model_set, name=FLAGS.network, out_dim=data.output_dim, lr=param['lr'], opt=param['opt'], trainable=True)

    #training
    trainer = OptunaTrain(FLAGS=FLAGS, message=None, data=data, model=model, name='tuning')
    test_accuracy = trainer.train()
    return -test_accuracy
Example #18
    def pack_build(cmode, tag, force_update):
        build_home = os.getcwd()

        attribute_lists = Pack.__pack_init()
        distribution = attribute_lists.distribution
        source       = attribute_lists.source

        Pack.__do_clean(build_home)

        if "dependences" in list(dir(attribute_lists)):
            dependences = attribute_lists.dependences

            result = Cache.cache_build_by_external_libs(dependences, cmode, force_update)
            if result == False:
                return False

            dotrc = Glo.dot_buildc_rc_path()
            if not os.path.exists(dotrc):
                print('Can not found ' + dotrc)
                print('Please run buildc init and then config .buildc.rc!')
                sys.exit(Errors.conf_file_not_found)
            buildc_rc = Load.load_dot_buildc_rc(dotrc)

            dotrepository  = Glo.dot_buildc_repository_path()
            svn_tree = SvnTree()
            svn_tree.import_format_tree_from_file(dotrepository)
            for dependence in dependences:
                Pack.__copy_dependent_library(dependence, svn_tree, buildc_rc, build_home, cmode)

        result = Pack.__do_pack(build_home, source, distribution, cmode, tag, force_update)
        if result == False:
            return False
        return True
Example #19
    def ld_rec(self):
        self.load_widget.select_recording(self)   #Pass main widget to subwidgets as it contains needed parameters.
 
        #RESTART Process widget with updated info; SEEMS THERE SHOULD BE A BETTER WAY TO DO THIS
        self.load_widget = Load(self)
        self.central_widget.addWidget(self.load_widget)
        self.central_widget.setCurrentWidget(self.load_widget)
Example #20
    def test_clear(self):
        self.input_file = StringIO()
        self.input_file.write(
            'Text<value>.SF NS Text,13,-1,5,50,0,0,0,0,0<value>Text<value>48<value>218<value>4278190335<value>Group1\n'
        )
        self.input_file.write(
            'Ellipse<value>237<value>277<value>344<value>392<value>4278190335<value>Group1\n'
        )
        self.input_file.write(
            'Line<value>315:58;315:59;315:61;317:67;321:72;324:'
            '76;332:93;342:108;351:117;357:124;360:129;365:'
            '134;367:137;368:140;370:143;373:147;375:151;375:'
            '152;376:152;377:156;377:157;377:158<value>'
            '4294901760<value>Group2\n')
        self.input_file.write(
            'Rect<value>176<value>131<value>92<value>50<value>4294901760<value>Group2\n'
        )
        self.input_file.seek(0, 0)

        app = QApplication(sys.argv)
        mainwindow = MainWindow()
        load = Load(self.input_file, mainwindow.piirtoalusta)
        self.input_file.close()
        mainwindow.menubar.clear()
        items = mainwindow.piirtoalusta.scene.items()

        self.assertEqual(0, len(items), "Undo failed. Item still there")
Example #21
File: mie.py Project: mjonyh/mie
class Mie_class:
  def __init__(self):
    self.intp_data = Load();

  def mie(self, a = 0.3):
    qsca = [];
    qabs = [];
    qext = [];
    mie = Mie();

    wl = np.arange(0.21, 1.2, .001);

    n_cu, k_cu, n_w, k_w = self.intp_data.intpdata(wl);

    for i in range(len(wl)):
      mie.x = a * 2*np.pi/wl[i];
      temp_n = n_cu[i]/n_w[i];
      temp_k = k_cu[i]+k_w[i];
      'print temp_n, temp_k'
      mie.m = complex(temp_n, temp_k);
      '''
      mie.y = 3 * a * 2*np.pi/wl[i];
      mie.m2 = complex(n_w[i], k_w[i]);
      '''
      qsca.append(mie.qsca());
      qabs.append(mie.qabs());
      qext.append(mie.qb());

    return wl, qsca, qabs, qext;
Example #22
    def cache_remove():
        dotrc = Glo.dot_buildc_rc_path()
        if not os.path.exists(dotrc):
            print 'Can not found ' + dotrc
            print 'Please run buildc init and then config .buildc.rc!'
            sys.exit(Errors.conf_file_not_found)

        buildc_rc = Load.load_dot_buildc_rc(dotrc)
        for repository in buildc_rc.external_repositories:
            svn_path = repository[0]
            cache_path = Glo.get_local_cache_path(
                svn_path, buildc_rc.external_repositories)

            print "\n===>Begin remove local cache of repository [" + svn_path + ']'
            ret = Util.execute_and_return('rm -rf ' + cache_path)
            if (ret != 0):
                print 'Remove [' + cache_path + '] Failed!'
                sys.exit(ret)
            else:
                print 'Remove [' + cache_path + '] OK!'
            print "\n<=== End remove local cache of repository [" + svn_path + ']'

        dotrepository = Glo.dot_buildc_repository_path()
        svn_tree = SvnTree()
        svn_tree.import_format_tree_from_file(dotrepository)
        svn_tree.take_item_data_by_browse(None, SvnTree.set_empty_node, 1)
        svn_tree.export_format_tree_to_file(dotrepository)
Example #23
def main():

    if len(sys.argv) != 3:
        print('Error: Execution -> python3 main.py <url> <name_database>')
        exit(1)

    url = sys.argv[1]
    name_db = sys.argv[2]

    transformation = Transformation(url=url,
                                    output_path='databases/',
                                    name_db=name_db)
    transformation.transformation()

    load = Load(transformation.new_engine)
    load.load(output_path='excel/')
Example #24
class Mie_class:
    def __init__(self):
        self.intp_data = Load()

    def mie(self, a=0.3):
        qsca = []
        qabs = []
        qext = []
        mie = Mie()

        wl = np.arange(0.21, 1.2, .001)

        n_cu, k_cu, n_w, k_w = self.intp_data.intpdata(wl)

        for i in range(len(wl)):
            mie.x = a * 2 * np.pi / wl[i]
            temp_n = n_cu[i] / n_w[i]
            temp_k = k_cu[i] + k_w[i]
            'print temp_n, temp_k'
            mie.m = complex(temp_n, temp_k)
            '''
      mie.y = 3 * a * 2*np.pi/wl[i];
      mie.m2 = complex(n_w[i], k_w[i]);
      '''
            qsca.append(mie.qsca())
            qabs.append(mie.qabs())
            qext.append(mie.qb())

        return wl, qsca, qabs, qext
Example #25
    def cache_remove():
        dotrc = Glo.dot_buildc_rc_path()
        if not os.path.exists(dotrc):
            print 'Can not found ' + dotrc
            print 'Please run buildc init and then config .buildc.rc!'
            sys.exit(Errors.conf_file_not_found)

        buildc_rc = Load.load_dot_buildc_rc(dotrc)
        for repository in buildc_rc.external_repositories:
            svn_path   = repository[0]
            cache_path = Glo.get_local_cache_path(svn_path, buildc_rc.external_repositories)

            print "\n===>Begin remove local cache of repository [" + svn_path + ']'
            ret = Util.execute_and_return('rm -rf ' + cache_path)
            if (ret != 0):
                print 'Remove [' + cache_path + '] Failed!'
                sys.exit(ret)
            else:
                print 'Remove [' + cache_path + '] OK!'
            print "\n<=== End remove local cache of repository [" + svn_path + ']'

        dotrepository = Glo.dot_buildc_repository_path()
        svn_tree  = SvnTree()
        svn_tree.import_format_tree_from_file(dotrepository)
        svn_tree.take_item_data_by_browse(None, SvnTree.set_empty_node, 1)
        svn_tree.export_format_tree_to_file(dotrepository)
Example #26
    def cache_update(cmode, ignore_error):

        dotrc = Glo.dot_buildc_rc_path()
        if not os.path.exists(dotrc):
            print 'Can not found ' + dotrc
            print 'Please run buildc init and then config .buildc.rc!'
            sys.exit(Errors.conf_file_not_found)

        dotrepository = Glo.dot_buildc_repository_path()

        buildc_rc  = Load.load_dot_buildc_rc(dotrc)
        cache_svn_tree = CacheSvnTree(buildc_rc.external_repositories)
        cache_svn_tree.import_format_tree_from_file(dotrepository)

        result = cache_svn_tree.check_tree_consistency()
        if result == False:
            return False

        result = cache_svn_tree.is_new_tree(None)
        if result == True:
            print "Warning: local cache does not need to be updated."
            return False

        cache_svn_tree.update_tree(None, cmode, ignore_error)

        cache_svn_tree.export_format_tree_to_file(dotrepository)
Example #27
    def __init__(self):
        self.b = Battery(constants.battery_capacity,constants.battery_max_charge,constants.battery_max_discharge,constants.nbatt,constants.nbatt_c,constants.nbatt_d,\
            constants.battery_cost, constants.life_time, constants.round_trip)
        self.g = GasTurbine(constants.gas_turbine_max, constants.microgas_turbine_om_cost, constants.fual_cost, constants.co2_coe, constants.co2_cost, \
            constants.so2_coe, constants.so2_cost, constants.no_coe, constants.no_cost)
        self.l = Load(constants.shortage_cost)
        self.p = PV(constants.pv_max, constants.pv_om_cost)
        self.w = WindTurbine(constants.wind_turbine_max,
                             constants.wind_turbine_om_cost)
        self.m = MEMS(self.b, self.g, self.l, self.p, self.w)
        self.bm = BaseMEMS(self.b, self.g, self.l, self.p, self.w)

        self.l.set_forecast(constants.load_important_forecast,
                            constants.load_transferable_forecast)
        self.p.set_forecast([ir / 0.2 * 1000 for ir in constants.pv_forecast])
        self.w.set_forecast(
            [0.2 * wind_speed**3 for wind_speed in constants.wind_forecast])
Example #28
def etl_fact_market(source_engine, target_engine, rec_path):

    extract = Extract(source_engine, target_engine)
    transform = Transform()
    load = Load(target_engine)
    record = Record('rec.cfg')

    start_params = record.get_record()
    unique_marketguid = []
    done_market = []
    has_dealed = []

    for i, grandParentId in enumerate(unique_marketguid):

        if len(grandParentId) != 36:  # check that grandParentId is valid
            logging.error('Round %d, %s is not valid.' % (i, grandParentId))
            continue

        elif grandParentId in done_market:  # this market has already been through ETL
            logging.warning('Round %d, %s etl before' % (i, grandParentId))
            continue

        if grandParentId in has_dealed:
            logging.warning('Round %d, %s etl before' % (i, grandParentId))
            continue
        else:
            has_dealed.append(grandParentId)

        zone_grandparent = extract.zone_grandparent(grandParentId)
        if len(zone_grandparent) == 0:
            logging.warning('Round %d, has no draw samples' % i)
            continue

        rent = extract.rent_details(grandParentId)
        industry_tmp = industry[industry['grandParentId'] == grandParentId]
        # Transform data
        rent = transform.rent_calculate(rent)
        industry_dict = transform.reshape_industry(industry_tmp)
        # Combine data
        clean = transform.compile_dfs(sample_tag_counts, rent, industry_dict,
                                      zone_grandparent)
        try:
            load.loading(clean)
        except Exception as e:
            logging.error('Round %d, %s' % (i, e))
Example #29
    def ld_rat(self):

        self.load_widget.load_rat(self)   #Pass main widget to subwidgets as it contains needed parameters.
        self.exp_type = 'rat'

        #RESTART Process widget with updated info; SEEMS THERE IS A BETTER WAY TO DO THIS
        self.load_widget = Load(self)
        self.central_widget.addWidget(self.load_widget)
        self.central_widget.setCurrentWidget(self.load_widget)
Example #30
    def __build_component_deps(build_home, url, cmode, force_update):
        if not os.path.exists(build_home + os.sep + '.build'):
            Util.execute_and_output('mkdir -p ' + build_home + os.sep +
                                    '.build')
        os.chdir(build_home + os.sep + '.build')
        print "Create dir [.build] OK!"

        Util.execute_and_output('rm -rf ' + url[url.rindex("/") + 1:])
        SvnLocalOper.export(url, None, None, Glo.source_svn_user(),
                            Glo.source_svn_passwd(), False)
        print "Export [" + url + "] OK!"

        source_home = build_home + '/.build/' + url[url.rindex("/") + 1:]
        os.chdir(source_home)
        print "Cd " + source_home

        dotrc = Glo.dot_buildc_rc_path()
        if not os.path.exists(dotrc):
            print('Can not found ' + dotrc)
            print('Please run buildc init and then config .buildc.rc!')
            sys.exit(Errors.conf_file_not_found)
        buildc_rc = Load.load_dot_buildc_rc(dotrc)

        buildc_cfg = Load.load_buildc_cfg(Glo.buildc_cfg_path(), Glo.var_str())

        is_valid = Cache.cache_build_by_external_libs(buildc_cfg.external_libs,
                                                      cmode, force_update)
        if is_valid == False:
            os.chdir(build_home)
            print "Cd " + build_home
            return False

        dotrepository = Glo.dot_buildc_repository_path()
        svn_tree = SvnTree()
        svn_tree.import_format_tree_from_file(dotrepository)
        for dependence in buildc_cfg.external_libs:
            Pack.__copy_dependent_all(dependence, svn_tree, buildc_rc,
                                      build_home, cmode)

        os.chdir(build_home)
        print "Cd " + build_home

        print 'Build deps [' + url + '] OK!'
        return True
Example #31
    def __init__(self):
        super(Window, self).__init__()
        self.setGeometry(50, 50, 500, 300)
        #self.setWindowTitle("OpenNeuron")
        self.setWindowIcon(QtGui.QIcon('pythonlogo.png'))


        #Set widget to show up with viewbox
        toolMenu = QtGui.QMenuBar()
        toolMenu.setNativeMenuBar(False) # <--Sets the menu with the widget; otherwise shows up as global (i.e. at top desktop screen)
        self.setMenuBar(toolMenu)

        #***** TEXT PARAMETERS FIELDS ******
        self.animal_name_text='' 
        self.rec_name_text=''
        if False: 
            #Mouse July 11 as default experiment
            self.root_dir = '/media/cat/12TB/in_vivo/tim/cat/' 
            self.animal_name_text='2016_07_11_vsd' 
            self.rec_name_text='track1_150Hz_iso1.0_spontaneous.rhd'
            
            self.animal = Mouse(self.animal_name_text, self.root_dir)
            self.animal.recName =self.root_dir+self.animal_name_text+'/rhd_files/'+ self.rec_name_text
            self.setWindowTitle(self.animal.name+'/'+self.animal.recName.replace(self.animal.home_dir+self.animal.name+'/rhd_files/', ''))
            self.animal.load_tsf_header(self.animal.recName.replace('rhd_files','tsf_files').replace('.rhd','_hp.tsf'))
            self.exp_type = 'mouse'

        if False: 
            #Cat ptc20 as default experiment
            self.root_dir = '/media/cat/8TB/in_vivo/nick/' 
            self.animal_name_text='ptc20' 
            self.rec_name_text='71-tr3-blankscreen/71-tr3-blankscreen.tsf'

            self.animal = Cat(self.animal_name_text, self.root_dir)
            self.animal.recName =self.root_dir+self.animal_name_text+'/'+ self.rec_name_text
            self.setWindowTitle(self.animal.name+'/'+self.animal.recName.replace(self.root_dir+self.animal_name_text+'/', ''))
            self.animal.load_tsf_header(self.animal.recName)
            self.exp_type = 'cat'

        #Load default experiment

        #self.animal.rec_length = self.animal.tsf.n_vd_samples/float(self.animal.tsf.SampleFrequency)


        #Menu Item Lists
        self.make_menu()

        #LOAD CENTRAL WIDGET
        self.central_widget = QtGui.QStackedWidget()
        self.setCentralWidget(self.central_widget)
        
        #SET DEFAULT WIDGET TO PROCESS
        self.load_widget = Load(self)
        self.central_widget.addWidget(self.load_widget)
        
        self.show()
Example #32
 def reset(self):
     self.__data = {}
     now = datetime.datetime.now()
     self.__data['seq'] = int(round(time.time() * 1000))
     self.__data['timestamp-client'] = now.isoformat()
     cinfo = cpuinfo.get_cpu_info()
     self.__data['cpu'] = format(cinfo['brand'])
     osinfo = OsInfo()
     oinfo = osinfo.getOsInfo(cinfo)
     self.__data['os'] = oinfo['os']
     self.__data['os-dist'] = oinfo['dist']
     self.__data['os-version'] = oinfo['version']
     self.__data['os-arch'] = oinfo['arch']
     self.__data['os-kernel'] = oinfo['kernel']
     self.__data['cpu-temp'] = cputemp.get_cpu_temp()
     l = Load()
     self.__data['cpu-load'] = l.getCpuLoad()
     self.__data['storage'] = l.getStorageStatus()
     self.__data['network'] = netinfo.get_network_interfaces()
Example #33
 def reset(self):
     a = 1
     self.__data = {}
     from os.path import expanduser
     idFileName = expanduser("~") + '/.hidevid'
     fileExists = os.path.exists(idFileName)
     if ( fileExists ):
         idFileHandle = open(idFileName,'r')
         deviceId = idFileHandle.readline()
         deviceId = deviceId.strip()
         self.__data['id'] = deviceId
         idFileHandle.close()
     
     now = datetime.datetime.now()
     self.__data['seq'] = int(round(time.time() * 1000))
     self.__data['tsClient'] = now.isoformat()
     cinfo = cpuinfo.get_cpu_info()
     self.__data['cpu'] = format(cinfo['brand'])
     self.__data['cpuCount'] = cinfo['count']
     osinfo = OsInfo()
     oinfo = osinfo.getOsInfo(cinfo)
     self.__data['os'] = oinfo['os']
     self.__data['osDist'] = oinfo['dist']
     self.__data['osVersion'] = oinfo['version']
     self.__data['osArch'] = oinfo['arch']
     self.__data['osKernel'] = oinfo['kernel']
     self.__data['cpuTemp'] = cputemp.get_cpu_temp()
     l = Load()
     self.__data['cpuLoad'] = l.getCpuLoad()
     MemInfo = meminfo.getMemoryStatus()
     self.__data['memAvail'] = MemInfo['memAvail'] / 1024
     self.__data['memUsed'] = MemInfo['memUsed'] / 1024
     self.__data['swapAvail'] = MemInfo['swpAvail'] / 1024
     self.__data['swapUsed'] = MemInfo['swpUsed'] / 1024
     self.__data['storage'] = l.getStorageStatus()
     self.__data['network'] = netinfo.get_network_interfaces()
     CpuTimes = cputimes.get_cpu_times()
     self.__data['cpuUser'] = CpuTimes['user']
     self.__data['cpuSystem'] = CpuTimes['system']
     self.__data['cpuIdle'] = CpuTimes['idle']
     self.__data['ioWait'] = CpuTimes['iowt']
     self.__data['UpTime'] = CpuTimes['uptime']
Example #34
    def __create_dot_repository(dotrc, dotrepository):
        buildc_rc = Load.load_dot_buildc_rc(dotrc)

        cache_svn_tree  = CacheSvnTree(buildc_rc.external_repositories)

        cache_svn_tree.check_local_cache_conflict()

        cache_svn_tree.build_tree()

        cache_svn_tree.take_item_data_by_browse(None, SvnTree.default_empty_node, 1)
        cache_svn_tree.export_format_tree_to_file(dotrepository)
Example #35
def main(args):
    print("------------Start Evaluation-----------")
    print("CheckPoint : {}".format(FLAGS.ckpt_dir))
    print("Network : {}".format(FLAGS.network))
    print("data : {}".format(FLAGS.data))
    print("---------------------------------------")

    # load dataset
    data = Load(FLAGS.data)
    batch_size = 100
    dataset = data.load(data.x_train,
                        data.y_train,
                        batch_size=batch_size,
                        is_training=True)
    iterator = dataset.make_initializable_iterator()
    inputs, labels = iterator.get_next()
    test_inputs = tf.random.uniform([batch_size * 3, FLAGS.z_dim], -1, +1)
    index = tile_index(batch_size * 3)

    model = eval(FLAGS.network)(z_dim=FLAGS.z_dim,
                                size=data.size,
                                channel=data.channel,
                                lr=0.0,
                                class_num=data.output_dim,
                                conditional=FLAGS.conditional,
                                opt=None,
                                trainable=False)

    D_logits, D_logits_ = model.inference(inputs, batch_size, labels)
    G = model.predict(test_inputs, batch_size * 3, index)

    tf.train.Saver()
    with tf.Session() as sess:
        utils = Utils(sess=sess)
        utils.initial()
        if utils.restore_model(FLAGS.ckpt_dir):
            image = sess.run(G)
            utils.gan_plot(image)
            return
        else:
            return
Example #36
    def reset(self):
        self.__data = {}
        from os.path import expanduser
        idFileName = expanduser("~") + '/.hidevid'
        fileExists = os.path.exists(idFileName)
        if (fileExists):
            idFileHandle = open(idFileName, 'r')
            deviceId = idFileHandle.readline()
            deviceId = deviceId.strip()
            self.__data['id'] = deviceId
            idFileHandle.close()

        now = datetime.datetime.now()
        self.__data['seq'] = int(round(time.time() * 1000))
        self.__data['tsClient'] = now.isoformat()
        cinfo = cpuinfo.get_cpu_info()
        self.__data['cpu'] = format(cinfo['brand'])
        self.__data['cpuCount'] = cinfo['count']
        osinfo = OsInfo()
        oinfo = osinfo.getOsInfo(cinfo)
        self.__data['os'] = oinfo['os']
        self.__data['osDist'] = oinfo['dist']
        self.__data['osVersion'] = oinfo['version']
        self.__data['osArch'] = oinfo['arch']
        self.__data['osKernel'] = oinfo['kernel']
        self.__data['cpuTemp'] = cputemp.get_cpu_temp()
        l = Load()
        self.__data['cpuLoad'] = l.getCpuLoad()
        MemInfo = meminfo.getMemoryStatus()
        self.__data['memAvail'] = MemInfo['memAvail'] / 1024
        self.__data['memUsed'] = MemInfo['memUsed'] / 1024
        self.__data['swapAvail'] = MemInfo['swpAvail'] / 1024
        self.__data['swapUsed'] = MemInfo['swpUsed'] / 1024
        self.__data['storage'] = l.getStorageStatus()
        self.__data['network'] = netinfo.get_network_interfaces()
        CpuTimes = cputimes.get_cpu_times()
        self.__data['cpuUser'] = CpuTimes['user']
        self.__data['cpuSystem'] = CpuTimes['system']
        self.__data['cpuIdle'] = CpuTimes['idle']
        self.__data['ioWait'] = CpuTimes['iowt']
        self.__data['UpTime'] = CpuTimes['uptime']
Example #37
    def __create_dot_repository(dotrc, dotrepository):
        buildc_rc = Load.load_dot_buildc_rc(dotrc)

        cache_svn_tree = CacheSvnTree(buildc_rc.external_repositories)

        cache_svn_tree.check_local_cache_conflict()

        cache_svn_tree.build_tree()

        cache_svn_tree.take_item_data_by_browse(None,
                                                SvnTree.default_empty_node, 1)
        cache_svn_tree.export_format_tree_to_file(dotrepository)
Example #38
 def loadFile(self):
     input_file = None
     try:
         path = QFileDialog.getOpenFileName(self.piirtoalusta)[0]
         input_file = open(path)
     except OSError:
         print("Could not open {}".format(path))
     else:
         Load(input_file, self.piirtoalusta)
     finally:
         if input_file:
             input_file.close()
Example #39
    def __build_component_deps(build_home, url, cmode, force_update):
        if not os.path.exists(build_home + os.sep + '.build'):
            Util.execute_and_output('mkdir -p ' + build_home + os.sep + '.build')
        os.chdir(build_home + os.sep + '.build')
        print "Create dir [.build] OK!"

        Util.execute_and_output('rm -rf ' + url[url.rindex("/")+1:])
        SvnLocalOper.export(url, None, None, Glo.source_svn_user(), Glo.source_svn_passwd(), False)
        print "Export [" + url + "] OK!"

        source_home = build_home + '/.build/' + url[url.rindex("/")+1:]
        os.chdir(source_home)
        print "Cd " + source_home

        dotrc = Glo.dot_buildc_rc_path()
        if not os.path.exists(dotrc):
            print('Can not found ' + dotrc)
            print('Please run buildc init and then config .buildc.rc!')
            sys.exit(Errors.conf_file_not_found)
        buildc_rc = Load.load_dot_buildc_rc(dotrc)

        buildc_cfg = Load.load_buildc_cfg(Glo.buildc_cfg_path(), Glo.var_str())

        is_valid = Cache.cache_build_by_external_libs(buildc_cfg.external_libs, cmode, force_update)
        if is_valid == False:
            os.chdir(build_home)
            print "Cd " + build_home
            return False

        dotrepository  = Glo.dot_buildc_repository_path()
        svn_tree = SvnTree()
        svn_tree.import_format_tree_from_file(dotrepository)
        for dependence in buildc_cfg.external_libs:
            Pack.__copy_dependent_all(dependence, svn_tree, buildc_rc, build_home, cmode)

        os.chdir(build_home)
        print "Cd " + build_home

        print 'Build deps [' + url + '] OK!'
        return True
Example #40
    def __check_buildc_cfg(cmode, lib_root_path=None):
        platform_info = Glo.CPU + '_' + cmode[0:2] + '_' + Glo.SYSTEM

        buildc_rc = Load.load_dot_buildc_rc(Glo.dot_buildc_rc_path())
        buildc_cfg = Load.load_buildc_cfg(Glo.buildc_cfg_path(), Glo.var_str())

        cache_libs = []
        dotrepository = Glo.dot_buildc_repository_path()

        cache_svn_tree = CacheSvnTree(buildc_rc.external_repositories)
        cache_svn_tree.import_format_tree_from_file(dotrepository)
        cache_svn_tree.get_cache_libs(None, cmode, cache_libs)

        libs_depend = []
        for (libname, libversion, archives) in buildc_cfg.external_libs:
            is_found = False
            for (cache_libname, cache_libversion, cache_path) in cache_libs:
                if libname == cache_libname and libversion == cache_libversion:
                    path = None
                    if lib_root_path == None:
                        path = cache_path + '/' + libname + '/' + libversion + '/' + platform_info
                    else:
                        path = lib_root_path + '/' + libname + '/' + libversion + '/' + platform_info

                    if not os.path.exists(cache_path + '/' + libname + '/' +
                                          libversion + '/' + platform_info):
                        print 'Can not found [' + cache_path + '/' + libname + '/' + libversion + '/' + platform_info + '] in local library cache!'
                        print 'Please make sure the library [' + cache_path + '/' + libname + '/' + libversion + '/' + platform_info + '] is available!'
                        continue

                    libs_depend.append((libname, libversion, archives, path))

                    is_found = True
                    break
            if not is_found:
                print 'Error: Can not found [' + libname + '] in local library cache!!!!'
                print 'Please make sure the library [' + libname + '] is available!'
                sys.exit(Errors.lib_not_found)

        return libs_depend
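The lookup above expects every cached library at <cache_path>/<libname>/<libversion>/<platform_info>, where platform_info is Glo.CPU + '_' + cmode[0:2] + '_' + Glo.SYSTEM. A small sketch of that path construction with illustrative values (the real ones come from Glo and the .buildc.rc configuration):

import os

# Illustrative values; buildc derives the real ones from Glo.CPU, Glo.SYSTEM and the cmode argument.
cpu, system, cmode = 'x86_64', 'linux', '64-bit'
platform_info = cpu + '_' + cmode[0:2] + '_' + system   # 'x86_64_64_linux'

cache_path, libname, libversion = '/home/user/buildc_cache', 'libevent', '2.1.8'
lib_dir = os.path.join(cache_path, libname, libversion, platform_info)
print(lib_dir)  # /home/user/buildc_cache/libevent/2.1.8/x86_64_64_linux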
Example #41
def run_etl(filename):
    logger.info("application ran")
    start = time.time()
    app = Extract()
    raw_data_list = app.get_data_from_bucket(filename) # extract output
    end_extract = time.time()
    extract_time = round(end_extract - start, 4)
    print(f"Extract time: {extract_time}")
    logger.info(f"Extract time: {extract_time}")
    apple = Transform()
    transformed_data, transformed_drink_menu_data = apple.transform_new_data(raw_data_list) # raw data into transform returns transformed data and drinks dic

    end_transform = time.time()
    transform_time = round(end_transform - end_extract,4)
    logger.info(f"Transform time: {transform_time}")
    print(f"Transform time: {transform_time}")
    appley = Load()

    appley.save_transaction(transformed_data) # populate RDS instance with cleaned data.
    appley.save_drink_menu(transformed_drink_menu_data) # generate drinks menu
 
    end_load = time.time()
    load_time = round(end_load - end_transform, 4)
    logger.info(f"Loading time: {load_time}")
    total_time = extract_time + transform_time + load_time
    logger.info(f"total time: {total_time}")
    print(f"Load time: {load_time}\nTotal time: {total_time}")
Example #42
    def __check_buildc_cfg(cmode, lib_root_path = None):
        platform_info = Glo.CPU + '_' + cmode[0:2] + '_' + Glo.SYSTEM

        buildc_rc = Load.load_dot_buildc_rc(Glo.dot_buildc_rc_path())
        buildc_cfg = Load.load_buildc_cfg(Glo.buildc_cfg_path(), Glo.var_str())

        cache_libs = []
        dotrepository = Glo.dot_buildc_repository_path()

        cache_svn_tree = CacheSvnTree(buildc_rc.external_repositories)
        cache_svn_tree.import_format_tree_from_file(dotrepository)
        cache_svn_tree.get_cache_libs(None, cmode, cache_libs)

        libs_depend = []
        for (libname, libversion, archives) in buildc_cfg.external_libs:
            is_found = False
            for (cache_libname, cache_libversion, cache_path) in cache_libs:
                if libname == cache_libname and libversion == cache_libversion:
                    path = None
                    if lib_root_path == None:
                        path = cache_path + '/' + libname + '/' + libversion + '/' + platform_info
                    else:
                        path = lib_root_path + '/' + libname + '/' + libversion + '/' + platform_info

                    if not os.path.exists(cache_path + '/' + libname + '/' + libversion + '/' + platform_info):
                        print 'Can not found [' + cache_path + '/' + libname + '/' + libversion + '/' + platform_info + '] in local library cache!'
                        print 'Please make sure the library [' + cache_path + '/' + libname + '/' + libversion + '/' + platform_info + '] is available!'
                        continue

                    libs_depend.append((libname, libversion, archives, path))

                    is_found = True
                    break
            if not is_found:
                print 'Error: Can not found [' + libname + '] in local library cache!!!!'
                print 'Please make sure the library [' + libname + '] is available!'
                sys.exit(Errors.lib_not_found)

        return libs_depend
Example #43
    def __init__(self, subject, *args):
        """Set up a TASK child class environment.

        Initialise the Global Configuration, the Logger and the system load routines.
        Define a list of dependencies that are prerequisites to run this task.
        Define, create and alias a working directory for the task.

        If more arguments have been supplied to the generic task, GenericTask will create an alias
        for each additional arg, adding the suffix Dir to the name provided
        in the first optional arg passed to __init__.

        """

        self.__order = self.initializeTasksOrder()
        self.__name = self.__class__.__name__.lower()
        self.__moduleName = self.__class__.__module__.split(".")[-1]
        self.__cleanupBeforeImplement = True
        self.config = subject.getConfig()
        self.subject = subject
        self.subjectDir = self.subject.getDir()
        self.toadDir = self.config.get('arguments', 'toad_dir')
        self.workingDir = os.path.join(self.subjectDir, self.__moduleName)
        self.logDir = os.path.join(self.subjectDir, self.get('dir', 'log'))
        self.qaDir = os.path.join(self.subjectDir, '00-qa')
        self.tasksAsReferences = None
        Logger.__init__(self, subject.getLogDir())
        Load.__init__(self, self.config)
        Qa.__init__(self)
        self.dependencies = []
        self.__dependenciesDirNames = {}
        for arg in args:
            self.dependencies.append(arg)
        # Needed for tractquerier and tractfiltering task
        self._defaultQuery = None
        self.queriesFile = self._tractquerierFile('queries',
                                                  'queries_freesurfer6')
        self.tq_dictFile = self._tractquerierFile('tq_dict',
                                                  'tq_dict_freesurfer6')
Example #44
    def cache_remove_by_cmode(cmode):
        dotrc = Glo.dot_buildc_rc_path()
        if not os.path.exists(dotrc):
            print 'Can not found ' + dotrc
            print 'Please run buildc init and then config .buildc.rc!'
            sys.exit(Errors.conf_file_not_found)

        buildc_rc  = Load.load_dot_buildc_rc(dotrc)
        dotrepository = Glo.dot_buildc_repository_path()
        cache_svn_tree = CacheSvnTree(buildc_rc.external_repositories)
        cache_svn_tree.import_format_tree_from_file(dotrepository)

        cache_svn_tree.remove_tree(None, cmode)

        cache_svn_tree.export_format_tree_to_file(dotrepository)
Example #45
    def check_consistency():
        dotrc = Glo.dot_buildc_rc_path()
        if not os.path.exists(dotrc):
            print 'Can not found ~/.buildc.rc!'
            print 'Please run buildc init to generate this file!'
            sys.exit(Errors.conf_file_not_found)

        dotrepository  = Glo.dot_buildc_repository_path()
        buildc_rc      = Load.load_dot_buildc_rc(dotrc)
        cache_svn_tree = CacheSvnTree(buildc_rc.external_repositories)
        cache_svn_tree.import_format_tree_from_file(dotrepository)

        result = cache_svn_tree.check_tree_consistency()
        if result == False:
            return False
Example #46
    def cache_build_by_external_libs(external_libs, cmode, force_update = True):
        dotrc = Glo.dot_buildc_rc_path()
        if not os.path.exists(dotrc):
            print 'Can not found ' + dotrc
            print 'Please run buildc init and then config .buildc.rc!'
            sys.exit(Errors.conf_file_not_found)

        buildc_rc  = Load.load_dot_buildc_rc(dotrc)
        dotrepository  = Glo.dot_buildc_repository_path()
        cache_svn_tree = CacheSvnTree(buildc_rc.external_repositories)
        cache_svn_tree.import_format_tree_from_file(dotrepository)

        is_valid = True

        for dependence in external_libs:
            (dep_libname, dep_libversion) = Glo.get_dependent_name_and_version(dependence)[0:2]
            print '\n===>Begin build library [' + dep_libname + ' ' + dep_libversion + ']'
            result = Cache.build_dependent_and_fault_tolerant(cache_svn_tree, True, dep_libname, dep_libversion, cmode, force_update)
            print '<=== End build library [' + dep_libname + ' ' + dep_libversion + ']'
            if result == False:
                is_valid = False
                break

        if is_valid == False:
            is_valid = True

            cache_svn_tree = CacheSvnTree(buildc_rc.external_repositories)
            cache_svn_tree.import_format_tree_from_file(dotrepository)
            for dependence in external_libs:
                (dep_libname, dep_libversion) = Glo.get_dependent_name_and_version(dependence)[0:2]
                print '\n===>Begin build library [' + dep_libname + ' ' + dep_libversion + ']'
                result = Cache.build_dependent_and_fault_tolerant(cache_svn_tree, False, dep_libname, dep_libversion, cmode, force_update)
                print '<=== End build library [' + dep_libname + ' ' + dep_libversion + ']'
                if result == False:
                    is_valid = False

        cache_svn_tree.export_format_tree_to_file(dotrepository)
        return is_valid
Example #47
class Window(QtGui.QMainWindow):

    def __init__(self):
        super(Window, self).__init__()
        self.setGeometry(50, 50, 500, 300)
        #self.setWindowTitle("OpenNeuron")
        self.setWindowIcon(QtGui.QIcon('pythonlogo.png'))


        #Set widget to show up with viewbox
        toolMenu = QtGui.QMenuBar()
        toolMenu.setNativeMenuBar(False) # <--Sets the menu with the widget; otherwise shows up as global (i.e. at top desktop screen)
        self.setMenuBar(toolMenu)

        #***** TEXT PARAMETERS FIELDS ******
        self.animal_name_text='' 
        self.rec_name_text=''
        if False: 
            #Mouse July 11 as default experiment
            self.root_dir = '/media/cat/12TB/in_vivo/tim/cat/' 
            self.animal_name_text='2016_07_11_vsd' 
            self.rec_name_text='track1_150Hz_iso1.0_spontaneous.rhd'
            
            self.animal = Mouse(self.animal_name_text, self.root_dir)
            self.animal.recName =self.root_dir+self.animal_name_text+'/rhd_files/'+ self.rec_name_text
            self.setWindowTitle(self.animal.name+'/'+self.animal.recName.replace(self.animal.home_dir+self.animal.name+'/rhd_files/', ''))
            self.animal.load_tsf_header(self.animal.recName.replace('rhd_files','tsf_files').replace('.rhd','_hp.tsf'))
            self.exp_type = 'mouse'

        if False: 
            #Cat ptc20 as default experiment
            self.root_dir = '/media/cat/8TB/in_vivo/nick/' 
            self.animal_name_text='ptc20' 
            self.rec_name_text='71-tr3-blankscreen/71-tr3-blankscreen.tsf'

            self.animal = Cat(self.animal_name_text, self.root_dir)
            self.animal.recName =self.root_dir+self.animal_name_text+'/'+ self.rec_name_text
            self.setWindowTitle(self.animal.name+'/'+self.animal.recName.replace(self.root_dir+self.animal_name_text+'/', ''))
            self.animal.load_tsf_header(self.animal.recName)
            self.exp_type = 'cat'

        #Load default experiment

        #self.animal.rec_length = self.animal.tsf.n_vd_samples/float(self.animal.tsf.SampleFrequency)


        #Menu Item Lists
        self.make_menu()

        #LOAD CENTRAL WIDGET
        self.central_widget = QtGui.QStackedWidget()
        self.setCentralWidget(self.central_widget)
        
        #SET DEFAULT WIDGET TO PROCESS
        self.load_widget = Load(self)
        self.central_widget.addWidget(self.load_widget)
        
        self.show()

    def make_menu(self):
        
        #FILE MENUS
        loadMouse = QtGui.QAction("&Load Mouse", self)
        loadMouse.setStatusTip('Load Mouse')
        loadMouse.triggered.connect(self.ld_mouse)

        loadMouseLever = QtGui.QAction("&Load Mouse-Lever", self)
        loadMouseLever.setStatusTip('Load Mouse-Lever')
        loadMouseLever.triggered.connect(self.ld_mouse_lever)

        loadCat = QtGui.QAction("&Load Cat", self)
        loadCat.setStatusTip('Load Cat')
        loadCat.triggered.connect(self.ld_cat)
                
        loadRat = QtGui.QAction("&Load Rat", self)
        loadRat.setStatusTip('Load Rat')
        loadRat.triggered.connect(self.ld_rat)
                
        loadRecording = QtGui.QAction("&Select Recording", self)
        loadRecording.setStatusTip('Select Recording')
        loadRecording.triggered.connect(self.ld_rec)


        #PROCESSING MENUS
        trackTools = QtGui.QAction("&Track Wide Tools", self)
        trackTools.setStatusTip('Track Wide Tools')
        trackTools.triggered.connect(self.track_tools)
        
        intanTools = QtGui.QAction("&Intan Conversion Tools", self)
        intanTools.setStatusTip('Intan Conversion Tools')
        intanTools.triggered.connect(self.intan_tools)


        preprocessExperiment = QtGui.QAction("&Process Data", self)
        preprocessExperiment.setStatusTip('Process Data')
        preprocessExperiment.triggered.connect(self.prepreprocess_data)


        seamansData = QtGui.QAction("&Seamans' Lab Data", self)
        seamansData.setStatusTip('Seamans Lab Data')
        seamansData.triggered.connect(self.seamans_data)


        filterData = QtGui.QAction("&Filter Data", self)
        filterData.setStatusTip('Filter Data')
        filterData.triggered.connect(self.fltr_data)


        metaInfo = QtGui.QAction("&File Info", self)
        metaInfo.setStatusTip('File Info')
        metaInfo.triggered.connect(self.data_info)


        #IMAGING TOOLS MENUS
        ophysTools = QtGui.QAction("&Event Triggered Imaging", self)
        ophysTools.setStatusTip('Event Triggered Imaging')
        ophysTools.triggered.connect(self.ophys_tools)

        ophysToolsMCD = QtGui.QAction("&Event Triggered Imaging - MCD", self)
        ophysToolsMCD.setStatusTip('Event Triggered Imaging MCD')
        ophysToolsMCD.triggered.connect(self.ophys_tools_mcd)

        ephysTools = QtGui.QAction("&Event Triggered Ephys", self)
        ephysTools.setStatusTip('Event Triggered Ephys')
        ephysTools.triggered.connect(self.ephys_tools)
        
        mouseLeverTools = QtGui.QAction("&Mouse-Lever", self)
        mouseLeverTools.setStatusTip('Mouse-Lever')
        mouseLeverTools.triggered.connect(self.mouse_lever_tools)

        catTools = QtGui.QAction("&Cat", self)
        catTools.setStatusTip('Cat')
        catTools.triggered.connect(self.cat_tools)

        ratTools = QtGui.QAction("&Rat", self)
        ratTools.setStatusTip('Rat')
        ratTools.triggered.connect(self.rat_tools)


        #EPHYS TOOLS MENUS
        View_Traces = QtGui.QAction("&Traces Tools", self)
        View_Traces.setStatusTip('Traces Tools')
        View_Traces.triggered.connect(self.view_rtraces)
        
        View_Templates = QtGui.QAction("&Template Tools", self)
        View_Templates.setStatusTip('Template Tools')
        View_Templates.triggered.connect(self.view_templts)
        
        #Event_Triggered_Maps = QtGui.QAction("&Cell STM", self)
        #Event_Triggered_Maps.setStatusTip('Cell Analysis')
        #Event_Triggered_Maps.triggered.connect(self.event_triggered_analysis)

        LFP_Analysis = QtGui.QAction("&LFP Tools", self)
        LFP_Analysis.setStatusTip('LFP Analysis')
        LFP_Analysis.triggered.connect(self.lfp_analysis)

        MSL_Analysis = QtGui.QAction("&MSL Tools", self)
        MSL_Analysis.setStatusTip('MSL')
        MSL_Analysis.triggered.connect(self.msl_analysis)


        Rasters_Analysis = QtGui.QAction("&Rasters", self)
        Rasters_Analysis.setStatusTip('Raster Analysis')
        Rasters_Analysis.triggered.connect(self.rasters_analysis)
        
        
        Count_Matrix = QtGui.QAction("&Count Matrix", self)
        Count_Matrix.setStatusTip('Count Matrix')
        Count_Matrix.triggered.connect(self.view_count_matrix)


        exitApplication = QtGui.QAction("&Exit Application", self)
        exitApplication.setStatusTip('Exit')
        exitApplication.triggered.connect(self.close_application)
        
        #MAKE MENUS
        mainMenu = self.menuBar()
        
        fileMenu = mainMenu.addMenu('&Load')
        fileMenu.addAction(loadMouse)
        fileMenu.addAction(loadMouseLever)
        fileMenu.addAction(loadCat)
        fileMenu.addAction(loadRat)
        fileMenu.addAction(loadRecording)
        fileMenu.addAction(exitApplication)

        fileMenu = mainMenu.addMenu('Pre-Process')
        fileMenu.addAction(intanTools)
        fileMenu.addAction(trackTools)
        fileMenu.addAction(seamansData)
        fileMenu.addAction(filterData)
        fileMenu.addAction(metaInfo)
        #fileMenu.addAction(preprocessExperiment)

        fileMenu = mainMenu.addMenu('Event Triggered Analysis')
        #fileMenu.addAction(Event_Triggered_Maps)
        fileMenu.addAction(ophysTools)
        fileMenu.addAction(ophysToolsMCD)
        fileMenu.addAction(ephysTools)
        fileMenu.addAction(mouseLeverTools)
        #fileMenu.addAction(catTools)
        #fileMenu.addAction(ratTools)

        fileMenu = mainMenu.addMenu('Ephys Tools')
        fileMenu.addAction(View_Traces)
        fileMenu.addAction(View_Templates)
        fileMenu.addAction(LFP_Analysis)
        fileMenu.addAction(MSL_Analysis)
        fileMenu.addAction(Rasters_Analysis)

        fileMenu.addAction(Count_Matrix)

    #************* LOAD FILE MENUS *****************
    def ld_mouse(self):

        self.load_widget.load_mouse(self)   #Pass main widget to subwidgets as it contains needed parameters.

        self.exp_type = 'mouse'

        #RESTART Process widget with updated info; SEEMS THERE IS A BETTER WAY TO DO THIS
        self.load_widget = Load(self)
        self.central_widget.addWidget(self.load_widget)
        self.central_widget.setCurrentWidget(self.load_widget)


    def ld_mouse_lever(self):

        self.load_widget.load_mouse_lever(self)   #Pass main widget to subwidgets as it contains needed parameters.

        self.exp_type = 'mouse_lever'

        #RESTART Process widget with updated info; SEEMS THERE IS A BETTER WAY TO DO THIS
        self.load_widget = Load(self)
        self.central_widget.addWidget(self.load_widget)
        self.central_widget.setCurrentWidget(self.load_widget)

        
    def ld_cat(self):

        self.load_widget.load_cat(self)   #Pass main widget to subwidgets as it contains needed parameters.
        self.exp_type = 'cat'

        #RESTART Process widget with updated info; SEEMS THERE IS A BETTER WAY TO DO THIS
        self.load_widget = Load(self)
        self.central_widget.addWidget(self.load_widget)
        self.central_widget.setCurrentWidget(self.load_widget)


    def ld_rat(self):

        self.load_widget.load_rat(self)   #Pass main widget to subwidgets as it contains needed parameters.
        self.exp_type = 'rat'

        #RESTART Process widget with updated info; SEEMS THERE IS A BETTER WAY TO DO THIS
        self.load_widget = Load(self)
        self.central_widget.addWidget(self.load_widget)
        self.central_widget.setCurrentWidget(self.load_widget)
        
        
    def ld_rec(self):
        self.load_widget.select_recording(self)   #Pass main widget to subwidgets as it contains needed parameters.
 
        #RESTART Process widget with updated info; SEEMS THERE SHOULD BE A BETTER WAY TO DO THIS
        self.load_widget = Load(self)
        self.central_widget.addWidget(self.load_widget)
        self.central_widget.setCurrentWidget(self.load_widget)
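
    # The three lines above are repeated verbatim in ld_mouse, ld_mouse_lever,
    # ld_cat, ld_rat and ld_rec; a hypothetical helper (not part of the original
    # class) could factor them out, e.g.:
    def _reset_load_widget(self):
        self.load_widget = Load(self)
        self.central_widget.addWidget(self.load_widget)
        self.central_widget.setCurrentWidget(self.load_widget)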
                
    def close_application(self):
        print("whooaaaa so custom!!!")
        
       
        sys.exit()


    #********** EXP TOOLS MENUS *******************
    def ophys_tools(self):
        ophys_widget = EventTriggeredImaging(self)
        self.central_widget.addWidget(ophys_widget)  
        self.central_widget.setCurrentWidget(ophys_widget)
        
    def ophys_tools_mcd(self):
        ophys_widget = EventTriggeredImagingMCD(self)
        self.central_widget.addWidget(ophys_widget)  
        self.central_widget.setCurrentWidget(ophys_widget)
        
        

    def ephys_tools(self):
        ephys_widget = EventTriggeredEphys(self)
        self.central_widget.addWidget(ephys_widget)  
        self.central_widget.setCurrentWidget(ephys_widget)

    def mouse_lever_tools(self):
        mouse_lever_widget = MouseLeverTools(self)
        self.central_widget.addWidget(mouse_lever_widget)  
        self.central_widget.setCurrentWidget(mouse_lever_widget)
    
    def cat_tools(self):
        cat_widget = CatTools(self)
        self.central_widget.addWidget(cat_widget)  
        self.central_widget.setCurrentWidget(cat_widget)

    def rat_tools(self):
        rat_widget = RatTools(self)
        self.central_widget.addWidget(rat_widget)  
        self.central_widget.setCurrentWidget(rat_widget)




    #********** ANALYSIS MENUS *******************
    
    def view_rtraces(self):
        traces_widget = TracesTools(self)
        self.central_widget.addWidget(traces_widget)  
        self.central_widget.setCurrentWidget(traces_widget)
    
    def view_templts(self):
        templates_widget = TemplateTools(self)
        self.central_widget.addWidget(templates_widget)  
        self.central_widget.setCurrentWidget(templates_widget)
        
    
    #def event_triggered_analysis(self):
        #event_widget = EventTriggered(self)
        #self.central_widget.addWidget(event_widget)  
        #self.central_widget.setCurrentWidget(event_widget)

    def lfp_analysis(self):
        lfp_widget = LFP(self)
        self.central_widget.addWidget(lfp_widget)  
        self.central_widget.setCurrentWidget(lfp_widget)
    
    def msl_analysis(self):
        msl_widget = MSL(self)
        self.central_widget.addWidget(msl_widget)  
        self.central_widget.setCurrentWidget(msl_widget)


    def rasters_analysis(self):
        rasters_widget = Rasters(self)
        self.central_widget.addWidget(rasters_widget)  
        self.central_widget.setCurrentWidget(rasters_widget)
        

    def view_count_matrix(self):
        count_matrix_widget = CountMatrix(self)
        self.central_widget.addWidget(count_matrix_widget)  
        self.central_widget.setCurrentWidget(count_matrix_widget)



    #************ PROCESSING MENUS ***************
    def track_tools(self):
        track_widget = TrackWideTools(self)
        self.central_widget.addWidget(track_widget)      
        self.central_widget.setCurrentWidget(track_widget)

    def intan_tools(self):
        intan_widget = IntanTools(self)
        self.central_widget.addWidget(intan_widget)      
        self.central_widget.setCurrentWidget(intan_widget)
        


    def seamans_data(self):
        seamans_widget = Seamans(self)
        self.central_widget.addWidget(seamans_widget)      
        self.central_widget.setCurrentWidget(seamans_widget)


    def prepreprocess_data(self):  #NOT CURRENTLY USED...
        print "....processing experiment: ", self.fileName
        
        #MUST BE EXPERIMENT SPECIFIC
        

    def fltr_data(self):
        filter_widget = Filter(self)
        self.central_widget.addWidget(filter_widget)  
        self.central_widget.setCurrentWidget(filter_widget)
        

    def data_info(self):
        
        info_widget = Info(self)
        self.central_widget.addWidget(info_widget)  
        self.central_widget.setCurrentWidget(info_widget)
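
# A minimal launch sketch for the window class defined above.  The class name is
# not shown in this excerpt, so "MainWindow" is only an assumption; PyQt4 and
# Python 2 are inferred from the QtGui classes and print statements used above.
if __name__ == "__main__":
    import sys
    from PyQt4 import QtGui

    app = QtGui.QApplication(sys.argv)
    window = MainWindow()   # __init__ already calls self.show()
    sys.exit(app.exec_())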
Ejemplo n.º 48
0
    def cache_build_by_config(buildc_cfg_path, cmode, force_update = True):
        buildc_cfg = Load.load_buildc_cfg(buildc_cfg_path, Glo.var_str())

        is_valid = Cache.cache_build_by_external_libs(buildc_cfg.external_libs, cmode, force_update)
        return is_valid
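
# A minimal usage sketch (the cfg path and the '64-bit' compile mode are
# illustrative assumptions; Cache is the class this method appears to be
# defined on):
#
#     is_valid = Cache.cache_build_by_config("buildc.cfg", "64-bit")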
Ejemplo n.º 49
0
    def reconfig(cmode, libs_depend, project_root_path = None):
        makerules = Glo.make_rules_path()

        if (Glo.SYSTEM == 'solaris'):
            this_awk = 'nawk'
        else:
            this_awk = 'gawk'
        print "Reconfig [" + makerules + "]..."

        #Warn if we cannot find '@Generated by buildc@' embedded in the Make.rules
        f = open(makerules)
        s = f.read(1024)
        if s.find("@Generated by buildc@") == -1:
            print "Warning: Please make sure the Make.rules file is generated by buildc!"
        f.close()

        c = Load.load_buildc_cfg(Glo.buildc_cfg_path(), Glo.var_str())
        project_name, version, author = c.project
        if project_root_path == None:
            topdir = os.path.abspath('./')
        else:
            topdir = project_root_path
        this_os = Glo.SYSTEM
        this_cpu = Glo.CPU
        cmode = cmode

        if cmode == '64-bit':
            if this_os == 'solaris' and this_cpu == 'x86':
                cc = '/usr/sfw/bin/gcc -m64'
            else:
                cc = 'gcc -m64'
        else:
            cc = 'gcc -m32'

        libs         = ''
        includes     = ''
        static_libs  = ''
        dynamic_libs = ''

        lib_roots = ''
        lib_roots_count = len(libs_depend)
        if not lib_roots_count == 0:
            last_lib = libs_depend[lib_roots_count - 1]
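        # Each dependency contributes a "<LIBNAME>_ROOT = <path>" line; all but the
        # last are tagged "#@lib_roots@" and the last one "#@lib_roots_end@", so the
        # sed/awk pipeline below can locate and regenerate the block on later runs.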
        for (libname, libversion, archives, libpath) in libs_depend:
            if libname == last_lib[0]:
                lib_roots += (libname + '_ROOT = ' + libpath + "#@lib_roots_end@")
            else:
                lib_roots += (libname + '_ROOT = ' + libpath + "#@lib_roots@\\n")

            includes += ('-I $(' + libname + '_ROOT)' + '/include ')
            libs += (' -L $(' + libname + '_ROOT)' + '/lib')
            for archive in archives:
                libs += (' -l' + Glo.libname2compile_option(archive))
                if Glo.is_static_lib(archive):
                    static_libs += (' -L $(' + libname + '_ROOT)' + '/lib')
                    static_libs += (' -l' + Glo.libname2compile_option(archive))
                else:
                    dynamic_libs += (' -L $(' + libname + '_ROOT)' + '/lib')
                    dynamic_libs += (' -l' + Glo.libname2compile_option(archive))

        custom_defs = ''
        for cdef in c.custom_defs:
            custom_defs += (cdef + ' ')

        custom_vars = ''
        custom_vars_count = len(c.custom_vars)
        for var in c.custom_vars:
            custom_vars += (var[0] + ' = ' + str(var[1]))
            if var == c.custom_vars[custom_vars_count - 1]:
                custom_vars += "#@custom_vars_end@"
            else:
                custom_vars += "#@custom_vars@\\n"

        custom_includes = ''
        for inc in c.custom_includes:
            custom_includes += ('-I ' + inc + ' ')

        custom_libs = ''
        for (libpath, archives) in c.custom_libs:
            if not len(libpath) == 0:
                custom_libs += (' -L' + libpath)

            for archive in archives:
                custom_libs += (' -l' + Glo.libname2compile_option(archive))
                if Glo.is_static_lib(archive):
                    if not len(libpath) == 0:
                        static_libs += (' -L ' + libpath)
                    static_libs += (' -l' + Glo.libname2compile_option(archive))
                else:
                    if not len(libpath) == 0:
                        dynamic_libs += (' -L ' + libpath)
                    dynamic_libs += (' -l' + Glo.libname2compile_option(archive))

        system_libs = ''
        for (libpath, archives) in c.system_libs:
            if not len(libpath) == 0:
                system_libs += (' -L ' + libpath)

            for archive in archives:
                system_libs += (' -l' + Glo.libname2compile_option(archive))
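
        # Make.rules is rewritten in place by piping it through a chain of sed/awk
        # filters: each "=.*@tag@" placeholder line gets the value computed above
        # while keeping its "#@tag@" marker, so the file can be reconfigured again.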

        cmd  = "sed -e '1,$ s/=.*@topdir@/= "  + Glo.add_backlash(topdir) + "#@topdir@/g' " + Glo.make_rules_path() + '|'
        cmd += "sed -e '1,$ s/=.*@os@/= "      + this_os                          + "#@os@/g'"      + '|'
        cmd += "sed -e '1,$ s/=.*@cpu@/= "     + this_cpu                         + "#@cpu@/g'"     + '|'
        cmd += "sed -e '1,$ s/=.*@cmode@/= "   + cmode                            + "#@cmode@/g'"   + '|'
        cmd += "sed -e '1,$ s/=.*@version@/= " + version                          + "#@version@/g'" + '|'
        cmd += "sed -e '1,$ s/=.*@cc@/= "               + Glo.add_backlash(cc)                   + "#@cc@/g'"                + '|'
        cmd += "sed -e '1,$ s/=.*@default_includes@/= " + Glo.add_backlash(Glo.default_includes) + "#@default_includes@/g'"  + '|'
        cmd += "sed -e '1,$ s/=.*@default_libs@/= "     + Glo.add_backlash(Glo.default_libs)     + "#@default_libs@/g'"      + '|'
        cmd += "sed -e '1,$ s/=.*@custom_includes@/= "  + Glo.add_backlash(custom_includes)      + "#@custom_includes@/g'"   + '|'
        cmd += "sed -e '1,$ s/=.*@custom_libs@/= "      + Glo.add_backlash(custom_libs)          + "#@custom_libs@/g'"       + '|'
        cmd += "sed -e '1,$ s/=.*@system_libs@/= "      + Glo.add_backlash(system_libs)          + "#@system_libs@/g'"       + '|'
        cmd += "sed -e '1,$ s/=.*@static_libs@/= "      + Glo.add_backlash(static_libs)          + "#@static_libs@/g'"       + '|'
        cmd += "sed -e '1,$ s/=.*@dynamic_libs@/= "     + Glo.add_backlash(dynamic_libs)         + "#@dynamic_libs@/g'"      + '|'
        cmd += "sed -e '1,$ s/=.*@custom_defs@/= "      + custom_defs                            + "#@custom_defs@/g'"       + '|'
        cmd += "sed -e '1,$ s/=.*@lib_includes@/= "     + Glo.add_backlash(includes)             + "#@lib_includes@/g'"      + '|'
        cmd += "sed -e '1,$ s/=.*@libs_depend@/= "      + Glo.add_backlash(libs)                 + "#@libs_depend@/g'"       + '|'
        cmd += "sed -e '/^.*@lib_roots@/d'"                          + '|'
        cmd += "sed -e '1,$ s/^.*@lib_roots_end@/@lib_roots@/g'"     + '|'
        cmd += "sed -e '/^.*@custom_vars@/d'"                        + '|'
        cmd += "sed -e '1,$ s/^.*@custom_vars_end@/@custom_vars@/g'" + '|'

        if lib_roots_count == 0:
            cmd += ("sed -e '1,$ s/@lib_roots@/#@lib_roots_end@/g'" + '|')
        else:
            cmd += (this_awk + " '{ sub(/@lib_roots@/, \"" + lib_roots + "\"); print }'" + '|')

        if custom_vars_count == 0:
            cmd += ("sed -e '1,$ s/@custom_vars@/#@custom_vars_end@/g'")
        else:
            cmd += (this_awk + " '{ sub(/@custom_vars@/, \"" + custom_vars + "\"); print }'")

        cmd += "> " + Glo.make_rules_temp_path()

        Util.execute_and_output(cmd)
        Util.execute_and_output('mv ' + Glo.make_rules_temp_path() + ' ' + Glo.make_rules_path())

        print "Reconfig [" + makerules + "] OK!"
Ejemplo n.º 50
0
    def __config(cmode, libs_depend, project_root_path = None):
        makerules_tpl = Glo.make_rules_tpl_path()
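        # Unlike reconfig() above, which edits an existing Make.rules in place,
        # __config() generates a fresh Make.rules from this template.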

        if (Glo.SYSTEM == 'solaris'):
            this_awk = 'nawk'
        else:
            this_awk = 'gawk'

        c = Load.load_buildc_cfg(Glo.buildc_cfg_path(), Glo.var_str())
        project_name, version, author = c.project
        date = time.strftime('%Y-%m-%d',time.localtime(time.time()))
        if project_root_path == None:
            topdir = os.path.abspath('./')
        else:
            topdir = project_root_path
        this_os  = Glo.SYSTEM
        this_cpu = Glo.CPU
        cmode = cmode

        if cmode == '64-bit':
            if this_os == 'solaris' and this_cpu == 'x86':
                cc = '/usr/sfw/bin/gcc -m64'
            else:
                cc = 'gcc -m64'
        else:
            cc = 'gcc -m32'

        libs = ''
        includes = ''
        static_libs = ''
        dynamic_libs = ''

        lib_roots = ''
        lib_roots_count = len(libs_depend)
        if not lib_roots_count == 0:
            last_lib = libs_depend[lib_roots_count - 1]
        for (libname, libversion, archives, libpath) in libs_depend:
            if libname == last_lib[0]:
                lib_roots += (libname + '_ROOT = ' + libpath + "#@lib_roots_end@")
            else:
                lib_roots += (libname + '_ROOT = ' + libpath + "#@lib_roots@\\n")

            includes += ('-I $(' + libname + '_ROOT)' + '/include ')
            libs     += (' -L $(' + libname + '_ROOT)' + '/lib')
            for archive in archives:
                libs += (' -l' + Glo.libname2compile_option(archive))
                if Glo.is_static_lib(archive):
                    static_libs += (' -L $(' + libname + '_ROOT)' + '/lib')
                    static_libs += (' -l' + Glo.libname2compile_option(archive))
                else:
                    dynamic_libs += (' -L $(' + libname + '_ROOT)' + '/lib')
                    dynamic_libs += (' -l' + Glo.libname2compile_option(archive))

        custom_defs = ''
        for cdef in c.custom_defs:
            custom_defs += (cdef + ' ')

        custom_vars = ''
        custom_vars_count = len(c.custom_vars)
        for var in c.custom_vars:
            custom_vars += (var[0] + ' = ' + str(var[1]))
            if var == c.custom_vars[custom_vars_count - 1]:
                custom_vars += "#@custom_vars_end@"
            else:
                custom_vars += "#@custom_vars@\\n"

        custom_includes = ''
        for inc in c.custom_includes:
            custom_includes += ('-I ' + inc + ' ')

        custom_libs = ''
        for (libpath, archives) in c.custom_libs:
            if not len(libpath) == 0:
                custom_libs += (' -L ' + libpath)

            for archive in archives:
                custom_libs += (' -l' + Glo.libname2compile_option(archive))
                if Glo.is_static_lib(archive):
                    if not len(libpath) == 0:
                        static_libs += (' -L ' + libpath)
                    static_libs += (' -l' + Glo.libname2compile_option(archive))
                else:
                    if not len(libpath) == 0:
                        dynamic_libs += (' -L ' + libpath)
                    dynamic_libs += (' -l' + Glo.libname2compile_option(archive))

        system_libs = ''
        for (libpath, archives) in c.system_libs:
            if not len(libpath) == 0:
                system_libs += (' -L ' + libpath)

            for archive in archives:
                system_libs += (' -l' + Glo.libname2compile_option(archive))

        cmd  = "sed -e '1,$ s/@project_name@/"     + project_name + "/g' " + makerules_tpl + '|'
        cmd += "sed -e '1,$ s/@author@/"           + author       + "/g'"  + '|'
        cmd += "sed -e '1,$ s/@date@/"             + date         + "/g'"  + '|'
        cmd += "sed -e '1,$ s/@topdir@/"           + Glo.add_backlash(topdir) + "#@topdir@/g'"  + '|'
        cmd += "sed -e '1,$ s/@os@/"               + this_os                          + "#@os@/g'"      + '|'
        cmd += "sed -e '1,$ s/@cpu@/"              + this_cpu                         + "#@cpu@/g'"     + '|'
        cmd += "sed -e '1,$ s/@cmode@/"            + cmode                            + "#@cmode@/g'"   + '|'
        cmd += "sed -e '1,$ s/@version@/"          + version                          + "#@version@/g'" + '|'
        cmd += "sed -e '1,$ s/@cc@/"               + Glo.add_backlash(cc)     + "#@cc@/g'"      + '|'
        cmd += "sed -e '1,$ s/@default_includes@/" + Glo.add_backlash(Glo.default_includes) + "#@default_includes@/g'" + '|'
        cmd += "sed -e '1,$ s/@default_libs@/"     + Glo.add_backlash(Glo.default_libs)     + "#@default_libs@/g'"     + '|'
        cmd += "sed -e '1,$ s/@custom_defs@/"      + custom_defs                                       + "#@custom_defs@/g'"      + '|'
        cmd += "sed -e '1,$ s/@custom_includes@/"  + Glo.add_backlash(custom_includes)         + "#@custom_includes@/g'"  + '|'
        cmd += "sed -e '1,$ s/@custom_libs@/"      + Glo.add_backlash(custom_libs)             + "#@custom_libs@/g'"      + '|'
        cmd += "sed -e '1,$ s/@system_libs@/"      + Glo.add_backlash(system_libs)             + "#@system_libs@/g'"      + '|'
        cmd += "sed -e '1,$ s/@static_libs@/"      + Glo.add_backlash(static_libs)             + "#@static_libs@/g'"      + '|'
        cmd += "sed -e '1,$ s/@dynamic_libs@/"     + Glo.add_backlash(dynamic_libs)            + "#@dynamic_libs@/g'"     + '|'
        cmd += "sed -e '1,$ s/@lib_includes@/"     + Glo.add_backlash(includes)                + "#@lib_includes@/g'"     + '|'
        cmd += "sed -e '1,$ s/@libs_depend@/"      + Glo.add_backlash(libs)                    + "#@libs_depend@/g'"      + '|'

        if lib_roots_count == 0:
            cmd += ("sed -e '1,$ s/@lib_roots@/#@lib_roots_end@/g'" + '|')
        else:
            cmd += (this_awk + " '{ sub(/@lib_roots@/, \"" + lib_roots + "\"); print }'" + '|')

        if custom_vars_count == 0:
            cmd += ("sed -e '1,$ s/@custom_vars@/#@custom_vars_end@/g'")
        else:
            cmd += (this_awk + " '{ sub(/@custom_vars@/, \"" + custom_vars + "\"); print }'")

        cmd += "> " + Glo.make_rules_path()

        Util.execute_and_output(cmd)
Ejemplo n.º 51
0
    def __pack_init():
        c = Load.load_setup_cfg(Pack.SETUP_CFG_PATH)
        return c
Ejemplo n.º 52
0
    def utility(self, state, player):
        """ This return a value between -infinity and infinity based on how good is the state for the player"""

        player_type = player.get_type()
        player_color = player.get_color()

        if player_color is Piece.WHITE:
            if state.is_check_for_enemy(Piece.BLACK):
                if state.is_check_mate_for_enemy(Piece.BLACK):
                    return 99999999
                else:
                    return 99999999 - 100000
            if state.is_check_for_enemy(Piece.WHITE):
                if state.is_check_mate_for_enemy(Piece.WHITE):
                    return -99999999
                else:
                    return -99999999 + 100000
        elif player_color is Piece.BLACK:
            if state.is_check_for_enemy(Piece.WHITE):
                if state.is_check_mate_for_enemy(Piece.WHITE):
                    return 99999999
                else:
                    return 99999999 - 100000
            if state.is_check_for_enemy(Piece.BLACK):
                if state.is_check_mate_for_enemy(Piece.BLACK):
                    return -99999999
                else:
                    return -99999999 + 100000

        if player_type is Player.IDIOT:
            return 0

        elif player_type is Player.FIGHTER:

            if state.can_kill(player_color):
                state_quality = 50000
            else:
                state_quality = 0

            for row in state.get_board():
                for pos in row:
                    if pos is not None:
                        if pos.get_color() is player_color:
                            state_quality += Piece.VALUES[pos.get_type()]
                        else:
                            state_quality -= Piece.VALUES[pos.get_type()]
            return state_quality

        elif player_type is Player.SMARTEST or player_type is Player.HUMAN:
            db_access = Load()
            can_kill = state.can_kill(player_color)
            can_be_killed = state.can_be_killed(player_color)

            if db_access.compare_with_data_base_states(state, player):
                state_quality = 20000
            else:
                state_quality = 0

            if can_kill and not can_be_killed:
                state_quality += 15000
            elif not can_kill and not can_be_killed:
                state_quality += 10000
            elif can_kill and can_be_killed:
                state_quality -= 5000
            elif not can_kill and can_be_killed:
                state_quality -= 5000

            for row in state.get_board():
                for pos in row:
                    if pos is not None:
                        if pos.get_color() is player_color:
                            state_quality += Piece.VALUES[pos.get_type()]
                        else:
                            state_quality -= Piece.VALUES[pos.get_type()]
            return state_quality
Ejemplo n.º 53
0
        self.api_request("postMessage", {"room": room, "message": message})
        return self


if __name__ == "__main__":
    pBody = pyBot()
    _exit_ = exit

    def exit():
        pBody.logout()
        _exit_()

    atexit.register(exit)
    pBody.login().join("#lobby")
    # get initial list of all messages...
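    # Load appears to be a Thread subclass used as a background loader/progress
    # indicator: it is started before the first get_messages() call and stopped
    # (running = False, then join) once that call returns.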
    l = Load()
    l.start()
    pBody.get_messages()
    l.running = False
    l.join()

    def process_messages():
        running = False
        while not running:
            msgs = pBody.get_messages()
            if msgs:
                for msg in msgs:
                    pass

            running = pBody.done
            # process_messages();
Ejemplo n.º 54
0
Archivo: mie.py Proyecto: mjonyh/mie
    def __init__(self):
        self.intp_data = Load()