Beispiel #1
0
def process_directory(wdir, wfiles):
    """Process one directory of the walk.

    Looks for a directory-local ``project_config.conf`` (falling back to
    the global configuration), collects the files in *wfiles* whose
    extension matches the configured ``FileExt`` list, loads them, and
    runs the project step when *wdir* is a known project.

    Returns True when the walk should NOT recurse below this directory.
    """
    folder = Path(wdir)
    h.log(h.LOG_VERBOSE, "INFO: In folder: {}".format(folder))

    # Prefer a configuration file local to this directory over the global one.
    local_conf = folder.joinpath("project_config.conf")
    if local_conf.is_file():
        h.log(h.LOG_VERBOSE, "INFO: Local configuration file found")
        cfg = deepcopy(c.CONFIG)
        c.set_config(cfg, local_conf)
    else:
        local_conf = None
        cfg = c.CONFIG

    # Collect files whose extension matches one of the configured patterns.
    wanted_exts = {e.strip().lower() for e in cfg['PROJECT']['FileExt'].split(',')}
    matched = []
    for extension in wanted_exts:
        pattern = re.compile(h.RXP['FileExt'].replace("@1", extension),
                             re.IGNORECASE)
        matched.extend(f for f in wfiles if pattern.findall(f))

    if matched:
        load_files(wdir, cfg, matched)

    if wdir in PROJECTS:
        PROJECTS[wdir]['local_config'] = local_conf is not None
        process_project(wdir, PROJECTS[wdir], cfg)

    # A falsy 'Recurse' setting stops the walk below this directory.
    return not cfg['PROJECT']['Recurse']
 def post(self):
     """Store the OAuth client settings submitted by an admin user."""
     # Do nothing for non-admin requests.
     if not self.ensure_admin():
         return
     config.set_config(
         self.request.get("client_id"),
         self.request.get("client_secret"),
         self.request.get("callback_url"),
     )
     self.render()
Beispiel #3
0
def create_project(location, name, executable=False):
    """Create a new project folder under *location* named *name*.

    Lays out the boilerplate (``__init__.py``, ``Carpet.toml``, a git
    repository, a default config and a virtualenv under ``.carpet/env``).
    When *executable* is True a ``__main__.py`` stub is added; otherwise
    the config records the project as a non-executable package.

    Exits with status 1 when the folder already exists; on any other
    failure the partially-created folder is removed and the function
    returns without reporting success.
    """
    new_dir = "{}/{}".format(location, name)

    if os.path.exists(new_dir):
        error("Folder {} already exists!".format(name))
        sys.exit(1)

    # Make boilerplate files; roll everything back on any failure.
    try:
        os.mkdir(new_dir)
        os.mkdir(new_dir + "/.carpet/")
        # NOTE: changes the process working directory as a side effect.
        os.chdir(new_dir)
        # Close the handles promptly instead of leaking them.
        open("__init__.py", "a").close()
        open("Carpet.toml", "a").close()
        git.Repo.init(path=new_dir)
        config.build_default_config(new_dir, name=name)

        # If the project is meant to be executable, add a __main__.py stub.
        if executable:
            with open("__main__.py", "a") as main:
                main.write("print('Hello World')")
        else:
            config.set_config(new_dir, "package/executable", False)

        virtualenv.create_environment(new_dir + "/.carpet/env")
    except Exception:
        shutil.rmtree(new_dir)
        colour("FAIL", "Failed to create project")
        echo("Have you checked your permissions?")
        # Bug fix: do not fall through and report success after a rollback.
        return

    colour("BLUE", "Created new project!")
Beispiel #4
0
    def execute(self, args: Namespace):
        """Run the optimization loop, syncing each step with Firebase.

        Repeatedly pulls the latest config, perturbs cloud-sourced configs,
        measures the loss and pushes the result back, until either the
        remote "continue" flag is cleared or the loss change falls below
        LOSS_THRESH.
        """
        prev_loss = Inf
        loss = -Inf

        initialize_app()
        ref = db.reference("optimize")
        continue_ref = ref.child("continue")

        # Seed the remote kill-switch flag on the very first run.
        if continue_ref.get() is None:
            continue_ref.set(True)

        # Allow optimization to be killed
        while continue_ref.get():
            config, prev_loss, pulled_from_cloud = get_latest_config()
            # Only perturb configs that actually came from the cloud store.
            if pulled_from_cloud:
                config = step_config(config)

            set_config(config)
            loss = calculate_loss(get_metrics())
            better = loss < prev_loss
            save_cloud_config(config, loss, better)

            # Convergence test uses the loss delta computed BEFORE the
            # prev_loss update below — order matters here.
            should_stop = abs(prev_loss - loss) < LOSS_THRESH

            if better:
                prev_loss = loss

            if should_stop:
                break
 def __init__(self):
     """Initialise an empty market: no sellers, zero dimensions."""
     # Scalar counters first, then the per-seller containers.
     self.num_sellers = 0
     self.logit_dim = 0
     self.sellers = []
     self.price_sensitivity = []
     config.set_config()
     return super().__init__()
Beispiel #6
0
def setup_config():
    """Load (or create) the config file and apply logging/client settings.

    Writes the bundled default config when none exists yet, configures the
    logger output targets and verbosity from the config values, and fills
    in any missing client settings (web password, ports, API version).
    """
    if not os.path.exists(config._configfile):
        # this is the default config, it will be overwritten if a config file already exists. Else, it saves it
        conf_data = readstatic.read_static('default_config.json', ret_bin=False)
        config.set_config(json.loads(conf_data))

        config.save()

    config.reload()

    # Build the logger settings bitmask from the config flags.
    settings = 0b000
    if config.get('log.console.color', True):
        settings = settings | USE_ANSI
    if config.get('log.console.output', True):
        settings = settings | OUTPUT_TO_CONSOLE
    if config.get('log.file.output', True):
        settings = settings | OUTPUT_TO_FILE
    set_settings(settings)

    verbosity = str(config.get('log.verbosity', 'default')).lower().strip()
    if verbosity not in ('default', 'null', 'none', 'nil'):
        # Accept either the numeric level or several spelled-out aliases.
        # (Renamed from 'map' to avoid shadowing the builtin.)
        level_map = {
            str(LEVEL_DEBUG): LEVEL_DEBUG,
            'verbose': LEVEL_DEBUG,
            'debug': LEVEL_DEBUG,
            str(LEVEL_INFO): LEVEL_INFO,
            'info': LEVEL_INFO,
            'information': LEVEL_INFO,
            str(LEVEL_WARN): LEVEL_WARN,
            'warn': LEVEL_WARN,
            'warning': LEVEL_WARN,
            'warnings': LEVEL_WARN,
            str(LEVEL_ERROR): LEVEL_ERROR,
            'err': LEVEL_ERROR,
            'error': LEVEL_ERROR,
            'errors': LEVEL_ERROR,
            str(LEVEL_FATAL): LEVEL_FATAL,
            'fatal': LEVEL_FATAL,
            str(LEVEL_IMPORTANT): LEVEL_IMPORTANT,
            'silent': LEVEL_IMPORTANT,
            'quiet': LEVEL_IMPORTANT,
            'important': LEVEL_IMPORTANT
        }

        if verbosity in level_map:
            set_level(level_map[verbosity])
        else:
            logger.warn('Verbosity level %s is not valid, using default verbosity.' % verbosity)

    # Generate any missing client settings on first run.
    if config.get('client.webpassword') is None:
        config.set('client.webpassword', base64.b16encode(os.urandom(32)).decode('utf-8'), savefile=True)
    if config.get('client.client.port') is None:
        randomPort = netcontroller.get_open_port()
        config.set('client.client.port', randomPort, savefile=True)
    if config.get('client.public.port') is None:
        randomPort = netcontroller.get_open_port()
        config.set('client.public.port', randomPort, savefile=True)
    if config.get('client.api_version') is None:
        config.set('client.api_version', onionrvalues.API_VERSION, savefile=True)
Beispiel #7
0
def db_build(config_key):
    """Load the selected app configuration, then create all tables."""
    if config_key == 'sel':
        cfg = set_config(select=True)
    elif config_key == 'env':
        cfg = set_config()
    else:
        # Unknown key: bail out (quit raises SystemExit).
        quit()
    current_app.config.from_object(cfg)
    db.create_all()
Beispiel #8
0
def test_faceting():
    """Smoke-test a faceted search against the dev Solr instance."""
    from datastructures import Facet
    from query import Query
    import connection
    import config

    # Point the client at the dev server before searching.
    config.set_config('dev', '0.0.0.0', 8983)
    config.load_config('dev')

    query = Query(Facet('regions', mincount=1))
    response = connection.search(query)
Beispiel #9
0
def evolution_search():
    """Run NSGA-Net evolutionary NAS for each experiment in 'exp_order'.

    For every experiment type the search-space bounds are built, the
    NSGA-Net method is run via minimize(), and for micro-style experiments
    the best genome is pickled into the experiment directory.  Returns the
    best value found in the last experiment, rescaled by /100.
    """
    for exp_type in config_dict()['exp_order']:
        # Per-experiment output directory plus a dedicated log file.
        save_dir = f'{os.path.dirname(os.path.abspath(__file__))}/search-{args.save}-{exp_type}-{dataset}-{time.strftime("%Y%m%d-%H%M%S")}'
        utils.create_exp_dir(save_dir)
        fh = logging.FileHandler(os.path.join(save_dir, 'log.txt'))
        fh.setFormatter(logging.Formatter(log_format))
        logging.getLogger().addHandler(fh)

        np.random.seed(args.seed)
        logging.info("args = %s", args)

        # setup NAS search problem
        if exp_type == 'micro':  # NASNet search space
            n_var, lb, ub = set_micro_exp(args)
        elif exp_type == 'macro':  # modified GeneticCNN search space
            n_var, lb, ub = set_macro_exp(args)
        elif exp_type == 'micromacro' or exp_type == 'micro_garbage' or exp_type == 'macro_garbage':  # modified GeneticCNN search space
            # Combined space: macro bounds first, then micro bounds.
            n_var_mac, lb_mac, ub_mac = set_macro_exp(args)
            n_var_mic, lb_mic, ub_mic = set_micro_exp(args)
            n_var = n_var_mic + n_var_mac
            lb = np.array([*lb_mac, *lb_mic])
            ub = np.array([*ub_mac, *ub_mic])
        else:
            raise NameError('Unknown search space type')

        problem = NAS(n_var=n_var, search_space=exp_type,
                      n_obj=2, n_constr=0, lb=lb, ub=ub,
                      init_channels=args.init_channels, layers=args.layers,
                      epochs=args.epochs, save_dir=save_dir, batch_size=args.batch_size)

        # configure the nsga-net method
        method = engine.nsganet(pop_size=args.pop_size,
                                n_offsprings=args.n_offspring,
                                eliminate_duplicates=True)

        # NOTE(review): 'termination' stays unbound when args.termination is
        # neither 'ngens' nor 'time' — minimize() would raise NameError; verify
        # the argument is validated upstream.
        if args.termination == 'ngens':
            termination = ('n_gen', args.n_gens)
        elif args.termination == 'time':
            termination = TimeTermination(time.time(), args.max_time)

        res = minimize(problem,
                       method,
                       callback=do_every_generations,
                       termination=termination)

        # Column 0 of the objective matrix is treated as the accuracy values.
        val_accs = res.pop.get('F')[:, 0]

        if exp_type == 'microtomacro' or exp_type == 'micro':
            # Persist the genome that achieved the minimal objective value.
            best_idx = np.where(val_accs == np.min(val_accs))[0][0]
            best_genome = res.pop[best_idx].X
            with open(f'{save_dir}/best_genome.pkl', 'wb') as pkl_file:
                pickle.dump(best_genome, pkl_file)
        if exp_type == 'microtomacro':
            set_config('micro_creator', make_micro_creator(best_genome))

    # NOTE(review): uses val_accs from the final loop iteration; raises
    # NameError when 'exp_order' is empty — confirm that cannot happen.
    return (100 - np.min(val_accs)) / 100
Beispiel #10
0
def set_all_config():
    """Replace the whole JSON config with the request body."""
    try:
        new_config = request.get_json(force=True)
    except JSONDecodeError:
        # Malformed JSON in the request body -> 400 (abort raises).
        abort(400)
    config.set_config(new_config)
    config.save()
    return Response('success')
Beispiel #11
0
 def post(self):
     """Save every known config key from the request as app settings."""
     if not self.ensure_admin():
         return
     # Missing/empty values are normalised to the empty string.
     settings = {key: self.request.get(key) or '' for key in CONFIG_KEYS}
     logging.info(settings)
     config.set_config(**settings)
     self.render()
Beispiel #12
0
def create_app(register_blueprints=True):
    """Application factory for the Flask app.

    Blueprint/hook registration can be disabled, which is handy for
    lightweight test instances.
    """
    app = Flask(__name__)

    # Core configuration and extensions are always wired up, in this order.
    for configure in (set_config, configure_extensions):
        configure(app)

    if register_blueprints:
        configure_blueprints(app)
        configure_hook(app)

    return app
def main():
    """Run the train-spotting pipeline for the webcam2_first2000 experiment."""
    # Experiment selection plus image preprocessing parameters.
    config.set_config(base_dir = '/home/oferb/docs/train_project', experiment_id='webcam2_first2000', lowres=300, crop='150x100+150+0')

    # first time image processing:
    #code_written_to_run_once.rename_image_files()
    #utils.copy_image_subset(os.path.join(config.all_data, 'webcam2', 'frames_res300_crop_150x100+150+0'), os.path.join(config.all_data, 'webcam2_first2000', 'frames_res300_crop_150x100+150+0'), range(0,2000))

    # NOTE(review): this result is immediately overwritten by the shelve open
    # below — process_video appears to be called for its side effects; verify.
    datafile = process_video(motion_thresh=2)
    datafile = shelve.open(os.path.join(config.experiment_output, 'shelve.data'))
    # Label train sightings from frame timestamps and per-frame change values.
    train_spotted = use_hmm(datafile['img_times'], datafile['change_vals'])
    datafile['train_spotted'] = train_spotted
    datafile.close()
Beispiel #14
0
 def __init__(self,
              episodes_per_day=None,
              encounter_size=None,
              prob_unnecessary_worry=0,
              config=None):
     """Set up test-queue simulation parameters.

     Arguments that are falsy fall back to the corresponding ``self.cfg``
     values installed by set_config().
     """
     set_config(self, config)
     # NOTE(review): a falsy value (including an explicit 0) falls back to
     # the config default, so prob_unnecessary_worry=0 cannot be expressed —
     # confirm this is intended.
     if not prob_unnecessary_worry:
         prob_unnecessary_worry = self.cfg.PROB_NON_C19_SYMPTOMS_PER_DAY
     self.episodes_per_day = episodes_per_day or self.cfg.SIMULATOR_PERIODS_PER_DAY
     self.encounter_size = encounter_size or self.cfg.MEAN_NETWORK_SIZE
     # Spread the daily worry probability evenly across the day's episodes.
     self.prob_worry = prob_unnecessary_worry / self.episodes_per_day
     self.queues = [TestQueue()]
     self.test_recorder = []
Beispiel #15
0
async def init_forward(forward, client, dialogs: list):
    """Resolve chat ids/titles for one forward rule and start forwarding.

    Fills in a missing source id (looked up by title) or refreshes the
    stored source title (looked up by id), resolves the primary and
    optional secondary destinations the same way, then hands the resolved
    rule to forward_exec.  Logs and returns early when either the source
    or the primary destination cannot be resolved.
    """

    from_chat_id = forward['from_chat_id']
    from_chat_title = forward['from_chat_title']
    to_primary = forward['to_primary']
    logging.info(f'init_forward: {from_chat_id}')

    is_ready = False

    # Source chat: an id of 0 means "unknown" — resolve it from the title;
    # otherwise refresh the stored title from the dialog list.
    if from_chat_id == 0:
        from_chat_id = await get_chat_id_by_name(client, from_chat_title)
        if from_chat_id != 0:
            forward['from_chat_id'] = from_chat_id
            is_ready = True
    else:
        chat = get_chat_by_id_list(from_chat_id, dialogs)
        if chat is not None:
            chat_title = chat.title
            forward['from_chat_title'] = chat_title
            db_stats.upsert_channel(from_chat_id, None, chat_title)
            is_ready = True

    if not is_ready:
        logging.info('Cannot add forward source %s', from_chat_title)
        return

    main_channels.append(from_chat_id)

    # Primary destination: resolve a missing id from its title.
    if to_primary['id'] == 0:
        is_ready = False
        id_ = await get_chat_id_by_name(client, to_primary['title'])
        if id_ != 0:
            to_primary['id'] = id_
            is_ready = True

    if not is_ready:
        logging.info('Cannot add primary destination %s', from_chat_title)
        return

    to_secondary_id = 0
    to_primary_id = to_primary['id']

    # Optional secondary destination; stays 0 when absent or unresolved.
    if 'to_secondary' in forward:
        to_secondary = forward['to_secondary']
        to_secondary_id = to_secondary['id']
        if to_secondary_id == 0:
            to_secondary_id = await get_chat_id_by_name(
                client, to_secondary['title'])

    config.set_config(main_config)
    forward_exec(from_chat_id, to_primary_id, to_secondary_id, client)
Beispiel #16
0
def prepare_database(ctx):
    """Reset the database schema for test runs.

    Switches to the 'testing' configuration, then delegates to the test
    helper that drops the current schema and recreates it with all
    migrations applied.  Intended for test running only.

    Usage:
        inv dev.prepare_database
    """
    from config import set_config
    set_config('testing')

    from tests import prepare_database
    prepare_database()
Beispiel #17
0
 def __init__(self, society, config=None, name=None, home=None):
     """Create a healthy, susceptible person belonging to *society*."""
     set_config(self, config)
     self.society = society
     self.name = name
     # Household attribute used by CityPopulation.
     self.home = home
     # Infection state: starts healthy, not immune, infected by no one.
     self.infected = False
     self.infectious = False
     self.immune = False
     self.disease = None
     self.infector = None
     self.victims = set()
     self.isolation = None
     self.time_since_infection = 0
     # Fraction of a day represented by one simulation episode.
     self.episode_time = 1. / self.society.episodes_per_day
Beispiel #18
0
def init_tornado(ENVIRONMENT):
    """Configure and start the Tornado web server for *ENVIRONMENT*.

    Must be executed last: it blocks inside the IOLoop.  Non-production
    environments run a single process with command-line options parsed;
    production binds the port and forks one process per CPU.

    NOTE(review): uses Python 2 print statements — this module targets
    Python 2.
    """

    config.set_config(ENVIRONMENT)

    CONFIG = config.get_config()

    import tornado.ioloop
    import tornado.web
    import tornado.httpserver
    import tornado.options

    from urls import urls

    # application

    # Template/static paths are resolved relative to this file.
    SETTINGS = dict(template_path=os.path.join(os.path.dirname(__file__),
                                               "templates"),
                    static_path=os.path.join(os.path.dirname(__file__),
                                             "static"),
                    debug=CONFIG['runtime']['debug'],
                    login_url="/login",
                    cookie_secret=CONFIG['runtime']['cookie-secret'])

    application = tornado.web.Application(handlers=urls, **SETTINGS)

    print 'Development server is running at {0}://{1}:{2}/'.format(
        CONFIG['runtime']['protocol'], CONFIG['runtime']['base-url'],
        CONFIG['runtime']['port'])

    print 'Quit the server with CONTROL-C'

    for each in urls:
        print each[0], '\t', each[1]  # print urls

    if ENVIRONMENT != "production":
        tornado.options.parse_command_line()
        server = tornado.httpserver.HTTPServer(application)
        server.listen(CONFIG['runtime']['port'])
        tornado.ioloop.IOLoop.instance().start()
    else:
        # multiple processes in production environment
        server = tornado.httpserver.HTTPServer(application)
        server.bind(CONFIG['runtime']['port'])
        server.start(0)  # forks one process per cpu
        tornado.ioloop.IOLoop.instance().start()
Beispiel #19
0
def db_reset(config_key):
    """Drop every table, with an extra confirmation step in production."""
    if config_key == 'sel':
        current_app.config.from_object(set_config(select=True))
    elif config_key == 'env':
        current_app.config.from_object(set_config())
    # here add an --all argument whose create all db files and models
    if current_app.config['ENV'] == 'production':
        prompt = (
            '\nThe data you are trying to erase belongs to a PRODUCTION environment!\n'
            'If you confirm this action all sensible information stored in the actual database will be lost forever.\n\n'
            'Press [D] to confirm this action : ')
        answer = str(input(prompt))
        if answer.lower() != 'd':
            print('INFO : no action has been taken.')
            exit(0)
    db.drop_all()
    print("\nSUCCESS : all data have been dropped.")
Beispiel #20
0
def main():
    """Pygame main loop with platform collision handling for the heroine."""
    colided_list = []
    level, screen, clock, fps, rect_list = set_config()
    group = RenderUpdates()
    heroine = Hero(100, 30, group)

    # Pressed-state per key of interest; 27 is the raw keycode for ESC.
    keys = {
        K_LEFT: False,
        K_RIGHT: False,
        K_UP: False,
        K_DOWN: False,
        K_RETURN: False,
        27: False,
        K_a: False
    }  # note: 27 = 'esc'

    pygame.display.flip()

    while True:
        clock.tick(fps)
        # Track key up/down transitions for the keys we care about.
        for event in pygame.event.get([KEYUP, KEYDOWN]):
            value = (event.type == KEYDOWN)
            if event.key in keys:
                keys[event.key] = value

        # Collision with a platform rectangle not yet visited.
        idx = heroine.rect.collidelist(rect_list)
        if idx != -1 and rect_list[idx] not in colided_list:
            colided_list.append(rect_list[idx])
            # NOTE(review): Python 2 print statement — this module targets Python 2.
            print rect_list[idx]
            # NOTE(review): the upper bound compares against rect width rather
            # than rect.x + width — looks suspicious; verify intended.
            if rect_list[
                    idx].x <= heroine.rect.x and heroine.rect.x <= rect_list[
                        idx].width:
                heroine.fsm.set_state("stand_still")
                heroine.cannot_move_to = None
            else:
                heroine.cannot_move_to = heroine.fsm.side
        if idx == -1:
            heroine.fsm.set_state("fall")

        if keys[27]:  # ESC key
            pygame.quit()
            sys.exit()
        elif keys[K_LEFT] and heroine.cannot_move_to != "left":
            heroine.fsm.set_state("move")
            heroine.fsm.update("left")
        elif keys[K_RIGHT] and heroine.cannot_move_to != "right":
            heroine.fsm.set_state("move")
            heroine.fsm.update("right")
        elif keys[K_UP]:
            heroine.fsm.set_state("jump")
        elif keys[K_DOWN]:
            heroine.fsm.set_state("get_down")
        elif keys[K_a]:
            heroine.fsm.set_state("attack")
        heroine.fsm.auto_update()
        utils.clear_screen(level, screen)
        pygame.display.update(group.draw(screen))
def main():
    """Pull one batch from each audio generator and report its shape."""
    scale_gen = CMajorScaleDistribution(32)
    options, args = set_config().parse_args()
    wav_paths = glob.glob(options.DATA_PATH + '/nsynth-test/audio/*wav')
    nsynth_gen = NSynthGenerator(wav_paths, 32)
    # Both generators are advanced; only the NSynth batch shape is printed.
    batch = next(scale_gen)
    batch = next(nsynth_gen)
    print(batch.shape)
Beispiel #22
0
    def __init__(self,
                 days_infectious=None,
                 pr_transmission_per_day=None,
                 config=None):
        """Create the disease model, defaulting parameters from config.

        Falsy arguments fall back to the corresponding ``self.cfg`` values
        installed by set_config().
        """
        set_config(self, config)
        # Total infectious window defaults to pre-symptom + symptomatic days.
        days_infectious = days_infectious or (
            self.cfg.DAYS_INFECTIOUS_TO_SYMPTOMS + self.cfg.DAYS_OF_SYMPTOMS)
        pr_transmission_per_day = pr_transmission_per_day or self.cfg.PROB_INFECT_IF_TOGETHER_ON_A_DAY
        Disease.__init__(self, days_infectious, pr_transmission_per_day)
        self.days_before_infectious = self.cfg.DAYS_BEFORE_INFECTIOUS
        self.days_to_symptoms = self.cfg.DAYS_INFECTIOUS_TO_SYMPTOMS
        self.prob_symptomatic = self.cfg.PROB_SYMPTOMATIC

        # when you stop showing symptoms, you stop being infectious
        self.days_of_symptoms = days_infectious - self.days_to_symptoms
        if self.cfg.DAYS_OF_SYMPTOMS != self.days_of_symptoms:
            # Flag when a caller-supplied window overrides the config value.
            logging.info(
                f"setting days of symptoms to {self.days_of_symptoms} rather than {self.cfg.DAYS_OF_SYMPTOMS}"
            )
Beispiel #23
0
async def handle_set_config(body, writer):
    """Parse *body* as JSON and apply it as the new config.

    Replies 204 on success, 400 when the JSON is invalid or contains
    unknown config keys.
    """
    log.info("handle_set_config")
    try:
        parsed = ujson.loads(body)
        accepted = config.set_config(parsed)
        if accepted:
            await writer.awrite("HTTP/1.0 204 No Content\r\n\r\n")
        else:
            await writer.awrite(
                "HTTP/1.0 400 Bad Request\r\n\r\nInvalid config keys")
    except Exception:
        await writer.awrite("HTTP/1.0 400 Bad Request\r\n\r\nInvalid json")
Beispiel #24
0
def configure(interactive, token=None, account=None, db=None):
    """Collect tempo settings (interactively or from args) and persist them."""
    if interactive:
        default_db = '{}/.tempo/db.sqlite3'.format(pathlib.Path.home())
        cfg = {}
        # Prompt order matters for the interactive flow.
        cfg['token'] = click.prompt('Token')
        cfg['account_id'] = click.prompt('Account ID')
        cfg['db_path'] = click.prompt('Path to the DB', default=default_db)
    else:
        # All three values are required in non-interactive mode.
        if not token or not account or not db:
            print(
                'Please, provide all the parameters (token, account id and db path) or enter the interactive mode -i'
            )
            exit(1)
        cfg = {'token': token, 'account_id': account, 'db_path': db}
    set_config(cfg)
    set_db()
Beispiel #25
0
def config():
    """GET returns the stored JSON config; POST applies form updates to it.

    Always responds 200 with a JSON body.
    """
    with open(CONFIG_FILE, 'r') as config_json:
        config = json.load(config_json)

    if request.method == 'GET':
        response = jsonify(config)
    elif request.method == 'POST':
        # Bug fix: compute the updated config BEFORE truncating the file,
        # so a failure inside set_config cannot wipe the existing config.
        new_config, response = set_config(config, request.form)
        with open(CONFIG_FILE, 'w') as config_file:
            json.dump(new_config, config_file)
        response = jsonify(response)

    response.status_code = 200
    return response
Beispiel #26
0
    def show_basedir_dialog(self):
        """Interactive dialog to choose the local OneDrive sync directory.

        Defaults to ``<home>/OneDrive``; stores the chosen path under the
        ``base_path`` config key when it is usable.
        """
        print('=' * 30)
        if not query_yes_no(
                'Do you want to specify the path to local OneDrive repository?'
        ):
            print('	Skipped')
            return
        path = input(
            'Please enter the abs dir path to sync with your OneDrive (default: '
            + config.USER_HOME_PATH + '/OneDrive): ').strip()
        if path == '': path = config.USER_HOME_PATH + '/OneDrive'
        result = False
        try:
            # presumably True when the dir exists or was created — confirm.
            result = mkdir_if_missing(path)
        except OSError as e:
            print('OSError {}.'.format(e))

        if not result:
            print('CRITICAL: the path cannot be used for syncing.')
        else:
            config.set_config('base_path', path)
            config.save_config()
            print('Now use "%s" as the OneDrive base path.' % path)
    def execute(self, args: Namespace):
        """
        Calculate metrics for the specified video and output to Firebase.
        """
        initialize_app()
        config, _, _ = get_latest_config()
        set_config(config)

        # Timestamped node name so successive runs never collide.
        # (Renamed local so it no longer shadows any 'time' module.)
        stamp = datetime.utcnow().strftime("%Y%m%d-%H%M%S")

        video_ref = db.reference("metrics").child("input")
        stamp_ref = video_ref.child(stamp)

        # One entry per metric sample for this run.
        stamp_ref.set([{
            "true_positive": m.true_positive,
            "false_positive": m.false_positive,
            "false_negative": m.false_negative,
        } for m in get_metrics()])

        # Record which timestamp is the most recent upload.
        video_ref.child("latest").set(stamp)
Beispiel #28
0
 def show_log_path_dialog(self):
     """Interactive dialog to choose the log output file (or stderr)."""
     print('=' * 30)
     # Make sure the key exists so the lookup below cannot KeyError.
     if 'log_path' not in config.APP_CONFIG:
         config.set_config('log_path', None)
     # None means "log to stderr".
     if config.APP_CONFIG['log_path'] is None:
         display_log_path = 'stderr'
     else:
         display_log_path = config.APP_CONFIG['log_path']
     print('Current file path to display logs: ' + display_log_path)
     if not query_yes_no('Do you want to change this to another path?'):
         print('	Skipped')
         return
     print('WARNING: the specified file will be overwritten!')
     path = input('new abs file path ([Enter] for stderr): ').strip()
     if path == '':
         path = None
     else:
         # Probe that the path is writable before committing it.
         # (Narrowed from a bare except so Ctrl-C is not swallowed.)
         try:
             with open(path, 'w') as f:
                 f.write('')
         except Exception:
             print('Error: "' + path + '" is not a writable file path.')
             return
     config.set_config('log_path', path)
     config.save_config()
     if path is None:
         path = 'stderr'
     print('Now use ' + path + ' to show logs.')
Beispiel #29
0
def test_connection():
    """Smoke-test a combined OR search against the dev Solr instance."""
    from datastructures import Searchable, Facet
    from query import Query
    import connection
    import config

    # Ad-hoc searchable types; will move to set_up/tear_down when cleaned up.
    class CategorySearchable(Searchable):
        multivalued = True
        solr_query_field = 'categories'
        solr_query_param = 'fq'

    class RegionSearchable(Searchable):
        multivalued = True
        solr_query_field = 'regions'
        solr_query_param = 'fq'

    region = RegionSearchable(1)
    category = CategorySearchable(1)
    combined = Searchable(region, category, operator='OR')

    # Point the client at the dev server, then run the search.
    config.set_config('dev', '0.0.0.0', 8983)
    config.load_config('dev')
    connection.search(Query(combined, Facet('regions', mincount=1)))
Beispiel #30
0
def main():
    """Pygame main loop: read the keyboard and drive the heroine's actions."""
    level, screen, clock, fps = set_config()
    group = RenderUpdates()
    heroine = Hero(20, 140, "elisa", group)

    # Pressed-state per key of interest; 27 is the raw keycode for ESC.
    keys = {
        K_LEFT: False,
        K_RIGHT: False,
        K_UP: False,
        K_DOWN: False,
        K_RETURN: False,
        27: False,
        K_a: False
    }  # note: 27 = 'esc'

    pygame.display.flip()
    while True:
        clock.tick(fps)

        # Track key up/down transitions for the keys we care about.
        for e in pygame.event.get([KEYUP, KEYDOWN]):
            valor = (e.type == KEYDOWN)
            if e.key in keys.keys():
                keys[e.key] = valor

        if keys[27]:  # ESC key
            pygame.quit()
            sys.exit()
        # An in-progress attack animation takes priority over new input.
        if heroine.attacking:
            heroine.animate_attack()
        elif keys[K_LEFT]:
            heroine.move("left")
        elif keys[K_RIGHT]:
            heroine.move("right")
        elif keys[K_UP]:
            heroine.jump()
        elif keys[K_DOWN]:
            heroine.get_down()
        elif keys[K_a]:
            heroine.attack()
        #heroine.is_jumping(gravity)

        utils.clear_screen(level, screen)
        pygame.display.update(group.draw(screen))
def main():
    """Preprocess examples from *infile*, run BertSupportNet inference and
    write predictions to *outfile* via predict().

    Dumps intermediate examples/features to gzip pickles along the way.
    """
    tokenizer = BertTokenizer.from_pretrained('./albert_pretrain/vocab.txt')
    # NOTE(review): 'cuda: 0' contains a space — verify torch.device accepts it.
    device = torch.device('cuda: 0' if torch.cuda.is_available() else 'cpu')

    # 1. Load the data
    examples = read_examples(full_file=infile)  # location of the input data

    # 2. Preprocess the data
    with gzip.open('./dev_example.pkl.gz', 'wb') as fout:
        pickle.dump(examples, fout)

    features = convert_examples_to_features(examples,
                                            tokenizer,
                                            max_seq_length=512,
                                            max_query_length=50)
    with gzip.open('./dev_feature.pkl.gz', 'wb') as fout:
        pickle.dump(features, fout)

    # Build the dev example/feature dictionaries and loader.
    args = set_config()
    helper = DataHelper(gz=True, config=args)
    dev_example_dict = helper.dev_example_dict
    dev_feature_dict = helper.dev_feature_dict
    eval_dataset = helper.dev_loader

    roberta_config = BertConfig.from_pretrained(
        './albert_pretrain/bert_config.json')
    encoder = BertModel.from_pretrained(args.bert_model, config=roberta_config)
    model = BertSupportNet(config=args, encoder=encoder)

    # Checkpoint was saved from cuda:6; remap it onto cuda:0.
    model.load_state_dict(
        torch.load(
            './output/checkpoints/train_v1/ckpt_seed_44_epoch_20_99999.pth',
            map_location={'cuda:6': 'cuda:0'}))

    model.to(device)

    predict(model, eval_dataset, dev_example_dict, dev_feature_dict, outfile)
Beispiel #32
0
 #!/usr/bin/env python

import re
import os
import json
import model
import config
import model_utils

from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash, send_from_directory, jsonify
from werkzeug.contrib.fixers import ProxyFix
from werkzeug.utils import secure_filename

# Flask application setup; configuration is applied by the config module.
app = Flask(__name__, template_folder="./templates/")
config.set_config(app)

logger = app.logger#, getLogger('sqlalchemy'),
#           getLogger('otherlibrary')]
import logging
# Mirror app logging into ./app.log with timestamps and source locations.
fh = logging.FileHandler("./app.log")
fh.setFormatter(logging.Formatter(
    '%(asctime)s %(levelname)s: %(message)s '
    '[in %(pathname)s:%(lineno)d]'
))

logger.addHandler(fh)

fh.setLevel(logging.INFO)

@app.route('/api/login', methods=['GET', 'POST'])
def login_handler():
Beispiel #33
0
        self.user["agree"] = int(d(".zm-profile-header-user-agree strong").text())
        self.user["thanks"] = int(d(".zm-profile-header-user-thanks strong").text())
        self.user["asks"] = int(d(".profile-navbar a.item").eq(1).find("span.num").text())
        self.user["answers"] = int(d(".profile-navbar a.item").eq(2).find("span.num").text())
        self.user["posts"] = int(d(".profile-navbar a.item").eq(3).find("span.num").text())
        self.user["collections"] = int(d(".profile-navbar a.item").eq(4).find("span.num").text())
        self.user["logs"] = int(d(".profile-navbar a.item").eq(5).find("span.num").text())

        self.user["followings_num"] = int(d(".zm-profile-side-following a.item").eq(0).find("strong").text())
        self.user["followers_num"] = int(d(".zm-profile-side-following a.item").eq(1).find("strong").text())

        self.user["weibo"] = d("a.zm-profile-header-user-weibo").attr("href") or ""

    def get(self):
        """Crawl this user's followings page and profile; return the data."""
        self.logger.warning("Start crawling %s", self.user_domain)
        self.get_followings_page()
        self.get_profile()
        self.logger.warning("Finish crawling profile for %s,", self.user_domain)
        return self.user


# Manual test entry point: crawl one profile and dump every field.
if __name__ == "__main__":
    config.set_config("env", "dev")
    logging.basicConfig(level=logging.INFO)
    s = Session.get()
    pc = ProfileCrawler(s, "jonas-lu")
    user = pc.get()
    for p in user:
        print(p, user[p])
import db
import config
import time

# Connect to the database described by the loaded configuration.
cfg = config.set_config()
d = db.DBClient(cfg['db_ip'], cfg['db_name']).main()


def main():
    """Print every document in the discovery_inventory collection."""
    # NOTE(review): Python 2 print statement — this script targets Python 2.
    result = d.discovery_inventory.find()
    for i in result:
        print i

main()
Beispiel #35
0
 def focus_cb(self, widget, data = None):
     """Widget callback: trigger an autofocus drive on the camera."""
     cam_config = camera.Config(self.camera)
     root = cam_config.get_root_widget()
     # Setting 'autofocusdrive' to 1 asks the camera to run autofocus.
     root.get_child_by_name('autofocusdrive').set_value(1)
     cam_config.set_config()
Beispiel #36
0
# Command-line interface: choose an output formula encoding plus the
# schedule parameters.
args = argparse.ArgumentParser(description="Produce an SMT forumla representing a schedule for teams competing in SR")

form = args.add_mutually_exclusive_group()
form.add_argument("--z3", action="store_true", default=False, dest="z3", help="Produce a formula using Z3 enumerations")
form.add_argument("--qfbv", action="store_true", default=False, dest="qfbv", help="Produce a formula with enumerated bitvector variables")
form.add_argument("--qfaufbv", action="store_true", default=False, dest="qfaufbv", help="Produce a formula using an uninterpreted function")

args.add_argument("--rounds", type=int, help="Number of rounds", default=13)
args.add_argument("--teams", type=int, help="Number of teams", default=32)
args.add_argument("--closeness", type=int, help="Closeness constraint", default=5)
args.add_argument("--slots", type=int, help="Slots per match", default=4)

the_args = args.parse_args()

# Install the schedule parameters as global configuration, then derive
# bit widths and sanity-check the combination.
set_config(the_args.rounds, the_args.teams, the_args.closeness, the_args.slots)
compute_bitwidths()
validate_config()


# NOTE(review): Python 2 print statements — this script targets Python 2.
print "(set-info :status unknown)"
print "(set-option :produce-models true)"

output_object = None
if the_args.z3:
    output_object = Z3()
elif the_args.qfaufbv:
    output_object = QFAUFBV()
elif the_args.qfbv:
    output_object = QFBV()
else:
Beispiel #37
0
def run(args):
    """CLI entry point: store a single name/value pair in the config."""
    # Blank line before any output from the config call.
    print()
    config.set_config(args.name, args.value)
Beispiel #38
0
import slave
import master
import config

# Role ("master" or "slave") is chosen by the first command-line argument.
mode = sys.argv[1]
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(CURRENT_DIR))

fmt = "%(asctime)s %(name)s %(message)s"
logging.basicConfig(level=logging.WARNING, format=fmt)


def on_term_handler(sig, frame):
    """SIGTERM handler: shut down whichever role this process runs."""
    logging.warning('Recv SIGTERM')
    if mode == "slave":
        slave.terminate()
    elif mode == "master":
        master.terminate()

signal.signal(signal.SIGTERM, on_term_handler)

# Switch the shared config to the "run" environment before starting.
config.set_config('env', 'run')
if mode:
    if mode == "master":
        master.start('jonas-lu')
    else:
        # Optional second argument selects the slave instance id.
        instance_id = sys.argv[2] if len(sys.argv) >= 3 else 0
        slave.start(instance_id)
else:
    sys.exit('Invalid arguments')
Beispiel #39
0
def login():
    """Authenticate against m.lattelecom.tv and persist session cookies.

    Skips work if a previous login is recorded in config. Otherwise walks
    the site's multi-step auth flow (session cookie, tracking gif, session
    registration, credential POST) and saves the cookie jar on success.

    Raises Exception on any failed step. Python 2 only (cookielib,
    urllib.urlencode, print statement).
    """
    # Already logged in? Nothing to do.
    logged_in = config.get_config("logged_in")
    if (logged_in is not None and len(logged_in) > 0):
        return

    # Start from a clean cookie state.
    config.delete_cookiejar()

    # Step 1: fetch the authorization page to obtain a PHP session cookie.
    opener, cookiejar = get_url_opener()
    response = opener.open('http://m.lattelecom.tv/authorization')
    response_code = response.getcode()
    if response_code != 200:
        raise Exception("Cannot get session id")
    response_text = response.read()

    if "captcha" in response_text:
        raise Exception("captcha not implemented")

    # Extract the PHPSESSID value set by the request above.
    phpsessid = None
    for cookie in cookiejar:
        if cookie.name == 'PHPSESSID':
            phpsessid = cookie.value
    if phpsessid is None:
        raise Exception("phpsessid not found")

    # Add a site-specific bitrate cookie by hand, then persist the jar.
    bitrate_cookie = cookielib.Cookie(version=0, name='MobBitr', value='1', port=None, port_specified=False,
                                      domain='m.lattelecom.tv', domain_specified=False, domain_initial_dot=False, path='/',
                                      path_specified=True, secure=False, expires=None, discard=True, comment=None,
                                      comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
    cookiejar.set_cookie(bitrate_cookie)
    cookiejar.save(filename=config.get_cookiejar_file(), ignore_discard=True)

    # some extra requests for auth (big brother)
    # Step 2: the auth page embeds a tracking gif we must fetch.
    match = re.search('src="(/auth/[\d]+\.gif)"', response_text)
    if match is None:
        config.delete_cookiejar()
        # @TODO maybe retry?
        raise Exception("auth gif not found")

    gif_path = 'http://m.lattelecom.tv' + match.group(1)
    opener, cookiejar = get_url_opener('http://m.lattelecom.tv/authorization')
    opener.open(gif_path)

    # Step 3: register the session, passing the gif URL base64-encoded.
    gif64 = base64.b64encode(gif_path)
    opener, cookiejar = get_url_opener('http://m.lattelecom.tv/authorization')
    url = 'https://auth.lattelecom.lv/url/session?sid=' + phpsessid + '&sp=OPT&retUrl=' + gif64 + '='
    opener.open(url)

    # perform real login
    # Step 4: POST the stored credentials.
    opener, cookiejar = get_url_opener()
    username = config.get_config("username")
    password = config.get_config('password')
    params = urllib.urlencode(dict(login='******', email=username, passw=password))
    utils.log(params)

    response = opener.open('http://m.lattelecom.tv/authorization/', params)
    response_text = response.read()
    if re.search('is_logged_in=true', response_text) is None:
        print response_text
        raise Exception("login failed")

    # Success: persist cookies and remember the logged-in state.
    cookiejar.save(filename=config.get_cookiejar_file(), ignore_discard=True)
    config.set_config("logged_in", "yeah!")

    utils.log("login success!")
Beispiel #40
0
#!/usr/bin/python
# -*- coding: utf-8 -*-

import logging
from logging.handlers import RotatingFileHandler
from lbindex import index_registries
from lbdaemon import Daemon
from lbrest import LBRest
from multiprocessing import Pool
import config
import sys

# Load daemon configuration before anything else touches it.
config.set_config()

# Set up log configurations
# Dedicated "LBIndex" logger writing to a size-rotated file.
logger = logging.getLogger("LBIndex")
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#handler = logging.FileHandler(config.LOGFILE_PATH)
max_bytes = 1024*1024*20 # 20 MB
# Keep up to 10 rotated files of at most 20 MB each.
handler = RotatingFileHandler(config.LOGFILE_PATH, maxBytes=max_bytes, backupCount=10, encoding=None)
handler.setFormatter(formatter)
logger.addHandler(handler)

class LBIndex(Daemon):
    """ Light Base Golden Extractor Daemon
    """

    def run(self):
        """ 
Beispiel #41
0
 def set_config(self, key, value):
     """Store *value* under *key* in the 'discord' configuration section.

     Returns whatever the underlying config.set_config call returns.
     """
     section = 'discord'
     return config.set_config(section, key, value)
Beispiel #42
0
                  action="store_true",
                  default=False,
                  dest="qfaufbv",
                  help="Produce a formula using an uninterpreted function")

# Schedule-shape parameters (parser `args` is created above, outside this view).
args.add_argument("--rounds", type=int, help="Number of rounds", default=13)
args.add_argument("--teams", type=int, help="Number of teams", default=32)
args.add_argument("--closeness",
                  type=int,
                  help="Closeness constraint",
                  default=5)
args.add_argument("--slots", type=int, help="Slots per match", default=4)

the_args = args.parse_args()

# Push CLI parameters into module configuration, then derive/check bit widths.
set_config(the_args.rounds, the_args.teams, the_args.closeness, the_args.slots)
compute_bitwidths()
validate_config()

# SMT-LIB preamble. NOTE: Python 2 print statements.
print "(set-info :status unknown)"
print "(set-option :produce-models true)"

# Pick the encoding backend; QFBV is the default when no flag is given.
output_object = None
if the_args.z3:
    output_object = Z3()
elif the_args.qfaufbv:
    output_object = QFAUFBV()
elif the_args.qfbv:
    output_object = QFBV()
else:
    output_object = QFBV()
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from config import set_config

from flask import Flask, request, render_template, redirect, url_for, abort
from flask_pymongo import PyMongo

# Flask application object; configured by the project's set_config helper
# before the Mongo connection is created from that configuration.
app = Flask(__name__)

set_config(app)

mongo = PyMongo(app)


@app.route('/')
def index():
    """Render the listing page with every stored config document."""
    all_configs = mongo.db.configs.find({})
    return render_template('config_list.html', configs=all_configs)


@app.route('/config/new', methods=["GET", "POST"])
def config_new():
    """Serve the new-config form; the POST handling is truncated in this view."""
    if request.method == "GET":
        return render_template('config_new.html')
Beispiel #44
0
 def setconfig(self, k, v):
     """Persist the pair (*k*, *v*) into the configuration of this
     object's database handle."""
     database = self.db()
     config.set_config(database, k, v)
Beispiel #45
0
import config
import os

import sys
reload(sys)
sys.setdefaultencoding('utf8')

# Tornado server entry point: environment name is required on the command
# line and selects the configuration profile. Python 2 only (print statements,
# and the reload/setdefaultencoding idiom above).
# NOTE(review): this snippet is truncated — the SETTINGS dict literal is cut off.
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print "usage: python server.py [environment]"
        print "environment:\t production|testing|development"
        sys.exit(os.EX_USAGE)

    ENVIRONMENT = sys.argv[1]

    # Apply the chosen profile, then read the resulting configuration back.
    config.set_config(ENVIRONMENT)

    CONFIG = config.get_config()

    # Tornado is imported late, after configuration is in place.
    import tornado.ioloop
    import tornado.web
    import tornado.httpserver
    import tornado.options

    from urls import urls

    # application

    SETTINGS = dict(
        template_path=os.path.join(os.path.dirname(__file__), "templates"),
        static_path=os.path.join(os.path.dirname(__file__), "static"),
Beispiel #46
0
#!/usr/bin/env python2
# -*- coding: utf-8 -*-

from flask import Flask
import os
from config import set_config


# Flask application for the trees service; SQLite database lives next to
# the application package and the rest of the config comes from set_config.
app = Flask(__name__)
app.config['DATABASE'] = os.path.join(app.root_path, 'trees.db')

set_config(app)