Example #1
    def setUp(self):
        unittest.TestCase.setUp(self)

        agentFactory = WaeAgentFactory()
        environmentFactory = WaeEnvironmentFactory()
        trainerFactory = WaeTrainerFactory()
        buildParameterFactory = WaeBuildParameterFactory()
        store = Store(self.dbPath, self.trainLogFolderPath)
        logger = MyLogger(console_print=True)

        self.builder = Builder(trainerFactory, agentFactory,
                               environmentFactory, store, logger)

        self.buildParameters = []
        for k1 in range(2):
            nIntervalSave = 10
            nEpoch = 20
            nLayer = int(np.random.choice((1, 2)))

            self.buildParameters.append(
                WaeBuildParameter(int(nIntervalSave),
                                  int(nEpoch),
                                  label="test" + str(k1),
                                  nLayer=nLayer,
                                  eps_given_sinkhorn=0.1,
                                  tol_sinkhorn=0.1))

        self.loader = Loader(agentFactory, buildParameterFactory,
                             environmentFactory, store)
Example #2
    def load_module(self, script):
        src = open(script).read()
        parser = MemeParser(src)
        try:
            ast,_ = parser.apply("start")
        except Exception as err:
            print(err.formatError(''.join(parser.input.data)))
            sys.exit(1)

        print("---- AST ----")
        print(ast)
        print("//---- AST ----")
        self.current_module = _create_compiled_module({"filepath": script,
                                                       "ast": ast,
                                                       "parent_module":"memetalk/kernel",
                                                       "@tag":"a compiled module"})

        self.env_id_table = []
        self.env_idx = 0
        self.functions = []
        self.fun_literals = []

        self.loading_class = False

        loader = Loader([ast])
        loader.i = self
        loader.apply("load_module")
        return self.current_module
Example #3
    def load_manda(self):
        self.stock_split = Loader.stock_split()
        self.reverse_stock_split = Loader.reverse_stock_split()

        # If a stock split or reverse stock split (consolidation) is scheduled, apply the change ratio
        self.stock_split["date"] = list(map(lambda x: self.apply_manda_date(x), self.stock_split["date"].astype(str).values.tolist()))
        self.reverse_stock_split["date"] = list(map(lambda x: self.apply_manda_date(x), self.reverse_stock_split["date"].astype(str).values.tolist()))
Example #4
    def __init__(self):
        self.screen_size = (640,480)
        self.flags = pygame.DOUBLEBUF|pygame.HWSURFACE
        self._screen = pygame.display.set_mode(self.screen_size, self.flags)
        self.screen = self._screen.convert().subsurface(0,0,320,240)
        self.screen_rect = self.screen.get_rect()
        
        self.clock = pygame.time.Clock()
        self.fps = 60
        self.td = 0 # time delta
        self.keys = pygame.key.get_pressed()

        self.scene = None

        self.frames = {}
        self.sounds = {}

        self.loader = Loader()
        self.loader.image2load = 'sprite_sheet.png'
        self.loader.json_file = data.filepath('sprites.json')
        self.loader.sounds2load = [
            'DST-Defunkt.ogg',
            'menu.wav',
        ]


        self.loader.start()
        self.music = pygame.mixer.Channel(3)
        self.sfx = pygame.mixer.Channel(4)
Example #5
class munin(object):
    IRCU_ROUTER = 'munin.ircu_router'
    def __init__(self):
        config = ConfigParser.ConfigParser()
        if not config.read('muninrc'):
            raise ValueError("Expected configuration in muninrc, not found.")

        self.loader = Loader()
        self.loader.populate('munin')
        self.ircu_router = self.loader.get_module(self.IRCU_ROUTER)

        self.client = connection(config)
        self.client.connect()
        self.client.wline("NICK %s" % config.get("Connection", "nick"))
        self.client.wline("USER %s 0 * : %s" % (config.get("Connection", "user"),
                                                config.get("Connection", "name")))
        self.config = config
        router = self.ircu_router.ircu_router(self.client, self.config, self.loader)
        while True:
            try:
                self.reboot()
                break
            except socket.error as s:
                print("Exception during command at %s: %s" % (time.asctime(), s))
                traceback.print_exc()
                raise
            except socket.timeout as s:
                print("Exception during command at %s: %s" % (time.asctime(), s))
                traceback.print_exc()
                raise
            except reboot.reboot as r:
                continue
Example #6
def load_Rx(text_file_objx, color=None):
    vertices_multilinex = Loader.load_object_vertices(text_file_objx)
    Rx = Loader.create_obj_from_vertices(vertices_multilinex)
    if color is not None:
        Rx.color = color
    Rx = Obj(Rx, vertices_multilinex)
    return Rx
Example #7
def load(det, db):
    """
    Load file
    """
    try:

        auth = request.authorization
        loader = Loader(det, db, auth)
        state = loader.load(request)

        status_code = 200
        if state != 0:
            status_code = 500 + state

        resp = Response(loader.log, mimetype='text/plain')
        resp.status_code = status_code
        return resp

    except Exception as ex:

        print("ERROR:", ex)

        resp = Response(str(ex), mimetype='text/plain')
        resp.status_code = 500

        if len(ex.args) == 2 and type(ex.args[0]) == int and type(
                ex.args[1]) == str:
            resp = Response(ex.args[1], mimetype='text/plain')
            resp.status_code = ex.args[0]

        return resp
Example #8
def market_hours():
    if current_time.hour >= 10 and current_time.hour < 16:
        return Loader.loader()
    elif current_time.hour == 9 and current_time.minute >= 30:
        return Loader.loader()
    else:
        print('closed')
Example #9
    def test_mkdirs(self):
        """Appropriate destination folders were created."""
        Loader.load(self.loader)

        self.assertTrue(os.path.exists(os.path.join(
            self.loader.data['destination'][0], 'fighter', 'Peach', 'FitPeach00.pcs'
        )))
Example #10
    def __init__(self, args):
        """
        Initializes a Circleguard instance.

        [SimpleNamespace or argparse.Namespace] args:
            A namespace-like object representing how and what to compare. An example may look like
            `Namespace(cache=False, local=False, map_id=None, number=50, threshold=20, user_id=None)`
        """

        # Get all replays in the path to check against. Load this per Circleguard instance, or files the user moves around while the gui is open won't be picked up.
        self.PATH_REPLAYS = [
            join(PATH_REPLAYS_STUB, f) for f in os.listdir(PATH_REPLAYS_STUB)
            if isfile(join(PATH_REPLAYS_STUB, f)) and f != ".DS_Store"
        ]

        self.cacher = Cacher(args.cache)
        self.args = args
        if (args.map_id):
            self.users_info = Loader.users_info(args.map_id, args.number)
        if (args.user_id and args.map_id):
            user_info = Loader.user_info(
                args.map_id, args.user_id
            )[args.user_id]  # should be guaranteed to only be a single mapping of user_id to a list
            self.replays_check = [
                OnlineReplay.from_map(self.cacher, args.map_id, args.user_id,
                                      user_info[0], user_info[1], user_info[2])
            ]
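The docstring above spells out the shape of args; a minimal, hypothetical sketch of constructing such a namespace and handing it to this initializer (the class name Circleguard comes from the docstring, and the values mirror its example) might look like:

    from types import SimpleNamespace

    # Values mirror the docstring's example Namespace; map_id/user_id left unset here.
    args = SimpleNamespace(cache=False, local=False, map_id=None,
                           number=50, threshold=20, user_id=None)
    guard = Circleguard(args)  # assumes the surrounding class is Circleguard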
Example #11
    def query_intervals(cls, db, package_name):

        # This will get you all the query strings ever used for that package,
        # together with first-seen and last-seen dates.
        db.cursor.execute(
            """
            SELECT
                `packages`.`id` AS `package_id`,
                `packages`.`name` AS `package_name`,
                `deps`.`version_query`,
                MIN(`deps`.`started`) AS `first_started`,
                MAX(`deps`.`ended`) AS `last_ended`
            FROM
                `packages`,
                `project_explicit_dependencies` AS `deps`
            WHERE
                `packages`.`id`=`deps`.`package_id` AND
                `packages`.`name`=%s
            GROUP BY
                `packages`.`id`,
                `deps`.`version_query`;""", package_name)

        stored_rows = []
        latest_ts = -1

        # Each row is a specific query string
        for row in db.cursor.fetchall():
            (pkg_id, pkg_name, qs, first_started, last_ended) = row
            first_ts = Loader.dt2ts(first_started)
            last_ts = Loader.dt2ts(last_ended)
            stored_rows.append((qs, first_ts, last_ts))

            latest_ts = max(latest_ts, last_ts)

        return (stored_rows, latest_ts if latest_ts != -1 else None)
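As the comment above explains, the result is every version-query string ever recorded for the package plus first/last-seen timestamps. A hedged usage sketch (the owning class name PackageStats and the db handle are assumptions; the package name is illustrative):

    # Hypothetical call; PackageStats stands in for whatever class defines query_intervals.
    stored_rows, latest_ts = PackageStats.query_intervals(db, "requests")
    for version_query, first_ts, last_ts in stored_rows:
        print(version_query, first_ts, last_ts)
    if latest_ts is None:
        print("no dependency records found for this package")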
Example #12
def get_simulation_params(file_name, steps_inner=None, alpha=None):
    results, params, coeff_mask, hyper_params = load_pickle(file_name)
    print('model_name:  ' + str(hyper_params['model_name']))
    if alpha is not None:
        hyper_params['alpha'] = alpha
    if steps_inner is not None:
        print('steps_inner: ' + str(steps_inner) + '  trained with: ' +
              str(hyper_params['steps_inner']))
        hyper_params['steps_inner'] = steps_inner
    else:
        print('steps_inner: ' + str(hyper_params['steps_inner']))
    print('alpha:       ' + str(hyper_params['alpha']))
    rng = jax.random.PRNGKey(-1)
    loader = Loader(hyper_params)
    model, _ = loader.get_model_and_hyper_params(rng)

    if (hyper_params['model_name'] == 'IV'):
        params_psi = params[:hyper_params['n_psi']]
        params_phi = None
    else:
        params_psi = params[hyper_params['n_phi']:-1]
        params_phi = params[:hyper_params['n_phi']]
    sindy_coeff = params[-1][0]
    psi_vec = model.func['psi_vec']
    phi_vec = model.func['phi_vec']
    return phi_vec, psi_vec, coeff_mask, sindy_coeff, hyper_params, params_psi, params_phi
Example #13
    def __init__(self):
        self.loader = Loader()
        self.desk = self.loader.load_image("back.png")
        self.back = self.desk.copy()

        self.cards1 = self.loader.load_image("cards1.png", True)
        self.cards2 = self.loader.load_image("cards2.png", True)

        self.card_back = self.loader.load_image("card.png", True)
        self.card_images = []
        for i in range(18):
            self.card_images.append(
                self.loader.load_image("img%d.png" % (i + 1)))

        self.gfxcards = []
        self.gfxcards_hidden = []
        self.delay = 0

        self.title_fnt = self.loader.load_font("KLEPTOMA.TTF", 50)
        self.text_fnt = self.loader.load_font("scribble.TTF", 24)
        self.small_text_fnt = self.loader.load_font("scribble.TTF", 15)

        self.START_SCREEN = 0
        self.GAME_PLAY = 1
        self.GAME_OVER = 2

        self.card_snd = self.loader.load_sound("card.wav")

        self.goto_start()
Example #14
    def __init__(self,
                 model,
                 batch_size=100,
                 pretrain_iter=20000,
                 train_iter=8000,
                 sample_iter=10,
                 svhn_dir='svhn',
                 mnist_dir='mnist',
                 log_dir='logs',
                 sample_save_path='sample',
                 faces_dir='./wiki_dataset',
                 emoji_dir='./emoji_dataset',
                 model_save_path='model',
                 pretrained_model='model/svhn_model-10000',
                 test_model='model/dtn-'):

        self.model = model
        self.batch_size = batch_size
        self.pretrain_iter = pretrain_iter
        self.train_iter = train_iter
        self.sample_iter = sample_iter
        self.svhn_dir = svhn_dir
        self.mnist_dir = mnist_dir
        self.faces_dir = faces_dir
        self.emoji_dir = emoji_dir
        self.log_dir = log_dir
        self.sample_save_path = sample_save_path
        self.model_save_path = model_save_path
        self.pretrained_model = pretrained_model
        self.test_model = test_model
        self.config = tf.ConfigProto()
        self.config.gpu_options.allow_growth = True
        self.loader = Loader(mode=self.model.mode)
        self.save_points = [500, 1000, 2000, 3000, 6000]
Example #15
    def __init__(self,
                 sess,
                 image_size=48,
                 model_type="deep",
                 batch_size=25,
                 dataset="shape"):
        """Initialize the parameters for a Deep Visual Analogy network.

    Args:
      image_size: int, The size of width and height of input image
      model_type: string, The type of increment function ["add", "deep"]
      batch_size: int, The size of a batch [25]
      dataset: str, The name of dataset ["shape", ""]
    """
        self.sess = sess

        self.image_size = image_size
        self.model_type = model_type
        self.batch_size = batch_size
        self.dataset = dataset
        self.loader = Loader(self.dataset, self.batch_size)

        self.sample_dir = "samples"
        if not os.path.exists(self.sample_dir):
            os.makedirs(self.sample_dir)

        # parameters used to save a checkpoint
        self._attrs = ['batch_size', 'model_type', 'image_size']
        self.options = ['rotate', 'scale', 'xpos', 'ypos']

        self.build_model()
Example #16
 def prepare_detector(self) -> None:
     """
         Prepares the detector so that the given YOLO model can be fully used by the camera.

         Returns:
         -------------------------
         Nothing; this method only initializes instance variables.
     """
     for value in [
             self.config.model_weights_path, self.config.model_config_path,
             self.config.coco_names
     ]:
         if value is None:
             raise ValueError(
                 f"Encountered None value for {value}, loading yolo suspended"
             )
     loader = Loader(model_weights_path=self.config.model_weights_path,
                     model_config_path=self.config.model_config_path,
                     coco_names=self.config.coco_names)
     if self.config.use_tracker:
         self.tracker = Tracker.load_tracker_from_config(
             self.config.tracker_type)
         self.tracker.init_tracker(self.config.multi_tracker)
     if self.config.use_opencv:
         self.model, self.classes, self.output_layers, self.colors = loader.load_cv2_yolo(
         )
     else:
         self.session, self.classes, self.colors = loader.load_onnx_yolo()
     if self.config.inference_engine:
         self.set_target()
         self.set_backend()
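prepare_detector only touches the config fields read above; a minimal sketch of a compatible config object (field names taken from the snippet, paths and values purely illustrative):

    from types import SimpleNamespace

    config = SimpleNamespace(
        model_weights_path="yolov4.weights",   # illustrative paths
        model_config_path="yolov4.cfg",
        coco_names="coco.names",
        use_tracker=False,
        tracker_type=None,
        multi_tracker=None,
        use_opencv=True,
        inference_engine=False,
    )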
Example #17
def main():
    log_args(args)

    loader = Loader(
        deep_feature_dir=args["deep_feature_dir"],
        texture_feature_dir=args["texture_feature_dir"],
        clinical_feature_file=args["snuh_brmh_clinic_feature_file"],
        oneside=args["oneside"],
        label_file=args["label_file"])

    for data_type in args["data_type"]:
        if phase == "train":
            input_x, input_y = loader.get_data(data_type)
            run_args = dict(**args)
            trainer = Trainer(run_args)
            trainer.run(input_x, input_y)

        elif phase == "test":
            input_x, subjects = loader.get_data(data_type)
            run_args = dict(subjects=subjects,
                            input_x=input_x,
                            test_type=data_type,
                            **args)
            inferencer = Inferencer(run_args)
            inferencer.run()
Example #18
 def process(self):
     """
     Trains a different model for each row of a CSV file containing the hyper_params.
     After a model is trained, its results, model parameters, coefficient mask and hyperparameters are saved
     in analysis_folder. Models are numbered in the same order as they appear in hyper_params.csv.
     """
     rng = jax.random.PRNGKey(-1)
     rng_batch = npr.RandomState(5)
     for j, hyper_params in self.hyper_params_df.iterrows():
         hyper_params = hyper_params.to_dict()
         path_save = self.analysis_folder + '/' + str(j)
         if os.path.isfile(path_save):
             continue
         else:
             loader = Loader(hyper_params)
             train_set = hyper_params['training_set']
             X, dX, X_eval, dX_eval, t = self.data[train_set]
             loader.hyper_params['num_batches'], _ = divmod(
                 X.shape[0], loader.hyper_params['batch_size'])
             loader.hyper_params['x_dim'] = X.shape[1]
             model, hyper_params = loader.create_model(rng)
             trainer = Trainer(X, dX, X_eval, dX_eval, t, model,
                               hyper_params)
             results, params, coeff_mask = trainer.fit(rng_batch)
             with open(path_save, 'wb') as fp:
                 pickle.dump([results, params, coeff_mask, hyper_params],
                             fp)
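The docstring says the hyperparameters come from a CSV with one model per row; a hedged sketch of how hyper_params_df might be prepared (pandas is an assumption consistent with the .iterrows()/.to_dict() calls above, and the file name follows the docstring):

    import pandas as pd

    # One row per model to train; process() iterates it with .iterrows().
    hyper_params_df = pd.read_csv("hyper_params.csv")
    # self.data maps each row's 'training_set' key to (X, dX, X_eval, dX_eval, t), as unpacked above.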
Example #19
    def __init__(self):
        '''
        Basically parses command line flags, and sets other 'global' experiment
        variables.
        '''
        # FIXME: Need to ensure the equivalent of init_globals in Parameters
        self.params = Parameters()
        self.loader = Loader(self)
        self.stats = Stats(self.params)
        self.data = self.loader.load_data()

        # list of hashes of each of the used samples
        self.used_samples = []
        self.attack_data = defaultdict(list)

        # used for serwadda attack.
        # TODO: Turn this into a loop and check all arguments.
        if 'cmu' in self.params.dataset or 'mturk' in self.params.dataset:
            self.unique_features_data = self._get_unique_data()
            a = self.data['0'][0][0]
            b = self.data['0'][0][1]
            c = self.data['0'][0][2]
            assert isclose(a + c, b), 'test 1'
        else:
            # Run some other asserts here
            self.unique_features_data = None
Example #20
def load_news():
    """Load the news files from the storage bucket into the BigQuery tables.
    """
    logger = logging.getLogger('app.load_news')

    credentials, gcp_project_id = google.auth.default()
    gcs_client = storage.Client(project=gcp_project_id,
                                credentials=credentials)
    bigquery_client = bigquery.Client(project=gcp_project_id,
                                      credentials=credentials)

    # instantiate Loader class and load file to BigQuery
    loader = Loader(bigquery_client, gcs_client)
    dataset_id = 'news'

    articles_bucket = os.getenv('ARTICLES_BUCKET')
    articles_processed_bucket = os.getenv('ARTICLES_PROCESSED_BUCKET')
    articles_table_id = 'articles'
    print('loading news from bucket')
    logger.info('loading news from bucket')
    articles_load_job = loader.load_from_bucket(articles_bucket,
                                                articles_processed_bucket,
                                                dataset_id, articles_table_id)

    return "Loaded news data to BigQuery", 200
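load_news reads its bucket names from the environment; a hedged sketch of the configuration it expects before being called (bucket names are placeholders, and Application Default Credentials are assumed to be available for google.auth.default()):

    import os

    # Placeholder names; real deployments set these in the service environment.
    os.environ["ARTICLES_BUCKET"] = "my-articles-bucket"
    os.environ["ARTICLES_PROCESSED_BUCKET"] = "my-articles-processed-bucket"

    body, status = load_news()  # returns ("Loaded news data to BigQuery", 200) on success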
Example #21
    def setUp(self):
        unittest.TestCase.setUp(self)

        agentFactory = PoleAgentFactory()
        environmentFactory = PoleEnvironmentFactory()
        trainerFactory = PoleTrainerFactory()
        buildParameterFactory = PoleBuildParameterFactory()
        store = Store(self.dbPath)
        logger = MyLogger(console_print=True)

        self.builder = Builder(trainerFactory, agentFactory,
                               environmentFactory, store, logger)

        self.buildParameters = []
        for k1 in range(2):
            nIntervalSave = 3
            nEpoch = 5
            self.buildParameters.append(
                PoleBuildParameter(int(nIntervalSave),
                                   int(nEpoch),
                                   label="test" + str(k1)))

        for agentClass in ("agent002", "agent003", "agent004"):
            self.buildParameters.append(
                PoleBuildParameter(int(nIntervalSave),
                                   int(nEpoch),
                                   agentClass=agentClass,
                                   label="test " + agentClass))

        self.loader = Loader(agentFactory, buildParameterFactory,
                             environmentFactory, store)
Example #22
def load_tracking():
    """Load the tracking files from the storage bucket into the BigQuery tables.
    """
    credentials, gcp_project_id = google.auth.default()
    bigquery_client = bigquery.Client(project=gcp_project_id,
                                      credentials=credentials)
    gcs_client = storage.Client(project=gcp_project_id,
                                credentials=credentials)

    loader = Loader(bigquery_client, gcs_client)
    dataset_id = 'tracking'

    impressions_bucket = os.getenv('IMPRESSIONS_BUCKET')
    impressions_processed_bucket = os.getenv('IMPRESSIONS_PROCESSED_BUCKET')
    impressions_table_id = 'impressions'
    impressions_load_job = loader.load_from_bucket(
        impressions_bucket, impressions_processed_bucket, dataset_id,
        impressions_table_id)

    clicks_bucket = os.getenv('CLICKS_BUCKET')
    clicks_processed_bucket = os.getenv('CLICKS_PROCESSED_BUCKET')
    clicks_table_id = 'clicks'
    clicks_load_job = loader.load_from_bucket(clicks_bucket,
                                              clicks_processed_bucket,
                                              dataset_id, clicks_table_id)

    return "Loaded tracking data to BigQuery", 200
Example #23
def test(model_name):
    ##### Preprocessing ####
    loader = Loader()
    x_test, test_id_list = loader.read_test_data(testing_id, testing_folder)
    x_peer, peer_id_list = loader.read_test_data(peer_review_id,
                                                 peer_review_folder)

    idx_to_word = np.load('./ixtoword.npy').tolist()

    n_video_lstm_step = x_test.shape[1]
    dim_image = x_test.shape[2]

    model = S2VT(dim_image=dim_image,
                 n_words=len(idx_to_word),
                 dim_hidden=dim_hidden,
                 n_video_lstm_step=n_video_lstm_step,
                 n_caption_lstm_step=n_caption_lstm_step)

    tf_video, tf_generated_words = model.build_model(is_training=False)

    ##### Start ####
    sess = tf.Session()
    saver = tf.train.Saver()
    saver.restore(sess, os.path.join(model_path, model_name))

    test_generated_words_index = sess.run(tf_generated_words,
                                          feed_dict={tf_video: x_test})
    write_file(test_output_file_name, idx_to_word, test_generated_words_index,
               test_id_list)

    peer_generated_words_index = sess.run(tf_generated_words,
                                          feed_dict={tf_video: x_peer})
    write_file(peer_output_file_name, idx_to_word, peer_generated_words_index,
               peer_id_list)
Example #24
    def gilletMiller(self, runFromJson, input=None):
        runFromJson = True

        with open('request.json') as jsonFile:
            data = jsonFile.read()
        options = json.loads(data)

        if runFromJson == False:
            pass

        print("capacity_of_the_bus = ", options["capacity_of_the_bus"])
        points = input["points"] if runFromJson != True else options["points"]

        full_distance_matrix = input[
            "full_distance_matrix"] if runFromJson != True else options[
                "full_distance_matrix"]

        full_distance_matrix = numpy.array(full_distance_matrix)

        for i in range(full_distance_matrix.shape[0]):
            for j in range(i, full_distance_matrix.shape[1]):
                full_distance_matrix[i, j] = full_distance_matrix[j, i]

        pupils_on_each_bus_stop = input[
            "pupils_on_each_bus_stop"] if runFromJson != True else options[
                "pupils_on_each_bus_stop"]

        capacity_of_the_bus = input[
            "capacity_of_the_bus"] if runFromJson != True else options[
                "capacity_of_the_bus"]

        constraint = input["CONSTRAINT"]
        minimize_number_of_buses = False

        loader = Loader("Running Heuristics...", "Heuristics Done!",
                        0.1).start()
        solution = gillet_miller_init(points=points,
                                      D=full_distance_matrix,
                                      d=pupils_on_each_bus_stop,
                                      C=capacity_of_the_bus,
                                      L=constraint,
                                      minimize_K=minimize_number_of_buses)
        loader.stop()

        ROUTES = []

        loader.__init__("Preparing Response", "Response ready!!", timeout=0.1)
        loader.start()

        for route_idx, route in enumerate(sol2routes(solution)):
            print("Route #%d : %s" % (route_idx + 1, route))
            ROUTES.append({"routeId": route_idx + 1, "stops": route})

        loader.stop()

        with open("response.json", "w") as outfile:
            json.dump({"success": True, "data": {"routes": ROUTES}}, outfile)

        return ROUTES
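gilletMiller reads most of its inputs from request.json; a minimal sketch of a file consistent with the keys read above (all values illustrative; note that "CONSTRAINT" is taken from the input argument, not from the file):

    import json

    request_payload = {
        "capacity_of_the_bus": 40,
        "points": [[0, 0], [1, 2], [3, 1]],             # depot plus stops
        "full_distance_matrix": [[0, 2, 3],
                                 [2, 0, 1],
                                 [3, 1, 0]],
        "pupils_on_each_bus_stop": [0, 5, 8],
    }
    with open("request.json", "w") as fp:
        json.dump(request_payload, fp)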
Example #25
 def __init__(self):
     self.database = None
     self.valid_types = ['photo', 'user', 'poi']
     self.valid_txt_models = ['tf', 'df', 'tf-idf']
     self.valid_vis_models = ['CM', 'CM3x3', 'CN', 'CN3x3',
             'CSD', 'GLRLM', 'GLRLM3x3', 'HOG', 'LBP', 'LBP3x3', 'ALL']
     self.loader = Loader()
     self.io()
Example #26
 def GET(self):
     i = web.input(status=None, match_type=None)
     status = i.status or None
     match_type = i.match_type or None
     loader = Loader()
     items = loader.get_items(status=status, match_type=match_type)
     summary = loader.get_summary()
     return render.index(items, summary)
Example #27
    def __init__(self):
        FSM.__init__(self, "Core Game Control")
        self.loader = Loader(self.enterScenario)

        self.accept('f1', base.toggleWireframe)
        self.accept('f2', base.toggleTexture)
        self.accept('f3', self.toggleDebug)
        self.accept('r', self.clearScene)
Example #28
    def unzip(cls, path):
        """
            unzip the zip file
        """

        loader = Loader()

        loader.unzip(path)
Example #29
    def get_log(self):

        ld = Loader()
        course_initialization = Course_Initialization()

        log_data = ld.load_csv(course_initialization.config.get_log_csv_path())

        return log_data
Example #30
    def _load_poly(self, gl):
        vbo = Loader.load_vbo(self.vbo_path)
        ibo = Loader.load_ibo(self.ibo_path)

        normal = self.compute_normal(gl, vbo[:, 0:4], ibo)[:, 0:3]
        normal = np.nan_to_num(normal)
        vbo = np.hstack((vbo, normal))
        return vbo, ibo
Example #31
 def __init__(self):
     Interface.__init__(self)
     Loader.__init__(self)
     Resizer.__init__(self)
     Processor.__init__(self)
     Colorizer.__init__(self)
     Displayer.__init__(self)
     
     getattr(self, self.args.command)()
Example #32
class Test(unittest.TestCase):
    def setUp(self):
        self.loader = Loader()

    def test_correctly_formatted_input(self):
        # use the reload object functionality to load the simulation
        # this way the user doesn't have to select the correct file for each simulation
        self.loader.filename = 'ballformation.sim'
        # the visual library causes tracebacks on exit (on IDLE)
        scene.visible = 0
        objects, errors = self.loader.loadObjects(True)
        self.assertEqual(Loader.OBJECTS_LOADED, errors)
        for name in ('Ball1', 'Ball2', 'Ball3', 'Ball4', 'Ball5', 'Ball6'):
            self.assertIn(name, objects)

    def test_empty_file(self):
        scene.visible = 0
        self.loader.filename = 'empty_file.sim'
        objects, errors = self.loader.loadObjects(True)
        self.assertEqual(0, errors)
        self.assertEqual(0, len(objects))

    def test_invalid_input(self):
        # use the reload object functionality to load the simulation
        # this way the user doesn't have to select the correct file for each simulation
        self.loader.filename = 'broken_file.sim'
        # the visual library causes tracebacks on exit
        scene.visible = 0
        objects, errors = self.loader.loadObjects(True)
        self.assertEqual(Loader.ERRORS_OCCURRED, errors)
        # check the errors from the errorlog-file

    def test_nonexistent_filename(self):
        # possible if someone loads a file, then removes the file from the
        # computer and hits reload simulation
        scene.visible = 0
        self.loader.filename = 'nonexistent_file.sim'
        objects, errors = self.loader.loadObjects(True)
        self.assertEqual(Loader.FILE_NOT_OPENED, errors)
        self.assertEqual(None, objects)
Example #33
    def get_survey(self):

        ld = Loader()
        course_initialization = Course_Initialization()

        survey_data = ld.load_csv(
            course_initialization.config.get_survey_csv_path())

        return survey_data
Example #34
    def pass_or_not(self):

        ld = Loader()
        course_initialization = Course_Initialization()

        student_pass_data = ld.load_csv(
            course_initialization.config.get_student_pass_csv_path())

        return student_pass_data
Example #35
def output_cluster(inputfiles):
    """Use already trained model to output clustered data."""
    try:
        model_dir = os.path.join(args.output_dir, 'models', 'clustered')
        data_dir = os.path.join(args.output_dir, 'clustered')
        if os.path.exists(data_dir):
            shutil.rmtree(data_dir)
        os.mkdir(data_dir)

        tf.reset_default_graph()
        saucie = SAUCIE(None, restore_folder=model_dir)

        print("Finding all binary codes")
        all_codes = {}
        for counter, f in enumerate(inputfiles):
            x = get_data(f)
            load = Loader(data=x, shuffle=False)

            acts = saucie.get_layer(load, 'layer_c')
            acts = acts / acts.max()
            binarized = np.where(acts > .000001, 1, 0)

            unique_rows, counts = np.unique(binarized, axis=0, return_counts=True)
            for unique_row in unique_rows:
                unique_row = tuple(unique_row.tolist())
                if unique_row not in all_codes:
                    all_codes[unique_row] = len(all_codes)

        print("Found {} clusters".format(len(all_codes)))

        print("Starting to output {} clustered files...".format(len(inputfiles)))
        for counter, f in enumerate(inputfiles):
            fname = os.path.split(f)[-1]
            print("Outputing file {}".format(counter))
            x = get_data(f)
            load = Loader(data=x, shuffle=False)
            acts = saucie.get_layer(load, 'layer_c')
            acts = acts / acts.max()
            binarized = np.where(acts > .000001, 1, 0)

            clusters = -1 * np.ones(x.shape[0])
            for code in all_codes:
                rows_equal_to_this_code = np.where(np.all(binarized == code, axis=1))[0]
                clusters[rows_equal_to_this_code] = all_codes[code]

            embeddings = saucie.get_layer(load, 'embeddings')

            rawdata = get_data(f, return_rawfile=True)
            outcols = rawdata.columns.tolist() + ['Cluster', 'Embedding_SAUCIE1', 'Embedding_SAUCIE2']
            rawdata = pd.concat([rawdata, pd.DataFrame(clusters), pd.DataFrame(embeddings[:, 0]), pd.DataFrame(embeddings[:, 1])], axis=1)
            outfile = os.path.join(data_dir, fname)
            fcswrite.write_fcs(outfile, outcols, rawdata)

    except Exception as ex:
        # if it didn't run all the way through, clean everything up and remove it
        shutil.rmtree(data_dir)
        raise ex
Example #36
    def test_blank(self):
        """A slot that has nothing is ignored."""
        self.loader.data['fighter'] = yaml.load("""
            Peach:
              00:
        """)
        _singles_as_list(self.loader.data)
        Loader.load(self.loader)

        self.assertFalse(os.path.exists('destination/fighter/Peach'))
Example #37
    def _load_tasks(self, tasks_dir='.'):
        from loader import Loader
        ld = Loader(tasks_dir)
        if ld.load_tasks() > 0:
            self._tasks = self._tasks + ld.tasks()

            for t in self._tasks:
                q = multiprocessing.Queue()
                wrapper = TaskWrapper(t, q)
                self._wrappers[str(wrapper)] = {'proc': None, 'queue': q, 'wrapper': wrapper}
Example #38
    def test_load(self):
        """Given a generic source name (no extension), files with correct 
        filetypes are copied from source to destination and renamed."""
        Loader.load(self.loader)

        self.assertTrue(os.path.exists(
            'destination/fighter/Peach/FitPeach00.pcs'
        ))
        self.assertTrue(os.path.exists(
            'destination/fighter/Peach/FitPeach00.pac'
        ))
Example #39
    def test_spaces(self):
        """Handle source files with spaces in their directory and file names."""
        self.loader.data['fighter'] = yaml.load("""
            Peach:
              00: Peach as Rosalina/Rosalina skin
        """)
        _singles_as_list(self.loader.data)
        Loader.load(self.loader)

        self.assertTrue(os.path.exists(
            'destination/fighter/Peach/FitPeach00.pcs'
        ))
Example #40
    def parseCommand(self, usr, msg, chan):

        plgname = msg.split()[0].replace("!", "")
        plugins = Loader()
        for plugin in plugins.load():
            plg = plugins.get(plugin)

            if msg.startswith(("!" + plugin["name"])):
                args = msg.replace(("!" + plugin["name"]), "")
                self.current_chan = chan
                result = plg.do(args, coriolis=self)
                if result:
                    self.msg(chan, result)
Example #41
def main(argv):
	global PYBICO_VERBOSE

	try:
		opts, args = getopt.getopt(argv, "hvl:s:i:e:u:p:", ["help"])
	except getopt.GetoptError as err:
		print(str(err))
		usage()
		sys.exit(2)

	PYBICO_VERBOSE = False
	load_format = "txt"
	save_format = "xlsx"
	load_filename = ""
	save_filename = ""
	password_path = ""
	user = ""

	for o, a in opts:
		if o == "-v":
			PYBICO_VERBOSE = True
		elif o in ("-h", "--help"):
			usage()
			sys.exit()
		elif o == "-u":
			user = a
		elif o == "-p":
			password_path = a
		elif o == "-l":
			load_filename = a
		elif o == "-s":
			save_filename = a
		elif o == "-i":
			load_format = a
		elif o == "-e":
			save_format = a
		else:
			assert False, "unhandled option"

	f = open(password_path, 'r')
	password = f.read().strip('\n')

	db = DB(user, password)
	if load_filename != "":
		l = Loader()
		data = l.load(load_format, load_filename)
		db.add(data)
	if save_filename != "":
		data = db.get()
		s = Saver()
		s.save(data, save_format, save_filename)
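Given the getopt option string above ("hvl:s:i:e:u:p:"), a hedged sketch of driving main directly (user and file names are hypothetical) could be:

    # -u user, -p password file, -l file to load, -i its format, -s file to save, -e its format
    main(["-u", "alice", "-p", "password.txt",
          "-l", "books.txt", "-i", "txt",
          "-s", "report.xlsx", "-e", "xlsx"])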
Example #42
    def test_destination_list(self):
        """When list of destinations are given, copies are made to each one."""
        self.loader.data['destination'] = yaml.load("""
              - destination
              - another_fighter
        """)
        _singles_as_list(self.loader.data)
        Loader.load(self.loader)

        for destination in self.loader.data['destination']:
            self.assertTrue(os.path.exists(
                destination + '/fighter/Peach/FitPeach00.pcs'
            ))
            self.assertTrue(os.path.exists(
                destination + '/fighter/Peach/FitPeach00.pac'
            ))
Example #43
    def merge(self, config_src, signal_update=False, namespace=None,
                    monitor=False, do_subs=True):
        """ Merge configs

        :param config_src:  URI(s) or dictionaries to load config(s) from to
                            be merged into the main config
        :type config_src:   a string or dictionary or list of strings and/or
                            dictionaries

        """
        namespace = self._get_namespace(namespace)
        if self._configs.get(namespace, None) is None:
            raise ValueError('no config to merge with!')
        if not isinstance(config_src, list):
            config_src = [config_src]
        for config in config_src:
            if isinstance(config, basestring):
                if monitor:
                    self.start_src_monitor(config)
                config = Loader.load(config)
            self._configs[namespace]._merge(bunchify(config))
            if do_subs:
                self._do_subs(namespace)
        if signal_update:
            self.signal_update(namespace)
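Per the docstring, config_src may be a URI string, a dictionary, or a list mixing both; a hedged usage sketch (the config-manager instance and the file path are hypothetical):

    # Merge a file-based config and an inline override into the already-loaded namespace.
    config_manager.merge(
        ["/etc/myapp/overrides.yaml", {"debug": True}],
        signal_update=True,
    )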
Example #44
    def load(self, config_src, signal_update=True, namespace=None,
                   monitor=False, sub_key=None):
        """ Load config from source(s)

        :param config_src:  URI(s) or dictionaries to load the config from. If
                            config_src is a list, then the first config is
                            loaded as the main config with subsequent configs
                            merged into it.
        :type config_src:   a string or dictionary or list of strings and/or
                            dictionaries

        """
        namespace = self._get_namespace(namespace)
        merge_configs = []
        if isinstance(config_src, list):
            merge_configs = config_src[1:]
            config_src = config_src[0]
        if isinstance(config_src, basestring):
            if monitor:
                self.start_src_monitor(config_src)
            config_src = Loader.load(config_src)
        self._configs[namespace] = Config(bunchify(config_src))
        self.merge(merge_configs, False, namespace, monitor, False)
        self._sub_keys[namespace] = sub_key
        self._do_subs(namespace)
        self._configs[namespace]._freeze()
        if signal_update:
            self.signal_update(namespace)
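load accepts the same kinds of sources; when config_src is a list, the first entry becomes the main config and the rest are merged into it. A hedged sketch (instance and paths hypothetical):

    # The first source is loaded as the main config; the second is merged over it.
    config_manager.load(
        ["/etc/myapp/defaults.yaml", {"log_level": "INFO"}],
        namespace="myapp",
    )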
Example #45
    def test_single_filetype_present(self):
        """When not all the default filetypes are present for a generic path,
        takes all of those present and ignores the lack of the rest."""
        self.loader.data['fighter'] = yaml.load("""
            Peach:
              00: Peach_pcs/Rosalina
        """)
        _singles_as_list(self.loader.data)
        Loader.load(self.loader)

        self.assertTrue(os.path.exists(
            'destination/fighter/Peach/FitPeach00.pcs'
        ))
        self.assertFalse(os.path.exists(
            'destination/fighter/Peach/FitPeach00.pac'
        ))
Example #46
 def __init__(self):
     super().__init__()
     self.loader = Loader()
     self.shooter = Shooter()
     self.drive = Drive(config.robotDrive, config.leftJoy, config.hsButton,
                        config.alignButton)
     self.componets = [ self.loader, self.shooter, self.drive ]
Example #47
 def __init__(self, sprite = None):
     pygame.sprite.Sprite.__init__(self)
     self.image = Loader().load(name = sprite, color_key = True)
     self.rect = self.image.get_rect()
     self.rect.centerx = WIDTH - (WIDTH/5)
     self.rect.centery = HEIGHT / 2
     self.speed = [0.5, -0.5]
Example #48
    def test_explicit(self):
        """Source files with a specified extension are copied, not renamed."""
        self.loader.data['fighter'] = yaml.load("""
            Peach:
              00: Peach/Rosalina.pcs
              01: Peach/Rosalina.pac
        """)
        _singles_as_list(self.loader.data)
        Loader.load(self.loader)

        self.assertTrue(os.path.exists(
            'destination/fighter/Peach/Rosalina.pcs'
        ))
        self.assertTrue(os.path.exists(
            'destination/fighter/Peach/Rosalina.pac'
        ))
Example #49
 def train(self):
     for (cate_name, cate_no) in DC_CATEGORY_NO_MAPPING.items():
         X_train = Loader.load_vectors([_ for _ in self._training_data if _._label == cate_no])
         if not X_train:
             continue
         X_train = np.array(np.mat(';'.join(X_train)))
         self.classifiers[cate_name].fit(X_train)
Example #50
    def test_slot_list(self):
        """When a slot has a list, the list is handled properly."""
        self.loader.data['fighter'] = yaml.load("""
            Peach:
              00:
                - Peach/Rosalina
        """)
        _singles_as_list(self.loader.data)
        Loader.load(self.loader)

        self.assertTrue(os.path.exists(os.path.join(
            self.loader.data['destination'][0], 'fighter', 'Peach', 'FitPeach00.pcs'
        )))
        self.assertTrue(os.path.exists(os.path.join(
            self.loader.data['destination'][0], 'fighter', 'Peach', 'FitPeach00.pac'
        )))
Example #51
  def __init__(self, sess, image_size=48, model_type="deep",
               batch_size=25, dataset="shape"):
    """Initialize the parameters for a Deep Visual Analogy network.

    Args:
      image_size: int, The size of width and height of input image
      model_type: string, The type of increment function ["add", "deep"]
      batch_size: int, The size of a batch [25]
      dataset: str, The name of dataset ["shape", ""]
    """
    self.sess = sess

    self.image_size = image_size
    self.model_type = model_type
    self.batch_size = batch_size
    self.dataset = dataset
    self.loader = Loader(self.dataset, self.batch_size)

    self.sample_dir = "samples"
    if not os.path.exists(self.sample_dir):
      os.makedirs(self.sample_dir)

    # parameters used to save a checkpoint
    self._attrs = ['batch_size', 'model_type', 'image_size']
    self.options = ['rotate', 'scale', 'xpos', 'ypos']

    self.build_model()
Example #52
 def __init__(self):
     self.loader = Loader()
     self.img = self.loader.load_image("mouse.png",True)
     self.pos = (400,300)
     self.to_pos = self.pos
     self.moving = False
     self.visible = False
Example #53
 def __init__(self, x_position = 0, sprite = None):
     pygame.sprite.Sprite.__init__(self)
     self.image = Loader().load(name = sprite, color_key = True)
     self.rect = self.image.get_rect()
     self.rect.centerx = x_position
     self.rect.centery = HEIGHT - 50
     self.speed = 0.5
Example #54
    def __init__(self):
        self.loader = Loader()
        self.desk = self.loader.load_image("back.png")
        self.back = self.desk.copy()

        self.cards1 = self.loader.load_image("cards1.png",True)
        self.cards2 = self.loader.load_image("cards2.png",True)
        
        self.card_back = self.loader.load_image("card.png",True)
        self.card_images = []
        for i in range(18):
            self.card_images.append(self.loader.load_image("img%d.png" % (i+1)))

        self.gfxcards = []
        self.gfxcards_hidden = []
        self.delay = 0

        self.title_fnt = self.loader.load_font("KLEPTOMA.TTF", 50) 
        self.text_fnt = self.loader.load_font("scribble.TTF", 24) 
        self.small_text_fnt = self.loader.load_font("scribble.TTF", 15) 

        self.START_SCREEN = 0
        self.GAME_PLAY = 1
        self.GAME_OVER = 2

        self.card_snd = self.loader.load_sound("card.wav")

        self.goto_start()
Example #55
    def __init__(self):
        FSM.__init__(self, "Core Game Control")
        self.loader = Loader(self.enterScenario)

        self.accept('f1', base.toggleWireframe)
        self.accept('f2', base.toggleTexture)
        self.accept('f3', self.toggleDebug)
        self.accept('r', self.clearScene)
Example #56
    def test_reserved(self):
        """Slots marked as RESERVED are ignored."""
        self.loader.data['fighter'] = yaml.load("""
            Peach:
              00: RESERVED
              01: Peach/Rosalina
        """)
        _singles_as_list(self.loader.data)
        Loader.load(self.loader)

        self.assertFalse(glob.glob('destination/fighter/Peach/*00*.*'))
        self.assertTrue(os.path.exists(
            'destination/fighter/Peach/FitPeach01.pcs'
        ))
        self.assertTrue(os.path.exists(
            'destination/fighter/Peach/FitPeach01.pac'
        ))
Example #57
class RobotMouse(object):
    def __init__(self):
        self.loader = Loader()
        self.img = self.loader.load_image("mouse.png",True)
        self.pos = (400,300)
        self.to_pos = self.pos
        self.moving = False
        self.visible = False
        
    def hide(self):
        self.visible = False
       
    def is_moving(self):
        return self.moving
               
    def goto(self,pos):
        self.to_pos = pos
        self.visible = True
        self.moving = True
        
    def _speed(self,a,b):
        f = min(a,b)
        t = max(a,b)
        s = (t-f)/2        
        return min(s,10)
        
    def _update(self):
        if not self.moving:
            return
        
        cx,cy = self.pos
        tx,ty = self.to_pos
        
        if cx<tx:
            cx+=self._speed(cx,tx)
        elif cx>tx:
            cx-=self._speed(cx,tx)

        if cy<ty:
            cy+=self._speed(cy,ty)
        elif cy>ty:
            cy-=self._speed(cy,ty)

        if abs(cx-tx) <= 2:
            cx = tx
        if abs(cy-ty) <= 2:
            cy = ty

        self.pos = (cx,cy)

        if cx == tx and cy == ty:
            self.moving = False

    def draw(self,dest):
        if not self.visible:
            return
        self._update()
        dest.blit(self.img,self.pos)
Example #58
    def test_stage(self):
        """Stage textures (.pac and .rel) placed in the right places."""
        self.loader.data['source'] = yaml.load("""
              stage: source/stage
        """)
        self.loader.data['stage'] = yaml.load("""
            melee:
              PALUTENA: Palutena/Clocktower
        """)
        _singles_as_list(self.loader.data)
        Loader.load(self.loader)

        self.assertTrue(os.path.exists(
            'destination/stage/melee/STGPALUTENA.PAC'
        ))
        self.assertTrue(os.path.exists(
            'destination/module/st_palutena.rel'
        ))
Example #59
    def layerControl(self):
        if len(self.allVectorLayers) > 1:
            self.targetLayer = self.allVectorLayers[self.dlg.cmbTargetLayer.currentIndex()]
            sourceLayer = self.allVectorLayers[self.dlg.cmbSourceLayer.currentIndex()]

            openedAttrWindow = False  # Used to detect open attribute windows. During the loading operation, open attribute windows can crash QGIS (possibly a bug),
            # so they must be closed before starting.
            for dialog in QApplication.instance().allWidgets():
                # On some machines, reading every Qt widget's objectName in QGIS raises an error, so wrap it in try-except.
                try:
                    if dialog.objectName() in [u'QgsAttributeTableDialog', u'AttributeTable']:
                        openedAttrWindow = True
                except:
                    pass

            if openedAttrWindow:
                QMessageBox.warning(None,u'Notification', u'Please close all attribute windows to start the process.')

            else:
                #checking target and source layers must not be in editing mode.
                if not self.targetLayer.isEditable() and not sourceLayer.isEditable():
                    # checking for targetLayer editing capability.
                    isEditable = self.targetLayer.dataProvider().capabilities() & QgsVectorDataProvider.AddFeatures
                    if isEditable:
                        # checking targetLayer and sourceLayer are not same
                        if self.targetLayer.extent() != sourceLayer.extent() or self.targetLayer.publicSource() != sourceLayer.publicSource():
                            # checking layers geometry types
                            if self.targetLayer.geometryType() == sourceLayer.geometryType():
                                self.loader = Loader(targetLayer=self.targetLayer,sourceLayer=sourceLayer)
                                self.loader.setOptions(onlySelected=self.dlg.checkBox.isChecked())

                                self.dlg.btnStart.setEnabled(False)
                                self.dlg.btnStop.setEnabled(True)
                                self.dlg.btnStop.clicked.connect(self.loader.stop)
                                self.iface.mapCanvas().setRenderFlag(False)  # QGIS cannot render dramatic changes in the target layer's feature count and may crash, so rendering is stopped before starting.

                                QObject.connect(self.loader, SIGNAL("progressLenght"), self.setProgressLength)
                                QObject.connect(self.loader, SIGNAL("progress"), self.setProgress)
                                QObject.connect(self.loader, SIGNAL("error"), self.error)
                                QObject.connect(self.loader, SIGNAL("finished()"), self.done)
                                QObject.connect(self.loader, SIGNAL('status'), self.setStatus)
                                # QObject.connect(self.loader, SIGNAL('insertFeature'), self.insert)
                                self.loader.start()
                                self.start_time = timeit.default_timer()#for calculating total run time

                            else:
                                QMessageBox.warning(self.dlg, u'Error',
                                                     u'The layers geometry types have to be same to start the process.')
                        else:
                            QMessageBox.warning(self.dlg, u'Error', u'Target Layer and Source Layer must be different.')
                    else:
                        QMessageBox.warning(self.dlg, u'Error', u'Target Layer does not support editing.')
                else:
                    QMessageBox.warning(self.dlg, u'Error', u'Target Layer and Source Layer must not be in editing mode.')
        else:
            QMessageBox.warning(self.dlg, u'Error', u'There must be at least two vector layers added in QGIS canvas.')
Example #60
 def __init__(self):
     self.loader = Loader()
     self.img = self.loader.load_image("mouse.png",True)
     self.moving = False
     self.visible = False
     
     self.pos = vec2d(400.0,300.0)
     self.to_pos = vec2d(400.0,300.0)
     self.heading = vec2d(1.0,0.0) 
     self.dist = 0