Example #1
def __init__(self):
    # Create the main window and preload both cursor states (idle / click).
    self.screen = pygame.display.set_mode(SCREEN_SIZE, 0, 32)
    self.cursors = [
        pygame.image.load(
            get_file_path('img/ui/cursor.png')).convert_alpha(),
        pygame.image.load(
            get_file_path('img/ui/cursor_click.png')).convert_alpha()]
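Every example on this page calls a project-local get_file_path helper instead of hard-coding absolute paths. The real implementations differ per project and are not shown here; a minimal sketch of what such a helper typically looks like, assuming resources live relative to the module's own directory:

import os

# Hypothetical helper for context only: resolves a resource path relative
# to this file's directory so the code works from any working directory.
def get_file_path(relative_path):
    base_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(base_dir, relative_path)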
Example #2
def __init__(self, fielddata):
    BaseModel.__init__(self)
    self.footholds = []
    self.footholds_count = 0
    # The background is opaque; the tile sheet needs per-pixel alpha.
    self.background_img = pygame.image.load(
        get_file_path(fielddata['background_img'])).convert()
    self.tiles_img = pygame.image.load(
        get_file_path(fielddata['tiles_img'])).convert_alpha()
    # Assemble each foothold surface by blitting tiles left to right.
    for fh in fielddata['footholds']:
        tmp = {
            'pos': fh['pos'], 'width': fh['size'][0],
            'image': pygame.Surface(fh['size'], SRCALPHA)}
        x = 0
        for t in fh['tiles']:
            repeat, sub_pos, rand_tile = 1, (0, 0), False
            if 'repeat' in t:
                repeat = t['repeat']
            if isinstance(t['sub_pos'], list):
                rand_tile = True  # several variants: pick one at random
            for _ in range(repeat):
                sub_pos = (t['sub_pos'][randindex(len(t['sub_pos']))]
                           if rand_tile else t['sub_pos'])
                x = self._build_image(tmp['image'], x, sub_pos, t['size'])
        self.footholds.append(tmp)
        self.footholds_count += 1
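The constructor delegates to a _build_image helper that the excerpt does not show (randindex is presumably a thin wrapper over random.randrange). A plausible sketch of the helper, assuming it copies one tile from the shared tile sheet onto the foothold surface and returns the advanced x offset; the behavior here is inferred from the call site, not confirmed:

def _build_image(self, target, x, sub_pos, size):
    # Cut one tile out of the tile sheet and blit it at the current offset.
    tile = self.tiles_img.subsurface((sub_pos, size))
    target.blit(tile, (x, 0))
    # Advance the cursor by the tile width for the next tile.
    return x + size[0]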
Example #3
def runModel(STORE_PATH,
             model_func=build_model,
             EPOCHS=10000,
             patience=30,
             batch_size=5000,
             verbose=0,
             feature_func=getFeatures,
             validation_split=0.2,
             tbText=None):
    # Use None instead of a mutable default ([]): a shared default list
    # would accumulate callbacks across calls.
    if tbText is None:
        tbText = []
    configText = "EPOCHS={}, patience={}, batch_size={}, verbose={}, validation_split={}".format(
        EPOCHS, patience, batch_size, verbose, validation_split)
    tbText.append(
        lambda: tf.summary.text('Config', tf.convert_to_tensor(configText)))
    data = datautil.getData()
    features = feature_func()
    data = datautil.normalize(features, data)
    training, test = datautil.datasets(data, tbText=tbText)

    tbText.append(lambda: tf.summary.text('Features',
                                          tf.convert_to_tensor(str(features))))
    x_train, y_train, x_test, y_test, x_train_ordered = getAllXYs(
        training, test, features)
    model = model_func((x_train.shape[1], ))

    model, bestModel = trainModel(model,
                                  x_train,
                                  y_train,
                                  STORE_PATH,
                                  EPOCHS=EPOCHS,
                                  patience=patience,
                                  batch_size=batch_size,
                                  verbose=verbose,
                                  validation_split=validation_split)

    lossFinalStr = util.trainingTestingLoss(model, x_test, y_test,
                                            "Final Model")[0]
    print(lossFinalStr)
    tbText.append(
        lambda: tf.summary.text('Testing Loss: {}'.format("Final Model"),
                                tf.convert_to_tensor(lossFinalStr)))

    lossStr = util.trainingTestingLoss(bestModel, x_test, y_test,
                                       "Best Model")[0]
    print(lossStr)
    tbText.append(
        lambda: tf.summary.text('Testing Loss: {}'.format("Best Model"),
                                tf.convert_to_tensor(lossStr)))

    test = util.generatePredictions(bestModel, test, x_test, features)
    test.to_csv(util.get_file_path(STORE_PATH, 'test', 'csv', 'csv'))

    training = util.generatePredictions(bestModel, training, x_train_ordered,
                                        features)
    training.to_csv(util.get_file_path(STORE_PATH, 'training', 'csv', 'csv'))
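A hypothetical invocation sketch, assuming build_model, getFeatures and the datautil/util modules referenced above are importable from the surrounding project (the path and settings below are illustrative, not from the source):

if __name__ == '__main__':
    # Train with shorter settings than the defaults; STORE_PATH is where
    # checkpoints and the two prediction CSVs are written.
    runModel('./runs/exp01', EPOCHS=500, patience=10, batch_size=1024)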
Example #4
def __init__(self, source_table_batch: SourceTableBatch):
    # Cache the batch, its table and source, plus the derived file name,
    # file location and Snowflake stage for this source.
    self.source_table_batch = source_table_batch
    self.source_table = source_table_batch.source_table
    self.source = source_table_batch.source_table.source
    self.file_name = get_file_name(self.source_table_batch)
    self.file_location = get_file_path(self.source_table_batch)
    self.stage_name = config.snowflake_stage_name[self.source.source]
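The stage lookup on the last line implies that config.snowflake_stage_name is a mapping from source identifier to Snowflake stage name. A hypothetical shape for illustration only; the keys and stage names are invented:

# In config.py (illustrative only):
snowflake_stage_name = {
    'orders_db': 'ORDERS_STAGE',
    'events_api': 'EVENTS_STAGE',
}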
Example #5
def __init__(self):
    Group.__init__(self)
    BaseModel.__init__(self)
    # Load the money sprite sheet and cut four animation frames per size.
    _money_image = pygame.image.load(
        get_file_path('img/item/money.png')).convert_alpha()
    _subsurface_data = [(25, 24), (25, 24), (33, 30), (32, 31)]
    _y = 0
    self.money_images = []
    for _sub_data in _subsurface_data:
        _tmp_list = [_money_image.subsurface(
            (i*_sub_data[0], _y), _sub_data) for i in range(4)]
        _y += _sub_data[1]
        self.money_images.append(_tmp_list)
    _item_rare_image = pygame.image.load(
        get_file_path('img/item/rare_42x44.png')).convert_alpha()
    self.item_rare_images = [_item_rare_image.subsurface(
        (i*ITEM_RARE_SIZE[0], 0), ITEM_RARE_SIZE) for i in range(6)]
    # Load item icons; for now only a single icon image is loaded.
    self.item_icons = pygame.image.load(
        get_file_path('img/item/04000019.png')).convert_alpha()
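One property worth knowing when slicing sprite sheets like this: pygame's Surface.subsurface returns a view that shares pixel memory with its parent, so no pixels are copied. A quick self-contained check:

import pygame

sheet = pygame.Surface((100, 50))
icon = sheet.subsurface((0, 0, 25, 24))
sheet.fill((255, 0, 0))                        # paint the parent sheet...
assert icon.get_at((0, 0))[:3] == (255, 0, 0)  # ...and the view sees it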
Example #6
def __init__(self):
    # Slice the normal and critical damage digits (0-9) plus the critical
    # icon out of a single sprite sheet.
    self._image = pygame.image.load(
        get_file_path('img/ui/damage.png')).convert_alpha()
    self.damage_images = {
        'normal': [self._image.subsurface(
            (i*NORMAL_DAMAGE[0], 0), NORMAL_DAMAGE) for i in range(10)],
        'critical': [self._image.subsurface(
            (i*CRITICAL_DAMAGE[0], NORMAL_DAMAGE[1]),
            CRITICAL_DAMAGE) for i in range(10)],
        'critical_icon': self._image.subsurface(
            (0, NORMAL_DAMAGE[1]+CRITICAL_DAMAGE[1]), (44, 38))}
    self.damage_queue = IQueue(MAX_DAMAGE_COUNT)
    self.passed_time_second = 0
Example #7
def load_map_data(self, filename):
    # Read a pickled map definition written by scripts like Example #9.
    filename = 'map/' + filename + '.pkl'
    with open(get_file_path(filename), 'rb') as f:
        return pickle.load(f)
Example #8
def load_mob_data(self, filename):
    # Same pattern as load_map_data; note that pickle.load executes code
    # from the file, so only load files the project itself wrote.
    filename = 'mob/' + filename + '.pkl'
    with open(get_file_path(filename), 'rb') as f:
        return pickle.load(f)
Example #9
# Map definition for map0001: background, tile sheet, mob list and footholds.
data = {
    'background_img': 'img/area/GrassSoil/back.png',
    'tiles_img': 'img/area/GrassSoil/tiles.png',
    'mob_id': ['mob0100100'],
    'footholds': [{
        'pos': (450, 350),
        'size': (336, 176),
        'tiles': [
            {'sub_pos': (142, 54), 'size': (26, 176)},
            {'sub_pos': [(0, 54), (71, 54)],
                'size': (71, 176), 'repeat': 4},
            {'sub_pos': (168, 54), 'size': (26, 176)}]
    }, {
        'pos': (0, 512),
        'size': (639, 176),
        'tiles': [
            {'sub_pos': [(0, 54), (71, 54)],
                'size': (71, 176), 'repeat': 9}]
    }, {
        'pos': (639, 464),
        'size': (321, 176),
        'tiles': [
            {'sub_pos': [(0, 54), (71, 54)],
                'size': (71, 176), 'repeat': 5}]
    }]
}

with open(get_file_path('map/map0001.pkl'), 'wb') as f:
    pickle.dump(data, f)
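A quick way to verify the dump, assuming the same pickle import and get_file_path helper are in scope, is to read the file back and compare one field:

with open(get_file_path('map/map0001.pkl'), 'rb') as f:
    loaded = pickle.load(f)
assert loaded['footholds'][0]['pos'] == (450, 350)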
Example #10
def get_connection():
    return sqlite3.connect(util.get_file_path('labelling/tweets.db'))
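Usage is the same as for any sqlite3 connection; the table name below is a guess based on the database file name, not taken from the source:

conn = get_connection()
try:
    # 'tweets' is a hypothetical table name for illustration.
    count = conn.execute('SELECT COUNT(*) FROM tweets').fetchone()[0]
    print(count)
finally:
    conn.close()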
Example #11
    total_items = get_total_items(collection_handle)
    while len(handles) < total_items and (not handles
                                          or handles[-1]['year'] < stop_year):
        time.sleep(5)
        new_handles = get_handles_on_browse_page(collection_handle, offset,
                                                 rpp)
        if not new_handles:
            break
        handles += new_handles
        offset += rpp
        print("Total handles downloaded: ", len(handles))
    print("Downloaded {} handles for collection {}.".format(
        len(handles), collection_handle))
    return handles


if __name__ == "__main__":
    for handle in get_sub_community_handles(
            "mit_depts_with_subcommunities.json"):
        time.sleep(5)
        if os.path.exists(util.get_file_path(handle)):
            print("Data file for {} exists. Skipping.".format(handle))
            continue
        handles = download_handles_in_collection(handle)
        if handles:
            with open(util.get_file_path(handle), mode='w') as fp:
                json.dump(handles, fp, indent=2)
        else:
            logger.error(
                "Got empty list for {}. Skipping saving.".format(handle))