Example #1
def create_game(size):
    try:
        game = Game(size=size)
        session.add(game)
        session.commit()
        print('New game created. Got id', game.id)
        return game
    except exc.SQLAlchemyError:
        # Roll back so the session stays usable after the failed commit.
        session.rollback()
        return None
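
For context, a minimal sketch of the setup this snippet assumes; the Game model, the engine URL, and the module-level session are illustrative guesses, not taken from the original project:

# Hypothetical setup for the snippet above: a declarative model whose id is
# generated on commit, plus a module-level SQLAlchemy session.
from sqlalchemy import Column, Integer, create_engine, exc
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Game(Base):
    __tablename__ = 'games'
    id = Column(Integer, primary_key=True)  # populated once commit() flushes the insert
    size = Column(Integer)

engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

game = create_game(size=9)  # prints the new id and returns the Game, or None on failure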
Example #2
def compute_feature_over_db():
    cl = KMeanPaletteClassifier()
    # Load the entries once instead of issuing the same query twice.
    entries = session.query(Entry).all()
    total = len(entries)
    for i, d in enumerate(entries):  # type: Entry
        img = cv2.imread(d.thumbnail_path)
        if img is None:
            # cv2.imread returns None for missing or unreadable files; skip those.
            continue
        pred = cl.fit(img)[0]
        print(i, total)
        d.color_labels = json.dumps(pred)

    session.commit()
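
The prediction is stored as a JSON string, so consumers have to decode it again; a small read-back sketch assuming the same session and Entry model as above:

# Sketch only: color_labels holds a JSON-encoded prediction written by
# compute_feature_over_db(), so it is decoded before use.
import json

entry = session.query(Entry).first()
if entry is not None and entry.color_labels:
    labels = json.loads(entry.color_labels)
    print(entry.thumbnail_path, labels)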
Example #3
def add_player(name, secret):
    try:
        player = Player(name=name, secret=secret)
        session.add(player)
        session.commit()
        print('New player with name', name, 'registered, got id', player.id)
        return player
    except exc.IntegrityError:
        session.rollback()
        return None
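
The exc.IntegrityError handler only matters if the database enforces a constraint; assuming Player.name is unique, registering the same name twice would look like this:

# Illustrative only: presumes a UNIQUE constraint on Player.name, so the second
# insert fails, the handler rolls back, and None is returned.
first = add_player('alice', secret='s3cr3t')
duplicate = add_player('alice', secret='other')
assert first is not None
assert duplicate is None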
Example #4
def compute_feature_over_db(func):
    # Query the entries once and reuse the list for the progress total.
    entries = session.query(Entry).all()
    total = len(entries)
    cl = KMeanPaletteClassifier()

    for i, d in enumerate(entries):  # type: Entry
        if i % 10 == 0:
            print(i, total, np.round(i / total * 100, 2), "%")

        frame = cv2.imread(d.thumbnail_path)
        if frame is None:
            # Skip entries whose thumbnail cannot be read; cl.fit would fail on None.
            continue

        pred = xception_process(d.thumbnail_path)
        d.xception_string = json.dumps(pred)

        pred = cl.fit(frame)[0]
        d.color_labels = json.dumps(pred)

        func(d, frame)
    session.commit()
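
A sketch of how the func callback might be used; store_mean_brightness and the mean_brightness column are made up for illustration and are not part of the original schema:

import cv2

# Hypothetical per-frame callback: compute_feature_over_db() hands it each Entry
# together with the decoded thumbnail, so any extra feature can be attached here.
def store_mean_brightness(entry, frame):
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    entry.mean_brightness = float(gray.mean())  # illustrative column name

compute_feature_over_db(store_mean_brightness)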
Example #5
async def main():
    scrap_filters = session.query(ScrapFilters).all()

    async with aiohttp.ClientSession() as aiohttp_session:
        new_articles = []
        for scrap_filter in scrap_filters:
            new_articles += fetch_new_articles_with_filter(
                aiohttp_session, scrap_filter)

        if new_articles:
            print('Sending notifications...')
        for article in new_articles:
            await notify_users_about_article(article, aiohttp_session)
            await asyncio.sleep(0.20)

        # No explicit close() needed: the async with block closes aiohttp_session.

        if new_articles:
            rows = [PostedArticles(article_id=a['id']) for a in new_articles]
            session.add_all(rows)
            session.commit()
        print('Done.')
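
The coroutine still needs an event loop; a conventional entry point (Python 3.7+) would be:

import asyncio

if __name__ == '__main__':
    asyncio.run(main())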
Example #6
# Load the precomputed caption embeddings.
with open(embeddings_path, 'rb') as file:
    _caption_embeddings = pickle.load(file)

csv_path = 'data/captions.csv'
csv = pd.read_csv(csv_path)
_CAPTIONS = csv['caption'].tolist()
_IMAGE_IDS = csv['thumbnail_id'].tolist()

print(len(_CAPTIONS))
entries = session.query(Entry).all()

res_emb = []
d = dict()
try:
    # Index entries by a thumbnail-path component so they can be matched to thumbnail_id.
    for e in entries:  # type: Entry
        d[e.thumbnail_path.split("/")[2]] = e
    for (c, p, emb) in zip(_CAPTIONS, _IMAGE_IDS, _caption_embeddings):
        print(c, p)
        d[p].caption = c
        res_emb.append((d[p].id, emb))
        print(p)
    session.commit()
except Exception:
    session.rollback()
    # Re-raise with the original traceback intact.
    raise

res_emb = sorted(res_emb, key=lambda x: x[0])
res_emb = [r[1] for r in res_emb]
with open("embedding.pickle", "wb") as f:
    pickle.dump(res_emb, f)
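
A sketch of reading the artifact back: embedding.pickle stores the embeddings ordered by Entry.id, so pairing them with an id-ordered query restores the mapping, assuming every entry received exactly one caption above:

import pickle

with open("embedding.pickle", "rb") as f:
    embeddings = pickle.load(f)

# Align embeddings with entries by the same id order used when dumping.
entries = session.query(Entry).order_by(Entry.id).all()
for entry, emb in zip(entries, embeddings):
    print(entry.id, entry.caption)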
Example #7
 def delete(self) -> None:
     session.delete(self)
     session.commit()
Example #8
 def save(self) -> None:
     session.add(self)
     session.commit()
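
Examples #7 and #8 read like methods of a small active-record style helper; a sketch of such a mixin, reusing the declarative Base, engine, and session from the sketch after Example #1 (the CRUDMixin and Note names are illustrative):

from sqlalchemy import Column, Integer, String

# Illustrative mixin combining the save()/delete() helpers from Examples #7 and #8.
class CRUDMixin:
    def save(self) -> None:
        session.add(self)
        session.commit()

    def delete(self) -> None:
        session.delete(self)
        session.commit()

class Note(Base, CRUDMixin):
    __tablename__ = 'notes'
    id = Column(Integer, primary_key=True)
    text = Column(String)

Base.metadata.create_all(engine)  # create the new table on the same engine
note = Note(text='hello')
note.save()    # INSERT + COMMIT
note.delete()  # DELETE + COMMIT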