Example 1
def process(date):
    db_mysql.clear_tags(date)
    s = Session(get_engine())
    gs = generate_groups(date).values()

    mosts = {}

    for i, it in enumerate(gs):
        # Each group member is a (src_id, atk_id) pair. Python 3 removed
        # tuple parameter unpacking in lambdas (PEP 3113), so index the
        # tuple instead of unpacking it.
        fn = lambda v: int(db_mysql.cache_tag(v[0], "atk_eff%s" % (v[1] + 1)))
        atk_fn = lambda v: int(db_mysql.cache_tag(v[0], "attack%s" %
                                                  (v[1] + 1)))

        # Rank by attack first (weighted 10x), then effectiveness; negate so
        # that sorted() puts the highest combined score first.
        combined_fn = lambda v: -(atk_fn(v) * 10 + fn(v))

        arr = sorted(it, key=combined_fn)

        # Count how often each source tops a group with a non-zero attack.
        if atk_fn(arr[0]) > 0:
            nk = arr[0][0]
            mosts[nk] = mosts.get(nk, 0) + 1

        for j, jt in enumerate(arr):
            s.add(
                Effectives(date=date,
                           src_id=jt[0],
                           atk_id=jt[1],
                           group_id=i,
                           group_idx=j))
        # Commit once per group rather than once per row.
        s.commit()

    for src_id, count in mosts.items():
        db_mysql.add_tag(src_id, "most", count)
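
# A small, self-contained illustration of the ranking above: attack is
# weighted 10x over effectiveness, so a member with a higher attack tag sorts
# first even if its effectiveness score is lower. The fake_tags data below is
# hypothetical and not part of the original code.
fake_tags = {
    (101, 0): {"attack1": 2, "atk_eff1": 3},
    (102, 0): {"attack1": 1, "atk_eff1": 9},
}

def fake_combined(v):
    tags = fake_tags[v]
    return -(tags["attack%s" % (v[1] + 1)] * 10 + tags["atk_eff%s" % (v[1] + 1)])

print(sorted(fake_tags, key=fake_combined))  # [(101, 0), (102, 0)]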
Example 2
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine

from models.engine import get_engine

Base = automap_base()

engine = get_engine()

# reflect the tables
Base.prepare(engine, reflect=True)

Src = Base.classes.src
War = Base.classes.war
Effective = Base.classes.eff_atks
Tags = Base.classes.tags
Public_v0 = Base.classes.public_v0

# For the eff_atks table definition, see db_mysql.py
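
# A brief usage sketch of the reflected classes. Column names such as
# Src.category and Tags.name are assumptions taken from the raw SQL used
# elsewhere in these snippets, not guarantees made by this module.
session = Session(engine)
some_date = "..."  # hypothetical category/date value
srcs = session.query(Src).filter(Src.category == some_date).all()
most_count = session.query(Tags).filter(Tags.name == "most").count()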

Example 3
"""
Train and create prediction.
"""
from sys import exit
from io import BytesIO
import sqlalchemy as sa
import pickle
import pandas as pd
from sklearn.ensemble import RandomForestClassifier as RF
from skimage import io
import numpy as np
import json
from models import engine, db_mysql
import concurrent.futures

engine = engine.get_engine()


def getTrain(label):
    # Pull up to 1800 labelled rows for this tag. NOTE: `label` is
    # interpolated directly into the SQL string; a bound parameter would be
    # safer if the label ever came from untrusted input.
    df = pd.io.sql.read_sql_query(
        '''
        SELECT `data_url`, `value` FROM `tags`
        LEFT JOIN `src` ON `src_id` = `src`.`id`
        WHERE `name` = '%s' AND `probability` IS NULL
        ORDER BY `src_id` DESC LIMIT 1800
        ''' % (label), engine)

    def dbg(x):
        # Fetch the cached source for this data_url, load it as a NumPy
        # array, and flatten it into a 1-D feature vector.
        print(x)
        return np.load(BytesIO(db_mysql.cache_src(x))).flatten()
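
    # A minimal sketch of how the training might continue, given the imports
    # above (RandomForestClassifier, concurrent.futures); the X/y handling,
    # executor settings, and return value are assumptions, not the original
    # code.
    with concurrent.futures.ThreadPoolExecutor(max_workers=8) as ex:
        X = list(ex.map(dbg, df['data_url']))
    y = df['value']

    clf = RF(n_estimators=100, n_jobs=-1)
    clf.fit(X, y)
    return clf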
Example 4
Base = declarative_base()


class Effectives(Base):
    __tablename__ = 'eff_atks'
    __table_args__ = (UniqueConstraint('date', 'group_id', 'group_idx'), )

    id = Column(Integer, primary_key=True)
    date = Column(Unicode(128, collation='utf8_general_ci'), nullable=False)
    src_id = Column(Integer, nullable=False)
    atk_id = Column(Integer, nullable=False)
    group_id = Column(Integer, nullable=False)
    group_idx = Column(Integer, nullable=False)

engine = e.get_engine()
r = redis.StrictRedis()

Base.metadata.create_all(engine)

# singleton session... no...
session = Session(e.get_engine())

def clear_tags(date):
    # Drop this date's effectiveness rows and its 'most' tags before they
    # are recomputed.
    with engine.connect() as conn:
        conn.execute(text("DELETE FROM eff_atks WHERE date = :date"),
                     date=date)
        conn.execute(text("DELETE FROM tags WHERE name = 'most' AND src_id IN "
                          "(SELECT id FROM src WHERE category = :date)"),
                     date=date)

def add_tag(src_id, tag_name, value):
    with engine.connect() as conn:
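        # A minimal sketch of a plausible body for this truncated function,
        # assuming a `tags` table with src_id / name / value columns as used
        # in the queries in the other snippets.
        conn.execute(text("INSERT INTO tags (src_id, name, value) "
                          "VALUES (:src_id, :name, :value)"),
                     src_id=src_id, name=tag_name, value=value)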