Example #1
    def test_inputs(self):
        """checks for valid input """
        Base._Base__nb_objects = 0

        b_zero = Base()
        self.assertEqual(b_zero.id, 1)

        b_int = Base(17)
        self.assertEqual(b_int.id, 17)

        b_negint = Base(-29)
        self.assertEqual(b_negint.id, -29)
Example #2
    def test_incrementation(self):
        """ obj id increments """
        Base._Base__nb_objects = 0

        b1 = Base()
        self.assertEqual(b1.id, 1)

        b2 = Base()
        self.assertEqual(b2.id, 2)

        b3 = Base()
        self.assertEqual(b3.id, 3)
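
Both tests reset the name-mangled counter `Base._Base__nb_objects` by hand, so they assume a `Base` class along these lines (a minimal sketch; the project's real class may add validation):

class Base:
    """Minimal sketch of the Base class the tests above exercise."""

    __nb_objects = 0  # class-private counter, mangled to _Base__nb_objects

    def __init__(self, id=None):
        if id is not None:
            # an explicit id (zero or negative included) is stored as-is
            self.id = id
        else:
            # otherwise bump the shared counter and use it as the id
            Base.__nb_objects += 1
            self.id = Base.__nb_objects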
Example #3
    def log_recommendation(recommendation, recommended_by, notes, location,
                           kind, url, when):
        with get_values(recommendation, recommended_by, notes, location, kind,
                        url, when) as values:
            recommendation, recommended_by, notes, location, kind, url, when = values

            when = when.strip()
            if when:
                when = parse_timestr(when)

            with Base.get_session() as sess:
                thing = Thing.get_row(recommendation, recommended_by, kind,
                                      sess)
                # get_row may return a pre-existing row; anything modified
                # more than a second ago was not just created, so treat it
                # as a duplicate recommendation
                if abs(datetime.datetime.now() -
                       thing.modified_at).total_seconds() > 1:
                    html_display_of_duplicate_recommendation.value = str(thing)
                    #run_search(thing.name).to_html()
                    message = "an entry with this name already exists"
                    raise Exception(message)

                thing.notes = notes
                thing.location_str = location
                thing.url = url

                if when:
                    thing.created_at = when

                sess.commit()
        return
Example #4
def parse() -> None:
    url = 'https://m.ru.sputniknews.kz/archive/'
    page = get(url, headers={'User-Agent': UserAgent(verify_ssl=False).random})
    soup = BeautifulSoup(page.text, 'lxml')
    table = soup.find_all('div', class_='b-plainlist__info')
    count = 0
    for item in table:
        data = item.find('span',
                         class_='b-plainlist__date').get_text(strip=True)
        data_mod = datetime.strptime(data, "%H:%M %d.%m.%Y")
        title = item.find('h2',
                          class_='b-plainlist__title').get_text(strip=True)
        link = 'https://m.ru.sputniknews.kz' + item.find(
            'h2', class_='b-plainlist__title').find('a').get('href')

        # skip articles already stored (dedupe on title)
        query_double_file = db.session.query(Base).filter_by(
            title=title).first()
        if not query_double_file:
            save_db = Base(
                date=f"{data_mod.year}.{data_mod.month}.{data_mod.day}",
                title=title,
                link=link)
            db.session.add(save_db)
            count += 1
    db.session.commit()
    save_logs(count)
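
The `Base` model the scraper writes to is defined elsewhere; a hypothetical shape, inferred only from the keyword arguments used above (table name and column types are assumptions):

# Hypothetical model inferred from Base(date=..., title=..., link=...) above;
# the real project's column types and table name may differ.
class Base(db.Model):
    __tablename__ = 'articles'  # assumed
    id = db.Column(db.Integer, primary_key=True)
    date = db.Column(db.String(32))
    title = db.Column(db.String(512), unique=True)  # dedupe key used by parse()
    link = db.Column(db.String(512))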
Example #5
    def add_base(token):

        body = request.get_json()
        name = body.get('name')
        planet = body.get('planet')

        if name is None or planet is None:
            abort(422)

        try:
            new_base = Base(name=name, planet=planet)
            new_base.insert()
            new_base = new_base.format()
            return jsonify({"success": True, "base": new_base})
        except Exception:
            # Exception, not BaseException, so KeyboardInterrupt and
            # SystemExit still propagate
            abort(500)
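
`insert()` and `format()` are helpers on the model, not shown in the snippet; a minimal sketch of what the endpoint assumes (names taken from the snippet, implementation hypothetical):

class Base(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(120), nullable=False)
    planet = db.Column(db.String(120), nullable=False)

    def insert(self):
        # persist this row
        db.session.add(self)
        db.session.commit()

    def format(self):
        # JSON-serializable payload for the API response
        return {'id': self.id, 'name': self.name, 'planet': self.planet}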
Example #6
    def delete_DB(self):

        engine = assert_database_type()

        base = Base.Base()
        for tbl in reversed(base.metadata.sorted_tables):
            tbl.drop(engine, checkfirst=True)
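
Iterating `metadata.sorted_tables` in reverse drops child tables before the parents they reference, so foreign-key constraints are never violated; `checkfirst=True` makes the drop a no-op for tables that do not exist.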
Example #7
def main():
    test_seen_loader = torch.utils.data.DataLoader(AttributeDataset(
        args.data_dir,
        args.dataset,
        features_path=args.gan_path,
        mode='test_seen',
        generalized=True,
        normalize=args.normalize,
        sentences=args.sentences),
                                                   batch_size=args.batch_size,
                                                   shuffle=False)

    test_unseen_loader = torch.utils.data.DataLoader(
        AttributeDataset(args.data_dir,
                         args.dataset,
                         features_path=args.gan_path,
                         mode='test_unseen',
                         generalized=True,
                         normalize=args.normalize,
                         sentences=args.sentences),
        batch_size=args.batch_size,
        shuffle=False)

    # instantiate the models
    if args.mlp:
        mlp = MLP(args.dim_input, [args.nhidden * 2], args.nhidden)
    else:
        mlp = LinearProjection(args.dim_input, args.nhidden)
    embed = LinearProjection(args.nhidden, args.dim_embed)

    if args.sentences:
        cam_key = 'sentences'
    else:
        cam_key = 'emb'

    if args.gan_path is not None:
        cam_key = 'full_' + cam_key

    cam = torch.from_numpy(test_seen_loader.dataset.data[cam_key].T)
    proxies = ProxyNet(args.n_classes, args.dim_embed, proxies=cam)

    model = Base(mlp, embed, proxies)

    criterion = ProxyLoss(temperature=args.temp)

    if args.cuda:
        mlp.cuda()
        embed.cuda()
        model.cuda()
        proxies.cuda()

    # loading
    checkpoint = torch.load(args.model_path)
    model.load_state_dict(checkpoint['state_dict'])
    txt = ("=> loaded checkpoint '{}' (epoch {})".format(
        args.model_path, checkpoint['epoch']))
    print(txt)

    compute_scores(test_seen_loader, test_unseen_loader, model, criterion)
Example #8
    def get(self, search_data):
        """Return the first record whose fields all match search_data."""
        record = None
        for item in self.data:
            match = True
            for field_name, field_value in search_data.items():
                if item[field_name] != field_value:
                    match = False
                    break
            if match:
                record = item
                break
        if record:
            return record
        raise Base.RecordNotFoundException(
            'No record found that matches requested criteria.')
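
Illustrative usage, assuming the owning class exposes `data` as a list of dicts (`repo` and the records below are made up for this example):

repo.data = [{'id': 1, 'env': 'qa'}, {'id': 2, 'env': 'prod'}]
repo.get({'env': 'qa'})        # -> {'id': 1, 'env': 'qa'}
repo.get({'env': 'staging'})   # raises Base.RecordNotFoundException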
Example #9
    def init_DB(self):

        # if connection_string.startswith('sqlite'):
        #     db_file = re.sub("sqlite.*:///", "", connection_string)
        #     os.makedirs(os.path.dirname(db_file))

        # register the models on the shared metadata, then create all tables

        base = Base.Base()
        Flow.Flow()  # instantiated only so its table joins base.metadata

        engine = assert_database_type()
        base.metadata.create_all(engine)

        return "OK"
Example #10
    def to_db(self, ss, ul):
        for site in ss:
            if site['port'] == '8089':
                continue
            # original tested `site['server_name'] in 'localhost'` with the
            # operands reversed; the replace() below implies a substring test
            if 'localhost' in site['server_name']:
                server_name = site['server_name'].replace(
                    'localhost', str(self.ipaddr))
            else:
                server_name = site['server_name']
            if not (site['location'] and isinstance(site['location'], list)):
                continue
            node_id = []
            for loc in site['location']:
                if not loc['name']:
                    continue
                upstream = str(loc['proxy_pass']).split('//')[-1]
                for ups in ul:
                    if ups['name'] != upstream:
                        continue
                    for raw in ups['server']:
                        entry = str(raw[-1])
                        # keep only dotted-quad backends in the 10.0.0.0/8 range
                        if entry.count('.') != 3 or not entry.startswith('10'):
                            continue
                        if 'weight' in entry:
                            server = entry.split()[0]
                            weight = entry.split()[-1].split('=')[-1]
                        else:
                            server = entry
                            weight = 1
                        # `server` is "ip:port"; the original indexed
                        # server[-1] (its last character) before splitting,
                        # which was a bug
                        agent_ip = server.split(':')[0]
                        agent_port = int(server.split(':')[-1])
                        app_one = Base(AppNode, condition={
                            'db_status': 1,
                            'bus_ip': agent_ip,
                            'bus_port': agent_port,
                        }).findone()
                        condition = {
                            'db_status': 1,
                            'server_ip': self.ipaddr,
                            'nginx_port': int(site['port']),
                            'server_name': server_name,
                            'url_suffix': loc['name'],
                            'upstream': upstream,
                            'agent_node_ip': agent_ip,
                            'agent_node_port': agent_port,
                        }
                        load_one = Base(LoadNginx,
                                        condition=condition).findone()
                        app_info = app_one.server_name if app_one else ''
                        if load_one:
                            load_one.environment = ip_env(self.ipaddr)
                            load_one.agent_node_info = app_info
                            Base.update()
                        else:
                            load_add = dict(condition)
                            load_add.update({
                                'environment': ip_env(str(self.ipaddr)),
                                'status': 1,
                                'agent_node_info': app_info,
                                'node_weight': weight,
                                'node_status': 1,
                            })
                            LoadNginx.save_loadnginx(load_add)
                            load_one = Base(LoadNginx,
                                            condition=condition).findone()
                        node_id.append(load_one.id)
            if node_id:
                # soft-delete rows for this vhost that were not seen this run
                load_all = Base(LoadNginx, condition={
                    'db_status': 1,
                    'server_ip': self.ipaddr,
                    'nginx_port': int(site['port']),
                    'server_name': server_name,
                }).fetchall()
                for row in load_all:
                    if row.id not in node_id:
                        row.db_status = 0
                        Base.update()
Example #11
import os
import subprocess
import sys

things_dir = os.path.dirname(os.path.abspath(__file__))
if things_dir not in sys.path:
    sys.path.insert(0, things_dir)

from sqlalchemy.orm import sessionmaker
import pandas as pd
from dateutil import parser
from IPython.display import display
from ipywidgets import widgets, interact_manual, HTML
import psycopg2

from things_config import engine
from models import Thing, Base

Session = sessionmaker(bind=engine)
Base.set_sess(Session)
sess = Session()


def parse_timestr(timestr):
    # shell out to GNU `date -d` so informal strings ("yesterday",
    # "last tuesday") are normalized before dateutil parses them
    timestr = subprocess.check_output('date -d"{}"'.format(timestr),
                                      shell=True).decode().strip()
    return parser.parse(timestr)
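
Shelling out ties `parse_timestr` to GNU coreutils; a pure-Python variant, assuming dateutil's fuzzy mode is acceptable for the same informal inputs:

def parse_timestr_pure(timestr):
    # assumption: dateutil alone handles the strings this notebook sees
    return parser.parse(timestr, fuzzy=True)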


columns = [
    'tid',
    'name',
    'recommended_by',
    'notes',
    'location',
Example #12
import sqlite3

# `Base` is the project's mini-ORM base class, assumed imported elsewhere.


class User(Base):
    __tablename__ = 'users'

    id = ('INTEGER', 'not null')
    username = ('INTEGER', '')


class Posts(Base):
    __tablename__ = 'posts'

    id = ('INTEGER', 'not null')
    postname = ('INTEGER', 'PRIMARY KEY', 'AUTOINCREMENT')
    user_id = ('INTEGER', 'not null')

    __relationships__ = (('user_id', 'users', 'id'), )


if __name__ == '__main__':
    conn = sqlite3.connect('db')

    print(Base(conn=conn).create_table(User))
    print(Base(conn=conn).create_table(Posts))

    print(Posts(conn=conn, lazy=True, id=1).select())
    print(User(username=1, conn=conn).update(id=123, username=1))
    #print(Base(conn=conn).drop_table())
    print(User(conn=conn).update(id=123, username=123))
    print(User(conn=conn).insert(id=1, username=123))
    print(User(conn=conn).select('id'))
Example #13
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.session import make_transient

# `Base` and the mapped models are assumed to come from the project's
# own models module.

engine1 = create_engine('sqlite:///data.db', echo=False)
engine2 = create_engine('postgresql:///buildchicago', echo=False)

Session1 = sessionmaker(bind=engine1)
Session2 = sessionmaker(bind=engine2)

session1 = Session1()
session2 = Session2()

Base.metadata.drop_all(engine2)
Base.metadata.create_all(engine2)

# Base.__subclasses__() is not dependency-ordered, so rows referencing
# other tables may be committed before their parents; iterating
# Base.metadata.sorted_tables instead would give a foreign-key-safe order.
for model in Base.__subclasses__():
    print(model)
    for obj in session1.query(model).all():
        print(obj)
        make_transient(obj)
        session2.add(obj)
    session2.commit()

print("")

for model in Base.__subclasses__():
    print(model, session1.query(model).count(), session2.query(model).count())

session1.close()
session2.close()
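
`make_transient` strips the loaded object's session association and identity key while leaving its attribute values, primary keys included, intact; that is why `session2.add(obj)` issues a fresh INSERT that preserves the original ids.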
Example #14
def main():
    if args.gan_path is None:
        both = False
    else:
        both = True

    if args.validation:
        train_loader = torch.utils.data.DataLoader(AttributeDataset(
            args.data_dir,
            args.dataset,
            features_path=args.gan_path,
            mode='train',
            both=both,
            normalize=args.normalize,
            sentences=args.sentences),
                                                   batch_size=args.batch_size,
                                                   shuffle=True)
        val_seen_loader = torch.utils.data.DataLoader(
            AttributeDataset(args.data_dir,
                             args.dataset,
                             features_path=args.gan_path,
                             mode='val_seen',
                             generalized=True,
                             normalize=args.normalize,
                             sentences=args.sentences),
            batch_size=args.batch_size,
            shuffle=False)
        val_unseen_loader = torch.utils.data.DataLoader(
            AttributeDataset(args.data_dir,
                             args.dataset,
                             features_path=args.gan_path,
                             mode='val_unseen',
                             generalized=True,
                             normalize=args.normalize,
                             sentences=args.sentences),
            batch_size=args.batch_size,
            shuffle=False)
    else:
        trainval_loader = torch.utils.data.DataLoader(
            AttributeDataset(args.data_dir,
                             args.dataset,
                             features_path=args.gan_path,
                             mode='trainval',
                             both=both,
                             normalize=args.normalize,
                             sentences=args.sentences),
            batch_size=args.batch_size,
            shuffle=True)

    test_seen_loader = torch.utils.data.DataLoader(AttributeDataset(
        args.data_dir,
        args.dataset,
        features_path=args.gan_path,
        mode='test_seen',
        generalized=True,
        normalize=args.normalize,
        sentences=args.sentences),
                                                   batch_size=args.batch_size,
                                                   shuffle=False)

    test_unseen_loader = torch.utils.data.DataLoader(
        AttributeDataset(args.data_dir,
                         args.dataset,
                         features_path=args.gan_path,
                         mode='test_unseen',
                         generalized=True,
                         normalize=args.normalize,
                         sentences=args.sentences),
        batch_size=args.batch_size,
        shuffle=False)

    # instantiate the models
    if args.mlp:
        mlp = MLP(args.dim_input, [args.nhidden * 2], args.nhidden)
    else:
        mlp = LinearProjection(args.dim_input, args.nhidden)
    embed = LinearProjection(args.nhidden, args.dim_embed)

    if args.sentences:
        cam_key = 'sentences'
    else:
        cam_key = 'emb'

    if args.validation:
        cam = torch.from_numpy(train_loader.dataset.data[cam_key].T)
    else:
        cam = torch.from_numpy(trainval_loader.dataset.data[cam_key].T)
    proxies = ProxyNet(args.n_classes, args.dim_embed, proxies=cam)

    model = Base(mlp, embed, proxies)

    criterion = ProxyLoss(temperature=args.temp)

    if args.cuda:
        mlp.cuda()
        embed.cuda()
        model.cuda()
        proxies.cuda()

    parameters_set = []

    layers = []
    for c in mlp.children():
        if isinstance(c, nn.Linear) or isinstance(c, nn.ModuleList):
            layers.extend(list(c.parameters()))

    for c in embed.children():
        if isinstance(c, nn.Linear):
            layers.extend(list(c.parameters()))

    parameters_set.append({'params': layers, 'lr': args.lr})

    optimizer = optim.SGD(parameters_set,
                          lr=args.lr,
                          momentum=0.9,
                          nesterov=True,
                          weight_decay=5e-5)

    n_parameters = sum([p.data.nelement() for p in model.parameters()])
    print('  + Number of params: {}'.format(n_parameters))

    scheduler = CosineAnnealingLR(optimizer, args.epochs)

    best_acc = 0
    print('Random results:')
    if args.validation:
        validate(val_seen_loader, val_unseen_loader, model, criterion)
    else:
        validate(test_seen_loader, test_unseen_loader, model, criterion)

    for epoch in range(args.start_epoch, args.epochs + 1):
        # update learning rate
        if args.lr_decay:
            scheduler.step()

        # train for one epoch
        if args.validation:
            train(train_loader, model, criterion, optimizer, epoch)
            validate(val_seen_loader, val_unseen_loader, model, criterion)
        else:
            train(trainval_loader, model, criterion, optimizer, epoch)
            validate(test_seen_loader, test_unseen_loader, model, criterion)

        # saving
        save_checkpoint({'epoch': epoch, 'state_dict': model.state_dict()})

    print('\nFinal evaluation on last epoch model:')
    validate(test_seen_loader, test_unseen_loader, model, criterion)