Example #1
import queue

# Logger, Config, LoggingQueue and UIQueue come from the project's own modules.


class Data:
    """
    Data is a set of variables, essentially globals, that hold information
    about the opened gcode file, the connected machine, and the user's
    settings. These variables are NOT thread-safe: the queue system should
    always be used to pass information between threads.

    This data is available to all widgets.
    """

    clients = []
    # gcode contains all of the lines of gcode in the opened file
    gcode = []
    gcodeFileUnits = "INCHES"
    compressedGCode = None
    compressedGCode3D = None

    version = "101.25"
    stockFirmwareVersion = ""
    customFirmwareVersion = ""
    controllerFirmwareVersion = 0
    # all of the available COM ports
    comPorts = []
    # This defines which COM port is used
    comport = ""
    # The index of the next unread line of Gcode
    gcodeIndex = 0
    # Index of changes in z
    zMoves = []
    # Holds the current value of the feed rate
    feedRate = 20
    # holds the address of the g-code file so that the gcode can be refreshed
    gcodeFile = ""
    importFile = ""
    # the current position of the cutting head
    currentpos = [0.0, 0.0, 0.0]
    target = [0.0, 0.0, 0.0]
    units = "MM"
    tolerance = 0.5
    gcodeShift = [0.0, 0.0]  # the amount that the gcode has been shifted
    message = ""  # used to update the client
    logger = Logger()  # records the machine's behavior for later review
    config = Config()
    # Background image state: persists but is not saved
    backgroundFile = None
    backgroundTexture = None
    backgroundManualReg = []
    backgroundRedraw = False
    """
    Flags
    """
    # flag set while gcode is currently being uploaded
    uploadFlag = 0
    previousUploadStatus = 0
    manualZAxisAdjust = False
    # this is used to determine the first time the position is received from the machine
    firstTimePosFlag = 0
    # report if the serial connection is open
    connectionStatus = 0
    # whether the calibration process is currently underway
    calibrationInProcess = False
    inPIDVelocityTest = False
    inPIDPositionTest = False
    PIDVelocityTestVersion = 0
    PIDPositionTestVersion = 0
    """
    Pointers to Objects
    """
    serialPort = None  # this is a pointer to the program serial port object
    requestSerialClose = False  # this is used to request the serialThread to gracefully close the port
    triangularCalibration = None  # points to the triangular calibration object
    opticalCalibration = None  # points to the optical calibration object
    opticalCalibrationImage = None  # stores the current calibration image
    opticalCalibrationImageUpdated = False  # whether it has been updated
    opticalCalibrationTestImage = None  # stores the current test image
    opticalCalibrationTestImageUpdated = False  # whether it has been updated
    cameraImage = None
    cameraImageUpdated = False
    continuousCamera = False
    """

    Colors

    """
    fontColor = "[color=7a7a7a]"
    drawingColor = [0.47, 0.47, 0.47]
    posIndicatorColor = [0, 0, 0]
    targetIndicatorColor = [1, 0, 0]
    """
    Misc UI bits that persist between invocations (but are not saved to disk)
    """
    zPush = None
    zPushUnits = "MM"
    zReadoutPos = 0.00
    zPopupUnits = None
    zStepSizeVal = 0.1
    """
    Queues
    """
    message_queue = LoggingQueue(logger)
    ui_controller_queue = queue.Queue()
    ui_queue1 = UIQueue()
    console_queue = queue.Queue()  # used for printing to terminal
    mcp_queue = queue.Queue()  # used for sending messages to WebMCP (if enabled)
    webMCPActive = False  # start false until WebMCP connects
    gcode_queue = queue.Queue()
    quick_queue = queue.Queue()
    """
    Position and Error values
    """
    xval = 0.0
    yval = 0.0
    zval = 0.0
    pausedzval = 0.0

    previousPosX = 0.0
    previousPosY = 0.0
    previousPosZ = 0.0

    shutdown = False

    def __init__(self):
        """

        Initializations.

        """
        self.logger.data = self
        self.config.data = self
Example #2
from pymongo import MongoClient, GEO2D
from kafka import KafkaProducer
from config.config import Config
import json
from datetime import datetime

cfg = Config().get()


class StorageHelper:
    client = MongoClient(cfg['storage']['ipaddress'], cfg['storage']['port'])
    db = client[cfg['storage']['dbname']]

    def __init__(self):
        return

    # Store record in Kafka or MongoDB
    def store(self, record, check_old=True):
        print("store")
        json_record = json.loads(record)  # the 'encoding' kwarg was removed in Python 3.9
        ts = datetime.timestamp(datetime.now())
        collection = StorageHelper.db[json_record['source']]
        try:
            if check_old:
                # count_documents replaces the removed cursor.count() API
                if collection.count_documents({"id": json_record['id']}) == 0:
                    collection.insert_one(json_record)
                    #print(str(datetime.datetime.now()) + ' ' + '              Saved to ' + cfg['collectors']['common']['destination'] + ' ' + record)
                else:
                    collection.update_one({"id": json_record['id']},
                                          {"$set": json_record},
                                          upsert=False)
Example #3
    image = image.astype(np.float32, copy=False)
    image -= 127.5
    image /= 127.5
    return image


def get_path_list(imgs_path):
    img_list = []
    with open(imgs_path, 'r') as f:  # ensures the file is closed
        for name in f:
            img_list.append('data/' + name.rstrip('\n'))
    return img_list


if __name__ == '__main__':
    opt = Config()
    if opt.backbone == 'resnet18':
        model = resnet_face18(opt.use_se)
    elif opt.backbone == 'resnet34':
        model = resnet34()
    elif opt.backbone == 'resnet50':
        model = resnet50()
    else:
        raise ValueError('unexpected backbone: {}'.format(opt.backbone))
    model = DataParallel(model)
    # load_model(model, opt.test_model_path)
    model.load_state_dict(torch.load(opt.test_model_path))
    model.to(torch.device("cuda"))
    model.eval()
    img_list = get_path_list('list.txt')
    # print(img_list)
    feat, count = get_features(model, img_list, opt.test_batch_size)
    print("The count is", count)
Example #4
def validate(fix=False):
    config = Config()
    mongo_client = MongoClient(host=config.get_mongo_url())
    database_name = config.get_db_name()

    # db collections
    players_col = mongo_client[database_name][M.Player.collection_name]
    tournaments_col = mongo_client[database_name][M.Tournament.collection_name]
    rankings_col = mongo_client[database_name][M.Ranking.collection_name]
    users_col = mongo_client[database_name][M.User.collection_name]
    pending_tournaments_col = mongo_client[database_name][M.PendingTournament.collection_name]
    merges_col = mongo_client[database_name][M.Merge.collection_name]
    sessions_col = mongo_client[database_name][M.Session.collection_name]
    raw_files_col = mongo_client[database_name][M.RawFile.collection_name]
    regions_col = mongo_client[database_name][M.Region.collection_name]

    # get sets of ids for cross-referencing
    player_ids = set([p.get('_id') for p in players_col.find()])
    tournament_ids = set([t.get('_id') for t in tournaments_col.find()])
    ranking_ids = set([r.get('_id') for r in rankings_col.find()])
    user_ids = set([u.get('_id') for u in users_col.find()])
    pending_tournament_ids = set([pt.get('_id') for pt in pending_tournaments_col.find()])
    merge_ids = set([m.get('_id') for m in merges_col.find()])
    raw_file_ids = set([rf.get('_id')
        for rf in raw_files_col.find({}, {'data': 0})])
    region_ids = set([r.get('_id') for r in regions_col.find()])

    # Player checks
    for p in players_col.find():
        player = M.Player.load(p, context='db')
        error_header = '[ERROR player "{}" ({})]'.format(player.id, player.name)
        modified = False

        # check: player valid
        valid, validate_errors = player.validate()
        if not valid:
            print error_header, validate_errors

        # check: player regions are all valid regions
        for r in player.regions:
            if r not in region_ids:
                print error_header, 'invalid region {}'.format(r)
        if fix:
            # fix: remove invalid regions from player regions
            if any([r not in region_ids for r in player.regions]):
                modified = True
                player.regions = [r for r in player.regions if r in region_ids]


        # check: ratings all have valid regions
        # (iterate over a copy of the keys so entries can be deleted safely)
        for r in list(player.ratings.keys()):
            if r not in region_ids:
                print error_header, 'invalid rating region {}'.format(r)
                if fix:
                    # fix: remove rating from player
                    del player.ratings[r]
                    modified = True

        # check: merge_parent is real player if exists
        if player.merge_parent is not None:
            if player.merge_parent not in player_ids:
                print error_header, 'invalid merge_parent {}'.format(player.merge_parent)
                if fix:
                    # fix: set merge_parent to None, unset merged
                    player.merge_parent = None
                    player.merged = False
                    modified = True

        # check: merge_children are real players
        for mc in player.merge_children:
            if mc not in player_ids:
                print error_header, 'invalid merge_child {}'.format(mc)
        if fix:
            # fix: remove child from merge_children
            if any([mc not in player_ids for mc in player.merge_children]):
                modified = True
                player.merge_children = [mc for mc in player.merge_children if mc in player_ids]

        if fix and modified:
            print error_header, 'fixing player..'
            players_col.update({'_id': player.id}, player.dump(context='db'))


    # Tournament checks
    for t in tournaments_col.find():
        tournament = M.Tournament.load(t, context='db')
        error_header = '[ERROR tournament "{}" ({})]'.format(tournament.id, tournament.name)
        modified = False

        # check: tournament valid
        valid, validate_errors = tournament.validate()
        if not valid:
            print error_header, validate_errors
            if fix:
                # fix: set players equal to set of players in matches
                modified = True
                tournament.players = list({match.winner for match in tournament.matches} | \
                              {match.loser for match in tournament.matches})
                tournament.orig_ids = list(tournament.players)

        # check: tournament empty
        if len(tournament.matches)==0 or len(tournament.players)==0:
            print error_header, 'tournament empty'

        # check: tournament regions are all valid regions
        for r in tournament.regions:
            if r not in region_ids:
                print error_header, 'invalid region {}'.format(r)
        if fix:
            # fix: remove invalid regions
            if any([r not in region_ids for r in tournament.regions]):
                modified = True
                tournament.regions = [r for r in tournament.regions if r in region_ids]

        # check: raw_id maps to real raw_file if exists
        if tournament.raw_id is not None:
            if tournament.raw_id not in raw_file_ids:
                print error_header, 'invalid raw_file_id {}'.format(tournament.raw_id)
                if fix:
                    # fix: set raw_id to None
                    modified = True
                    tournament.raw_id = None

        # check: all players are valid players
        for p in tournament.players:
            if p not in player_ids:
                print error_header, 'invalid player {}'.format(p)
                if fix:
                    # fix: FIX MANUALLY
                    print '[FIX] fix manually'

        # check: all original ids are valid players
        for p in tournament.orig_ids:
            if p not in player_ids:
                print error_header, 'invalid orig_id {}'.format(p)

        # TODO: check that orig_ids end up merging to players?

        if fix and modified:
            print error_header, 'fixing tournament..'
            tournaments_col.update({'_id': tournament.id}, tournament.dump(context='db'))


    # Pending Tournament checks
    for pt in pending_tournaments_col.find():
        tournament = M.PendingTournament.load(pt, context='db')
        error_header = '[ERROR pending_tournament "{}" ({})]'.format(tournament.id, tournament.name)
        modified = False

        # check: pt valid
        valid, validate_errors = tournament.validate()
        if not valid:
            print error_header, validate_errors

        # check: tournament empty
        if len(tournament.matches)==0 or len(tournament.players)==0:
            print error_header, 'tournament empty'

        # check: tournament regions are all valid regions
        for r in tournament.regions:
            if r not in region_ids:
                print error_header, 'invalid region {}'.format(r)

        # check: raw_id maps to real raw_file if exists
        if tournament.raw_id is not None:
            if tournament.raw_id not in raw_file_ids:
                print error_header, 'invalid raw_file_id {}'.format(tournament.raw_id)

        if fix and modified:
            print error_header, 'fixing pending tournament..'
            pending_tournaments_col.update({'_id': tournament.id}, tournament.dump(context='db'))



    # Ranking checks
    for r in rankings_col.find():
        ranking = M.Ranking.load(r, context='db')
        error_header = '[ERROR ranking ({})]'.format(ranking.id)
        modified = False

        # check: ranking valid
        valid, validate_errors = ranking.validate()
        if not valid:
            print error_header, validate_errors

        # check: ranking region is valid region
        if ranking.region not in region_ids:
            print error_header, 'invalid region {}'.format(ranking.region)
            if fix:
                # fix: FIX MANUALLY
                print '[FIX] fix manually'

        # check: ranking tournaments are valid tournaments
        for t in ranking.tournaments:
            if t not in tournament_ids:
                print error_header, 'invalid tournament {}'.format(t)
        if fix:
            # fix: remove invalid tournaments from ranking
            if any([t not in tournament_ids for t in ranking.tournaments]):
                modified = True
                ranking.tournaments = [t for t in ranking.tournaments if t in tournament_ids]

        if fix and modified:
            print error_header, 'fixing ranking..'
            rankings_col.update({'_id': ranking.id}, ranking.dump(context='db'))


    # User checks
    for u in users_col.find():
        user = M.User.load(u, context='db')
        error_header = '[ERROR user "{}" ({})]'.format(user.username, user.id)
        modified = False

        # check: user valid
        valid, validate_errors = user.validate()
        if not valid:
            print error_header, validate_errors

        # check: admin_regions are valid regions
        for r in user.admin_regions:
            if r not in region_ids:
                print error_header, 'invalid region {}'.format(r)
        if fix:
            # fix: remove invalid regions from admin_regions
            if any([r not in region_ids for r in user.admin_regions]):
                modified = True
                user.admin_regions = [r for r in user.admin_regions if r in region_ids]

        if fix and modified:
            print error_header, 'fixing user..'
            users_col.update({'_id': user.id}, user.dump(context='db'))

    # Session checks
    for s in sessions_col.find():
        session = M.Session.load(s, context='db')
        error_header = '[ERROR session ({})]'.format(session.session_id)

        # check: session valid
        valid, validate_errors = session.validate()
        if not valid:
            print error_header, validate_errors

        # check: session user is valid user
        if session.user_id not in user_ids:
            print error_header, 'invalid user_id {}'.format(session.user_id)
            if fix:
                # fix: delete session
                print error_header, 'deleting session...'
                sessions_col.remove({'session_id': session.session_id})

    # Merge checks
    for m in merges_col.find():
        merge = M.Merge.load(m, context='db')
        error_header = '[ERROR merge ({})]'.format(merge.id)
        modified = False

        # check: merge valid
        valid, validate_errors = merge.validate()
        if not valid:
            print error_header, validate_errors

        # check: requester is a valid user
        if merge.requester_user_id is not None and merge.requester_user_id not in user_ids:
            print error_header, 'invalid requester {}'.format(merge.requester_user_id)
            if fix:
                # fix: set requester to None
                merge.requester_user_id = None
                modified = True

        # check: source_player is a valid player
        if merge.source_player_obj_id not in player_ids:
            print error_header, 'invalid source player {}'.format(merge.source_player_obj_id)
            if fix:
                # fix: FIX MANUALLY
                print '[FIX] fix manually'

        # check: target_player is a valid player
        if merge.target_player_obj_id not in player_ids:
            print error_header, 'invalid target player {}'.format(merge.target_player_obj_id)
            if fix:
                # fix: delete merge (probably target player was deleted 
                # since he belonged to no tournaments)
                print '[FIX] deleting merge'
                merges_col.remove({'_id': merge.id})
                print 'deleted merge'

        if fix and modified:
            print error_header, 'fixing merge..'
            merges_col.update({'_id': merge.id}, merge.dump(context='db'))

    # Fancier checks

    # check: no player with no tournaments
    pt_lists = {pid: [] for pid in player_ids}
    for t in tournaments_col.find():
        tournament = M.Tournament.load(t, context='db')
        for player in tournament.players:
            if player in player_ids:
                pt_lists[player].append(tournament.id)

    for p in players_col.find():
        player = M.Player.load(p, context='db')
        error_header = '[ERROR player "{}" ({})]'.format(player.id, player.name)
        if len(pt_lists[player.id]) == 0 and not player.merged:
            print error_header, 'player has no tournaments'
            if fix:
                print error_header, 'deleting player...'
                players_col.remove({'_id': player.id})

    print 'db validation complete'
Example #5
 def __init__(self):
     self.common = CommonUtil()
     self.cookie = OperToken().get_cookie()
     self.db = Opera_DB()
     self.base_url = Config().base_url
                "amounts" : { $push : "$totalPrice" }
            }
        }
    }
]);
"""

# python imports
import os, sys
sys.path.append(os.path.realpath("src"))
import pymongo
from config.config import Config
from booksomeplace.domain.booking import BookingService

# setting up the connection + collection
service = BookingService(Config())

# formulate query
pipeline = [{
    '$match': {
        '$and': [{
            'bookingInfo.arrivalDate': {
                '$regex': '^2019'
            },
            'bookingInfo.paymentStatus': 'confirmed'
        }]
    }
}, {
    '$bucket': {
        'groupBy': '$bookingInfo.arrivalDate',
        'boundaries': ['2019-01-01', '2019-04-01', '2019-07-01', '2019-10-01'],
Example #7
import os
from config.config import Config
from rf.pipeline_classifier import *
from infrastructure.file_system import get_path

path = get_path()

data_file_path = path
result_file_path = path + '/rf_classifier/countries'

if not os.path.isdir(result_file_path):
    os.makedirs(result_file_path)

config = Config(data_file_path, result_file_path)

pipeline_classifier_countries(config)
Example #8
    def alteraLink(self):
        config = Config()
        '''Fetch all the courses'''

        serverUrl = config.dominio + "/webservice/rest/server.php" + "?wstoken=" + \
                        config.altLinkToken + "&wsfunction=" + "core_course_get_courses" + "&moodlewsrestformat=" + config.formatoRest

        response = requests.post(serverUrl)
        disciplinasAva = response.json()
        '''Fetch all the categories'''

        criteria = {
            'criteria[0][key]': 'parent',
            'criteria[0][value]': config.categoriaSemestrePolos
        }
        serverUrlCat = config.dominio + "/webservice/rest/server.php" + "?wstoken=" + \
                       config.altLinkToken + "&wsfunction=" + "core_course_get_categories" + "&moodlewsrestformat=" + config.formatoRest

        response = requests.post(serverUrlCat, criteria)
        categoriasAva = response.json()

        for discModelo in disciplinasAva:
            if (discModelo['categoryid'] == config.categoriaUnija):
                '''Return the specific courses, using the template course as reference'''

                for discEspecifica in disciplinasAva:
                    for categoriaPolos in categoriasAva:
                        if (discEspecifica['categoryid'] ==
                                categoriaPolos['id']):
                            shortNameModelo = discModelo['shortname'].split(
                                "_")
                            shortNameEspecifica = discEspecifica[
                                'shortname'].split("_")
                            '''if (shortNameModelo[1] == shortNameEspecifica[1]):'''
                '''Fetch all the groups of a given course'''

                param = {'courseid': discModelo['id']}
                serverUrl = config.dominio + "/webservice/rest/server.php" + "?wstoken=" + \
                            config.altLinkToken + "&wsfunction=" + "core_group_get_course_groups" + "&moodlewsrestformat=" + config.formatoRest

                response = requests.post(serverUrl, param)
                disciplinasGrupos = response.json()

                try:
                    mySql = DbConnect().mysqlConnect()
                    mysql = mySql.cursor()

                    sql = """SELECT id,instance FROM mdl_course_modules WHERE course = %s AND module = 20
                                """ % discModelo['id']

                    mysql.execute(sql)
                    resultado = mysql.fetchall()

                    print(resultado)

                except Exception as e:
                    print(e)

                finally:
                    if (mySql.is_connected()):
                        mySql.close()

                cont = len(disciplinasGrupos)

                for c in range(cont):
                    # fetch the module id and url instance first; the original
                    # referenced `links` before it was assigned
                    links = resultado[c][0]
                    urlinstance = resultado[c][1]

                    sqlModules = """ UPDATE mdl_course_modules
                                SET availability = '{"op":"&","c":[{"type":"group","id": %s}],"showc":[false]}', visible = 1
                                WHERE id = %s AND course = %s """
                    # index [c], not [c - 1], which would start at the last element
                    valModules = (disciplinasGrupos[c]['id'], links,
                                  discModelo['id'])

                    sqlUrl = """ UPDATE mdl_url SET externalurl = %s, display = 5 WHERE id = %s AND course = %s """
                    externalurl = 'http://localhost/moodle/course/view.php?id='
                    # assumed intent: point the URL module at the group's course
                    # view and match on the url instance id
                    valUrl = (externalurl + str(disciplinasGrupos[c]['id']),
                              urlinstance, discModelo['id'])

                    try:
                        mySql = DbConnect().mysqlConnect()
                        mysql = mySql.cursor()

                        mysql.execute(sqlModules, valModules)
                        mysql.execute(sqlUrl, valUrl)  # was running sqlModules twice
                        mySql.commit()

                    except Exception as e:
                        print(e)

                    finally:
                        if (mySql.is_connected()):
                            mySql.close()
Example #9
    def __init__(self,
                 directory,
                 is_symmetric=True,
                 init_table=True,
                 is_link_predict=False,
                 is_test_missing=False,
                 missing_p=0.0):
        """
        a graph object.
        : param directory: data directory
        """
        self.directory = directory
        self.is_symmetirc = is_symmetric
        self.config = Config()
        self.is_test_missing = is_test_missing
        self.missing_p = missing_p
        if 'Flickr' in directory:
            self.config.train_node_size = 0.05
        if 'BlogCatalog' in directory:
            self.config.train_node_size = 0.6

        self.nodes_table_size = 1 * 10**8
        self.nodes_labels_table_size = 1 * 10**7

        self.nodes_file = os.path.join(self.directory, 'nodes.csv')
        if is_link_predict:
            self.edges_file = os.path.join(self.directory, 'train_edges.csv')
            self.test_edges_file = os.path.join(self.directory,
                                                'test_edges.csv')

        else:
            self.edges_file = os.path.join(self.directory, 'edges.csv')
        self.labels_info_file = os.path.join(self.directory, 'labels_info.csv')
        self.train_labels_file = os.path.join(self.directory,
                                              'train_labels.csv')
        self.validate_labels_file = os.path.join(self.directory,
                                                 'validate_labels.csv')
        self.test_labels_file = os.path.join(self.directory, 'test_labels.csv')
        self.labels_file = os.path.join(self.directory, 'labels.csv')
        self.train_nodes_file = os.path.join(
            self.directory,
            'train_nodes{}.csv'.format(self.config.train_node_size))
        self.test_nodes_file = os.path.join(
            self.directory,
            'test_nodes{}.csv'.format(self.config.train_node_size))

        self.nodes_set = self.get_nodes_set(self.nodes_file)
        self.nodes_num = len(self.nodes_set)
        self.nodes_ids = self.get_nodes_ids()
        self.nodes_adj_edges_set, self.nodes_in_degree, self.nodes_in_set = \
            self.get_nodes_adj_edges_set(self.nodes_set, self.edges_file, is_symmetric)
        if is_link_predict:
            self.test_nodes_adj_edges_set, _, _ = self.get_nodes_adj_edges_set(
                self.nodes_set, self.test_edges_file, is_symmetric)
        self.nodes_out_degree = self.get_out_degree()
        self.nodes_degree_prob = self.get_nodes_prob(self.nodes_out_degree)
        self.nodes_in_degree_prob = self.get_nodes_prob(self.nodes_in_degree)
        if init_table:
            self.nodes_degree_table = get_nodes_table(self.nodes_degree_prob,
                                                      self.nodes_table_size)

        self.all_labels_set, self.train_labels_set, self.validate_labels_set, self.test_labels_set = self.get_labels(
        )
        self.labels_num = len(self.train_labels_set)
        self.all_labels_ids, self.train_labels_ids, self.validate_labels_ids, self.test_labels_ids = self.get_labels_ids(
        )
        self.all_nodes_labels, self.emb_nodes_labels, self.emb_validate_nodes_labels, self.clf_nodes_labels, self.train_nodes_labels, self.test_nodes_labels \
            = self.get_nodes_labels()
Example #10
def config(environment):
    return Config(environment)
Example #11
 def get_configs(self):
     c = Config()
     self.net = c.network
     self.aws_bucket = c.aws_bucket
     self.bb_bucket = c.bb_bucket
Example #12
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @date: 2019/8/2 13:44
# @author: zhangcw
# @content: test the code of transX model

from models.transE import transE
from config.config import Config

model = Config()
model.set_train_model(transE)
model.train()
model.test()
Example #13
import unittest
import requests
import json
from datetime import datetime
from ddt import ddt, data
from common.HTMLTestRunner import HTMLTestRunner
from common.excel_tools import ExcelTools
from config.config import Config


testxlsx = Config().base_path + '/testdata/xlsx/test_register_data.xlsx'
testdata = ExcelTools('r', testxlsx).dict_values()


@ddt
class TestRegister(unittest.TestCase):

    @classmethod
    def setUpClass(cls) -> None:
        """需要登录的操作可写在此方法内"""
        cls.s = requests.Session()

    @classmethod
    def tearDownClass(cls) -> None:
        pass

    def setUp(self):
        pass

    def tearDown(self):
        pass
Example #14
import databases
from sqlalchemy import Integer, Float, Table, create_engine, Column, MetaData, String
from config.config import Config

OBJ_CONFIG = Config()

# Postgres DB
DATABASE_URL = OBJ_CONFIG.DATABASE_URL
database = databases.Database(DATABASE_URL)
metadata = MetaData()

corpus_schema = Table(
    'corpus', metadata,
    Column('id', Integer, autoincrement=True, primary_key=True),
    Column('text', String), Column('anger', Float), Column('disgust', Float),
    Column('fear', Float), Column('happiness', Float),
    Column('sadness', Float), Column('surprise', Float))

engine = create_engine(DATABASE_URL)
metadata.create_all(engine)
Example #15
import sys
import argparse
from converter.extractor import Extractor
from gui.presenter.presenter import Presenter
from gui.view.configurator import App

from config.config import Config
config = Config(True)
config.loadPreferences()
extractor = Extractor(config)


def parseArgs():
    parser = argparse.ArgumentParser()
    parser.add_argument("--type",
                        "-t",
                        type=str,
                        default='txt',
                        help="Output file type: txt or ...")
    parser.add_argument("--input_file",
                        "-in",
                        type=str,
                        default='',
                        help="Path to input pdf file")
    parser.add_argument("--output_file",
                        "-out",
                        type=str,
                        default='',
                        help="Path to output file")
    parser.add_argument("--metaTitle",
                        action="store_true",
Example #16
def main():
    log_dir = os.path.join('logs', '000')
    opt = Config()
    if opt.display:
        visualizer = Visualizer()

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    torch.manual_seed(1)

    train_dataset = Dataset(opt.train_root,
                            opt.train_list,
                            phase='train',
                            input_shape=opt.input_shape)
    trainloader = data.DataLoader(train_dataset,
                                  batch_size=opt.train_batch_size,
                                  shuffle=True,
                                  num_workers=opt.num_workers)

    #identity_list = get_lfw_list(opt.lfw_test_list)
    #img_paths = [os.path.join(opt.lfw_root, each) for each in identity_list]

    if opt.loss == 'focal_loss':
        criterion = FocalLoss(gamma=2)
    else:
        criterion = torch.nn.CrossEntropyLoss()

    if opt.backbone == 'resnet18':
        model = resnet_face18(use_se=opt.use_se)
    elif opt.backbone == 'resnet34':
        #model = resnet34()
        model = resnet_face34(use_se=opt.use_se)
    elif opt.backbone == 'resnet50':
        model = resnet50()

    if opt.metric == 'add_margin':
        metric_fc = AddMarginProduct(512,
                                     opt.num_classes,
                                     s=30,
                                     m=0.35,
                                     device=device)
    elif opt.metric == 'arc_margin':
        metric_fc = ArcMarginProduct(512,
                                     opt.num_classes,
                                     s=30,
                                     m=0.5,
                                     easy_margin=opt.easy_margin,
                                     device=device)
    elif opt.metric == 'sphere':
        metric_fc = SphereProduct(512, opt.num_classes, m=4, device=device)
    else:
        metric_fc = torch.nn.Linear(512, opt.num_classes)

    # view_model(model, opt.input_shape)
    #print(model)
    model.to(device)
    summary(model, input_size=opt.input_shape)
    model = DataParallel(model)
    metric_fc.to(device)
    metric_fc = DataParallel(metric_fc)

    print('{} train iters per epoch'.format(len(trainloader)))

    if opt.optimizer == 'sgd':
        optimizer = torch.optim.SGD([{
            'params': model.parameters()
        }, {
            'params': metric_fc.parameters()
        }],
                                    lr=opt.lr,
                                    weight_decay=opt.weight_decay)
    else:
        optimizer = torch.optim.Adam([{
            'params': model.parameters()
        }, {
            'params': metric_fc.parameters()
        }],
                                     lr=opt.lr,
                                     weight_decay=opt.weight_decay)

    scheduler = StepLR(optimizer, step_size=opt.lr_step, gamma=0.1)

    #start = time.time()
    for epoch in range(opt.max_epoch):
        print('Epoch %d/%d' % (epoch, opt.max_epoch))
        train(opt, model, metric_fc, device, trainloader, criterion, optimizer,
              scheduler)
        validate(opt, model, device, epoch, log_dir)
        scheduler.step()  # step after the epoch's optimizer updates (PyTorch >= 1.1)
Example #17
def main():
    print('Reading arguments')
    parser = argparse.ArgumentParser(description="LSTM CRF implementation")
    opt = parse_arguments(parser)
    conf = Config(opt)
    conf_conll = Config_conll(opt)
    conf_ontonotes = Config_ontonotes(opt)

    reader = Reader(conf.digit2zero)
    setSeed(opt, conf.seed)

    trains_0 = reader.read_conll(conf.train_file_1, 0, conf.train_num, True)
    devs_0 = reader.read_conll(conf.dev_file_1, 0, conf.dev_num, False)
    tests_0 = reader.read_conll(conf.test_file_1, 0, conf.test_num, False)

    trains_1 = reader.read_conll(conf.train_file_2, 1, conf.train_num, True)
    devs_1 = reader.read_conll(conf.dev_file_2, 1, conf.dev_num, False)
    tests_1 = reader.read_conll(conf.test_file_2, 1, conf.test_num, False)

    trains_all = trains_0 + trains_1
    devs_all = devs_0 + devs_1
    tests_all = tests_0 + tests_1

    if conf.context_emb != ContextEmb.none:
        print('Loading the elmo vectors for all datasets.')
        conf.context_emb_size = reader.load_elmo_vec(
            conf.train_file_1 + "." + conf.context_emb.name + ".vec", trains_1)
        reader.load_elmo_vec(
            conf.dev_file_1 + "." + conf.context_emb.name + ".vec", devs_1)
        reader.load_elmo_vec(
            conf.test_file_1 + "." + conf.context_emb.name + ".vec", tests_1)

    conf.use_iobes(trains_all)
    conf.use_iobes(devs_all)
    conf.use_iobes(tests_all)
    conf.build_label_idx(trains_all)

    conf.build_word_idx(trains_all, devs_all, tests_all)
    conf.build_emb_table()

    ids_train = conf.map_insts_ids(trains_all)
    ids_dev = conf.map_insts_ids(devs_all)
    ids_test = conf.map_insts_ids(tests_all)

    conf_conll.label_size = conf.label_size_0
    conf_conll.label2idx = conf.label2idx_0
    conf_conll.idx2labels = conf.idx2labels_0
    conf_ontonotes.label_size = conf.label_size_1
    conf_ontonotes.label2idx = conf.label2idx_1
    conf_ontonotes.idx2labels = conf.idx2labels_1

    print("num chars: " + str(conf.num_char))
    # print(str(config.char2idx))

    print("num words: " + str(len(conf.word2idx)))
    # print(config.word2idx)
    if opt.mode == "train":
        learn_from_insts(conf, conf_conll, conf_ontonotes, conf.num_epochs,
                         trains_all, devs_all, tests_all)
    else:
        ## Load the trained model.
        test_model(conf, tests_all)
        # pass

    print(opt.mode)
Example #18
 def __init__(self):
     self.const = Constants()
     self.utils = Utils()
     self.config = Config()
Example #19
import torch.nn as nn
import torch.optim as optim
from model.Unet import UNeT
from loss.diceloss import Loss
from torch.utils.data import DataLoader
from hyperparams.hyperparams import hyperparameters
from dataloader.dataloader import ImageLoader, ImageList
from collections import defaultdict
import torch
from torchvision import transforms
from config.config import Config
from tqdm import tqdm
from utils.one_hot_encoder import HotEncoder

conf = Config('config.json')
conf = conf.load_conf()
IMAGE_DIR = conf["Train Data"]
ANNOTATIONS_DIR = conf["Annotations Data"]
TEST_DATA = conf["Test Data"]
MODEL_SAVE = conf["Model Save"]
IMAGE_RESOLUTION = tuple(map(int, conf['Resolution'].split(',')))

transforms_compose = transforms.Compose([transforms.ToTensor()])
params = hyperparameters(train_percentage=1,
                         batch_size=1,
                         epoch=50,
                         n_classes=2)

if torch.cuda.is_available():
    net = UNeT(n_classes=2, n_channels=3).cuda()
else:
    net = UNeT(n_classes=2, n_channels=3)
Example #20
    clf = RandomForestRegressor(n_estimators=500)
    output_pred = cross_val_predict(clf, otu_df, adherence, cv=2)
    mse_list = [mean_squared_error([adherence[i]], [output_pred[i]]) for i in range(0, len(adherence))]
    return mse_list


box_points = 'outliers'

path = get_path()

in_path = path
out_path = path + '/rf_regressor/supp_fig_7'
if not os.path.isdir(out_path):
    os.makedirs(out_path)

config = Config(in_path, out_path)

common_subjects = config.get_common_subjects_with_adherence()

target_keys = ['compliance160', 'country']
countries = ['Italy', 'UK', 'Holland', 'Poland', 'France']

metadata, obs_dict = config.get_target_subject_dicts(common_subjects, target_keys, 'T0')

adherence = {}
subjects_country = {}
for country in countries:
    codes = obs_dict['country'][country]

    for code in codes:
Example #21
import codecs
import csv
from contextlib import closing

import MySQLdb
import requests

from config.config import Config

config = Config('./config/config.ini')
config.load_config()


class SqlTools:
    def __init__(self):
        self.conn = MySQLdb.connect(config.host, config.user, config.passwd,
                                    config.db)

    def connect(self):
        self.conn = MySQLdb.connect(config.host, config.user, config.passwd,
                                    config.db)

    def show_version(self):
        if self.conn:
            cursor = self.conn.cursor()
            cursor.execute("SELECT VERSION()")
            data = cursor.fetchone()
            return 'Database-Version ' + str(data)

    def query(self, sql):
        if self.conn:
Example #22
 def __init__(self, command_line_argument):
     self.__config = Config()
     self.__dirs = Dirs()
     self.__debug = Debug()
     self.__command_line_argument = command_line_argument
Example #23
 def __init__(self):
     self.config = Config()
     self.github = Github(self.config.get_auth_token())
     self.gitRestApi = GitRestApi()
Example #24
def main():
    opt = Config()
    opt.num_classes = len(get_train_labels(opt.train_root, opt.criteria_list))
    opt.metric = 'linear'

    distance_path = opt.distance_path
    mean_path = opt.mean_files_path
    alpha_rank = opt.ALPHA_RAN

    labellist = getlabellist(opt.criteria_list)
    train_labels = get_train_labels(opt.train_root, opt.criteria_list)

    # recreate or first create
    weibull_model = weibull_tailfitting(mean_path,
                                        distance_path,
                                        train_labels,
                                        tailsize=opt.WEIBULL_TAIL_SIZE,
                                        distance_type=opt.distance_type)

    # data loader
    test_dataset = Dataset(opt.test_root,
                           opt.test_list,
                           phase='test',
                           input_shape=opt.input_shape)

    test_loader = data.DataLoader(test_dataset,
                                  batch_size=opt.test_batch_size,
                                  shuffle=True,
                                  num_workers=opt.num_workers)

    # load model , both of feature, fc_modeal
    if opt.backbone == 'resnet18':
        model = resnet_face18(opt.use_se)
    elif opt.backbone == 'resnet34':
        model = resnet34()
    elif opt.backbone == 'resnet50':
        model = resnet50()
    else:
        raise TypeError('backbone: {} is not expected'.format(opt.backbone))

    model = DataParallel(model)
    model.to(device)
    if device == 'cuda':
        # load_state_dict expects a state dict, not a file path
        model.load_state_dict(torch.load(opt.test_model_path))
    else:
        model.load_state_dict(
            torch.load(opt.test_model_path, map_location={'cuda:0': 'cpu'}))
    model.eval()

    if opt.metric == 'add_margin':
        metric_fc = AddMarginProduct(512, opt.num_classes, s=30, m=0.35)
    elif opt.metric == 'arc_margin':
        metric_fc = ArcMarginProduct(512,
                                     opt.num_classes,
                                     s=30,
                                     m=0.5,
                                     easy_margin=opt.easy_margin)
    elif opt.metric == 'sphere':
        metric_fc = SphereProduct(512, opt.num_classes, m=4)
    else:
        metric_fc = nn.Linear(512, opt.num_classes)

    metric_fc.to(device)
    metric_fc = DataParallel(metric_fc)
    if device == 'cuda':
        metric_fc.load_state_dict(torch.load(opt.test_metric_fc_path))
    else:
        metric_fc.load_state_dict(
            torch.load(opt.test_metric_fc_path, map_location={'cuda:0':
                                                              'cpu'}))

    metric_fc.eval()
    print(labellist)
    openmax_preds_list = []
    softmax_preds_list = []
    ans_preds_list = []
    softmax_data_list_known = []
    softmax_data_list_unknown = []

    # # data loader
    # test_dataset = Dataset('estimate_visualize', 't-SNE_test1568010965.919611.png', phase='test', input_shape=opt.input_shape)
    #
    # test_loader = data.DataLoader(test_dataset,
    #                               batch_size=1,
    #                               shuffle=True,
    #                               num_workers=opt.num_workers)
    # # from PIL import Image
    # # img = Image.open('estimate_visualize/t-SNE_test1568010965.919611.png', )
    # # img = np.array(img)
    # # img = img[np.newaxis,:,:]
    # # print(img.shape)
    # # tt = test_dataset.transforms
    # # # for t in tt:
    # # img = tt(img)
    # # # img = img.resize(256,256)
    # # # img = torch.Tensor(img)
    # for i, (imgs, label_ids) in enumerate(test_loader):
    #     # compute feature and estimate score → create img_preds that contains feature, score
    #     imgs_feature = model(imgs)
    #     # scores = metric_fc(imgs_feature, label_ids)
    #     scores = metric_fc(imgs_feature)
    #     scores = scores.detach().numpy()
    #     print(scores)
    # # ff = model(img)
    # # score = metric_fc(ff)
    # print(scores)
    # print(softmax(scores[0]))

    for i, (imgs, label_ids) in enumerate(test_loader):
        # compute feature and estimate score → create img_preds that contains feature, score
        imgs_feature = model(imgs)
        # scores = metric_fc(imgs_feature, label_ids)
        scores = metric_fc(imgs_feature)
        scores = scores.detach().numpy()
        scores = np.array(scores)[:, np.newaxis, :]
        temp_labels = [labellist[pid] for pid in label_ids]
        for ii, (score, label) in enumerate(zip(scores, temp_labels)):
            openmax_predict, softmax_predict = openmax(
                score,
                weibull_model,
                train_labels,
                eu_weight=opt.euc_scale,
                alpharank=alpha_rank,
                distance_type=opt.distance_type)

            softmax_ans = labellist[np.argmax(softmax_predict)]
            # type 1
            # openmax_ans = labellist[np.argmax(openmax_predict)] if np.argmax(openmax_predict) < len(
            #     train_labels) else 'unknown'

            # type2
            # openmax_ans = softmax_ans if np.sort(score, axis=1)[0][::-1][0] > opt.SCORE_THRESHOLD else 'unknown'

            # type3 (commented out like types 1-2: type4 below overwrites it)
            # openmax_ans = softmax_ans if np.sort(
            #     score, axis=1)[0][::-1][0] / np.linalg.norm(
            #         score, ord=2) > opt.SCORE_NORMALIZED else 'unknown'

            # type4
            openmax_ans = softmax_ans if np.sort(
                score, axis=1)[0][::-1][0] / np.linalg.norm(
                    score[score > 0],
                    ord=2) > opt.SCORE_NORMALIZED else 'unknown'

            ans_label = label if labellist.index(label) < len(
                train_labels) else 'unknown'
            if ans_label == 'unknown':
                softmax_data_list_unknown.append(
                    np.sort(score, axis=1)[0][::-1][0] /
                    np.linalg.norm(score[score > 0], ord=2))
                if np.sort(score, axis=1)[0][::-1][0] / np.linalg.norm(
                        score[score > 0], ord=2) > 0.7:
                    import matplotlib.pyplot as plt
                    print(label)
                    plt.imshow(np.array(imgs[ii][0]))
                    plt.savefig('estimate_visualize/{}_{}.jpg'.format(
                        i, label))
                    plt.show()
            else:
                softmax_data_list_known.append(
                    np.sort(score, axis=1)[0][::-1][0] /
                    np.linalg.norm(score[score > 0], ord=2))

            # if ans_label == 'unknown':
            #     softmax_data_list_unknown.append(score[0])
            # else:
            #     softmax_data_list_known.append(score[0])
            openmax_preds_list.append(openmax_ans)
            softmax_preds_list.append(softmax_ans)
            ans_preds_list.append(ans_label)
            print(
                'predict_softmax: {}, predict_openmax: {}, answer: {}'.format(
                    softmax_ans, openmax_ans, ans_label))

    # create_mean_graph(softmax_data_list_known)
    # create_mean_graph(softmax_data_list_unknown)
    show_histgram(softmax_data_list_unknown)
    show_histgram(softmax_data_list_known)

    # accuracy check
    soft_acc = accuracy(softmax_preds_list, ans_preds_list)
    open_acc = accuracy(openmax_preds_list, ans_preds_list)
    print('softmax:', soft_acc / len(ans_preds_list))
    print('openmax:', open_acc / len(ans_preds_list))
Example #25
            j = 2
            for i in range(self.rows - 2):
                s = dict()
                s['CaseId'] = i + 1
                values = self.sheet.row_values(j)
                for x in range(self.cols):
                    s[self.keys[x]] = values[x]
                r.append(s)
                j += 1
            return r

    def write_excel(self, row, col, value):
        self.ws.cell(row, col).value = value
        self.wbw.save(self.file)


if __name__ == "__main__":
    excel_file = Config().base_path + '/testdata/xlsx/test_register_data.xlsx'
    et = ExcelTools('r', excel_file)
    data = et.read_excel()
    print(et.get_rows())
    print(et.get_cols())
    print(et.get_cell_value(2, 6))
    print(et.get_cell_value(3, 6))
    if et.get_cell_value(4, 6) == '':
        print('true')
    print(next(data))
    print(next(data))
    print(next(data))
    print(et.dict_values())
Example #26
 def __init__(self):
     self.info = Config().platform
     self.androidname = 'android'
     self.iosname = 'ios'
     self.platform = Config.platform['run']
Example #27
import logging
from config.config import Config

config = Config()
config.parse()

logger = logging.getLogger('Application')
level = config.get('log_level')

logger.setLevel(level)
formatter = logging.Formatter(
    '%(asctime)s : %(name)s - %(levelname)s : %(message)s')
sh = logging.StreamHandler()
sh.setLevel(level)
sh.setFormatter(formatter)
logger.addHandler(sh)
# fh = logging.FileHandler('logs/app.log')
# fh.setFormatter(formatter)
# logger.addHandler(fh)
Example #28
	def __init__(self):
		self.java = Java()
		self.utils = Utils()
		self.config = Config()
Example #29
    if os.path.exists(db):
        print("Old database found")
        new_db = data.network + '_' + data.delegate + '.db'
        os.chdir(u.tbw)
        run(["cp", old_db, new_db])
        run(["rm", old_db])
        print(
            "Converted old database to new naming format. Please restart script"
        )
        quit()


if __name__ == '__main__':

    # get config data
    data = Config()
    network = Network(data.network)
    u = Util(data.network)
    client = u.get_client(network.api_port)

    dynamic = Dynamic(data.database_user, data.voter_msg, data.network,
                      network.api_port)
    transaction_fee = data.atomic * 0.1
    multi_transaction_fee = data.atomic * data.multi_fee

    # initialize db connection
    # get database
    arkdb = ArkDB(network.database, data.database_user,
                  network.database_password, data.public_key)

    #conversion check for pre 2.3 databases
Example #30
 def __init__(self):
     self.db = records.Database(Config().database)