def calc_distances(self):
    """Compute pairwise target distances with batched worker threads.

    For every pair (i, j) with j > i, a CT worker computes and stores
    dist(tids[i], tids[j]). Workers are launched in batches capped by the
    configured thread limit and joined before the next batch starts.
    """
    logger = logging.getLogger(__name__)  # fixed: was _name_ (NameError at runtime)
    n = len(self._tids)
    for i in range(0, n):
        j = i + 1
        logger.debug("calc topn %s", i)
        while j < n:
            logger.debug("calc topn dists(%s, %s)", i, j)
            # Batch size: remaining pairs for this i, capped by the thread
            # limit. (The original's extra `if to > n` cap was dead code:
            # n - j is always < n here.)
            to = min(n - j, Cfg.instance().threads_max())
            threads = []
            for _ in range(0, to):
                t = CT(Cfg.instance().dist(),
                       Cfg.instance().targets_dists_tablename(),
                       Cfg.instance().targets_tablename(),
                       self._tids[i],
                       self._tids[j])
                t.start()
                threads.append(t)
                j += 1  # each worker in the batch takes the next j
            for t in threads:
                t.join()
    logger.debug("cos calculation complete")
def solve_realisation(self, aid, iid):
    """Predict a rating of item *iid* for the active user *aid*.

    Builds a cluster of users whose content includes *iid*; once the
    cluster reaches the configured size, delegates to self.pred().

    NOTE(review): implicitly returns None when too few matching users
    exist — confirm callers handle that case.
    """
    cluster = Cluster(aid, 0.9)
    ids = US.instance().ids()
    dist = DF.instance().create(Cfg.instance().dist())
    active_user = US.instance().get(aid)
    for uid in ids:  # renamed from `id` (shadowed the builtin)
        user = US.instance().get(uid)  # fetch once instead of twice
        # fixed: content is a method (it is called as user.content()
        # elsewhere in this file); `iid in user.content` tested membership
        # on the bound method object and would raise TypeError
        if iid not in user.content():
            continue
        cluster.add(user, dist(user, active_user))
        if cluster.size() >= Cfg.instance().cluster_size():
            return self.pred(cluster, iid)
def __init__(self):
    """Load the full user set, one reader thread per user id."""
    Set.__init__(self)
    reader = RF.instance().create(
        Config.instance().read_source_name(),
        Config.instance().users_tablename())
    self._set = dict()
    workers = []
    # Spawn one RT per user id; each thread fills the shared self._set.
    for uid in reader.ids():
        worker = RT(Config.instance().users_tablename(), uid, self._set)
        worker.start()
        workers.append(worker)
    # Block until every reader has finished before returning.
    for worker in workers:
        worker.join()
def __init__(self):
    """Load the full target set, one reader thread per target id.

    Each RT thread writes its row into the shared self._set dict; all
    threads are joined before __init__ returns.
    """
    Set.__init__(self)
    reader = RF.instance().create(
        Config.instance().read_source_name(),
        Config.instance().targets_tablename())
    self._set = dict()
    # removed: a module logger was created here but never used
    threads = []
    for tid in reader.ids():
        t = RT(Config.instance().targets_tablename(), tid, self._set)
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
def __init__(self):
    """Create the shared psycopg2 connection pool under a lock.

    The pool is sized from 5 up to the configured maximum and built from
    the configured credentials.
    """
    self._pool = None
    self._inited = False
    self._lock = Lock()
    self._con = None
    # `with` releases the lock even if pool creation raises; the original
    # acquire()/release() pair left the lock held on error.
    with self._lock:
        config = Config.instance().config()
        self._pool = psycopg2.pool.ThreadedConnectionPool(
            5, Config.instance().max_conn(),
            user=config["user"],
            password=config["password"],
            host=config["host"],
            port=config["port"],
            database=config["db"])
def run(self):
    """Compute the distance for one (id1, id2) target pair and persist it.

    The distance is written symmetrically (a row under each id) and added
    to the shared distance matrix (TDM).
    """
    df = DF.instance().create(Config.instance().dist())
    logger = logging.getLogger(__name__)  # fixed: was _name_ (NameError)
    if self._set_name == Config.instance().targets_tablename():
        t1 = TS.instance().get(self._id1)
        t2 = TS.instance().get(self._id2)
        logger.debug("d(%s, %s)", t1.id(), t2.id())  # was a bare print()
        d = df(t1, t2)
        if t1.id() == t2.id():
            # fixed: the self-distance was zeroed only AFTER the first
            # write, so the two symmetric rows (and TDM) could disagree
            # for a self-pair
            d = 0
        data = dict()
        data[t2.id()] = d
        t = Target(t1.id(), data)
        w = WF.instance().create(Config.instance().write_source_name(), self._table_name)
        w.write(t)
        data = dict()
        data[t1.id()] = d
        t = Target(t2.id(), data)
        w.write(t)
        TDM.instance().add(t1.id(), t2.id(), d)
def setUp(self):
    """Start the browser, open the base URL and build the page objects."""
    try:
        self.driver = DriverProvider('chrome').driver_provider
        self.config = Config().config
        self.driver.get(self.config['baseURL'])
        self.base_page = BasePage(self.driver)
        self.login_page = LoginPage(self.driver)
    except Exception as e:
        print(e)
        # fixed: if DriverProvider itself failed, self.driver was never
        # assigned and the bare self.driver.quit() raised AttributeError,
        # masking the original exception
        driver = getattr(self, 'driver', None)
        if driver is not None:
            driver.quit()
def decode_auth_token(auth_token):
    """
    Decodes the auth token
    :param auth_token: JWT string issued by encode_auth_token
    :return: (True, user id) on success, (False, error message) on failure
    """
    try:
        # algorithms pinned to HS256 to match encode_auth_token; PyJWT >= 2
        # requires an explicit algorithms list, and leaving it unset
        # permits algorithm-confusion attacks
        payload = jwt.decode(
            auth_token,
            Config.get_config()["SECRET_KEY"],
            algorithms=['HS256'])
        return True, payload['sub']
    except jwt.ExpiredSignatureError:
        logging.info("SIGNATURE EXPIRED")
        return False, 'Signature expired. Please log in again.'
    except jwt.InvalidTokenError:
        logging.info("INVALID TOKEN")
        return False, 'Invalid token. Please log in again.'
def solve_realisation(self, aid):
    """Return up to topN target ids whose Hamming distance to the
    imagined user profile is below 0.2.

    NOTE(review): *aid* is currently unused — the profile comes from the
    "test" imagine factory; confirm that is intentional.
    """
    logger = logging.getLogger(__name__)  # fixed: was created twice
    topn = []
    imagine = IF.instance().create("test")
    user = imagine.do("1")
    # TODO: create items subset with cluster class
    for tid in TS.instance().ids():  # renamed from `id` (shadowed builtin)
        item = TS.instance().get(tid)
        d = Dists.hamming(item, user)
        if d < 0.2:
            logger.debug("append: %s", tid)
            topn.append(tid)
            if len(topn) >= Cfg.instance().topN_number():
                break
    return topn
def setUp(self):
    """Build config, CloudKey and remux fixtures rooted at the test dir."""
    self.path = os.path.dirname(os.path.realpath(__file__))
    self.config = Config(dotenv=os.path.join(self.path, '.env.testing'))
    # Remap config paths so they resolve relative to this test directory
    paths = self.config.paths
    paths.files = os.path.join(self.path, paths.files)
    paths.temp = os.path.join(self.path, paths.temp)
    paths.output = os.path.join(self.path, paths.output)
    self.cloudkey = CloudKey(config=self.config.cloudkey)
    self.remux = UBVRemux(config=self.config.paths, auto_create_tmp=False)
    tmp_bs = _load_boostrap(self.path)
    # Stub out the network bootstrap call while enumerating cameras
    with patch('utilities.cloudkey.CloudKey.get_bootstrap',
               return_value=tmp_bs) as p:  # noqa: F841
        self.cameras = self.cloudkey.get_cameras()
    self.test_data = _load_testdata(self.path)
def encode_auth_token(self):
    """
    Generates the Auth Token
    :return: string
    """
    try:
        payload = {
            # 30-day expiry from issue time.
            # NOTE(review): utcnow() is naive and deprecated on newer
            # Pythons — datetime.now(timezone.utc) would be the modern
            # form; confirm before changing.
            'exp': datetime.utcnow() + timedelta(days=30, seconds=0),
            'iat': datetime.utcnow(),
            # token subject: this object's id attribute
            'sub': self.id
        }
        return jwt.encode(
            payload,
            Config.get_config()["SECRET_KEY"],
            algorithm='HS256'
        )
    except Exception as e:
        # NOTE(review): returns the exception object instead of raising,
        # so callers must type-check the result — confirm callers expect
        # this before changing it.
        return e
def __init__(self):
    """Initialise the bot: config, HTTP session, database and guild cache,
    then start it with the configured token."""
    super().__init__(command_prefix=self.get_server_prefix,
                     case_insensitive=True)
    self.config = Config().get_config()
    self.session = aiohttp.ClientSession(loop=self.loop)
    self.prefix = 'ritsu '
    db = self.config['database']
    self.database_client = DatabaseClient(
        db['user'], db['password'], db['database'], db['host'])
    print('Attempting to connect to database')
    asyncio.get_event_loop().run_until_complete(
        self.database_client.connect())
    print('Connection successful')
    self.cache = GuildCache(self)
    self.run(self.config['main_bot']['token'])
def solve_realisation(self, aid):
    """Return the top-N target ids for user *aid*, ranked by the dot
    product of the user's vector with each target's distance column.

    Targets already in the user's content are skipped.
    """
    logger = logging.getLogger(__name__)  # fixed: was _name_ (NameError)
    if TDM.instance().is_empty():
        self.calc_distances()
    v = self.create_user_vector(aid)
    mul_rslt = dict()
    for tid in self._tids:
        column = TDM.instance().get_column(tid)
        logger.debug("%s column %s %s", tid, column, v)
        mul_rslt[tid] = np.dot(v, column)
    sorted_tids = sorted(mul_rslt.items(), key=lambda x: x[1], reverse=True)
    topn = []
    user = US.instance().get(aid)
    content = user.content()  # hoisted: loop-invariant membership lookup
    # fixed: the original indexed sorted_tids with range(len(v)), which
    # could overrun sorted_tids if the vector length ever differs from the
    # number of targets; iterate the ranked list directly instead
    for tid, _score in sorted_tids:
        if tid in content:
            continue
        topn.append(tid)
        if len(topn) == Cfg.instance().topN_number():
            break
    return topn
def read(self):
    """Load all stored pairwise distances into self._data, keyed by
    (id1, id2) tuples, with a unit diagonal and symmetric entries."""
    reader = RF.instance().create(Config.instance().read_source_name(), self._tname)
    if reader.count() == 0:
        return
    # Read every row concurrently; RT threads fill `contents` in place.
    contents = dict()
    workers = []
    for tid in reader.ids():
        worker = RT(self._tname, tid, contents)
        worker.start()
        workers.append(worker)
    for worker in workers:
        worker.join()
    # Materialise the symmetric matrix from the per-row content dicts.
    for id1 in contents:
        self._data[(id1, id1)] = 1
        row = contents[id1].content()
        for id2 in row:
            d = row[id2]
            self._data[(id1, id2)] = d
            self._data[(id2, id1)] = d
def __init__(self, path: str = "config.yaml"):
    """Load the singleton configuration from *path*.

    urllib3 TLS warnings are silenced first — presumably because the
    target endpoint uses a self-signed certificate (TODO confirm).
    """
    urllib3.disable_warnings()
    self._config = Config.get_instance(path)
from logging.handlers import RotatingFileHandler # tmpl_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../frontend/templates') # static_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../static') # app = Flask(__name__, template_folder=tmpl_dir, static_folder=static_dir) from services.user_service import verify_user_token from utilities.config import Config application = Flask(__name__) app = application app.url_map.strict_slashes = False CORS(app) Config.load_config() app.register_blueprint(user_api) app.register_blueprint(list_api) @auth.verify_token def verify_token(token): app.logger.info(token) success, user = verify_user_token(token) if success: g.current_user = user return success @app.errorhandler(InvalidUsage)
def __init__(self, id):
    """Worker thread that registers a PostgreSQL test writer.

    :param id: thread identifier; stored as a string name.
    """
    Thread.__init__(self)
    # NOTE(review): parameter `id` shadows the builtin, but renaming it
    # would change the keyword-argument interface, so it is left as-is.
    self.__name = str(id)
    # register a PsqlWriter for the "test" table under the "psql_test" key
    WF.instance().register("psql_test", PW(Config.instance().config(), "test"))
# --- project imports ------------------------------------------------------
from input_output.postgresql_writer import PsqlWriter as PW
from utilities.config import Config as Config
from core.solver import SF as SF
from core.item_based_topn import ItemBasedTopn as IBS
from math_model.distance_factory import DistanceFactory as DF
from math_model.distances import Distances as Distances
from math_model.agent import Agent as Agent
from core.fuzz_topn import FuzzTopn as FT
from math_model.imagine_factory import ImagineFactory as IF
# NOTE(review): ItemBasedTopn is imported twice under two aliases
# (IBS above and IbTopn here) — presumably one is legacy; confirm.
from core.item_based_topn import ItemBasedTopn as IbTopn
from core.test_imagine_factory import TestImagine as TI
from core.ub_pred import UbPred as UP
from core.fuzz_pred import FuzzPred as FP

# Register one PostgreSQL reader per table under the key
# "<read_source_name>_<tablename>" so the factories can look them up later.
RF.instance().register(
    Config.instance().read_source_name() + "_" + Config.instance().targets_tablename(),
    PR(Config.instance().config(), Config.instance().targets_tablename()))
RF.instance().register(
    Config.instance().read_source_name() + "_" + Config.instance().users_tablename(),
    PR(Config.instance().config(), Config.instance().users_tablename()))
RF.instance().register(
    Config.instance().read_source_name() + "_" + Config.instance().targets_dists_tablename(),
    PR(Config.instance().config(), Config.instance().targets_dists_tablename()))
# NOTE: this writer registration continues past the end of this chunk.
WF.instance().register(
    Config.instance().write_source_name() + "_" +
def __init__(self):
    """Distance matrix bound to the configured targets-distances table."""
    DM.__init__(self, Config.instance().targets_dists_tablename())
# "--no-cleanup", "-nc", # type=str2bool, nargs='?', const=True, default=False, # help="Do not remove UBV files after processing." # ) return parser if __name__ == "__main__": # Parse Arguments p = parse_arguments() args = p.parse_args() print(args) # Handle the default environment config if args.environment: if os.path.exists(args.environment): config = Config(dotenv=args.environment) else: raise FileNotFoundError( f"Cannot locate .env file at {args.environment}") sys.exit(1) else: config = Config(dotenv='.env') # Create the logging object logger = logging.getLogger() logging_handlers = [] # Add the stdout logging_handlers.append(logging.StreamHandler(sys.stdout)) if config.logs.format: log_format = config.logs.format else: log_format = '[%(asctime)s] {%(filename)s:%(lineno)d} ' \
def setUp(self):
    """Create a CloudKey client from the test-local .env.testing config."""
    self.path = os.path.dirname(os.path.realpath(__file__))
    env_file = os.path.join(self.path, '.env.testing')
    self.config = Config(dotenv=env_file)
    self.cloudkey = CloudKey(config=self.config.cloudkey)
# Model hyper-parameters: vocabulary sizes come from the prepared
# vocabularies; hidden dims and dropout are fixed for this run.
model_config = ModelConfig(
    subtoken_embedding_vocab_size=len(vocabularies.subtoken_to_index),
    ast_embedding_vocab_size=len(vocabularies.node_to_index),
    decoder_embedding_vocab_size=len(vocabularies.target_to_index),
    decoder_hidden_dim=256,
    encoder_hidden_dim=256,
    decoder_dropout=0.5,
    encoder_dropout=0.5,
)
general_params = GeneralParams(batch_size=128, target_length=4, max_contexts=100)
config = Config(
    vocabularies,
    special_characters=special_characters,
    model_config=model_config,
    general_params=general_params,
)
# fixed: use_cuda was hard-coded to True, which fails later on CPU-only
# hosts; detect CUDA availability and fall back to CPU instead
use_cuda = torch.cuda.is_available()
device = torch.device("cuda" if use_cuda else "cpu")
dataset = C2SDataSet.fromConfig(train_file_path, config)
dataloader = DataLoader(dataset,
                        batch_size=config.general_params.batch_size,
                        pin_memory=True,
                        shuffle=True)
n_examples = len(dataset)
def gt(a, b):
    """Compare *a* and *b* honouring the configured comparison order:
    "straight" yields plain a > b; any other setting inverts the result."""
    straight = Cfg.instance().cmp_order() == "straight"
    result = a > b
    return result if straight else not result