def __init__(self, name, **kwargs):
    super().__init__(name=name, **kwargs)
    if type(self.data) is dict:
        self.data = Mapper(self.data)
    if type(self.location) is not str:
        self.location = Mapper(self.location)(kwargs)
    try:
        es = Elasticsearch(
            **self.location,
            request_timeout=0.2,
            retries=False,
            ignore=404)
        # TODO url=self.location, ssl_context, http_auth
        es.info()
        self.location = es
    except ImproperlyConfigured as e:
        raise NotFound("ElasticSearch rejected {}\n-----\n\t{}".format(
            pformat(self.location), e))
    except TransportError as e:
        raise NotFound(
            "Failed to reach ElasticSearch at {}\n-----\n\t{}".format(
                pformat(self.location), e.error))
    except:
        raise NotFound(
            "Unable to connect to ElasticSearch at host:{}".format(
                self.location.get('host')))

def __init__(self, lcg, map_size):
    self.current_player_index = None
    self.game_started = False
    self._players = []
    self.lcg = lcg
    self.handlers = Handlers(self)
    self.mapper = Mapper(self, *map_size)

def setUp(self):
    # TODO: we need prf_map, prf_imgs and prf_eth_map
    self.rtfparser = RTF_Parser()
    shutil.copyfile("newganmanager/.user/default_cfg.json",
                    "newganmanager/testing/.user/cfg.json")
    self.pm = Profile_Manager("No Profile", "newganmanager/testing")
    self.mapper = Mapper("newganmanager/test/", self.pm)
    self.pm.prf_cfg["img_dir"] = "newganmanager/test/"
    # data: UID, first_nat, sec_nat, eth-code
    self.data_simple = self.rtfparser.parse_rtf(
        "newganmanager/test/test_simple.rtf")
    self.data_all_cases = self.rtfparser.parse_rtf(
        "newganmanager/test/test_allcases.rtf")
    self.data_subset1 = self.rtfparser.parse_rtf(
        "newganmanager/test/allcases_subset1.rtf")
    self.data_subset2 = self.rtfparser.parse_rtf(
        "newganmanager/test/allcases_subset2.rtf")
    self.data_exclusive = self.rtfparser.parse_rtf(
        "newganmanager/test/test_exclusive.rtf")
    for eth in [
            "African", "Asian", "EECA", "Italmed", "SAMed",
            "South American", "SpanMed", "YugoGreek", "MENA", "MESA",
            "Caucasian", "Central European", "Scandinavian", "Seasian"
    ]:
        eth_names = [eth + str(i) for i in range(20)]
        self.mapper.eth_map[eth] = set(eth_names)

def cards_to_database(database, cards_file):
    """Put the data in cards_file into database."""
    mapper = Mapper(configuration.map_file)
    conn = sqlite3.connect(database)
    cursor = conn.cursor()
    cursor.execute("DROP TABLE IF EXISTS cards")
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS cards (posid TEXT, time INTEGER, statid TEXT)
        """)
    cursor.execute("CREATE INDEX time_index ON cards(time)")
    cursor.execute("CREATE INDEX statid_index ON cards(statid)")
    cursor.execute("CREATE INDEX posid_index ON cards(posid)")
    with open(cards_file, 'r') as reader:
        for line in reader:
            parts = line.strip().split(',')
            assert len(parts) == 15
            if not mapper.has_statid(parts[9]):
                continue
            if parts[5].count(':') == 1:
                parts[5] = parts[5] + ":00"
            parts[5] = datetime.strptime(parts[5], "%Y/%m/%d %H:%M:%S")
            parts[5] = calendar.timegm(parts[5].utctimetuple())
            cursor.execute("INSERT INTO cards VALUES (?, ?, ?)",
                           (parts[3], parts[5], parts[9]))
    cursor.close()
    conn.commit()
    conn.close()

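# Hypothetical input line for cards_to_database (illustrative only; the real
# layout of the fields other than indexes 3, 5 and 9 is not shown in this code):
# 15 comma-separated fields, with posid at index 3, a "YYYY/MM/DD HH:MM[:SS]"
# timestamp at index 5, and statid at index 9, e.g.
#   ",,,POS001,,2020/01/02 08:15,,,,STAT42,,,,,"
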
def get_games_info(options, games, steamgames, winedb):
    styledprint.print_info_begin('Pulling Games Information')
    CACHE_PATH = os.path.join('cache', 'games.p')
    cache = Cache(CACHE_PATH)
    cachedgames = cache.load_from_cache()
    cleansteamgames = utils.DictCaseInsensitive()
    cleanwinedb = utils.DictCaseInsensitive()
    if options.fuzzymatching:
        parallelism.split_submit_job(steamgames, cleansteamgames, clean_names)
        parallelism.split_submit_job(winedb, cleanwinedb, clean_names)
    URLS_MAPPING = os.path.join('mappings', 'urlsmapping.txt')
    urlsmapping = Mapper(URLS_MAPPING)
    parallelism.split_submit_job(games, games, start_loop, options,
                                 cachedgames, steamgames, winedb,
                                 cleansteamgames, cleanwinedb, urlsmapping,
                                 parallelism.get_number_of_cores())
    if not options.dryrun:
        newcachedgames = cache.merge_old_new_cache(cachedgames, games)
        cache.save_to_cache(newcachedgames)
        urlsmapping.save_mapping()
    styledprint.print_info_end('Pulling Games Information Done')

def map(website):
    print(website)
    websites = json.load(open("websites.json", "r"))
    if website not in websites:
        websites.append(website)
        json.dump(websites, open("websites.json", "w"))
        tmp_website = "https://" + website
        try:
            website = requests.get(tmp_website).url
        except requests.exceptions.SSLError:
            website = requests.get("http://" + website).url
        finally:
            if not website:
                return "failure"
        m = Mapper(website)
        print("Started scraping links for " + website)
        call([
            "python", "worker.py",
            pickle.dumps(m), website,
            str(1),
            pickle.dumps([])
        ])
        return "success"
    return "exists"

def runMr():
    had.data()
    if request.method == 'GET':
        out = had.showData('/')
        createMR(out)
        return render_template('mr.html', name='Dagen')
    header = request.form['header']
    files = request.form['files']
    event = request.form['event']
    mp = Mapper()
    # compare the three-character extension, not a single character
    if files[-3:] == 'csv':
        ma = mp.mapCsv(files, header)
    elif files[-3:] == 'xes':
        ma = mp.mapXes(files, header)
    with open("/src/src/templates/res.html", 'w') as out:
        out.write("""<html>
<body>
<table>""")
        for entry in ma:
            out.write("<tr>")
            out.write("<td>" + entry + "</td>")
            out.write("</tr>")
        out.write("""</table>
</body>
</html>
""")
    return render_template('res.html', name='Dagen')

def __init__(self, config=None):
    """Initialise the Connector and start listening to incoming messages.

    :param config: Configuration to use (default config if None).
    """
    self.log = logging.getLogger(self.__class__.__name__)
    if config is None:
        config = ConnConfig()
        self.log.info("Falling back to default configuration.")
    # errors up to here are allowed to terminate the program
    mappings = self._read_mappings(config.mappings)
    self.mapper = Mapper(mappings)
    self.log.debug("Mappings read.")
    self.sender = Sender(config.send.address, config.send.port,
                         config.broker.endpoint_prefix + config.connector_id,
                         config.broker.topic, config.connector_id)
    self.log.info("Sender created.")
    self.receiver = Receiver(self.RECEIVER_NAME, config.listen.address,
                             config.listen.port)
    self.log.info("Receiver created.")
    self.receiver.listen("/", self.handle_receive)

def testFailureWithBrokerUnconformData(self):
    """Test failure when arguments are not convertible to broker types."""
    mapper = Mapper([self.VALID_MAPPING_PLAIN])

    broken = deepcopy(self.VALID_INPUT_PLAIN)
    broken["timestamp"] = "asdkfasdf"
    self.assertIsNone(mapper.transform(broken))

    broken = deepcopy(self.VALID_INPUT_PLAIN)
    broken["ipv4"] = "127.0.0.999"
    self.assertIsNone(mapper.transform(broken))

    broken = deepcopy(self.VALID_INPUT_PLAIN)
    broken["ipv6"] = "2001:0:509c:564e:34ae:3a9a:3f57:fd91:9999"
    self.assertIsNone(mapper.transform(broken))

    broken = deepcopy(self.VALID_INPUT_PLAIN)
    broken["port"] = 9999999  # out of range
    self.assertIsNone(mapper.transform(broken))
    broken["port"] = "foo"
    self.assertIsNone(mapper.transform(broken))

    broken = deepcopy(self.VALID_INPUT_PLAIN)
    broken["string"] = {}  # cannot be cast to string
    self.assertIsNone(mapper.transform(broken))

    broken = deepcopy(self.VALID_INPUT_PLAIN)
    broken["number"] = "abc"
    self.assertIsNone(mapper.transform(broken))

    broken = deepcopy(self.VALID_INPUT_PLAIN)
    broken["array"] = 123
    self.assertIsNone(mapper.transform(broken))

def __init__(self): """Docstring.""" self.gameScreen = pygame.Surface(settings.SCREEN_SIZE) self.world = Mapper() self.turnTime = settings.TURN_TIME self.controller = Controller() self.running = False
def run(self, config_json, to_file=True):
    self.create_dirs()
    results = None
    try:
        config = Config(config_json, self.database_config)
        source_sparql = SPARQL(config, 'source')
        target_sparql = SPARQL(config, 'target')
        info_logger = InfoLogger(
            'InfoLogger', '{}_{}'.format(source_sparql.get_query_hash(),
                                         target_sparql.get_query_hash()))
        source_cache = Cache(info_logger, config, source_sparql, 'source')
        source_cache.create_cache()
        target_cache = Cache(info_logger, config, target_sparql, 'target')
        target_cache.create_cache()
        mapper = Mapper(info_logger, config, source_sparql, target_sparql)
        results = mapper.map(to_file)
    except ConfigNotValidError as e:
        results = "Config not valid"
        print(e)
    except HTTPError as e:
        print(e)
    except JSONDecodeError as e:
        print(e)
    return results

def __init__(self, connection_config):
    self.connection_config = connection_config
    provider_name = self.connection_config['cloud_provider_name']
    provider_name = transfer_cloud_provider_name(provider_name)
    from mapper import Mapper
    self.mapper = Mapper(provider_name)
    self.driver = self.mapper.connect(self.connection_config)

def __init__(self):
    # may want to enhance this with a pre-load file to prepopulate the DB
    self.db = {}  # dictionary of DNSClassifierEntrys
    self.mapper = Mapper()
    self.new_callbacks = []     # for each new entry
    self.update_callbacks = []  # for each time an entry is updated
    self.all_callbacks = []     # when entry is updated or new
    self.class_callbacks = {}   # dictionary of lists of callbacks per

def __init__(self, provider_config=None, is_verbose_output=False):
    super(ProviderManager, self).__init__(provider_config,
                                          is_verbose_output)
    provider_name = provider_config['connection']['cloud_provider_name']
    provider_name = transfer_cloud_provider_name(provider_name)
    from mapper import Mapper
    self.mapper = Mapper(provider_name)

def test_given_an_image_with_no_points_a_point_map_returned(self):
    img = cv2.imread(
        os.path.join(self.test_data_path, 'SimpleTestImage5.png'), 1)
    expected = [-1 for i in range(0, 20)]
    colour = [255, 255, 255]
    threshold = 0
    mapper = Mapper(colour, threshold)
    actual = mapper.get_points(img)
    self.assertEqual(expected, actual)

def __init__(self, fs, name, addr, opts):
    self.addr = addr
    self.jt_addr = opts["jt_addr"]
    self.jt = ServerProxy(self.jt_addr)
    self.hb_timeout = 0.2  # heartbeat timeout in seconds
    self.on = True
    self.mapper = Mapper(opts, fs, "map" + name, addr)
    self.reducer = Reducer(fs, "reduce" + name, addr, opts, RPCMapperClient())

def __init__(self, dirname, dict_path, channel=1):
    self.dirname = dirname
    self.channel = channel
    self.filelists = [
        k for k in fs.recursive_walk(self.dirname) if k.endswith('.png')
    ]
    logger.info("Found {} png files ...".format(len(self.filelists)))
    self.mapper = Mapper(dict_path)

def test_given_a_threshold_items_in_threshold_work_for_blue(self):
    img = cv2.imread(
        os.path.join(self.test_data_path, 'BlueThresholdTest.png'), 1)
    threshold = 20
    expected = [0, 0, 0, -1, -1]
    colour = [128, 128, 128]
    mapper = Mapper(colour, threshold)
    actual = mapper.get_points(img)
    self.assertEqual(expected, actual)

def make_stats_mapper(periods):
    period_mappers = {period: Mapper() for period in periods}
    mapper = Mapper()
    for period, period_mapper in period_mappers.items():
        for threshold in THRESHOLDS:
            period_mapper.project_list(
                'thresholds', make_threshold_mapper(period, threshold))
        period_mapper.project_one('errorLevels', ['errors_' + period])
        for conversion in CONVERSIONS:
            period_mapper.project_list(
                'conversions', make_conversion_mapper(period, conversion))
        mapper.project_one(period, period_mapper)
    return mapper

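# Hypothetical usage sketch (the period names below are assumptions, not taken
# from the source): make_stats_mapper builds one nested Mapper per period, each
# projecting the THRESHOLDS, an 'errorLevels' field from 'errors_<period>', and
# the CONVERSIONS, then nests it under that period's key.
stats_mapper = make_stats_mapper(['day', 'week'])
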
def test_given_an_colour_image_and_specific_colour_a_point_map_returned(
        self):
    img = cv2.imread(
        os.path.join(self.test_data_path, 'SimpleTestImage2.png'), 1)
    expected = [i for i in range(0, 20)]
    colour = [255, 128, 0]
    threshold = 0
    mapper = Mapper(colour, threshold)
    actual = mapper.get_points(img)
    self.assertEqual(expected, actual)

def make_indicator_mapper(period, indicator):
    mapper = Mapper()
    mapper.prop('type', indicator)
    value_key = indicator
    if indicator not in NON_PREFIXED_INDICATORS:
        value_key = '_'.join([indicator, period])
    mapper.project_one('value', make_data_path(value_key))
    return mapper

def __init__(self, **kwargs):
    if not hasattr(self, 'location'):
        super().__init__(**kwargs)
    self.location = Mapper(self.location)
    self.data = Mapper(self.data)
    self.count_reset_func = kwargs.get('count_reset_func')
    self.query_limit = kwargs.get('query_limit')
    if self.query_limit:
        self.count = 0
        if self.count_reset_func and not callable(self.count_reset_func):
            raise Exception(
                '{} count_reset_func must be a function that resets the '
                'count'.format(self.name))
        elif type(self.query_limit) is not int:
            raise Exception(
                '{} query_limit must be an integer, the maximum number of '
                'allowed queries'.format(self.name))
        else:
            from types import MethodType
            self.count_reset_func = MethodType(self.count_reset_func, self)

def createMapperTwo(self):
    mapperTwo = Mapper(5002 + (self.num - 1) * 10, self.ip, 2)
    mapperTwo.logging(False)
    mapperTwo.log('Starting Up')
    # execution code goes here
    mapperTwo.listen()
    # exiting
    mapperTwo.log('Exiting')

def createMapperOne(self):
    mapperOne = Mapper(5001 + (self.num - 1) * 10, self.ip, 1)
    mapperOne.logging(False)
    mapperOne.log('Starting Up')
    # execution code goes here
    mapperOne.listen()
    # exiting
    mapperOne.log('Exiting')

def make_threshold_mapper(period, threshold):
    mapper = Mapper()
    mapper.prop('type', threshold.get('type'))
    mapper.prop('dangerValue', threshold.get('danger_value'))
    value_key = '_'.join([threshold.get('prefix'), period])
    mapper.project_one('value', make_data_path(value_key))
    return mapper

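# Minimal sketch of the threshold dict make_threshold_mapper expects (the
# concrete values are assumptions for illustration, not taken from THRESHOLDS
# in the source): it must carry the 'type', 'danger_value' and 'prefix' keys
# read above, and the projected value comes from the '<prefix>_<period>' path.
example_threshold = {'type': 'errorRate', 'danger_value': 0.05,
                     'prefix': 'errors'}
weekly_threshold_mapper = make_threshold_mapper('week', example_threshold)
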
def __init__(self, **params):
    pb = {
        'blocksize': params['block_size'],
        'hashtype': params['hash_algorithm'],
        'archipelago_cfile': params['archipelago_cfile'],
    }
    self.blocker = Blocker(**pb)
    pm = {
        'namelen': self.blocker.hashlen,
        'archipelago_cfile': params['archipelago_cfile'],
    }
    self.mapper = Mapper(**pm)

def test_a_threshold_can_be_changed(self):
    img = cv2.imread(
        os.path.join(self.test_data_path, 'GreenThresholdTest.png'), 1)
    initial_threshold = 20
    new_threshold = 21
    expected = [0, 0, 0, 0, 0]
    colour = [128, 128, 128]
    mapper = Mapper(colour, initial_threshold)
    mapper.set_threshold(new_threshold)
    actual = mapper.get_points(img)
    self.assertEqual(expected, actual)

def testNestedMapSuccess(self, mapper=Mapper([VALID_MAPPING_NESTED])):
    """Test the default successful scenario."""
    expect = map(pb.data, (
        self.VALID_MAPPING_NESTED.get('name'),
        TestMapper._validate_map_time(self.TEST_DATE_STRING),
        TestMapper._validate_map_addr(self.TEST_IPV4),
        TestMapper._validate_map_addr(self.TEST_IPV6),
        self.TEST_PORT,
        self.TEST_STRING,
        self.TEST_NUMBER,
        TestMapper._validate_map_array(self.TEST_ARRAY)))
    self._compare_messages(mapper, expect, self.VALID_INPUT_NESTED)

def load_video(file, feature_type="sift"):
    cap = cv.VideoCapture(file)
    map = Mapper("orb")
    ret, frame = cap.read()
    M = map.proccess_frame(frame)
    h, w, d = frame.shape
    out = cv.VideoWriter('images/outpy.avi',
                         cv.VideoWriter_fourcc('M', 'J', 'P', 'G'),
                         cap.get(cv.CAP_PROP_FPS),
                         (w, h))
    failes = 0
    i = 0
    frame2 = 0
    max_t = 0
    while failes < 4:
        s = time.time()
        i += 1
        pframe = frame
        ret, frame = cap.read()
        if not ret:
            break
        M = map.proccess_frame(frame)
        if M is None:
            print("failllllll")
            failes += 1
            out.write(pframe)
            continue
        h, w, d = frame.shape
        # project the frame's corners onto the map with the homography M
        # and draw the resulting outline on a copy of the map
        pts = np.float32([[0, 0], [0, h - 1], [w - 1, h - 1],
                          [w - 1, 0]]).reshape(-1, 1, 2)
        dst = cv.perspectiveTransform(pts, M)
        img = copy.deepcopy(map.get_map())
        for line in range(4):
            cv.line(img, tuple(dst[line][0]),
                    tuple(dst[(line + 1) % 4][0]), (255, 0, 0), 5)
        failes = 0
        print(i)
        out.write(img)
        e = time.time()
        if e - s > max_t:
            max_t = e - s
        print("pip time is " + str(e - s))
        print("max t is: " + str(max_t))
    cap.release()
    out.release()

def __init__(self, train_or_test, shuffle=True):
    assert train_or_test in ['train', 'test']
    fname_list = cfg.train_list if train_or_test == "train" else cfg.test_list
    self.train_or_test = train_or_test
    fname_list = [fname_list] if type(fname_list) is not list else fname_list
    self.imglist = []
    for fname in fname_list:
        self.imglist.extend(get_imglist(fname))
    self.shuffle = shuffle
    self.mapper = Mapper()