def cmap_dump(self):
    """Dump the font's unicode cmap as two parallel binary arrays.

    Writes into ``self.folder``:
      - ``codepoints``: big-endian unsigned 32-bit unicode codepoints
      - ``gids``: big-endian unsigned 16-bit glyph ids, parallel to the above
    """
    font = TTFont(self.fontfile)
    # TODO(ahmetcelik) cmap in format 12 should be used if it exists
    cmap_table = font['cmap'].getcmap(3, 10)
    if not cmap_table:
        cmap_table = font['cmap'].getcmap(3, 1)
    assert cmap_table, 'Unicode cmap table required'
    cmap = cmap_table.cmap  # unicode table
    codepoints = []
    glyphs = []
    # FIX: dict.iteritems() and the print statement are Python-2-only; this
    # file also contains `await`, so it must run on Python 3. Also avoid
    # shadowing the builtin id().
    for code, name in cmap.items():
        gid = font.getGlyphID(name)
        glyphs.append(gid)
        codepoints.append(code)
        if self.debug:
            print(gid, name, code)
    font.close()
    cp_dumper = Dumper(self.folder + '/codepoints')
    cp_dumper.dump_array(codepoints, 'I', '>')
    cp_dumper.close()
    gid_dumper = Dumper(self.folder + '/gids')
    gid_dumper.dump_array(glyphs, 'H', '>')
    gid_dumper.close()
def serialize_TOC(self, output_idx, output_data):
    """Write the TOC index and the TOC entry data to a pair of files."""
    # TODO(bstell) remove this is not used anywhere
    if not self.tocReady:
        return
    idx_dumper = Dumper(output_idx)
    idx_dumper.dump(self.TOC)
    idx_dumper.close()
    data_dumper = Dumper(output_data)
    data_dumper.dumpForEach(self.TOCEntries)
    data_dumper.close()
def serialize_cff(self, output_idx, output_data):
    """Write the CFF table plus glyph index, and the glyph data, to files."""
    if not self.cffReady:
        return
    idx_dumper = Dumper(output_idx)
    idx_dumper.dump(self.CffTable)
    idx_dumper.dump_for_each(self.glyphs_info)
    idx_dumper.close()
    data_dumper = Dumper(output_data)
    data_dumper.dump_for_each(self.glyphs_data)
    data_dumper.close()
async def test_dump_msg_entities(self):
    """Show that entities are correctly parsed and stored.

    FIX: the body uses ``await``, which is a SyntaxError inside a plain
    ``def``; the method must be declared ``async def``.
    """
    message = types.Message(
        id=1,
        to_id=types.PeerUser(321),
        date=datetime.now(),
        message='No entities'
    )
    dumper = Dumper(self.dumper_config)
    fmt = BaseFormatter(dumper.conn)

    # Test with no entities
    await dumper.dump_message(message, 123, None, None)
    dumper.commit()
    assert not next(fmt.get_messages_from_context(123, order='DESC')).formatting

    # Test with many entities
    text, entities = markdown.parse(
        'Testing message with __italic__, **bold**, inline '
        '[links](https://example.com) and [mentions](@hi), '
        'as well as `code` and ``pre`` blocks.'
    )
    entities[3] = types.MessageEntityMentionName(
        entities[3].offset, entities[3].length, 123
    )
    message.id = 2
    message.date -= timedelta(days=1)
    message.message = text
    message.entities = entities
    await dumper.dump_message(message, 123, None, None)
    dumper.commit()
    msg = next(fmt.get_messages_from_context(123, order='ASC'))
    assert utils.decode_msg_entities(msg.formatting) == message.entities
async def test_formatter_get_chat(self):
    """
    Ensures that the BaseFormatter is able to fetch the expected
    entities when using a date parameter.

    FIX: declared ``async def`` because the body awaits ``dump_chat``;
    ``await`` inside a plain ``def`` is a SyntaxError.
    """
    chat = types.Chat(
        id=123,
        title='Some title',
        photo=types.ChatPhotoEmpty(),
        participants_count=7,
        date=datetime.now(),
        version=1
    )
    dumper = Dumper(self.dumper_config)
    fmt = BaseFormatter(dumper.conn)
    # Store one snapshot of the chat for the first day of each month of 2010.
    for month in range(1, 13):
        await dumper.dump_chat(chat, None, timestamp=int(datetime(
            year=2010, month=month, day=1
        ).timestamp()))
    dumper.commit()
    cid = tl_utils.get_peer_id(chat)
    # Default should get the most recent version
    date = fmt.get_chat(cid).date_updated
    assert date == datetime(year=2010, month=12, day=1)

    # Expected behaviour is to get the previous available date
    target = datetime(year=2010, month=6, day=29)
    date = fmt.get_chat(cid, target).date_updated
    assert date == datetime(year=2010, month=6, day=1)

    # Expected behaviour is to get the next date if previous unavailable
    target = datetime(year=2009, month=12, day=1)
    date = fmt.get_chat(cid, target).date_updated
    assert date == datetime(year=2010, month=1, day=1)
def start():
    """Run forever: send backups through the bot, then sleep until the
    next interval.

    FIX: the event loop was re-fetched on every iteration; it is
    loop-invariant, so fetch it once before entering the loop.
    """
    bot = Bot(token=config.bot_token)
    dumper = Dumper(config.backups_path, db.get_connection())
    loop = asyncio.get_event_loop()
    while True:
        loop.run_until_complete(send_backups(bot, dumper))
        # Blocking sleep between backup rounds (interval configured in hours).
        time.sleep(config.backup_interval_hour * 60 * 60)
def main():
    """Exercise the ARP/ifconfig dump helpers and start scanning."""
    dump = Dumper()
    dump.arpDump()
    # BUG FIX: the original called `dumper.readARPDump()` and
    # `dumper.readIFConfigDump()`, but no name `dumper` exists in this
    # scope (the instance is `dump`) -- that raised NameError at runtime.
    print('\n Reading ARPDump: ' + dump.readARPDump())
    dump.arpDump()
    dump.ipConfigDump('wlan0')
    print('\n Reading IFConfigDump: ' + dump.readIFConfigDump())
    startScanning()
    dump.arpDump()
def main():
    """CLI entry point: list installed apps or dump a decrypted ipa."""
    parser = argparse.ArgumentParser(description='frida-ios-dump')
    parser.add_argument('-host', dest='ssh_host', help='SSH Host')
    parser.add_argument('-port', dest='ssh_port', help='SSH Port')
    parser.add_argument('-username', dest='ssh_username', help='SSH Username')
    parser.add_argument('-password', dest='ssh_password', help='SSH Password')
    parser.add_argument('-list', dest='list_applications',
                        action='store_true', help='List the installed apps')
    parser.add_argument(
        '-dump', dest='dump_ipa',
        help='Bundle identifier or display name of the app to dump')
    parser.add_argument('-out', dest='output_directory',
                        help='Destination of the resulting ipa file')
    opts = parser.parse_args()

    # Require at least one action.
    if not (opts.dump_ipa or opts.list_applications):
        parser.print_help()
        return False

    # Find connected iPhone.
    device = get_device()
    if device is None:
        print('Unable to find a connected usb device with Frida.')
        return False
    print('Connected to Frida on device \'%s\'.' % device.name)

    # List applications on device.
    if opts.list_applications:
        return list_applications.execute(device)

    # Otherwise -dump was given: pull the decrypted ipa from the device.
    dumper = Dumper(device, opts.output_directory)
    if not dumper.connect_ssh(opts.ssh_host, opts.ssh_port,
                              opts.ssh_username, opts.ssh_password):
        return False
    if not dumper.launch_app(opts.dump_ipa):
        return False
    return dumper.execute()
def test_dumping(self):
    """A dump must produce at least one path and at least one file in the
    backups directory."""
    dump_handler = Dumper(config.backups_path, db.get_connection())
    dumps = dump_handler.dump()
    # FIX: assertTrue(cond, msg) is the idiomatic form of
    # assertEqual(True, cond, msg) and yields clearer failure output.
    self.assertTrue(len(dumps) > 0, 'Dumps paths list is empty')
    self.assertTrue(
        len(os.listdir(config.backups_path)) > 0,
        '%s dir must be contains backups' % config.backups_path
    )
    dump_handler.clear()
def dump_closure_map(fontfile, outputfolder):
    """Compute the closure of every glyph in the font and write it out.

    Produces two files in `outputfolder`: 'closure_idx' (per-glyph
    offset/size records locating the closure in the data file) and
    'closure_data' (the concatenated closure lists).
    """
    font = TTFont(fontfile)
    closurer = ClosureTaker(font)

    glyph_metadata = Dumper(outputfolder + '/closure_idx')
    glyph_data = Dumper(outputfolder + '/closure_data')

    # Binary layout: big-endian signed-long offset plus unsigned-short
    # size per glyph in the index; unsigned-short glyph ids in the data.
    big_endian = '>'
    fmt_offset = '>l'
    fmt_size = '>H'
    fmt_elem = 'H'
    elem_size = struct.calcsize(fmt_elem)

    offset = 0
    for glyph_name in font.getGlyphOrder():
        closurer.clear()
        closurer.add_glyph_names([glyph_name])
        closure_ids = closurer.closure()
        gid = closurer.glyph_name_to_id[glyph_name]
        if len(closure_ids) == 1 and gid in closure_ids:
            # Closure is only the glyph itself: recording not needed,
            # mark it with offset -1 / size 0.
            glyph_metadata.dump_fmt(-1, fmt_offset)
            glyph_metadata.dump_fmt(0, fmt_size)
        else:
            size = elem_size * len(closure_ids)
            glyph_data.dump_array(closure_ids, fmt_elem, big_endian)
            glyph_metadata.dump_fmt(offset, fmt_offset)
            glyph_metadata.dump_fmt(size, fmt_size)
            offset += size

    font.close()
    glyph_data.close()
    glyph_metadata.close()
def metadata(self):
    """Gather glyph count and metric-table info; dump it to '<folder>/metadata'."""
    output = self.folder + '/metadata'
    font = TTFont(self.fontfile)
    info = {
        'numGlyphs': font['maxp'].numGlyphs,
        'has_hmtx': False,
        'has_vmtx': False,
    }
    if 'hmtx' in font:
        info['has_hmtx'] = True
        info['numberOfHMetrics'] = len(font['hmtx'].metrics)
    if 'vmtx' in font:
        info['has_vmtx'] = True
        info['numberOfVMetrics'] = len(font['vmtx'].metrics)
    dumper = Dumper(output)
    dumper.dumpObject(info)
    dumper.close()
async def test_formatter_get_messages(self):
    """
    Ensures that the BaseFormatter is able to correctly yield messages.

    FIX: declared ``async def`` because the body awaits ``dump_message``;
    ``await`` inside a plain ``def`` is a SyntaxError.
    """
    dumper = Dumper(self.dumper_config)
    msg = types.Message(
        id=1,
        to_id=123,
        date=datetime(year=2010, month=1, day=1),
        message='hi'
    )
    # One message per day for a year, alternating between two context IDs.
    for _ in range(365):
        await dumper.dump_message(msg, 123, forward_id=None, media_id=None)
        msg.id += 1
        msg.date += timedelta(days=1)
        msg.to_id = 300 - msg.to_id  # Flip between two IDs
    dumper.commit()

    fmt = BaseFormatter(dumper.conn)
    # Assert all messages are returned
    assert len(list(fmt.get_messages_from_context(123))) == 365

    # Assert only messages after a date are returned
    min_date = datetime(year=2010, month=4, day=1)
    assert all(m.date >= min_date for m in fmt.get_messages_from_context(
        123, start_date=min_date
    ))

    # Assert only messages before a date are returned
    max_date = datetime(year=2010, month=4, day=1)
    assert all(m.date <= max_date for m in fmt.get_messages_from_context(
        123, end_date=max_date
    ))

    # Assert messages are returned in a range
    assert all(min_date <= m.date <= max_date
               for m in fmt.get_messages_from_context(
                   123, start_date=min_date, end_date=max_date
               ))

    # Assert messages are returned in the correct order
    desc = list(fmt.get_messages_from_context(123, order='DESC'))
    assert all(desc[i - 1] > desc[i] for i in range(1, len(desc)))

    asc = list(fmt.get_messages_from_context(123, order='ASC'))
    assert all(asc[i - 1] < asc[i] for i in range(1, len(asc)))
def setUpClass(cls):
    """Prepare the shared dumper config, working directory, and a
    logged-in Telegram client for the whole test class."""
    cls.dumper_config = {'DBFileName': 'test_db',
                         'OutputDirectory': 'test_work_dir',
                         'MaxSize': 0}
    # TODO test with different configurations
    out_dir = Path(cls.dumper_config['OutputDirectory'])
    assert not out_dir.exists()
    out_dir.mkdir()
    config = configparser.ConfigParser()
    config.read('config.ini')
    config = config['TelegramAPI']
    cls.client = TelegramClient(None, config['ApiId'], config['ApiHash'])
    login_client(cls.client, gen_username(10))
    dumper = Dumper(cls.dumper_config)
    dumper.check_self_user(cls.client.get_me().id)
def __init__(self):
    """Set up the hardware managers and log the start of a config session."""
    # create the filer, light manager, button manager, and dumper
    self.filer = Filer()
    self.lights = LightManager()
    self.buttons = ButtonManager()
    self.dumper = Dumper("dashdrive")

    # create constants
    self.TICK_RATE = 0.125
    self.WAIT_TIME = 5.0

    # try to log a new config session
    try:
        self.filer.log(
            "---------- New Config Session: " +
            str(datetime.datetime.now()) + " ----------\n", True)
    # FIX: a bare `except:` also swallows KeyboardInterrupt/SystemExit;
    # catch Exception instead. NOTE(review): the retry recurses into
    # __init__ -- if wiping never frees space this loops forever; confirm
    # wipeFiles reliably clears storage.
    except Exception:
        # assume the device's storage is full. Wipe it and try again
        self.wipeFiles(False)
        self.__init__()
def main():
    """Entry point for ezcluster generation.

    Loads the source cluster description, validates it (and the config)
    against the plugin-built schemas, grooms the model through all
    plugins, generates the target files, and optionally writes an output
    variables file.
    """
    global vaultFactory
    mydir = os.path.dirname(os.path.realpath(__file__))
    parser = argparse.ArgumentParser()
    parser.add_argument('--src', required=True)
    parser.add_argument('--mark', choices=["none", "both", "start", "end"])
    parser.add_argument('--dump', action='store_true')
    parser.add_argument('--dumpPasswords', action='store_true')
    parser.add_argument('--out')  # Generate a file to set some variable
    param = parser.parse_args()

    loggingConfFile = os.path.join(mydir, "./logging.yml")
    logging.config.dictConfig(
        yaml.load(open(loggingConfFile), Loader=yaml.SafeLoader))

    sourceFile = os.path.normpath(os.path.abspath(param.src))
    if not os.path.isfile(sourceFile):
        ERROR("File '{}' does not exists".format(sourceFile))
    logger.info("Will handle '{}'".format(sourceFile))
    sourceFileDir = os.path.dirname(sourceFile)
    cluster = yaml.load(open(sourceFile), Loader=yaml.SafeLoader)
    targetFolder = misc.appendPath(
        sourceFileDir,
        cluster["build_folder"] if "build_folder" in cluster else "build")
    misc.ensureFolder(targetFolder)
    logger.info("Build folder: '{}'".format(targetFolder))
    if "config_file" in cluster:
        baseConfigFile = cluster["config_file"]
    else:
        baseConfigFile = "ezconfig.yml"
    config, configFile = buildConfig(sourceFileDir, baseConfigFile)

    plugins = []
    plugins.append(Plugin("core", misc.appendPath(mydir, "../plugins/core")))
    logger.debug("Plugins path:'{}'".format(config[PLUGINS_PATH]))
    appendPlugins(plugins, cluster, config[PLUGINS_PATH])
    schema = buildSchema(mydir, plugins)
    configSchema, safeConfigSchema = buildConfigSchema(mydir,
                                                      config[PLUGINS_PATH])
    if param.dump:
        dumper = Dumper(targetFolder, param.dumpPasswords)
        dumper.dump("schema.json", schema)
        dumper.dump("config-schema.json", configSchema)
        dumper.dump("safe-config-schema.json", safeConfigSchema)
    else:
        dumper = None

    # Validate the cluster definition and the configuration.
    k = kwalify(source_data=cluster, schema_data=schema)
    k.validate(raise_exception=False)
    if len(k.errors) != 0:
        ERROR("Problem {0}: {1}".format(sourceFile, k.errors))
    k = kwalify(source_data=config, schema_data=configSchema)
    k.validate(raise_exception=False)
    if len(k.errors) != 0:
        ERROR("Configuration problem {0}: {1}".format(configFile, k.errors))

    data = {}
    data['sourceFileDir'] = sourceFileDir
    data["targetFolder"] = targetFolder
    data['ezclusterHome'] = misc.appendPath(mydir, "..")
    data["rolePaths"] = set()
    data["configFile"] = configFile
    model = {}
    model['cluster'] = cluster
    model["config"] = config
    model['data'] = data
    initVault(model)
    if SAFE_CONFIG in model and safeConfigSchema != None:
        k = kwalify(source_data=model[SAFE_CONFIG],
                    schema_data=safeConfigSchema)
        k.validate(raise_exception=False)
        if len(k.errors) != 0:
            ERROR("Configuration problem {0}: {1}".format(
                model["data"][_SAFE_CONFIG_FILE_], k.errors))

    for plugin in plugins:
        plugin.groom(model)
    for plugin in plugins:
        plugin.groom2(model)
    targetFileByName = buildTargetFileByName(plugins)
    if param.dump:
        dumper.dump("cluster.json", model['cluster'])
        dumper.dump("data.json", model['data'])
        dumper.dump("targetFileByName.json", targetFileByName)
        dumper.dump("config.json", config)
        if SAFE_CONFIG in model and dumper.unsafe:
            dumper.dump("safeConfig.json", model[SAFE_CONFIG])
        for plugin in plugins:
            plugin.dump(model, dumper)
    generate(targetFileByName, targetFolder, model, param.mark, dumper)

    # BUG FIX: `"out" in param` is always True for a declared argparse
    # option -- the attribute exists (value None) even when --out was not
    # given, so the original then called open(None, "w+") and crashed.
    # Test the value instead, and use `with` so the file is always closed.
    if param.out:
        with open(param.out, "w+") as f:
            f.write("# Generated by ezcluster:\n")
            if "buildScript" in model["data"]:
                f.write('BUILD_SCRIPT="{}"\n'.format(
                    model["data"]["buildScript"]))
async def test_dump_methods(self):
    """Test that the dumper.dump_* coroutines work.

    FIX: declared ``async def`` because the body awaits the dump
    coroutines; ``await`` inside a plain ``def`` is a SyntaxError.
    """
    dumper = Dumper(self.dumper_config)
    message = types.Message(
        id=777,
        to_id=types.PeerUser(123),
        date=datetime.now(),
        message='Hello',
        out=True,
        via_bot_id=1000,
        fwd_from=types.MessageFwdHeader(
            date=datetime.now() - timedelta(days=1), from_id=321
        )
    )
    fwd_id = await dumper.dump_forward(message.fwd_from)
    await dumper.dump_message(message, 123, forward_id=fwd_id, media_id=None)

    message = types.Message(
        id=778,
        to_id=types.PeerUser(321),
        date=datetime.now(),
        message='Hello',
        out=False,
        via_bot_id=1000,
        media=types.MessageMediaPhoto(
            caption='Hi',
            ttl_seconds=40,
            photo=types.Photo(
                id=2357,
                access_hash=-123456789,
                date=datetime.now(),
                sizes=[
                    types.PhotoSize(
                        type='X',
                        w=100,
                        h=100,
                        size=100 * 100,
                        location=types.FileLocation(
                            dc_id=2,
                            volume_id=5,
                            local_id=7532,
                            secret=987654321
                        )
                    )
                ]
            )
        )
    )
    loc = await dumper.dump_media(message.media)
    await dumper.dump_message(message, 123, forward_id=None, media_id=loc)
    await dumper.dump_message_service(
        context_id=123, media_id=loc,
        message=types.MessageService(
            id=779,
            to_id=123,
            date=datetime.now(),
            action=types.MessageActionScreenshotTaken()
        ))

    me = types.User(
        id=123,
        is_self=True,
        access_hash=13515,
        first_name='Me',
        username='******',
        phone='1234567'
    )
    await dumper.dump_user(photo_id=None, user_full=types.UserFull(
        user=me,
        link=types.contacts.Link(
            my_link=types.ContactLinkContact(),
            foreign_link=types.ContactLinkContact(),
            user=me
        ),
        notify_settings=types.PeerNotifySettings(0, 'beep'),
        common_chats_count=3
    ))
    await dumper.dump_chat(photo_id=None, chat=types.Chat(
        id=7264,
        title='Chat',
        photo=types.ChatPhotoEmpty(),
        participants_count=5,
        date=datetime.now() - timedelta(days=10),
        version=1
    ))
    channel = types.Channel(
        id=8247,
        title='Channel',
        photo=types.ChatPhotoEmpty(),
        username='******',
        participants_count=17,
        date=datetime.now() - timedelta(days=5),
        version=7
    )
    channel_full = types.ChannelFull(
        id=8247,
        about='Just a Channel',
        read_inbox_max_id=1051,
        read_outbox_max_id=8744,
        unread_count=1568,
        chat_photo=types.PhotoEmpty(id=176489),
        notify_settings=types.PeerNotifySettingsEmpty(),
        exported_invite=types.ChatInviteEmpty(),
        bot_info=[]
    )
    await dumper.dump_supergroup(channel_full, channel, photo_id=None)
    await dumper.dump_channel(channel_full, channel, photo_id=None)
from http.server import ThreadingHTTPServer, BaseHTTPRequestHandler
from http_helpers.routing import RoutingTable, Route
from http_helpers.objects import Request
from handlers import index_handler, machine_name_handler, \
    create_machine_handler, machine_manufacturer_handler, \
    machine_master_key_handler, machine_meta_handler, \
    create_vending_keys_handler, get_data_handler
from vmf import VendingMachinesFactory
from vmkeys import VMKeys
from dumper import Dumper

# Module-level singletons shared by every route handler.
VENDING_MACHINES = VendingMachinesFactory()
VM_KEYS = VMKeys()
# NOTE(review): DUMPER holds whatever Dumper.start() returns -- presumably
# the started dumper itself; confirm against the dumper module.
DUMPER = Dumper(VENDING_MACHINES, VM_KEYS).start()


def create_route_table():
    # Builds the (method, path) -> handler routing table.
    # NOTE(review): this definition is truncated in the visible chunk;
    # further routes and the closing parenthesis follow elsewhere.
    return RoutingTable(
        Route("GET", "/", index_handler.IndexHandler()),
        Route("CREATE", "/vending_machine",
              create_machine_handler.CreateMachineHandler(VENDING_MACHINES)),
        Route("GET", "/machine_name",
              machine_name_handler.MachineNameHandler(VENDING_MACHINES)),
        Route(
            "GET", "/machine_manufacturer",
            machine_manufacturer_handler.MachineManufacturerHandler(
                VENDING_MACHINES)),
        Route("GET", "/machine_meta",
              machine_meta_handler.MachineMetaHandler(VENDING_MACHINES)),
from dumper import Dumper
from rx import Observable, Observer
from rx.internal import extensionmethod
from rx.subjects import Subject

# Demo input: each character of this string is emitted one at a time.
in_ = '23432/XX428/X21X71'

# Pipeline: every 'X' is expanded via flat_map into Observable.range(1, 2)
# (presumably the values 1 and 2 -- confirm RxPY range(start, count)
# semantics); other characters pass through unchanged. The stream is then
# grouped into pairs, each pair replaced by its index (map's second lambda
# argument is the element index), limited to the first 10 items, and
# printed by the Dumper observer labelled 's'.
Observable.from_(in_) \
    .flat_map(lambda q: Observable.range(1, 2) if q == 'X'
              else Observable.just(q)) \
    .buffer_with_count(2) \
    .map(lambda x, i: i) \
    .take(10) \
    .subscribe(Dumper('s'))
def test_interrupted_dump(self):
    """
    This method will ensure that all messages are retrieved even
    on weird conditions.
    """
    if not ALLOW_NETWORK:
        raise unittest.SkipTest('Network tests are disabled')

    dumper = Dumper(self.dumper_config)
    # One message per chunk, so an interruption can land between any two
    # messages.
    dumper.chunk_size = 1
    SEND, DUMP = True, False
    # Script of alternating send/dump rounds; the counts deliberately
    # leave the dump mid-way so resumption logic is exercised.
    actions = (
        (3, SEND),
        (2, DUMP),
        (2, SEND),
        (2, DUMP),  # Actually one will be dumped then back to start
        (1, SEND),
        (2, DUMP),
        (1, SEND),
        (2, DUMP),  # Actually one will be saved and the other updated
        (2, SEND),
        (3, DUMP),
        (1, SEND),
        (1, DUMP),
        (1, DUMP),
    )

    # Start from a clean saved-messages history.
    self.client(functions.messages.DeleteHistoryRequest('me', 0))
    downloader = Downloader(self.client, self.dumper_config, dumper,
                            loop=asyncio.get_event_loop())
    which = 1
    for amount, what in actions:
        if what is SEND:
            print('Sending', amount, 'messages...')
            for _ in range(amount):
                self.client.send_message('me', str(which))
                which += 1
            # Give the server a moment before the next round.
            time.sleep(1)
        else:
            print('Dumping', amount, 'messages...')
            # Round up to the number of chunks covering `amount` messages.
            chunks = (amount + dumper.chunk_size - 1) // dumper.chunk_size
            dumper.max_chunks = chunks
            downloader.start('me')

    # Fetch the full live history for comparison.
    messages = self.client.get_message_history('me', limit=None)
    print('Full history')
    for msg in reversed(messages):
        print('ID:', msg.id, '; Message:', msg.message)

    print('Dumped history')
    fmt = BaseFormatter(dumper.conn)
    my_id = self.client.get_me().id
    dumped = list(fmt.get_messages_from_context(my_id, order='DESC'))
    for msg in dumped:
        print('ID:', msg.id, '; Message:', msg.text)

    print('Asserting dumped history matches...')
    assert len(messages) == len(dumped), 'Not all messages were dumped'
    assert all(a.id == b.id and a.message == b.text
               for a, b in zip(messages, dumped)),\
        'Dumped messages do not match'
    print('All good! Test passed!')
    self.client.disconnect()
from __future__ import print_function from dumper import dump, dumps, Dumper import dumper import io import sys buff = io.StringIO() dumper.default_dumper = Dumper(output=buff) # BEGIN TEST CASES def do_dump_scalars(): dump(1) dump('a') dump("foo") dump('''string with a newline''') return "1'a''foo''string\\nwith a newline'" def test_do_dump_scalars(): assert_output_as_expected(do_dump_scalars) def do_dumps_multi_values(): s = dumps(1, " is less than ", 10) # returns unicode string in py2 if sys.version < (3, 0): s = s.encode('ascii', 'replace') # convert back to regular string dump(s) return "\"1' is less than '10\""
def sha1_fingerprint(self):
    """Write the font file's SHA-1 fingerprint to '<folder>/sha1_fingerprint'."""
    writer = Dumper(self.folder + '/sha1_fingerprint')
    writer.dump(FontInfo.getInformation(self.fontfile, ['SHA1'])['SHA1'])
    writer.close()
def result_dumper(pair):
    """Subscribe a labelled Dumper observer to the observable in pair[1]."""
    label, observable = pair
    observable.subscribe(Dumper('result left=%s' % label))