def test_dump_msg_entities(self):
    """Show that entities are correctly parsed and stored"""
    message = types.Message(
        id=1,
        to_id=types.PeerUser(321),
        date=datetime.now(),
        message='No entities'
    )
    dumper = Dumper(self.dumper_config)
    fmt = BaseFormatter(dumper.conn)

    # Test with no entities
    dumper.dump_message(message, 123, None, None)
    dumper.commit()
    # A plain-text message must have no stored formatting
    assert not next(fmt.get_messages_from_context(123, order='DESC')).formatting

    # Test with many entities
    text, entities = markdown.parse(
        'Testing message with __italic__, **bold**, inline '
        '[links](https://example.com) and [mentions](@hi), '
        'as well as `code` and ``pre`` blocks.')
    # Replace the mention entity with a name-mention pointing at user 123
    entities[3] = types.MessageEntityMentionName(
        entities[3].offset, entities[3].length, 123)
    message.id = 2
    # Earlier date so ASC ordering returns this message first
    message.date -= timedelta(days=1)
    message.message = text
    message.entities = entities
    dumper.dump_message(message, 123, None, None)
    dumper.commit()
    msg = next(fmt.get_messages_from_context(123, order='ASC'))
    # Entities must round-trip through the encode/decode helpers
    assert utils.decode_msg_entities(msg.formatting) == message.entities
def cmap_dump(self): font = TTFont(self.fontfile) #TODO(ahmetcelik) cmap in format 12 should be used if it exists cmapTable = font['cmap'].getcmap(3, 10) if not cmapTable: cmapTable = font['cmap'].getcmap(3, 1) assert cmapTable,'Unicode cmap table required' cmap = cmapTable.cmap # unicode table codepoints = [] glyphs = [] for code, name in cmap.iteritems(): id = font.getGlyphID(name) glyphs.append(id) codepoints.append(code) if self.debug: print id,name,code font.close() cp_dumper = Dumper(self.folder + '/codepoints') cp_dumper.dump_array(codepoints, 'I', '>') cp_dumper.close() gid_dumper = Dumper(self.folder + '/gids') gid_dumper.dump_array(glyphs, 'H', '>') gid_dumper.close()
async def test_formatter_get_chat(self):
    """
    Ensures that the BaseFormatter is able to fetch the expected
    entities when using a date parameter.

    Fix: the body uses ``await``, which is a SyntaxError inside a
    plain ``def``; the test must be declared ``async def``.
    """
    chat = types.Chat(
        id=123,
        title='Some title',
        photo=types.ChatPhotoEmpty(),
        participants_count=7,
        date=datetime.now(),
        version=1
    )
    dumper = Dumper(self.dumper_config)
    fmt = BaseFormatter(dumper.conn)
    # Dump one version of the chat for each month of 2010
    for month in range(1, 13):
        await dumper.dump_chat(chat, None, timestamp=int(datetime(
            year=2010, month=month, day=1
        ).timestamp()))
    dumper.commit()
    cid = tl_utils.get_peer_id(chat)
    # Default should get the most recent version
    date = fmt.get_chat(cid).date_updated
    assert date == datetime(year=2010, month=12, day=1)

    # Expected behaviour is to get the previous available date
    target = datetime(year=2010, month=6, day=29)
    date = fmt.get_chat(cid, target).date_updated
    assert date == datetime(year=2010, month=6, day=1)

    # Expected behaviour is to get the next date if previous unavailable
    target = datetime(year=2009, month=12, day=1)
    date = fmt.get_chat(cid, target).date_updated
    assert date == datetime(year=2010, month=1, day=1)
def __init__(self, path_or_url, attr_list):
    """SQL-backed dumper: build the init/insert statements up front.

    Python 2 code (print statement; map returns a list here).
    """
    # Delegate common setup (path and attribute list) to the base class
    Dumper.__init__(self, path_or_url, attr_list)
    # DB connection handle; presumably opened later on connect — TODO confirm
    self.connection = None
    self.init_sql = self.generate_init_sql()
    # SQL-safe column names derived from the raw attribute list
    self.attribute_names = map(lambda a: format_attr_name(a), self.attr_list)
    self.insert_sql = self.generate_insert_sql()
    print self.insert_sql
def main():
    """Run the capture sequence: ARP dump, interface-config dump, scan.

    Fix: the original read results through an undefined name ``dumper``
    (``dumper.readARPDump()`` / ``dumper.readIFConfigDump()``), raising
    NameError; the local instance is ``dump``.
    """
    dump = Dumper()
    dump.arpDump()
    print('\n Reading ARPDump: ' + dump.readARPDump())
    dump.arpDump()
    dump.ipConfigDump('wlan0')
    print('\n Reading IFConfigDump: ' + dump.readIFConfigDump())
    startScanning()
    dump.arpDump()
def serialize_TOC(self, output_idx, output_data):
    """Write the TOC index to *output_idx* and its entries to *output_data*.

    Does nothing unless the TOC has been prepared (``self.tocReady``).
    """
    if not self.tocReady:
        return
    idx_writer = Dumper(output_idx)
    idx_writer.dump(self.TOC)
    idx_writer.close()
    data_writer = Dumper(output_data)
    data_writer.dumpForEach(self.TOCEntries)
    data_writer.close()
def main():
    """CLI entry point for frida-ios-dump.

    Parses SSH/app arguments, locates a USB device via Frida, then either
    lists installed apps or dumps a decrypted ipa. Returns False on any
    failure, otherwise the handler's result.
    """
    parser = argparse.ArgumentParser(description='frida-ios-dump')
    parser.add_argument('-host', dest='ssh_host', help='SSH Host')
    parser.add_argument('-port', dest='ssh_port', help='SSH Port')
    parser.add_argument('-username', dest='ssh_username', help='SSH Username')
    parser.add_argument('-password', dest='ssh_password', help='SSH Password')
    parser.add_argument('-list', dest='list_applications', action='store_true',
                        help='List the installed apps')
    parser.add_argument(
        '-dump', dest='dump_ipa',
        help='Bundle identifier or display name of the app to dump')
    parser.add_argument('-out', dest='output_directory',
                        help='Destination of the resulting ipa file')
    # Parse arguments.
    args = parser.parse_args()
    # Require at least one action; otherwise show usage and bail out
    if not args.dump_ipa and not args.list_applications:
        parser.print_help()
        return False
    # Find connected iPhone.
    device = get_device()
    if device is None:
        print('Unable to find a connected usb device with Frida.')
        return False
    result = False
    print('Connected to Frida on device \'%s\'.' % device.name)
    # List applications on device.
    if args.list_applications:
        result = list_applications.execute(device)
    # Dump decrypted ipa from device.
    elif args.dump_ipa:
        dumper = Dumper(device, args.output_directory)
        # SSH is needed to pull the dumped binary off the device
        if not dumper.connect_ssh(args.ssh_host, args.ssh_port,
                                  args.ssh_username, args.ssh_password):
            return False
        if not dumper.launch_app(args.dump_ipa):
            return False
        result = dumper.execute()
    return result
def serialize_TOC(self, output_idx, output_data):
    """Persist the TOC: the table itself to *output_idx*, the entry list
    to *output_data*. No-op while ``self.tocReady`` is false.
    """
    # TODO(bstell) remove this is not used anywhere
    if self.tocReady:
        first = Dumper(output_idx)
        first.dump(self.TOC)
        first.close()
        second = Dumper(output_data)
        second.dumpForEach(self.TOCEntries)
        second.close()
def test_dumping(self):
    """Verify Dumper.dump() returns paths and creates files on disk,
    then clean the backups up again."""
    handler = Dumper(config.backups_path, db.get_connection())
    produced = handler.dump()
    self.assertEqual(True, len(produced) > 0, 'Dumps paths list is empty')
    on_disk = len(os.listdir(config.backups_path))
    self.assertEqual(
        True, on_disk > 0,
        '%s dir must be contains backups' % config.backups_path
    )
    handler.clear()
def metadata(self):
    """Collect glyph-count and metric-count metadata from the font and
    serialize it to ``<folder>/metadata``."""
    font = TTFont(self.fontfile)
    info = {'numGlyphs': font['maxp'].numGlyphs,
            'has_hmtx': 'hmtx' in font,
            'has_vmtx': 'vmtx' in font}
    if info['has_hmtx']:
        info['numberOfHMetrics'] = len(font['hmtx'].metrics)
    if info['has_vmtx']:
        info['numberOfVMetrics'] = len(font['vmtx'].metrics)
    writer = Dumper(self.folder + '/metadata')
    writer.dumpObject(info)
    writer.close()
async def test_formatter_get_messages(self):
    """
    Ensures that the BaseFormatter is able to correctly yield messages.

    Fix: the body uses ``await``, which is a SyntaxError inside a
    plain ``def``; the test must be declared ``async def``.
    """
    dumper = Dumper(self.dumper_config)
    msg = types.Message(
        id=1,
        to_id=123,
        date=datetime(year=2010, month=1, day=1),
        message='hi'
    )
    # One message per day for a year, alternating between two contexts
    for _ in range(365):
        await dumper.dump_message(msg, 123, forward_id=None, media_id=None)
        msg.id += 1
        msg.date += timedelta(days=1)
        msg.to_id = 300 - msg.to_id  # Flip between two IDs
    dumper.commit()

    fmt = BaseFormatter(dumper.conn)
    # Assert all messages are returned
    assert len(list(fmt.get_messages_from_context(123))) == 365

    # Assert only messages after a date are returned
    min_date = datetime(year=2010, month=4, day=1)
    assert all(m.date >= min_date for m in fmt.get_messages_from_context(
        123, start_date=min_date
    ))

    # Assert only messages before a date are returned
    max_date = datetime(year=2010, month=4, day=1)
    assert all(m.date <= max_date for m in fmt.get_messages_from_context(
        123, end_date=max_date
    ))

    # Assert messages are returned in a range
    assert all(min_date <= m.date <= max_date
               for m in fmt.get_messages_from_context(
                   123, start_date=min_date, end_date=max_date
               ))

    # Assert messages are returned in the correct order
    desc = list(fmt.get_messages_from_context(123, order='DESC'))
    assert all(desc[i - 1] > desc[i] for i in range(1, len(desc)))

    asc = list(fmt.get_messages_from_context(123, order='ASC'))
    assert all(asc[i - 1] < asc[i] for i in range(1, len(asc)))
def setUpClass(cls): cls.dumper_config = {'DBFileName': 'test_db', 'OutputDirectory': 'test_work_dir', 'MaxSize': 0} # TODO test with different configurations assert not Path(cls.dumper_config['OutputDirectory']).exists() Path(cls.dumper_config['OutputDirectory']).mkdir() config = configparser.ConfigParser() config.read('config.ini') config = config['TelegramAPI'] cls.client = TelegramClient(None, config['ApiId'], config['ApiHash']) login_client(cls.client, gen_username(10)) dumper = Dumper(cls.dumper_config) dumper.check_self_user(cls.client.get_me().id)
def start():
    """Loop forever: push backups through the Telegram bot, then sleep
    for the configured number of hours."""
    telegram_bot = Bot(token=config.bot_token)
    backup_dumper = Dumper(config.backups_path, db.get_connection())
    while True:
        asyncio.get_event_loop().run_until_complete(
            send_backups(telegram_bot, backup_dumper))
        time.sleep(config.backup_interval_hour * 60 * 60)
def __init__(self):
    """Wire up filer/lights/buttons/dumper and log a new config session."""
    # create the filer, light manager, button manager, and dumper
    self.filer = Filer()
    self.lights = LightManager()
    self.buttons = ButtonManager()
    self.dumper = Dumper("dashdrive")
    # create constants
    self.TICK_RATE = 0.125
    self.WAIT_TIME = 5.0
    # try to log a new config session
    try:
        self.filer.log(
            "---------- New Config Session: " +
            str(datetime.datetime.now()) + " ----------\n", True)
    except:
        # assume the device's storage is full. Wipe it and try again
        # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit,
        # and the recursive __init__ retry can loop if wiping fails — confirm
        self.wipeFiles(False)
        self.__init__()
def cmap_dump(self): font = TTFont(self.fontfile) cmap = font['cmap'].getcmap(3, 1).cmap # unicode table assert cmap, 'Unicode cmap table required' codepoints = [] glyphs = [] for code, name in cmap.iteritems(): id = font.getGlyphID(name) glyphs.append(id) codepoints.append(code) if self.debug: print id,name,code font.close() cp_dumper = Dumper(self.folder + '/codepoints') cp_dumper.dump_array(codepoints, 'H', '>') cp_dumper.close() gid_dumper = Dumper(self.folder + '/gids') gid_dumper.dump_array(glyphs, 'H', '>') gid_dumper.close()
def __init__(self, **traits):
    """Build the application object graph: network, the three datasets,
    dumper, trainer, shared state, logging and plots; then greet the user
    and expose the app in the embedded shell namespace."""
    super(FFnetApp, self).__init__(**traits)
    self.network = Network(app = self)
    self.training_data = TrainingData(app = self)
    self.testing_data = TrainingData(app = self)
    self.recall_data = TrainingData(app = self)
    self.data = self.training_data # by default
    self.dumper = Dumper(app=self)
    self.trainer = TncTrainer(app = self) # default trainer
    self.shared = Shared()
    self.logs = Logger()
    self.plots = MPLPlots()
    self.logs.logger.info('Welcome! You are using ffnet-%s.' %ffnet_version)
    self.shell = {'app':self}
def __init__(self, path_or_url):
    """Dumper variant that takes no attribute list (passes None)."""
    Dumper.__init__(self, path_or_url, None)
    # Client handle; presumably created later on connect — TODO confirm
    self.client = None
def sha1_fingerprint(self):
    """Write the font file's SHA1 fingerprint to <folder>/sha1_fingerprint."""
    digest = FontInfo.getInformation(self.fontfile, ['SHA1'])['SHA1']
    writer = Dumper(self.folder + '/sha1_fingerprint')
    writer.dump(digest)
    writer.close()
class Configurer:
    """Interactive configuration mode for the dash-cam device.

    Runs tick-based loops that poll the two hardware buttons, flash LEDs
    as feedback, and dispatch into output-config or connect-config
    sub-modes, eventually launching the dash cam or shutting down.
    """
    # Configurer Properties:
    #   filer    The Filer object used to write logs/package output
    #   lights   The LightManager used for toggling LEDs
    #   buttons  The ButtonManager used for user input
    #   dumper   The Dumper object used to dump files to a flash drive
    # Configurer Constants:
    #   TICK_RATE  The time interval (in seconds) at which the configurer
    #              ticks to check for/make updates
    #   WAIT_TIME  The time the Configurer waits before automatically going
    #              into dash-cam mode (this is in seconds)

    # Constructor
    def __init__(self):
        # create the filer, light manager, button manager, and dumper
        self.filer = Filer()
        self.lights = LightManager()
        self.buttons = ButtonManager()
        self.dumper = Dumper("dashdrive")
        # create constants
        self.TICK_RATE = 0.125
        self.WAIT_TIME = 5.0
        # try to log a new config session
        try:
            self.filer.log(
                "---------- New Config Session: " +
                str(datetime.datetime.now()) + " ----------\n", True)
        except:
            # assume the device's storage is full. Wipe it and try again
            # NOTE(review): bare except; can loop if wiping fails — confirm
            self.wipeFiles(False)
            self.__init__()

    # Main function
    def main(self):
        """Top-level config loop: flash yellow, wait for button input or
        WAIT_TIME expiry, then launch the dash cam."""
        # set up loop variables
        ticks = 0.0
        tickSeconds = 0.0
        # the terminate codes are as follows:
        #   -1  Don't terminate
        #    0  Terminate and launch dash cam
        #    1  Terminate and shut down
        terminateCode = -1
        self.filer.log("Configuration mode...\n")
        # main loop
        while (terminateCode < 0):
            # slowly flash the yellow light (twice every second)
            self.lights.setLED([0], tickSeconds.is_integer() or
                               (tickSeconds + 0.5).is_integer())
            tickString = "[Ticks: {t1:9.2f}] [Running Time: {t2:9.2f}]"
            tickString = tickString.format(t1=ticks, t2=int(tickSeconds))
            tickString = "[config] " + tickString
            # if the WAIT_TIME has been exceeded, terminate with code 0
            if (tickSeconds == self.WAIT_TIME):
                tickString += " (Wait time exceeded: terminating configuration and launching dash cam...)"
                terminateCode = 0
            # check for user input (red/yellow hold: shut down)
            if (self.buttons.isPowerPressed() and
                    self.buttons.durations[0] * self.TICK_RATE >= 2.0 and
                    self.buttons.isCapturePressed() and
                    self.buttons.durations[1] * self.TICK_RATE >= 2.0):
                self.filer.log("Red/Yellow buttons held. Shutting down...")
                # create a controller and use its shutdown sequence
                shutdown_pi(self.lights, [self.buttons])
            # check for user input (output config)
            elif (self.buttons.isCapturePressed() and
                    self.buttons.durations[1] * self.TICK_RATE < 1.0 and
                    self.buttons.durations[1] * self.TICK_RATE >= 0.25 and
                    not self.buttons.isPowerPressed()):
                self.filer.log("Entering output config...\n")
                # disable yellow LED
                self.lights.setLED([0], False)
                self.mainOutput()
                # reset button durations (so shutdown doesn't trigger)
                self.buttons.durations = [0, 0]
                # reset the ticks/tickSeconds
                ticks = 0.0
                tickSeconds = 0.0
                # flash yellow LED to indicate mode switch
                self.lights.flashLED([0], 2)
            # check for user input (connect config)
            elif (self.buttons.isPowerPressed() and
                    self.buttons.durations[0] * self.TICK_RATE < 1.0 and
                    self.buttons.durations[0] * self.TICK_RATE >= 0.25 and
                    not self.buttons.isCapturePressed()):
                self.filer.log("Entering connect config...\n")
                # disable yellow LED
                self.lights.setLED([0], False)
                self.mainConnect()
                # reset button durations (so shutdown doesn't trigger)
                self.buttons.durations = [0, 0]
                # reset the ticks/tickSeconds
                ticks = 0.0
                tickSeconds = 0.0
                # flash yellow LED to indicate mode switch
                self.lights.flashLED([0], 2)
            # only log the tickString if the ticks are currently on a second
            if (tickSeconds.is_integer()):
                self.filer.log(tickString + "\n")
            # update the ticks
            ticks += 1
            tickSeconds += self.TICK_RATE
            # sleep for one TICK_RATE
            sleep(self.TICK_RATE)
        # the loop was terminated: determine why
        if (terminateCode == 0):
            # force GPIO-using classes to clean up
            self.lights.__del__()
            self.buttons.__del__()
            self.filer.log(
                "--------- Config Session Ended: " +
                str(datetime.datetime.now()) + " ---------\n\n", True)
            # create a controller to launch the dash cam
            cont = Controller()
            cont.main()

    # Output Mode main function
    def mainOutput(self):
        """Output-config loop: package output, dump to a flash drive,
        convert videos, or wipe files depending on button hold times."""
        # set up loop variables
        ticks = 0.0
        tickSeconds = 0.0
        # terminate codes are as follows:
        #   -1  Don't terminate
        #    0  Terminate and return to config
        terminateCode = -1
        # main loop
        while (terminateCode < 0):
            # slowly flash the red/blue lights (twice every second)
            self.lights.setLED([1, 2], tickSeconds.is_integer() or
                               (tickSeconds + 0.5).is_integer())
            # create a tick string
            tickString = "[Ticks: {t1:9.2f}] [Running Time: {t2:9.2f}]"
            tickString = tickString.format(t1=ticks, t2=int(tickSeconds))
            tickString = "[config-output] " + tickString
            # get button durations before updating them
            captureDuration = self.buttons.durations[1]
            powerDuration = self.buttons.durations[0]
            # call the button methods to update the button durations
            self.buttons.isCapturePressed()
            self.buttons.isPowerPressed()
            # check for red AND yellow button duration
            if (captureDuration > 0.0 and captureDuration < ticks and
                    powerDuration > 0.0 and powerDuration < ticks):
                # if the buttons are released, go back
                if (not self.buttons.isCapturePressed() and
                        not self.buttons.isPowerPressed()):
                    tickString += " (Capture/Power buttons were held)"
                    terminateCode = 0
            # check for red button duration
            elif (captureDuration > 0.0 and captureDuration < ticks and
                    powerDuration == 0.0):
                # flash at 1.5 seconds (and still being held down) to indicate
                # that files will be sent to the flash drive upon button release
                if (captureDuration * self.TICK_RATE >= 1.5 and
                        (captureDuration * self.TICK_RATE) - 1.5 <= self.TICK_RATE and
                        self.buttons.isCapturePressed()):
                    self.lights.setLED([1, 2], False)
                    self.lights.flashLED([0], 1)
                # if the button is released...
                if (not self.buttons.isCapturePressed()):
                    # if released under 1.5 seconds, package output
                    if (captureDuration * self.TICK_RATE < 1.5):
                        # disable all lights
                        self.lights.setLED([0, 1, 2], False)
                        # package the output
                        self.filer.packageOutput("output.zip", self.lights)
                    # otherwise, dump to flash drive
                    elif (captureDuration * self.TICK_RATE >= 1.5):
                        # disable all lights
                        self.lights.setLED([0, 1, 2], False)
                        # dump output to flash drive, if it's plugged in
                        if (self.dumper.driveExists()):
                            self.filer.log("Drive found. Dumping files...\n")
                            self.lights.setLED([2], True)
                            # dump files
                            self.dumper.dumpToDrive(self.filer)
                            # flash the blue/red lights to show success
                            self.lights.flashLED([1, 2], 3)
                        # otherwise, flash red light to show the drive wasn't found
                        else:
                            self.filer.log(
                                "Drive not found. Cannot dump files.\n")
                            self.lights.flashLED([1], 3)
            # check for yellow button (convert videos)
            elif (powerDuration > 0.0 and powerDuration < ticks and
                    captureDuration == 0.0):
                # flash at 1.5 seconds (if the button is still held) to indicate
                # that files will be deleted upon button release
                if (powerDuration * self.TICK_RATE >= 1.5 and
                        (powerDuration * self.TICK_RATE) - 1.5 <= self.TICK_RATE and
                        self.buttons.isPowerPressed()):
                    self.lights.setLED([1, 2], False)
                    self.lights.flashLED([0], 1)
                # if the button is released
                if (not self.buttons.isPowerPressed()):
                    # if released under 1.5 seconds, convert the videos
                    if (powerDuration * self.TICK_RATE < 1.5):
                        self.lights.setLED([0, 1, 2], False)
                        # convert videos to mp4
                        self.filer.convertVideos(self.lights)
                    # otherwise, delete the output
                    elif (powerDuration * self.TICK_RATE >= 1.5):
                        self.wipeFiles()
            # log tick string if the tick is on a second
            if (tickSeconds.is_integer()):
                self.filer.log(tickString + "\n")
            # update ticks
            ticks += 1
            tickSeconds += self.TICK_RATE
            # sleep for one TICK_RATE
            sleep(self.TICK_RATE)
        # print termination message
        if (terminateCode == 0):
            self.filer.log("Returning to config...\n")
        # disable blue/red LEDs
        self.lights.setLED([1, 2], False)

    # Helper function for mainOutput() that wipes all media files from the device.
    # Takes in an optional argument of whether or not to toggle the lights when
    # wiping the files
    def wipeFiles(self, toggleLights=True):
        if (toggleLights):
            # set the red LED to ON while files are deleted
            self.lights.setLED([0, 1, 2], False)
            self.lights.setLED([1], True)
        # invoke system commands to wipe the media/log files
        os.system("sudo rm -rf ../logs")
        os.system("sudo rm -rf ../media")
        sleep(7)  # sleep for a short time before attempting anything else
        # since the current log file was destroyed, write to
        # a new one stating what happened
        self.filer.checkDirectories()
        self.filer.log("[config-output] Wiping all output files...\n")
        if (toggleLights):
            # flash red/blue alternating to indicate the files were
            # permanently deleted
            self.lights.flashLED([1, 2], 4)
            self.lights.setLED([1, 2], False)

    # Connect Mode main function
    def mainConnect(self):
        """Connect-config loop: flash blue/yellow until both buttons are
        held long enough to return to the main config loop."""
        # set up loop variables
        ticks = 0.0
        tickSeconds = 0.0
        # terminate codes are as follows:
        #   -1  Don't terminate
        #    0  Terminate and return to config
        terminateCode = -1
        # main loop
        while (terminateCode < 0):
            # slowly flash the blue/yellow lights (twice every second)
            self.lights.setLED([0, 2], tickSeconds.is_integer() or
                               (tickSeconds + 0.5).is_integer())
            # create tick string
            tickString = "[Ticks: {t1:9.2f}] [Running Time: {t2:9.2f}]"
            tickString = tickString.format(t1=ticks, t2=int(tickSeconds))
            tickString = "[config-connect] " + tickString
            # check for red/yellow button hold (back to config)
            if (self.buttons.isCapturePressed() and
                    self.buttons.durations[1] * self.TICK_RATE >= 1.0 and
                    self.buttons.isPowerPressed() and
                    self.buttons.durations[0] * self.TICK_RATE >= 1.0):
                tickString += " (Capture/Power buttons were held)"
                terminateCode = 0
            # log the tick string if the tickSeconds is on a second
            if (tickSeconds.is_integer()):
                self.filer.log(tickString + "\n")
            # update ticks
            ticks += 1
            tickSeconds += self.TICK_RATE
            # sleep for one TICK_RATE
            sleep(self.TICK_RATE)
        # print termination message
        if (terminateCode == 0):
            self.filer.log("Returning to config...\n")
        # disable blue/yellow LEDs
        self.lights.setLED([0, 2], False)
def test_interrupted_dump(self):
    """
    This method will ensure that all messages are retrieved even
    on weird conditions.
    """
    # Network-dependent: skipped unless explicitly enabled
    if not ALLOW_NETWORK:
        raise unittest.SkipTest('Network tests are disabled')

    dumper = Dumper(self.dumper_config)
    dumper.chunk_size = 1
    SEND, DUMP = True, False
    # Interleaved send/dump script that deliberately interrupts the
    # downloader mid-history to exercise resume behaviour
    actions = (
        (3, SEND),
        (2, DUMP),
        (2, SEND),
        (2, DUMP),  # Actually one will be dumped then back to start
        (1, SEND),
        (2, DUMP),
        (1, SEND),
        (2, DUMP),  # Actually one will be saved and the other updated
        (2, SEND),
        (3, DUMP),
        (1, SEND),
        (1, DUMP),
        (1, DUMP),
    )

    # Start from a clean saved-messages history
    self.client(functions.messages.DeleteHistoryRequest('me', 0))
    downloader = Downloader(self.client, self.dumper_config, dumper,
                            loop=asyncio.get_event_loop())
    which = 1
    for amount, what in actions:
        if what is SEND:
            print('Sending', amount, 'messages...')
            for _ in range(amount):
                self.client.send_message('me', str(which))
                which += 1
            time.sleep(1)
        else:
            print('Dumping', amount, 'messages...')
            # Limit the downloader to just enough chunks for `amount`
            chunks = (amount + dumper.chunk_size - 1) // dumper.chunk_size
            dumper.max_chunks = chunks
            downloader.start('me')

    messages = self.client.get_message_history('me', limit=None)
    print('Full history')
    for msg in reversed(messages):
        print('ID:', msg.id, '; Message:', msg.message)

    print('Dumped history')
    fmt = BaseFormatter(dumper.conn)
    my_id = self.client.get_me().id
    dumped = list(fmt.get_messages_from_context(my_id, order='DESC'))
    for msg in dumped:
        print('ID:', msg.id, '; Message:', msg.text)

    print('Asserting dumped history matches...')
    assert len(messages) == len(dumped), 'Not all messages were dumped'
    assert all(a.id == b.id and a.message == b.text
               for a, b in zip(messages, dumped)),\
        'Dumped messages do not match'
    print('All good! Test passed!')
    self.client.disconnect()
def main():
    """ezcluster entry point: load and validate the cluster definition,
    groom it through the plugin chain, and generate the build folder.

    Fix: the original guarded the output file with ``if "out" in param:``.
    argparse always defines the ``out`` attribute on the Namespace (as
    ``None`` when --out is not given), so the branch always ran and
    ``open(None, "w+")`` raised TypeError. Test the value instead.
    """
    global vaultFactory
    mydir = os.path.dirname(os.path.realpath(__file__))
    parser = argparse.ArgumentParser()
    parser.add_argument('--src', required=True)
    parser.add_argument('--mark', choices=["none", "both", "start", "end"])
    parser.add_argument('--dump', action='store_true')
    parser.add_argument('--dumpPasswords', action='store_true')
    parser.add_argument('--out')  # Generate a file to set some variable
    param = parser.parse_args()
    loggingConfFile = os.path.join(mydir, "./logging.yml")
    logging.config.dictConfig(
        yaml.load(open(loggingConfFile), Loader=yaml.SafeLoader))
    sourceFile = os.path.normpath(os.path.abspath(param.src))
    if not os.path.isfile(sourceFile):
        ERROR("File '{}' does not exists".format(sourceFile))
    logger.info("Will handle '{}'".format(sourceFile))
    sourceFileDir = os.path.dirname(sourceFile)
    cluster = yaml.load(open(sourceFile), Loader=yaml.SafeLoader)
    # Build folder defaults to "build" next to the source file
    targetFolder = misc.appendPath(
        sourceFileDir,
        cluster["build_folder"] if "build_folder" in cluster else "build")
    misc.ensureFolder(targetFolder)
    logger.info("Build folder: '{}'".format(targetFolder))
    if "config_file" in cluster:
        baseConfigFile = cluster["config_file"]
    else:
        baseConfigFile = "ezconfig.yml"
    config, configFile = buildConfig(sourceFileDir, baseConfigFile)
    # Core plugin always loads first; the cluster may add more
    plugins = []
    plugins.append(Plugin("core", misc.appendPath(mydir, "../plugins/core")))
    logger.debug("Plugins path:'{}'".format(config[PLUGINS_PATH]))
    appendPlugins(plugins, cluster, config[PLUGINS_PATH])
    schema = buildSchema(mydir, plugins)
    configSchema, safeConfigSchema = buildConfigSchema(mydir, config[PLUGINS_PATH])
    if param.dump:
        dumper = Dumper(targetFolder, param.dumpPasswords)
        dumper.dump("schema.json", schema)
        dumper.dump("config-schema.json", configSchema)
        dumper.dump("safe-config-schema.json", safeConfigSchema)
    else:
        dumper = None
    # Validate the cluster definition against the merged plugin schema
    k = kwalify(source_data=cluster, schema_data=schema)
    k.validate(raise_exception=False)
    if len(k.errors) != 0:
        ERROR("Problem {0}: {1}".format(sourceFile, k.errors))
    k = kwalify(source_data=config, schema_data=configSchema)
    k.validate(raise_exception=False)
    if len(k.errors) != 0:
        ERROR("Configuration problem {0}: {1}".format(configFile, k.errors))
    data = {}
    data['sourceFileDir'] = sourceFileDir
    data["targetFolder"] = targetFolder
    data['ezclusterHome'] = misc.appendPath(mydir, "..")
    data["rolePaths"] = set()
    data["configFile"] = configFile
    model = {}
    model['cluster'] = cluster
    model["config"] = config
    model['data'] = data
    initVault(model)
    if SAFE_CONFIG in model and safeConfigSchema != None:
        k = kwalify(source_data=model[SAFE_CONFIG],
                    schema_data=safeConfigSchema)
        k.validate(raise_exception=False)
        if len(k.errors) != 0:
            ERROR("Configuration problem {0}: {1}".format(
                model["data"][_SAFE_CONFIG_FILE_], k.errors))
    # Two grooming passes so plugins can react to each other's changes
    for plugin in plugins:
        plugin.groom(model)
    for plugin in plugins:
        plugin.groom2(model)
    targetFileByName = buildTargetFileByName(plugins)
    if param.dump:
        dumper.dump("cluster.json", model['cluster'])
        dumper.dump("data.json", model['data'])
        dumper.dump("targetFileByName.json", targetFileByName)
        dumper.dump("config.json", config)
        if SAFE_CONFIG in model and dumper.unsafe:
            dumper.dump("safeConfig.json", model[SAFE_CONFIG])
        for plugin in plugins:
            plugin.dump(model, dumper)
    generate(targetFileByName, targetFolder, model, param.mark, dumper)
    if param.out is not None:
        f = open(param.out, "w+")
        f.write("# Generated by ezcluster:\n")
        if "buildScript" in model["data"]:
            f.write('BUILD_SCRIPT="{}"\n'.format(model["data"]["buildScript"]))
        f.close()
def serialize_glyf(self, output_idx, output_data):
    """Serialize the glyf table: header plus per-glyph info records to
    *output_idx*, raw glyph data to *output_data*.

    Does nothing unless the glyf data has been prepared (``self.glyfReady``).
    """
    if not self.glyfReady:
        return
    index_out = Dumper(output_idx)
    index_out.dump(self.GlyphTable)
    index_out.dump_for_each(self.glyphs_info)
    index_out.close()
    data_out = Dumper(output_data)
    data_out.dump_for_each(self.glyphs_data)
    data_out.close()
async def test_dump_methods(self):
    """Test await dumper.dump_* works

    Fix: the body awaits coroutines, which is a SyntaxError inside a
    plain ``def``; the test must be declared ``async def``.
    """
    dumper = Dumper(self.dumper_config)
    message = types.Message(
        id=777,
        to_id=types.PeerUser(123),
        date=datetime.now(),
        message='Hello',
        out=True,
        via_bot_id=1000,
        fwd_from=types.MessageFwdHeader(
            date=datetime.now() - timedelta(days=1), from_id=321
        )
    )
    fwd_id = await dumper.dump_forward(message.fwd_from)
    await dumper.dump_message(message, 123, forward_id=fwd_id, media_id=None)

    message = types.Message(
        id=778,
        to_id=types.PeerUser(321),
        date=datetime.now(),
        message='Hello',
        out=False,
        via_bot_id=1000,
        media=types.MessageMediaPhoto(
            caption='Hi',
            ttl_seconds=40,
            photo=types.Photo(
                id=2357,
                access_hash=-123456789,
                date=datetime.now(),
                sizes=[
                    types.PhotoSize(
                        type='X',
                        w=100,
                        h=100,
                        size=100 * 100,
                        location=types.FileLocation(
                            dc_id=2,
                            volume_id=5,
                            local_id=7532,
                            secret=987654321
                        )
                    )
                ]
            )
        )
    )
    loc = await dumper.dump_media(message.media)
    await dumper.dump_message(message, 123, forward_id=None, media_id=loc)
    await dumper.dump_message_service(
        context_id=123,
        media_id=loc,
        message=types.MessageService(
            id=779,
            to_id=123,
            date=datetime.now(),
            action=types.MessageActionScreenshotTaken()
        )
    )
    me = types.User(
        id=123,
        is_self=True,
        access_hash=13515,
        first_name='Me',
        username='******',
        phone='1234567'
    )
    await dumper.dump_user(photo_id=None, user_full=types.UserFull(
        user=me,
        link=types.contacts.Link(
            my_link=types.ContactLinkContact(),
            foreign_link=types.ContactLinkContact(),
            user=me
        ),
        notify_settings=types.PeerNotifySettings(0, 'beep'),
        common_chats_count=3
    ))
    await dumper.dump_chat(photo_id=None, chat=types.Chat(
        id=7264,
        title='Chat',
        photo=types.ChatPhotoEmpty(),
        participants_count=5,
        date=datetime.now() - timedelta(days=10),
        version=1
    ))
    channel = types.Channel(
        id=8247,
        title='Channel',
        photo=types.ChatPhotoEmpty(),
        username='******',
        participants_count=17,
        date=datetime.now() - timedelta(days=5),
        version=7
    )
    channel_full = types.ChannelFull(
        id=8247,
        about='Just a Channel',
        read_inbox_max_id=1051,
        read_outbox_max_id=8744,
        unread_count=1568,
        chat_photo=types.PhotoEmpty(id=176489),
        notify_settings=types.PeerNotifySettingsEmpty(),
        exported_invite=types.ChatInviteEmpty(),
        bot_info=[]
    )
    await dumper.dump_supergroup(channel_full, channel, photo_id=None)
    await dumper.dump_channel(channel_full, channel, photo_id=None)
def cmap_dump(self): font = TTFont(self.fontfile) #TODO(ahmetcelik) cmap in format 12 should be used if it exists cmapTable = font['cmap'].getcmap(3, 10) if not cmapTable: cmapTable = font['cmap'].getcmap(3, 1) assert cmapTable, 'Unicode cmap table required' cmap = cmapTable.cmap # unicode table codepoints = [] glyphs = [] for code, name in cmap.iteritems(): id = font.getGlyphID(name) glyphs.append(id) codepoints.append(code) if self.debug: print id, name, code font.close() cp_dumper = Dumper(self.folder + '/codepoints') cp_dumper.dump_array(codepoints, 'I', '>') cp_dumper.close() gid_dumper = Dumper(self.folder + '/gids') gid_dumper.dump_array(glyphs, 'H', '>') gid_dumper.close()
def result_dumper(pair):
    """Attach a Dumper, labelled with the pair's key, to the pair's
    observable (second element)."""
    label = 'result left=%s' % pair[0]
    pair[1].subscribe(Dumper(label))
def serialize_cff(self, output_idx, output_data):
    """Serialize the CFF table plus per-glyph info to *output_idx* and
    the raw glyph data to *output_data*, when ``self.cffReady`` is set."""
    if self.cffReady:
        index_dumper = Dumper(output_idx)
        index_dumper.dump(self.CffTable)
        index_dumper.dump_for_each(self.glyphs_info)
        index_dumper.close()
        payload_dumper = Dumper(output_data)
        payload_dumper.dump_for_each(self.glyphs_data)
        payload_dumper.close()
def dump_closure_map(fontfile, outputfolder):
    """Takes closure of each glyph in the font and dump them into the two
    seperate files. Index file used to locate a glyph in data file. Data
    file contains closure lists.
    """
    font = TTFont(fontfile)
    taker = ClosureTaker(font)
    meta_out = Dumper(outputfolder + "/closure_idx")
    data_out = Dumper(outputfolder + "/closure_data")
    big_endian = ">"
    fmt_offset = ">l"  # offset into the data file; -1 marks "no record"
    fmt_size = ">H"    # byte length of the closure list
    fmt_elem = "H"
    elem_size = struct.calcsize(fmt_elem)
    offset = 0
    for glyph_name in font.getGlyphOrder():
        taker.clear()
        taker.add_glyph_names([glyph_name])
        closure = taker.closure()
        glyph_id = taker.glyph_name_to_id[glyph_name]
        if len(closure) == 1 and glyph_id in closure:
            # closure is just the glyph itself: recording not needed
            meta_out.dump_fmt(-1, fmt_offset)
            meta_out.dump_fmt(0, fmt_size)
        else:
            size = elem_size * len(closure)
            data_out.dump_array(closure, fmt_elem, big_endian)
            meta_out.dump_fmt(offset, fmt_offset)
            meta_out.dump_fmt(size, fmt_size)
            offset += size
    font.close()
    data_out.close()
    meta_out.close()
def dump_closure_map(fontfile, outputfolder):
    """Takes closure of each glyph in the font and dump them into the two
    seperate files. Index file used to locate a glyph in data file. Data
    file contains closure lists
    """
    font = TTFont(fontfile)
    closurer = ClosureTaker(font)
    glyph_metadata = Dumper(outputfolder + '/closure_idx')
    glyph_data = Dumper(outputfolder + '/closure_data')
    bigEndian = '>'
    fmt_offset = '>l'  # offset - length; -1 means no record for this glyph
    fmt_size = '>H'    # byte length of the closure list in the data file
    fmt_elem = 'H'
    elem_size = struct.calcsize(fmt_elem)
    offset = 0
    for g in font.getGlyphOrder():
        closurer.clear()
        closurer.add_glyph_names([g])
        glyphsClosure = closurer.closure()
        id = closurer.glyph_name_to_id[g]
        if len(glyphsClosure) == 1 and id in glyphsClosure:
            # recording not needed
            glyph_metadata.dump_fmt(-1, fmt_offset)
            glyph_metadata.dump_fmt(0, fmt_size)
        else:
            size = elem_size * len(glyphsClosure)
            glyph_data.dump_array(glyphsClosure, fmt_elem, bigEndian)
            glyph_metadata.dump_fmt(offset, fmt_offset)
            glyph_metadata.dump_fmt(size, fmt_size)
            #print id, g, glyphsClosure
            offset += size
    font.close()
    glyph_data.close()
    glyph_metadata.close()
from dumper import Dumper
from rx import Observable, Observer
from rx.internal import extensionmethod
from rx.subjects import Subject

# Sample input: digits and '/' separators with 'X' wildcard markers.
in_ = '23432/XX428/X21X71'

# Each 'X' fans out into the values emitted by Observable.range(1, 2);
# every other character passes through unchanged. The stream is then
# grouped into pairs, mapped to the pair index (rx 1.x map passes
# (value, index) to a two-argument selector), limited to the first ten
# items, and printed via a Dumper observer labelled 's'.
Observable.from_(in_) \
    .flat_map(lambda q: Observable.range(1, 2) if q == 'X' else Observable.just(q)) \
    .buffer_with_count(2) \
    .map(lambda x, i: i) \
    .take(10) \
    .subscribe(Dumper('s'))
def __init__(self, path_or_url, attr_list, keyspace_name='packets'):
    """Dumper variant with a configurable keyspace (default 'packets')."""
    # Delegate common setup to the base class
    Dumper.__init__(self, path_or_url, attr_list)
    # Cluster/session handles; presumably a Cassandra connection created
    # later on connect — TODO confirm
    self.cluster = None
    self.session = None
    self.keyspace_name = keyspace_name
from http.server import ThreadingHTTPServer, BaseHTTPRequestHandler from http_helpers.routing import RoutingTable, Route from http_helpers.objects import Request from handlers import index_handler, machine_name_handler, \ create_machine_handler, machine_manufacturer_handler, \ machine_master_key_handler, machine_meta_handler, \ create_vending_keys_handler, get_data_handler from vmf import VendingMachinesFactory from vmkeys import VMKeys from dumper import Dumper VENDING_MACHINES = VendingMachinesFactory() VM_KEYS = VMKeys() DUMPER = Dumper(VENDING_MACHINES, VM_KEYS).start() def create_route_table(): return RoutingTable( Route("GET", "/", index_handler.IndexHandler()), Route("CREATE", "/vending_machine", create_machine_handler.CreateMachineHandler(VENDING_MACHINES)), Route("GET", "/machine_name", machine_name_handler.MachineNameHandler(VENDING_MACHINES)), Route( "GET", "/machine_manufacturer", machine_manufacturer_handler.MachineManufacturerHandler( VENDING_MACHINES)), Route("GET", "/machine_meta", machine_meta_handler.MachineMetaHandler(VENDING_MACHINES)),
from __future__ import print_function from dumper import dump, dumps, Dumper import dumper import io import sys buff = io.StringIO() dumper.default_dumper = Dumper(output=buff) # BEGIN TEST CASES def do_dump_scalars(): dump(1) dump('a') dump("foo") dump('''string with a newline''') return "1'a''foo''string\\nwith a newline'" def test_do_dump_scalars(): assert_output_as_expected(do_dump_scalars) def do_dumps_multi_values(): s = dumps(1, " is less than ", 10) # returns unicode string in py2 if sys.version < (3, 0): s = s.encode('ascii', 'replace') # convert back to regular string dump(s) return "\"1' is less than '10\""
class FFnetApp(HasTraits):
    """Top-level ffnetui application object.

    Wires together the network, the three data sets (training/testing/recall),
    the trainer, plotting, logging and an embedded Python shell behind two
    TraitsUI views (main window and modal settings dialog).
    """

    # Core model objects.
    network = Instance(Network)
    data = Instance(TrainingData)  # currently active data set (one of the three below)
    training_data = Instance(TrainingData)
    testing_data = Instance(TrainingData)
    recall_data = Instance(TrainingData)
    dumper = Instance(Dumper)
    trainer = Instance(Trainer)
    shared = Instance(Shared)
    logs = Instance(Logger)
    plots = Instance(MPLPlots, transient=True)
    shell = PythonValue(Dict)  # namespace exposed in the embedded shell
    mode = Enum('train', 'test', 'recall')
    algorithm = Enum('tnc') #, 'bfgs', 'cg')
    # Convenience delegates into sub-objects.
    running = DelegatesTo('trainer')
    net = DelegatesTo('network')
    data_status = DelegatesTo('data', prefix='status')
    selected = DelegatesTo('plots')

    def __init__(self, **traits):
        """Create all sub-objects, pointing each back at this app."""
        super(FFnetApp, self).__init__(**traits)
        self.network = Network(app = self)
        self.training_data = TrainingData(app = self)
        self.testing_data = TrainingData(app = self)
        self.recall_data = TrainingData(app = self)
        self.data = self.training_data # by default
        self.dumper = Dumper(app=self)
        self.trainer = TncTrainer(app = self) # default trainer
        self.shared = Shared()
        self.logs = Logger()
        self.plots = MPLPlots()
        self.logs.logger.info('Welcome! You are using ffnet-%s.' %ffnet_version)
        self.shell = {'app':self}

    def new(self):
        """Create a new network and switch the app into training mode."""
        net = self.network.create()
        if net is not None:
            self.mode = 'train'
            self.data.normalize = True
            self._new_net_setup()

    def load(self):
        """Load an existing network and switch the app into recall mode."""
        net = self.network.load()
        if net is not None:
            self.mode = 'recall'
            self._new_net_setup()

    def save_as(self):
        # Delegates to the network object's save dialog.
        self.network.save_as()

    def export(self):
        self.network.export()

    def dump(self):
        # Open the dumper's modal configuration dialog.
        self.dumper.configure_traits(kind='modal')

    def settings(self):
        """Open the modal settings dialog (only when a network exists)."""
        if self.net:
            self._pmode = self.mode  # remember mode so a cancel can restore it
            self.edit_traits(view='settings_view', kind='livemodal')

    def train_start(self):
        self.logs.logger.info('Training network: %s' %self.network.filename)
        self.trainer.train()

    def train_stop(self):
        # Trainer polls this flag to stop.
        self.trainer.running = False

    def reset(self):
        """Randomize network weights and refresh the shared data/plots."""
        if self.net:
            self.net.randomweights()
            self.logs.logger.info('Weights has been randomized!')
            # NOTE(review): placement of clear() inside the `if` reconstructed
            # from collapsed source -- confirm indentation against upstream.
            self.clear()

    def about(self):
        from about import about
        about.open()

    def donate(self):
        import webbrowser
        url = 'https://sourceforge.net/p/ffnet/donate'
        webbrowser.open(url)

    def cite(self):
        """Show a dialog with the suggested citations (read from data/cite.txt)."""
        from pyface.api import information
        import os
        try:
            basedir = os.path.dirname(os.path.realpath(__file__)) + '/'
        except NameError: #__file__ not defined if this is main script
            basedir = ''
        fname = basedir + 'data/cite.txt'
        citations = open(fname, 'r').read()
        # Strip a possible BOM before displaying.
        msg = u'You are encouraged to cite in your papers one (or all) of the following:\n\n\n' + \
              unicode(citations, 'utf-8').replace(u'\ufeff', '')
        information(None, msg, title = "Citing ffnet/ffnetui")

    def clear(self):
        """Repopulate shared data and redraw the selected plot."""
        self.shared.populate()
        self.plots.selected.replot()

    def _arrange_plots(self):
        """Choose the plot classes appropriate for the current mode."""
        if self.mode == 'train':
            plots = [ErrorAnimation, RegressionAnimation, TOAnimation,
                     DOAnimation, ITOAnimation, DIOAnimation, GraphAnimation]
        elif self.mode == 'test':
            plots = [RegressionPlot, TOPlot, DOAnimation,
                     ITOPlot, DIOAnimation, GraphAnimation]
        else:
            plots = [OPlot, DOAnimation, IOPlot, DIOAnimation, GraphAnimation]
        self.plots.classes = plots
        self.plots.selected.replot()

    def _new_net_setup(self):
        """After creating/loading a net: (re)load data or ask for settings."""
        self.shared.populate() # Clears all data from previous training
        data_status = False
        if self.data.status == 2:
            data_status = self.data.load() # here we test data
        if not data_status:
            self.data.status = 0
            self.settings()
        else:
            self._arrange_plots()
            self.logs.logger.info('Using previously loaded data.')

    def _mode_changed(self):
        # Keep `data` pointing at the data set matching the selected mode.
        if self.mode == 'train':
            self.data = self.training_data
        elif self.mode == 'test':
            self.data = self.testing_data
        else:
            self.data = self.recall_data

    def _algorithm_changed(self, new):
        # Swap in the trainer implementation for the chosen algorithm.
        if new == 'tnc':
            self.trainer = TncTrainer(app=self)
        if new == 'cg':
            self.trainer = CgTrainer(app=self)
        if new == 'bfgs':
            self.trainer = BfgsTrainer(app=self)

    def _selected_changed(self, old, new):
        new.app = self # TODO: Plots should be initialized with 'app=self' ?

    # Main window: plot area on top (shown only when data matching the mode
    # is loaded), log/shell tabs below.
    traits_view = View(VSplit(UItem('plots',
                                    style = 'custom',
                                    height=0.75,
                                    visible_when='len(object.plots.plots) > 0 and '
                                                 '((mode in ["train", "test"] and data_status == 2) or '
                                                 '(mode in ["recall"] and data_status == 1))'),
                              Tabbed(UItem('logs',
                                           style='custom',
                                           dock = 'tab',
                                           export = 'DockWindowShell'),
                                     UItem('shell',
                                           label = 'Shell',
                                           editor = ShellEditor( share = True ),
                                           dock = 'tab',
                                           export = 'DockWindowShell'
                                           ),
                                     )
                              ),
                       handler = FFnetAppHandler(),
                       title = 'ffnetui-%s' %ffnetui_version,
                       width = 0.6,
                       height = 0.8,
                       resizable = True,
                       toolbar = toolbar,
                       menubar = menubar,
                       #statusbar = [StatusItem(name = 'net_info', width=0.5),
                       #StatusItem(name = 'data_info', width=0.5)]
                       )

    # Modal settings dialog: per-mode data loaders plus training algorithm.
    settings_view = View(Item('mode', emphasized=True),
                         '_',
                         Group(Item('object.data.input_loader',
                                    style='custom',
                                    label='Input file',),
                               Item('object.data.target_loader',
                                    style='custom',
                                    label='Target file'),
                               Group(Item('object.data.validation_patterns',
                                          label = 'Validation patterns [%]'),
                                     Item('object.data.validation_type'),
                                     Item('object.data.normalize')),
                               visible_when = 'mode == "train"'),
                         Group(Item('object.data.input_loader',
                                    style='custom',
                                    label='Input file',),
                               Item('object.data.target_loader',
                                    style='custom',
                                    label='Target file'),
                               visible_when = 'mode == "test"'),
                         Group(Item('object.data.input_loader',
                                    style='custom',
                                    label='Input file',),
                               visible_when = 'mode == "recall"'),
                         '_',
                         Group(Item('algorithm', label = 'Training algorithm'),
                               UItem('object.trainer', style='custom'),
                               Item('object.trainer.best_weights'),
                               visible_when='mode == "train"'),
                         buttons = ['OK', 'Cancel'],
                         handler = SettingsHandler(),
                         title = 'Settings...',
                         resizable = True,
                         #scrollable = True,
                         width = 0.4)