def getJarOutputStream(self):
    """Return the JarOutputStream for this jar, creating it on first use.

    When a manifest is present it is handed to the JarOutputStream
    constructor so it is emitted as META-INF/MANIFEST.MF. The stream is
    cached on the instance and reused by later calls.
    """
    if not self._jarOutputStream:
        target = FileOutputStream(self._jarFile)
        if self._manifest:
            self._jarOutputStream = JarOutputStream(target, self._manifest)
        else:
            self._jarOutputStream = JarOutputStream(target)
    return self._jarOutputStream
def GenerateAllianceSets():
    """Generate the Alliance (CARL) training and testing MARC record sets.

    Draws 19,999 random record positions for the training set and, for each,
    a distinct position for the testing set; then streams once through every
    collection file referenced by CARL_POSITIONS, copying selected records
    into Alliance/carl-training.mrc and Alliance/carl-testing.mrc.

    Jython-only (marc4j + java.io).  Depends on module globals CARL_STATS,
    CARL_POSITIONS, SORTED_POSITIONS, DATA_ROOT and helper get_range(),
    which are defined elsewhere in this file.
    """
    if os.name != 'java':  # marc4j / java.io exist only under Jython
        return
    start_time = datetime.datetime.utcnow()
    print("Starting generation of Alliance Training and Testing Sets at {0}".
          format(start_time.isoformat()))
    training_mrc = marc4j.MarcStreamWriter(
        FileOutputStream(os.path.join("Alliance", "carl-training.mrc")))
    training_mrc.setConverter(marc4j.converter.impl.AnselToUnicode())
    testing_mrc = marc4j.MarcStreamWriter(
        FileOutputStream(os.path.join("Alliance", "carl-testing.mrc")))
    testing_mrc.setConverter(marc4j.converter.impl.AnselToUnicode())
    # PERF FIX: track drawn training positions in a set; the original used
    # list.count() per draw, making position generation accidentally O(n^2).
    training_pos = set()
    print("Generating random record positions")
    total = int(CARL_STATS.get('total'))
    for i in xrange(1, 20000):  # NOTE: range is exclusive -> 19,999 draws
        rand_int = random.randint(1, total)
        range_key = get_range(rand_int, SORTED_POSITIONS)
        CARL_POSITIONS[range_key]['training-recs'].append(rand_int)
        training_pos.add(rand_int)
        # Draw a testing position disjoint from all training positions.
        while 1:
            rand_int2 = random.randint(1, total)
            if rand_int2 not in training_pos:
                range_key = get_range(rand_int2, SORTED_POSITIONS)
                CARL_POSITIONS[range_key]['testing-recs'].append(rand_int2)
                break
    print("Finished random position generation, elapsed time={0}".format(
        (datetime.datetime.utcnow() - start_time).seconds / 60.0))
    for key, row in CARL_POSITIONS.iteritems():
        print("\nStarting retrival of {0} records from {1}".format(
            (len(row.get('training-recs')) + len(row.get('testing-recs'))),
            row.get('filename')))
        print("Elapsed time={0}min".format(
            (datetime.datetime.utcnow() - start_time).seconds / 60.0))
        collection_file = FileInputStream(
            os.path.join(DATA_ROOT, "Coalliance-Catalog", row['filename']))
        collection_reader = marc4j.MarcStreamReader(collection_file)
        # key[0] is presumably the absolute position preceding this file's
        # first record -- TODO confirm against where CARL_POSITIONS is built.
        offset = key[0]
        # PERF FIX: O(1) set membership per record instead of list.count().
        training_wanted = set(row.get('training-recs'))
        testing_wanted = set(row.get('testing-recs'))
        counter = 0
        while collection_reader.hasNext():
            counter += 1
            record = collection_reader.next()
            if (counter + offset) in training_wanted:
                training_mrc.write(record)
            if (counter + offset) in testing_wanted:
                testing_mrc.write(record)
            if not counter % 100:   # progress dot every 100 records
                sys.stderr.write(".")
            if not counter % 1000:  # record count every 1000 records
                sys.stderr.write("{0}".format(counter))
    training_mrc.close()
    testing_mrc.close()
    end_time = datetime.datetime.utcnow()
    print("Finished generation of CARL sets at {0}".format(
        end_time.isoformat()))
    print("Total time is {0} minutes".format(
        (end_time - start_time).seconds / 60.0))
def GenerateColoradoCollegeSets(marc_file, max_recs=984751):
    """Generate the Colorado College training and testing MARC record sets.

    Draws up to 19,999 random record positions for each set (testing
    positions are kept disjoint from training), then streams once through
    *marc_file*, copying selected records into ColoradoCollege/cc-training.mrc
    and ColoradoCollege/cc-testing.mrc.  Jython-only (marc4j + java.io).

    :param marc_file: file name under DATA_ROOT/TIGER-MARC21 to read
    :param max_recs: number of records in marc_file (upper bound for draws)
    """
    if os.name != 'java':
        return
    start_time = datetime.datetime.utcnow()
    print("Start GenerateColoradoCollegeSets at {0}".format(
        start_time.isoformat()))
    collection_file = FileInputStream(
        os.path.join(DATA_ROOT, "TIGER-MARC21", marc_file))
    collection_reader = marc4j.MarcStreamReader(collection_file)
    training_mrc = marc4j.MarcStreamWriter(
        FileOutputStream(os.path.join("ColoradoCollege", "cc-training.mrc")))
    training_mrc.setConverter(marc4j.converter.impl.AnselToUnicode())
    # NOTE(review): unlike the training writer, no Ansel->Unicode converter
    # is set on testing_mrc; preserved as-is -- confirm whether intentional.
    testing_mrc = marc4j.MarcStreamWriter(
        FileOutputStream(os.path.join("ColoradoCollege", "cc-testing.mrc")))
    random_rec_positions = []
    print("Generating random sequences")
    for i in xrange(1, 20000):
        random_rec_positions.append(random.randint(1, max_recs))
    training_pos = set(random_rec_positions)  # set => O(1) membership below
    random_rec_positions = []
    for i in xrange(1, 20000):
        # Draw a testing position disjoint from the training positions.
        while 1:
            rand_int = random.randint(1, max_recs)
            if rand_int not in training_pos:
                random_rec_positions.append(rand_int)
                break
    testing_pos = set(random_rec_positions)
    counter = 0
    print("Iterating through {0}".format(marc_file))
    while collection_reader.hasNext():
        counter += 1
        record = collection_reader.next()
        if counter in training_pos:
            try:
                training_mrc.write(record)
            except:
                # BUG FIX: the original formatted this message with an
                # undefined name ``row``, raising NameError inside the
                # handler and masking the real write failure.
                print("Failed to write training {0} {1}".format(
                    sys.exc_info()[1], counter))
        if counter in testing_pos:
            try:
                testing_mrc.write(record)
            except:
                # BUG FIX: same undefined-``row`` problem as above.
                print("Failed to write testing {0} {1}".format(
                    sys.exc_info()[1], counter))
        if not counter % 100:   # progress dot every 100 records
            sys.stderr.write(".")
        if not counter % 1000:  # record count every 1000 records
            sys.stderr.write(" {0} ".format(counter))
    training_mrc.close()
    testing_mrc.close()
    end_date = datetime.datetime.utcnow()
    print(
        "Finished generating Colorado College training and testing sets at {0}"
        .format(end_date.isoformat()))
    print("Total time {0} minutes".format(
        (end_date - start_time).seconds / 60.0))
def exportAll():
    """Export an OSB/ALSB configuration to a jar file via WLST.

    Exports either the whole domain (when the script variable ``project`` is
    the string "None") or a single project, writing the bytes to
    ``exportJar``.  When ``customFile`` is set, also writes a Work Manager
    find-and-replace customization XML file.

    Relies on WLST/script globals not visible in this chunk: findService,
    project, passphrase, exportJar, customFile, and the WLST booleans
    true/false.
    """
    try:
        ALSBConfigurationMBean = findService(
            "ALSBConfiguration",
            "com.bea.wli.sb.management.configuration.ALSBConfigurationMBean")
        print "ALSBConfiguration MBean found"
        print project
        if project == "None":
            # No project given: export the entire domain configuration.
            ref = Ref.DOMAIN
            collection = Collections.singleton(ref)
            if passphrase == None:
                print "Export the config"
                theBytes = ALSBConfigurationMBean.export(
                    collection, true, None)
            else:
                # A passphrase encrypts sensitive resources in the export.
                print "Export and encrypt the config"
                theBytes = ALSBConfigurationMBean.export(
                    collection, true, passphrase)
        else:
            # Export just the named project.
            ref = Ref.makeProjectRef(project)
            print "Export the project", project
            collection = Collections.singleton(ref)
            theBytes = ALSBConfigurationMBean.exportProjects(
                collection, passphrase)
        aFile = File(exportJar)
        out = FileOutputStream(aFile)
        out.write(theBytes)
        out.close()
        print "ALSB Configuration file: " + exportJar + " has been exported"
        if customFile != "None":
            print collection
            # Build a customization that rewrites Work Manager env values
            # for the production system.
            query = EnvValueQuery(
                None,
                Collections.singleton(EnvValueTypes.WORK_MANAGER), collection,
                false, None, false)
            customEnv = FindAndReplaceCustomization(
                'Set the right Work Manager', query,
                'Production System Work Manager')
            print 'EnvValueCustomization created'
            customList = ArrayList()
            customList.add(customEnv)
            print customList
            aFile = File(customFile)
            out = FileOutputStream(aFile)
            Customization.toXML(customList, out)
            out.close()
            print "ALSB Dummy Customization file: " + customFile + " has been created"
    except:
        raise
def permissionsload(context, main=None, add=None, filterinfo=None,
                    session=None, elementId=None, data=None):
    """Exchange the permissions-related tables with XML files on disk.

    For each of the roles/permissions cursors, pairs it with an XML file of
    the same name two directories above this script and either writes the
    cursor out (add == 'upload') or reads the file back in
    (add == 'download').

    :param context: database context passed to every cursor constructor
    :param add: 'upload' or 'download'; any other value raises at runtime
                (dataStream is then unbound), matching the original behavior
    """
    permissions = PermissionsCursor(context)
    roles = RolesCursor(context)
    rolesCustomPerms = rolesCustomPermsCursor(context)
    customPerms = customPermsCursor(context)
    customPermsTypes = customPermsTypesCursor(context)
    cursors = [
        roles, permissions, customPermsTypes, customPerms, rolesCustomPerms
    ]
    files = [
        'roles', 'permissions', 'customPermsTypes', 'customPerms',
        'rolesCustomPerms'
    ]
    # Hoisted loop invariant: the directory two levels above this file.
    base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    # Idiom: iterate the (cursor, file) pairs directly instead of indexing.
    for cursor, file_stem in zip(cursors, files):
        filePath = os.path.join(base_dir, file_stem + '.xml')
        if add == 'upload':
            dataStream = FileOutputStream(filePath)
        elif add == 'download':
            dataStream = FileInputStream(filePath)
        # NOTE(review): 'upload' pairs with downloadXML() and 'download'
        # with uploadXML().  This looks inverted but exactly matches the
        # original behavior -- confirm DataBaseXMLExchange's naming
        # convention before "fixing" it.
        exchange = DataBaseXMLExchange(dataStream, cursor)
        if add == 'upload':
            exchange.downloadXML()
        elif add == 'download':
            exchange.uploadXML()
        dataStream.close()
def GenSubjectNav(self):
    """Render the site subject-navigation fragment and write it to
    html/subject_nav.html under the servlet root.

    Builds, per grade, the list of meta subjects, feeds everything to the
    FreeMarker template /WEB-INF/ftl/site_subject_nav.ftl, and writes the
    rendered UTF-8 HTML to disk.
    """
    subjectService = __spring__.getBean("subjectService")
    gradeIds = subjectService.getGradeIdList()
    # Renamed locals: the original shadowed the builtins str/map/file.
    metaGrades = ArrayList()
    subjectNav = ArrayList()
    for gradeId in gradeIds:
        grade = subjectService.getGrade(int(gradeId))
        metaGrades.add(grade)
        subjects = subjectService.getSubjectByGradeId(int(gradeId))
        gradeSubjects = ArrayList()
        if subjects is not None:
            for idx in range(0, subjects.size()):
                gradeSubjects.add(subjects[idx].metaSubject)
        subjectNav.add({"gradeName": grade.gradeName,
                        "gradeId": gradeId,
                        "metaSubject": gradeSubjects})
    model = HashMap()
    model.put("metaGrade", metaGrades)
    model.put("meta_Grade", metaGrades)  # template expects both spellings
    model.put("SubjectNav", subjectNav)
    model.put("SiteUrl", CommonUtil.getContextUrl(request))
    templateProcessor = __spring__.getBean("templateProcessor")
    html = templateProcessor.processTemplate(
        model, "/WEB-INF/ftl/site_subject_nav.ftl", "utf-8")
    rootPath = JitarRequestContext.getRequestContext().getServletContext().getRealPath("/")
    fileIndex = rootPath + "html" + File.separator + "subject_nav.html"
    # BUG FIX: the original called flush() BEFORE write() (a no-op) and
    # leaked the writer when write() raised; write first, then flush, and
    # always close in finally.
    writer = None
    try:
        writer = OutputStreamWriter(FileOutputStream(File(fileIndex)), "utf-8")
        writer.write(html)
        writer.flush()
    finally:
        if writer is not None:
            writer.close()
def actionPerformed(self, e=None):
    """Save the source panel's labeling strategy to a user-chosen *.gvslab
    file via the gvSIG persistence manager.

    Does nothing when labeling is disabled or no strategy / file is chosen.
    """
    i18n = ToolsLocator.getI18nManager()
    panel = e.getSource()
    if not panel.isLabelingEnabled():
        return
    labeling = panel.getLabelingStrategy()
    if labeling is None:
        return
    layer = panel.getLayer()
    # Prefer the folder of the layer's backing file; otherwise fall back to
    # the file-chooser manager's last-used path.
    initialPath = None
    getFile = getattr(layer.getDataStore().getParameters(), "getFile", None)
    if getFile is not None:
        initialPath = getFile().getParent()
    else:
        initialPath = ToolsUtilLocator.getFileDialogChooserManager().getLastPath(
            "OPEN_LAYER_FILE_CHOOSER_ID", None)
    f = filechooser(
        OPEN_FILE,
        title=i18n.getTranslation("_Select_a_file_to_save_the_labeling"),
        initialPath=initialPath,
        multiselection=False,
        filter=("gvslab",)
    )
    if f is None:
        return
    trace("filename %s" % f)
    # BUG FIX: the original referenced ``fos`` in the finally block even when
    # FileOutputStream() itself raised, producing a NameError; initialize it
    # first and close only what was actually opened.
    fos = None
    try:
        fos = FileOutputStream(f)
        persistenceManager = ToolsLocator.getPersistenceManager()
        persistenceManager.putObject(fos, labeling)
    finally:
        if fos is not None:
            fos.close()
def proc1(context):
    """Debug driver: obtain a TestForm instance from lyraplayer, exercise
    findRec() and move() with a canned schema payload, and print the results.

    Opens c:/temp/debug.log (Windows path -- this is a local debugging aid),
    though serialization into it is currently commented out; the stream is
    still closed in finally.
    """
    #result = lyraplayer.getInstance(context, main='''{"sessioncontext":{"sid":"admin","userdata":"default","phone":"","username":"******","fullusername":"******","urlparams":{"urlparam":{"@name":"patientId","@value":["56"]}},"email":"","login":"******","sessionid":"06635C092D9884D757C65CC9D21DCD10","related":{"xformsContext":{"formData":{"schema":{"data":{"@visitDate":"2015-05-22T17:49:04"},"hypotheses":{"@block":"0","hypothesis":{"@name":"\u0422\u0435\u0441\u0442\u043e\u0432\u0430\u044f \u0440\u0430\u0437\u0440\u0430\u0431\u043e\u0442\u043a\u0430 \u0441 \u0442\u0430\u0431\u043b\u0438\u0446\u0430\u043c\u0438","@id":"11"}},"@xmlns":""}},"partialUpdate":"false","@id":"inspectionCard"}},"ip":"127.0.0.1"}}''')
    #result = lyraplayer.getInstance(context, main='testgrain.testform.TestForm')
    result = lyraplayer.getFormInstance(context, 'testgrain.testform.TestForm')
    fos = FileOutputStream('c:/temp/debug.log')
    try:
        print result.findRec()
        # Move the cursor to the record matching this fixed test schema.
        print result.move(
            '=', '''<schema recversion="1" formId="testgrain.testform.TestForm">
<ff1 type="INT" null="false" local="true">5</ff1>
<ff2 type="INT" null="false" local="true">0</ff2>
<id type="INT" null="false" local="false">2</id>
<attrVarchar type="VARCHAR" null="false" local="false">e</attrVarchar>
<attrInt type="INT" null="false" local="false">11</attrInt>
<f1 type="BIT" null="false" local="false">true</f1>
<f2 type="BIT" null="false" local="false">true</f2>
<f4 type="REAL" null="false" local="false">4.0</f4>
<f5 type="REAL" null="false" local="false">4.0</f5>
<f6 type="VARCHAR" null="false" local="false">1</f6>
<f7 type="VARCHAR" null="true" local="false"/>
<f8 type="DATETIME" null="false" local="false">2015-11-04T19:25:54</f8>
<f9 type="DATETIME" null="false" local="false">2015-11-20T19:25:56</f9>
</schema>
''')
        #result.serialize(fos)
    finally:
        fos.close()
    print 'done'
def extract_obj(self, file_path, res, offset):
    """Write the payload bytes ``res[offset:]`` to *file_path*, picking a
    non-clobbering name ("name(1).ext", "name(2).ext", ...) when the file
    already exists.

    Errors are reported to self._stderr; the saved path is reported to
    self._stdout.
    """
    try:
        f = File(file_path)
        # BUG FIX: the original built the stem with u"".join, which dropped
        # the interior dots of multi-dot names ("a.b.c" became "ab(1).c");
        # join the stem parts back with ".".
        stem = u".".join(file_path.split(u".")[:-1])
        ext = file_path.split(u".")[-1]
        counter = 0
        # Count up the suffix until the name is free.
        while f.exists():
            counter += 1
            f = File(u"{}({}).{}".format(stem, counter, ext))
        fos = FileOutputStream(f)
        try:
            fos.write(res[offset:])
            self._stdout.printf("save as \"%s\".\n\n", f.getPath())
        finally:
            # Close even when write/printf raises (original leaked here).
            fos.close()
    except Exception as e:
        self._stderr.println("[!] In extract_obj.")
        self._stderr.println(e)
def unzip_atp_wallet(wallet_file, location):
    """Extract every entry of the ATP wallet zip *wallet_file* into the
    directory *location*, creating it (one level) if needed.

    NOTE(review): entries are assumed to be plain files (wallets are flat);
    a directory entry would make FileOutputStream fail -- confirm if wallets
    with directory entries exist.
    """
    if not os.path.exists(location):
        os.mkdir(location)
    buffer = jarray.zeros(1024, "b")
    fis = FileInputStream(wallet_file)
    zis = ZipInputStream(fis)
    try:
        ze = zis.getNextEntry()
        while ze:
            fileName = ze.getName()
            newFile = File(location + File.separator + fileName)
            File(newFile.getParent()).mkdirs()
            fos = FileOutputStream(newFile)
            try:
                # ``nread`` renamed from ``len`` -- the original shadowed
                # the builtin.
                nread = zis.read(buffer)
                while nread > 0:
                    fos.write(buffer, 0, nread)
                    nread = zis.read(buffer)
            finally:
                # RESOURCE FIX: close each entry's stream even on error.
                fos.close()
            zis.closeEntry()
            ze = zis.getNextEntry()
    finally:
        # RESOURCE FIX: always release the zip and file streams (the
        # original leaked both on any exception and called a redundant
        # trailing closeEntry()).
        zis.close()
        fis.close()
def __init__(self, dump_path, dsl_pipe): self.dump_path = dump_path if not os.path.exists(dsl_pipe): raise Exception(""" The path to pipe for DSL reporter %s does not exists. Make sure SPADE is running and DSL reporter has been setup. For more infromation, take a look at http://code.google.com/p/data-provenance/wiki/Pipe""" ) self.pipe = BufferedWriter( OutputStreamWriter(FileOutputStream(dsl_pipe))) self.logger = logging.getLogger(self.__class__.__name__) self.user_data = self._read_json_data("me_data") self.user_newsfeed = self._read_json_data("me_newsfeed") self.friends = dict() self.created_user_nodes = set() for f in self._read_json_data("me_friends"): fuid = f['id'] try: self.friends[fuid] = self._read_json_data("%s_info" % fuid) except IOError, e: logger.info( "Skipping data for friend %s; unable to read data" % f['name']) self.friends[fuid] = f
def write_ordered_variables(program_name, variable_map, file_path, append=False): """ Write variables to file while preserving order of the variables. :param program_name: name of the calling program :param variable_map: map or variable properties to write to file :param file_path: the file to which to write the properties :param append: defaults to False. Append properties to the end of file :raises VariableException if an error occurs while storing the variables in the file """ _method_name = 'write_ordered_variables' _logger.entering(program_name, file_path, append, class_name=_class_name, method_name=_method_name) pw = None try: pw = PrintWriter(FileOutputStream(File(file_path), Boolean(append)), Boolean('true')) for key, value in variable_map.iteritems(): formatted = '%s=%s' % (key, value) pw.println(formatted) pw.close() except IOException, ioe: _logger.fine('WLSDPLY-20007', file_path, ioe.getLocalizedMessage()) ex = exception_helper.create_variable_exception( 'WLSDPLY-20007', file_path, ioe.getLocalizedMessage(), error=ioe) _logger.throwing(ex, class_name=_class_name, method_name=_method_name) if pw is not None: pw.close() raise ex
def zipdir(basedir, archivename):
    """Zip the contents of directory *basedir* into *archivename* and return
    the archive name.

    :param basedir: directory to archive (must exist)
    :param archivename: path of the zip file to create
    :return: archivename, for caller convenience
    """
    assert os.path.isdir(basedir)
    fos = FileOutputStream(archivename)
    zos = ZipOutputStream(fos)
    try:
        # add_folder (defined elsewhere in this file) walks basedir and adds
        # each entry to the stream.
        add_folder(zos, basedir, basedir)
    finally:
        # RESOURCE FIX: close in finally; closing the ZipOutputStream also
        # closes the underlying FileOutputStream, so a failing add_folder no
        # longer leaks both streams.
        zos.close()
    return archivename
def transformReport(self, stylesheet, xml, output, params):
    """Apply the XSLT *stylesheet* to *xml* and write the transformed result
    to *output*, setting each entry of *params* as a transformer parameter.

    NOTE(review): every intermediate value is stored as an instance
    attribute (self.xsl, self.tfactory, self.key, ...) rather than a local,
    so they all remain set on the object after the call -- presumably
    unintentional, but preserved here because callers could observe it.
    """
    from java.io import FileInputStream
    from java.io import FileOutputStream
    from java.io import ByteArrayOutputStream
    from javax.xml.transform import TransformerFactory
    from javax.xml.transform.stream import StreamSource
    from javax.xml.transform.stream import StreamResult
    self.xsl = FileInputStream("%s" % stylesheet)
    self.xml = FileInputStream("%s" % xml)
    self.html = FileOutputStream("%s" % output)
    try:
        self.xslSource = StreamSource(self.xsl)
        self.tfactory = TransformerFactory.newInstance()
        self.xslTemplate = self.tfactory.newTemplates(self.xslSource)
        self.transformer = self.xslTemplate.newTransformer()
        self.source = StreamSource(self.xml)
        self.result = StreamResult(self.html)
        # Pass caller-supplied parameters through to the stylesheet.
        for self.key, self.value in params.items():
            self.transformer.setParameter(self.key, self.value)
        self.transformer.transform(self.source, self.result)
    finally:
        # All three streams are closed whether or not the transform raised.
        self.xsl.close()
        self.xml.close()
        self.html.close()
def retrieve_an_archive(cust_id, archive_name, user_id, chunk_hint, file_name, output_ids):
    """Retrieve a per-user active recovery archive chunk into *file_name*.

    :param cust_id: customer id as a string (parsed with Integer.parseInt)
    :param user_id: user id as a string (parsed with Integer.parseInt)
    :param chunk_hint: hint telling the recovery manager which chunk to emit
    :return: the next chunk hint via get_chunk_hint(), or None when the
             output file could not be opened
    :raises: re-raises any recovery-manager failure after closing the stream
    """
    print "\nretrieve_a_chunk routine:: output file_name is :", file_name
    try:
        outfile_stream = FileOutputStream(file_name)
    except:
        # Deliberate best-effort: an unopenable output path aborts this
        # chunk only, signalled by the None return.
        print "retrieve_a_chunk routine:: Failed to open output stream on file : ", file_name, "..returning"
        return None
    # retrieve data
    mc = ManagementContainer.getInstance()
    rm = mc.getRecoveryManager()
    l_cust_id = Integer.parseInt(cust_id);
    l_user_id = Integer.parseInt(user_id);
    sosw = IRecoveryManager.SimpleOutputStreamWrapper(outfile_stream)
    try:
        rm.createPerUserActiveRecoveryArchiveFile(l_cust_id, archive_name,
                                                  l_user_id, sosw, chunk_hint)
    except:
        print "retrieve_a_chunk routine:: `Exception while creating active recovery archive..returning"
        # Java-side stack trace (Jython exposes printStackTrace on Java
        # exceptions), followed by the Python traceback.
        sys.exc_info()[1].printStackTrace()
        print("*** print_exc:")
        traceback.print_exc(file=sys.stdout)
        outfile_stream.close()
        raise
    outfile_stream.close()
    return get_chunk_hint(file_name, output_ids)
def __init__(self, filename):
    """Create a cab-archive writer targeting *filename*.

    Opens the output stream immediately and starts a single folder entry
    using LZX compression at window size 20; file entries are presumably
    added by other methods of this class.
    """
    self.cabfile = cab.CabCreator(self)
    self.cabfile.create(FileOutputStream(filename))
    folder = cab.CabFolderEntry()
    # LZX with a 2^20 window -- the strongest setting cab.CabConstants
    # exposes here.
    folder.setCompression(cab.CabConstants.COMPRESSION_LZX, 20)
    #print folder.compressionToString()
    self.cabfile.newFolder(folder)
def extractZip(zip, dest):
    "extract zip archive to dest directory"
    logger.info("Begin extracting:" + zip + " --> " + dest)
    mkdir_p(dest)
    zipfile = ZipFile(zip)
    try:
        entries = zipfile.entries()
        while entries.hasMoreElements():
            entry = entries.nextElement()
            if entry.isDirectory():
                mkdir_p(os.path.join(dest, entry.name))
            else:
                newFile = File(dest, entry.name)
                mkdir_p(newFile.parent)
                zis = zipfile.getInputStream(entry)
                fos = FileOutputStream(newFile)
                try:
                    buffer = ByteBuffer.allocate(1024)
                    while True:
                        nread = zis.read(buffer.array(), 0, 1024)
                        if nread <= 0:
                            break
                        fos.write(buffer.array(), 0, nread)
                finally:
                    # RESOURCE FIX: close per-entry streams even when a
                    # read/write raises (the original leaked them).
                    fos.close()
                    zis.close()
    finally:
        # RESOURCE FIX: the original never closed the ZipFile at all.
        zipfile.close()
    logger.info("End extracting:" + str(zip) + " --> " + str(dest))
def write_variables(program_name, variable_map, file_path, append=False): """ Write the dictionary of variables to the specified file. :param program_name: name of tool that invoked the method which will be written to the variable properties file :param variable_map: the dictionary of variables :param file_path: the file to which to write the properties :param append: defaults to False. Append properties to the end of file :raises VariableException if an error occurs while storing the variables in the file """ _method_name = 'write_variables' _logger.entering(program_name, file_path, append, class_name=_class_name, method_name=_method_name) props = Properties() for key in variable_map: value = variable_map[key] props.setProperty(key, value) comment = exception_helper.get_message('WLSDPLY-01731', program_name) output_stream = None try: output_stream = FileOutputStream(File(file_path), Boolean(append)) props.store(output_stream, comment) output_stream.close() except IOException, ioe: ex = exception_helper.create_variable_exception('WLSDPLY-20007', file_path, ioe.getLocalizedMessage(), error=ioe) _logger.throwing(ex, class_name=_class_name, method_name=_method_name) if output_stream is not None: output_stream.close() raise ex
def updateDeployedVersion(serviceGroupName, serviceName, interfaceVersion,
                          mavenVersionId, environmentName):
    """Record the deployed maven version for one service/environment in
    ~/osb_deployed_versions.properties.

    The property key is
    "<group>.<service>.<interfaceVersion>.<environment>" and the value is
    *mavenVersionId*; the whole file is re-read and re-written.
    """
    propsFile = os.environ["HOME"] + "/osb_deployed_versions.properties"
    configProps = Properties()
    propInputStream = FileInputStream(propsFile)
    try:
        configProps.load(propInputStream)
    finally:
        propInputStream.close()
    configProps.put(serviceGroupName + "." + serviceName + "." +
                    interfaceVersion + "." + environmentName, mavenVersionId)
    # RESOURCE FIX: the original passed an anonymous FileOutputStream to
    # store() and never closed it; hold it and close in finally.
    propOutputStream = FileOutputStream(propsFile)
    try:
        configProps.store(propOutputStream,
                          "OSB Deployed versions. Updated on " + str(datetime.now()))
    finally:
        propOutputStream.close()
def get_file(self, src_file, dst_file):
    """Download *src_file* from the SFTP server into local *dst_file*,
    creating the destination directory tree as needed.

    Uses a fresh SFTPv3Client over this object's SSH connection; all
    handles are released in finally.
    """
    sftp = SFTPv3Client(self.client)
    dst_fd = None
    src_fd = None
    try:
        dst_file = os.path.abspath(dst_file.replace('/', os.sep))
        dst_dir = os.path.dirname(dst_file)
        if not os.path.exists(dst_dir):
            os.makedirs(dst_dir)
        dst_fd = FileOutputStream(dst_file)
        stats = sftp.stat(src_file)
        src_size = stats.size
        src_fd = sftp.openFileRO(src_file)
        size = 0
        arraysize = SSHClient.BUFFER_SIZE
        data = jarray.zeros(arraysize, 'b')
        while True:
            # sftp.read returns the number of bytes placed in ``data``,
            # or -1 at end of file.
            moredata = sftp.read(src_fd, size, data, 0, arraysize)
            if moredata == -1:
                break
            # BUG FIX: write exactly the number of bytes read.  The
            # original wrote len(data) (the full buffer) except on the
            # final chunk, corrupting the file whenever a mid-stream read
            # returned fewer bytes than requested.
            dst_fd.write(data, 0, moredata)
            size += moredata
    finally:
        if src_fd:
            sftp.closeFile(src_fd)
        if dst_fd:
            dst_fd.flush()
            dst_fd.close()
        sftp.close()
def createOutput(outfile): try: fout = FileOutputStream(outfile) return PrintStream(fout) except IOException: print "Error opening output file" return None
def getFile(self, name):
    """Map a dotted name (e.g. 'pkg.sub.Cls') to a .class path under
    self.outdir, create the parent directories, and return an open
    FileOutputStream for it.
    """
    parts = string.split(name, '.')
    fname = os.path.join(self.outdir, *parts) + '.class'
    target = File(fname)
    # Make sure the package directory chain exists before opening.
    File(target.getParent()).mkdirs()
    return FileOutputStream(target)
def syncNmapPortConfigFile(agentPath):
    ''' Sync nmap port config with global probe's "port number to port name" mapping '''
    logger.debug('synchronizing nmap port config file')
    sep = CollectorsParameters.FILE_SEPARATOR
    portConfigFilename = (agentPath +
                          CollectorsParameters.getDiscoveryConfigFolder() +
                          sep + 'portNumberToPortName.xml')
    mamservice = File(portConfigFilename)
    nmapservice = File(agentPath +
                       CollectorsParameters.getDiscoveryResourceFolder() +
                       sep + 'nmap-services')
    # Already up to date: the generated nmap-services file is newer than the
    # XML mapping it is derived from.
    if nmapservice.lastModified() > mamservice.lastModified():
        return
    nmapFile = FileOutputStream(nmapservice)
    try:
        document = SAXBuilder(0).build(mamservice)
        ports = XmlWrapper(document.getRootElement().getChildren('portInfo'))
        for port in ports:
            # Only ports flagged discover="1" are written out.
            if int(port.getAttributeValue("discover")):
                portNumber = port.getAttributeValue("portNumber")
                portName = port.getAttributeValue("portName")
                portProtocol = port.getAttributeValue("portProtocol")
                nmapFile.write("%s\t%s/%s\r\n" %
                               (portName, portNumber, portProtocol))
    finally:
        # RESOURCE FIX: close in finally so a failing XML parse/build no
        # longer leaks the output stream (and leaves a half-written file
        # handle open).
        nmapFile.close()
def persistNewProfileName(domainConfigurationDirectory, profileName):
    """Persist *profileName* as the current SOA profile in
    server-profile-mbean-config.xml under the domain configuration
    directory, preserving the other properties in the file.
    """
    from oracle.fabric.profiles.impl import ProfileConstants
    fileName = (domainConfigurationDirectory + File.separatorChar +
                'server-profile-mbean-config.xml')
    profileProperties = Properties()
    # RESOURCE FIX: the original never closed either stream; close both in
    # finally.
    inStream = FileInputStream(fileName)
    try:
        profileProperties.loadFromXML(inStream)
    finally:
        inStream.close()
    profileProperties.setProperty(
        ProfileConstants.CURRENT_SOA_PROFILE_PROPERTY_NAME, profileName)
    outStream = FileOutputStream(fileName)
    try:
        profileProperties.storeToXML(outStream, None)
    finally:
        outStream.close()
def save_example(self, label, image_byte_array):
    """Save one labeled example image as "<label>_<millis>.png" inside
    self.dir_path.

    :param label: filename prefix identifying the example's class
    :param image_byte_array: iterable of byte values forming the PNG data
    """
    output_file_name = (label + "_" +
                        str(java.lang.System.currentTimeMillis()) + ".png")
    save_path = File(self.dir_path, output_file_name).getCanonicalPath()
    fileos = FileOutputStream(save_path)
    try:
        # Written byte-by-byte so any iterable of ints works.
        # NOTE(review): if image_byte_array is a real byte[], a single
        # fileos.write(image_byte_array) would be far faster -- confirm the
        # caller's type before changing.
        for byte in image_byte_array:
            fileos.write(byte)
        fileos.flush()
    finally:
        # RESOURCE FIX: close even when a write fails (original leaked).
        fileos.close()
def fake_plugin_jar(name, plugins_config):
    """Write a minimal jar at *name* whose only entry, 'plugins.config',
    holds the given contents."""
    stream = FileOutputStream(name)
    archive = ZipOutputStream(stream)
    archive.putNextEntry(ZipEntry('plugins.config'))
    archive.write(plugins_config)
    archive.closeEntry()
    # Closing the zip stream also closes the underlying file stream.
    archive.close()
def save_versions(self): print "save versions" versionsProperties = Properties() outFile = FileOutputStream(self.versionsFileName) versionsProperties.setProperty("script", self.app.SCRIPTVERSION) versionsProperties.setProperty("tools", self.app.TOOLSVERSION) versionsProperties.store(outFile, None) outFile.close()
def save_config(self):
    """Save preferences to config file
    """
    out = FileOutputStream(app.configFileName)
    try:
        app.properties.store(out, None)
    finally:
        # RESOURCE FIX: close in finally so a failing store() does not leak
        # the stream.
        out.close()
    #load new preferences
    self.config = ConfigLoader(self)
def SlowGenerateAllianceSets():
    """Slow variant of GenerateAllianceSets: draws random positions, then
    fetches each record individually with get_carl_marc_record() instead of
    streaming the collection files.

    Jython-only (marc4j + java.io); depends on the module global CARL_STATS
    and helper get_carl_marc_record().
    """
    if os.name != 'java':
        return
    start_time = datetime.datetime.utcnow()
    print("Starting generation of Alliance Training and Testing Sets at {0}".
          format(start_time.isoformat()))
    training_mrc = marc4j.MarcStreamWriter(
        FileOutputStream(os.path.join("Alliance", "carl-training.mrc")))
    training_mrc.setConverter(marc4j.converter.impl.AnselToUnicode())
    # NOTE(review): unlike the training writer, no Ansel->Unicode converter
    # is set on testing_mrc; preserved as-is -- confirm whether intentional.
    testing_mrc = marc4j.MarcStreamWriter(
        FileOutputStream(os.path.join("Alliance", "carl-testing.mrc")))
    training_pos, testing_pos = [], []
    # PERF FIX: track drawn training positions in a set; the original used
    # list.count() per draw, making generation accidentally O(n^2).
    training_seen = set()
    print("Creating random positions")
    total = int(CARL_STATS.get('total'))
    for i in xrange(1, 20000):  # NOTE: range is exclusive -> 19,999 draws
        rand_int = random.randint(1, total)
        training_pos.append(rand_int)
        training_seen.add(rand_int)
        # Draw a testing position disjoint from all training positions.
        while 1:
            rand_int2 = random.randint(1, total)
            if rand_int2 not in training_seen:
                testing_pos.append(rand_int2)
                break
    print("Retrieve MARC records for Training Set")
    for i, position in enumerate(sorted(training_pos)):
        record = get_carl_marc_record(position)
        if record is not None:
            training_mrc.write(record)
        if not i % 100:
            sys.stderr.write(".")
        if not i % 1000:
            sys.stderr.write("{0}".format(i))
    training_mrc.close()
    print("Retrieve MARC records for Testing Set")
    for i, position in enumerate(sorted(testing_pos)):
        record = get_carl_marc_record(position)
        if record is not None:
            testing_mrc.write(record)
        if not i % 100:
            sys.stderr.write(".")
        if not i % 1000:
            sys.stderr.write("{0}".format(i))
    testing_mrc.close()
    end_time = datetime.datetime.utcnow()
    print("Finished generation of CARL sets at {0}".format(
        end_time.isoformat()))
    print("Total time is {0} minutes".format(
        (end_time - start_time).seconds / 60.0))
def _buildProjectJar(element, document):
    """Build an executable project jar.

    When not already running from a Larch jar, asks the user to locate one;
    then prompts for a destination file (with an overwrite confirmation
    loop) and writes the jar with the document embedded as 'app.larch'.
    Returns without side effects if any dialog is cancelled.
    """
    component = element.getRootElement().getComponent()
    larchJarURL = app_in_jar.getLarchJarURL()
    chosenJarURL = None
    if larchJarURL is None:
        # Not running from a jar: user must point us at the Larch runtime jar.
        openDialog = JFileChooser()
        openDialog.setFileFilter(
            FileNameExtensionFilter('Larch executable JAR (*.jar)', ['jar']))
        response = openDialog.showDialog(component, 'Choose Larch JAR')
        if response == JFileChooser.APPROVE_OPTION:
            sf = openDialog.getSelectedFile()
            if sf is not None:
                chosenJarURL = sf.toURI().toURL()
        else:
            return
    jarFile = None
    bFinished = False
    # Loop until the user picks a writable destination or cancels.
    while not bFinished:
        saveDialog = JFileChooser()
        saveDialog.setFileFilter(
            FileNameExtensionFilter('JAR file (*.jar)', ['jar']))
        response = saveDialog.showSaveDialog(component)
        if response == JFileChooser.APPROVE_OPTION:
            sf = saveDialog.getSelectedFile()
            if sf is not None:
                if sf.exists():
                    response = JOptionPane.showOptionDialog(
                        component, 'File already exists. Overwrite?',
                        'File already exists', JOptionPane.YES_NO_OPTION,
                        JOptionPane.WARNING_MESSAGE, None,
                        ['Overwrite', 'Cancel'], 'Cancel')
                    # NOTE(review): this compares an option-dialog result to
                    # JFileChooser.APPROVE_OPTION; it works only because
                    # 'Overwrite' is option index 0 and APPROVE_OPTION == 0.
                    # Presumably JOptionPane.YES_OPTION was intended --
                    # confirm before changing.
                    if response == JFileChooser.APPROVE_OPTION:
                        jarFile = sf
                        bFinished = True
                    else:
                        bFinished = False
                else:
                    jarFile = sf
                    bFinished = True
            else:
                bFinished = True
        else:
            bFinished = True
    if jarFile is not None:
        # NOTE(review): outStream is not closed if buildLarchJar raises.
        outStream = FileOutputStream(jarFile)
        documentBytes = document.writeAsBytes()
        nameBytesPairs = [('app.larch', documentBytes)]
        app_in_jar.buildLarchJar(outStream, nameBytesPairs,
                                 larchJarURL=chosenJarURL)
        outStream.close()