def get_article(request):
    """Return a single article in the requested export format.

    Query parameters:
        code: article identifier.
        collection: optional collection acronym.
        format: 'xmlwos', 'xmldoaj', 'xmlrsps' or 'xmlpubmed' for an XML
            export; anything else (default 'json') returns JSON.
        body: 'true'/'false' — whether to include the article body.

    Raises:
        HTTPBadRequest: if 'body' is not literally 'true' or 'false'.
    """
    code = request.GET.get('code', None)
    collection = request.GET.get('collection', None)
    fmt = request.GET.get('format', 'json')
    body = request.GET.get('body', 'false')

    if body not in ['true', 'false']:
        # BUG FIX: the message previously named the wrong parameter
        # ('metaonly'); the value being validated is 'body'.
        raise HTTPBadRequest("parameter 'body' must be 'true' or 'false'")
    body = asbool(body)

    article = request.databroker.get_article(
        code, collection=collection, replace_journal_metadata=True, body=body)

    if article:
        # Dispatch table: export format -> Export pipeline method name.
        xml_pipelines = {
            'xmlwos': 'pipeline_sci',
            'xmldoaj': 'pipeline_doaj',
            'xmlrsps': 'pipeline_rsps',
            'xmlpubmed': 'pipeline_pubmed',
        }
        if fmt in xml_pipelines:
            payload = getattr(Export(article), xml_pipelines[fmt])()
            return Response(payload, content_type="application/xml")

        # Default: plain JSON serialization of the article record.
        return Response(json.dumps(article), content_type="application/json")
    # Implicit None when the article is not found (original behavior).
def init(): args = sys.argv[1:] if len(args) == 0: route = raw_input("> [route_parser] Insert a route to query with, or nothing to run a test route.] \n \ > ") if len(args) == 0: route = "4-40 Beach Dr Even#, 650-776 Mountjoy Ave Even#, 2019-2027 Runnymede Ave Odd# (19)" print "[route_parser] No args provided - Running the following test route: " print route parsedroute = parse(route) print "[route_parser] Parsed the test route: \n", parsedroute ex = Export() ex.set_login_details(None, None) ex.set_parsed_routes(parsedroute) ex.generate_excel() print "[route_parser] Successfully parsed route description and wrote data to file." return wb = load_workbook(filename = '%s' % args[0], read_only=True) ws = wb[wb.get_sheet_names()[0]] print ws result = [] for row in ws.iter_rows(): for cell in row: if cell.column == 5 and cell.value != None: print cell.value print result
def export_to_csv(cls, export_data, file_name_path):
    """Write *export_data* to a CSV file located at *file_name_path*."""
    # Delegate directly to a fresh Export instance.
    Export().export_data_to_csv(export_data, file_name_path)
def export(self, file, fold_path=None):
    """Render this object through Export and write the result to disk.

    Args:
        file: destination file path (parameter name kept for backward
            compatibility even though it shadows the builtin).
        fold_path: optional fold path forwarded to Export.
    """
    exp = Export(file, self, fold_path)
    # BUG FIX: the handle was opened/closed manually, leaking it if
    # write() raised; a context manager guarantees closure.
    with open(file, "w") as f:
        f.write(exp.data)
def main():
    """Build the contract-price workbook from TPI/PAC/info-db inputs.

    Loads the source data, computes contract prices, renders the nano /
    normal / PC part-number groups into the workbook template, saves the
    result, and records accounting totals.
    """
    info_db = YInfoDB(INFO_DB)
    tpis_info = TPIs(TPI_DIR)
    exports = Export(EXPORT)
    pacs_info = PACs(PAC_DIR)
    #for ttpi in tpis_info.get_all_tpi():
    #    print ttpi.data
    c_price = ContractPrice(tpis_info, exports)
    if not c_price.is_ok():
        LOG.error("Error when generating contract price")
        return
    #pacs_info.show_pacs()
    # NOTE(review): '== False' preserved on purpose — unlike 'not ...',
    # it treats a None return as success.  Confirm the processors really
    # return False (not None) on failure.
    if c_price.process_pac(pacs_info) == False:
        LOG.error("Error When processing PACs")
        return
    if c_price.process_infodb(info_db) == False:
        LOG.error("Error when processing info db")
        return
    c_price.show_contract_price()
    outbook = OutputWorkbook(TEMPLATE_FILE)
    nanos, normal_pns, pc_pns = c_price.get_data()
    ct = outbook.get_contract_sheet()
    dt = outbook.get_detail_sheet()
    it = outbook.get_invoice_sheet()
    LOG.info("nano=%s", nanos)
    LOG.info("normal=%s", normal_pns)
    LOG.info("pc=%s", pc_pns)
    # first, process nano part numbers
    process_nano(ct, dt, it, nanos, c_price, info_db)
    # second, normal part numbers
    process_other(ct, dt, it, normal_pns, c_price)
    # third, mini-PC hosts (translated from: 微型电脑主机)
    process_other(ct, dt, it, pc_pns, c_price)
    process_others(ct, dt, it, CONTRACT_ID, pacs_info, c_price)
    outbook.save(DEST_FILE)
    ## now accounting
    save_account(tpis_info, exports, c_price)
    save_hawb_total(tpis_info, c_price)
    # User-facing success message (kept verbatim).
    print u"处理成功!!!!!!"
def export_result(self):
    """Export the last scan's JSON result in the configured format.

    Reads self.opt['string']['export_format'] ('txt' or 'json') and
    self.opt['string']['exp_basename'].  Prints an error and returns
    early when no scan has been run; prints the available formats when
    the configured format is unrecognized.
    """
    if not self.last_scan_type:
        print("No result found, did you scan ?")
        return
    xporter = Export(self.opt['string']['exp_basename'])
    # Hoist the repeated option lookup; drop the redundant parentheses.
    export_format = self.opt['string']['export_format']
    if export_format == "txt":
        xporter.export_txt(self.json_res, self.last_scan_type)
    elif export_format == "json":
        xporter.export_json(self.json_res)
    else:
        # Typo fix in the user-facing message: "Unknow" -> "Unknown".
        print("Unknown export format")
        print("Available formats : txt, json")
def __init__(self):
    """Creates the main UI dialog box and sets up the sections in the desired layout."""
    QDialog.__init__(self)
    # Fixed-pitch font applied to all QPlainTextEdit widgets.
    self.font_type = "Courier"
    self.font_size = 10
    font = QFont(self.font_type, self.font_size)
    font.setFixedPitch(True)
    QApplication.setFont(font, "QPlainTextEdit")
    # Session start timestamp.
    self.ts_start = datetime.datetime.now()
    self.ui_flags = FLAGS
    self.dlg = uic.loadUi("dep.ui", self)
    # Restore a previously pickled project, if one exists on disk.
    has_pickled_project = self.unpickle_project()
    self.verbose = FLAGS.verbose
    self.s3_ignore_fullsize_color = FLAGS.s3_ignore_fullsize_color
    self.s3_sample_frame = FLAGS.s3_sample_frame
    self.project_root = FLAGS.project_root
    self.path_project = config.DOCKER_INPUT_ROOT
    self.configure_farm()
    self.full_size = self.dlg.frameSize()
    verify_data.set_default_top_level_paths(self)
    # Only re-verify the raw input data when no pickled project exists.
    if not has_pickled_project:
        verify_data.verify(self)
    dep_util.set_full_size_widths(self)
    # One section object per pipeline stage, in display order.
    self.calibrate = Calibration(self)
    self.background = Background(self)
    self.depth = DepthEstimation(self)
    self.export = Export(self)
    self.sections = [
        self.calibrate, self.background, self.depth, self.export
    ]
    # Expose the section list through a module-level global used elsewhere.
    global sections
    sections = self.sections
    self.setup_sections_layout()
    self.setup_sections_signals()
    self.setup_project()
    self.setup_clock()
    self.dlg.show()
def __init__(self, *args, **kwargs):
    """Scan *path* for Python files, collect their comments, and export.

    Expects exactly three positional args: (path, output, Format).
    """
    path, output, Format = args
    # NOTE(review): 'file' here must be some module/object providing
    # exists(); the Python 2 builtin 'file' type has no such attribute —
    # confirm against the surrounding imports.
    file.exists(output)
    files = []
    self.findpythonfiles(path, files)
    comments = []
    self.findcomments(files, comments)
    # Drop the intermediate collections before exporting.
    del files, path
    self.clearpaths(comments)
    # Export writes the collected comments to 'output' in 'Format'.
    Export(comments, output, Format)
def __init__(self, job_params):
    """Dispatch a crawtext job: crawl, report, or export per task flags.

    A KeyError (missing flag) is treated as a mis-configured project:
    it is reported and the scheduled job is deleted.
    """
    self.task = job_params
    try:
        if self.task['crawl'] is True:
            c = Crawler(self.task)
            c.crawl()
        elif self.task['report'] is True:
            #crawtext.py report <project> [((--email=<email>| -e <email>) -u <user> -p <passwd>)| (-o <outfile> |--o=<outfile>)]
            Report(self.task)
        elif self.task['export'] is True:
            #crawtext.py export [results|sources|logs|queue] <project> [(-o <outfile> |--o=<outfile>)] [-t <type> | --type=<type>]
            Export(self.task)
        # elif self.task['extract'] is True:
        #     new method for extract every url
    except KeyError:
        # Missing task flag: report the broken project and unschedule it.
        print self.task["project"]
        print "Project %s not configured properly" % str(
            self.task["project"])
        s = Scheduler(self.task)
        s.delete()
        print "deleting project"
def build(builder, options=None, time_limit=None, import_build=True):
    """Run the requested builder actions (clean/build/test) under a time limit.

    Args:
        builder: object exposing clean(), build() and test().
        options: Options instance; a default Options() is used when None.
        time_limit: timeout spec; a DefaultTimeout() is used when None.
        import_build: when True, import the build root before acting.

    Side effects: scrubs library-path environment variables, may delete
    the test output directory, and records start/end/status via the
    invoke object.
    """
    if not time_limit:
        time_limit = DefaultTimeout()
    if not options:
        options = Options()
    action_map = [
        ("clean", builder.clean),
        ("build", builder.build),
        ("test", builder.test),
    ]
    actions = options.action
    invoke = NullInvoke()

    # Scrub environment overrides that could leak into the build.
    # (pop with a default replaces the old has_key()/del pairs; has_key
    # is Python-2-only and long deprecated.)
    for var in ("PKG_CONFIG_PATH", "LD_LIBRARY_PATH", "LIBGL_DRIVERS_PATH"):
        os.environ.pop(var, None)

    # TODO: add this stuff
    if options.result_path:
        # if we aren't posting to a server, don't attempt to write status
        invoke = ProjectInvoke(options)
    invoke.set_info("start_time", time.time())

    # start a thread to limit the run-time of the build
    to = TimeOut(time_limit)
    to.start()

    if options.hardware != "builder" and check_gpu_hang():
        return

    if import_build:
        Export().import_build_root()

    # Normalize a single action name to a list.
    if isinstance(actions, str):
        actions = [actions]

    # clean out the test results directory, so jenkins processes only
    # the files for the current build
    if "test" in actions:
        test_out_dir = ProjectMap().source_root() + "/test"
        if os.path.exists(test_out_dir):
            rmtree(test_out_dir)

    # Walk through the possible actions in order; skip those not
    # requested.  The order does matter.
    for k, a in action_map:
        if k not in actions:
            continue
        options.action = a
        try:
            a()
        except:
            # Cancel the timer first, in case set_status fails and the
            # timer is left running; the bare except is deliberate so
            # cleanup happens for every failure mode before re-raising.
            to.end()
            invoke.set_info("status", "failed")
            raise

    # Cancel the timeout timer (it would otherwise keep the process
    # alive); cancel first in case set_info fails.
    to.end()
    invoke.set_info("end_time", time.time())
    invoke.set_info("status", "success")
def export_file_action(self):
    """Show the export dialog and, when accepted, write the table to a file."""
    export_dialog = Export(parent=self, df=self.table)
    accepted = export_dialog.exec_()
    if accepted:
        export_dialog.export_to_file()
def __init__(self):
    """Wire up the application's collaborators (config, loading,
    export and data-processing services)."""
    self.config = Config()
    self.Loader = Loader()
    self.Export = Export()
    self.DataProcess = DataProcess()
def test_basic_export():
    """Smoke-test Export: anonymous login, city search, Excel output."""
    exporter = Export()
    exporter.set_login_details(None, None)
    exporter.set_search_dict({"City": "Colwood"})
    exporter.generate_excel()
def execute(self, parameters, messages):
    """Runs the script.

    Exports a feature class to shapefile (optionally reprojected to
    WGS84) and, according to the user's flags, to GeoJSON / KMZ / CSV,
    plus a Markdown README generated from the metadata.  The shapefile
    is also zipped.  Intermediate files go to a /temp folder that is
    removed at the end.
    """
    # Get the user's input
    fc = parameters[0].valueAsText
    field_mappings = parameters[1].valueAsText
    fields = parameters[1].valueAsText.split(';')
    fields.append('SHAPE@XY')
    output_dir = parameters[2].valueAsText
    output_name = parameters[3].valueAsText
    convert_to_wgs84 = self.toBool(parameters[4].valueAsText)
    convert_to_geojson = self.toBool(parameters[5].valueAsText)
    convert_to_kmz = self.toBool(parameters[6].valueAsText)
    convert_to_csv = self.toBool(parameters[7].valueAsText)
    convert_metadata = self.toBool(parameters[8].valueAsText)
    debug = self.toBool(parameters[9].valueAsText)

    # Setup vars (Windows-style paths)
    output_path = output_dir + '\\' + output_name
    shp_output_path = output_dir + '\\shapefile'
    shp_temp_output_path = output_dir + '\\shapefile\\temp\\'
    shapefile = shp_output_path + '\\' + output_name + '.shp'
    temp_shapefile = shp_output_path + '\\temp\\' + output_name + '.shp'

    if debug:
        AddMessage('Field infos:')
        AddMessage(field_mappings)

    # Remove a stale in-memory layer from a previous run, if any.
    try:
        arcpy.Delete_management('temp_layer')
    except:
        if debug:
            AddMessage('Did not have a temp_layer feature ' +
                       'class to delete')

    if not os.path.exists(shp_output_path):
        os.makedirs(shp_output_path)
        if debug:
            AddMessage('Created directory ' + shp_output_path)

    if not os.path.exists(shp_temp_output_path):
        os.makedirs(shp_temp_output_path)
    else:
        # Empty the temp folder left over from a previous run.
        for file in os.listdir(shp_temp_output_path):
            file_path = os.path.join(shp_temp_output_path, file)
            try:
                if os.path.isfile(file_path):
                    os.unlink(file_path)
            except:
                AddWarning('Unable to delete ' + file +
                           'from the temp folder. This ' +
                           'may become a problem later')
                pass

    arcpy.MakeFeatureLayer_management(fc, 'temp_layer', '', '',
                                      field_mappings)
    arcpy.CopyFeatures_management('temp_layer', temp_shapefile)

    if convert_to_wgs84:
        AddMessage('Converting spatial reference to WGS84...')
        arcpy.Project_management(
            temp_shapefile, shapefile,
            "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433],METADATA['World',-180.0,-90.0,180.0,90.0,0.0,0.0174532925199433,0.0,1262]]",
            "WGS_1984_(ITRF00)_To_NAD_1983",
            "PROJCS['NAD_1983_StatePlane_Pennsylvania_South_FIPS_3702_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',1968500.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-77.75],PARAMETER['Standard_Parallel_1',39.93333333333333],PARAMETER['Standard_Parallel_2',40.96666666666667],PARAMETER['Latitude_Of_Origin',39.33333333333334],UNIT['Foot_US',0.3048006096012192]]"
        )
        AddMessage('Projection conversion completed.')
    else:
        AddMessage('Exporting shapefile already in WGS84...')
        arcpy.FeatureClassToShapefile_conversion(temp_shapefile,
                                                 shp_output_path)

    try:
        arcpy.Delete_management('temp_layer')
    except:
        AddError('Unable to delete in_memory feature class')

    AddMessage('Compressing the shapefile to a .zip file...')
    export = Export(output_dir, output_name, debug)
    zip = export.zip()
    if zip:
        AddMessage('Finished creating ZIP archive')

    if convert_to_geojson:
        AddMessage('Converting to GeoJSON...')
        output = output_path + '.geojson'
        geojson = esri2open.toOpen(shapefile, output,
                                   includeGeometry='geojson')
        if geojson:
            AddMessage('Finished converting to GeoJSON')

    if convert_to_kmz:
        AddMessage('Converting to KML...')
        kmz = export.kmz()
        if kmz:
            AddMessage('Finished converting to KMZ')

    if convert_to_csv:
        AddMessage('Converting to CSV...')
        csv = export.csv()
        if csv:
            AddMessage('Finished converting to CSV')

    if convert_metadata:
        AddMessage('Converting metadata to Markdown ' +
                   'README.md file...')
        md = export.md()
        if md:
            AddMessage('Finished converting metadata to ' +
                       'Markdown README.md file')

    # Delete the /temp directory because we're done with it
    shutil.rmtree(shp_output_path + '\\temp')
    if (debug):
        AddMessage('Deleted the /temp folder because we don\'t' +
                   ' need it anymore')

    return
CustomActionsModel = CustomActionsModel() ProjectsModel = ProjectsModel() CopyProjectModel = CopyProjectModel() DomainsModel = DomainsModel() ConversationsModel = ConversationsModel() RefreshDbModel = RefreshDbModel() IntentsModel = IntentsModel() IntentDetailModel = IntentDetailModel() ResponseModel = ResponseModel() ResponseDetailModel = ResponseDetailModel() StoryDetailModel = StoryDetailModel() StoryModel = StoryModel() EntityModel = EntityModel() ExportProjectModel = ExportProjectModel() ImportProjectModel = ImportProjectModel() Export = Export() # Setting Expiry for redis cache GLOBAL_EXPIRY = 60 # Initiate redis try: r = redis.Redis(host=os.environ['REDIS_URL'], port=os.environ['REDIS_PORT'], charset="utf-8", decode_responses=True) logger.info("Trying to connect to Redis Docker container ") except KeyError: logger.debug("Local run connecting to Redis ") r = redis.Redis(host='localhost',
def export(self, _):
    """Kick off an export of this object; the event argument is ignored."""
    Export(self)
def run(self):
    """Run method that performs all the real work"""
    #trigger help button
    #helpButton.clicked.connect(showPluginHelp())
    # Create the dialog (after translation) and keep reference
    self.dlg = profileAARDialog()
    #initialize the Errorhandler
    errorhandler = ErrorHandler(self.iface)
    magicbox = Magic_Box(self.iface)
    export = Export(self.iface)

    '''DEFINE OUTPUT PATH'''
    #Choose file if button is clicked
    self.dlg.outputPath.clear()
    self.dlg.outputButton.clicked.connect(self.select_output_file)

    '''SELECT INPUT IN GUI'''
    # CHOOSE INPUT LAYER
    # read layers from qgis layers and filter out the pointlayers to display in the input combobox
    self.dlg.inputCombo.setFilters(QgsMapLayerProxyModel.PointLayer)
    # CHOOSE COLUMNS FOR Z-VALUE, VIEW AND PR-NUMBER
    # CALLS FUNCTION LAYER_FIELD (once on startup on activation, to enable using when only one point fc is present)
    self.dlg.inputCombo.activated.connect(self.layer_field)
    self.dlg.inputCombo.currentIndexChanged.connect(self.layer_field)
    self.dlg.helpButton.clicked.connect(self.show_help)

    '''SHORT BLOCK OF PLUGIN CODE (runs the dialog and triggers the event after the OK button was pressed)'''
    # create/show the dialog
    self.dlg.show()
    # Run the dialog event loop
    result = self.dlg.exec_()
    # See if OK was pressed
    if result:
        inputCheck = False
        fieldCheck = False
        #Check if input fields are filled correctly an if the layer has correct properties
        inputCheck = errorhandler.input_check(self.dlg.outputPath.text())

        '''GET INPUT FROM GUI TO VARIABLES/PREPARE LIST OF DATA'''
        #GET TEXT FROM METHOD AND DIRECTION
        #Read the method that is selected
        method = unicode(self.dlg.methodCombo.currentText())
        #read the direction, that is selected
        direction = unicode(self.dlg.directionCombo.currentText())
        #Get the selected layer
        selectedLayer = self.dlg.inputCombo.currentLayer()

        #PREPARE DATA LIST
        #Go thought all data rows in the selected layer
        iter = selectedLayer.getFeatures()
        #list for the data
        coord = []
        #list for the different profile names
        profile_names = []
        #check if the z values have the correct type and if the crs is projected
        fieldCheck = errorhandler.field_check(selectedLayer, self.dlg.zCombo.currentText())
        height = False
        section = False
        if fieldCheck == True or inputCheck == True:
            # NOTE(review): sys.exitfunc() is unusual (removed in
            # Python 3); presumably meant to abort the run on bad input
            # — confirm intent.
            sys.exitfunc()
        if self.dlg.hightBox.isChecked():
            height = True
        if self.dlg.sectionBox.isChecked():
            section = True
        point_id = 0
        for feature in iter:
            # retrieve every feature with its geometry and attributes
            # fetch geometry
            # TODO: three decimal places! So far there are only 2...
            # (translated from German: "3Nachkommastellen!! Bisher sind es nur 2.....")
            geom = feature.geometry()
            #getting x and y coordinate
            x = round(geom.asPoint().x(), 3)
            y = round(geom.asPoint().y(), 3)
            #write coordinates and attributes (view, profile and z) in a list
            # TODO: Use dictinary or object
            #add an ID to each point
            point_id += 1
            coord.append([x, y, feature[self.dlg.zCombo.currentText()], feature[self.dlg.viewCombo.currentText()],
                          feature[self.dlg.profileCombo.currentText()], feature[self.dlg.useCombo.currentText()], point_id])
            #write a list of profilenames (unique entries)
            if feature[self.dlg.profileCombo.currentText()] not in profile_names:
                profile_names.append(feature[self.dlg.profileCombo.currentText()])

        '''WORK ON EVERY PROFILE IN LOOP'''
        # CREATE A LIST OF DATA FOR EVERY PROFILE
        # select every single profile in a loop
        coord_trans = []
        height_points = []
        cutting_line = []
        for i in range(len(profile_names)):
            # instantiate a temporary list for a single profile
            coord_proc = []
            # instantiate list for the view to check if all entries in one profile are the same
            view_check = []
            #CHANGE
            # instantiate list for the selection to check if all entries in one profile are the same
            selection_check = []
            # iterate through the features in coord, if the profilename matches store the features datalist in templist
            for x in range(len(coord)):
                if coord[x][4] == profile_names[i]:
                    coord_proc.append(coord[x])
                    # write the unique view values in the checklist
                    if coord[x][3] not in view_check:
                        view_check.append(coord[x][3])
                    # CHANGE write the unique selection values in the checklist
                    # NOTE(review): membership is tested on index 4
                    # (profile name) but index 5 (selection) is appended
                    # — looks inconsistent; confirm intended behavior.
                    if coord[x][4] not in selection_check:
                        selection_check.append(coord[x][5])
            #Handle Errors depending on the attributes in the fields
            #Errorhandling: Checking the single Profiles for inconsestency
            #Therefore we need the data of the actual profile, the view_check with the view values and actual profile name, selection is 0 or 1
            profileCheck = False
            if fieldCheck == False and inputCheck == False:
                profileCheck = errorhandler.singleprofile(coord_proc, view_check, str(profile_names[i]), selection_check)
            if profileCheck == False and fieldCheck == False and inputCheck == False:
                #Calculating the profile and add it to the list
                transform_return = magicbox.transformation(coord_proc, method, direction)
                coord_height_list = transform_return['coord_trans']
                coord_trans.append(coord_height_list)
                #CHANGE If checked, the upper right poitn has to be exportet as point
                if height == True:
                    height_points.append(magicbox.height_points(coord_height_list))
                if section == True:
                    cutting_line.append(sectionCalc(self, coord_proc, transform_return['cutting_start'], transform_return['linegress'], transform_return['ns_error']), )

        if profileCheck == False:
            '''Export the data'''
            #For exporting we need the data, the path and the crs of the input data
            export.export(coord_trans, self.dlg.outputPath.text(), selectedLayer.crs())
            #If points are checked, export them
            #CHANGE
            if height == True:
                export.export_height(height_points, self.dlg.outputPath.text(), selectedLayer.crs())
            if section == True:
                #if a profile is recommended, we have to export it. To make it easy to display everything, export left point first
                printLogMessage(self, str(coord_proc[0][4]), 'sec111t')
                export.export_section(cutting_line, coord_proc[0][4], self.dlg.outputPath.text(), selectedLayer.crs())
            #Load the file to qgis automaticly
            layer = self.iface.addVectorLayer(self.dlg.outputPath.text(), "", "ogr")
            #CHANGE
            if height == True:
                filename = self.dlg.outputPath.text().split(".shp")[0]
                filename = filename + "_height.shp"
                layer = self.iface.addVectorLayer(filename, "", "ogr")
            if section == True:
                filename = self.dlg.outputPath.text().split(".shp")[0]
                filename = filename + "_section.shp"
                layer = self.iface.addVectorLayer(filename, "", "ogr")
            #if the loading of the layer fails, give a message
            if not layer:
                criticalMessageToBar(self, 'Error', 'Failed to open ' + self.dlg.outputPath.text())
            pass
def get_default_qtree_exports(self):
    """Return a read-write Export for every host at this site,
    each sourced from the primary IP address."""
    result = []
    # One Export per site host; the primary IP is re-resolved per host,
    # exactly as the original comprehension did.
    for host in self.get_site().get_hosts():
        result.append(Export(type='rw', tohost=host,
                             fromip=self.get_primary_ipaddr().ip))
    return result
def __init__(self):
    """Build the main OCR window: widgets, settings, export helpers,
    signal wiring, plugins, and the update/EDDN worker threads."""
    QMainWindow.__init__(self)
    #QMainWindow.__init__(self, None, Qt.FramelessWindowHint)
    self.setupUi(self)
    self.appversion = appversion
    self.setupTable()
    self.settings = Settings(self)
    self.export = Export(self)
    self.actionPublic_Mode.setChecked(self.settings["public_mode"])
    self.factor.setValue(self.settings["zoom_factor"])
    # OCR/session state defaults.
    self.ocr_all_set = False
    self.color_image = None
    self.preview_image = None
    self.current_result = None
    self.newupd = None
    self.zoom = False
    self.minres = 0
    # Editable result fields and their preview canvases (parallel lists).
    self.fields = [
        self.name, self.sell, self.buy, self.demand_num,
        self.demand, self.supply_num, self.supply
    ]
    self.canvases = [
        self.name_img, self.sell_img, self.buy_img, self.demand_img,
        self.demand_text_img, self.supply_img, self.supply_text_img
    ]
    #setup buttons
    self.add_button.clicked.connect(self.addFiles)
    self.remove_button.clicked.connect(self.removeFile)
    self.remove_all_button.clicked.connect(self.removeAllFiles)
    self.add_all_button.clicked.connect(self.addAllScreenshots)
    self.save_button.clicked.connect(self.addItemToTable)
    self.skip_button.clicked.connect(self.nextLine)
    self.continue_button.clicked.connect(self.continueOCR)
    self.ocr_button.clicked.connect(self.performOCR)
    self.ocr_all.clicked.connect(self.runOCRAll)
    self.export_button.clicked.connect(self.export.exportToFile)
    self.bpc_button.clicked.connect(self.export.bpcExport)
    self.eddn_button.clicked.connect(self.export.eddnExport)
    self.clear_table.clicked.connect(self.clearTable)
    self.zoom_button.clicked.connect(self.drawOCRPreview)
    # Old-style (PyQt4) signal/slot wiring for the menu actions.
    QObject.connect(self.actionHelp, SIGNAL('triggered()'), self.openHelp)
    QObject.connect(self.actionUpdate, SIGNAL('triggered()'), self.openUpdate)
    QObject.connect(self.actionAbout, SIGNAL('triggered()'), self.About)
    QObject.connect(self.actionOpen, SIGNAL('triggered()'), self.addFiles)
    QObject.connect(self.actionPreferences, SIGNAL('triggered()'), self.openSettings)
    QObject.connect(self.actionPublic_Mode, SIGNAL('triggered()'), self.toggleMode)
    QObject.connect(self.actionCommodity_Editor, SIGNAL('triggered()'), self.openEditor)
    self.error_close = False
    #set up required items for nn
    self.training_image_dir = unicode(
        self.settings.app_path.decode(
            'windows-1252')) + u"\\nn_training_images\\"
    self.loadPlugins()
    self.restorePos()
    # Background thread for EDDN submissions.
    self.eddnthread = EDDNExport(self)
    QObject.connect(self.eddnthread, SIGNAL('finished(QString)'),
                    self.export.eddnFinished)
    QObject.connect(self.eddnthread, SIGNAL('update(int,int)'),
                    self.export.eddnUpdate)
    self.checkupadte = self.settings["updates_check"]
    # Worker thread that checks for application updates.
    self.thread = Worker()
    self.connect(self.thread, SIGNAL("output(QString, QString)"),
                 self.showUpdateAvailable)
    if self.checkupadte:
        self.thread.check(self.appversion)
    # Show the info dialog until the user has accepted it (either the
    # registry key is missing, or it is present but not accepted).
    if not self.settings.reg.contains('info_accepted'):
        self.infoDialog = InfoDialog()
        self.infoDialog.exec_()
    else:
        if not self.settings['info_accepted']:
            self.infoDialog = InfoDialog()
            self.infoDialog.exec_()
def get_results(self, n_iter, rmse): print('v{}'.format(self.ver)) # randomize functions are out of this class, they are just recalled in CreateOZN.fire() self.rs.open_ozone() # this is main loop for stochastic analyses # n_iter is maximum number of iterations for sim in range(int(n_iter)): sim_no = sim + self.sim_time # unique simulation ID based on time mask print('\n\nSimulation #{} -- {}/{}'.format(sim_no, sim + 1, n_iter)) # creating OZN file and writing essentials to the list self.to_write.clear() # redirect data to CSV and create OZN file for beam self.to_write, no_beam = CreateOZN(*self.paths, self.f_type).write_ozn() # beam simulation if not no_beam: if not self.single_sim(self.to_write, sim_no): self.falses += 1 self.details(sim_no) # moving Ozone files named by simulation ID # column simulation sim_no = '{}col'.format(sim_no) print('\nSimulation #{} -- {}/{}'.format(sim_no, sim + 1, n_iter)) if self.b2c(sim_no[:10]): # change coordinates to column print('There is no column available') no_beam = True if not self.single_sim(self.to_write, sim_no.split('a')[0]): self.falses += 1 self.details(sim_no) # saving column simulation details # choosing worse scenario as single iteration output and checking its correctness if not no_beam: print('beam: {}, col: {}'.format(self.results[-2][1], self.results[-1][1])) self.worse() print("Step finished OK") # exporting results every (self.save_samp) repetitions if (sim + 1) % self.save_samp == 0: e = Export(self.results, self.paths[1], self.ver) e.csv_write('stoch_rest') # check if RMSE is low enough to stop simulation if e.save(self.rset, self.t_crit, self.falses) and rmse == "rmse": print('Multisimulation finished due to RMSE condition') break self.results.clear() # safe closing code: self.rs.close_ozn() print("Multisimulation finished OK, well done engineer!")