def main():
    logging.info("-" * 30)
    logging.info("Starting translate-helper")
    global_settings = settings.LoadSettings(_GLOBAL_JSON_SETTINGS_FILE)
    ts = settings.LoadSettings(_JSON_SETTINGS_FILE)
    logging.info("Successfully read settings")
    translate_helper = TranslateHelper(
        source_language=ts.source_language,
        target_language=ts.target_language,
        translator_settings=global_settings.translators)
    translate_helper.SetupTranslators()
    words = importer.read_csv(ts.word_list_file)
    logging.info("Word list loaded")
    df = translate_helper.GetTranslation(words)
    logging.info("Translation retrieved successfully")
    exporter.export(df, ts.output_file)
    logging.info("Output file created at [{0}]".format(ts.output_file))
    logging.info("Finished translate-helper")
    logging.info("-" * 30)
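# The exporter module used above is not shown. A minimal sketch of what its
# export() could look like, assuming df is a pandas DataFrame and the output
# format is chosen from the file extension (both are assumptions, not the
# original implementation):
import os

import pandas as pd


def export(df: pd.DataFrame, output_file: str) -> None:
    """Write the translation table to output_file (hypothetical helper)."""
    ext = os.path.splitext(output_file)[1].lower()
    if ext == ".csv":
        df.to_csv(output_file, index=False)
    elif ext in (".xls", ".xlsx"):
        df.to_excel(output_file, index=False)
    else:
        raise ValueError("Unsupported output format: {0}".format(ext))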
def on_test_success(issue_key, master_path, testfolder_path, jira, test_id,
                    sut_name, new_task):
    compare_results(issue_key,
                    os.path.join(testfolder_path, "xray_report.json"),
                    os.path.join(master_path, "master_xray_report.json"),
                    os.path.join(master_path, "tests.json"),
                    os.path.join(master_path, "first_run_xray_report.json"))
    result_html = import_updated_xray(jira, issue_key, master_path,
                                      os.path.join(master_path, "master_xray_report.json"),
                                      test_id, sut_name)
    if not new_task:
        # Merge the new test report into the accumulated master report.
        result = combine_report.combine(
            uploader_common.read_bytes(os.path.join(master_path, "master_report.jsonl.zlib")),
            uploader_common.read_bytes(os.path.join(testfolder_path, "report.jsonl.zlib")),
            combine_report.CombineModes.DIFFERENT_BOOTS)
        with open(os.path.join(master_path, "master_report.jsonl.zlib"), 'wb') as report_file:
            report_file.write(result)
    report = uploader_common.read_bytes(os.path.join(master_path, "master_report.jsonl.zlib"))
    exporter.export(report, master_path)
    print("Exported " + master_path + "/master_report.jsonl.zlib")
    # Update the attachments in the Jira issue.
    with jiralib.MultipleFileManager() as fm:
        with tempfile.TemporaryDirectory() as html_dir:
            html_path = os.path.join(html_dir, "result.html")
            with open(html_path, 'w') as f:
                f.write(result_html)
            fm.add_file(html_path)
            update_jira_attachments(jira, fm, master_path, issue_key, DEFAULT_CONF_FILE)
def hashCheck(self):
    # Create a temporary folder for the exported data.
    tempdir = tempfile.mkdtemp("whatmigrate_hashcheck")
    # Export the torrent into the temporary folder.
    exporter.export(self.torrentinfo, self.torrentfolder, self.mappings, tempdir)
    # Hash check the exported pieces.
    results = hashcheck.hashcheck(self.torrentinfo, tempdir)
    print(" %d of %d pieces correct (%d%%)"
          % (results[0], results[1], round(float(results[0]) / results[1] * 100)))
    return (tempdir, results)
def get(self): """ Give the XML file up for download. This is exported from whatever was inuide the datastore. """ self.response.headers['Content-Type'] = "application/xml" self.response.out.write(export())
def ut_download(user_name, password, pasted_urls):
    ut_headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.193 Safari/537.36',
    }
    ut_login_data = {
        'username': user_name,
        'password': password,
        '_eventId': 'submit',
        'geolocation': '',
        'submit': 'LOGIN'
    }
    ut_downloader = Downloader('http://elearn.ut.ac.ir/', '', ut_login_data,
                               ut_headers, ut_headers)
    if not ut_downloader.login({'execution'}):
        print("Username or password is incorrect!")
        time.sleep(10)
        return
    for url in pasted_urls:
        if re.match(r'https://elearn\d*\.ut\.ac\.ir/mod/adobeconnect\d*/joinrecording\.php.*', url):
            filename = re.findall(r'recording=(\d+)&', url)[0]
            if not ut_downloader.download_meeting(url):
                print('An error occurred during download...')
                time.sleep(10)
                continue
            exporter.export(filename)
            ut_downloader.download_other_files()
            print(filename + ' is ready!')
        else:
            print('Wrong URL format')
            time.sleep(10)
    ut_downloader.remove_temp_directory()
async def scan(socket):
    global running
    running = True
    exporter.create()
    steps = stepper.get_steps_per_scan()
    for i in range(steps):
        if not running:
            break
        print("step " + str(i))
        progress_json = {'progress': ((i + 1) / steps) * 100}
        await socket.send(json.dumps(progress_json))
        points = await cam.get_points()
        points_transformed = linearalgebra.transform(
            points, stepper.get_current_angle())
        exporter.add_row(points_transformed)
        point_json = {'points': []}
        for p in range(len(points_transformed)):
            point_json['points'].append(
                {'point': points_transformed[p].tolist()})
        await socket.send(json.dumps(point_json))
        await stepper.scan_step()
    name = "scan_" + str(datetime.datetime.now()).replace(" ", "_")
    url = {"url": name}
    await socket.send(json.dumps(url))
    exporter.export(name)
    export_finished = {"exportFinished": "true"}
    await socket.send(json.dumps(export_finished))
    running = False
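# The scanner's exporter is only used through create()/add_row()/export()
# above. A minimal sketch of that interface, assuming each row is an iterable
# of (x, y, z) points and the output is a plain .xyz text file (all
# assumptions; the real module may write a different format):
_rows = []


def create():
    """Start a new scan buffer (hypothetical)."""
    _rows.clear()


def add_row(points):
    """Buffer one slice of transformed points (hypothetical)."""
    _rows.append(points)


def export(name):
    """Dump all buffered points to <name>.xyz (hypothetical)."""
    with open(name + ".xyz", "w") as f:
        for row in _rows:
            for x, y, z in row:
                f.write("{0} {1} {2}\n".format(x, y, z))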
def kntu_download(user_name, password, pasted_urls):
    kntu_headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36',
    }
    kntu_login_data = {
        'anchor': '',
        'username': user_name,
        'password': password,
        'rememberusername': '******'
    }
    kntu_downloader = Downloader('https://vc4001.kntu.ac.ir/login/index.php',
                                 'https://connect.kntu.ac.ir/',
                                 kntu_login_data, kntu_headers, kntu_headers)
    if not kntu_downloader.login({'logintoken'}):
        return
    for url in pasted_urls:
        if re.match(r'https://vc\d*\.kntu\.ac\.ir/mod/adobeconnect/joinrecording\.php.*', url):
            meeting_id = re.findall(r'recording=(\d+)&', url)[0]
            if not kntu_downloader.download_meeting(url):
                print('An error occurred during download...')
                time.sleep(10)
                continue
            exporter.export(meeting_id)
            kntu_downloader.download_other_files()
            print(meeting_id + ' is ready!')
        else:
            print('Wrong URL format')
            time.sleep(10)
    kntu_downloader.remove_temp_directory()
def ikiu_download(user_name, password, pasted_urls):
    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.193 Safari/537.36',
    }
    login_data = {
        'anchor': '',
        'username': user_name,
        'password': password,
        'rememberusername': '******'
    }
    ikiu_downloader = Downloader(
        'http://lms.ikiu.ac.ir/blocks/whc_backup/login.php',
        'https://ac.aminidc.com', login_data, headers, headers)
    if not ikiu_downloader.login({'logintoken'}):
        return
    for url in pasted_urls:
        if re.match(r'https://ac\.aminidc\.com/(.*)/.*', url):
            filename = re.findall(r'recording=(\d+)&', url)[0]
            print('Downloading ' + filename + '...')
            if not ikiu_downloader.download_meeting(url):
                print('An error occurred during download...')
                time.sleep(10)
                continue
            exporter.export(filename)
            ikiu_downloader.download_other_files()
            print(filename + ' is ready!')
        else:
            print('Wrong URL format')
            time.sleep(10)
    ikiu_downloader.remove_temp_directory()
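# All three university downloaders above share the same Downloader.login()
# pattern: the set passed in ({'execution'}, {'logintoken'}) names hidden form
# fields that must be scraped from the login page before posting the
# credentials. A rough sketch of that flow using requests and BeautifulSoup
# (the class internals are assumptions, not the actual implementation):
import requests
from bs4 import BeautifulSoup


def login(session, login_url, login_data, hidden_fields):
    """Scrape hidden token fields, then post the credentials (hypothetical)."""
    page = session.get(login_url)
    soup = BeautifulSoup(page.text, "html.parser")
    for name in hidden_fields:
        field = soup.find("input", {"name": name})
        if field is not None:
            login_data[name] = field.get("value", "")
    response = session.post(login_url, data=login_data)
    return response.ok


# Example: login(requests.Session(), 'https://vc4001.kntu.ac.ir/login/index.php',
#                kntu_login_data, {'logintoken'})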
def projects():
    form = ProjectForm()
    if request.method == 'GET':
        return render_template('projects.html', form=form)
    elif request.method == 'POST':
        projects_csv = exporter.export(form.username.data)
        if projects_csv is None:
            flash("Not a valid Ravelry username", 'error')
            return render_template('projects.html', form=form)
        else:
            sio = StringIO.StringIO()
            sio.write(projects_csv)
            sio.seek(0)
            return send_file(sio, attachment_filename="projects.csv",
                             as_attachment=True)
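# The snippet above targets Python 2 (the StringIO module) and an older Flask
# API. A sketch of the same response on Python 3 with Flask >= 2.0, where
# send_file() expects a bytes buffer and the keyword was renamed from
# attachment_filename to download_name:
from io import BytesIO

from flask import send_file


def send_projects_csv(projects_csv):
    """Return the CSV string as a file download (hypothetical helper)."""
    buf = BytesIO(projects_csv.encode("utf-8"))
    return send_file(buf, mimetype="text/csv",
                     download_name="projects.csv", as_attachment=True)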
import exporter

with open('catalogue.csv', 'w') as f:
    csv_string = exporter.export('http://catalogue.data.gov.bc.ca', 'columns.json')
    f.write(csv_string)

# Parse into data type files.
import dataset_export
import update_datastore_content
def export_only():
    for meeting in [x for x in os.listdir('./temp/') if x.endswith('.zip')]:
        meeting_name = meeting[:-4]  # strip the ".zip" extension
        exporter.export(meeting_name)
dialect = args.dialect
if args.raw_folder:
    dataset = reader.UraLexReader("raw", args.correlate)
else:
    dataset = reader.UraLexReader(versions.getLatestVersion(), args.correlate)
exporter = exporter.UralexExporter(dataset)
exporter.setMeaningList(args.meaning_list)
exporter.setLanguageExcludeList(excluded_languages)
exporter.setFormat(args.format, dialect)
exporter.setCharsets(args.charsets)
outlines = exporter.export()
if args.outfile is not None:
    if os.path.isfile(args.outfile):
        while True:
            prompt = input("File " + args.outfile + " already exists. Overwrite? (y/n) ")
            if prompt == "y" or prompt == "n":
                break
        if prompt == "n":
            print("File not written.")
            sys.exit()
    with open(args.outfile, "w") as f:
        for line in outlines:
            f.write(line + "\n")
def get(self): """ Generate a page with XML in a textarea for copy paste. """ self.generate("export.html", {"xml": export(), "title": "Admin Export"})
import exporter
from crawler import Crawler

calId = "*****@*****.**"
gCrawler = Crawler()
myEvents = gCrawler.getYear(calId)
exporter.export(myEvents)
# Run using the environment as a parameter: python run_exporter.py cad id api_key
import exporter
# Parse into data type files.
import dataset_export
import update_datastore_content
# The resource id and API key are passed as arguments.
from sys import argv

script, env, res_id, api_key = argv

with open(env + '.csv', 'w') as f:
    csv_string = exporter.export('https://' + env + '.data.gov.bc.ca', 'columns.json')
    f.write(csv_string)

if __name__ == '__main__':
    dataset_export.export_type(env)
    update_datastore_content.update_resource(env, res_id, api_key)
def main(): """Setup the environment and starts the test session.""" (args, opts) = parse_options() single_test_index = None cfg = parse_file(opts["config"]) if opts["list"]: td = parse_file(args["test_file"]) suite = testdata.TestSuite(cfg + td) print_test_names_list(suite) exit(0) td = parse_file(args["test_file"]) if opts['valgrind']: cfg.append(testdata.Section('VALGRIND', [])) suite = testdata.TestSuite(cfg + td) if opts['run'] is not None and opts['export'] is None: single_test_index = opts['run'] - 1 suite = suite.test_case(single_test_index) if opts['export'] is not None: single_test_index = opts['export'] - 1 suite = suite.test_case(single_test_index) exporter.export(suite, single_test_index) execlass = (valgrind.ValgrindExecutor if opts["valgrind"] else executor.Executor) exe = execlass() if opts["format"] == "interactive": fmt = interactiveformatter.InteractiveFormatter() elif opts["format"] == "json": fmt = jsonformatter.JSONFormatter(indent=4, test_file=args["test_file"]) elif opts["format"] == "csv": fmt = csvformatter.CSVFormatter() elif opts["format"] == "html": fmt = htmlformatter.HTMLFormatter() elif opts["format"] == "text": fmtclass = (formatter.ColoredTextFormatter if opts["color"] else formatter.TextFormatter) fmt = fmtclass(verbosity=opts["verbosity"], maxerrors=opts["maxerrors"]) else: raise ValueError("Unknown format '{}'".format(opts["format"])) # Pvcheck returns as exit code the number of failed tests. # 255 represents a generic error. retcode = 255 program = [args['program']] if args['program_arguments'] is not None: program.extend(args['program_arguments']) with open(opts["logfile"], "at") as logfile: logfmt = jsonformatter.JSONFormatter(logfile, test_file=args["test_file"]) combfmt = formatter.CombinedFormatter([fmt, logfmt]) pvc = pvcheck.PvCheck(exe, combfmt) try: if single_test_index is None: failures = pvc.exec_suite(suite, program, timeout=opts["timeout"], output_limit=opts["output_limit"]) else: failures = pvc.exec_single_test(suite, program, timeout=opts["timeout"], output_limit=opts["output_limit"]) retcode = min(failures, 254) finally: # in case of exception (e.g. tested a non executable file) write a # newline to the json log logfile.write("\n") sys.exit(retcode)
# print(rgbColor.get())
Label(root, text="Width", pady=5).pack()
width = Entry(root)
width.insert(0, 20)
width.pack()

Label(root, text="Height", pady=5).pack()
height = Entry(root)
height.insert(0, 20)
height.pack()

Label(root, text="Color (hexadecimal)", pady=5).pack()
hexa = Entry(root, textvariable=hexaColor)
hexa.pack()
hexaColor.trace("w", hexatorgb)

# Label(root, text="Color (RGB)", pady=5).pack()
# rgb = Entry(root, textvariable=rgbColor).pack()
# rgbColor.trace("w", rgbtohexa)

Label(root, text="Filename", pady=5).pack()
filename = Entry(root)
filename.insert(0, 'color')
filename.pack()

Button(root, text="Export",
       command=lambda: exporter.export(int(width.get()), int(height.get()),
                                       red, green, blue,
                                       filename.get())).pack()

root.mainloop()
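# The exporter.export(width, height, red, green, blue, filename) call above
# presumably writes a solid-color image. A minimal sketch of such an export
# using Pillow (an assumption; the original module is not shown):
from PIL import Image


def export(width, height, red, green, blue, filename):
    """Save a width x height image filled with one RGB color (hypothetical)."""
    img = Image.new("RGB", (width, height), (red, green, blue))
    img.save(filename + ".png")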