def google_api(lat_lon: dict) -> bool:
    '''
    Google Maps API
    Makes a .png file
    '''
    # URL Parameters
    google_map_created = False
    params = ("center={0},{1}&zoom=12&scale=1&size=600x300&maptype=roadmap"
              "&key=AIzaSyB6V3Ut60fkvrA-wp_9XgQreUiEDvMlfaU&format=png&visual_refresh=true"
              "&markers=size:mid%7Ccolor:0xff0000%7Clabel:1%7C{2},{3}").format(
                  # The Static Maps API expects "latitude,longitude" for center and markers
                  lat_lon['latitude'], lat_lon['longitude'],
                  lat_lon['latitude'], lat_lon['longitude'])
    url_google_maps = "https://maps.googleapis.com/maps/api/staticmap?" + params
    try:
        # Call API
        response = requests.get(url_google_maps)
        # Handle Bad Request or Process
        if response.status_code == requests.codes.ok:
            FileIO.log("google request: ", str(response.status_code),
                       "object type: ", response.headers.get('content-type'))
            with open('static/city.png', 'wb') as png_file:
                png_file.write(response.content)
            google_map_created = True
    except Exception as e:
        print('error with request processing!', e)
        FileIO.log('error with request processing!', e)
        google_map_created = False
    return google_map_created
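# Hedged usage sketch (not part of the original app): fetch a static map for a
# known coordinate pair.  Assumes `requests` and the project's FileIO helper are
# importable; the coordinates below are made-up example values.
if __name__ == "__main__":
    sample_point = {"latitude": 40.7128, "longitude": -74.0060}
    if google_api(sample_point):
        print("map written to static/city.png")
    else:
        print("map request failed")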
def OnFdTextChanged(root, pgbar, pglabel, btn, entry, ft_label):
    fd_name = entry.get()
    file_io = FileIO()
    ft_existed = False
    if file_io.check_obj("save"):
        pic_dataset_paths = file_io.load_obj("save")
        for path in pic_dataset_paths:
            if path == fd_name:
                ft_existed = True
                break
    if ft_existed:
        pgbar["value"] = 100
        pglabel.configure(text="100.00%")
        btn["text"] = "重新提取特征"  # "re-extract features"
        if getattr(sys, 'frozen', False):
            application_path = os.path.dirname(sys.executable)
        elif __file__:
            application_path = os.path.dirname(__file__)
        ft_name = "feature_" + hashlib.md5(fd_name.encode()).hexdigest() + ".h5"
        ft_label["text"] = os.path.abspath(os.path.join(application_path, ft_name))
    else:
        pgbar["value"] = 0
        pglabel.configure(text="0.00%")
        btn["text"] = "提取特征"  # "extract features"
        ft_label["text"] = "当前文件夹没有特征被提取"  # "no features extracted for this folder"
    root.update()
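# Hedged illustration (not taken from the project): how the feature-file path
# shown above is derived from a dataset folder.  Only the standard library is
# assumed; "D:/photos" is a made-up example folder.
import hashlib
import os
import sys

def feature_file_for(folder):
    if getattr(sys, 'frozen', False):  # running from a frozen executable
        base = os.path.dirname(sys.executable)
    else:
        base = os.path.dirname(os.path.abspath(__file__))
    name = "feature_" + hashlib.md5(folder.encode()).hexdigest() + ".h5"
    return os.path.abspath(os.path.join(base, name))

# feature_file_for("D:/photos") -> ".../feature_<md5 of the folder path>.h5"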
class Model:
    def __init__(self, filename=None):
        self.__sheet = Sheet()
        self.__cursor = Cursor()
        self.__io = FileIO(filename)
        self.__programIsRunning = True
        self.__lastCommand = ""
        self.load()

    def getCursor(self):
        return self.__cursor

    def getSheet(self):
        return self.__sheet

    def load(self):
        self.__sheet.loadPublicObject(self.__io.loadFile())

    def save(self):
        self.__io.saveFile(self.__sheet.getPublicObject())

    def quit(self):
        self.__programIsRunning = False

    def checkProgramExecution(self):
        return self.__programIsRunning

    def echoCommand(self, c):
        self.__lastCommand = c

    def getLastCommand(self):
        return self.__lastCommand
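# Hedged usage sketch, assuming Sheet, Cursor and FileIO behave as they are used
# above; "sheet.csv" is a hypothetical file name.
model = Model("sheet.csv")        # load() is called from the constructor
model.echoCommand("w 10 B2")      # remember the last command for the view layer
print(model.getLastCommand())     # -> "w 10 B2"
model.save()                      # persist the sheet back through FileIO
model.quit()                      # checkProgramExecution() now returns False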
def test_close_file(self):
    if os.path.exists("foo"):
        os.remove("foo")
    f = FileIO("foo", "w")
    with self.assertRaises(Exception):
        f.close()
def fileOpen(self) -> None:
    if not self._menuFile.isEnabled():
        return
    result: Tuple[str, str] = QFileDialog.getOpenFileName(
        self, "Select file", "", "JSON Files (*.json);;All Files (*.*)")
    if len(result) != 2 or len(result[0]) == 0:
        return
    fn = result[0]
    print("FILE READ \"{}\"".format(fn))
    self.polygonDataHelper.clearAll()
    hdl = FileIO(self.polygonFactory)
    _, errs = hdl.readFile(fn)
    errs += self.polygonDataHelper.updateAllPolygonCache()
    self.polygonDataHelper.generateMapping()
    self._polygonList.polygonsChange()
    self._rasterSurface.repaint()
    if len(errs) > 0:
        ErrorListDrawer(
            "There were some errors while importing the JSON file:",
            errs, self).show()
def finish(self):
    # sort and write
    self.list = self.mergesort(self.list)
    validAccounts = []
    master = []
    for account in self.list:
        validAccounts.append(str(account.number))
        # Format the balance to be at least three digits
        if account.balance == 0:
            balance = '000'
        else:
            balance = str(account.balance)
        if len(balance) == 1:
            balance = '00' + balance
        elif len(balance) == 2:
            balance = '0' + balance
        master.append(str(account.number) + ' ' + balance + ' ' + account.name)
    # Append the all-zero account number to validAccounts
    validAccounts.append('0000000')
    FileIO.writeLines(self.newMasterFile, master)
    FileIO.writeLines(self.accountsFile, validAccounts)
def main(self, argv):
    try:
        opts, arg = getopt(argv, "hti:u:", ["help", "test", "input=", "user="])
    except GetoptError:
        # handler body was masked in the source; usage + exit is the conventional pattern
        self.usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            logger.info('Detected switch: {}'.format(opt))
            self.usage()
            sys.exit(2)
        elif opt in ('-i', '--input'):
            logger.info('Detected switch: {}'.format(opt))
            self.input_file = arg
        elif opt in ('-u', '--user'):
            logger.info('Detected switch: {}'.format(opt))
            self.made_by = arg
        elif opt in ('-t', '--test'):
            logger.info('Detected switch: {}'.format(opt))
            self.CONFIG = 'Test'
    self.show_params()
    file_io = FileIO()
    file_io.read(self.input_file, self.made_by, self.CONFIG)
def ShowOriginalImage(event):
    img_no = int(str(event.widget).split(".")[-1])
    file_io = FileIO()
    imlist = file_io.load_obj("imlist")
    ipath = imlist[img_no]
    img = mpimg.imread(ipath)
    plt.imshow(img)
    plt.show()
def test_loadFile_wrong_file(self):
    f1 = FileIO("tests/test1.csv", "r")
    loaded1 = f1.loadFile()
    f1.close()
    f2 = FileIO("tests/test2.csv", "r")
    loaded2 = f2.loadFile()
    f2.close()
    self.assertEqual((loaded1 == loaded2), False)
def finish(self):
    # sort and write
    self.list = mergesort(self.list)
    validAccounts = []
    master = []
    for account in self.list:
        validAccounts.append(account.number)
        master.append(account.number + ' ' + account.balance + ' ' + account.name)
    FileIO.writeLines(self.newMasterFile, master)
    FileIO.writeLines(self.accountsFile, validAccounts)
def _on_save(self, request):
    print('saving ' + request.filename)
    FileIO.write(self._dataset, request.filename)
    self._filepath = request.filename
    response = silkycoms.SaveProgress()
    self._coms.send(response, self._instance_id, request)
    self._add_to_recents(request.filename)
def test_readlines(self):
    filename = "kasta.env"
    assert not path.exists(filename)
    contents = ["one\n", "two three\n"]
    FileIO().writelines(filename, contents)
    assert FileIO().readlines(filename) == contents
    remove(filename)
    assert not path.exists(filename)
def test_writelines(self):
    filename = "kasta.env"
    assert not path.exists(filename)
    lines = ["first\n", "Second\n"]
    FileIO().writelines(filename, lines)
    assert path.exists(filename)
    assert FileIO().readlines(filename) == lines
    remove(filename)
    assert not path.exists(filename)
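# A minimal sketch of a FileIO wrapper that would satisfy the two tests above,
# assuming it simply delegates to the built-in open(); the real class in the
# project may add encoding or error handling on top of this.
class FileIO:
    def writelines(self, filename, lines):
        with open(filename, "w") as fh:
            fh.writelines(lines)

    def readlines(self, filename):
        with open(filename) as fh:
            return fh.readlines()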
def __init__(self, filepath=None, **kw):
    """ Set up the structures we need """
    # Initialise base class
    FileIO.__init__(self, filepath=filepath, **kw)
    # List which types of object we can read/write
    self.canRead = True
    self.canWrite = []
def __init__(self, image_path, output_filename):
    self._point_coords = []
    self._point_count = 0
    self._line_slope = 0
    self._lock_gui = False
    self._output_filename = output_filename
    self._file = FileIO("./data/")
    self._original_img = cv2.imread(image_path, cv2.IMREAD_COLOR)
    self._new_img = self._original_img.copy()
    self.setup_image_gui(self._new_img)
def AnalyseInputImage(queryDir, maxNo, model_name, img_canvas, root):
    # Load the pre-extracted feature matrix and the matching image names
    h5f = h5py.File(model_name, 'r')
    feats = h5f['dataset_1'][:]
    imgNames_utf = h5f['dataset_2'][:]
    imgNames = []
    for i in imgNames_utf:
        imgNames.append(i.decode('utf-8'))
    h5f.close()
    # Extract a feature vector for the query image and rank the dataset by similarity
    model = VGGNet()
    queryVec = model.extract_feat(queryDir)
    scores = np.dot(queryVec, feats.T)
    rank_ID = np.argsort(scores)[::-1]
    rank_score = scores[rank_ID]
    maxres = int(maxNo)
    imlist = [imgNames[index] for i, index in enumerate(rank_ID[0:maxres])]
    file_io = FileIO()
    file_io.save_obj(imlist, "imlist")
    # Rebuild the scrollable results canvas
    img_canvas.delete('all')
    vsbar = Scrollbar(frame_canvas, orient=VERTICAL, command=img_canvas.yview)
    vsbar.grid(row=0, column=1, sticky=NS)
    vsbar.config(command=img_canvas.yview)
    img_canvas.configure(yscrollcommand=vsbar.set)
    frame_images = Frame(img_canvas, bg="grey")
    img_canvas.create_window((0, 0), window=frame_images, anchor='nw')
    img_no = 0
    max_in_row = 0
    height_total = 0
    for i in imlist:
        # Resize each result to a 300px-wide thumbnail, laid out three per row
        basewidth = 300
        img = Image.open(i)
        wpercent = basewidth / float(img.size[0])
        hsize = int(float(img.size[1]) * float(wpercent))
        max_in_row = max(max_in_row, hsize)
        img = img.resize((basewidth, hsize), Image.ANTIALIAS)
        render = ImageTk.PhotoImage(img)
        img_show = Label(frame_images, image=render, name=str(img_no))
        img_show.bind("<Button-1>", ShowOriginalImage)
        img_show.image = render
        img_show.grid(row=img_no // 3, column=img_no % 3)
        img_no += 1
        if img_no % 3 == 0:
            height_total += max_in_row
            max_in_row = 0
    frame_canvas.config(height=height_total)
    root.update()
    img_canvas.config(scrollregion=img_canvas.bbox("all"))
def __init__(self, **kw):
    # Now initialise base class
    FileIO.__init__(self, **kw)
    # Now set any variables that define the capabilities of this class
    self.canRead = True
    self.canWrite = []
    self.fd = None
    # The file output file reader can work from a file or a list
    if 'olist' in kw:
        self.list = kw['olist']
    else:
        self.list = None
def main():
    """ Get configuration, get driver, and build handler and start it. """
    args = get_args()
    port = args.port[0]
    baud = args.baud[0]
    use_ftdi = args.ftdi
    # Driver with context
    with serial_link.get_driver(use_ftdi, port, baud) as driver:
        # Handler with context
        with Handler(Framer(driver.read, driver.write)) as link:
            link.add_callback(serial_link.log_printer, SBP_MSG_LOG)
            link.add_callback(serial_link.printer, SBP_MSG_PRINT_DEP)
            data = open(args.file, 'rb').read()

            def progress_cb(size):
                sys.stdout.write("\rProgress: %d%% \r" % (100 * size / len(data)))
                sys.stdout.flush()

            print('Transferring image file...')
            FileIO(link).write("upgrade.image_set.bin", data,
                               progress_cb=progress_cb)
            print('Committing file to flash...')
            code = shell_command(link, "upgrade_tool upgrade.image_set.bin", 300)
            if code != 0:
                print('Failed to perform upgrade (code = %d)' % code)
                return
            print('Resetting Piksi...')
            link(MsgReset(flags=0))
def home():
    # Run new image per request to route
    lat_long = aqi_api()
    google_api(lat_long)
    aqi_json = FileIO.read_jsonfile('aqi.json')
    return render_template('index.html', lat_long=lat_long, aqi_json=aqi_json)
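# Hedged sketch of how the handler above might be registered, assuming a
# standard Flask application object; only the template name and the helper
# functions come from the handler itself, everything else is an assumption.
from flask import Flask, render_template

app = Flask(__name__)
app.add_url_rule('/', 'home', home)   # equivalent to decorating home() with @app.route('/')

if __name__ == '__main__':
    app.run(debug=True)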
def test___init___file_exists(self):
    if os.path.exists("foo"):
        os.remove("foo")
    f = open("foo", "w")
    f.close()
    with self.assertRaises(Exception):
        FileIO("foo", "w")
def json_to_xml() -> bool:
    '''
    Converts json to xml
    return -> bool
    '''
    try:
        # Data should be appended to a file with the correct xml header
        # An xsl file should hold all html/css styling to render data from xml
        xml_header = ('<?xml version="1.0"?>' + "\n"
                      + '<?xml-stylesheet type="text/xsl" href="aqi.xsl"?>' + "\n")
        json_data = readfromjson("aqi.json")
        # print(json2xml.Json2xml(json_data).to_xml())
        xml_data = json2xml.Json2xml(json_data).to_xml()
        filename = "aqi.xml"
        FileIO.xml2file(xml_header, filename)
        FileIO.append_xml2file(xml_data, filename)
        return True
    except Exception:
        return False
def fileSaveAs(self) -> None:
    if not self._menuFile.isEnabled():
        return
    result: Tuple[str, str] = QFileDialog.getSaveFileName(
        self, "Select file", "", "JSON Files (*.json);;All Files (*.*)")
    if len(result) != 2 or len(result[0]) == 0:
        return
    fn = result[0]
    print("FILE WRITE \"{}\"".format(fn))
    hdl = FileIO(self.polygonFactory)
    _, err = hdl.writeFile(fn)
    if err is not None:
        ErrorListDrawer(
            "There were some errors while exporting the JSON file:",
            [err], self).show()
def __init__(self):
    # load configuration settings
    _config = FileIO().loadJSONFile("config.json")
    self.twData = _config["twitter_auth_data"]
    self.mongoData = _config["mongo_config"]
    # parse command line options
    self.parser = ArgumentParser(
        description="Grabs data from the Twitter API and stores it in a MongoDB Database.")
    self.parser.add_argument('-n', '--name', type=str, nargs=1, required=True,
                             help='Unique name of the current search.')
    self.parser.add_argument('-t', '--terms', type=str, nargs='+', required=False,
                             help='Provide a list of search terms used to collect data from the API.')
    self.parser.add_argument('-f', '--file', type=str, nargs=1, required=False,
                             help='Provide a path to a JSON file of terms to collect data from the API.')
    self.parser.add_argument('-c', '--count', type=int, required=False,
                             help='Collect the specified number of tweets.')
    self.parser.add_argument('-l', '--listen', action='store_true', required=False,
                             help="Just open the stream and listen. Don't connect to database.")
    self.args = self.parser.parse_args()
def __init__(self, path):
    self.path = path
    self.source, self.sink = Pipe()
    self.events = {'reading': Event()}
    if self.path == '':
        self.reader = HackRFIO(self.source, self.events)
    else:
        self.reader = FileIO(self.path, self.source, self.events)
    self.reader.start()
    self.periodogram = Periodogram(self.sink, self.events)
    self.periodogram.start()
    try:
        self.reader.join()
        self.periodogram.join()
    except KeyboardInterrupt:
        self.reader.terminate()
        self.periodogram.terminate()
def load(self):
    '''Identify the path from which to load the map info, pass the data
    along to the FileIO class, and store the information in the relevant
    attributes.'''
    file = tkFileDialog.askopenfilename()
    items = FileIO.load(file)
    self.tiles = items[0]
    self.objects = items[1]
    self.selected = None
    self.x_offset = items[2]
    self.y_offset = items[3]
def startStreaming(self):
    if self.db and self.auth:
        terms = []
        if self.args.file:
            # load a list of terms from a file
            data = FileIO().loadFile(self.args.file[0])
            terms = [term for term in data.strip().split('\n')]
        elif self.args.terms:
            # read in a list of terms from the command line
            terms = self.args.terms
        self.db.tweets.insert({"meta_data": {"terms": terms,
                                             "name": self.args.name[0]}})
        # connect to twitter stream and collect some tweets!
        if terms:
            print "\n\nEnter [x] to quit the stream..."
            print "Connecting to stream..."
            listener = MongoStreamListener(self.db, listen=self.args.listen,
                                           limit=self.args.count,
                                           name=self.args.name[0])
            streamer = tweepy.Stream(auth=self.auth, listener=listener, timeout=60)
            print "Connected. Filtering tweets...\n"
            streamer.filter(None, terms, async=True)
            while True:
                opt = getpass('')
                if opt == 'x':
                    break
            print "Quitting..."
            streamer.disconnect()
        else:
            self.parser.print_usage()
def _on_open(self, request):
    print('opening ' + request.filename)
    mm = MemoryMap.create(self._buffer_path, 65536)
    dataset = DataSet.create(mm)
    try:
        FileIO.read(dataset, request.filename)
        self._dataset = dataset
        self._filepath = request.filename
        self._coms.send(None, self._instance_id, request)
        self._add_to_recents(request.filename)
    except Exception as e:
        base = os.path.basename(request.filename)
        message = 'Could not open {}'.format(base)
        cause = e.strerror
        self._coms.send_error(message, cause, self._instance_id, request)
def test_loadFile_good_file(self):
    f = FileIO("tests/test1.csv", "r")
    loaded = f.loadFile()
    f.close()
    reference = [
        Cluster('0', [Point([1, 1, 1]), Point([2, 2, 2]), Point([3.0, 3.0, 3.0])]),
        Cluster('1', [
            Point([1, 1, 1]),
            Point([1, 1, 1]),
            Point([1, 1, 1]),
            Point([1, 1, 1]),
            Point([1, 1, 1]),
            Point([1, 1, 1]),
        ]),
        Cluster('2', [Point([1, 1, 1])])
    ]
    self.assertEqual(loaded, reference)
def GetFoldernameFromSelector(root, e):
    root.update()
    filename = askdirectory()
    if filename != '':
        file_io = FileIO()
        if file_io.check_obj("save"):
            pic_dataset_paths = file_io.load_obj("save")
            ft_existed = 0
            for path in pic_dataset_paths:
                if path == filename:
                    break
                ft_existed += 1
            if ft_existed >= len(pic_dataset_paths):
                pic_dataset_paths.append(filename)
            else:
                # swap the already-known folder to the end so it becomes the current selection
                tmp = pic_dataset_paths[ft_existed]
                pic_dataset_paths[ft_existed] = pic_dataset_paths[-1]
                pic_dataset_paths[-1] = tmp
        else:
            pic_dataset_paths = [filename]
        e["values"] = pic_dataset_paths
        e.current(len(pic_dataset_paths) - 1)
    root.update()
def __init__(self, oldMasterFile, newMasterFile, accountsFile):
    lines = FileIO.readLines(oldMasterFile)
    self.newMasterFile = newMasterFile
    self.accountsFile = accountsFile
    self.list = []
    count = 0
    for line in lines:
        line = line.strip("\r\n ")
        params = line.split(' ')
        if len(params) != 3:
            Utility.fatal("Line " + str(count) + " is invalid - parameter count != 3")
        self.list.append(Account(int(params[0]), int(params[1]), params[2]))
        count += 1
def readAccounts(path):
    lines = FileIO.readLines(path)
    cleaned = []
    if len(lines) < 1:
        raise ValueError('Empty Accounts File')
    # Clean and strip newlines from the numbers, then
    # ensure that they're valid account numbers
    for line in lines:
        clean = Utility.cleanString(line)
        if (not clean.isdigit() or len(clean.strip()) != 7 or
                (clean != lines[-1].strip() and clean[0] == '0')):
            raise ValueError('Invalid accounts file, error: ' + clean)
        cleaned.append(clean)
    # Ensure that the last line is the all-zero account number
    if cleaned[-1] != "0000000":
        raise ValueError('Invalid accounts file, missing zero account number at file end')
    return cleaned
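# Hedged illustration of an accounts file that readAccounts() accepts, given the
# checks above: seven-digit numbers, a leading zero allowed only on the required
# all-zero terminator, which must be the last line.  Assumes the project's
# FileIO.readLines and Utility.cleanString helpers; the path is made up.
with open("/tmp/accounts.txt", "w") as fh:
    fh.write("1234567\n7654321\n0000000\n")
print(readAccounts("/tmp/accounts.txt"))   # -> ['1234567', '7654321', '0000000']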
def clear(self):
    self._transactions = []
    FileIO.clear(self.path)
def finish(self):
    self._transactions.append('EOS 0000000 000 0000000 ***')
    FileIO.writeLines(self.path, self._transactions)
def __init__(self) -> None:
    FileIO.__init__(self)
    self.__updateInfo: Dict[str, str] = {}
    self.__cleanupFiles: List[Tuple[int, str]] = []
def reset_factory_defaults(self):
    # Delete settings file
    fio = FileIO(self.link)
    fio.remove('config')
    # Reset the Piksi
    self.link.send_message(ids.RESET, '')