def gather(self):
    """Yield domains loaded from a URL or local path given via options.

    The option name defaults to --url but can be overridden through
    self.extra["name"]. Remote (http/https) sources are downloaded into
    the cache directory on every run; anything else is treated as a
    local file path.
    """
    # Defaults to --url, but can be overridden.
    name = self.extra.get("name", "url")
    url = self.options.get(name)
    if url is None:
        logging.warning("A --url is required. (Can be a local path.)")
        exit(1)

    # remote URL
    if url.startswith("http:") or url.startswith("https:"):
        # Though it's saved in cache/, it will be downloaded every time.
        remote_path = os.path.join(self.cache_dir, "url.csv")

        try:
            response = requests.get(url)
            utils.write(response.text, remote_path)
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; any download/write failure is still reported.
        except Exception:
            logging.error("Remote URL not downloaded successfully.")
            print(utils.format_last_exception())
            exit(1)

    # local path
    else:
        remote_path = url

    for domain in utils.load_domains(remote_path):
        yield domain
def gather(self):
    """Yield domains loaded from a URL or local path given via options.

    The option name defaults to --url but can be overridden through
    self.extra["name"]. Remote (http/https) sources are downloaded into
    the cache directory on every run; anything else is treated as a
    local file path.
    """
    # Defaults to --url, but can be overridden.
    name = self.extra.get("name", "url")
    url = self.options.get(name)
    if url is None:
        # logging.warn is a deprecated alias; use logging.warning.
        logging.warning("A --url is required. (Can be a local path.)")
        exit(1)

    # remote URL
    if url.startswith("http:") or url.startswith("https:"):
        # Though it's saved in cache/, it will be downloaded every time.
        remote_path = os.path.join(self.cache_dir, "url.csv")

        try:
            response = requests.get(url)
            utils.write(response.text, remote_path)
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; any download/write failure is still reported.
        except Exception:
            logging.error("Remote URL not downloaded successfully.")
            print(utils.format_last_exception())
            exit(1)

    # local path
    else:
        remote_path = url

    for domain in utils.load_domains(remote_path):
        yield domain
def write_report(report):
    """Serialize *report* to JSON under the data directory.

    Returns the report's relative data path.
    """
    data_path = path_for(report, "json")
    destination = os.path.join(utils.data_dir(), data_path)
    serialized = utils.json_for(report)
    utils.write(serialized, destination)
    return data_path
def write_report(report):
    """Serialize *report* to JSON under the data directory.

    Returns the report's relative data path.
    """
    data_path = path_for(report, "json")
    utils.write(
        utils.json_for(report),
        # os.path.join is portable and consistent with the other
        # write_report variants; "%s/%s" hard-codes the separator.
        os.path.join(utils.data_dir(), data_path)
    )
    return data_path
def test_write_read(self):
    """Round-trip a small file through utils.write/utils.read, then clean up."""
    filename = './write_demo.txt'
    # Expected on-disk location: <cwd>/test/write_demo.txt
    # NOTE(review): joining a './'-prefixed relative path assumes the test
    # runner's cwd is the project root — confirm against the test harness.
    test_filename = os.path.join(os.getcwd(), 'test', filename)
    content = 'test'
    # NOTE(review): argument order here is (path, content); other modules in
    # this codebase call utils.write(content, path) — verify utils.write's
    # actual signature, one of these call sites may be wrong.
    utils.write(filename, content)
    self.assertTrue(os.path.exists(test_filename))
    self.assertEqual(utils.read(filename), content)
    # Remove the file so the test leaves no artifact behind.
    os.remove(test_filename)
def init(environment, options):
    """Initialize a11y scan configuration from options.

    Parses --a11y_redirects (a YML file, local or remote) into the
    module-level `redirects` dict, and resolves --a11y_config (a JSON
    file, local or remote) into the module-level `config` path. Remote
    files are downloaded into the cache directory.

    Returns True on success, False on any validation or download error.
    """
    global redirects
    global config

    cache_dir = options.get("_", {}).get("cache_dir", "./cache")

    redirects_file = options.get("a11y_redirects")
    config_file = options.get("a11y_config")

    # Parse redirects
    if redirects_file:
        if not redirects_file.endswith(".yml"):
            logging.error("--a11y_redirects should be a YML file")
            return False

        # if remote, try to download
        if redirects_file.startswith("http:") or redirects_file.startswith("https:"):
            redirects_path = os.path.join(cache_dir, "a11y_redirects.yml")

            try:
                response = requests.get(redirects_file)
                utils.write(response.text, redirects_path)
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # still propagate.
            except Exception:
                logging.error("--a11y_redirects URL not downloaded successfully.")
                return False

        # Otherwise, read it off the disk
        else:
            redirects_path = redirects_file

            if (not os.path.exists(redirects_path)):
                logging.error("--a11y_redirects file not found.")
                return False

        with open(redirects_path, 'r') as f:
            # NOTE(review): yaml.load without an explicit Loader is unsafe on
            # untrusted input and deprecated since PyYAML 5.1 — prefer
            # yaml.safe_load if the file needs no custom tags.
            redirects = yaml.load(f)

    # Get config
    if config_file:
        if not config_file.endswith(".json"):
            logging.error("--a11y_config should be a json file")
            return False

        # if remote, try to download
        if config_file.startswith("http:") or config_file.startswith("https:"):
            config_path = os.path.join(cache_dir, "a11y_config.json")

            try:
                response = requests.get(config_file)
                utils.write(response.text, config_path)
            except Exception:
                logging.error("--a11y_config URL not downloaded successfully.")
                return False

        # Otherwise use the local path directly. (Bug fix: previously
        # config_path was unbound for local files, raising NameError.)
        else:
            config_path = config_file

        config = config_path

    return True
def write_report(report):
    """Write *report* as JSON into the data directory and return its relative path."""
    data_path = path_for(report, "json")
    target = os.path.join(utils.data_dir(), data_path)
    utils.write(utils.json_for(report), target)
    return data_path
#MySubwaySys = SubwaySystem() starttime=time.time() track = True while track: i = 0 while i < num: i+=1 print("tick", i) data = None messagelist = [] tracking_results = dmine.TrackTrains(feed_id) #MySubwaySys.attach_tracking_data(tracking_results) #print("*********************** Trains in system: " + str(MySubwaySys.NumberOfTrains) + " **************************") fname = 'tracking_results' + str(int(time.time())) + '.pkl' written = False while written is False: #try: written = utils.write(tracking_results, fname) print(written) #except: # written = False # print("Writing to disk failed, retrying...") # track = False # break time.sleep(delay - ((time.time() - starttime) % delay)) #wait until we are supposed to sample the next set of data. # fname = 'SubwaySys' + str(int(time.time())) + '.pkl' # utils.write(MySubwaySys, fname) # MySubwaySys.reset()
if __name__ == "__main__": Mdes = 32768 Ment = 64 print "Mdes:",Mdes print "Ment:",Ment if(len(sys.argv) == 3): file = utils.read(sys.argv[1]) if(file == None): print "Error I/O: the file doesn't exists." exit(0) print "Longitud de l'entrada a comprimir:",len(file) t1 = time.time() output = compress.compress(file,Mdes,Ment) t1 = time.time()-t1 print "Temps compressio:", t1 print "Compressio: %f : 1" % (float(len(file))/len(output)) utils.writeCompress(output, sys.argv[2]) elif(len(sys.argv) == 2): file = utils.readCompress(sys.argv[1]) t1 = time.time() decompressed = decompress.decompress(file,Mdes,Ment) print "Temps descompressio:",time.time()-t1 print "Descompressio: ",len(decompressed) utils.write(decompressed, sys.argv[1]+".wav") else: print "Hint: \n\t* compress: python lz77-wav.py my/path/myfile.txt /my/path/namecompressfile" print "\t* decompress: python lz77-wav.py my/path/namecompressfile"