def makeFile(s):
    """Create a new word file Words/<s>.txt and register it in Words/Files.txt.

    Underscores are allowed, remember that!

    Returns a status code: 0 invalid characters, 1 blank name, 2 reserved
    name ("Files"), 3 name already taken, None on success.
    """
    if (InvChars(s)):
        return 0
    if (s.isspace() or len(s) == 0):
        return 1
    if (s == "Files"):
        return 2
    # Set up to Update the Files File
    L = File.Read("Words/Files.txt")
    # Check if the name is available
    for i in L:
        if s == i:
            print("Three")
            return 3
    L.append(s)
    # Delete blanks.
    # FIX: the original did `L.remove(i)` with the integer loop index, which
    # raises ValueError on a list of strings, and advanced the index after a
    # removal, skipping the following entry.
    L = [entry for entry in L if not entry.isspace()]
    # Update
    File.Write(File.cleanUp(L), "Words/Files.txt")
    # Write file
    File.Write("", "Words/" + s + ".txt")
    print("The File " + (s + ".txt") + " has been Created. Please Add Terms")
def __call__(self):
    """Poll for files flagged under this task, merge upload metadata into
    them, persist the changes, and finally clear the task flag."""
    # The flagged files may not be visible immediately; poll with a delay.
    for _ in range(RETRIES):
        files = File.get_files_by_flag(self.task_id)
        if files:
            break
        time.sleep(10)
    files = self._cleanup(files)
    if not status.status[self.task_id] == True:
        self._execution_failure(files)
    try:
        for entry in files:
            asset_path = os.path.join(ASSET_PATH, entry['filename'])
            try:
                info = Upload.get_info(asset_path)
            except:
                continue
            for key, value in info.items():
                if key == 'filename':
                    value = os.path.basename(value)
                entry[key] = value
            entry['queue_flag'] = None
        File.update_files(files)
    except:
        # The queue would freeze with an exception
        pass
    File.unset_flag(self.task_id)
def initGraphMatrix(GraphList, dict_file_dir=r"E:\workspace\DATA\web\NodeDict.data"):
    """Build a symmetric (undirected) adjacency matrix from an edge list.

    GraphList: iterable of (node_start, node_end) pairs.
    dict_file_dir: path where the node->index dictionary is persisted.
        (Generalized: was a hard-coded absolute path; default keeps the old
        behavior for existing callers.)
    Returns the node_num x node_num 0/1 matrix.
    """
    # get all nodes (first-seen order fixes the index assignment)
    nodes = []
    for line in GraphList:
        node_s, node_e = line
        if node_s not in nodes:
            nodes.append(node_s)
        if node_e not in nodes:
            nodes.append(node_e)
    # get node_dict: node -> matrix index
    node_num = len(nodes)
    node_dict = dict()
    for node_i, node in enumerate(nodes):
        node_dict[node] = node_i
    # save dict for later lookups
    DictFile = File(dict_file_dir)
    DictFile.writeDict(node_dict)
    # -----need output this -------
    GraphMatrix = [[0 for _ in range(node_num)] for _ in range(node_num)]
    for line in GraphList:
        node_s, node_e = line
        GraphMatrix[node_dict[node_s]][node_dict[node_e]] = 1
        # **** no direction: mirror the edge
        GraphMatrix[node_dict[node_e]][node_dict[node_s]] = 1
    return GraphMatrix
def processData(assetIDList, fileName, reportDate, maxAssets=19):
    """Parse an hourly CSV report into per-hour value rows.

    assetIDList: list of dicts with at least "AssetID" and "Asset" keys.
    fileName: CSV path; the first 12 rows are header lines and are skipped.
    reportDate: date the hourly columns belong to.
    maxAssets: stop after this many assets were matched.
        (Generalized: was a hard-coded 19; default preserves old behavior.)

    Returns a list of [asset_name, datetime, float_value] rows.
    On any exception the report file is deleted and the error re-raised.
    """
    resultList = []
    assetCounter = 0
    try:
        with open(fileName, "r") as infile:
            reader = csv.reader(infile)
            # Rows 0-11 are report headers.
            for line in islice(reader, 12, None):
                # If enough assets have been found, exit.
                if assetCounter == maxAssets:
                    break
                foundValue = [
                    dictionary for dictionary in assetIDList
                    if dictionary["AssetID"] == line[2]
                ]
                if foundValue:
                    assetCounter += 1
                    hourlyValues = line[3:]
                    for hourIndex in range(len(hourlyValues)):
                        # Column position encodes the hour of day.
                        tm = time(hourIndex)
                        combined = datetime.combine(reportDate, tm)
                        resultList.append([
                            foundValue[0]['Asset'],
                            combined,
                            float(hourlyValues[hourIndex]),
                        ])
        return resultList
    except Exception as e:
        # Remove the partially-processed report before propagating.
        File.deleteFile(fileName)
        raise e
def SaveMessage1(path, message):
    """Interactively ask whether to save `message` to a new file, to `path`,
    or not at all; repeats until a valid option is chosen."""
    prompt = "[1] New file\n[2] This file\n[0] Don't save \n-> "
    while True:
        print(
            "------ Would you like to save your message in a new file or in this file? ------\n"
        )
        try:
            choice = int(input(prompt))
        except ValueError:
            print("\n------ Invalid option ------\n")
            continue
        if choice == 1:
            fileName = File.CreateFile()
            # Empty name means creation was aborted; ask again.
            if fileName == "":
                continue
            File.WriteFile(fileName, message)
            return
        if choice == 2:
            File.WriteFile(path, message)
            return
        if choice == 0:
            return
        print("\n------ Invalid option ------\n")
def verify(self):
    """Verify this ticket/NCA pair is consistent.

    Checks that the rights ID embedded in the NCA header matches
    self.titleId, then that the decrypted title key reveals a valid PFS0
    partition magic.

    Raises IndexError on a title-ID mismatch, LookupError on a bad key.
    Returns True on success.
    """
    ncaHeader = Fs.NcaHeader()
    # Decrypt the NCA header in memory (XTS with the console header key).
    ncaHeader.open(
        File.MemoryFile(self.ncaHeader, Type.Crypto.XTS,
                        uhx(Keys.get('header_key'))))
    # First 16 bytes of the rights ID carry the title ID.
    id = ncaHeader.rightsId[0:16].decode().upper()
    if str(self.titleId) != id:
        raise IndexError('Title IDs do not match! ' + str(self.titleId) +
                         ' != ' + id)
    decKey = Keys.decryptTitleKey(uhx(self.titleKey), ncaHeader.masterKey)
    pfs0 = Fs.PFS0(self.sectionHeaderBlock)
    '''
    print('encKey = ' + str(self.titleKey))
    print('decKey = ' + str(hx(decKey)))
    print('master key = ' + str(ncaHeader.masterKey))
    print('ctr = ' + str(hx(pfs0.cryptoCounter)))
    print('offset = ' + str(self.pfs0Offset))
    '''
    # Decrypt the PFS0 header with the derived title key (CTR mode); only a
    # correct key yields the b'PFS0' magic below.
    mem = File.MemoryFile(self.pfs0Header,
                          Type.Crypto.CTR,
                          decKey,
                          pfs0.cryptoCounter,
                          offset=self.pfs0Offset)
    magic = mem.read()[0:4]
    if magic != b'PFS0':
        raise LookupError('Title Key is incorrect!')
    return True
def initFromParam(cls, param, is_control, param_attr):
    """Alternate constructor: build a ChIP-seq experiment from the
    comma-separated FASTQ list stored in param.<param_attr>."""
    import ChIPSeqReplicate
    import File
    experiment = cls()
    experiment.param = param
    experiment.is_control = is_control
    fastq_paths = getattr(param, param_attr).split(',')
    experiment.reps = [
        ChIPSeqReplicate.initFromFastqFile(p) for p in fastq_paths
    ]
    # Wire each replicate back to its experiment and give it a 1-based index.
    for idx, rep in enumerate(experiment.reps):
        rep.param = param
        rep.index = idx + 1
        rep.chipseqexp = experiment
        tagalign_path = "%s/%s.tagAlign.gz" % (param.temp_dir, rep.name)
        rep.tagalign = File.initFromFullFileName(tagalign_path)
    if experiment.is_control:
        pooled_path = param.fchipseq_control_signals
    else:
        pooled_path = param.fchipseq_target_signals
    experiment.pooled_tagalign = File.initFromFullFileName(pooled_path)
    experiment.peaks = File.initFromFullFileName(param.fall_chipseq_peaks)
    experiment.final_peaks = File.initFromFullFileName(param.fidr_chipseq_peaks)
    return experiment
def clone(link, destination, delete_existing=False, recursive=False, ignore_existing=False):
    """git-clone `link` into `destination`.

    delete_existing removes a clean existing checkout first (refuses when it
    has local changes); ignore_existing silently returns if the path exists;
    recursive adds --recursive to the clone.
    """
    if delete_existing:
        if has_changes(destination):
            Debug.throw("Can not delete repository with changes at path: " + destination)
        else:
            File.rm(destination)
    if File.exists(destination):
        if ignore_existing:
            Debug.info(destination + " exists")
            return
        Debug.throw("Git repository: " + link + " already exists for path: " + File.full_path(destination))
    cmd = ["git", "clone", link, destination]
    if recursive:
        cmd.append("--recursive")
    Shell.run(cmd)
def initGraphMatrix(GraphList):
    """Build a symmetric adjacency matrix for the edge list `GraphList`,
    persisting the node->index mapping to NodeDict.data."""
    # Collect distinct nodes in first-seen order (start before end, as the
    # edges are read).
    nodes = []
    for node_s, node_e in GraphList:
        if node_s not in nodes:
            nodes.append(node_s)
        if node_e not in nodes:
            nodes.append(node_e)
    # Map each node to its matrix index.
    node_dict = {node: idx for idx, node in enumerate(nodes)}
    # Persist the mapping for later lookups.
    DictFile = File(r"E:\workspace\DATA\web\NodeDict.data")
    DictFile.writeDict(node_dict)
    # Undirected adjacency matrix: mark both directions of every edge.
    node_num = len(nodes)
    GraphMatrix = [[0] * node_num for _ in range(node_num)]
    for node_s, node_e in GraphList:
        GraphMatrix[node_dict[node_s]][node_dict[node_e]] = 1
        GraphMatrix[node_dict[node_e]][node_dict[node_s]] = 1
    return GraphMatrix
def remote(path):
    """Return the origin remote URL of the git repo at `path`
    ("No folder" when the path does not exist)."""
    if not File.exists(path):
        return "No folder"
    cmd = [
        "git", "-C", File.full_path(path),
        "config", "--get", "remote.origin.url",
    ]
    return Shell.get(cmd)
def startup():
    """Initial application setup: restore the Words directory if missing,
    normalize/verify the word files, truncate Output.txt, and return the
    list of empty files.

    Raises Error_Handler.FileReferenceError when the empty-file scan fails.
    """
    print(
        "Keybasket v 1.4.2 \nCreated by Andrew Vella\nCopyright 2020\nMIT License\n\n"
    )
    # If a set up is needed
    if not os.path.isdir("Words"):
        Restore.create()
    try:
        File.allFileExChange(
            False)  # False means convert to text, True for convert to bin
    except FileNotFoundError:
        pass
    File.fileVerification()
    # make output file.
    # FIX: the original left the handle open (f = open(...) with no close);
    # the with-statement truncates the file and closes it immediately.
    with open("Output.txt", "w"):
        pass
    try:
        r = File.getEmptyFiles()
        return r
    except ValueError:
        raise Error_Handler.FileReferenceError
def slec(self, name):
    """Handle file selection: validate the typed name through
    action.makeFile's status codes, open the file when it exists, otherwise
    report not-found and list the available files in the textbox."""
    code = action.makeFile(self.name.get())
    if code == 0:
        self.invalidChar()
        return None
    if code == 1:
        self.Blank()
        return None
    if code == 2:
        self.Reserved()
        return None
    if code == 3 and File.arrayContains(File.Read("Words/Files.txt"),
                                        self.name.get()):
        self.Selected(name)
        return None
    # Unknown name: show the list of registered files instead.
    self.NotFound(name)
    self.textbox.delete("1.0", END)
    self.printable = File.Read("Words/Files.txt")
    for entry in self.printable:
        self.textbox.insert(END, entry + "\n")
def updateIndex(ix, path, w=140, percentSize=10): writer = ix.writer() total = 0 for types in ("PDF", "TXT"): for fname, typ in File.getFilePathsByType(path, types): total += 1 print total counter = 0 print "Indexing " + types + " files:" for fname, typ in File.getFilePathsByType(path, types): rest = w - (len('Indexing ' + fname) % w) rest = rest - percentSize print 'Indexing ' + fname, print "." * rest, print "[ %3s%% ]" % (str(int(counter * 1.0 / total * 1.0 * 100))) try: content, err = File.getFileContent(fname, typ) content = content.decode("utf-8", 'replace') writer.add_document(title=unicode(fname.split('/')[-1]), path=unicode(fname), content=content, Type=unicode(typ)) if err: print "Error indexing " + fname + "\n" + err err = "" except UnicodeDecodeError: print 'The File ' + fname + " cannot be indexed" counter += 1 writer.commit()
def updateIndex(ix,path,w=140,percentSize=10): writer=ix.writer() total=0 for types in ("TXT","PDF"): process=subprocess.Popen("ls -R '"+path+"' | grep '."+types.lower()+"' | wc -l",shell=True,stdout=subprocess.PIPE) total,err=process.communicate() total=int(total) print total," files" # break counter=0 print "Indexing "+types+" files:" for fname,typ in File.getFilePathsByType(path,types): rest=w-(len('Indexing '+fname)%w) rest=rest-percentSize print 'Indexing '+fname, print "."*rest, print "[ %3s%% ]"%(str(int(counter*1.0/total*1.0*100))) try: content,err=File.getFileContent(fname,typ) content=content.decode("utf-8",'replace') writer.add_document(title=unicode(fname.split('/')[-1]),path=unicode(fname), content=content,Type=unicode(typ)) if err: print "Error indexing "+fname+"\n"+err err="" except UnicodeDecodeError: print 'The File '+fname+" cannot be indexed" counter+=1 writer.commit()
def __call__(self):
    """Poll for files flagged under this task, enrich them with upload
    metadata, clear their queue flag, persist, and unset the task flag."""
    # Poll: the flagged files may not be visible yet; retry with a delay.
    for i in range(RETRIES):
        files = File.get_files_by_flag (self.task_id)
        if files:
            break
        else:
            time.sleep (10)
    files = self._cleanup (files)
    # Route through the failure handler when the task did not succeed.
    if not status.status[self.task_id] == True:
        self._execution_failure (files)
    try:
        for x in range(len(files)):
            filename = os.path.join (ASSET_PATH, files[x]['filename'])
            try:
                f = Upload.get_info (filename)
            except:
                # Unreadable upload info: leave this entry unchanged.
                continue
            for key, value in f.items():
                if key == 'filename':
                    # Keep only the basename, not the full asset path.
                    value = os.path.basename(value)
                files[x][key] = value
            files[x]['queue_flag'] = None
        File.update_files (files)
    except:
        # The queue would freeze with an exception
        pass
    File.unset_flag (self.task_id)
def updateIndex(ix,path,w=140,percentSize=10): writer=ix.writer() total=0 for types in ("PDF","TXT"): for fname,typ in File.getFilePathsByType(path,types): total+=1 print total counter=0 print "Indexing "+types+" files:" for fname,typ in File.getFilePathsByType(path,types): rest=w-(len('Indexing '+fname)%w) rest=rest-percentSize print 'Indexing '+fname, print "."*rest, print "[ %3s%% ]"%(str(int(counter*1.0/total*1.0*100))) try: content,err=File.getFileContent(fname,typ) content=content.decode("utf-8",'replace') writer.add_document(title=unicode(fname.split('/')[-1]),path=unicode(fname), content=content,Type=unicode(typ)) if err: print "Error indexing "+fname+"\n"+err err="" except UnicodeDecodeError: print 'The File '+fname+" cannot be indexed" counter+=1 writer.commit()
def createFile(self, evt):
    """Create the data file for this dialog's name and columns, register it
    with the parent window, and close the dialog."""
    new_file = File(self.parent.dataDirectory + self.name)
    new_file.newFile(self.columns)
    self.parent.updateFiles(new_file)
    self.parent.panel.Labels.resetAxes()
    self.parent.hasData = new_file.hasData()
    self.Close()
def play_chess(file):
    """Initialise globals, build the window/board/menu for `file`, and enter
    the GUI main loop."""
    CD.set_vars()
    win = set_up_window()
    game_board = open_board(file)
    File.menu(win, game_board)
    layout_board(win, game_board)
    win.mainloop()
def _root_dir(path='.'):
    """Walk upward from `path` looking for a directory containing a
    .projectile marker; return its full path, or "" when none is found
    before the filesystem root.

    FIX: the original loop never ascended to the parent directory (it
    returned "" on the first miss, making the while loop pointless) and fell
    through returning None when `path` was already the root.  The ascent
    mirrors the sibling root_dir() helper.
    """
    _path = path
    while not File.is_root(_path):
        if File.exists(_path + '/.projectile'):
            return File.full_path(_path)
        _path += "/.."
    return ""
def pring_folder_changes(path):
    """Print the name of every repository under `path` that has local
    changes.

    NOTE(review): 'pring' looks like a typo for 'print'; the name is kept
    because callers may rely on it.
    """
    if not File.exists(path):
        return
    for entry in File.get_files(path):
        if entry == ".DS_Store":
            continue
        if has_changes(path + "/" + entry):
            print(entry + " - has changes")
def main():
    """Top-level menu loop: redisplay the main menu until Quit (code 0)."""
    def _show_main_menu():
        # Show the main menu and dispatch on the user's choice.
        menu(["Generate", "Access Files", "About", "Quit"], mainTitle)
        return processMenu(File.Read("Output.txt"))

    window = ""
    # processMenu returns 0 for Quit and 1 for "Main Menu".
    while not window == 0 or window == 1:
        window = _show_main_menu()
        if window == 1:
            window = _show_main_menu()
def onOutputChanged(self, output):
    """Persist the current listener count whenever the job output changes."""
    Job.Job.onOutputChanged(self, output)
    listeners = 0
    try:
        # NOTE(review): the +2 offset is unexplained here — presumably it
        # accounts for known internal/system listeners; confirm against the
        # stream configuration.
        listeners = len(self.output['listeners']) + 2
    except:
        # Missing or malformed output leaves the count at 0.
        pass
    File.replaceJson(Config.LISTENER_COUNT_FILE, listeners=listeners)
def to_put(self, chessboard, column):
    """Drop this player's piece into a random legal column (1-8).

    Returns the board's put() result, or -1 when the board is already full.
    The `column` argument is ignored (kept for interface compatibility).
    """
    if chessboard.is_full():
        File.print_and_write("棋盘放满了")
        return -1
    while True:
        candidate = random.randint(1, 8)
        if chessboard.can_put(candidate):
            return chessboard.put(candidate, self)
def do_register(self, args):
    # NOTE(review): this block arrived with its string literals scrubbed
    # ('******' credential-redaction artifacts), which destroyed the original
    # statements — there is an unbalanced call expression and an orphaned
    # 'else:'.  The code is left byte-identical; the original logic (read a
    # password twice, compare, write it to ./dbConfig/useradmin) must be
    # restored from version control, not guessed here.
    password = getpass.getpass(prompt='type in your password:'******'confirm your password:'******'./dbConfig/useradmin', password+'\n') print 'Register to PySQL suceessfully!!' else: print '[Error] the password you tyoe in are not the same :( Plz try again.'
def build():
    """Configure the build directory if needed, then build the project.

    iOS targets build through CMake directly; everything else runs make.
    """
    if not File.exists(build_dir):
        prepare()
    File.cd(build_dir)
    builder = Cmake.build if Args.ios else Make.run
    builder()
    print("Project build time: " + Time.duration())
def robot_put(self, game):
    """Let the robot place an 'O' piece after a short pause; announce and
    return the move, or declare a draw when the robot cannot place (-1)."""
    time.sleep(0.3)
    move = game.robot.to_put(game.chessboard, 0)
    if move == -1:
        self.draw()
        return
    File.print_and_write("轮到我了,我把O棋子放在第" + str(move[1]) + "列...")
    game.chessboard.print_chessboard()
    return move
def root_dir(path='.'):
    """Walk upward from `path` and return the full path of the top-most
    directory that contains a CMake file (i.e. one whose parents do not);
    throws via Debug when none is found before the filesystem root.

    FIX: removed a leftover debug print(_path) that polluted stdout.
    """
    _path = path
    while not File.is_root(_path):
        if has_cmake_file(_path):
            # Only the top-most CMake directory counts as the project root.
            if not has_parent_cmake_files(_path):
                return File.full_path(_path)
        _path += "/.."
    Debug.throw("CMake root directory not found for path: " + File.full_path(path))
def create_simulation(self, amount, query):
    """Write the verification query to /q2.q and return the write result.

    NOTE: `amount` is currently unused; the commented-out 'simulate' wrapper
    below shows where it belonged.
    """
    query_file = File()
    print("Amount: " + str(amount))
    print("Query: " + str(query))
    # cont = 'simulate [<=%s] {%s}' % (str(amount), query)
    contents = query
    query_file.open_file('/q2.q')
    return query_file.write_file_full(contents, file_name='/q2.q')
def make_imaginary_file(file_path):
    """
    :param file_path: the file we want to be imaginary
    :return: -

    Creates an empty placeholder file at the cloud-format variant of
    `file_path`.
    """
    new_file_path = File(file_path)
    new_file_path = new_file_path.change_file_to_cloud(CLOUD_FORMAT)
    # FIX: use a context manager for the create-and-close idiom instead of a
    # bare open()/close() pair, so the handle is released even on error.
    with open(new_file_path, 'wb'):
        pass
def writeDocument(self, outputDirectory):
    """
    Writes the current source code (with proper finalization) of the
    document in a .tex file created in a temporary directory. Then compiles
    it using pdflatex and moves the resulting pdf file to the specified
    output directory.

    @param string outputDirectory : Absolute path to the directory to where
    the document must be moved after being produced.
    @return :
    @author
    """
    # tries to create File objects to manipulate the source and the final documents
    try:
        self.finalDocument = File(outputDirectory, self.generateTitle() + '.pdf')  # final document
        self.texSource = File(os.path.join(outputDirectory, "temp_" + self.generateTitle()),
                              self.generateTitle() + '.tex')  # source code
    except:
        raise DocumentError("Failed to access output directory.")
    # produces the documents contents
    self.generateContents()
    try:
        self.contents += u"\n\\end{document}\n\n"  # finishes LaTeX document
    except:
        raise DocumentError("Failed while generating documents contents.")
    # opens a file to contain the LaTeX source code
    self.texSource.workWithFile()
    self.texSource.realFile = codecs.open(self.texSource.fileName, 'w', 'utf-8')
    # writes the content generated to source file
    self.texSource.realFile.write(self.contents)
    self.texSource.workWithFile()  # changes directory
    # compiles the source into a pdf document
    #try:
    #compilationCommand = 'pdflatex -interaction=nonstopmode --output-directory=' + outputDirectory + ' ' + self.texSource.fileName
    #output = commands.getoutput(compilationCommand)
    abspath = outputDirectory + '/' + 'temp_' + self.texSource.fileName[0:-4] + '/' + self.texSource.fileName
    proc = subprocess.Popen(shlex.split('pdflatex -interaction=nonstopmode --output-directory=' + outputDirectory + ' ' + abspath))
    proc.communicate()
    #try:
    #    abspath = outputDirectory + '/' + 'temp_' + self.texSource.fileName[0:-4] + '/' + self.texSource.fileName
    #    check_output(["pdftex", "-interaction=nonstopmode --output-directory=" + outputDirectory + ' ' + abspath], shell = True)
    #except CalledProcessError as e:
    #    return e.output
    #except:
    #    print b
    #    raise DocumentError("Failed to compile LaTeX source code.")
    # erases source files
    self.texSource.deleteFile(True)
    self.finalDocument.workWithFile()
    # NOTE(review): pdflatex by-products are removed from the current
    # directory; assumes workWithFile() chdir'd appropriately — confirm.
    os.remove(self.generateTitle() + ".aux")
    os.remove(self.generateTitle() + ".log")
    os.chdir(settings.PASTA_SUPERNOVA)
def pull_folder(path):
    """Run `git pull` in every repository under `path`; aborts (via Debug)
    when a repository has local changes."""
    if not File.exists(path):
        return
    for entry in File.get_files(path):
        if entry == ".DS_Store":
            continue
        repo_path = path + "/" + entry
        if has_changes(repo_path):
            Debug.throw(entry + " - has changes")
        pull(repo_path)
def list_repos(path):
    """Return folded paths of every git repository directly under `path`
    (empty list when the folder does not exist)."""
    if not File.exists(path):
        return []
    repos = []
    for entry in File.get_files(path):
        candidate = path + "/" + entry
        if is_git_repo(candidate):
            repos.append(File.fold_user(candidate))
    return repos
def run():
    """Build the project and launch the produced binary."""
    # Legacy layout check: a "sand" source tree overrides the project name.
    binary_name = "sand" if File.exists(build_dir + "/../source/sand") else project_name
    build()
    out_dir = File.full_path(build_dir) + "/"
    if File.exists(out_dir + "bin"):
        out_dir += "bin/"
    Shell.run([out_dir + binary_name])
def draw(self):
    """Announce a draw, tag the log file name with the result, and exit."""
    File.print_and_write("*******难分胜负!@_@")
    try:
        os.rename(File.file, File.file.split('.')[0] + "(平局).txt")
    except Exception:
        # Best effort: the rename may fail if the log is open or missing.
        pass
    input("按任意键结束…")
    exit(0)
def showContents(f):
    """Return the lines of Words/<f> for display.

    Empty files yield a one-element placeholder list; Sample.txt is never
    shown and yields the string "None".
    """
    if File.isEmpty("Words/" + f):
        return ["File is Empty"]
    if f == "Sample.txt":
        print("Sample Contents Not Shown. Select another File")
        return "None"
    return File.Read("Words/" + f)
def main(k, m="means", init_type="random"):
    """Cluster the MNIST training images with k-means, save representative
    images, score accuracy on the test set, and dump results to JSON.

    k: number of clusters.
    m: update method forwarded to Kmeans.kmeans.
    init_type: "random" for random centers; anything else falls back to
        k-means++ (and is renamed "kplusplus" for the output title).
    NOTE(review): train/test images and labels are read from module globals
    defined elsewhere in this file.
    """
    # Starting clustering timer
    start_cluster = timeit.default_timer()
    # Initialize clusters
    if init_type == "random":
        initial_clusters = Initialize.random_centers(k)
    else:
        init_type = "kplusplus"
        initial_clusters = Initialize.kmeans_plusplus(k, train_images_flat,
                                                      dist_fn=Distance.sumsq)
    # Run clustering algorithm
    final_responsibilities, final_clusters = Kmeans.kmeans(k, train_images_flat,
                                                           initial_clusters,
                                                           distfn=Distance.sumsq,
                                                           method=m)
    # Find and print clustering time
    end_cluster = timeit.default_timer()
    clustering_time = end_cluster - start_cluster
    print "Time spent clustering : ", clustering_time
    # Save representative images to file.
    title = m + "_" + init_type + "_cluster" + str(k)
    File.save_images(k, train_images, final_responsibilities, final_clusters,
                     title)
    ###########################################################################
    #                        Calculate Accuracy                               #
    ###########################################################################
    # Calculate final accuracy for clusters
    final, cluster_set = Accuracy.final_accuracy(final_responsibilities,
                                                 train_labels,
                                                 train_images_flat,
                                                 final_clusters)
    # Now see how well we can classify the dataset
    start_cluster_test = timeit.default_timer()
    predictions = ClassifyClusters.classify(cluster_set, test_images_flat,
                                            test_labels,
                                            distfn=Distance.sumsq)
    finish_cluster_test = timeit.default_timer()
    # find time it took to test
    testing_time = finish_cluster_test - start_cluster_test
    print "Time spent testing : ", testing_time
    ###########################################################################
    #                             Outputs                                     #
    ###########################################################################
    # k, prediction level, cluster_set,
    results = {"k": k,
               "prediction_accuracy": predictions[1],
               "cluster_means": cluster_set,
               "cluster_stats": final,
               "clustering_time": clustering_time,
               "testing_time": testing_time}
    with open('./results/' + title + '/' + title + '_results.json', 'w') as outfile:
        # NumpyEncoder serializes the ndarray cluster means.
        json.dump(results, outfile, cls=File.NumpyEncoder)
def processMenu(L):
    """Dispatch on the first entry of the menu-selection list `L`.

    Opens the matching submenu (recursing on its selection) or runs the
    chosen action.  Returns 1 for "Main Menu", 0 for "Quit", None otherwise
    (including when `L` is empty, via the IndexError handler).
    """
    try:
        # Load the next menu based on the selected choice
        if (L[0] == "Generate"):
            menu([
                "Generate Account Names", "Generate Passwords", "Hash Key",
                "Two Key Hash", "Main Menu"
            ], "Generate", 80)
            window = processMenu(File.Read("Output.txt"))
        if (L[0] == "Access Files"):
            menu(["Select File", "Create File", "Update File", "Main Menu"],
                 "Files", -20)
            window = processMenu(File.Read("Output.txt"))
        # NOTE(review): `file` below is not defined in this block — presumably
        # a module-level name (or the py2 builtin); confirm.
        if (L[0] == "Generate Account Names"):
            genName(file)
        if (L[0] == "Generate Passwords"):
            genPwd(file)
        # NOTE(review): hash() with no arguments shadows the builtin —
        # presumably a project-level hashing routine imported elsewhere.
        if (L[0] == "Hash Key"):
            hash()
        if (L[0] == "Two Key Hash"):
            twoKeyHash()
        if (L[0] == "Select File"):
            select()
            processSelect(File.Read("Output.txt"))
        if (L[0] == "Create File"):
            create()
            processCreate(File.Read("Output.txt"))
        if (L[0] == "Update File"):
            modify()
        if (L[0] == "About"):
            s = ""
            with open("Messages/about.txt", "r") as a:
                s = a.readlines(
                )  # a.read() for messagebox, a.readlines() for scrollable
            with open("Messages/readme.txt", "r") as r:
                s += r.readlines()
            ScrollText.Scrollable(s, "Info", "Readme for Keybasket 1.4.1",
                                  "850x320+0+0", False)
            # Redundant: the with-blocks already closed both handles.
            a.close()
            r.close()
        if (L[0] == "Main Menu"):
            return 1
        if (L[0] == "Quit"):
            return 0
    except IndexError:
        # Empty selection list: treat as a no-op.
        pass
def onOutputChanged(self, output):
    """Tweet the stream title when it changes, persist it, then delegate to
    the base Job handler."""
    if Config.POST_TO_TWITTER and output:
        try:
            title = output.get('title', None)
            if title and (not self.output
                          or self.output.get('title', None) != title):
                File.replaceJson(Config.STATUS_TITLE_FILE, title=title)
                StatusJob.API.PostUpdate(FixText.fitToSize(title))
        except:
            traceback.print_exc(file=sys.stdout)
    Job.Job.onOutputChanged(self, output)
def setRelation(matchObj, argList, tableConfPath, tableConfContent): dataType = matchObj.group().split()[1] if dataType != 'integer' and dataType != 'character': print '[Syntax Error] try "set --help" or "set -h" to get help.' return None elif dataType == 'integer': columnName = argList.pop(2) range = ' '.join(argList[-3:]) if len(argList) > 2 else 'range '+ str(-sys.maxint-1) + ' ' + str(sys.maxint) elif dataType == 'character': columnName = argList.pop() range = argList.pop() if len(argList) > 2 else '128' if tableConfContent.find(columnName) < 0: File.appendNewRecord(None, tableConfPath, columnName + ': ' + dataType + ' ' + range + '\n') else: print '[Setting Error] The column name already exist. Plz try "set --help" or "set -h" to get help.'
def __delete_attachment(self):
    """Delete files associated to assets.attachment.

    Returns True when there is nothing to delete, otherwise the result of
    File.delete_file on the attachment.
    """
    attachment = self._asset._file
    return File.delete_file(attachment) if attachment else True
def do_set(self, args):
    """'set' shell command (admin only): configure either an attribute
    relation or the table's primary key, depending on `args`."""
    if self.authority == 'admin':
        match = re.search('attribute\s*\w*', args)
        if match is None:
            # No "attribute" keyword: this may be a primary-key setting.
            if re.search('primary\s*key\s*\w*', args):
                argList = str2List(args)
                columnName = argList.pop()
                tableConfContent, tablePrimaryKeyIndex = File.findContentAndPrimaryKeyIndex(self.tableConf, self.tableConfPath, columnName)
                if tablePrimaryKeyIndex >= 0:
                    File.setPrimaryKeyColumn(self.tableConf, self.tableConfPath, tableConfContent, tablePrimaryKeyIndex)
                    self.primaryKeyExist = True
                elif tablePrimaryKeyIndex == -999:
                    # -999 is the sentinel for "a primary key already exists".
                    print '[Setting Error] Primary key already set.'
                else:
                    print '[Setting Error] No such column for primary key setting, try "set --help" or "set -h" to get help.'
            else:
                print '[Syntax Error] try "set --help" or "set -h" to get help.'
        else:
            # Attribute relation: delegate to DBMS with the current config.
            tableConfContent = File.findContentAndPrimaryKeyIndex(None, self.tableConfPath, '')[0]
            DBMS.setRelation(match, str2List(args), self.tableConfPath, tableConfContent)
    else:
        print '[Error] authority not enough.'
def findRoadsForCells(self, max, Loc, M):
    """Breadth-first expansion from cell `Loc` over map `M`, recording for
    every cell reachable within weight `max` the road (path of cells)
    leading to it.

    NOTE(review): `File` here is the FIFO-queue class (French: enfiler =
    enqueue, defiler = dequeue, fileVide = is-empty, getValeur = front) —
    not a file helper; confirm against the class used in this module.
    """
    if self.isOut(Loc):
        raise Exception("Location out of the Map")
    if not isinstance(max, int):
        raise Exception("Max n'est pas un int")
    # Reset the per-cell road bookkeeping, then seed the start cell with the
    # trivial road containing only itself.
    self.initializeRoadAllCells()
    self.setCellRoad(Loc, [Loc])
    F = File()
    F.enfiler(Loc)
    # Expand until the current cell exceeds the weight budget or the queue
    # runs dry.
    while self.getCellPoids(Loc) < max + 1 and not F.fileVide():
        for Loc2 in self.getCellListeVoisins(Loc, M):
            # Relax the neighbour when a shorter road was found, or when it
            # is still unvisited (weight 0).
            if self.getCellPoids(Loc2) > self.getCellPoids(Loc) + 1 or self.getCellPoids(Loc2) == 0:
                self.setCellRoad(Loc2, self.getCellRoad(Loc) + [Loc2])
                F.enfiler(Loc2)
        # Advance to the next frontier cell.
        Loc = F.getValeur()
        F.defiler()
def __call__(self):
    """Poll for files flagged under this task, merge upload metadata into
    them, and persist the updated records."""
    for _ in range(RETRIES):
        files = File.get_files_by_flag(self.task_id)
        if files:
            break
        time.sleep(1)
    files = self._cleanup(files)
    for record in files:
        full_path = os.path.join(ASSET_PATH, record['filename'])
        try:
            info = Upload.get_info(full_path)
        except:
            continue
        for key, value in info.items():
            # Store only the basename for the filename field.
            record[key] = os.path.basename(value) if key == 'filename' else value
    File.update_files(files)
def loadImgBMP(hDC, fileName):
    """Load `fileName` as a bitmap for the given device context.

    Raises IOError when the file is missing or the bitmap cannot be loaded.
    Returns the bitmap handle.

    FIX: `hbmp` is now initialized to None — in the original, an exception
    from UI.LoadImage left it unbound, so the `if not hbmp` check raised
    NameError instead of the intended IOError.
    """
    if not file.fileExists(fileName):
        raise IOError("File not found")
    hbmp = None
    err = False
    try:
        hbmp = UI.LoadImage(hDC, fileName, con.IMAGE_BITMAP, 0, 0,
                            con.LR_LOADFROMFILE | con.LR_LOADREALSIZE)
    except Exception:
        err = True
    if not hbmp or err:
        raise IOError("Failed to load bmp")
    return hbmp
def admin_files_delete():
    """Admin endpoint: delete the file referenced by the request payload.

    Best-effort removes the file and its thumbnail from disk, then deletes
    the database record; responds with a success/failure JSON message.
    """
    return_value = success('The file has been deleted.')
    payload = get_payload(request)
    afile = File.get(payload.get('id'))
    if not afile:
        return jsonify(failure('File not found.'))
    for target in (afile.path, afile.thumbnail_path):
        try:
            os.unlink(target)
        except OSError:
            pass
    afile.delete()
    safe_commit()
    return jsonify(return_value)
def edit_asset_apply():
    """CTK handler: apply an edit to an asset.

    Requires the editor role and edit rights on the asset; updates the
    posted fields, bumps the version, and duplicates the attached file.
    Returns an HTTP error on auth/validation failure, or {'ret': 'error'}
    when the attachment cannot be duplicated.
    """
    # Authentication
    fail = Auth.assert_is_role (Role.ROLE_EDITOR)
    if fail:
        return fail
    asset_id = CTK.post.pop('asset_id')
    if not asset_id:
        return CTK.HTTP_Error(400)
    # The caller must hold "ed" (edit) rights on this specific asset.
    acl = ACL()
    editable = acl.filter_assets ("ed" , [asset_id])
    if not int(asset_id) in editable:
        return CTK.HTTP_Error(401)
    # Update the asset
    asset = Asset(asset_id)
    asset['edited_flag'] = 0
    asset['creator_id'] = "%s" % (Auth.get_user_id())
    # NOTE(review): sql_values is never used below — dead local?  Confirm
    # before removing.
    sql_values = []
    # Only overwrite fields actually present in the POST data.
    for key in ['asset_types_id', 'licenses_id', 'title', 'description', 'language', 'subject']:
        if CTK.post[key]:
            asset[key] = CTK.post[key]
    # Always evolve the version number
    post_version = asset['version']
    if 'version' in CTK.post:
        post_version = int(CTK.post['version'])
    if post_version <= asset['version']:
        asset['version'] += 1
    else:
        asset['version'] = post_version
    # Duplicate the attached file
    try:
        attachment = File.clone_file (asset._file)
        asset._file = attachment
    except IOError,e:
        # If file copying is not possible, report and abort
        msg = 'File duplication could not be performed while editing asset ID %s.' %(asset_id)
        msg += '\n%s\n' %(str(e))
        print msg
        return {'ret':"error"}
def geocode(ip):
    """Resolve `ip` to {'latitude': ..., 'longitude': ...} via the geocode
    service, memoizing results in IP_MAP.

    Returns None when the geocode URL cannot be read.
    """
    if ip in IP_MAP:
        return IP_MAP[ip]
    latlong = {}
    url = Config.IP_GEOCODE_URL + ip
    try:
        lines = File.readUrl(url).split('\n')
    except:
        Logger.LOGGER.error("Couldn't read URL %s", url)
        return
    for line in lines:
        parts = line.split(':')
        # FIX: was `len(parts) is 2` — an identity comparison on an int that
        # only works because of CPython's small-int caching; use ==.
        if len(parts) == 2:
            name, value = parts
            if name in ['Latitude', 'Longitude']:
                latlong[name.lower()] = value.strip()
    IP_MAP[ip] = latlong
    return latlong
def doRun(self):
    """Fetch this job's URL, process the response, and propagate output
    changes through onOutputChanged."""
    page = File.readUrl(Config.ROOT_URL + self.desc.url)
    processed = self.process(page)
    if processed != self.output:
        self.onOutputChanged(processed)
def __init__(self, desc):
    """Restore the job's last output from its data file (empty dict when
    the file is absent or empty)."""
    self.desc = desc
    raw = File.readFile(self.dataFile()) or '{}'
    self.output = json.loads(raw)
#!/usr/bin/env python2 # # Entrance point to Crocode # # For checking the arguments import sys # For interpreting import Interactive import File # Error handling def err(val): pass if len(sys.argv) > 2: print "Proper usage: crocode (<file location>)" elif len(sys.argv) == 2: val = File.fload(sys.argv[1]) else: val = Interactive.interactive() if not val: err(val) else: print "G'day mate."
def onOutputChanged(self, output):
    """Atomically persist the new output to the job's data file, then cache
    it on the instance."""
    serialized = json.dumps(output)
    File.replaceAtomic(self.dataFile(), serialized)
    self.output = output
class Document (object):
    """
    Class that models a LaTeX document, providing operations to create, add
    content to and produce a pdf document through a LaTeX.

    :version:
    :author:
    """

    """ ATTRIBUTES

    String containing the LaTeX source code for the document.
      contents  (public)

    PDF file that contains the final version of the document produced
    through the compilation of the LaTeX source code.
      finalDocument  (public)

    Absolute path to the directory where are stored the templates to be
    used to generate content for the document.
      templateFolder  (public)

    Tex file which contains the source code for the document.
      texSource  (public)
    """

    def __init__(self, templateFolder):
        """
        Constructor method.

        @param string templateFolder : Absolute path to the directory where
        are stored the templates to be used to generate content for the
        document.
        @return :
        @author
        """
        # verifies validity of templateFolder parameter
        if not isinstance(templateFolder, str):
            raise DocumentError("Invalid templateFolder parameter: must be a string.")
        elif not os.path.exists(templateFolder):
            raise DocumentError("Invalid templateFolder parameter: directory does not exist.")
        else:
            # tries to access the directory
            try:
                os.chdir(templateFolder)
            except:
                raise FileError("Invalid templateFolder parameter: access denied.")
            # attribute initialization
            self.templateFolder = templateFolder
            self.contents = ""
            self.finalDocument = None
            self.texSource = None

    def generateContents(self):
        """
        Generates a string with the contents of the LaTeX source code that
        will be compiled in order to produce the document. Must be
        overwritten by child classes in order to generate the documents
        specific contents.

        @return :
        @author
        """
        # abstract method
        pass

    def generateTitle(self):
        """
        Produces the file name for the document.

        @return :
        @author
        """
        # abstract method
        pass

    def insertNewPage(self):
        """
        Inserts new page in the documents contents.

        @return :
        @author
        """
        self.contents += "\n\\newpage\n"

    def renderFromTemplate(self, templateFileName, templateParameters):
        """
        Concatenates, with the document contents, a string generated through
        the renderization of a specified template with the specified list of
        parameters.

        @param string templateFileName : Name of the template used to
        generate part of the documents contents.
        @param {} templateParameters : Dictionary of parameters to be used
        by the template in order to generate contents for the document.
        @return :
        @author
        """
        # verifies if the parameters are valid
        if not isinstance(templateFileName, str):
            raise DocumentError("Invalid templateFileName parameter: must be a string")
        if not isinstance(templateParameters, dict):
            raise DocumentError("Invalid templateParameters parameter: must be a dictionary.")
        # opens template
        #try:
        templateModel = Template(codecs.open(os.path.join(self.templateFolder, templateFileName), 'r', 'utf-8').read())
        #except:
        #    raise DocumentError("Template error: could not open template.")
        # renders template
        try:
            self.contents += templateModel.render(Context(templateParameters))
        except:
            raise DocumentError("Template error: could not render template.")

    def writeDocument(self, outputDirectory):
        """
        Writes the current source code (with proper finalization) of the
        document in a .tex file created in a temporary directory. Then
        compiles it using pdflatex and moves the resulting pdf file to the
        specified output directory.

        @param string outputDirectory : Absolute path to the directory to
        where the document must be moved after being produced.
        @return :
        @author
        """
        # tries to create File objects to manipulate the source and the final documents
        try:
            self.finalDocument = File(outputDirectory, self.generateTitle() + '.pdf')  # final document
            self.texSource = File(os.path.join(outputDirectory, "temp_" + self.generateTitle()),
                                  self.generateTitle() + '.tex')  # source code
        except:
            raise DocumentError("Failed to access output directory.")
        # produces the documents contents
        self.generateContents()
        try:
            self.contents += u"\n\\end{document}\n\n"  # finishes LaTeX document
        except:
            raise DocumentError("Failed while generating documents contents.")
        # opens a file to contain the LaTeX source code
        self.texSource.workWithFile()
        self.texSource.realFile = codecs.open(self.texSource.fileName, 'w', 'utf-8')
        # writes the content generated to source file
        self.texSource.realFile.write(self.contents)
        self.texSource.workWithFile()  # changes directory
        # compiles the source into a pdf document
        #try:
        #compilationCommand = 'pdflatex -interaction=nonstopmode --output-directory=' + outputDirectory + ' ' + self.texSource.fileName
        #output = commands.getoutput(compilationCommand)
        abspath = outputDirectory + '/' + 'temp_' + self.texSource.fileName[0:-4] + '/' + self.texSource.fileName
        proc = subprocess.Popen(shlex.split('pdflatex -interaction=nonstopmode --output-directory=' + outputDirectory + ' ' + abspath))
        proc.communicate()
        #try:
        #    abspath = outputDirectory + '/' + 'temp_' + self.texSource.fileName[0:-4] + '/' + self.texSource.fileName
        #    check_output(["pdftex", "-interaction=nonstopmode --output-directory=" + outputDirectory + ' ' + abspath], shell = True)
        #except CalledProcessError as e:
        #    return e.output
        #except:
        #    print b
        #    raise DocumentError("Failed to compile LaTeX source code.")
        # erases source files
        self.texSource.deleteFile(True)
        self.finalDocument.workWithFile()
        # NOTE(review): removes pdflatex by-products from the current
        # directory; assumes workWithFile() chdir'd appropriately — confirm.
        os.remove(self.generateTitle() + ".aux")
        os.remove(self.generateTitle() + ".log")
        os.chdir(settings.PASTA_SUPERNOVA)
import itertools
from operator import itemgetter
import time
import Util
import File
import Cycle
from Markets import Markets
from Moves import Moves
import Neighborhood

start = time.time()

# Read the initial data from the input file and seed the problem with a
# trivial solution.  (Comments translated from Italian.)
markets = []
markets = File.readFile()
cycles_dictionary = Cycle.CreateCycles()

# "Solution" here means the current state of the supermarkets.
penality = 0
# NOTE(review): `copy` is used below but not imported in this chunk —
# presumably imported elsewhere in the file; verify.
bestsolution = copy.deepcopy(markets)
bestsolutioncost, exceeded = Markets.cost(bestsolution, cycles_dictionary, penality)
print "\t\t\t\tBest solution cost: ", bestsolutioncost
sk = copy.deepcopy(markets)
sk = Markets.updateWeights(sk)

# State for the tabu-search loop that follows (loop body continues beyond
# this chunk).
tabulist = []
newneighborhood = True
k = 0
stallcounter = 0
import os, struct, random, sys
import numpy as np
import matplotlib.pyplot as plt
from array import array as pyarray
from numpy import append, array, int8, uint8, zeros
from sklearn.cluster import KMeans
import File, Accuracy

###############################################################################
#                                Load Data                                    #
###############################################################################
# load training and testing images and labels as 60,000 x 28 x 28 array
train_images, train_labels = File.load_mnist("training", path=os.getcwd())
test_images, test_labels = File.load_mnist("testing", path=os.getcwd())

# flatten training images into 60,000 x 784 array (one row per image)
train_images_flat = np.array([np.ravel(img) for img in train_images])
test_images_flat = np.array([np.ravel(img) for img in test_images])

###############################################################################
#                            Run Scikit_learn                                 #
###############################################################################
k = int(sys.argv[1])  # number of clusters (system argument)

# Train k means model with k-means++ initialization, 10 restarts
kmeans = KMeans(init='k-means++', n_clusters=k, n_init=10)
kmeans_fit = kmeans.fit(train_images_flat)

# Get the cluster assignments of each point of training images
kmeans_labels = kmeans_fit.labels_
def initFromFastqFile(cls, ffq):
    """Alternate constructor: wrap the FASTQ path `ffq` in a replicate
    object, naming it after the file's basename."""
    replicate = cls()
    replicate.fastq = File.initFromFullFileName(ffq)
    replicate.name = replicate.fastq.basename
    return replicate
#import all content in File.py import File print File.filePath #variable in the py is also imported File.readSampleFile() #import only one function from File import readSampleFile #File.filePath won't be available here readSampleFile() #import Line class from OOP import Line line = Line((1,2),(3,4)) print line.distance() print line.slope()
def initFromBedFile(cls, fbed):
    """Alternate constructor: wrap the BED path `fbed` in a replicate
    object, naming it after the file's basename."""
    replicate = cls()
    replicate.tagalign = File.initFromFullFileName(fbed)
    replicate.name = replicate.tagalign.basename
    return replicate