async def link(f_client, message, content):
    """Look up or store a short link keyed by content[1].

    Replies in-channel and returns early when:
    - any token contains ';' (the on-disk field separator),
    - the key already exists (the stored link is sent back),
    - the lookup raises (user gets "deu n"),
    - the new content is empty or 100+ characters long.
    Otherwise appends [key, content] to the 'links' file.
    """
    # ';' is the storage separator, so it may not appear in any token.
    for token in content:
        if ';' in token:
            await message.channel.send("sem ponto e virgula")
            return
    try:
        link_key = content[1]
        link_content = read.readFile('links', DATA_DIR, link_key)
        if link_content is not False:
            # Key already stored: reply with the saved link and stop.
            await message.channel.send(link_content[0][1])
            return
        else:
            print(read.ERROR_MESSAGE[read.ERROR_LOG])
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed. Covers missing content[1] (IndexError)
        # and read errors alike.
        await message.channel.send("deu n")
        return
    # Join the remaining tokens into the content to store
    # (join replaces the old quadratic `+=` loop).
    link_content = "".join(content[2:])
    if len(link_content) == 0:
        await message.channel.send("conteudo vazio ou link nao existe")
        return
    elif len(link_content) >= 100:
        await message.channel.send("mt gtrande")
        return
    link_list = [[link_key, link_content]]
    read.writeFile(link_list, 'links', DATA_DIR, separator=';')
async def on_ready(self):
    """Client ready hook: report the login, configure the command
    prefix, load the moderator list, and authenticate with Twitter."""
    print(f'Logged on as {self.user}')
    # Word the bot listens for at the start of commands.
    self.prefixo = "computador"
    # Moderator ids are read from the bot_mods file in CURRENT_DIR.
    self.BOT_MODS = read.readFile("\\bot_mods", CURRENT_DIR)
    # authenticate twitter
    self.auth_Twitter()
def OpenFile():
    """Prompt the user for a CSV file, parse it with readFile, and
    return the raw file text.

    Returns:
        The file's text contents, or None when the dialog was cancelled
        or the file could not be opened.
    """
    name = askopenfilename(initialdir="",
                           filetypes=(("Text File", "*.csv"),
                                      ("All Files", "*.*")),
                           title="Choose a file.")
    # Dialog cancelled: askopenfilename returns an empty string, which
    # previously crashed readFile() before the try below could catch it.
    if not name:
        print("No file exists")
        return None
    dataset = readFile(name)
    print(dataset)
    # Using try in case the user typed an unknown file name; was a bare
    # `except:` that also hid KeyboardInterrupt and real bugs.
    try:
        with open(name, 'r') as UseFile:
            fileDataset = UseFile.read()
    except OSError:
        print("No file exists")
        return None
    return fileDataset
def make_dataset(batch_size=15):
    """Build sliding-window training pairs from the tables of readFile().

    For every name list in the table, per-name features (x) and targets
    (y) are loaded via slim_launch; then each window of `batch_size`
    consecutive feature rows is paired with the target immediately
    following the window.

    Parameters:
        batch_size: window length (consecutive rows per sample).

    Returns:
        (X, Y): lists of numpy arrays — X[i] stacks `batch_size` feature
        rows, Y[i] is the target right after that window.
    """
    table = readFile()
    X, Y = [], []
    # Kept: debug/progress output the caller may be watching.
    print(len(table))
    for name_list in table:
        data_x, data_y = [], []
        for name in name_list:
            # slim_launch yields (y, x) — note the swapped order.
            y, x = slim_launch(name)
            # Convert once here (the original converted, then converted
            # the whole list again; one pass is enough).
            data_x.append(np.array(x))
            data_y.append(np.array(y))
        data_x, data_y = np.array(data_x), np.array(data_y)
        # Slide a window over the rows; label = the row after the window.
        for i in range(len(name_list) - batch_size):
            X.append(np.array(data_x[i:i + batch_size]))
            Y.append(np.array(data_y[i + batch_size]))
    return X, Y
# -*- coding: utf-8 -*- from arnold import * from mtoa.core import createOptions createOptions() import maya.cmds as cmds import read reload(read) fname = cmds.fileDialog2(fm=1, ff='FFVII: Remake .uexp (*.uexp);;')[0] time = read.readFile(fname) print "\n\n" + 'Import Time: ' + str(time) + ' seconds' + "\n\n" cmds.setAttr("defaultArnoldRenderOptions.renderDevice", 1) cmds.setAttr("defaultArnoldRenderOptions.AASamples", 10) cmds.setAttr("defaultArnoldRenderOptions.GIDiffuseDepth", 2) cmds.setAttr("defaultArnoldRenderOptions.GITotalDepth", 16) cmds.setAttr("defaultArnoldRenderOptions.autoTransparencyDepth", 32)
import read as r

# Training data: presumably a list of "x,y" CSV rows (see getValue) —
# verify against read.readFile.
dataSet = r.readFile()

# Univariate linear regression via batch gradient descent.
ALPHA = 0.026                       # learning rate
THETA_0, THETA_1, nrIterations = 0, 0, 0
M = float(len(dataSet))             # number of samples
CONVERGENCE_LIMIT = 10**-4 * 1.15   # stop threshold on cost change
ITERATIONS_LIMIT = M**2             # safety cap on iteration count

def getValue(row, i):
    # Integer value of column i in a comma-separated row.
    return int(row.split(',')[i])

def costFunction(THETA_0, THETA_1):
    """Mean squared error cost J(theta0, theta1) over dataSet."""
    SUM = 0
    for row in dataSet:
        X = getValue(row, 0)
        Y = getValue(row, 1)
        SUM += (THETA_0 + THETA_1 * X - Y) ** 2
    cost = 1/(2*M) * SUM
    return cost

#gradient descent iterations
# NOTE(review): this loop is truncated in the visible source — the
# gradient accumulation, parameter update and stopping condition
# continue beyond this chunk.
while True:
    SUM_THETA_0, SUM_THETA_1 = 0, 0
    cost = costFunction(THETA_0, THETA_1)
    for row in dataSet:
        X = getValue(row, 0)
        Y = getValue(row, 1)
import read as r

# Training data: presumably a list of "x,y" CSV rows (see getValue) —
# verify against read.readFile.
dataSet = r.readFile()

# Univariate linear regression via batch gradient descent.
ALPHA = 0.026                       # learning rate
THETA_0, THETA_1, nrIterations = 0, 0, 0
M = float(len(dataSet))             # number of samples
CONVERGENCE_LIMIT = 10**-4 * 1.15   # stop threshold on cost change
ITERATIONS_LIMIT = M**2             # safety cap on iteration count

def getValue(row, i):
    # Integer value of column i in a comma-separated row.
    return int(row.split(',')[i])

def costFunction(THETA_0, THETA_1):
    """Mean squared error cost J(theta0, theta1) over dataSet."""
    SUM = 0
    for row in dataSet:
        X = getValue(row, 0)
        Y = getValue(row, 1)
        SUM += (THETA_0 + THETA_1 * X - Y)**2
    cost = 1 / (2 * M) * SUM
    return cost

#gradient descent iterations
# NOTE(review): this loop is truncated in the visible source — the
# gradient accumulation and parameter updates continue past this chunk.
while True:
    SUM_THETA_0, SUM_THETA_1 = 0, 0
    cost = costFunction(THETA_0, THETA_1)
# Proyecto Matematicas Computacionales # # Isaac Halabe # Matricula: A01021800 # Contacto: [email protected] # # Victor Ferzuli # Matricula: A01022166 # Contacto: [email protected] # Importacion de archivos from read import readFile from user import readUser from run import run automata = readFile() cadena = readUser() run(automata, cadena)
def run(args: str = None):
    """Entry point for the ZIP tool: parse CLI args and dispatch to the
    add / remove / extract / info action.

    Opens (or creates, for `add`) the target zip, parses the end-of-
    central-directory record and every central-directory entry via mmap,
    then performs the requested action. `add`/`remove` rewrite the
    archive through a temporary file; `extract` writes decompressed,
    crc32-checked files under args.output; `info` prints a listing.
    """
    # Get args given to program
    args = argparser.parser.parse_args(args)
    origin: open = None
    centralDirectory = []
    info.set(args.verbosity)
    info.print(args, 3)
    # Check if zip file is empty
    if not os.path.exists(args.zip):
        # If action is add an empty file is allowed
        if args.action != "add":
            info.print("There is no file named: " + args.zip + "\nQuiting...", 0)
            exit(0)
        # Therefore create the file
        else:
            origin = open(args.zip, "x+b")
    # Open the file
    else:
        if args.action in ["add", "remove"]:
            # Make backup file; the rewritten zip is produced from it
            shutil.move(args.zip, args.zip + ".bak.zip")
            # Open zip file
            origin = open(args.zip + ".bak.zip", "r+b")
        else:
            origin = open(args.zip, "r+b")
    # Check if zip file by reading first 2 characters ("PK" magic)
    test = origin.readline(2)
    if test != b"PK":
        # NOTE(review): readline() returns b"" at EOF, never None, so
        # `test == None` looks unreachable — confirm intent (probably
        # meant to detect an empty/new file).
        if args.action != "add" and test == None:
            print("File " + args.zip + " is not a zip file. \nQuiting...")
            exit(0)
    else:
        info.print("Reading central header...", 2)
        # Read central header use MMAP to find central header
        zipmm = mmap.mmap(origin.fileno(), 0, access=mmap.ACCESS_READ)
        # Find start of central directory (end-of-central-dir signature,
        # searched from the end of the file)
        centralstart = zipmm.rfind(b'\x50\x4b\x05\x06')
        # Seek file to that position
        zipmm.seek(centralstart)
        # Read central header into a namedtuple
        endofcentral = structs.endOfCentral._make(
            structs.endOfCentralStruct.unpack(
                zipmm.read(structs.endOfCentralStruct.size)))
        # Start reading the entries
        zipmm.seek(endofcentral.offsetcentral)
        for _ in range(endofcentral.totalentries):
            # Create entry object: header namedtuple plus the three
            # variable-length fields that follow it
            entry = {}
            # Unpack header from file
            centralheader = structs.centralHeader._make(
                structs.centralHeaderStruct.unpack(
                    zipmm.read(structs.centralHeaderStruct.size)))
            info.print(centralheader, 3)
            # Unpack other info from the file
            entry["header"] = centralheader
            entry["filename"] = sanitizePath(
                str(zipmm.read(centralheader.filenamelen), 'utf-8'))
            entry["extra"] = zipmm.read(centralheader.extralen)
            entry["comment"] = str(zipmm.read(centralheader.commentlen), 'utf-8')
            # Add file to central directory
            centralDirectory.append(entry)
    # Call function to handle each task case
    if args.action == "add":
        info.print("Adding files...")
        offset = 0
        newfile = tempfile.NamedTemporaryFile("w+b")
        addpath = sanitizePath(args.path)
        # Copy existing entries, skipping ones about to be overridden
        for file in centralDirectory:
            # Check if file is in the zip file allready
            if addpath + os.sep + file["filename"] in [
                    addpath + os.sep + name for name in args.files
            ]:
                info.print('Overriding "' + file["filename"] + '" found in zip file.')
                # remove file from central directory
                # NOTE(review): removing from centralDirectory while
                # iterating it — verify no entry is skipped.
                centralDirectory.remove(file)
                continue
            info.print(
                'Copying file "' + file["filename"] + '" to zip file...', 1)
            # Read file contents (local header, name, extra, data)
            header, fname, extra, file = readFile(origin, file["header"].localoffset)
            # Write file contents
            towrite = structs.headerStruct.pack(*header) + fname + extra + file
            newfile.write(towrite)
            # Increment file offset
            offset += len(towrite)
        for file in args.files:
            info.print('Adding "' + file + '" to zip file.')
            # Get file metadata and compressed payload
            write, header = add(file, addpath, args.compresstype, offset)
            # Write file
            newfile.write(write)
            # Increment offset
            offset += len(write)
            # Add file to centralDirectory
            centralDirectory.append(header)
        # Write directory to file, then swap the temp file into place
        writeDirectory(newfile, centralDirectory, offset)
        writeChanges(args.zip, newfile)
    elif args.action == "remove":
        offset = 0
        newfile = tempfile.NamedTemporaryFile("w+b")
        newCentralDirectory = []
        # Copy every entry that is not being removed
        for file in centralDirectory:
            # Ensure file is not to be removed
            if file["filename"] not in args.files:
                # Read the file
                header, fname, extra, content = readFile(
                    origin, file["header"].localoffset)
                # And write it into the new ZIP file
                towrite = structs.headerStruct.pack(
                    *header) + fname + extra + content
                # Fix offset in central directory (entries moved because
                # earlier files may have been dropped)
                newDirectory = file
                newHeader: structs.centralHeader = file["header"]
                newHeader = newHeader._replace(localoffset=offset)
                newDirectory["header"] = newHeader
                newCentralDirectory.append(newDirectory)
                # Write all changes
                offset += len(towrite)
                newfile.write(towrite)
            else:
                info.print('Removing "' + file["filename"] + '" from zip file.')
        if not newCentralDirectory:
            print("Removed all files")
            quit(0)
        # Finish writing the ZIP file
        writeDirectory(newfile, newCentralDirectory, offset)
        writeChanges(args.zip, newfile)
    elif args.action == "extract":
        for file in centralDirectory:
            # No folders 7zip!!! (directory entries end with "/")
            if file["filename"][-1] == "/":
                continue
            # Only extract the files requested; empty args.files = all
            if args.files:
                if file["filename"] not in args.files:
                    continue
            info.print('Extracting: "' + file["filename"] + '"')
            # Read the file
            header, fname, extra, content = readFile(
                origin, file["header"].localoffset)
            # Decompress the file
            data = compress.Decompress(content, header.compression)
            # Check that file matches crc32
            if crc32(data) != header.checksum:
                info.print('File: ' + file["filename"] + ' failed crc32. Skipping...')
                continue
            # Make the directories for the file
            outputpath = os.path.join(args.output, file["filename"])
            os.makedirs(os.path.dirname(outputpath), exist_ok=True)
            # Write the file
            with open(outputpath, "w+b") as _file:
                _file.write(data)
            # Try to update the time to the DOS modtime from the header
            try:
                time = mktime(header.modtime, header.moddate)
                time = time.timestamp()
                os.utime(outputpath, (time, time))
            # Windows sometimes wont let this operation happen
            except OSError:
                pass
    elif args.action == "info":
        info.print("Zip file: " + os.path.basename(args.zip))
        info.print("Files:")
        # Print basic info from the central directory
        for file in centralDirectory:
            header = file["header"]
            # NOTE(review): "Рци " looks like mojibake (perhaps a
            # tree/bullet glyph) — confirm the intended prefix.
            info.print("Рци " + file["filename"] + " " +
                       compress.CompressionTypes(header.compression).name +
                       sizeof_fmt(header.uncommpressedsize))
from dataSimplex import dataSimplex
from read import readFile

# Load a simplex problem from a user-chosen file and echo its contents.
fileName = input('please enter a filename: ')
problem = dataSimplex()
readFile(fileName, problem)

# Dimensions first, then one constraint per line:
# coefficient row, relation, right-hand side.
print(problem.n, problem.m)
for row in range(problem.m):
    print(problem.matrixA[row][:], problem.unequals[row], problem.vecB[row])
async def on_ready(self):
    """Ready hook: announce the logged-in user, set the command
    prefix, and load the moderator id list from disk."""
    print(f'Logged on as {self.user}')
    # Word that triggers the bot's commands.
    self.prefixo = "computador"
    # load id from mods: the bot_mods file lives next to this script.
    self.BOT_MODS = read.readFile("bot_mods", __location__)