def mayor(c1, c2):
    """Compare the sizes of two queues and print which one is larger.

    Fix: corrected the typo "Segundoa" -> "Segunda" in the printed message.
    Note: when both sizes are equal the first queue is reported as larger,
    matching the original behavior.
    """
    may = C.tamcola(c1)
    if C.tamcola(c2) > may:
        print("Segunda palabra es mas grande que la primera")
    else:
        print("La primera palabra es mas grande que la segunda")
def test_basic(self):
    """Exercise C.solve on three scripted operation sequences."""
    ops = [('!', 'abc'), ('.', 'ad'), ('.', 'b'), ('!', 'cd'), ('?', 'c')]
    self.assertEqual(C.solve(ops), 1)
    ops = [('!', 'hello'), ('!', 'codeforces'), ('?', 'c'), ('.', 'o'),
           ('?', 'd'), ('?', 'h'), ('.', 'l'), ('?', 'e')]
    self.assertEqual(C.solve(ops), 2)
    ops = [('!', 'ababahalamaha'), ('?', 'a'), ('?', 'b'), ('?', 'a'),
           ('?', 'b'), ('?', 'a'), ('?', 'h')]
    self.assertEqual(C.solve(ops), 0)
    print("basicOK")
def new(line):
    """Parse an instruction line, delegating to whichever parser accepts it.

    Tries A first, then C; raises SyntaxError if neither accepts the line.
    """
    stripped = line.strip()
    for parser in (A, C):
        if parser.accept(stripped):
            return parser.new(stripped)
    raise SyntaxError("Unknown instruction", (None, -1, 0, stripped))
def assertIO(self, input, output):
    """Run C.resolve() with *input* fed to stdin and assert stdout == *output*.

    Fix: the original never restored sys.stdout/sys.stdin if C.resolve()
    raised, leaving the interpreter's streams redirected for all later
    tests; the restoration now happens in a finally block.
    """
    stdout, stdin = sys.stdout, sys.stdin
    sys.stdout, sys.stdin = StringIO(), StringIO(input)
    try:
        C.resolve()
        sys.stdout.seek(0)
        out = sys.stdout.read()[:-1]  # drop the trailing newline
    finally:
        sys.stdout, sys.stdin = stdout, stdin
    self.assertEqual(out, output)
def Invertir(cola):
    """Reverse *cola* in place by draining it through a stack.

    Fix: both transfer loops used `or`, so they kept running after one side
    was exhausted/full and would call C.supresion / P.desapilar on an empty
    structure; `and` stops as soon as either stop condition is met.
    """
    pila = P.Pila()
    P.crearpila(pila)
    while (not C.colavacia(cola)) and (not P.pilallena(pila)):
        y = C.supresion(cola)
        P.apilar(pila, y)
    while (not P.pilavacia(pila)) and (not C.colallena(cola)):
        z = P.desapilar(pila)
        C.insertarcola(cola, z)
def test_basic(self):
    """Check C.solve against known input/output pairs."""
    cases = (
        (16, [1, 10, 28, 64, 136]),
        (6, [1, 5, 9, 21]),
        (2, [1, 3]),
        (3, [1, 6]),
    )
    for arg, expected in cases:
        self.assertEqual(C.solve(arg), expected)
    print("basicOK")
def test_basic(self):
    """Check C.solve on (x, y) pairs."""
    # Every case corresponds to a PRINT YES scenario upstream.
    for args, expected in (((9, 4), [1, 8]),
                           ((8, 1), [8]),
                           ((5, 1), []),
                           ((3, 7), [])):
        self.assertEqual(C.solve(*args), expected)
    print("basicOK")
def backup_remove_curmirror_local():
    """Remove the older of the current_mirror files. Use at end of session"""
    assert Globals.rbdir.conn is Globals.local_connection
    curmir_incs = restore.get_inclist(Globals.rbdir.append("current_mirror"))
    assert len(curmir_incs) == 2
    # Keep the newer increment; on a timestamp tie the second one is removed,
    # matching the original selection.
    first, second = curmir_incs
    older_inc = first if first.getinctime() < second.getinctime() else second
    C.sync()  # Make sure everything is written before curmirror is removed
    older_inc.delete()
def __init__(self):
    """Build dense and sparse probability matrices from A, B and C,
    then normalize them all."""
    dense = [Scalar.make_prob(m) for m in (A, B, C)]
    self.A, self.B, self.C = dense
    sparse = [Sparse.make_prob(m) for m in dense]
    self.A_s, self.B_s, self.C_s = sparse
    self.Ms = tuple(dense) + tuple(sparse)
    for M in self.Ms:
        M.normalize()
    return
def identicas(c1, c2):
    """Return True when the queues hold pairwise-equal elements.

    Drains c1 completely (and one element of c2 per element of c1),
    counting matching pairs; the queues are identical when every pair
    matched.
    """
    matches = 0
    initial_size = C.tamcola(c1)
    while not C.colavacia(c1):
        if C.supresion(c1) == C.supresion(c2):
            matches += 1
    return matches == initial_size
def test_basic(self):
    """Verify C.solve output with the problem-specific `valid` checker."""
    for data in ([5, 6], [2, 1, 2], [9, 9, 10, 9, 10, 11]):
        result = C.solve(data)
        print(result)
        self.assertTrue(valid(data, result))
    print("basicOK")
def __init__(self):
    """Build plain, C and sparse HMMs over the same parameters and
    simulate a shared observation sequence."""
    def fresh_params():
        # Each model receives its own copies so none can mutate
        # another model's probability tables.
        return (P_S0.copy(), P_S0_ergodic.copy(), P_YS.copy(), P_SS.copy())
    self.mod = HMM(*fresh_params())
    self.Cmod = C.HMM(*fresh_params())
    self.Smod = C.HMM_SPARSE(*fresh_params())
    self.mods = (self.mod, self.Cmod, self.Smod)
    self.S, Y = self.mod.simulate(1000)
    Y = (np.array(Y[0], np.int32), )
    self.Y = Y
    return
def test_basic(self):
    """Check C.solve on (a, b, c) triples.

    Fix: corrected the status message typo ("bbasicOK" -> "basicOK") so it
    matches the other test methods in this suite.
    """
    self.assertEqual(C.solve(3, 3, 0), 3)
    self.assertEqual(C.solve(3, 2, 1), 4)
    self.assertEqual(C.solve(1, 1, 0), 1)
    self.assertEqual(C.solve(1, 10, 0), 10)
    self.assertEqual(C.solve(5, 2, 0), 2)
    print("basicOK")
def EA2Record(ea):
    """Convert ExtendedAttributes object to text record"""
    lines = ["# file: %s" % C.acl_quote(ea.get_indexpath())]
    for (name, val) in ea.attr_dict.iteritems():
        if not val:
            lines.append(name)
            continue
        # encodestring wraps long output; strip its embedded newlines.
        encoded_val = base64.encodestring(val).replace("\n", "")
        try:
            lines.append("%s=0s%s" % (C.acl_quote(name), encoded_val))
        except UnicodeEncodeError:
            log.Log("Warning: unable to store Unicode extended attribute %s"
                    % repr(name), 3)
    return "\n".join(lines) + "\n"
def __main__(argv):
    """Rename media/known/unknown files under ROOTDIR to their content hash,
    writing an __UNRENAME__.zsh undo script for any real renames.

    Fix: the known-file branch built the new name with
    thisEntry_[FO.EXTTAIL.lower()] (lower-casing the dict KEY) instead of
    thisEntry_[FO.EXTTAIL].lower() (lower-casing the extension), unlike the
    other two branches. The three near-identical branches are also
    collapsed into the _rename_entry helper.
    """
    C.setOptions(argv)
    # print(f"argv |{argv}|{C.NEWLINE}")
    C.OPTIONSDICT[C.ROOTDIR] = FO.ABSPATH(C.OPTIONSDICT[C.ROOTDIR])
    print(f"C.OPTIONSDICT |{C.OPTIONSDICT}|{C.NEWLINE}")
    C.OPTIONSDICT[C.ROOTDIR] = FO.fxDir(C.OPTIONSDICT[C.ROOTDIR])
    print(f"C.OPTIONSDICT |{C.OPTIONSDICT}|{C.NEWLINE}")
    if C.OPTIONSDICT[C.ROOTDIR] is None:
        C.doError(
            f"illegal directory selected{C.NEWLINE}argv |{argv}|{C.NEWLINE}")
        C.exit(-1)
    dirList_ = FO.scanADir(C.OPTIONSDICT[C.ROOTDIR],
                           C.OPTIONSDICT[FO.RECURSE])
    with open(f"{C.OPTIONSDICT[C.ROOTDIR]}__UNRENAME__.zsh", "tw") as FDOut:
        for thisEntry_ in dirList_:
            if thisEntry_[FO.ISMEDIAFILETYPE] is True and C.OPTIONSDICT[
                    FO.MEDIAFILES] is True:
                # NOTE(review): this branch reads the hasher from C.HASHER
                # while the other two use FO.HASHER -- confirm which key is
                # actually intended.
                _rename_entry(thisEntry_, C.OPTIONSDICT[C.HASHER], FDOut)
            elif thisEntry_[FO.ISKNOWNFILETYPE] is True and C.OPTIONSDICT[
                    FO.KNOWNFILES] is True:
                _rename_entry(thisEntry_, C.OPTIONSDICT[FO.HASHER], FDOut)
            elif thisEntry_[FO.ISUNKNOWNFILETYPE] is True and C.OPTIONSDICT[
                    FO.UNKNOWNFILES] is True:
                _rename_entry(thisEntry_, C.OPTIONSDICT[FO.HASHER], FDOut)


def _rename_entry(thisEntry_, hasher, FDOut):
    """Hash one directory entry; in a trial run just print the would-be
    `mv`, otherwise rename the file and append an undo `mv` to FDOut."""
    hashedName_ = C.doAHash(hasher, f"{thisEntry_[FO.PATH]}")
    allNewFilename_ = (f"{thisEntry_[FO.PATHHEAD]}/"
                       f"{hashedName_}{thisEntry_[FO.EXTTAIL].lower()}")
    if C.OPTIONSDICT[C.TRIALRUN] is True:
        print(
            f"mv {C.DBLQT}{thisEntry_[FO.PATH]}{C.DBLQT} "
            f"{C.DBLQT}{allNewFilename_}{C.DBLQT}{C.NEWLINE}"
        )
    else:
        FO.RENAME(thisEntry_[FO.PATH], allNewFilename_)
        FDOut.write(
            f"mv {C.DBLQT}{allNewFilename_}{C.DBLQT} "
            f"{C.DBLQT}{thisEntry_[FO.PATH]}{C.DBLQT}{C.NEWLINE}"
        )
def InvertirCporP():
    """Create a randomly loaded queue, then reverse it in place via a stack.

    Fix: both transfer loops used `or`, so they kept running after one side
    was exhausted/full and would pop from an empty structure; `and` stops
    at the first exhausted condition.
    """
    cola = C.Cola()
    C.crearcola(cola)
    pila = P.Pila()
    P.crearpila(pila)
    C.cargar_random(cola)
    print("Cola original: ", cola.datos)
    while (not C.colavacia(cola)) and (not P.pilallena(pila)):
        y = C.supresion(cola)
        P.apilar(pila, y)
    while (not P.pilavacia(pila)) and (not C.colallena(cola)):
        z = P.desapilar(pila)
        C.insertarcola(cola, z)
    print("Cola invertida por pila: ", cola.datos)
def EA2Record(ea):
    """Convert ExtendedAttributes object to text record"""
    record_lines = ['# file: %s' % C.acl_quote(ea.get_indexpath())]
    for (name, val) in ea.attr_dict.iteritems():
        if not val:
            record_lines.append(name)
        else:
            encoded = base64.encodestring(val).replace('\n', '')
            try:
                record_lines.append('%s=0s%s' % (C.acl_quote(name), encoded))
            except UnicodeEncodeError:
                # Attribute name can't be encoded: warn and skip it.
                log.Log(
                    "Warning: unable to store Unicode extended attribute %s"
                    % repr(name), 3)
    return '\n'.join(record_lines) + '\n'
def __init__(self):
    """Set up dense and sparse discrete observation models plus fixtures.

    Fix: the sparse model was constructed from the dense matrix P_YS,
    leaving the freshly built P_YS_s unused; it now receives P_YS_s.
    """
    P_YS = Scalar.make_prob(B)
    P_YS.normalize()
    self.y_mod = Scalar.Discrete_Observations(P_YS)
    P_YS_s = Sparse.make_prob(P_YS)
    self.y_mod_s = Sparse.Discrete_Observations(P_YS_s)
    N = 20
    Y = np.empty(N, dtype=np.int32)
    for i in range(N):
        # Deterministic pseudo-random 0/1 sequence.
        Y[i] = (i + i % 2 + i % 3 + i % 5) % 2
    self.Y = [Y]
    self.w = np.array(20 * [0, 0, 1.0]).reshape((N, 3))
    self.w[0, :] = [1, 0, 0]
    self.w[3, :] = [0, 1, 0]
    self.Ys = [[Y[5:]], [Y[3:7]], [Y[:4]]]
def test_basic(self):
    """Bracket-sequence pairing cases for C.solve."""
    self.assertEqual(C.solve([')())', ')', '((', '((', '(', ')', ')']), 2)
    self.assertEqual(C.solve(['(', '((', '(((', '(())']), 0)
    self.assertEqual(C.solve(['(())', '()']), 1)
    print("basicOK")
    # def test_advanced(self):  -- folded into test_basic in the original
    self.assertEqual(C.solve(['())((', '(()))']), 0)
    self.assertEqual(C.solve([]), 0)
    print("advancedOK")
def _get(self):
    """Return the next (type, data) pair read from the file.

    The type is one character: "o" object, "f" file data, "c" file
    continuation, "h" file close value, "e" exception — or None when
    no more data can be read.  Data is raw bytes for "f"/"c" and an
    unpickled object otherwise.
    """
    header = self.file.read(8)
    if not header:
        return None, None
    assert len(header) == 8, \
        "Header %s is only %d bytes" % (header, len(header))
    rec_type = header[0]
    payload = self.file.read(C.str2long(header[1:]))
    if rec_type in ("o", "e", "h"):
        return rec_type, cPickle.loads(payload)
    assert rec_type in ("f", "c")
    return rec_type, payload
def test1():
    """Three-element case: best total happiness in the last row is 210."""
    a, b, c = [10, 20, 30], [40, 50, 60], [70, 80, 90]
    n = len(a)
    h = C.calc_happiness(n, a, b, c)
    assert max(h[n - 1, :]) == 210
def Record2EA(record):
    """Convert text record to ExtendedAttributes object"""
    lines = record.split("\n")
    first = lines.pop(0)
    if not first.startswith("# file: "):
        raise metadata.ParsingError("Bad record beginning: " + first[:8])
    filename = first[8:]
    index = (() if filename == "."
             else tuple(C.acl_unquote(filename).split("/")))
    ea = ExtendedAttributes(index)
    for line in lines:
        line = line.strip()
        if not line:
            continue
        assert line[0] != "#", line
        name, sep, rest = line.partition("=")
        if not sep:
            # Bare attribute name with no value.
            ea.set(line)
        else:
            assert rest[:2] == "0s", \
                "Currently only base64 encoding supported"
            ea.set(name, base64.decodestring(rest[2:]))
    return ea
def test2():
    """Single-element case: the best happiness is simply a[0]."""
    n = 1
    h = C.calc_happiness(n, [100], [10], [1])
    assert max(h[n - 1, :]) == 100
def test3():
    """Seven-element case with interleaved optima; expected best is 46."""
    a = [6, 8, 2, 7, 4, 2, 7]
    b = [7, 8, 5, 8, 6, 3, 5]
    c = [8, 3, 2, 6, 8, 4, 1]
    n = len(a)
    h = C.calc_happiness(n, a, b, c)
    assert max(h[n - 1, :]) == 46
def from_string(self, acl_str):
    """Parse a two-line ACL record into self.index and the ACL text."""
    lines = acl_str.splitlines()
    if len(lines) != 2 or not lines[0].startswith("# file: "):
        raise metadata.ParsingError("Bad record beginning: " + lines[0][:8])
    filename = lines[0][8:]
    if filename == '.':
        self.index = ()
    else:
        self.index = tuple(unicode(C.acl_unquote(filename)).split('/'))
    self.__acl = lines[1]
def check_token(self):
    """Finish the request with an expire response unless the token is valid."""
    t = self.token()
    if t and C.check_token(t):
        return
    # Missing token gets the explicit 'not token' message; an invalid
    # one gets the generic expire response.
    self.json(R.expire('not token') if not t else R.expire())
    return self.finish()
def post(self):
    """Create a share link for the posted poster payload."""
    param = json.loads(self.request.body)
    code = C.md5(param, 16)
    if not dao.get_share_link(code, param):
        self.json(R.error(f'the poster [{param["posterId"]}] not exits.'))
        return
    # Collapse the accidental double slash when `uri` ends with '/'.
    url = f"{uri}/v/{code}".replace('//v', '/v')
    self.json(R.ok().add('url', url))
def Record2ACL(record):
    """Convert text record to an AccessControlLists object"""
    newline_pos = record.find('\n')
    first_line = record[:newline_pos]
    if not first_line.startswith('# file: '):
        raise metadata.ParsingError("Bad record beginning: " + first_line)
    filename = first_line[8:]
    index = (() if filename == '.'
             else tuple(C.acl_unquote(filename).split('/')))
    return AccessControlLists(index, record[newline_pos:])
def get_proc(doc):
    "Gets all events and role fillers from a doc"
    dump = doc["cnlp"]
    # This was ripped out of compute-1
    sentence_deps = [s["indexeddependencies"] for s in dump["sentences"]]
    normalized = [[C.dep_norm(i, d) for d in s]
                  for i, s in enumerate(sentence_deps)]
    # dependencies[0] = C.remove_redundant(dependencies[0])  # corpus bug?
    dep_raw = sum(normalized, [])
    dependencies = C.dep_collapse(dep_raw, dep_coll_table)
    # Words are dug out of the full dump because they cross sentence
    # boundaries.
    words = C.relevant_words(dump["sentences"], C.relevant_verbs)
    # words = [w for w in words if w[2] not in stoplist]
    return {"dep_raw": dep_raw, "dependencies": dependencies, "words": words}
def make_app(p):
    """Build the Tornado Application: API routes first, then share/static."""
    static_path = "static" if C.indocker() else "../design/dist"
    # Debug is on outside docker, or when POSTER_DEBUG=true in docker.
    debug = (not C.indocker()
             or os.environ.get('POSTER_DEBUG', 'false') == 'true')
    api_routes = [
        (f"{p}api/login", ApiLoginHandler),
        (f"{p}api/user/posters", ApiUserPostersHandler),
        (f"{p}api/user/posters/copy/(.+)", ApiUserPostersCopyHandler),
        (f"{p}api/user/posters/(.+)", ApiUserPostersHandler),
        (f"{p}api/user/poster/(.+)", ApiPostersHandler),
        (f"{p}api/preview", ApiPreviewHandler),
        (f"{p}api/upload", ApiUploadHandler),
        (f"{p}api/link", ApiLinkHandler),
        (f"{p}v/(.+)", ApiViewHandler),
    ]
    file_routes = [
        (f'{p}(store/.*)$', StaticFileHandler,
         {"path": join(dirname(__file__), "data")}),
        (f'{p}resource/(.*)$', MyStaticFileHandler,
         {"path": join(dirname(__file__), "resource")}),
        (f'{p}(.*)$', StaticFileHandler,
         {"path": join(dirname(__file__), static_path),
          "default_filename": "index.html"}),
    ]
    return Application(api_routes + file_routes, debug=debug)
def addrorp(self, rorp):
    """Add a rorp to the buffer"""
    # The third pickle element flags whether file data follows.
    has_file = 1 if rorp.file else 0
    pickle = cPickle.dumps((rorp.index, rorp.data, has_file), 1)
    if rorp.file:
        self.next_in_line = rorp.file
    else:
        self.rorps_in_buffer += 1
    self.array_buf.fromstring("r")
    self.array_buf.fromstring(C.long2str(long(len(pickle))))
    self.array_buf.fromstring(pickle)
def Record2ACL(record):
    """Convert text record to an AccessControlLists object"""
    split_at = record.find("\n")
    header = record[:split_at]
    if not header.startswith("# file: "):
        raise metadata.ParsingError("Bad record beginning: " + header)
    filename = header[8:]
    if filename == ".":
        index = ()
    else:
        index = tuple(C.acl_unquote(filename).split("/"))
    return AccessControlLists(index, record[split_at:])
def build(**args):
    """
    Build a chunk of code, returning an object which contains the
    code's methods and/or classes.
    """
    # Delegate the entire build to a dedicated Builder instance.
    return C.Builder(**args).build()
def edit(text=_A, editor=_A, env=_A, require_save=_C, extension='.txt', filename=_A):
    """Open an editor for *text*, or edit *filename* in place when given."""
    from ._termui_impl import Editor
    ed = Editor(editor=editor, env=env, require_save=require_save,
                extension=extension)
    if filename is _A:
        return ed.edit(text)
    ed.edit_file(filename)
def moverCaP():
    """Move every element of a randomly loaded queue onto a stack and
    print both structures.

    Fix: the transfer loop used `or`, so it kept calling C.supresion after
    the queue was already empty (whenever the stack still had room); `and`
    stops as soon as the queue empties or the stack fills.
    """
    cola = C.Cola()
    C.crearcola(cola)
    C.cargar_random(cola)
    print("Cola: ", cola.datos)
    pila = P.Pila()
    P.crearpila(pila)
    if not C.colavacia(cola):
        while (not C.colavacia(cola)) and (not P.pilallena(pila)):
            x = C.supresion(cola)
            P.apilar(pila, x)
        print("Pila: ", pila.Datos)
    else:
        print("La cola está vacía")
def addfromfile(self, prefix_letter):
    """Read a chunk from the current file and add to array_buf

    prefix_letter and the length will be prepended to the file data.
    If there is an exception while reading the file, the exception
    will be added to array_buf instead.
    """
    buf = robust.check_common_error(self.read_error_handler,
                                    self.currently_in_file.read,
                                    [Globals.blocksize])
    if buf is None:
        # A read error occurred; ship the exception instead of data.
        self.currently_in_file = None
        excstr = cPickle.dumps(self.last_exception, 1)
        chunk = "".join(('e', C.long2str(long(len(excstr))), excstr))
    else:
        chunk = "".join((prefix_letter, C.long2str(long(len(buf))), buf))
        if buf == "":
            # End of file: append the close value under an 'h' record.
            cstr = cPickle.dumps(self.currently_in_file.close(), 1)
            self.currently_in_file = None
            chunk += "".join(('h', C.long2str(long(len(cstr))), cstr))
    self.array_buf.fromstring(chunk)
def Regress(mirror_rp):
    """Bring mirror and inc directory back to regress_to_time

    Also affects the rdiff-backup-data directory, so Globals.rbdir
    should be set.  Regress should only work one step at a time
    (i.e. don't "regress" through two separate backup sets.  This
    function should be run locally to the rdiff-backup-data
    directory.
    """
    inc_rpath = Globals.rbdir.append_path("increments")
    # Both paths must be top-level directories on the local connection.
    assert mirror_rp.index == () and inc_rpath.index == ()
    assert mirror_rp.isdir() and inc_rpath.isdir()
    assert mirror_rp.conn is inc_rpath.conn is Globals.local_connection
    manager, former_current_mirror_rp = set_regress_time()
    set_restore_times()
    regress_rbdir(manager)
    ITR = rorpiter.IterTreeReducer(RegressITRB, [])
    # Walk the combined mirror/increment metadata tree and regress
    # each file entry.
    for rf in iterate_meta_rfs(mirror_rp, inc_rpath):
        ITR(rf.index, rf)
    ITR.Finish()
    if former_current_mirror_rp:
        C.sync()  # Sync first, since we are marking dest dir as good now
        former_current_mirror_rp.delete()
def make_file_dict(filename):
    """Generate the data dictionary for the given RPath

    This is a global function so that os.name can be called locally,
    thus avoiding network lag and so that we only need to send the
    filename over the network, thus avoiding the need to pickle an
    (incomplete) rpath object.

    Fix: replaced the Python-3-incompatible `except OSError, error`
    comma syntax with `except OSError as error` (valid from Python 2.6).
    Returns None (implicitly) on EILSEQ/EINVAL so unicode filenames
    fall through to the Python implementation; re-raises other errors.
    """
    if os.name != 'nt':
        try:
            return C.make_file_dict(filename)
        except OSError as error:
            # Unicode filenames should be processed by the Python version
            if error.errno != errno.EILSEQ and error.errno != errno.EINVAL:
                raise
def __init__(self):
    """Set up dense and sparse discrete observation models plus fixtures.

    Fix: the sparse model was constructed from the dense matrix P_YS,
    leaving the freshly built P_YS_s unused; it now receives P_YS_s.
    """
    P_YS = Scalar.make_prob(B)
    P_YS.normalize()
    self.y_mod = Scalar.Discrete_Observations(P_YS)
    P_YS_s = Sparse.make_prob(P_YS)
    self.y_mod_s = Sparse.Discrete_Observations(P_YS_s)
    N = 20
    Y = np.empty(N, dtype=np.int32)
    for i in range(N):
        # Deterministic pseudo-random 0/1 sequence.
        Y[i] = (i + i%2 + i%3 + i%5)%2
    self.Y = [Y]
    self.w = np.array(20*[0,0,1.0]).reshape((N,3))
    self.w[0,:] = [1,0,0]
    self.w[3,:] = [0,1,0]
    self.Ys = [[Y[5:]],[Y[3:7]],[Y[:4]]]
def _get(self):
    """Return (type, data or object) pair

    This is like UnwrapFile._get() but reads in variable length
    blocks.  Also type "z" is allowed, which means end of
    iterator.  An empty read() is not considered to mark the end
    of remote iter.
    """
    if not self.buf:
        self.buf += self.file.read()
        if not self.buf:
            return None, None
    assert len(self.buf) >= 8, "Unexpected end of MiscIter file"
    rec_type = self.buf[0]
    length = C.str2long(self.buf[1:8])
    payload = self.buf[8:8 + length]
    self.buf = self.buf[8 + length:]
    if rec_type in "oerh":
        return rec_type, cPickle.loads(payload)
    return rec_type, payload
def addtobuffer(self):
    """Updates self.buffer, adding a chunk from the iterator.

    Returns None if we have reached the end of the iterator,
    otherwise return true.
    """
    if self.currently_in_file:
        # Continue streaming the file currently in progress.
        self.addfromfile("c")
        return 1
    try:
        currentobj = self.iter.next()
    except StopIteration:
        return None
    if hasattr(currentobj, "read") and hasattr(currentobj, "close"):
        # File-like object: stream its contents in chunks.
        self.currently_in_file = currentobj
        self.addfromfile("f")
    else:
        # Plain object: pickle it with an 'o' type marker and length.
        pickle = cPickle.dumps(currentobj, 1)
        self.array_buf.fromstring("o")
        self.array_buf.fromstring(C.long2str(long(len(pickle))))
        self.array_buf.fromstring(pickle)
    return 1
def comp_move(self):
    """Pick the computer's move using the engine selected by self.comp.

    'A' -> minimax, 'B' -> alpha-beta over a one-ply node expansion,
    anything else -> C.master.

    Fix: the alpha-beta branch initialised `highest = 0`, so when every
    evaluated move scored <= 0 no move was ever selected and the empty
    tuple was returned; starting from negative infinity guarantees a move
    is chosen whenever the move set is non-empty. The dead `vals` dict
    (written but never read) was also removed.
    """
    if self.comp == 'A':
        return A.minimax(self)
    if self.comp == 'B':
        best = ()
        highest = float('-inf')
        moveset = self.valid_moves('W')
        # default depth level 3
        nodes = [tree.Node(self, 3, 'W', mv[0], mv[1], moveset)
                 for mv in moveset]
        for n in nodes:
            score = B.alphabeta(n, 3, -10000, 10000, 'W')
            if score > highest:
                highest = score
                best = (n.get_x(), n.get_y())
        return best
    return C.master(self)
def ACL2Record(acl):
    """Convert an AccessControlLists object into a text record"""
    quoted_path = C.acl_quote(acl.get_indexpath())
    return "# file: %s\n%s\n" % (quoted_path, str(acl))
def __str__(self):
    """Render as a text record: quoted path header, then the ACL text."""
    quoted_path = C.acl_quote(encode(self.get_indexpath()))
    return '# file: %s\n%s\n' % (quoted_path, unicode(self.__acl))
def filename_to_index(self, filename):
    """Convert possibly quoted filename to index tuple"""
    # "." denotes the root, which maps to the empty index.
    if filename == ".":
        return ()
    return tuple(C.acl_unquote(filename).split("/"))
def b_func(x):
    """Return x itself when negative; otherwise combine a_func(x-1)
    and c_func(x-2)."""
    if x >= 0:
        return A_alias.a_func(x - 1) + C.c_func(x - 2)
    return x
def add_misc(self, obj):
    """Add an arbitrary pickleable object to the buffer"""
    pickled = cPickle.dumps(obj, 1)
    # 'o' record: type marker, encoded length, then the pickled payload.
    for piece in ("o", C.long2str(long(len(pickled))), pickled):
        self.array_buf.fromstring(piece)
def addfinal(self): """Signal the end of the iterator to the other end""" self.array_buf.fromstring("z") self.array_buf.fromstring(C.long2str(0L))