def _cat_amd64(filepath, out_fd, use_sendfile):
    """Emit amd64 shellcode fragments that dump *filepath* to *out_fd*.

    When *use_sendfile* is true a single sendfile-based fragment is
    emitted; otherwise a read/write loop is generated.
    """
    opener = open_file(filepath)
    if use_sendfile:
        return opener, sendfile('rax', out_fd)
    # Read/write loop: read a chunk onto the stack, exit on EOF or error,
    # otherwise write the chunk out and repeat.
    loop = (
        "xchg ebp, eax\ncat_helper1:",
        read_stack('rbp', 48, False),
        "test eax, eax\njle cat_helper2",
        write_stack(out_fd, 'rax'),
        "jmp cat_helper1\ncat_helper2:",
    )
    return (opener,) + loop
def _update_issues(self, issue_dicts):
    """Merge *issue_dicts* into the in-memory issue map and persist it.

    Refreshes the queried-issues view, then rewrites the on-disk JSON
    cache so the merged state survives restarts.
    """
    self._issue_dicts.update(issue_dicts)
    self._set_queried_issues()
    # Persist the merged mapping to the JSON cache file.
    serialized = json.dumps(self._issue_dicts)
    with open_file(self._ISSUE_CACHE_FILE, 'w') as cache:
        cache.write(serialized)
def _load_cached_issues(self):
    """Populate the issue map from the on-disk JSON cache, if readable.

    A corrupt or structurally unexpected cache is logged and otherwise
    ignored.  NOTE(review): a missing cache file is presumably handled
    inside open_file or surfaces as ValueError — confirm.
    """
    try:
        raw = open_file(self._ISSUE_CACHE_FILE)
        self._update_issues(json.load(raw))
        count = len(self._queried_issues)
        self._logs_window.log('Loaded {} issues from cache'.format(count))
    except (ValueError, KeyError):
        self._logs_window.log('Failed to load cached issues')
def _cat_linux_arm(filepath, out_fd):
    """ARM shellcode fragments that copy *filepath*'s contents to *out_fd*."""
    head = open_file(filepath)
    # r8 keeps the opened fd; loop reads into the stack until r0 <= 0.
    read_chunk = read_stack('r8', 48, False)
    emit = write_stack(out_fd, 'r0')
    return (
        head,
        'mov r8, r0\ncat_helper1:',
        read_chunk,
        'cmp r0, #0\nble cat_helper2',
        emit,
        'b cat_helper1\ncat_helper2:',
    )
def ls(filepath='.', out_fd=1):
    """Args: filepath, [out_fd (imm/reg) = STDOUT_FILENO]

    Opens a directory and writes its content to the specified file
    descriptor.
    """
    parts = [open_file(filepath)]
    # Loop: fetch directory entries, stop on <= 0, else write them out.
    parts.append("xchg ebp, eax\nls_helper1:")
    parts.append(getdents('ebp', 255, False))
    parts.append("test eax, eax\njle ls_helper2")
    parts.append(write_stack(out_fd, 'eax'))
    parts.append("jmp ls_helper1\nls_helper2:")
    return tuple(parts)
def main():
    """Entry point: open the file named on the command line and process it."""
    if len(sys.argv) <= 1:
        print("ERROR: No input file specified", file=sys.stderr)
        sys.exit(1)
    filename = sys.argv[1]
    inf = open_file.open_file(filename)
    # The map name is everything before the first dot in the filename.
    process_map(inf, filename.split('.')[0])
def _ls_linux_i386(filepath='.', out_fd=1, in_fd=0):
    """i386 shellcode fragments listing *filepath* to *out_fd*.

    NOTE(review): *in_fd* is accepted but unused here — presumably kept
    for signature parity with the other arch variants; confirm.
    """
    return (
        open_file(filepath),
        "xchg ebp, eax\n",
        "ls_helper1:\n",
        getdents('ebp', 255, False),
        "test eax, eax\n",
        "jle ls_helper2\n",
        write_stack(out_fd, 'eax'),
        "jmp ls_helper1\n",
        "ls_helper2:\n",
    )
def ls(filepath='.', out_fd=1):
    """Args: filepath, [out_fd (imm/reg) = STDOUT_FILENO]

    Opens a directory and writes its content to the specified file
    descriptor.
    """
    open_part = open_file(filepath)
    # getdents loop: stop when no entries remain, else write and repeat.
    loop = (
        "xchg ebp, eax\nls_helper1:",
        getdents('ebp', 255, False),
        "test eax, eax\njle ls_helper2",
        write_stack(out_fd, 'eax'),
        "jmp ls_helper1\nls_helper2:",
    )
    return (open_part,) + loop
def harvest_kv_pairs(filename):
    """Retrieve and save to files the k:v values of a set of specified tags."""
    xpaths = [r".//node/tag", r".//way/tag", r".//relation/tag"]
    vals = get_kv(open_file.open_file(filename), xpaths)
    # One output file per xpath, named from the sanitized xpath.
    for xpath, tag_vals in vals.items():
        out_name = "{}-{}.txt".format(KV_FILE_PREFIX, _xpath2filename(xpath))
        with open(out_name, 'w') as outf:
            pprint.pprint({xpath: tag_vals}, stream=outf)
def harvest_attribs(filename):
    """Retrieve and save to files the values of certain tags' attribute."""
    queries = [
        r".//node[@lat]",
        r".//node[@lon]",
        r".//node[@user]",
        r".//relation[@user]",
        r".//relation/member[@role]",
        r".//relation/member[@type]",
        r".//way[@user]",
    ]
    inf = open_file.open_file(filename)
    vals = get_attrib_values(inf, queries)
    # One output file per xpath, named from the sanitized xpath.
    for xpath in vals:
        target = "{}-{}.txt".format(ATTR_FILE_PREFIX, _xpath2filename(xpath))
        with open(target, 'w') as outf:
            pprint.pprint({xpath: vals[xpath]}, stream=outf)
def _ls_linux_arm(filepath='.', out_fd=1, in_fd=0):
    """ARM shellcode fragments listing *filepath* to *out_fd*."""
    pieces = [open_file(filepath)]
    pieces.append("mov r6, r0\n")  # stash the opened fd in r6
    pieces.append("loop:\n")
    pieces.append(getdents(in_fd))
    # Zero r4 and test the getdents result; <= 0 means no more entries.
    pieces.extend(["sub r4, r4, r4\n", "cmp r0, r4\n", "ble next\n"])
    pieces.append(write_stack(out_fd, size=255))
    # Re-test and keep looping while entries remain.
    pieces.extend(["sub r4, r4, r4\n", "cmp r0, r4\n", "bgt loop\n", "next:\n"])
    return tuple(pieces)
def main():
    """The main function: parse CLI flags and dump OSM tag statistics."""
    from argparse import ArgumentParser, FileType

    parser = ArgumentParser(description=__doc__)
    parser.add_argument("-a", "--attr", action="store_true",
                        help="print the tree of the found tags and their attributes")
    parser.add_argument("-f", "--flat", action="store_true",
                        help="print flat statistics of the found tags")
    parser.add_argument("-t", "--tree", action="store_true",
                        help="print the tree of the found tags and their statistics")
    parser.add_argument("-o", "--out", dest="outf", default="-",
                        type=FileType("w", encoding="UTF-8"),
                        help="output file (if not specified, then sys.stdout)")
    parser.add_argument("filename", metavar="FILE",
                        help="input OSM XML file (can be compressed or uncompressed)")
    opts = parser.parse_args()

    # Default to flat statistics when no mode flag was given.
    if not (opts.flat or opts.attr or opts.tree):
        opts.flat = True

    if opts.flat:
        print("> Flat mode enabled")
    if opts.attr:
        print("> Attribute mode enabled")
    if opts.tree:
        print("> Tree mode enabled")
    print("> Input file: " + opts.filename)
    print("> Output will go to: " + opts.outf.name)
    print()

    inf = open_file.open_file(opts.filename)
    tag_tree, tag_tree_counters, tag_counters = parse_tree(inf)

    if opts.flat:
        pprint.pprint(tag_counters, stream=opts.outf)
    if opts.attr:
        pprint.pprint(tag_tree, stream=opts.outf)
    if opts.tree:
        pprint.pprint(tag_tree_counters, stream=opts.outf)
    def write(self, data):
        ''' write and flush immediately '''
        self.stream.write(data)
        self.stream.flush()

    def writelines(self, datas):
        ''' write and flush immediately '''
        self.stream.writelines(datas)
        self.stream.flush()

    def __getattr__(self, attr):
        # Delegate every other attribute to the wrapped stream.
        return getattr(self.stream, attr)

# Mirror stdout and stderr into the log file through the flushing wrapper.
# NOTE(review): two separate open_file() handles on the same LOG_FILE in
# 'w' mode may truncate/interleave each other's output — confirm intent.
sys.stdout = LogStream(open_file(LOG_FILE, 'w'))
sys.stderr = LogStream(open_file(LOG_FILE, 'w'))

parser = argparse.ArgumentParser(description='Open up the Jira CLI')
# Status filters are mutually exclusive; each stores a Query constant.
status = parser.add_mutually_exclusive_group()
status.add_argument('-a', '--all', help='display all issues (default)',
                    action='store_const', dest='status',
                    const=Query.STATUS_ALL)
status.add_argument('-o', '--open', help='display open issues',
                    action='store_const', dest='status',
import open_file
import layers
import folium

# Build the base map, stack every layer onto it, then save to HTML.
myMap = folium.Map()
year = str(input('Please write a year(numer only): '))
for layer in (
    layers.film_layer(open_file.open_file(year)),
    layers.population_layer(),
    layers.okean_elzy_layer(),
    layers.ice_edge_layer(),
    folium.LayerControl(),
):
    myMap.add_child(layer)
myMap.save("myMap.html")
# -*- coding: utf-8 -*-
__author__ = 'BiziurAA'
from sql_dp import sql_db
import sys
from ya_translate import ya_translate
from open_file import open_file
from lemmatizer import lemmatizer

if __name__ == "__main__":
    # Pipeline: read tokenized sentences, lemmatize each word longer than
    # two characters, translate lemmas not yet in the database, store the
    # pairs, then export everything to a spreadsheet.
    # Fixes: removed leftover debug print('sdfsdf') and the unused row=[].
    of = open_file()
    sq = sql_db()
    yt = ya_translate()
    lem = lemmatizer()
    for filtered_sentence in of.read_file():
        if not filtered_sentence:
            continue
        for word in filtered_sentence:
            if len(word) <= 2:
                continue  # skip short/noise tokens
            lemm_word = lem.lemm(word)
            if sq.searh_word(lemm_word):
                continue  # already stored
            print(lemm_word)
            translate_word = yt.get_english_words(lemm_word)
            if translate_word:
                sq.add_sql_db(lemm_word, translate_word)
    sq.sql_to_xls()
    del sq
    del of
from open_file import open_file
from matplotlib import pyplot as plt
# Use a CJK-capable font so Chinese labels render; keep the minus sign glyph.
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
from data_process import *
import math

# NOTE(review): presumably accumulates fluctuation values F(n) further
# down the file — confirm against the rest of the script.
fn = []

# Five epileptic (abnormal) EEG recordings, loaded from hard-coded paths.
epilepsy_1 = open_file('E:/DFA/DFA_DATA/癫痫异常脑电图/20121030105258.txt')
epilepsy_2 = open_file('E:/DFA/DFA_DATA/癫痫异常脑电图/20121031103719.txt')
epilepsy_3 = open_file('E:/DFA/DFA_DATA/癫痫异常脑电图/20121031111404.txt')
epilepsy_4 = open_file('E:/DFA/DFA_DATA/癫痫异常脑电图/20121031113934.txt')
epilepsy_5 = open_file('E:/DFA/DFA_DATA/癫痫异常脑电图/20121031163308.txt')
# Five normal EEG recordings.
normal_1 = open_file('E:/DFA/DFA_DATA/正常脑电图/20121030084615.txt')
normal_2 = open_file('E:/DFA/DFA_DATA/正常脑电图/20121030091121.txt')
normal_3 = open_file('E:/DFA/DFA_DATA/正常脑电图/20121030103952.txt')
normal_4 = open_file('E:/DFA/DFA_DATA/正常脑电图/20121030110908.txt')
normal_5 = open_file('E:/DFA/DFA_DATA/正常脑电图/20121030151450.txt')

# Disabled exploratory plotting (signal comparison with a legend box, and
# the detrended epileptic signal — the Chinese title reads
# "detrended epileptic EEG signal I (n=4)").
# plt.plot(epilepsy_1, label='abnormal_1 plot')
# plt.plot(epilepsy_2, label='abnormal_2 plot')
# plt.plot(normal_1, label='normal plot')
# # generate a legend box
# plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=0,ncol=3, mode="expand", borderaxespad=0.)
# plt.show()
#
#
#
# temp_1,fitted_value_1,yk_1,fn_1 = process(epilepsy_1,4)
# plt.plot(temp_1,epilepsy_1,'g',temp_1,yk_1,'b',temp_1,fitted_value_1,'r--')
# plt.title('去趋势之后的癫痫脑电信号图Ⅰ(n=4)')
# plt.show()
print()


def importantCells(start, goal):
    """Convert the (x, y) start and goal cell pairs into four ints."""
    sx, sy = int(start[0]), int(start[1])
    gx, gy = int(goal[0]), int(goal[1])
    return sx, sy, gx, gy


## ----- Sequential A* ----- ##
total_time = 0

# map1a: run sequential A* with weight 4 and accumulate its wall time.
M, S, G = open_file("map1a.txt")
sx, sy, gx, gy = importantCells(S, G)
start_time = time.time()
M, i = seqheu(M, sx, sy, gx, gy, 4)
running_time = time.time() - start_time
total_time += running_time

# map1b: identical timed run on the next map.
M, S, G = open_file("map1b.txt")
sx, sy, gx, gy = importantCells(S, G)
start_time = time.time()
M, i = seqheu(M, sx, sy, gx, gy, 4)
running_time = time.time() - start_time
total_time += running_time

# map1c: load the map and extract coordinates for the following run.
M, S, G = open_file("map1c.txt")
sx, sy, gx, gy = importantCells(S, G)
from open_file import open_file, write_to_file


def parse_acc_no(data):
    '''This function is to capture lines that begin with the string: Accession

    input: database
    ouput: return captured accession numbers
    '''
    # NOTE(review): the pattern matches 'ACCESSION' anywhere in the line,
    # not only at the start, and only the FIRST matching line is returned
    # (returns None when no line matches) — confirm intent.
    p = re.compile(r'ACCESSION *')  # captures the whole line
    for i in data:
        if p.findall(i):
            return i


write_to_file(parse_acc_no(open_file('chrom_CDS_8.txt')))
# works well enough, has trouble copying the output(acc_no) to a file.
# issue is with write to file but may provide, perhaps a LOOP statement to write


def parse_dna_seq():
    '''This function captures DNA sequences from a variable database

    input: database
    output: return captured DNA sequences
    '''
    # use Biopython here
    # NOTE(review): 'database' is undefined in this scope, so calling this
    # raises NameError; it also prints the compiled pattern object rather
    # than any matches, and returns nothing — work in progress.
    p = re.compile(r'Origin *')
    p.findall(database)
    print(p)
def _get_team_ids():
    """Return the team ids stored one-per-line in TEAM_FILE."""
    contents = open_file(TEAM_FILE).read()
    return contents.strip().split('\n')
# NOTE(review): path_x/path_y and start_node_x/start_node_y are built by
# code above this chunk — this tail finishes the path reconstruction by
# appending the start cell and reversing into forward order.
path_x.append(start_node_x)
path_y.append(start_node_y)
path_x.reverse()
path_y.reverse()
# Print the path coordinates, wrapping to a new row every ten entries.
for i in range(len(path_x)):
    if i % 10 == 1 and i != 1:
        print()
    print("(", path_x[i], ", ", path_y[i], "), ", end='')

'''
101
8
15
18
'''

# Load the map and pull the start/goal cell coordinates out as ints.
gameMap, start, goal = open_file("map1j.txt")
start_x = int(start[0])
start_y = int(start[1])
goal_x = int(goal[0])
goal_y = int(goal[1])

#x = generate_map(rows, columns)
# Disabled interactive run of sequential A* plus path display.
'''
input("Press Enter to Start")
map_grid, i = seqheu(gameMap, start_x, start_y, goal_x, goal_y, 2)
print(len(map_grid))
input("Show Path?")
printPath(map_grid[i], start_x, start_y, goal_x, goal_y)
'''