def submit(self, script) -> bool:
    """Execute *script* on the ray cluster and persist the remote pid.

    Returns True on success; on any failure logs the error, sets the
    state to ERROR and returns False.
    """
    try:
        # Start from a clean log file.
        exec(f"rm -f {self.log_path}", sync=True)
        if "submit" in script or "exec" in script:
            # Already a full ray command: tee output to the log, then stop ray.
            p_script = f"""{script} 2>&1 | tee {self.log_path} && ray stop"""
        else:
            # Bare command: wrap it in `ray exec` against this cluster's yaml.
            p_script = (
                f"""ray exec {os.path.join(YAML_DIR, self.name)}.yaml {script}"""
            )
        self._state = self.RUNNING
        # Launch asynchronously; keep the handle for is_running()/stop().
        self.tasks[script] = exec(p_script, sync=False)
        # Strip the ray-submit wrapper and flags so pgrep can match the bare
        # program invocation on the remote host.
        pyt_script = (script.replace(
            f"ray submit {os.path.join(YAML_DIR, self.name)}.yaml ",
            "").replace("--stop", "").replace("--start",
                                              "").strip().split(" "))
        pyt_script[0] = pyt_script[0].split("/")[-1]  # drop any path prefix
        pyt_script = " ".join(pyt_script)
        _, pgrep_output = exec(
            f"""ray exec {os.path.join(YAML_DIR, self.name)}.yaml 'pgrep -f "{pyt_script}"'""",
            sync=True,
        )
        # Persist the first matching pid for the later stop() call.
        with open("ray_pid.txt", "w") as f:
            pid = self.get_pids(pgrep_output)[0].strip()
            f.write(pid)
    except Exception as e:
        logger.error(e, exc_info=e, stack_info=True)
        self._state = self.ERROR
        return False
    return True
def gen_aids10k():
    """Build the AIDS10k train/test split from the raw AIDS gexf graphs.

    Keeps only connected graphs, picks one test graph per node count in
    [5, 23), and samples 10000 training graphs with <= 30 nodes.
    """
    datadir = get_root_path() + '/data'
    dirin = datadir + '/AIDS'
    graphs = {}
    nodes_graphs = defaultdict(list)  # node count -> list of graph ids
    lesseq30 = set()  # ids of graphs with <= 30 nodes
    disconnects = set()  # ids of disconnected graphs (excluded)
    for file in glob(dirin + '/*.gexf'):
        # Filename stem is the graph id, e.g. .../123.gexf -> 123.
        gid = int(file.split('/')[-1].split('.')[0])
        g = nx.read_gexf(file)
        if not nx.is_connected(g):
            print('{} not connected'.format(gid))
            disconnects.add(gid)
            continue
        graphs[gid] = g
        nodes_graphs[g.number_of_nodes()].append(gid)
        if g.number_of_nodes() <= 30:
            lesseq30.add(gid)
    print(len(disconnects), disconnects)
    # exit(1)
    # print(nodes_graphs[222])
    # print(nodes_graphs[2])
    train_dir = '{}/AIDS10k/train'.format(datadir)
    test_dir = '{}/AIDS10k/test'.format(datadir)
    exec('mkdir -p {}'.format(train_dir))
    exec('mkdir -p {}'.format(test_dir))
    for num_node in range(5, 23):
        # One test graph per node count.
        choose = sample(nodes_graphs[num_node], 1)[0]
        print('choose {} with {} nodes'.format(choose, num_node))
        nx.write_gexf(graphs[choose], test_dir + '/{}.gexf'.format(choose))
        lesseq30.remove(choose)
    # NOTE(review): sampling directly from a set was removed from
    # random.sample in Python 3.11 — confirm the interpreter version.
    for tid in sample(lesseq30, 10000):
        nx.write_gexf(graphs[tid], train_dir + '/{}.gexf'.format(tid))
    print('Done')
def encrypt(code):
    """AES-128-encrypt every .ts segment referenced by the m3u8 *code*.

    For each segment: generate a random key/iv with openssl, encrypt the
    segment to enc.<name>, register the key with the API, and insert the
    matching #EXT-X-KEY tag into the playlist. Progress is persisted to
    out.m3u8. Returns the rewritten playlist text.
    """
    if not _('ENCRYPTION') == 'YES':
        return code
    for file in tsfiles(code):
        # Segments already encrypted in a previous run are skipped.
        if file.startswith('enc.'):
            continue
        print('Encrypting %s to enc.%s ... ' % (file, file), end='')
        key = exec(['openssl', 'rand', '16']).hex()
        iv = execstr(['openssl', 'rand', '-hex', '16'])
        exec([
            'openssl', 'aes-128-cbc', '-e', '-in', file, '-out',
            'enc.%s' % file, '-p', '-nosalt', '-iv', iv, '-K', key
        ])
        # Store the key server-side; the playlist references it by id.
        key_id = api('POST', 'key', {'iv': iv, 'key': key})
        if not key_id:
            print('failed')
            # Fix: close (and flush) the playlist before exiting; the
            # original `open(...).write(...)` could lose buffered data.
            with open('out.m3u8', 'w') as out:
                out.write(code)
            exit()
        print('done')
        # Insert the key tag directly above this segment's EXTINF entry.
        code = re.sub(
            '(#EXTINF:.+$[\\r\\n]+^%s$)' % file,
            '#EXT-X-KEY:METHOD=AES-128,URI="%s/play/%s.key",IV=0x%s\n\\1' %
            (_('APIURL'), key_id, iv), code, 1, re.M)
        code = code.replace(file, 'enc.%s' % file)
    # Fix: context manager guarantees the final playlist write is flushed.
    with open('out.m3u8', 'w') as out:
        out.write(code)
    return code
def stop(self, script) -> bool:
    """Kill the process running *script* and drop it from the task table.

    Returns True on success; on failure logs the error, flips the state
    to ERROR and returns False.
    """
    try:
        # SIGKILL every process whose full command line matches the script.
        exec(f'pkill -9 -f "{script}"', sync=True)
        self._state = self.TERMINATED
        del self.tasks[script]
        return True
    except Exception as err:
        logger.error(err, exc_info=err, stack_info=True)
        self._state = self.ERROR
        return False
def mcs(g1, g2):
    """Return the maximum-common-subgraph size of *g1* and *g2*.

    Writes both graphs to temp gexf files, runs mcs_cal.py inside the
    `graphembedding` conda environment, and reads the integer result back
    from mcs_result.txt.
    """
    nx.write_gexf(g1, 'temp_1.gexf')
    nx.write_gexf(g2, 'temp_2.gexf')
    # Force using bash instead of dash.
    # `source activate` does not work on dash.
    # graphembedding is a virtual python environment with networkx==2.0.
    # By default networkx==1.10 is assumed.
    cmd = 'source activate graphembedding && python mcs_cal.py'
    exec('/bin/bash -c "{}"'.format(cmd))
    # Fix: the original left the result-file handle open (resource leak);
    # a context manager closes it deterministically.
    with open('mcs_result.txt', 'r') as f:
        return int(f.read())
def submit(self, script) -> bool:
    """Launch *script* asynchronously, appending its output to the log.

    Returns True when the task starts; on failure logs the error, sets
    the state to ERROR and returns False.
    """
    try:
        # Begin with a fresh log file.
        exec(f"rm -f {self.log_path}", sync=True)
        redirected = f"{script} >> {self.log_path}"
        self._state = self.RUNNING
        # Keep the async handle so is_running()/stop() can manage it later.
        self.tasks[script] = exec(redirected, sync=False)
        return True
    except Exception as err:
        logger.error(err, exc_info=err, stack_info=True)
        self._state = self.ERROR
        return False
def clean_up():
    """Interactively delete result files matching the configured pattern.

    NOTE(review): relies on a module-level name `f` as the glob pattern —
    confirm it is defined before this is called.
    """
    rp = get_result_path()
    for file in sorted_nicely(glob('{}/{}'.format(rp, f))):
        bnf = basename(file)
        print_info(file, bnf)
        t = prompt('Delete? [y/n]', ['y', 'n'])
        if t == 'y':
            # Permanent, recursive removal.
            exec('rm -rf {}'.format(file))
        elif t == 'n':
            print('Skip')
        else:
            # prompt() should only ever return 'y' or 'n'.
            assert (False)
    print('Done')
def ged(g1, g2, algo, debug=False, timeit=False):
    """Compute graph edit distance between *g1* and *g2* with GMT *algo*.

    Returns the distance d, or -1 when the Java toolkit run fails.
    With debug=True also returns [lcnt, g1, g2]; with timeit=True
    appends the elapsed time t; multi-value results come back as a tuple.
    """
    # https://github.com/dan-zam/graph-matching-toolkit
    gp = get_gmt_path()
    append_str = get_append_str(g1, g2)
    src, t_datapath = setup_temp_data_folder(gp, append_str)
    meta1 = write_to_temp(g1, t_datapath, algo, 'g1')
    meta2 = write_to_temp(g2, t_datapath, algo, 'g2')
    if meta1 != meta2:
        raise RuntimeError('Different meta data {} vs {}'.format(meta1, meta2))
    prop_file = setup_property_file(src, gp, meta1, append_str)
    rtn = []
    # astar needs a much larger heap and GC tweaks to finish.
    if not exec(
            'cd {} && java {}'
            ' -classpath {}/src/graph-matching-toolkit/bin algorithms.GraphMatching '
            './properties/properties_temp_{}.prop'.format(
                gp,
                '-XX:-UseGCOverheadLimit -XX:+UseConcMarkSweepGC -Xmx50g'
                if algo == 'astar' else '', get_root_path(), append_str)):
        rtn.append(-1)
    else:
        d, t, lcnt, g1size, g2size, result_file = get_result(
            gp, algo, append_str)
        rtn.append(d)
        # Sanity-check that the toolkit saw the same graphs we wrote.
        if g1size != g1.number_of_nodes():
            print('g1size {} g1.number_of_nodes() {}'.format(
                g1size, g1.number_of_nodes()))
        assert (g1size == g1.number_of_nodes())
        assert (g2size == g2.number_of_nodes())
        if debug:
            rtn += [lcnt, g1, g2]
        if timeit:
            rtn.append(t)
        # NOTE(review): on the failure branch above clean_up never runs
        # (result_file only exists here) — temp files are left behind.
        clean_up(t_datapath, prop_file, result_file)
    if len(rtn) == 1:
        return rtn[0]
    return tuple(rtn)
def is_running(self, script) -> bool:
    """Return True while *script* (local handle or matching process) runs."""
    # Fast path: we launched it ourselves and the handle is still alive.
    task = self.tasks.get(script)
    if task is not None and task.poll() is None:
        return True
    # Fallback: look for a shell process whose command line matches.
    _, output = exec(f'pgrep -f "/bin/sh -c {script}"', sync=True)
    return len(output.replace("\n", "")) > 1
def rename():
    """Interactively rename result files, replacing `source` with `target`.

    NOTE(review): depends on module-level names `f` (directory pattern),
    `source` and `target` — confirm they are set before calling.
    """
    rp = get_result_path()
    for dirpath, dirs, files in walk('{}/{}'.format(rp, f)):
        for bfn in files:
            # Already contains the target substring -> nothing to do.
            if target in bfn:
                continue
            dest_bfn = bfn.replace(source, target)
            t = prompt('Rename {} to {}? [y/n]'.format(bfn, dest_bfn),
                       ['y', 'n'])
            if t == 'y':
                exec('mv {} {}'.format(join(dirpath, bfn),
                                       join(dirpath, dest_bfn)))
            elif t == 'n':
                print('Skip')
            else:
                # prompt() should only ever return 'y' or 'n'.
                assert (False)
    print('Done')
def stop_cluster(self) -> bool:
    """Tear down the ray cluster described by self.config_path.

    Returns True on success; on failure logs and returns False with the
    state set to ERROR.
    """
    self._state = self.DONE
    try:
        output = exec(f"ray down -y {self.config_path}", sync=True)
        logger.info(output)
        return True
    except Exception as err:
        logger.error(err, exc_info=err, stack_info=True)
        self._state = self.ERROR
        return False
def stop(self, script) -> bool:
    """Stop the remote ray task whose pid was persisted by submit().

    Returns True on success; on failure logs the error, sets the state
    to ERROR and returns False.
    """
    try:
        init_script = script
        # For "ray submit/exec <yaml> <script>" commands, use the bare
        # script token (third word) as the task key semantics expect.
        if "ray" in script:
            script = script.split(" ")[2]
        # Read back the pid persisted by submit().
        with open("ray_pid.txt", "r") as f:
            pid = f.readline().strip()
        # NOTE(review): `pkill -s` matches by *session id*, not pid —
        # confirm this is intentional.
        exec(
            f"ray exec {YAML_DIR}{self.name}.yaml 'pkill -s {pid}'",
            sync=True,
        )
        self._state = self.TERMINATED
        del self.tasks[init_script]
        # Truncate the pid file now that the task is gone.
        open("ray_pid.txt", "w").close()
    except Exception as e:
        logger.error(e, exc_info=e, stack_info=True)
        self._state = self.ERROR
        return False
    return True
def launch_cluster(self) -> bool:
    """Bring up a ray cluster from self.config_path.

    Returns True on success; on failure logs and returns False with the
    state set to ERROR.
    """
    try:
        self._state = self.LAUNCHED
        output = exec(f"ray up -y {self.config_path}", sync=True)
        logger.info(output)
        print(output)
        return True
    except Exception as err:
        logger.error(err, exc_info=err, stack_info=True)
        self._state = self.ERROR
        return False
def get_value(self, line, curr_value):
    """Return the current SSID, or *curr_value* when *line* is irrelevant.

    Lines without a ':' are treated as noise and skipped.
    """
    # Guard: a non-empty line with no colon carries no SSID information.
    if line and ":" not in line:
        utils.log("Skipping %s" % repr(line))
        return curr_value
    out, err = utils.exec(config.global_config["get_ssid_command"])
    if err:
        utils.log("GetSSID Error: %s" % repr(err))
    utils.log("SSID: %s" % repr(out))
    return out
def setup_property_file(src, gp, meta, append_str):
    """Copy the .prop template for *meta* into gp/properties, specialized
    with *append_str*, and return the destination path.

    Falls back to a generic 'beam' template (filling in the beam size s)
    when the exact beam<k> template does not exist.
    """
    destfile = '{}/properties/properties_temp_{}.prop'.format(
        gp, append_str)
    srcfile = '{}/{}.prop'.format(src, meta)
    if not isfile(srcfile):
        if 'beam' in meta:  # for beam
            # meta looks like 'beam<k>_<rest>'; extract the beam size k.
            metasp = meta.split('_')
            s = int(metasp[0][4:])
            if s <= 0:
                raise RuntimeError('Invalid s for beam search: {}'.format(s))
            newmeta = '_'.join(['beam'] + metasp[1:])
            srcfile = '{}/{}.prop'.format(src, newmeta)
        else:
            raise RuntimeError('File {} does not exist'.format(srcfile))
    exec('cp {} {}'.format(srcfile, destfile))
    # Rewrite the copied template in place (inplace=True redirects stdout
    # into the file).
    for line in fileinput.input(destfile, inplace=True):
        line = line.rstrip()
        if line == 's=':  # for beam
            # NOTE(review): `s` is only bound on the beam fallback path
            # above; an 's=' line in a non-beam template would raise
            # NameError here — confirm templates guarantee this.
            print('s={}'.format(s))
        else:
            print(line.replace('temp', 'temp_{}'.format(append_str)))
    return destfile
def main():
    """Copy the input video into a temp dir, probe it and print a summary."""
    workdir = tempfile.mkdtemp()
    os.chdir(workdir)
    # Keep the original extension so ffprobe detects the container.
    name = 'video%s' % os.path.splitext(argv[1])[1]
    shutil.copyfile(argv[1], name)
    bitrate = bit_rate(name)
    maxbitrate = maxbit_rate(name)
    duration = video_duration(name)
    repaircmd = genrepair(name, name, uploader().MAX_BYTES)
    os.system('clear')
    print('\n=================================')
    print(f'file: {argv[1]}')
    print(f'size: {os.path.getsize(name)}')
    print(f'bitrate: {bitrate}')
    print(f'max_bitrate: {maxbitrate}')
    print(f'duration: {duration}')
    print(f'genrepair: {repaircmd}')
    print('=================================\n')
    # Remove the scratch directory when done.
    exec(['rm', '-rf', workdir])
def ged(g1, g2, algo):
    """Compute graph edit distance between *g1* and *g2* with GMT *algo*.

    Returns -1 when the Java toolkit invocation fails or times out.
    """
    # https://github.com/dan-zam/graph-matching-toolkit
    gp = get_gmt_path()
    src, tp = setup_temp_folder(gp)
    meta1 = write_to_temp(g1, tp, algo, 'g1')
    meta2 = write_to_temp(g2, tp, algo, 'g2')
    if meta1 != meta2:
        raise RuntimeError('Different meta data {} vs {}'.format(meta1, meta2))
    setup_property_file(src, gp, meta1)
    # Run the Java matcher; give up after the timeout (seconds,
    # presumably — TODO confirm exec()'s timeout unit).
    if not exec(
            'cd {} && java -classpath {}/src/graph-matching-toolkit/bin algorithms.GraphMatching ./properties/properties_temp_{}.prop'
            .format(gp, get_root_path(), get_ts()),
            timeout=1000):
        return -1
    return get_result(gp, algo)
def get_value(self, line, curr_value):
    """Return the interface's first non-ignored IPv4 address.

    Lines mentioning an ignored IP or a different interface are skipped,
    returning *curr_value* unchanged.
    """
    if line:
        ignored = any(" %s/" % ip in line for ip in self.ignored_ip_address)
        if ignored or self.interface not in line:
            utils.log("Skipping %s" % repr(line))
            return curr_value
    # Query the kernel for the interface's IPv4 addresses.
    out, err = utils.exec(
        "ip -4 address show %s | awk -F '[ /]*' '/inet/ { print $3 }'"
        % self.interface)
    if err:
        utils.log("ip address: %s" % repr(err))
    for candidate in out.split('\n'):
        if candidate not in self.ignored_ip_address:
            return candidate
    return ""
def bit_rate(file):
    """Return the container bit rate of *file* in bits/s, via ffprobe."""
    probe_cmd = [
        'ffprobe', '-v', 'error', '-show_entries', 'format=bit_rate',
        '-of', 'default=noprint_wrappers=1:nokey=1', file
    ]
    return int(exec(probe_cmd))
def encrypt(code):
    """Obfuscate and/or encrypt the segments of the m3u8 playlist *code*.

    Optional steganography passes (ENCRYPTION_VERSION V1/V2) disguise .ts
    segments as .png images; an optional AES-128 pass (ENCRYPTION == 'YES')
    then encrypts segments and rewrites the playlist with #EXT-X-KEY tags.
    Returns the rewritten playlist text.
    """
    if _('ENCRYPTION_VERSION') == 'V1':
        print('ENCRYPTION_VERSION:%s' % _('ENCRYPTION_VERSION'))
        # V1: sandwich the zlib-compressed segment between the head and
        # tail bytes of a real PNG so the file passes as an image.
        head = read_bytes('../dangdai-32x32.png')
        head1 = head[0:-12]
        head2 = head[-12:]
        tmpdir = os.getcwd() + '/tmp'
        if not os.path.exists(tmpdir):
            os.mkdir(tmpdir)
        os.chdir(tmpdir)
        for file in tsfiles(code):
            portion = os.path.splitext(file)
            if portion[1] == ".ts":
                newName = portion[0] + ".png"
                if os.path.isfile(newName):
                    # Converted in a previous run; just rewrite the playlist.
                    code = code.replace(file, newName)
                    continue
                segment = read_bytes('../' + file)
                segment = zlib.compress(segment)
                done_segments = head1 + segment + head2
                # Fix: close the output deterministically (was a bare
                # open().write()).
                with open(newName, 'wb') as out:
                    out.write(done_segments)
                code = code.replace(file, newName)
    if _('ENCRYPTION_VERSION') == 'V2':
        print('ENCRYPTION_VERSION:%s' % _('ENCRYPTION_VERSION'))
        # V2: hide the compressed segment inside a generated carrier PNG
        # via LSB steganography.
        tmpdir = os.getcwd() + '/tmp'
        if not os.path.exists(tmpdir):
            os.mkdir(tmpdir)
        os.chdir(tmpdir)
        for file in tsfiles(code):
            # 1. Size the carrier image from the segment size (+64 header).
            filesize = os.path.getsize('../' + file) + 64
            wh = math.ceil(math.sqrt(filesize / 3)) * 4
            print('切片大小:%s,图片尺寸:%s' % (filesize, wh))
            # Fix: read via a context manager (was a bare open().read()).
            with open('../' + file, "rb") as seg:
                data = seg.read()
            data = zlib.compress(data)
            print('压缩后切片大小:%s' % (len(data)))
            portion = os.path.splitext(file)
            newName = portion[0] + ".png"
            if os.path.isfile(newName):
                code = code.replace(file, newName)
                continue
            # 2. Generate the carrier image.
            new_img_file_name = new_image(wh, wh, portion[0],
                                          show_image=False)
            print('生成图片:%s' % (new_img_file_name))
            # 3. Embed the compressed data with LSB steganography.
            in_img = cv2.imread(new_img_file_name)
            steg = LSBSteg(in_img)
            starttime = time.time()
            res = steg.encode_binary(data)
            duration = time.time() - starttime
            print('隐写完成时间:%s' % (duration))
            cv2.imwrite(newName, res)
            print('隐写完成:%s' % (newName))
            # 4. Point the playlist at the disguised segment.
            code = code.replace(file, newName)
    if not _('ENCRYPTION') == 'YES':
        return code
    for file in tsfiles(code):
        if file.startswith('enc.'):
            continue
        print('Encrypting %s to enc.%s ... ' % (file, file), end='')
        key = exec(['openssl', 'rand', '16']).hex()
        iv = execstr(['openssl', 'rand', '-hex', '16'])
        exec([
            'openssl', 'aes-128-cbc', '-e', '-in', file, '-out',
            'enc.%s' % file, '-p', '-nosalt', '-iv', iv, '-K', key
        ])
        # Register the key server-side; the playlist references it by id.
        key_id = api('POST', 'key', data={'iv': iv, 'key': key})
        if not key_id:
            print('failed')
            # Fix: flush/close the playlist before exiting.
            with open('out.m3u8', 'w') as out:
                out.write(code)
            exit()
        print('done')
        # Insert the key tag directly above this segment's EXTINF entry.
        code = re.sub(
            '(#EXTINF:.+$[\\r\\n]+^%s$)' % file,
            '#EXT-X-KEY:METHOD=AES-128,URI="%s/play/%s.key",IV=0x%s\n\\1' %
            (_('APIURL'), key_id, iv), code, 1, re.M)
        code = code.replace(file, 'enc.%s' % file)
    # Fix: context manager for the final playlist write.
    with open('out.m3u8', 'w') as out:
        out.write(code)
    return code
from utils import exec, get_model_path

# Remote experiment/log directory to fetch.
exp = 'siamese_regression_linux_2018-08-07T01:32:40'
# Previously fetched experiments, kept for reference:
# imdb1kcoarse_2018-07-28T10:22:27
# aids700nef_2018-07-28T10:14:20
# linux_2018-07-28T10:14:56
# imdb1kcoarse_2018-07-28T10:15:00
# aids700nef_2018-07-28T10:16:44
# linux_2018-07-28T10:16:57
# imdb1kcoarse_2018-07-28T10:17:03
# aids700nef_2018-07-28T10:24:51
# linux_2018-07-28T10:25:36
# imdb1kcoarse_2018-07-28T10:25:39
# Copy the experiment's logs from the remote host into the local model dir.
exec(
    'scp -r [email protected]:/home/yba/GraphEmbedding/model/Siamese/logs/{} '
    '{}/Siamese/logs'.format(exp, get_model_path()))
print('done')
def setup_property_file(src, gp, meta):
    """Copy the .prop template for *meta* and specialize its 'temp' names.

    NOTE(review): get_ts() is called once for the filename and again per
    rewritten line — assumes it returns a stable per-run timestamp; verify.
    """
    file = '{}/properties/properties_temp_{}.prop'.format(gp, get_ts())
    exec('cp {}/{}.prop {}'.format(src, meta, file))
    # fileinput with inplace=True redirects stdout back into the file.
    for line in fileinput.input(file, inplace=True):
        print(line.rstrip().replace('temp', 'temp_' + get_ts()))
def gen_aids_small(name, additional=False):
    """Build a small AIDS dataset (*name*) from the 40k original graphs.

    With additional=False, creates train/test splits (AIDS10k or
    AIDS700nef). With additional=True, samples an extra 'test2' set for
    AIDS10k, excluding graphs already present in the training data.
    Intermediate parsing results are cached via save_as_dict/load_as_dict.
    """
    datadir = get_root_path() + '/data'
    dirin = datadir + '/AIDS40k_orig'
    sfn = get_save_path() + '/aids40k_orig'
    loaded = load_as_dict(sfn)
    if not loaded:
        graphs = {}
        nodes_graphs = defaultdict(list)  # node count -> graph ids
        lesseq30 = set()  # ids with <= 30 nodes
        lesseq10 = set()  # ids with <= 10 nodes
        disconnects = set()  # disconnected graphs (excluded)
        # Iterate through all 40k graphs.
        for file in glob(dirin + '/*.gexf'):
            gid = int(file.split('/')[-1].split('.')[0])
            g = nx.read_gexf(file)
            if not nx.is_connected(g):
                print('{} not connected'.format(gid))
                disconnects.add(gid)
                continue
            graphs[gid] = g
            nodes_graphs[g.number_of_nodes()].append(gid)
            if g.number_of_nodes() <= 30:
                lesseq30.add(gid)
            if g.number_of_nodes() <= 10:
                lesseq10.add(gid)
        save_as_dict(sfn, graphs, nodes_graphs, lesseq30, lesseq10,
                     disconnects)
    else:
        graphs = loaded['graphs']
        nodes_graphs = loaded['nodes_graphs']
        lesseq30 = loaded['lesseq30']
        lesseq10 = loaded['lesseq10']
        disconnects = loaded['disconnects']
    print(len(disconnects), 'disconnected graphs out of', len(graphs))
    print(len(lesseq30), 'with <= 30 nodes')
    print(len(lesseq10), 'with <= 10 nodes')
    # exit(1)
    train_dir = '{}/{}/train'.format(datadir, name)
    if additional:
        # Need the existing training gids so test2 can exclude them.
        train_data = load_data(name.lower(), train=True)
        test_dir_str = 'test2'
    else:
        exec('mkdir -p {}'.format(train_dir))
        test_dir_str = 'test'
    test_dir = '{}/{}/{}'.format(datadir, name, test_dir_str)
    exec('mkdir -p {}'.format(test_dir))
    if not additional:
        if name == 'AIDS10k':
            # Fixed seed (123) keeps the split reproducible.
            for num_node in range(5, 23):
                choose = random.Random(123).sample(nodes_graphs[num_node],
                                                   1)[0]
                print('choose {} with {} nodes'.format(choose, num_node))
                nx.write_gexf(graphs[choose],
                              test_dir + '/{}.gexf'.format(choose))
                lesseq30.remove(choose)
            for tid in random.Random(123).sample(lesseq30, 10000):
                nx.write_gexf(graphs[tid],
                              train_dir + '/{}.gexf'.format(tid))
        elif name == 'AIDS700nef':
            # 560 train / 140 test graphs with <= 10 nodes.
            lesseq10 = sample_from_lessthan10eq(train_dir, lesseq10, 560,
                                                graphs, 'train')
            sample_from_lessthan10eq(test_dir, lesseq10, 140, graphs, 'test')
    else:
        assert (name == 'AIDS10k')
        for num_node in range(5, 30):
            k = 4
            from_li = nodes_graphs[num_node]
            print('sampling {} from {} (size={})'.format(
                k, len(from_li), num_node))
            # NOTE(review): sample_exclude is not a stdlib Random method —
            # presumably patched in elsewhere in the project; verify.
            choose = random.Random(123).sample_exclude(
                from_li, k, train_data.get_gids())
            print('choose {} with {} nodes'.format(choose, num_node))
            for c in choose:
                nx.write_gexf(graphs[c], test_dir + '/{}.gexf'.format(c))
    print('Done')
def clean_up(t_datapath, prop_file, result_file):
    """Remove the temp data folder, property file and result file."""
    for target in (t_datapath, prop_file, result_file):
        exec('rm -rf {}'.format(target))
def video_codec(file):
    """Return 'copy' when every video stream is already h264, else 'h264'."""
    probe = exec([
        'ffprobe', '-v', 'error', '-select_streams', 'v:0', '-show_entries',
        'stream=codec_name', '-of', 'default=noprint_wrappers=1:nokey=1',
        file
    ])
    found = set(probe.split('\n'))
    # Any non-h264 stream forces a re-encode; otherwise stream-copy.
    return 'h264' if found.difference({'h264'}) else 'copy'
def video_codec(file):
    """Return 'copy' when every video stream is already h264, else 'h264'."""
    import shlex  # local import: only needed for the quoting fix below
    # Fix: quote the filename so paths containing spaces or shell
    # metacharacters don't break the command or inject into the shell.
    codecs = exec(
        'ffprobe -v error -select_streams v:0 -show_entries '
        'stream=codec_name -of default=noprint_wrappers=1:nokey=1 %s'
        % shlex.quote(file))
    return 'h264' if set(codecs.split('\n')).difference({'h264'}) else 'copy'
def setup_temp_folder(gp):
    """Create a fresh timestamped temp data folder under *gp*.

    Returns (template source dir, temp folder path).
    """
    temp_path = gp + '/data/temp_' + get_ts()
    # Wipe any stale folder from a previous run, then recreate it.
    exec('rm -rf {} && mkdir {}'.format(temp_path, temp_path))
    src_dir = get_root_path() + '/src/gmk_files'
    exec('cp {}/temp.xml {}/temp_{}.xml'.format(src_dir, temp_path, get_ts()))
    return src_dir, temp_path
def setup_temp_data_folder(gp, append_str):
    """Create a fresh temp data folder under *gp* keyed by *append_str*.

    Returns (template source dir, temp folder path).
    """
    temp_path = gp + '/data/temp_{}'.format(append_str)
    # Wipe any stale folder from a previous run, then recreate it.
    exec('rm -rf {} && mkdir {}'.format(temp_path, temp_path))
    src_dir = get_root_path() + '/src/gmt_files'
    exec('cp {}/temp.xml {}/temp_{}.xml'.format(src_dir, temp_path,
                                                append_str))
    return src_dir, temp_path
def bit_rate(file):
    """Return the container bit rate of *file* in bits/s, via ffprobe."""
    import shlex  # local import: only needed for the quoting fix below
    # Fix: quote the filename so paths containing spaces or shell
    # metacharacters don't break the command or inject into the shell.
    return int(exec(
        'ffprobe -v error -show_entries format=bit_rate '
        '-of default=noprint_wrappers=1:nokey=1 %s' % shlex.quote(file)))