def build(self): """ dmraid build sequence @return bool """ zero = int('0') if os.path.isfile('%s/dmraid-%s.tar.bz2' % (get_distdir(self.temp), self.dmraid_ver)) is not True: print(green(' * ') + '... dmraid.download') if self.download() is not zero: process('rm %s/dmraid-%s.tar.bz2' % (get_distdir(self.temp), self.dmraid_ver), self.verbose) self.fail('download') print(green(' * ') + '... dmraid.extract') self.extract() # grr, tar thing to not return 0 when success print(green(' * ') + '... dmraid.configure') if self.configure() is not zero: self.fail('configure') # if self.unset_selinux() is not zero: self.fail('selinux') print(green(' * ') + '... dmraid.make') if self.make() is not zero: self.fail('make') print(green(' * ') + '... dmraid.strip') if self.strip() is not zero: self.fail('strip') print(green(' * ') + '... dmraid.compress') if self.compress() is not zero: self.fail('compress') print(green(' * ') + '... dmraid.cache') if self.cache() is not zero: self.fail('cache')
def main(path, min_count):
    # Construct vocabulary.
    vocab = []
    with open(path) as fin:
        for line in fin:
            words = line.strip().split()
            for word in words:
                word = process(word)
                vocab.append(word)
    counter = Counter(vocab)
    vocab = dict((word, count) for word, count in counter.most_common()
                 if count >= min_count)
    with open('vocab.json', 'w') as fp:
        json.dump(vocab, fp, indent=4)
    del counter

    # Get all necessary substitutions using the vocabulary.
    subs = []
    with open(path) as fin:
        for sent_id, line in enumerate(fin, 1):
            words = line.strip().split()
            for word_id, word in enumerate(words, 1):
                processed = process(word)
                if processed not in vocab:
                    unk = unkify(processed, vocab)
                    # subs.append(f'{sent_id} {word_id} {UNK}')
                    subs.append(f'{sent_id} {word_id} {unk}')
                elif processed != word:
                    subs.append(f'{sent_id} {word_id} {processed}')
    print('\n'.join(subs))
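# A minimal command-line driver for main() above -- a sketch, not part of the
# original module; the flag names and defaults here are hypothetical.
import argparse

if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser(description='Build a vocabulary and print UNK substitutions.')
    arg_parser.add_argument('--path', required=True, help='corpus file, one sentence per line')
    arg_parser.add_argument('--min-count', type=int, default=2, help='minimum frequency for a word to stay in the vocabulary')
    args = arg_parser.parse_args()
    main(args.path, args.min_count)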
def build(self): """ dropbear build sequence @return: bool """ zero = int('0') if os.path.isfile('%s/dropbear-%s.tar.gz' % (get_distdir(self.temp), str(self.dropbear_ver))) is not True: if self.download() is not zero: process('rm -v %s/dropbear-%s.tar.gz' % (get_distdir(self.temp), str(self.dropbear_ver)), self.verbose) self.fail('download') self.extract() # grr, tar thing to not return 0 when success # FIXME there is no need to patch for scp->dbscp # FIXME because there is NO scp bin inside the initramfs # FIXME the patch only applies for cases when openssh is already installed # FIXME to make dropbear and openssh coexist # FIXME if self.patch() is not zero: self.fail('patch') if self.debugflag is True: if self.patch_debug_header() is not zero: self.fail('patch_debug_header') if self.configure() is not zero: self.fail('configure') if self.make() is not zero: self.fail('make') if self.strip() is not zero: self.fail('strip') if self.dsskey() is not zero: self.fail('dsskey') if self.rsakey() is not zero: self.fail('rsakey') if self.compress() is not zero: self.fail('compress') if self.cache() is not zero: self.fail('cache')
def build(self): """ screen build sequence @return bool """ zero = int('0') if os.path.isfile('%s/screen-%s.tar.gz' % (get_distdir(self.temp), self.screen_ver)) is not True: if self.download() is not zero: process('rm %s/screen-%s.tar.gz' % (get_distdir(self.temp), self.screen_ver), self.verbose) self.fail('download') self.extract() # grr, tar thing to not return 0 when success if self.configure() is not zero: self.fail('configure') if self.make() is not zero: self.fail('make') if self.strip() is not zero: self.fail('strip') if self.compress() is not zero: self.fail('compress') if self.cache() is not zero: self.fail('cache')
def build(self): """ luks build sequence @return: bool """ zero = int("0") if os.path.isfile("%s/cryptsetup-%s.tar.bz2" % (get_distdir(self.temp), self.luks_ver)) is not True: if self.download() is not zero: process("rm -v %s/cryptsetup-%s.tar.bz2" % (get_distdir(self.temp), self.luks_ver), self.verbose) self.fail("download") self.extract() # grr, tar thing to not return 0 when success if self.configure() is not zero: self.fail("configure") if self.make() is not zero: self.fail("make") if self.strip() is not zero: self.fail("strip") if self.compress() is not zero: self.fail("compress") if self.cache() is not zero: self.fail("cache")
def source_dmraid(self):
    """ Append dmraid to initramfs from sources

    @return: bool
    """
    logging.debug('>>> entering initramfs.append.source_dmraid')
    dmraid_bin = '/usr/sbin/dmraid'
    process('mkdir -p ' + self.temp['work'] + '/initramfs-source-dmraid-temp/bin', self.verbose)
    logging.debug('initramfs.append.source_dmraid ' + self.version_conf['dmraid-version'])
    if os.path.isfile(self.temp['cache'] + '/dmraid.static-' + self.version_conf['dmraid-version'] + '.bz2') and self.nocache is False:
        # use cache
        print(green(' * ') + '... ' + 'cache found: importing')
    else:
        # compile
        from .sources.dmraid import dmraid
        dmraidobj = dmraid(self.master_conf, self.version_conf, self.url_conf, self.selinux, self.temp, self.verbose)
        dmraidobj.build()
    # extract cache
    # FIXME careful with the >
    logging.debug('/bin/bzip2 -dc %s/dmraid.static-%s.bz2 > %s/initramfs-source-dmraid-temp/bin/dmraid.static' % (self.temp['cache'], self.version_conf['dmraid-version'], self.temp['work']))
    os.system('/bin/bzip2 -dc %s/dmraid.static-%s.bz2 > %s/initramfs-source-dmraid-temp/bin/dmraid.static' % (self.temp['cache'], self.version_conf['dmraid-version'], self.temp['work']))
    # FIXME make a symlink rather than cp
    process('cp %s/initramfs-source-dmraid-temp/bin/dmraid.static %s/initramfs-source-dmraid-temp/bin/dmraid' % (self.temp['work'], self.temp['work']), self.verbose)
    # FIXME ln -sf raid456.ko raid45.ko ?
    # FIXME is it OK to have no raid456.ko? If so, shouldn't we check .config for the in-kernel feature?
    # Or should we raise an error and make the user enable the module manually? A warning?
    os.chdir(self.temp['work'] + '/initramfs-source-dmraid-temp')
    return os.system(self.cpio())
def source_disklabel(self):
    """ Append blkid binary to the initramfs after compiling e2fsprogs

    @return: bool
    """
    logging.debug('>>> entering initramfs.append.source_disklabel')
    blkid_sbin = '/sbin/blkid'
    process('mkdir -p %s' % self.temp['work'] + '/initramfs-source-disklabel-temp/bin', self.verbose)
    logging.debug('initramfs.append.source_disklabel ' + self.version_conf['e2fsprogs-version'])
    if os.path.isfile(self.temp['cache'] + '/blkid-e2fsprogs-' + self.version_conf['e2fsprogs-version'] + '.bz2') and self.nocache is False:
        # use cache
        print(green(' * ') + '... ' + 'cache found: importing')
    else:
        # compile
        from .sources.e2fsprogs import e2fsprogs
        e2obj = e2fsprogs(self.master_conf, self.version_conf, self.url_conf, self.temp, self.verbose)
        e2obj.build()
    # extract cache
    # FIXME careful with the >
    os.system('/bin/bzip2 -dc %s/blkid-e2fsprogs-%s.bz2 > %s/initramfs-source-disklabel-temp/bin/blkid' % (self.temp['cache'], self.version_conf['e2fsprogs-version'], self.temp['work']))
    process('chmod +x %s/initramfs-source-disklabel-temp/bin/blkid' % self.temp['work'], self.verbose)
    os.chdir(self.temp['work'] + '/initramfs-source-disklabel-temp')
    return os.system(self.cpio())
def runGame(self):
    R = 0
    s = utils.process(self.env.reset(), self.env.spec.id)
    n_a = 0
    old_a = None
    while True:
        time.sleep(THREAD_DELAY)
        if self.render:
            self.env.render()
        if n_a > MAX_REPEAT_ACTION:
            a = self.agent.act(s, old_a)
        else:
            a = self.agent.act(s)
        if a == old_a:
            n_a += 1
        else:
            n_a = 0
            old_a = a
        s_, r, done, info = self.env.step(a)
        s_ = utils.process(s_, self.env.spec.id)
        R += r
        self.agent.train(s, a, r, s_, done, R)
        s = s_
        if done or self.stop_signal:
            break
    print("Score:", R)
def download_resource(self, mapId, tile, resource):
    validateResource(self.mapType, resource)
    try:
        tileUrl = buildURL(self._url, mapId, self._geometryId, tile.z, tile.x, tile.y, resource)
        spaceKnowLogger.info('GET %s' % tileUrl)
        if resource.endswith('.png'):
            image = Image.open(requests.get(tileUrl, stream=True).raw)
            return image
        elif resource.endswith('.json'):
            return utils.process(tileUrl, '', isGET=True)
        elif resource.endswith('.geojson'):
            jsonFile = utils.process(tileUrl, isGET=True)
            if 'features' not in jsonFile:
                spaceKnowLogger.error("Invalid resource from %s" % tileUrl)
                return None
            else:
                return jsonFile['features']
        elif resource in self.resources:
            raise SpaceKnowError("Resource not available at the moment", 404)
        else:
            raise SpaceKnowError("Unknown resource for the object %s" % self.mapType, 404)
    except Exception as e:
        spaceKnowLogger.error("Error downloading resource %s: %s" % (resource, e))
def build(self): """ Append strace host binary to the initramfs for debugging purposes @return: bool """ logging.debug('>>> entering initramfs.append.bin.strace') strace_bin = '/usr/bin/strace' process('mkdir -p %s' % self.temp['work']+'/initramfs-bin-strace-temp/bin', self.verbose) # use from host logging.debug('initramfs.append.bin_strace from ' + white('host')) process('cp %s %s/initramfs-bin-strace-temp/bin' % (strace_bin, self.temp['work']), self.verbose) process('chmod +x %s/initramfs-bin-strace-temp/bin/strace' % self.temp['work'], self.verbose) if not isstatic(strace_bin, self.verbose) and self.cli['dynlibs'] is True: strace_libs = listdynamiclibs(strace_bin, self.verbose) process('mkdir -p %s' % self.temp['work']+'/initramfs-bin-strace-temp/lib', self.verbose) print(yellow(' * ') + '... ' + yellow('warning')+': '+strace_bin+' is dynamically linked, copying detected libraries') for i in strace_libs: print(green(' * ') + '... ' + i) process('cp %s %s' % (i, self.temp['work']+'/initramfs-bin-strace-temp/lib'), self.verbose) else: logging.debug(strace_bin+' is statically linked nothing to do') os.chdir(self.temp['work']+'/initramfs-bin-strace-temp') return os.system('find . -print | cpio --quiet -o -H newc --append -F %s/initramfs-cpio' % self.temp['cache'])
def build(self): """ Append dmraid to initramfs from the host @return: bool """ logging.debug('>>> entering initramfs.append.bin.dmraid') dmraid_bin = '/usr/sbin/dmraid' process('mkdir -p ' + self.temp['work']+'/initramfs-bin-dmraid-temp/bin', self.verbose) # use from host logging.debug('initramfs.append.bin_dmraid from %s' % white('host')) process('cp %s %s/initramfs-bin-dmraid-temp/bin' % (dmraid_bin, self.temp['work']), self.verbose) process('chmod +x %s/initramfs-bin-dmraid-temp/bin/dmraid' % self.temp['work'], self.verbose) if not isstatic(dmraid_bin, self.verbose) and self.cli['dynlibs'] is True: dmraid_libs = listdynamiclibs(dmraid_bin, self.verbose) process('mkdir -p %s' % self.temp['work']+'/initramfs-bin-dmraid-temp/lib', self.verbose) print(yellow(' * ') + '... ' + yellow('warning')+': '+dmraid_bin+' is dynamically linked, copying detected libraries') for i in dmraid_libs: print(green(' * ') + '... ' + i) process('cp %s %s' % (i, self.temp['work']+'/initramfs-bin-dmraid-temp/lib'), self.verbose) else: logging.debug(dmraid_bin+' is statically linked nothing to do') # FIXME ln -sf raid456.ko raid45.ko ? # FIXME is it ok to have no raid456.ko? if so shouldn't we check .config for inkernel feat? # or should we raise an error and make the user enabling the module manually? warning? os.chdir(self.temp['work']+'/initramfs-bin-dmraid-temp') return os.system('find . -print | cpio --quiet -o -H newc --append -F %s/initramfs-cpio' % self.temp['cache'])
def build(self): """ Append blkid binary from the host @return: bool """ logging.debug('>>> entering initramfs.append.bin_disklabel') blkid_sbin = '/sbin/blkid' process('mkdir -p %s' % self.temp['work']+'/initramfs-bin-disklabel-temp/bin', self.verbose) # use from host logging.debug('initramfs.append.bin_disklabelfrom %s' % white('host')) process('cp %s %s/initramfs-bin-disklabel-temp/bin' % (blkid_sbin, self.temp['work']), self.verbose) process('chmod +x %s/initramfs-bin-disklabel-temp/bin/blkid' % self.temp['work'], self.verbose) if not isstatic(blkid_sbin, self.verbose) and self.cli['dynlibs'] is True: blkid_libs = listdynamiclibs(blkid_sbin, self.verbose) process('mkdir -p %s' % self.temp['work']+'/initramfs-bin-blkid-temp/lib', self.verbose) print(yellow(' * ') + '... ' + yellow('warning')+': '+blkid_sbin+' is dynamically linked, copying detected libraries') for i in blkid_libs: print(green(' * ') + '... ' + i) process('cp %s %s' % (i, self.temp['work']+'/initramfs-bin-blkid-temp/lib'), self.verbose) else: logging.debug(blkid_sbin+' is statically linked nothing to do') os.chdir(self.temp['work']+'/initramfs-bin-disklabel-temp') return os.system('find . -print | cpio --quiet -o -H newc --append -F %s/initramfs-cpio' % self.temp['cache'])
def build(self): """ e2fsprogs build sequence @return bool """ ret = zero = int('0') if os.path.isfile('%s/e2fsprogs-%s.tar.gz' % (get_distdir(self.temp), self.e2fsprogs_ver)) is not True: print(green(' * ') + '... e2fsprogs.download') if self.download() is not zero: process('rm %s/e2fsprogs-%s.tar.gz' % (get_distdir(self.temp), self.e2fsprogs_ver), self.verbose) self.fail('download') print(green(' * ') + '... e2fsprogs.extract') self.extract() # grr, tar thing to not return 0 when success print(green(' * ') + '... e2fsprogs.configure') if self.configure() is not zero: self.fail('configure') print(green(' * ') + '... e2fsprogs.make') if self.make() is not zero: self.fail('make') print(green(' * ') + '... e2fsprogs.strip') if self.strip() is not zero: self.fail('strip') print(green(' * ') + '... e2fsprogs.compress') if self.compress() is not zero: self.fail('compress') print(green(' * ') + '... e2fsprogs.cache') if self.cache() is not zero: self.fail('cache')
def build(self): """ lvm2 build sequence @return: bool """ zero = int('0') if os.path.isfile('%s/LVM2.%s.tgz' % (get_distdir(self.temp), self.lvm2_ver)) is not True: if self.download() is not zero: process('rm -v %s/LVM2.%s.tgz' % (get_distdir(self.temp), self.lvm2_ver), self.verbose) self.fail('download') self.extract() # grr, tar thing to not return 0 when success if self.configure() is not zero: self.fail('configure') if self.make() is not zero: self.fail('make') if self.install() is not zero: self.fail('install') if self.strip() is not zero: self.fail('strip') if self.compress() is not zero: self.fail('compress') if self.cache() is not zero: self.fail('cache')
def build(self): """ Append blkid binary from the host @return: bool """ logging.debug(">>> entering initramfs.append.bin_disklabel") blkid_sbin = "/sbin/blkid" process("mkdir -p %s" % self.temp["work"] + "/initramfs-bin-disklabel-temp/bin", self.verbose) # use from host logging.debug("initramfs.append.bin_disklabelfrom %s" % white("host")) process("cp %s %s/initramfs-bin-disklabel-temp/bin" % (blkid_sbin, self.temp["work"]), self.verbose) process("chmod +x %s/initramfs-bin-disklabel-temp/bin/blkid" % self.temp["work"], self.verbose) if not isstatic(blkid_sbin, self.verbose) and self.cli["dynlibs"] is True: blkid_libs = listdynamiclibs(blkid_sbin, self.verbose) process("mkdir -p %s" % self.temp["work"] + "/initramfs-bin-blkid-temp/lib", self.verbose) print( yellow(" * ") + "... " + yellow("warning") + ": " + blkid_sbin + " is dynamically linked, copying detected libraries" ) for i in blkid_libs: print(green(" * ") + "... " + i) process("cp %s %s" % (i, self.temp["work"] + "/initramfs-bin-blkid-temp/lib"), self.verbose) else: logging.debug(blkid_sbin + " is statically linked nothing to do") os.chdir(self.temp["work"] + "/initramfs-bin-disklabel-temp") return os.system("find . -print | cpio --quiet -o -H newc --append -F %s/initramfs-cpio" % self.temp["cache"])
def source_strace(self):
    """ Append strace from sources to the initramfs for debugging purposes

    @return: bool
    """
    logging.debug('>>> entering initramfs.append.source_strace')
    strace_bin = '/usr/bin/strace'
    process('mkdir -p %s' % self.temp['work'] + '/initramfs-source-strace-temp/bin', self.verbose)
    logging.debug('initramfs.append.source_strace ' + self.version_conf['strace-version'])
    if os.path.isfile(self.temp['cache'] + '/strace-' + self.version_conf['strace-version'] + '.bz2') and self.nocache is False:
        # use cache
        print(green(' * ') + '... ' + 'cache found: importing')
    else:
        # compile
        from .sources.strace import strace
        strobj = strace(self.master_conf, self.version_conf, self.url_conf, self.temp, self.verbose)
        strobj.build()
    # extract cache
    # FIXME careful with the >
    logging.debug('/bin/bzip2 -dc %s/strace-%s.bz2 > %s/initramfs-source-strace-temp/bin/strace' % (self.temp['cache'], self.version_conf['strace-version'], self.temp['work']))
    os.system('/bin/bzip2 -dc %s/strace-%s.bz2 > %s/initramfs-source-strace-temp/bin/strace' % (self.temp['cache'], self.version_conf['strace-version'], self.temp['work']))
    process('chmod +x %s/initramfs-source-strace-temp/bin/strace' % self.temp['work'], self.verbose)
    os.chdir(self.temp['work'] + '/initramfs-source-strace-temp')
    return os.system(self.cpio())
def hostsshkeys_dsa(self):
    """ dropbear host DSA ssh key conversion """
    self.chgdir(self.dropbeartmp)
    process('mkdir -p %s/etc/dropbear' % self.dropbeartmp, self.verbose)
    return process('./dropbearconvert openssh dropbear /etc/ssh/ssh_host_dsa_key %s/etc/dropbear/dropbear_dss_host_key' % self.dropbeartmp, self.verbose)
def run():
    # example: --breach_compilation_folder /media/philippe/DATA/BreachCompilation/
    #          --max_num_files 100 --output_folder ~/BreachCompilationAnalysis2
    arg_p = parser.parse_args()
    process(breach_compilation_folder=arg_p.breach_compilation_folder,
            num_files=arg_p.max_num_files,
            output_folder=arg_p.output_folder,
            on_file_read_call_back_class=ReducePasswordsOnSimilarEmailsCallback)
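# Sketch of the module-level parser that run() above relies on; the flag names
# mirror the usage example in the comment, while the help strings are assumptions.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--breach_compilation_folder', required=True,
                    help='root of the BreachCompilation dump')
parser.add_argument('--max_num_files', type=int, default=None,
                    help='cap on the number of files to read')
parser.add_argument('--output_folder', required=True,
                    help='where the analysis output is written')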
def dsskey(self):
    """ dropbear dsskey creation """
    self.chgdir(self.dropbeartmp)
    process('mkdir -p %s/etc/dropbear' % self.dropbeartmp, self.verbose)
    return process('./dropbearkey -t dss -f %s/etc/dropbear/dropbear_dss_host_key' % self.dropbeartmp, self.verbose)
def rsakey(self):
    """ dropbear rsakey creation """
    self.chgdir(self.dropbeartmp)
    process('mkdir -p %s/etc/dropbear' % self.dropbeartmp, self.verbose)
    return process('./dropbearkey -t rsa -s 4096 -f %s/etc/dropbear/dropbear_rsa_host_key' % self.dropbeartmp, self.verbose)
def run():
    # example: --breach_compilation_folder /media/philippe/DATA/BreachCompilation/
    #          --max_num_files 100 --output_folder ~/BreachCompilationAnalysis2
    arg_p = parser.parse_args()
    process(
        breach_compilation_folder=arg_p.breach_compilation_folder,
        num_files=arg_p.max_num_files,
        output_folder=arg_p.output_folder,
        on_file_read_call_back_class=ReducePasswordsOnSimilarEmailsCallback)
def post(self):
    retry = self.request.get('retry')
    failures = self.request.headers.get("X-AppEngine-TaskRetryCount")
    eta_test = self.request.get('eta')
    eta = self.request.headers.get("X-AppEngine-TaskETA")
    if retry == 'true' and failures == "0":
        raise Exception
    elif eta_test == 'true':
        utils.processEta(self.request.get('key'), eta)
    else:
        utils.process(self.request.get('key'))
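# Hedged sketch of how a task reaching the handler above could be enqueued
# with the App Engine (Python 2) task queue API; the '/worker' route and the
# parameter names are assumptions based on what the handler reads.
from google.appengine.api import taskqueue

def enqueue(key, retry=False, eta=False):
    taskqueue.add(url='/worker',
                  params={'key': key,
                          'retry': 'true' if retry else 'false',
                          'eta': 'true' if eta else 'false'})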
def source_lvm2(self):
    """ Append lvm2 compiled binary to the initramfs

    @return: bool
    """
    logging.debug('>>> entering initramfs.append.source_lvm2')
    lvm2_static_bin = '/sbin/lvm.static'
    lvm2_bin = '/sbin/lvm'
    process('mkdir -p ' + self.temp['work'] + '/initramfs-source-lvm2-temp/etc/lvm', self.verbose)
    process('mkdir -p ' + self.temp['work'] + '/initramfs-source-lvm2-temp/bin', self.verbose)
    logging.debug('initramfs.append.source_lvm2 ' + self.version_conf['lvm2-version'])
    if os.path.isfile(self.temp['cache'] + '/lvm.static-' + self.version_conf['lvm2-version'] + '.bz2') and self.nocache is False:
        # use cache
        print(green(' * ') + '... ' + 'cache found: importing')
    else:
        # compile and cache
        from .sources.lvm2 import lvm2
        lvm2obj = lvm2(self.master_conf, self.version_conf, self.url_conf, self.temp, self.verbose)
        lvm2obj.build()
    # extract cache
    os.system('bzip2 -dc %s > %s/initramfs-source-lvm2-temp/bin/lvm' % (self.temp['cache'] + '/lvm.static-' + self.version_conf['lvm2-version'] + '.bz2', self.temp['work']))
    process('chmod a+x %s/initramfs-source-lvm2-temp/bin/lvm' % self.temp['work'], self.verbose)
    # FIXME tell the user about this so they can tweak lvm.conf beforehand
    if os.path.isfile(lvm2_static_bin) or os.path.isfile(lvm2_bin):
        process('cp /etc/lvm/lvm.conf %s/initramfs-source-lvm2-temp/etc/lvm/' % self.temp['work'], self.verbose)
    os.chdir(self.temp['work'] + '/initramfs-source-lvm2-temp')
    return os.system(self.cpio())
def build(self): """ Append host zlib libraries to the initramfs @return: bool """ logging.debug('>>> entering initramfs.append.bin_zlib') process('mkdir -p %s' % self.temp['work']+'/initramfs-bin-zlib-temp/lib', self.verbose) print(green(' * ') + '... ' + '/lib/libz.so.1') process('cp /lib/libz.so.1 %s' % self.temp['work']+'/initramfs-bin-zlib-temp/lib', self.verbose) os.chdir(self.temp['work']+'/initramfs-bin-zlib-temp') return os.system('find . -print | cpio --quiet -o -H newc --append -F %s/initramfs-cpio' % self.temp['cache'])
def callback(ch, method, properties, body):
    """ callback mechanism

    :param ch: channel
    :param method: method
    :param properties: properties
    :param body: message body
    :return:
    """
    print(" [x] Received %r" % body)
    utils.process(data=body)
    time.sleep(1)
    print(" [x] Done")
    ch.basic_ack(delivery_tag=method.delivery_tag)
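# How a callback like the one above is typically wired up with pika -- a
# sketch assuming a local RabbitMQ broker and a queue named 'task_queue'
# (both placeholders, not taken from the original module).
import pika

connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.queue_declare(queue='task_queue', durable=True)
channel.basic_qos(prefetch_count=1)  # hand this worker one message at a time
channel.basic_consume(queue='task_queue', on_message_callback=callback)
channel.start_consuming()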
def addition(message):
    login_data = message.text.split()
    if len(login_data) == 2:
        user = utils.login(*login_data)
        if user:
            bot.send_message(message.chat.id, 'It may take a few minutes.')
            utils.process(user, fsm_obj.get_extra_state(message.chat.id, 'movies'))
            fsm_obj.remove_extra_state(message.chat.id)
            fsm_obj.remove_state(message.chat.id)
            bot.send_message(message.chat.id, 'Done. /start')
        else:
            wrong_password(message)
    else:
        invalid_format(message)
def plugin(self, dir):
    """ Append user-generated file structure

    @return: bool
    """
    logging.debug('>>> entering initramfs.append.plugin')
    print(green(' * ') + turquoise('initramfs.append.plugin ') + dir)
    print(yellow(' * ') + '... ' + yellow('warning') + ': plugin may overwrite kigen files')
    process('mkdir -p ' + self.temp['work'] + '/initramfs-plugin-temp/', self.verbose)
    process('cp -ar %s/* %s' % (dir, self.temp['work'] + '/initramfs-plugin-temp/'), self.verbose)
    os.chdir(self.temp['work'] + '/initramfs-plugin-temp')
    return os.system(self.cpio())
def callback(self, ros_data):
    '''Callback function of the subscribed topic.
    Here images get converted and features detected.'''
    if VERBOSE:
        print('received image of type: "%s"' % ros_data.format)

    #### direct conversion to CV2 ####
    np_arr = np.frombuffer(ros_data.data, np.uint8)  # np.fromstring is deprecated
    # image_np = cv2.imdecode(np_arr, cv2.CV_LOAD_IMAGE_COLOR)
    image_np = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)  # OpenCV >= 3.0

    processed_imag, command = process(image_np)
    print(command, '\n')
    rospy.loginfo(command)

    cv2.namedWindow('processed', cv2.WINDOW_NORMAL)
    cv2.imshow('processed', processed_imag)
    cv2.resizeWindow('processed', 600, 600)
    cv2.namedWindow('image', cv2.WINDOW_NORMAL)
    cv2.imshow('image', image_np)
    cv2.resizeWindow('image', 600, 600)
    cv2.waitKey(5)

    '''
    #### Create CompressedImage ####
    msg = CompressedImage()
    msg.header.stamp = rospy.Time.now()
    msg.format = "jpeg"
    msg.data = np.array(cv2.imencode('.jpg', image_np)[1]).tostring()
    '''
    # Publish new image
    self.image_pub.publish(command)
def tokenize(self, path, training_set=False):
    """Tokenizes a text file."""
    assert os.path.exists(path)
    with open(path) as fin:
        num_lines = sum(1 for _ in fin)
    with open(path, 'r', encoding="utf8") as f:
        words = []
        for i, line in enumerate(tqdm(f, total=num_lines)):
            if self.max_lines > 0 and i > self.max_lines:
                break
            line = line.strip()
            if not line:
                continue  # Skip empty lines.
            elif line.startswith('='):
                continue  # Skip headers.
            else:
                sentence = (self.order - 1) * [SOS] + \
                    [process(word, self.lower) for word in line.split()] + [EOS]
                if training_set:
                    words.extend(sentence)
                    self.vocab.update(sentence)
                else:
                    sentence = [word if word in self.vocab else UNK
                                for word in sentence]
                    words.extend(sentence)
    return words
def getCreditsAvailable(token, permissions):
    validateAccessRights([os.getenv('CREDITS_AVAILABLE')], permissions)
    url = utils.SK_CREDIT_API + '/get-remaining-credit'
    response = process(url=url, token=token)
    if 'remainingCredit' not in response:
        raise SpaceKnowError('Invalid response from server', 500)
    return response['remainingCredit']
def telemetry(sid, data):
    if data:
        steering_angle = float(data["steering_angle"])
        throttle = float(data["throttle"])
        speed = float(data["speed"])
        # print(steering_angle, throttle, speed)
        image = Image.open(BytesIO(base64.b64decode(data["image"])))
        try:
            image = np.asarray(image)
            image = utils.process(image)
            image = image / 255.0
            image = np.array([image])
            steering_angle = float(model.predict(image, batch_size=1))

            global speed_limit
            if speed > speed_limit:
                speed_limit = MIN_SPEED  # slow down
            else:
                speed_limit = MAX_SPEED
            throttle = 1.0 - (steering_angle ** 2) - ((speed / speed_limit) ** 2)
            # throttle = 1.0

            print('{} {} {}'.format(steering_angle, throttle, speed))
            send_control(steering_angle, throttle)
        except Exception as e:
            print(e)
    else:
        sio.emit('manual', data={}, skip_sid=True)
def __retrieve(self):
    pipelineId = json.dumps({"pipelineId": self.id})
    utils.spaceKnowLogger.debug("Retrieve pipeline at %s" % self.url)
    response = process(self.url + '/retrieve', data=pipelineId, token=self.token)
    return response
def four_plot(logs, top, base, depth=False):
    '''
    Function to automatically plot well logs.
    Returns a plot of four logs (Gamma ray, Porosity, Density and Resistivity).
    args::
        logs: Dataframe object of well logs
        depth: Set to False or leave as default to use the dataframe index;
               set to a column title if that column should be used as depth
    '''
    logs = process(logs)
    if depth == False:
        logs['DEPTH'] = logs.index
        logs = logs.reset_index(drop=True)
    else:
        depth = np.array(logs[depth])
        logs = logs.reset_index(drop=True)
        logs['DEPTH'] = depth
    logs = logs.loc[(logs.DEPTH >= float(top)) & (logs.DEPTH <= float(base))]
    try:
        logs = logs.sort_values(by='DEPTH')
        f, ax = plt.subplots(nrows=1, ncols=4, figsize=(12, 10))
        for i in range(len(ax)):
            ax[i].set_ylim(top, base)
            ax[i].invert_yaxis()
            ax[i].grid()
            ax[i].locator_params(axis='x', nbins=4)
        # cap the neutron-porosity axis when the column contains inf/NaN
        if np.isinf(logs.NPHI.max()) or np.isnan(logs.NPHI.max()):
            nphi_max = 0.9
        else:
            nphi_max = logs.NPHI.max()
        ax[0].plot(logs.GR, logs.DEPTH, color='black')
        ax[1].plot(logs.NPHI, logs.DEPTH, color='c')
        ax[2].plot(logs.RHOB, logs.DEPTH, color='blue')
        ax[3].plot(logs.RT, logs.DEPTH, color='red')
        ax[0].set_xlabel("GR (API)")
        ax[0].set_xlim(logs.GR.min(), logs.GR.max())
        ax[0].set_ylabel("Depth(ft)")
        ax[0].set_title("Plot of Depth Against GR")
        ax[1].set_xlabel("NPHI (v/v)")
        ax[1].set_xlim(0, nphi_max)
        ax[1].set_title("Plot of Depth Against Neutron Porosity")
        ax[2].set_xlabel("RHOB (g/cm3)")
        ax[2].set_xlim(logs.RHOB.min(), logs.RHOB.max())
        ax[2].set_title("Plot of Depth Against Density")
        ax[3].set_xlabel("RT (ohm.m)")
        ax[3].set_xscale("log")
        ax[3].set_xlim(logs.RT.min(), logs.RT.max())
        ax[3].set_title("Plot of Depth Against Resistivity")
    except NameError as err:
        print(f'Depth column could not be located. {err}')
def __init__(self, params):
    self.p = params
    self.prj_path = Path(__file__).parent.resolve()
    self.data = load_data(self.p.dataset)
    self.num_ent, self.train_data, self.valid_data, self.test_data, self.num_rels = \
        self.data.num_nodes, self.data.train, self.data.valid, self.data.test, self.data.num_rels
    self.triplets = process({'train': self.train_data,
                             'valid': self.valid_data,
                             'test': self.test_data}, self.num_rels)
    if self.p.gpu != -1 and torch.cuda.is_available():
        self.device = torch.device(f'cuda:{self.p.gpu}')
        # -------------------------------
        # torch.cuda.set_rng_state(torch.cuda.get_rng_state())
        # torch.backends.cudnn.deterministic = True
        # -------------------------------
    else:
        self.device = torch.device('cpu')
    self.p.embed_dim = self.p.k_w * self.p.k_h if self.p.embed_dim is None else self.p.embed_dim  # output dim of gnn
    self.data_iter = self.get_data_iter()
    self.g = self.build_graph()
    self.edge_type, self.edge_norm = self.get_edge_dir_and_norm()
    self.model = self.get_model()
    self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.p.lr, weight_decay=self.p.l2)
    self.best_val_mrr, self.best_epoch, self.best_val_results = 0., 0., {}
    pprint(vars(self.p))
def build(self): """ Busybox build sequence command @return bool """ zero = int("0") if os.path.isfile("%s/busybox-%s.tar.bz2" % (get_distdir(self.temp), str(self.bb_version))) is not True: if self.download() is not zero: process("rm -v %s/busybox-%s.tar.bz2" % (get_distdir(self.temp), str(self.bb_version)), self.verbose) self.fail("download") if self.extract() is not zero: self.fail("extract") # FIXME compare bb .config version and the one from version.Conf # print str(self.bb_version) # bb_dotconfig = bbdotconfigversion() # print bb_dotconfig if self.copy_config() is not zero: self.fail("copy_config") if self.defconfig is True: if self.make_defconfig() is not zero: self.fail("defconfig") if self.oldconfig is True: if self.make_oldconfig() is not zero: self.fail("oldconfig") if self.menuconfig is True: if self.make_menuconfig() is not zero: self.fail("menuconfig") if self.make() is not zero: self.fail("make") if self.strip() is not zero: self.fail("stip") if self.compress() is not zero: self.fail("compress") if self.cache() is not zero: self.fail("cache")
def cache(self): """ dmraid tarball cache routine @return: bool """ self.chgdir(self.dmraidtmp) return process('mv %s/tools/dmraid.bz2 %s/dmraid.static-%s.bz2' % (self.dmraidtmp, self.temp['cache'], self.dmraid_ver), self.verbose)
def cache(self): """ screen tarball cache routine @return: bool """ self.chgdir(self.screentmp) return process('cp %s/screen.bz2 %s/screen-%s.bz2' % (self.screentmp, self.temp['cache'], self.screen_ver), self.verbose)
def compress(self): """ blkid compression routine @return: bool """ self.chgdir(self.lukstmp) return process('bzip2 %s/src/cryptsetup.static' % self.lukstmp, self.verbose)
def strip(self): """ blkid strip binary routine @return: bool """ self.chgdir(self.lukstmp) return process('strip %s/src/cryptsetup.static' % self.lukstmp, self.verbose)
def cache(self): """ blkid tarball cache routine @return: bool """ self.chgdir(self.lukstmp) return process('mv %s/src/cryptsetup.static.bz2 %s/cryptsetup-%s.bz2' % (self.lukstmp, self.temp['cache'], self.luks_ver), self.verbose)
def cache(self): """ strace tarball cache routine @return: bool """ self.chgdir(self.stracetmp) return process('mv %s/strace.bz2 %s/strace-%s.bz2' % (self.stracetmp, self.temp['cache'], self.strace_ver), self.verbose)
def cache(self): """ blkid tarball cache routine @return: bool """ self.chgdir(self.e2tmp) return process('mv %s/misc/blkid.bz2 %s/blkid-e2fsprogs-%s.bz2' % (self.e2tmp, self.temp['cache'], self.e2fsprogs_ver), self.verbose)
def cache(self): """ dropbear tarball cache routine @return: bool """ self.chgdir(self.dropbeartmp) return process('mv %s/dropbear.tar %s/dropbear-%s.tar' % (self.dropbeartmp, self.temp['cache'], self.dropbear_ver), self.verbose)
def two_plots(logs, x1, x2, top, base, depth=False):
    '''
    Function to automatically plot well logs.
    Returns a plot of two logs (x1, x2).
    args::
        logs: Dataframe object of well logs
        depth: Set to False or leave as default to use the dataframe index;
               set to a column title if that column should be used as depth
    '''
    logs = process(logs)
    # Set the y-axis values, using the index or the specified depth column
    if depth == False:
        logs['DEPTH'] = logs.index
        logs = logs.reset_index(drop=True)
    else:
        depth = np.array(logs[depth])
        logs = logs.reset_index(drop=True)
        logs['DEPTH'] = depth
    # logs = logs.loc[(logs.DEPTH >= float(top)) & (logs.DEPTH <= float(base))]
    try:
        logs = logs.sort_values(by='DEPTH')
        f, ax = plt.subplots(nrows=1, ncols=2, figsize=(8, 10))
        for i in range(len(ax)):
            ax[i].set_ylim(top, base)
            ax[i].invert_yaxis()
            ax[i].grid()
            ax[i].locator_params(axis='x', nbins=4)
        ax[0].plot(logs[x1], logs.DEPTH, color='black')
        ax[1].plot(logs[x2], logs.DEPTH, color='c')
        ax[0].set_xlabel(f"{x1} ")
        if x1 == 'RT':
            ax[0].set_xscale("log")
        ax[0].set_xlim(logs[x1].min(), logs[x1].max())
        ax[0].set_ylabel("Depth(ft)")
        ax[0].set_title(f"Plot of Depth Against {x1}")
        ax[1].set_xlabel(f"{x2} ")
        if x2 == 'RT':
            ax[1].set_xscale("log")
        ax[1].set_xlim(logs[x2].min(), logs[x2].max())
        ax[1].set_title(f"Plot of Depth Against {x2}")
    except NameError as err:
        print(f'Depth column could not be located. {err}')
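# Example usage of the two plotting helpers above -- a sketch; the CSV path,
# column names, and depth interval are placeholders.
import pandas as pd

logs = pd.read_csv('well_logs.csv', index_col=0)
four_plot(logs, top=5000, base=7000)
two_plots(logs, 'GR', 'RT', top=5000, base=7000)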
def predict():
    ## my own code
    data = request.form['text1']
    if request.form.get("POS", None) == 'POS Tagging':
        tokens = word_tokenize(data)
        encoded_sent = app.tokenizer.texts_to_sequences([tokens])[0]
        encoded_sent = pad_sequences([encoded_sent], maxlen=max_length, padding='post')
        pred = app.pos_tagger.predict(encoded_sent)
        sequence_tags = []
        for sequence in pred:
            sequence_tag = []
            for categorical in sequence:
                sequence_tag.append(app.index_tag.get(np.argmax(categorical)))
            sequence_tags.append(sequence_tag)
        res1 = sequence_tags[0][:len(tokens)]
        res2 = []
        for tok, tag in zip(tokens, res1):
            res2.append((tok, tag))
        class_ = res2
        # return render_template('index.html', pred=class_)
    elif request.form.get("NER", None) == 'Named Entity Recognition':
        class_ = None
    elif request.form.get("SENTIMENT", None) == "Sentiment Analysis":
        processed_review = process(data)
        encoded_review = app.sa_tokenizer.texts_to_sequences([processed_review])[0]
        encoded_review = pad_sequences([encoded_review], maxlen=150, padding='post', truncating='post')
        pre = app.sa_model.predict(encoded_review)
        if pre[0][0] > 0.6:
            # print('Positive with {}%'.format(pred[0][0] * 100))
            prcnt = str(pre[0][0] * 100)
            class_ = 'Positive ' + prcnt
        else:
            # print('Negative with {}%'.format(100 - pred[0][0] * 100))
            prcnt = str(100 - pre[0][0] * 100)
            class_ = 'Negative ' + prcnt
    elif request.form.get("CLASSIFICATION", None) == "Text Classification":
        class_ = None
    elif request.form.get("SUMMARIZATION", None) == "Text Summarization":
        class_ = None
    else:
        return None
    return render_template('index.html', pred=class_)
def test():
    # Python 2 snippet; the Chinese label strings are kept verbatim because
    # they must match the dataset's class labels.
    x, y = utils.read_file(is_train=True, label_list=['人类作者', '自动摘要'])
    x = utils.process(x)
    x = utils.truncation(x)
    word2id, id2word, tag2id, id2tag = utils.build_vocab(x, y, min_df=10)
    x = utils.build_x_ids(x, word2id)
    y = utils.build_y_ids(y, tag2id)
    data = zip(x, y)
    train_data, dev_data = train_test_split(data, test_size=10000, random_state=24)
    vocab_size = len(word2id)
    emb_dim = 100
    num_classes = len(tag2id)
    print "Training set size: %d  Dev set size: %d" % (len(train_data), len(dev_data))
    print "vocab_size: %d  num_classes: %d" % (vocab_size, num_classes)
    print FLAGS.model_name
    model_dir = os.path.join('temp', 'nn')
    if not os.path.exists(model_dir):
        os.mkdir(model_dir)
    with tf.Session() as sess:
        model = getattr(models, FLAGS.model_name)(vocab_size, emb_dim, num_classes)
        saver = tf.train.Saver(tf.global_variables())
        model_file = os.path.join('temp', 'nn', FLAGS.model_file)
        saver.restore(sess, model_file)
        print "Restored model from %s" % model_file
        dev_loss = []
        labels = []
        predicts = []
        bar = ProgressBar(max_value=len(dev_data) // FLAGS.batch_size + 1)
        for batch_data in bar(utils.minibatches(dev_data, FLAGS.batch_size, True, shuffle=False)):
            loss, predict = model.dev_step(sess, batch_data)
            dev_loss.append(loss)
            labels.extend(batch_data[1])
            predicts.extend(predict)
        dev_loss = np.mean(dev_loss)
        dev_f1 = utils.score_all(labels, predicts, tag2id)
        utils.error_print(predicts, labels, id2tag, zip(*dev_data)[0], id2word)
        print "loss:%.3f f1:%.3f" % (dev_loss, dev_f1)
def gen_curr_img(self, iteration, kernel_size, color_idx):
    """Generate the convolution result after training."""
    # model method
    curr_img_ori = self.G.forward(self.test_input_ori, kernel_size)
    curr_img_val = self.G.forward(self.test_input_val, kernel_size)
    # process to rgb
    curr_img_ori = process(curr_img_ori, self.wb, self.color_idx)
    curr_img_val = process(curr_img_val, self.wb, self.color_idx)
    # full convolution method
    # for ind, w in enumerate(self.G.parameters()):
    #     curr_img = F.conv2d(self.test_input, w, padding=self.conf.G_kernel_size - 1) if ind == 0 else F.conv2d(curr_img, w)

    # transfer to cpu arrays
    curr_img_ori = curr_img_ori.squeeze().detach().cpu().float().numpy()
    curr_img_val = curr_img_val.squeeze().detach().cpu().float().numpy()
    # crop the border introduced by the generator's kernel
    pad = int((self.conf.G_kernel_size - 1) / 2)
    curr_img_ori = curr_img_ori[pad:-pad, pad:-pad]
    curr_img_val = curr_img_val[pad:-pad, pad:-pad]
    cv2.imwrite(
        os.path.join(self.conf.output_dir, self.conf.img_name, "ori_img_pred",
                     "{:05d}_image_{:s}.png".format(iteration, color_idx)),
        curr_img_ori * 255.)
    cv2.imwrite(
        os.path.join(self.conf.output_dir, self.conf.img_name, "val_img_pred",
                     "{:05d}_image_{:s}.png".format(iteration, color_idx)),
        curr_img_val * 255.)
def profileProcess(chunks):
    processtimes = []
    for chunk in chunks:
        t1 = time.time()
        data = utils.process("chunks/" + chunk)
        t2 = time.time()
        processtimes.append(t2 - t1)
    totalProcesstime = 0
    for processtime in processtimes:
        totalProcesstime = totalProcesstime + processtime
    avgProcesstime = totalProcesstime / len(chunks)
    print "Average process time:", avgProcesstime
    print "Total process time:", totalProcesstime
    print "Number of chunks processed:", len(chunks)
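# Possible driver for the profiler above -- a sketch assuming the chunk files
# live in a local 'chunks/' directory, which is what profileProcess() prepends.
import os

chunks = sorted(os.listdir("chunks"))
profileProcess(chunks)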
def profileProcess(images):
    processtimes = []
    for image in images:
        t1 = time.time()
        data = utils.process(image)
        t2 = time.time()
        processtimes.append(t2 - t1)
    totalProcesstime = 0
    for processtime in processtimes:
        totalProcesstime = totalProcesstime + processtime
    avgProcesstime = totalProcesstime / len(images)
    print "Average process time:", avgProcesstime
    print "Total process time:", totalProcesstime
    print "Number of images processed:", len(images)
def predict():
    ################################################################################
    #                                   NN model                                   #
    ################################################################################
    # Python 2 snippet; the Chinese label strings are kept verbatim because
    # they must match the dataset's class labels.
    x, y = utils.read_file(is_train=True, label_list=['人类作者', '自动摘要', '机器作者', '机器翻译'])
    x = utils.process(x)
    x = utils.truncation(x)
    word2id, id2word, tag2id, id2tag = utils.build_vocab(x, y, min_df=10)
    test_x = utils.read_file(is_train=False)
    test_x = utils.process(test_x)
    test_x = utils.truncation(test_x)
    test_x = utils.build_x_ids(test_x, word2id)
    vocab_size = len(word2id)
    emb_dim = 100
    num_classes = len(tag2id)
    print "Test set size: %d" % (len(test_x))
    print "vocab_size: %d  num_classes: %d" % (vocab_size, num_classes)
    results = []
    g1 = Graph('TextCNN', 'HierarchyCNN', vocab_size, emb_dim, num_classes)
    results.append(g1.run(test_x))
    ################################################################################
    #                                  Other model                                 #
    ################################################################################
    ################################################################################
    #                                   Ensemble                                   #
    ################################################################################
    final_result = ensemble(results)
    utils.generate_result(final_result, id2tag, 'result_nn.csv')
def _build_node(self, data, tree):
    # list subtree
    if data and type(data) == list:
        for e in data:
            if e:
                # create a new element to contain sub-elements
                node = et.SubElement(tree, params.element_name)
                # text or empty value
                if type(e) == str:
                    utils.process(e, node)
                # node value
                else:
                    self._build_node(e, node)
    # dict subtree
    elif data and type(data) == dict:
        for k, e in data.items():
            # empty nodes
            if not e:
                # include selected empty nodes
                if type(params.empty_nodes) == list:
                    if k in params.empty_nodes:
                        et.SubElement(tree, str(k))
                # include all empty nodes
                elif params.empty_nodes == 'any':
                    et.SubElement(tree, str(k))
                else:
                    continue
            # non-empty nodes
            else:
                node = et.SubElement(tree, str(k))
                # text or empty value
                if type(e) == str or type(e) == int:
                    utils.process(e, node)
                # node value
                else:
                    self._build_node(e, node)
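# Hedged companion for _build_node() above: serialize a nested dict/list into
# an XML string. The method name and root tag are assumptions;
# params.element_name and params.empty_nodes come from the module's own config.
import xml.etree.ElementTree as et

def build(self, data, root_tag='root'):
    tree = et.Element(root_tag)
    self._build_node(data, tree)
    return et.tostring(tree, encoding='unicode')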
def process_images(base_path, save_path, file):
    corrupt_files = open(os.path.join(save_path, 'corrupt_files.txt'), 'w')
    os.chdir(base_path)
    with open(file, 'r') as fl:
        paths = fl.read().split('\n')
    mjsynth_folder = os.path.join(save_path, 'mjsynth_processed')
    for path in paths:
        path_comp = path[2:].split(' ')[0].split('/')
        orig_img_path = os.path.join(base_path, path_comp[0], path_comp[1], path_comp[2])
        proc_img_path = os.path.join(mjsynth_folder, path_comp[0], path_comp[1])
        proc_img_name = path_comp[2]
        # Create the directory if it does not already exist.
        try:
            os.makedirs(proc_img_path)
        except FileExistsError:
            # directory already exists
            pass
        # Try to read and process the file, otherwise skip it
        try:
            img_file_path = Path(os.path.join(proc_img_path, proc_img_name))
            # Check if the file already exists; if yes, skip it.
            # Useful if the script fails in between, e.g. due to memory limit errors
            if not img_file_path.is_file():
                orig_img = io.imread(orig_img_path)
                proc_img = process(orig_img)
                io.imwrite(img_file_path, proc_img)
                # Should prevent inodes-exceeded errors
                # os.remove(orig_img_path)
        except:
            # Write the corrupt file path to a separate file to process later.
            corrupt_files.write(path + '\n')
            pass
    corrupt_files.close()
def optimize_1(imagepath_list):
    k_list = []
    for imagepath in imagepath_list:
        k_values = np.linspace(0, 0.2, num=20)
        scores = []
        l = 0
        best_k = 0
        for i in xrange(len(k_values)):
            score = process(imagepath, k_values[i])
            scores.append(score)
            if score > l:
                l = score
                best_k = k_values[i]
            print 1
        # plt.plot(k_values, scores)
        # plt.show()
        # print 'best k value - ', best_k
        k_list.append(best_k)
    return sum(k_list) / len(k_list)
def tok(ms, word2idx):
    all_ids = []
    all_smiles = process(ms)
    i = 0
    for smiles in all_smiles:
        i += 1
        id = []
        words = smiles
        if len(words) < 141:
            for word in words:
                id += [word2idx[word]]
            while len(id) < 141:
                id += [0]  # pad with zeros up to the fixed length
            all_ids.append(id)
        else:
            print(i, words)
    return torch.LongTensor(all_ids)
def build_vocab(pkl, threshold=0):
    """Build a simple vocabulary wrapper."""
    data = pickle.load(open(pkl, 'rb'))
    answers = [d['answer'] for d in data]
    counter = Counter()
    for answer in answers:
        tokens = process(answer).split(' ')
        counter.update(tokens)

    # If a word's frequency is less than 'threshold', the word is discarded.
    words = [word for word, cnt in counter.items() if cnt >= threshold]

    # Create a vocab wrapper and add some special tokens.
    vocab = Vocabulary()
    vocab.add_word('<pad>')
    vocab.add_word('<unk>')

    # Add the words to the vocabulary.
    for i, word in enumerate(words):
        vocab.add_word(word)
    # print(vocab.word2idx)
    return vocab
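# Minimal sketch of the Vocabulary wrapper assumed by build_vocab() above;
# only add_word() and word2idx are exercised there, so the rest is a common
# pattern, not the original class.
class Vocabulary(object):
    def __init__(self):
        self.word2idx = {}
        self.idx2word = {}

    def add_word(self, word):
        if word not in self.word2idx:
            idx = len(self.word2idx)
            self.word2idx[word] = idx
            self.idx2word[idx] = word

    def __call__(self, word):
        # map out-of-vocabulary words to <unk>
        return self.word2idx.get(word, self.word2idx['<unk>'])

    def __len__(self):
        return len(self.word2idx)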
def on_press(self, event):
    value = self.text_ctrl.GetValue()
    if not value:
        print("You didn't enter anything!")
    else:
        self.text_ctrl.Hide()
        png = wx.Image('img/whatever.png', wx.BITMAP_TYPE_ANY).ConvertToBitmap()
        wx.StaticBitmap(self, -1, png, (0, 0), (png.GetWidth(), png.GetHeight()))
        if os.path.exists("result.json"):
            os.remove("result.json")
        wordlist = utils.extract(value)
        words = ",".join(wordlist)
        path = utils.getPath()
        utils.crawl(words)
        output = utils.process()
        utils.writelist(output, path)
        png = wx.Image('img/finish.png', wx.BITMAP_TYPE_ANY).ConvertToBitmap()
        wx.StaticBitmap(self, -1, png, (0, 0), (png.GetWidth(), png.GetHeight()))
def __initiate(self):
    utils.spaceKnowLogger.debug("Initiate pipeline at %s" % self.url)
    try:
        response = process(self.url + '/initiate', data=self.request, token=self.token)
        if 'pipelineId' not in response or 'nextTry' not in response or 'status' not in response:
            raise SpaceKnowError('Invalid response', 500)
        if response['status'] == 'FAILED':
            raise SpaceKnowError('Error during pipeline processing', 500)
        elif response['status'] == 'PROCESSING' or response['status'] == 'NEW':
            return response['nextTry'], response['pipelineId']
        # elif response['status'] == 'RESOLVED': call retrieve
        else:
            raise SpaceKnowError('Invalid status {}'.format(response['status']), 500)
    except SpaceKnowError as e:
        utils.spaceKnowLogger.error("Error %d during initiate phase: %s" % (e.status_code, e.error))
        self.error = e
        raise e
def __isReady(self):
    try:
        url = utils.SK_TASK_API + '/get-status'
        pipelineId = json.dumps({"pipelineId": self.id})
        response = process(url, data=pipelineId, token=self.token)
        if 'status' not in response or \
                (response['status'] != 'RESOLVED' and 'nextTry' not in response):
            raise SpaceKnowError(('Invalid response during checking the pipeline\'s status: %s', pipelineId), 500)
        if response['status'] == 'RESOLVED':
            return True
        elif response['status'] == 'FAILED':
            raise SpaceKnowError('Error during pipeline processing', 500)
        else:
            self.nextTry = response['nextTry']
            return False
    except SpaceKnowError as e:
        utils.spaceKnowLogger.error("Error %d during status checking at pipeline %s: %s" % (e.status_code, self.id, e.error))
        self.error = e
        raise e
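# Hedged sketch of the driver that ties __initiate/__isReady/__retrieve
# together: initiate, poll get-status while honoring the server's nextTry
# delay, then retrieve. Shown inside a class body so the name-mangled private
# methods resolve; the class name 'Pipeline' is an assumption.
import time

class Pipeline(object):
    def run(self):
        self.nextTry, self.id = self.__initiate()
        while not self.__isReady():
            time.sleep(self.nextTry)
        return self.__retrieve()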
def getScore(model, dev_iter, session, args, tokenizer):
    y_pred_list = []
    y_true_list = []
    ldct_list_tokens = []
    id2category = {j: i for i, j in args.category2id.items()}
    for sample in tqdm(dev_iter):
        tokens, t1, t2 = process(sample[0], args, tokenizer)
        y = list(sample[2])
        y = [args.category2id["[CLS]"]] + y + [args.category2id["[SEP]"]] + \
            [args.category2id["O"]] * (args.max_x_length - len(y))
        feed_dict = {
            model.input_x_word: [t1],
            model.input_mask: [t2],
            model.input_x_len: [sample[1] + 2],
            model.input_relation: [y],
            model.keep_prob: 1,
            model.is_training: False,
        }
        lengths, logits, trans = session.run(
            fetches=[model.lengths, model.logits, model.trans],
            feed_dict=feed_dict)
        predict = decode(logits, lengths, trans, args)[0]
        y_pred_list.append(predict)
        y_true_list.append(sample[-1])
        ldct_list_tokens.append(tokens)
    precision, recall, f1 = get_P_R_F(id2category, y_pred_list, y_true_list, ldct_list_tokens)
    return precision, recall, f1