def get_file_type(self, path):
    """Classify *path* by its top-level directory under the project root.

    Returns 'test_file', 'header_file' or 'src_file' for the 'test',
    'include' and 'src' trees respectively, and None for anything else.
    """
    absolute = FileUtils.get_full_path(path)
    relative = FileUtils.get_rid_of_prefix_path(absolute, self.root)
    top_dir = relative.split(os.path.sep)[0]
    # lookup table replaces the if/elif chain; .get() yields None otherwise
    kinds = {'test': 'test_file', 'include': 'header_file', 'src': 'src_file'}
    return kinds.get(top_dir)
def create_file(cls, key, path, infos):
    """Render the template identified by *key* and write it out.

    When *path* is None the target location is derived from the project
    root and name found in *infos*; otherwise *path* is used as-is.
    """
    template_path = cls.get_template_path(key, infos['project_root'])
    target_path = (
        path
        if path is not None
        else cls.get_target_path(key, infos['project_root'], infos['project_name'])
    )
    rendered = TemplateUtils.replace(FileUtils.get_content(template_path), infos)
    FileUtils.create(target_path, rendered)
def _read_pkgconfig_changes(self):
    """Load the pkgconfig conversion table from the data file.

    Each line has the form 'key: value'.  Returns a dict mapping
    key -> value.
    """
    pkgconfig = {}
    files = FileUtils()
    files.open_datafile(PKGCONFIG_CONVERSIONS)
    for line in files.f:
        # split only on the first ': ' so values that themselves contain
        # ': ' are kept intact; rstrip('\n') is safe even when the final
        # line has no trailing newline (the old pair[1][:-1] chopped a
        # real character in that case)
        pair = line.rstrip('\n').split(': ', 1)
        pkgconfig[pair[0]] = pair[1]
    files.close()
    return pkgconfig
def upload(self, path): fileName = FileUtils.getFileName(path) extension = FileUtils.getExtension(fileName) nxFolder = self.session.fetch(self.nxPath.strip()) folderUid = nxFolder['uid'] nxDoc = self.session.create(folderUid, "File", fileName, {'dc:title': fileName}) docPath = nxDoc['path'] try: binStream = open(path, 'rb') self.logger.debug('connector reads stream') blob = binStream.read().encode("base64") except Exception, e: self._handle_error(e) raise
def _read_licenses_changes(self):
    """Read the licenses-changes data file into a bad -> correct mapping.

    Each data line has the format:
        correct license string<TAB>known bad license string
    """
    mapping = {}
    reader = FileUtils()
    reader.open_datafile(LICENSES_CHANGES)
    for raw in reader.f:
        # drop the trailing newline, then split on the tab separator
        fields = raw.rstrip('\n').split('\t')
        mapping[fields[1]] = fields[0]
    reader.close()
    return mapping
def _find_macros_with_arg(self, spec):
    """Collect the names of argumented macros defined in the specfile *spec*."""
    handle = FileUtils()
    handle.open(spec, 'r')
    names = []
    for raw in handle.f:
        stripped = raw.rstrip('\n')
        candidate = self.re_spec_macrofunc.sub(r'\1', stripped)
        # a changed line means the regexp matched a macro-function definition
        if candidate != stripped:
            names.append(candidate)
    handle.close()
    return names
def _load_keywords_whitelist(self):
    """
    Create regexp for the unbrace keywords based on rpm
    showrc and whitelist.
    """
    BRACKETING_EXCLUDES = 'excludes-bracketing.txt'
    handle = FileUtils()
    handle.open_datafile(BRACKETING_EXCLUDES)
    # one keyword per line; strip the newline from each
    keywords = [entry.rstrip('\n') for entry in handle.f]
    handle.close()
    return keywords
def read_json(filename):
    """Load and return the JSON content of *filename*.

    Returns [] when the file does not exist.  Using 'with' fixes the
    original's bug where a failing open() left 'fd' unbound and the
    'finally: fd.close()' raised NameError, masking the real error.
    """
    if FileUtils.exists(filename):
        with open(filename, 'r') as fd:
            return json.load(fd)
    return []
def process_IN_CREATE(self, event): path = event.pathname self.logger.debug("File path: " + path) if not os.path.isfile(path): return if FileUtils.isFilePart(path): return index = path.rfind("/") self.connector.setNxPath(self.mapper[path[:index]]) try: path = self.ocr.doOcr(path) except Exception, e: self.logger.error("OCR subprocess failed: " + str(e))
def process_IN_CREATE(self, event): path = event.pathname self.logger.debug('File path: ' + path) if not os.path.isfile(path): return if FileUtils.isFilePart(path): return index = path.rfind('/') self.connector.setNxPath(self.mapper[path[:index]]) try: path = self.ocr.doOcr(path) except Exception, e: self.logger.error('OCR subprocess failed: ' + str(e))
def getImage(self):
    """Yield uploadable media files found in the upload directory.

    Files exceeding the configured size limit are backed up and skipped.

    @yield {string} media file path
    """
    for candidate in FileUtils.search(self.uploadDir, self.upload_file_suffixes):
        size = os.path.getsize(candidate)
        if size > self.upload_max_file_size:
            # oversized: move aside instead of yielding for upload
            logger.warning('skip:%s,size:%s,limit:%s',
                           candidate, size, self.upload_max_file_size)
            self.backup(candidate)
            continue
        # TODO: verify the file is actually a valid image before yielding
        yield candidate
def save_file(self, buffer, content_type, basename):
    """Persist *buffer* into the data directory under *basename*.

    The suffix is derived from *content_type*; when *basename* is empty
    the temp file's generated name is used instead.  Examples:
      1) http://www.example.co.jp/Netzawar.png => Netzawar.png
      2) http://www.example.co.jp/            => example.suffix

    @param {io.BytesIO} buffer       Response#content
           {string}     content_type
           {string}     basename
    """
    suffix = self.getSuffix(content_type)
    logger.info('content-type:%s,decode:%s', content_type, suffix)
    # stage the payload in a named temp file first, then move it into place
    with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as tmp:
        tmp.write(buffer.getvalue())
        staged = tmp.name
    if len(basename) == 0:
        # no usable name from the URL -- fall back to the temp file's name
        logger.warning('create_filename:%s', os.path.basename(staged))
        basename = os.path.basename(staged)
    target = Path(self.dataDir, basename).with_suffix(suffix)
    target = FileUtils.sequential(target)
    os.replace(staged, str(target))
def __get_class_name(self):
    """Derive the class name: file base name with the prefix stripped."""
    return self.__strip_prefix(FileUtils.get_base_name(self.file_path))
def __generate_name(self, name):
    """Return *name* normalized with the expected prefix and postfix."""
    result = name
    # append the default postfix when none is present
    if FileUtils.get_postfix(result) is None:
        result = result + self.__get_postfix()
    # prepend the prefix unless the name already starts with it
    if not FileUtils.start_from(result, self.__get_prefix()):
        result = self.__get_prefix() + result
    return result
#!/usr/bin/env python # encoding: utf-8 # [url]http://ldap3.readthedocs.io/tutorial.html#accessing-an-ldap-server[/url] import ldap3 from fileutils import FileUtils import os def verify(host): try: print host server = ldap3.Server(host, get_info=ldap3.ALL, connect_timeout=30) conn = ldap3.Connection(server, auto_bind=True) #print server if len(server.info.naming_contexts) > 0: for _ in server.info.naming_contexts: if conn.search(_, '(objectClass=inetOrgPerson)'): naming_contexts = _.encode('utf8') f = open('ldap.txt','a') f.write(host + '\n') f.close() except Exception, e: pass #print e if __name__ == '__main__': for host in FileUtils.getLines('ldap.lst'): verify(host)
import os, sys
import numpy as np
from fileutils import FileUtils
from metric import ConfusionMatrix
from sklearn.metrics import accuracy_score
from utils.ply import read_ply, write_ply
import ipdb

# Shared file-helper instance used by the rest of the script.
utils = FileUtils()

# NPM3D semantic-segmentation label set: 9 classes, index == class id.
num_classes = 9
label_names = ['unclassified', 'ground', 'veg', 'cars', 'trucks',
               'powerlines', 'fences/hedges', 'poles', 'buildings']

# Directory holding the test point clouds to colorize/evaluate.
data_dir = '/home/vlab/Nina/SemanticSegmentation/KPConv/Data/NPM3D/test_points/'


def get_rgb_color_codes(preds):
    """Map per-point class predictions to RGB colors.

    preds: 1-D array of integer class ids; returns an (N, 3) float array
    initialized to zeros, with rows set per class id below.
    """
    rgb = np.zeros((preds.shape[0], 3))
    rgb[np.where(preds == 0)[0], :] = [0, 0, 100]
    rgb[np.where(preds == 1)[0], :] = [0, 0, 255]
    rgb[np.where(preds == 2)[0], :] = [0, 153, 0]
    # NOTE(review): classes 3 (cars) and 4 (trucks) share the same color
    # [255, 0, 255] -- confirm this is intentional.
    rgb[np.where(preds == 3)[0], :] = [255, 0, 255]
    rgb[np.where(preds == 4)[0], :] = [255, 0, 255]
    rgb[np.where(preds == 5)[0], :] = [255, 255, 0]
    rgb[np.where(preds == 6)[0], :] = [255, 128, 0]
    # NOTE(review): no assignment for class 8 ('buildings') and no return
    # statement is visible here -- this chunk appears truncated; confirm
    # against the full file before relying on this function.
    rgb[np.where(preds == 7)[0], :] = [0, 255, 255]
def __get_name(self, file_path):
    """Extract the raw name from *file_path* and normalize it."""
    raw = FileUtils.get_name(file_path)
    return self.__generate_name(raw)
def __copy_template(cls, key, project_root):
    """Copy the cup template for *key* into the project's template dir."""
    destination = cls.__get_project_template_path(key, project_root)
    origin = cls.__get_cup_template_path(key)
    FileUtils.create(destination, FileUtils.get_content(origin))
def __verify_para(self, args):
    """Reject argument combinations that cannot produce a valid class.

    Raises when class generation is requested for a file that has a
    postfix or that lives in the test tree.
    """
    # checks only apply when class generation is requested
    if not (args.struct or args.all):
        return
    if FileUtils.get_postfix(args.file) is not None:
        raise Exception('can not generate class with postfix')
    if self.project.get_file_type(args.file) == 'test_file':
        raise Exception('can not generate class in test folder')
def get_relative_path(self, path):
    """Strip the project root (and any leading 'include' dir) from *path*."""
    trimmed = FileUtils.get_rid_of_prefix_path(path, self.root)
    top = trimmed.split(os.path.sep)[0]
    if top == 'include':
        # header paths carry an extra 'include' level; drop it first
        trimmed = FileUtils.get_rid_of_top_path(trimmed)
    return FileUtils.get_rid_of_top_path(trimmed)
def __get_path(self, file_path):
    """Resolve the directory a generated file of self.type belongs in."""
    absolute = os.path.join(os.getcwd(), FileUtils.get_path(file_path))
    # user files stay exactly where the caller pointed
    if self.type == 'user_file':
        return absolute
    relative = self.project.get_relative_path(absolute)
    root_for_type = self.project.get_root_of(self.type)
    return os.path.join(root_for_type, relative)