Example #1
0
    def __init__(self, path, recursive, output_file_name, worker_process_count, io_thread_per_worker, buffer_size, \
                 fnmatch_pattern, length_of_side, dtype, max_entry_count, augmentation_cmd, pre_process_cmd, logger):
        """Set up the dataset builder: resolve the output base name, size the
        worker/IO pools, and create the inter-process queues.

        :param output_file_name: output file; its extension is dropped and a
            missing directory component is created (falling back to the cwd).
        :param worker_process_count: number of workers; <= 0 means autosize
            from the CPU count.
        :param io_thread_per_worker: IO threads per worker; <= 0 means 2.
        """
        self._path = path
        self._recursive = recursive

        # Split the requested output name into directory + extension-less name.
        out_dir, file_name = os.path.split(output_file_name)
        file_name_without_ext, _ = os.path.splitext(file_name)
        if not out_dir:
            # No directory component: write into the current working directory.
            file_name_with_path_but_ext = os.path.join(os.getcwdu(), file_name_without_ext)
        elif not os.path.exists(out_dir):
            try:
                # BUG FIX: mode must be the octal literal 0o755; the decimal
                # literal 755 set permission bits 1363 instead of rwxr-xr-x.
                os.makedirs(out_dir, mode=0o755)
            except Exception:
                # BUG FIX: previously the fallback assignment here was dead —
                # it was unconditionally overwritten right after the except
                # block.  Fall back to the cwd only when makedirs failed.
                out_dir = os.getcwdu()
            file_name_with_path_but_ext = os.path.join(out_dir, file_name_without_ext)
        else:
            file_name_with_path_but_ext = os.path.join(out_dir, file_name_without_ext)
        self._file_name_with_path_but_ext = file_name_with_path_but_ext

        if worker_process_count > 0:
            self._worker_process_count = worker_process_count
        elif pre_process_cmd:
            # Pre-processing adds CPU load, so spawn fewer workers per core.
            self._worker_process_count = int(multiprocessing.cpu_count() * 1.5)
        else:
            self._worker_process_count = int(multiprocessing.cpu_count() * 2)

        if io_thread_per_worker > 0:
            self._io_thread_per_worker = io_thread_per_worker
        else:
            self._io_thread_per_worker = 2

        self._buffer_size = buffer_size

        self._fnmatch_pattern = fnmatch_pattern
        self._length_of_side = length_of_side
        self._dtype = dtype
        self._max_entry_count = max_entry_count

        self._augmentation_cmd = augmentation_cmd
        self._augmentation_power = Augmentation.calc_power(augmentation_cmd)
        self._pre_process_cmd = pre_process_cmd

        self._l = logger

        self._total_size = 0

        self._current_index = 0

        self._ndarr = None

        # Waiting on the thread pool.

        self._in_q = multiprocessing.Queue(maxsize=-1)
        self._out_q = multiprocessing.Queue(maxsize=self._buffer_size)

        self._suicide = False

        self._producer_pool = []
        self._consumer = None
Example #2
0
 def list_profile_dirs(self):
     """Print every profile location IPython knows about.

     Lists, in order: profiles bundled with IPython, profiles in
     self.ipython_dir, and profiles found in the current working
     directory, followed by a usage hint.
     """
     # Bundled profiles ship inside IPython and are copied into the
     # user's IPython directory the first time they are requested.
     profiles = list_bundled_profiles()
     if profiles:
         print
         print "Available profiles in IPython:"
         self._print_profiles(profiles)
         print
         print "    The first request for a bundled profile will copy it"
         print "    into your IPython directory (%s)," % self.ipython_dir
         print "    where you can customize it."

     # Profiles already present in the user's IPython directory.
     profiles = list_profiles_in(self.ipython_dir)
     if profiles:
         print
         print "Available profiles in %s:" % self.ipython_dir
         self._print_profiles(profiles)

     # Profiles that live next to wherever the user launched from.
     profiles = list_profiles_in(os.getcwdu())
     if profiles:
         print
         print "Available profiles in current directory (%s):" % os.getcwdu()
         self._print_profiles(profiles)

     print
     print "To use any of the above profiles, start IPython with:"
     print "    ipython --profile=<name>"
     print
Example #3
0
def main(argv):
    """Run the sync test driver as either a server or a client.

    argv[1] selects the mode ('server' or 'client'); argv[2] is the
    HOST:PORT URI the server listens on / the client connects to.
    """
    if len(argv) < 3:
        print "USAGE: %s MODE HOST:PORT" % argv[0]
    else:
        mode = argv[1]
        uri = argv[2]
        if mode == "server":
            app = app_factory()
            # The server side serves a dedicated test database directory.
            # app.initialise(os.path.abspath(os.path.join(os.getcwdu(), ".mnemosyne")))
            app.initialise(os.path.abspath(os.path.join(os.getcwdu(), "testdb")))
            database = app.database()
            server = Server(uri, database, app.config(), app.log())
            server.start()
            app.finalise()
        elif mode == "client":
            app = app_factory()
            # The client side syncs the regular .mnemosyne data directory.
            # app.initialise(os.path.abspath(os.path.join(os.getcwdu(), "testdb")))
            app.initialise(os.path.abspath(os.path.join(os.getcwdu(), ".mnemosyne")))
            database = app.database()
            client = Client(uri, database, app.controller(), app.config(), app.log())
            client.start()
            app.finalise()
        else:
            print "unknown mode"
Example #4
0
    def __init__(self, **params):
        """Initialise the wx application.

        Builds the MPD client, wires the reconnect/connect handlers,
        configures gettext translation and enables automatic artwork and
        lyrics downloads.  Pass debug=True in **params for debug mode.
        """
        self.config_dir = environment.config_dir
        if not os.path.exists(self.config_dir):
            os.makedirs(self.config_dir)
        self.client = client.Client(self.config_dir)
        self.client.connection.bind(
            self.client.connection.CLOSE_UNEXPECT, self.reconnect)
        self.client.connection.bind(
            self.client.connection.CONNECT, self.connected)
        # BUG FIX: dict.has_key() is deprecated (and removed in Python 3);
        # the `in` operator is equivalent and works in both versions.
        self.__debug = 'debug' in params
        self.__connected = None
        lang = dict(
            domain=COMMAND_NAME,
            fallback=True
            )
        # Prefer a locale directory under the launch directory, if present.
        if os.path.exists(os.getcwdu() + u'/locale'):
            lang['localedir'] = os.getcwdu() + u'/locale'
        elif os.path.exists(os.getcwdu() + u'/share/locale'):
            lang['localedir'] = os.getcwdu() + u'/share/locale'
        self.__lang = gettext.translation(**lang)
        self.client.artwork.download_auto = True
        self.client.artwork.download_background = True
        self.client.lyrics.download_auto = True
        self.client.lyrics.download_background = True

        # NOTE(review): injecting '_' into __builtins__ makes the translation
        # function globally visible to every module — confirm this is intended.
        __builtins__['_']  = self.translate
        wx.App.__init__(self)
        self.notifyosd = notify.NotifyOSD(self.client)
        self.growlnotify = notify.GrowlNotify(self.client)
Example #5
0
 def handle_navigator_option(self, call_value):
     """Dispatch the quick-panel selection made in the navigator.

     call_value is the chosen index, or -1 when the panel was dismissed.
     Directory-style choices chdir and reopen the navigator; choosing a
     plain file opens it and closes the navigator.
     """
     os.chdir(self.current_dir)
     if call_value == -1:
         return
     selection = self.dir_files[call_value]
     if call_value == 0:
         self.open_navigator()
     elif call_value == 1:
         self.open_directory_options()
     elif selection == '~' + os.sep:
         os.chdir(os.getenv(self.home))
     elif selection == '..' + os.sep:
         os.chdir(os.path.pardir)
     elif sublime.platform() == 'windows' and selection in self.drives:
         os.chdir(selection)
     elif selection == bullet + ' To current view':
         os.chdir(os.path.dirname(self.window.active_view().file_name()))
     elif selection.startswith(bullet + ' To bookmark'):
         os.chdir(self.bookmark)
     else:
         target = os.path.join(os.getcwdu(), self.dir_files[call_value])
         if os.path.isdir(target):
             # Navigate into the chosen directory.
             os.chdir(self.dir_files[call_value])
         else:
             # A plain file was chosen: open it, leave the navigator closed.
             self.window.open_file(os.path.join(os.getcwdu(), target))
             return
     self.open_navigator()
Example #6
0
    def generate(self, include_draft=False):
        """Build the whole site into the configured destination directory.

        :include_draft: True/False, include draft pages or not to generate.
        """
        self.include_draft = include_draft

        logger.debug("Empty the destination directory")
        destination = os.path.join(self.target_path,
                                   self.config["destination"])
        if os.path.exists(destination):
            # Keep git metadata and the GitHub-Pages helper files alive.
            emptytree(destination, ['.git', 'CNAME', 'favicon.ico'])

        self.generate_pages()

        # A hand-written index.md suppresses the generated catalog page.
        index_md = os.path.join(self.config['source'], 'index.md')
        if not os.path.exists(index_md):
            self.generate_catalog(self.pages)

        feed_fn = 'atom.xml'
        if os.path.exists(os.path.join(getcwdu(), feed_fn)):
            self.generate_feed(self.pages, feed_fn)

        self.install_theme()

        self.copy_attach()

        # for default supported files to be copied to output/
        # CNAME for github pages with custom domain
        # TODO favicon can be other formats, such as .png, use glob match?
        for name in ('CNAME', 'favicon.ico'):
            source_file = os.path.join(getcwdu(), name)
            if os.path.exists(source_file):
                shutil.copy2(source_file,
                             os.path.join(self.config['destination'], name))
Example #7
0
    def __init__(self, datadir=None, category=None, records=None):
        """Initialise libmnemosyne against a data directory.

        datadir defaults to ./.mnemosyne when it exists, otherwise
        ~/.mnemosyne.  records caps how many records to process
        (-1 means no limit).  category defaults to "English-Russian".
        """
        if datadir:
            datadir = os.path.abspath(datadir)
        elif os.path.exists(os.path.join(os.getcwdu(), ".mnemosyne")):
            datadir = os.path.abspath(os.path.join(os.getcwdu(), ".mnemosyne"))
        else:
            datadir = os.path.abspath(os.path.join(os.path.expanduser("~"),
                        ".mnemosyne"))

        print 'datadir=', datadir

        libmnemosyne.initialise(datadir)

        self.card_type = FrontToBack()
        self.database = database()
        self.saved = False

        # -1 acts as the "no explicit record limit" sentinel.
        if records:
            self.records = records
        else:
            self.records = -1

        if not category:
            category = "English-Russian"

        self.category = category #Category(category)

        self.controller = ui_controller_main()
Example #8
0
    def create_guide(self):
        """Build a Guide from self.guide, adding a synthetic 'start'
        reference from the EXTH start offset when none was present."""
        guide = Guide()
        found_start = False
        for ref_type, ref_title, pos_fid in self.guide:
            try:
                if len(pos_fid) != 2:
                    continue
            except TypeError:
                continue  # thumbnailstandard record, ignore it
            linktgt, idtext = self.get_id_tag_by_pos_fid(*pos_fid)
            if idtext:
                linktgt = linktgt + b'#' + idtext
            reference = Guide.Reference(linktgt, os.getcwdu())
            reference.title, reference.type = ref_title, ref_type
            if reference.title == 'start' or reference.type == 'text':
                found_start = True
            guide.append(reference)

        start_offset = self.header.exth.start_offset
        if start_offset not in {None, NULL_INDEX} and not found_start:
            info = self.get_file_info(start_offset)
            if info.filename is not None:
                idtext = self.get_id_tag(start_offset).decode(self.header.codec)
                linktgt = info.filename
                if idtext:
                    linktgt = linktgt + '#' + idtext
                reference = Guide.Reference('%s/%s' % (info.type, linktgt), os.getcwdu())
                reference.title, reference.type = 'start', 'text'
                guide.append(reference)

        return guide
Example #9
0
def main(basedir=None, query=None):
    """Interactive filesystem-matcher demo.

    Prompts for a directory to scan (defaulting to the cwd), then loops
    reading queries and printing up to 10 matching paths with the matched
    characters highlighted in bold red.
    """
    from calibre import prints
    from calibre.utils.terminal import ColoredStream
    if basedir is None:
        try:
            # An empty or whitespace-only answer falls back to the cwd.
            basedir = raw_input('Enter directory to scan [%s]: ' % os.getcwdu()
                                ).decode(sys.stdin.encoding).strip() or os.getcwdu()
        except (EOFError, KeyboardInterrupt):
            return
    m = FilesystemMatcher(basedir)
    emph = ColoredStream(sys.stdout, fg='red', bold=True)
    while True:
        if query is None:
            try:
                query = raw_input('Enter query: ').decode(sys.stdin.encoding)
            except (EOFError, KeyboardInterrupt):
                break
            if not query:
                break
        for path, positions in islice(m(query).iteritems(), 0, 10):
            positions = list(positions)
            p = 0
            # Walk the match positions, emphasising each matched character;
            # -1 entries are padding and are skipped.
            while positions:
                pos = positions.pop(0)
                if pos == -1:
                    continue
                prints(path[p:pos], end='')
                ch = get_char(path, pos)
                with emph:
                    prints(ch, end='')
                p = pos + len(ch)
            prints(path[p:])
        # Force a fresh prompt on the next loop iteration.
        query = None
def startServer():
    """Restart the Appium server when testing against a mobile source.

    Reads TESTING_BROWSERS_OR_DEVICES from the project config; for app/H5
    mobile targets it kills any cmd/node processes and launches the Appium
    start script.  Windows-only (taskkill, .Bat script).
    """
    currentpath = os.getcwdu()
    source_name = common.get_value_from_conf_path("TESTING_BROWSERS_OR_DEVICES", currentpath)
    if source_name in ("APP-Android", "APP-IOS", "H5-Android", "H5-IOS"):
        os.popen("taskkill /f /im cmd.exe")
        os.popen("taskkill /f /im node.exe")
        # NOTE(review): the literal relies on '\B' and '\S' not being
        # recognised escape sequences; a raw string would be safer — confirm.
        os.system(os.getcwdu() + '\Bat\StartAppiumServer.Bat')
Example #11
0
def factory(prefix, controller):
	"""Entry point called by FSO's frameworkd to start this subsystem.

	Claims the FSO bus name, loads the domain and backend plugins, and
	returns the list of D-Bus objects frameworkd should export.
	"""
	from logging import getLogger as get_logger
	logger = get_logger('opimd')

	# Claim the bus name
	# TODO Check for exceptions
	SystemBus().request_name(DBUS_BUS_NAME_FSO)

	from backend_manager import BackendManager
	from domain_manager import DomainManager

	# Load plugins
	DomainManager.init(os.getcwdu())
	backend_manager = BackendManager(os.getcwdu())

	# frameworkd wants one flat list of every D-Bus object we expose.
	dbus_objects = list(DomainManager.enumerate_dbus_objects())
	dbus_objects.append(backend_manager)

	logger.info('opimd subsystem loaded')

	return dbus_objects
Example #12
0
def main_pyneo():
	"""Launch pypimd for pyneo: claim the bus, load plugins, run the loop."""
	log_open('pypimd', LOG_NDELAY|LOG_PID|LOG_PERROR, LOG_DAEMON)

	DBusGMainLoop(set_as_default=True)

	# Claim the bus name
	# TODO Check for exceptions
	SystemBus().request_name(DBUS_BUS_NAME_PYNEO)

	# Workaround for relative imports of pyneod stuff, see
	# http://mail.python.org/pipermail/python-list/2007-May/438250.html
	sys.path.append('/usr/share/pyneod')

	from backend_manager import BackendManager
	from domain_manager import DomainManager

	# Load plugins
	DomainManager.init(os.getcwdu())
	BackendManager(os.getcwdu())

	# 3-2-1-Go!
	MainLoop().run()

	log_close()
	return 0
Example #13
0
    def __init__(self, datadir=None, category=None):
        """Set up the Maemo UI components and initialise libmnemosyne.

        datadir defaults to ./.mnemosyne when present, else ~/.mnemosyne.
        """
        Mnemosyne.__init__(self)

        self.components.insert(0, ("mnemosyne.libmnemosyne.translator",
                                  "GetTextTranslator"))
        self.components.append(
            ("mnemosyne.libmnemosyne.ui_components.review_widget",
             "ReviewWidget"))
        self.components.append(
            ("mnemosyne.libmnemosyne.ui_components.main_widget",
             "MainWidget"))
        self.components.append(("mnemosyne.maemo_ui.factory", "ConfigHook"))

        # Pick the data directory: explicit argument wins, then a
        # .mnemosyne folder in the cwd, then the one in the user's home.
        if datadir:
            datadir = os.path.abspath(datadir)
        else:
            local_dir = os.path.join(os.getcwdu(), ".mnemosyne")
            if os.path.exists(local_dir):
                datadir = os.path.abspath(local_dir)
            else:
                datadir = os.path.abspath(
                    os.path.join(os.path.expanduser("~"), ".mnemosyne"))

        self.initialise(datadir)
        self.review_controller().reset()

        self.card_type = [ct for ct in self.card_types()
                          if ct.name == "Front-to-back only"][0]
        self.saved = False

        self.category = category #Category(category)
Example #14
0
    def convert(self, stream, options, file_ext, log,
                accelerators):
        """Convert a PDF stream to HTML plus an OPF manifest in the cwd.

        Returns the path of the generated metadata.opf (or delegates to
        convert_new when the new PDF engine is enabled).
        """
        from calibre.ebooks.metadata.opf2 import OPFCreator
        from calibre.ebooks.pdf.pdftohtml import pdftohtml

        log.debug('Converting file to html...')
        # The main html file will be named index.html
        self.opts, self.log = options, log
        if options.new_pdf_engine:
            return self.convert_new(stream, accelerators)
        pdftohtml(os.getcwdu(), stream.name, options.no_images)

        from calibre.ebooks.metadata.meta import get_metadata
        log.debug('Retrieving document metadata...')
        mi = get_metadata(stream, 'pdf')
        opf = OPFCreator(os.getcwdu(), mi)

        # Everything pdftohtml dropped beside index.html belongs in the
        # manifest as well.
        manifest = [(u'index.html', None)]
        extra_files = os.listdir(os.getcwdu())
        extra_files.remove('index.html')
        manifest.extend((name, None) for name in extra_files)
        log.debug('Generating manifest...')
        opf.create_manifest(manifest)

        opf.create_spine([u'index.html'])
        log.debug('Rendering manifest...')
        with open(u'metadata.opf', 'wb') as opffile:
            opf.render(opffile)

        return os.path.join(os.getcwdu(), u'metadata.opf')
Example #15
0
    def write_opf(self, guide, toc, spine, resource_map):
        """Write metadata.opf and toc.ncx for the unpacked KF8 book.

        guide/toc/spine describe the book structure; resource_map maps
        resource indices to extracted file names.  Returns the OPF file
        name ('metadata.opf').
        """
        mi = self.header.exth.mi
        if (self.cover_offset is not None and self.cover_offset <
                len(resource_map)):
            mi.cover = resource_map[self.cover_offset]

        if len(list(toc)) < 2:
            self.log.warn('KF8 has no metadata Table of Contents')

            # Fall back to an inline ToC referenced from the guide, if any.
            for ref in guide:
                if ref.type == 'toc':
                    href = ref.href()
                    href, frag = urldefrag(href)
                    if os.path.exists(href.replace('/', os.sep)):
                        try:
                            toc = self.read_inline_toc(href, frag)
                        # BUG FIX: narrowed the bare `except:` so that
                        # KeyboardInterrupt/SystemExit are not swallowed.
                        except Exception:
                            self.log.exception('Failed to read inline ToC')

        opf = OPFCreator(os.getcwdu(), mi)
        opf.guide = guide

        def exclude(path):
            # The debug dump must never end up in the manifest.
            return os.path.basename(path) == 'debug-raw.html'

        opf.create_manifest_from_files_in([os.getcwdu()], exclude=exclude)
        opf.create_spine(spine)
        opf.set_toc(toc)

        with open('metadata.opf', 'wb') as of, open('toc.ncx', 'wb') as ncx:
            opf.render(of, ncx, 'toc.ncx')
        return 'metadata.opf'
def main():
    """Parse the command-line options and hand off to run()."""
    parser = OptionParser(usage='Usage: %prog [OPTIONS] PATH')
    parser.add_option('--dry-run', action='store_true', default=False,
                      dest='dry_run',
                      help='just display the actions that will be done')
    parser.add_option('--recursive', action='store_true', default=False,
                      dest='recursive',
                      help='traverse subdirectories recursively')
    parser.add_option('--move', action='store_true', default=False,
                      dest='move',
                      help='move the files instead of doing a copy')
    options, args = parser.parse_args()
    # Use the current directory if none has been specified
    directory = args[0] if len(args) == 1 else os.getcwdu()
    if not os.path.isdir(directory):
        parser.error("the files' directory specified is invalid.")
    # Get the absolute path and normalize its case
    directory = os.path.realpath(os.path.normcase(directory))
    # Get the current working dir
    cwdir = os.getcwdu()
    run(cwdir, directory, options.dry_run, options.recursive, options.move)
Example #17
0
    def generate(self):
        """Regenerate the whole site into the destination directory."""
        logger.debug("Empty the destination directory")
        destination = os.path.join(self.target_path,
                                   self.config["destination"])
        if os.path.exists(destination):
            # for github pages
            emptytree(destination, ['.git', 'CNAME'])

        self.generate_pages()

        # A hand-written index.md suppresses the generated catalog page.
        if not os.path.exists(os.path.join(self.config['source'], 'index.md')):
            self.generate_catalog(self.pages)

        feed_fn = 'atom.xml'
        if os.path.exists(os.path.join(getcwdu(), feed_fn)):
            self.generate_feed(self.pages, feed_fn)

        self.install_theme()

        self.copy_attach()

        # for github pages with custom domain
        cname = os.path.join(getcwdu(), 'CNAME')
        if os.path.exists(cname):
            shutil.copy2(cname,
                         os.path.join(self.config['destination'], 'CNAME'))
def merge(key, track, jobs):
    '''
      Merge a rosinstall into the workspace. The key can either be an

      - absolute path to a rosinstall file
      - name of a rosinstall file in the current dir
      - key in the rosinstall database

      :param str key: see above
      :param str track: the track to pull keys from (e.g. hydro, indigo)
      :param str jobs: number of parallel download jobs to spawn via wstool
    '''
    # Locate the workspace root: the environment variable wins, then the
    # current-directory layout is probed.
    if os.environ.get('YUJIN_WORKSPACE') is not None:
        workspace_dir = os.environ.get('YUJIN_WORKSPACE')
    elif os.path.isdir(os.path.join(os.getcwdu(), 'src')):
        workspace_dir = os.getcwdu()
    elif os.path.isfile(os.path.join(os.getcwdu(), '.rosinstall')):
        workspace_dir = os.path.join(os.getcwdu(), '..')
    else:
        raise RuntimeError("Could not find an initialised workspace (you must be at the root below 'src', or in a setup.bash'd environment)")
    uri_list = []
    if os.path.isabs(key):
        uri_list.append(key)
    # BUG FIX: this test previously used os.getcwd() while the append used
    # os.getcwdu(), mixing byte and unicode paths; use getcwdu() for both.
    elif os.path.isfile(os.path.join(os.getcwdu(), key)):
        uri_list.append(os.path.join(os.getcwdu(), key))
    elif urlparse.urlparse(key).scheme == "":  # not a http element, let's look up our database
        rosinstall_database, unused_lookup_track, unused_lookup_database = get_rosinstall_database(track)
        (unused_database_name_list, database_uri_list) = parse_database([key], rosinstall_database)
        uri_list.extend(database_uri_list)
    else:  # it's a http element'
        uri_list.append(key)
    populate_workspace(os.path.join(workspace_dir, 'src'), uri_list, jobs, do_init=False)
Example #19
0
    def open_navigator(self):
        """Show the quick-panel file navigator for the current directory.

        Builds self.dir_files as: a header entry showing the cwd,
        'Directory actions', the '..' and '~' shortcuts, optional Windows
        drive letters, then the filtered and sorted directory listing,
        with optional bookmark / current-view shortcuts inserted at
        position 2.
        """
        self.current_dir = os.getcwdu()
        self.dir_files = ['[' + os.getcwdu() + ']',
            bullet + ' Directory actions', '..' + os.sep, '~' + os.sep]

        # annoying way to deal with windows
        if sublime.platform() == 'windows':
            # Probe every drive letter once and cache the ones that exist.
            if len(self.drives) == 0:
                for i in range(ord('A'), ord('Z') + 1):
                    drive = chr(i)
                    if (os.path.exists(drive + ':\\')):
                        self.drives.append(drive + ':\\')
            self.dir_files += self.drives

        # List the cwd, dropping anything that matches an ignore pattern.
        for element in os.listdir(os.getcwdu()):
            ignore_element = False
            for ignore_pattern in self.ignore_list:
                if fnmatch(element, ignore_pattern):
                    ignore_element = True
                    break
            if not ignore_element:
                fullpath = os.path.join(os.getcwdu(), element)
                if os.path.isdir(fullpath):
                    # Directories are shown with a trailing separator.
                    self.dir_files.append(element + os.sep)
                else:
                    self.dir_files.append(element)

        # Keep the four fixed header entries in place; sort the rest.
        self.dir_files = self.dir_files[:4] + sorted(self.dir_files[4:], key=sort_files)
        if self.bookmark:
            self.dir_files.insert(2, bullet + ' To bookmark (' + self.bookmark + ')')
        if self.window.active_view() and self.window.active_view().file_name():
            self.dir_files.insert(2, bullet + ' To current view')
        self.window.show_quick_panel(self.dir_files, self.handle_navigator_option, sublime.MONOSPACE_FONT)
    def execute(self, pool):
        """Run the Dev Tools initialization script(s) for the local repo.

        On Windows two scripts are run (module script, then repo script);
        elsewhere a single repo script is run.  Failures are reported via
        prompt.error; an OperationResult is returned on completion.
        """
        # Test if git local repo exists
        if not os.path.isdir(os.path.join(os.getcwdu(), DevToolsConfigFile.Path)):
            prompt.error(DevToolsMessage.GitRepoNotExist.format(''))
            return

        error = False
        # BUG FIX: fullpath is read by the OSError/IOError handler below;
        # it previously raised NameError when the failure happened before
        # fullpath was assigned (e.g. while running the module script).
        fullpath = None
        current_path = os.getcwdu()
        try:
            log.info(u'Running Dev Tools initialization script.')
            if misc.is_os_windows():
                path = shell_utils.climb_dir_tree(shell_utils.ori_path(), OSSpecific.WindowsClimbUpDepth)
                #TODO: replace current workaround for WindowsModuleScript
                current_path = os.getcwdu()
                script_path = os.path.join(path, OSSpecific.WindowsModuleScriptPath)
                log.debug(u'Changing path to {0}.'.format(script_path))
                os.chdir(script_path)

                log.info(u'Running script "{0}".'.format(OSSpecific.WindowsModuleScriptName))
                shell_utils.call([OSSpecific.WindowsModuleScriptName])

                log.debug(u'Changing path to {0}.'.format(current_path))
                os.chdir(current_path)

                log.info(u'Running script "{0}".'.format(OSSpecific.WindowsRepoScript))
                fullpath = os.path.join(path, OSSpecific.WindowsRepoScript)
                prompt.error(shell_utils.call([fullpath]))
            else:
                path = shell_utils.climb_dir_tree(shell_utils.ori_path(), OSSpecific.LinuxClimbUpDepth)
                log.info(u'Running script "{0}" at {1}.'.format(OSSpecific.LinuxRepoScript,
                                                                path))
                fullpath = os.path.join(path, OSSpecific.LinuxRepoScript)
                prompt.error(shell_utils.call([fullpath]))

            # Make the generated config file accessible for later runs.
            location = DevToolsConfigFile.Path + os.path.sep + DevToolsConfigFile.Name
            config_file.set_access_permission(location, True)

        except subprocess.CalledProcessError as ex:
            # Git returned with an error code
            log.error(u'Dev Tools initialization script report an error, because "{0}".'.format(ex))
            error = True
            prompt.error(DevToolsMessage.InitError.format(ex.message))

        except (OSError, IOError) as ex:
            log.error(u'Failed to call Dev Tools initialization script, because "{0}".'.format(ex))
            # Cannot find or run script
            error = True
            if ex.errno == FileErrorConstant.FileNotFoundErrorCode:
                if fullpath:
                    prompt.error(DevToolsMessage.FileMissingError.format(fullpath))
                else:
                    prompt.error(ex)

        finally:
            if error:
                prompt.error(DevToolsMessage.ExecutionError.format(DevToolsConfigFile.InitHelpUrl))

        ret_result = OperationResult(self, None, None, None)
        return ret_result
 def data(self, index, role):
     """Qt model data accessor for the targets table.

     For the decoration role, column 1 shows the target's picture (or a
     bundled generic avatar).  For the display role, columns 0-4 show
     plugin name, (empty picture slot), username, full name and user id.
     Returns an empty QVariant for invalid rows; note that valid rows
     with an unhandled role fall through and implicitly return None.
     """
     target = self.targets[index.row()]
     if index.isValid() and (0 <= index.row() < len(self.targets)) and target:
         column = index.column()
         if role == Qt.DecorationRole:
             if column == 1:
                 picturePath = os.path.join(os.getcwdu(), 'temp', target['targetPicture'])
                 if picturePath and os.path.exists(picturePath):
                     pixmap = QPixmap(picturePath)
                     return QIcon(pixmap.scaled(30, 30, Qt.IgnoreAspectRatio, Qt.FastTransformation))
                 else:
                     # Fall back to the bundled generic avatar image.
                     pixmap = QPixmap(os.path.join(os.getcwdu(), 'include', 'generic_user.png'))
                     pixmap.scaled(20, 20, Qt.IgnoreAspectRatio)
                     return QIcon(pixmap)
         if role == Qt.DisplayRole:
             if column == 0:
                 return QVariant(target['pluginName'])
             elif column == 1:
                 return QVariant()
             elif column == 2:
                 return QVariant(target['targetUsername'])
             elif column == 3:
                 return QVariant(target['targetFullname'])
             elif column == 4:
                 return QVariant(target['targetUserid'])


     else:
         return QVariant()
Example #22
0
    def get_images(self, stream, tdir, top_level=False):
        images = []
        imgs = []

        if top_level:
            imgs = glob.glob(os.path.join(tdir, '*.png'))
        # Images not in top level try bookname_img directory because
        # that's where Dropbook likes to see them.
        if not imgs:
            if hasattr(stream, 'name'):
                imgs = glob.glob(os.path.join(tdir, os.path.splitext(os.path.basename(stream.name))[0] + '_img', '*.png'))
        # No images in Dropbook location try generic images directory
        if not imgs:
            imgs = glob.glob(os.path.join(os.path.join(tdir, u'images'), u'*.png'))
        if imgs:
            os.makedirs(os.path.join(os.getcwdu(), u'images'))
        for img in imgs:
            pimg_name = os.path.basename(img)
            pimg_path = os.path.join(os.getcwdu(), 'images', pimg_name)

            images.append('images/' + pimg_name)

            shutil.copy(img, pimg_path)

        return images
Example #23
0
def write_config(sec, key, val):
    '''
    Write config to file

    Sets [sec] key = val in the config.ini located in the current working
    directory and saves the file back to disk.
    '''
    cf = ConfigParser.ConfigParser()
    config_path = os.path.join(os.getcwdu(), 'config.ini')
    cf.read(config_path)
    cf.set(sec, key, val)
    # BUG FIX: the file handle was previously opened inline and never
    # closed, so buffered data could be lost; the with-block guarantees
    # the rewritten config is flushed and closed.
    with open(config_path, "w") as config_fh:
        cf.write(config_fh)
Example #24
0
def find_local_repo(root=None, path=None):
    """Walk up from *root* looking for a directory a repo class recognises.

    :param root: directory to start searching at; defaults to the current
        working directory, evaluated per call.
    :param path: passed through to the repo class constructor; defaults to
        the current working directory, evaluated per call.
    :return: an instance of the detected repo class, or None when no
        ancestor of root is a repository.
    """
    # BUG FIX: the defaults were previously `root=os.getcwdu()`, which is
    # evaluated once at import time, so any later os.chdir() was silently
    # ignored.  Use None sentinels and resolve the cwd at call time.
    if root is None:
        root = os.getcwdu()
    if path is None:
        path = os.getcwdu()
    repo_class = detect_local_repo(root)
    if repo_class is not None:
        return repo_class(root, path)
    parent = os.path.dirname(root)
    if parent == root:
        # Reached the filesystem root without finding a repository.
        return None
    return find_local_repo(parent, path)
Example #25
0
def del_option(sec, key):
    '''
    Delete an option

    Removes *key* from section *sec* of the config.ini located in the
    current working directory and saves the file back to disk.
    '''
    cf = ConfigParser.ConfigParser()
    config_path = os.path.join(os.getcwdu(), 'config.ini')
    cf.read(config_path)
    cf.remove_option(sec, key)
    # BUG FIX: the file handle was previously opened inline and never
    # closed; the with-block guarantees the rewritten config is flushed
    # and closed.
    with open(config_path, "w") as config_fh:
        cf.write(config_fh)
Example #26
0
 def handle_new_directory(self, dir_name):
     """Create dir_name inside the cwd, refusing names that already exist."""
     target = os.getcwdu() + os.sep + dir_name
     if os.path.isfile(target):
         sublime.error_message(dir_name + ' is already a file')
         return
     if os.path.isdir(target):
         sublime.error_message(dir_name + ' already exists')
         return
     os.makedirs(target)
Example #27
0
 def test_render_unicode_cwd(self):
     """Prompt rendering must cope with a non-ASCII working directory."""
     # Remember the cwd so the test leaves the process where it found it.
     save = os.getcwdu()
     with TemporaryDirectory(u'ünicødé') as td:
         os.chdir(td)
         self.pm.in_template = r'\w [\#]'
         p = self.pm.render('in', color=False)
         # \w expands to the cwd and \# to the execution count.
         self.assertEqual(p, u"%s [%i]" % (os.getcwdu(), ip.execution_count))
     os.chdir(save)
Example #28
0
    def convert(self, stream, options, file_ext, log,
                accelerators):
        """Convert PML/PMLZ input to HTML pages plus an OPF in the cwd.

        For 'pmlz' the archive is unpacked into a temporary directory and
        every contained .pml file becomes one HTML page; otherwise the
        stream itself is processed into index.html.  Returns the path of
        the generated metadata.opf.
        """
        from calibre.ebooks.metadata.toc import TOC
        from calibre.ebooks.metadata.opf2 import OPFCreator
        from calibre.utils.zipfile import ZipFile

        self.options = options
        self.log = log
        pages, images = [], []
        toc = TOC()

        if file_ext == 'pmlz':
            log.debug('De-compressing content to temporary directory...')
            with TemporaryDirectory(u'_unpmlz') as tdir:
                zf = ZipFile(stream)
                zf.extractall(tdir)

                pmls = glob.glob(os.path.join(tdir, u'*.pml'))
                for pml in pmls:
                    html_name = os.path.splitext(os.path.basename(pml))[0]+'.html'
                    html_path = os.path.join(os.getcwdu(), html_name)

                    pages.append(html_name)
                    log.debug('Processing PML item %s...' % pml)
                    # Each PML file contributes its own sub-ToC.
                    ttoc = self.process_pml(pml, html_path)
                    toc += ttoc
                images = self.get_images(stream, tdir, True)
        else:
            toc = self.process_pml(stream, u'index.html')
            pages.append(u'index.html')

            if hasattr(stream, 'name'):
                images = self.get_images(stream, os.path.abspath(os.path.dirname(stream.name)))

        # We want pages to be orded alphabetically.
        pages.sort()

        manifest_items = []
        for item in pages+images:
            manifest_items.append((item, None))

        from calibre.ebooks.metadata.meta import get_metadata
        log.debug('Reading metadata from input file...')
        mi = get_metadata(stream, 'pml')
        if 'images/cover.png' in images:
            mi.cover = u'images/cover.png'
        opf = OPFCreator(os.getcwdu(), mi)
        log.debug('Generating manifest...')
        opf.create_manifest(manifest_items)
        opf.create_spine(pages)
        opf.set_toc(toc)
        with open(u'metadata.opf', 'wb') as opffile:
            with open(u'toc.ncx', 'wb') as tocfile:
                opf.render(opffile, tocfile, u'toc.ncx')

        return os.path.join(os.getcwdu(), u'metadata.opf')
def filedir():
    print u'我在这个目录:',os.getcwdu()
    cwd = os.getcwdu()
    print u'这个目录包含',os.listdir(cwd)
    filelist = os.listdir(cwd)
    L=[]
    for item in filelist:
        if os.path.isfile(item):
            L.append(item)
    print u'这个目录下的文件有',L
Example #30
0
 def handle_new_file(self, file_name):
     """Create an empty file named *file_name* in the current working
     directory and open it in this window.

     Shows a Sublime error dialog and aborts if a file or directory
     with that name already exists.
     """
     # Compute the target path once instead of re-concatenating
     # os.getcwdu() + os.sep + file_name at every use.
     path = os.getcwdu() + os.sep + file_name
     if os.path.isfile(path):
         sublime.error_message(file_name + ' already exists')
         return
     if os.path.isdir(path):
         sublime.error_message(file_name + ' is already a directory')
         return
     # Mode 'a' creates the file without truncating an existing one;
     # a context manager guarantees the handle is closed (the original
     # used a manual open()/close() pair).
     with open(path, 'a'):
         pass
     self.window.open_file(path)
Example #31
0
    def convert(self, stream, options, file_ext, log, accelerators):
        """Convert an FB2 ebook into an OEB (XHTML + OPF) representation.

        Parses the FB2 XML (recovering from malformed markup where
        possible), applies the bundled XSL template to produce
        index.xhtml, extracts embedded stylesheets/binaries and a cover,
        and writes metadata.opf in the current working directory.

        Returns the absolute path of the generated metadata.opf.
        """
        from lxml import etree
        from calibre.ebooks.metadata.fb2 import ensure_namespace, get_fb2_data
        from calibre.ebooks.metadata.opf2 import OPFCreator
        from calibre.ebooks.metadata.meta import get_metadata
        from calibre.ebooks.oeb.base import XLINK_NS, XHTML_NS, RECOVER_PARSER
        from calibre.ebooks.chardet import xml_to_unicode
        self.log = log
        log.debug('Parsing XML...')
        raw = get_fb2_data(stream)[0]
        # NUL bytes are illegal in XML and would abort parsing.
        raw = raw.replace(b'\0', b'')
        raw = xml_to_unicode(raw,
                             strip_encoding_pats=True,
                             assume_utf8=True,
                             resolve_entities=True)[0]
        try:
            doc = etree.fromstring(raw)
        except etree.XMLSyntaxError:
            try:
                doc = etree.fromstring(raw, parser=RECOVER_PARSER)
                if doc is None:
                    raise Exception('parse failed')
            except:
                # Last resort: escape stray ampersands and re-parse with
                # the recovering parser.
                doc = etree.fromstring(raw.replace('& ', '&amp;'),
                                       parser=RECOVER_PARSER)
        if doc is None:
            raise ValueError('The FB2 file is not valid XML')
        doc = ensure_namespace(doc)
        try:
            fb_ns = doc.nsmap[doc.prefix]
        except Exception:
            fb_ns = FB2NS

        NAMESPACES = {'f': fb_ns, 'l': XLINK_NS}
        # Collect all embedded CSS stylesheets into a single string.
        stylesheets = doc.xpath(
            '//*[local-name() = "stylesheet" and @type="text/css"]')
        css = ''
        for s in stylesheets:
            css += etree.tostring(
                s, encoding=unicode, method='text', with_tail=False) + '\n\n'
        if css:
            import cssutils, logging
            parser = cssutils.CSSParser(fetcher=None,
                                        log=logging.getLogger('calibre.css'))

            XHTML_CSS_NAMESPACE = '@namespace "%s";\n' % XHTML_NS
            text = XHTML_CSS_NAMESPACE + css
            log.debug('Parsing stylesheet...')
            stylesheet = parser.parseString(text)
            stylesheet.namespaces['h'] = XHTML_NS
            css = unicode(stylesheet.cssText).replace('h|style', 'h|span')
            css = re.sub(r'name\s*=\s*', 'class=', css)
        self.extract_embedded_content(doc)
        log.debug('Converting XML to HTML...')
        ss = open(P('templates/fb2.xsl'), 'rb').read()
        ss = ss.replace("__FB_NS__", fb_ns)
        if options.no_inline_fb2_toc:
            log('Disabling generation of inline FB2 TOC')
            ss = re.compile(r'<!-- BUILD TOC -->.*<!-- END BUILD TOC -->',
                            re.DOTALL).sub('', ss)

        styledoc = etree.fromstring(ss)

        transform = etree.XSLT(styledoc)
        result = transform(doc)

        # Handle links of type note and cite
        notes = {
            a.get('href')[1:]: a
            for a in result.xpath('//a[@link_note and @href]')
            if a.get('href').startswith('#')
        }
        cites = {
            a.get('link_cite'): a
            for a in result.xpath('//a[@link_cite]') if not a.get('href', '')
        }
        all_ids = {x for x in result.xpath('//*/@id')}
        for cite, a in cites.iteritems():
            note = notes.get(cite, None)
            if note:
                # Find a 'cite%d' id not already present in the document.
                c = 1
                while 'cite%d' % c in all_ids:
                    c += 1
                if not note.get('id', None):
                    note.set('id', 'cite%d' % c)
                    all_ids.add(note.get('id'))
                a.set('href', '#%s' % note.get('id'))
        # Strip the temporary link_note/link_cite marker attributes.
        for x in result.xpath('//*[@link_note or @link_cite]'):
            x.attrib.pop('link_note', None)
            x.attrib.pop('link_cite', None)

        # Remap image srcs to the filenames of the extracted binaries.
        for img in result.xpath('//img[@src]'):
            src = img.get('src')
            img.set('src', self.binary_map.get(src, src))
        index = transform.tostring(result)
        open(u'index.xhtml', 'wb').write(index)
        open(u'inline-styles.css', 'wb').write(css)
        stream.seek(0)
        mi = get_metadata(stream, 'fb2')
        if not mi.title:
            mi.title = _('Unknown')
        if not mi.authors:
            mi.authors = [_('Unknown')]
        cpath = None
        # Prefer the cover from the parsed metadata; otherwise fall back
        # to the FB2 <coverpage> image reference.
        if mi.cover_data and mi.cover_data[1]:
            with open(u'fb2_cover_calibre_mi.jpg', 'wb') as f:
                f.write(mi.cover_data[1])
            cpath = os.path.abspath(u'fb2_cover_calibre_mi.jpg')
        else:
            for img in doc.xpath('//f:coverpage/f:image',
                                 namespaces=NAMESPACES):
                href = img.get('{%s}href' % XLINK_NS, img.get('href', None))
                if href is not None:
                    if href.startswith('#'):
                        href = href[1:]
                    cpath = os.path.abspath(href)
                    break

        opf = OPFCreator(os.getcwdu(), mi)
        entries = [(f2, guess_type(f2)[0]) for f2 in os.listdir(u'.')]
        opf.create_manifest(entries)
        opf.create_spine([u'index.xhtml'])
        if cpath:
            opf.guide.set_cover(cpath)
        with open(u'metadata.opf', 'wb') as f:
            opf.render(f)
        return os.path.join(os.getcwdu(), u'metadata.opf')
Example #32
0
    def convert(self, stream, options, file_ext, log, accelerators):
        """Unpack an EPUB into the current working directory and return the
        absolute path to a normalized content.opf.

        Locates the OPF (falling back to a filesystem scan), checks
        META-INF/encryption.xml for DRM, rewrites manifest/guide hrefs
        to be relative to the cwd, and prunes invalid spine entries.

        Raises DRMError for encrypted books and ValueError when no
        usable OPF or spine entries are found.
        """
        from calibre.utils.zipfile import ZipFile
        from calibre import walk
        from calibre.ebooks import DRMError
        from calibre.ebooks.metadata.opf2 import OPF
        try:
            zf = ZipFile(stream)
            zf.extractall(os.getcwdu())
        except:
            log.exception('EPUB appears to be invalid ZIP file, trying a'
                          ' more forgiving ZIP parser')
            from calibre.utils.localunzip import extractall
            stream.seek(0)
            extractall(stream)
        encfile = os.path.abspath(os.path.join('META-INF', 'encryption.xml'))
        opf = self.find_opf()
        if opf is None:
            # Fallback: scan the extracted tree for any plausible .opf,
            # skipping macOS resource-fork junk and hidden files.
            for f in walk(u'.'):
                if f.lower().endswith('.opf') and '__MACOSX' not in f and \
                        not os.path.basename(f).startswith('.'):
                    opf = os.path.abspath(f)
                    break
        path = getattr(stream, 'name', 'stream')

        if opf is None:
            raise ValueError(
                '%s is not a valid EPUB file (could not find opf)' % path)

        opf = os.path.relpath(opf, os.getcwdu())
        parts = os.path.split(opf)
        opf = OPF(opf, os.path.dirname(os.path.abspath(opf)))

        self._encrypted_font_uris = []
        if os.path.exists(encfile):
            if not self.process_encryption(encfile, opf, log):
                raise DRMError(os.path.basename(path))
        self.encrypted_fonts = self._encrypted_font_uris

        if len(parts) > 1 and parts[0]:
            # The OPF lives in a subdirectory: prefix manifest and guide
            # hrefs so they resolve relative to the cwd.
            delta = '/'.join(parts[:-1]) + '/'
            for elem in opf.itermanifest():
                elem.set('href', delta + elem.get('href'))
            for elem in opf.iterguide():
                elem.set('href', delta + elem.get('href'))

        self.removed_cover = self.rationalize_cover(opf, log)

        self.optimize_opf_parsing = opf
        for x in opf.itermanifest():
            if x.get('media-type', '') == 'application/x-dtbook+xml':
                raise ValueError(
                    'EPUB files with DTBook markup are not supported')

        # Collect manifest ids whose media types must not appear in the
        # spine (Adobe page templates and plain text).
        not_for_spine = set()
        for y in opf.itermanifest():
            id_ = y.get('id', None)
            if id_ and y.get('media-type', None) in \
                ('application/vnd.adobe-page-template+xml','application/text'):
                not_for_spine.add(id_)

        # Drop spine entries that are missing an idref, excluded above,
        # or duplicated.
        seen = set()
        for x in list(opf.iterspine()):
            ref = x.get('idref', None)
            if not ref or ref in not_for_spine or ref in seen:
                x.getparent().remove(x)
                continue
            seen.add(ref)

        if len(list(opf.iterspine())) == 0:
            raise ValueError('No valid entries in the spine of this EPUB')

        with open('content.opf', 'wb') as nopf:
            nopf.write(opf.render())

        return os.path.abspath(u'content.opf')
Example #33
0
def getcwd():
    """Return the current working directory as text on both Python 2
    (via the unicode-aware os.getcwdu) and Python 3 (plain os.getcwd)."""
    on_py3 = sys.version_info > (3, 0)
    return os.getcwd() if on_py3 else os.getcwdu()
Example #34
0
####################################################################
import sys, os, re, inspect, json
from optparse import OptionParser
from datetime import date
import mako, markdown
from mako.lookup import TemplateLookup
from shiji import urldispatch, webapi

ARG_ERR = -1
MKDIR_ERR = -2
WRITE_FILE_ERR = -3
TEMPLATE_ERR = -4

re_version_entry = re.compile(r'\s*"(?P<version_id>[.\d]+)"\s*:\s*\(r"(?P<version>[.\d]+)",\s*(?P<version_module>v[_\d]+)\)\s*')

curr_dir = os.getcwdu()
templates_dir = os.path.dirname(os.path.realpath(__file__)) + "/utility_templates"
template_lookup = TemplateLookup(directories=templates_dir,
                                 output_encoding="utf-8",
                                 input_encoding="utf-8",
                                 encoding_errors="replace")

def extract_version_map(fn):
    version_list = []
    
    f_api_init = open(fn, "r")
    in_versions = False
    
    for line in f_api_init.readlines():
        if line[:16] == "api_versions = {":
            in_versions = True
Example #35
0
def main(argv=None, directory=None):
    """
    Main entry point for the tool, used by setup.py
    Returns a value that can be passed into exit() specifying
    the exit code.
    1 is an error
    0 is successful run

    The behaviour (coverage vs. quality report) is selected from the
    name the program was invoked under (argv[0]).
    """
    logging.basicConfig(format='%(message)s')

    argv = argv or sys.argv
    # Init the path tool to work with the specified directory,
    # or the current directory if it isn't set.
    if not directory:
        try:
            # os.getcwdu() exists only on Python 2 (unicode cwd);
            # Python 3 raises AttributeError and we use getcwd().
            directory = os.getcwdu()
        except AttributeError:
            directory = os.getcwd()

    progname = argv[0]
    filename = os.path.basename(progname)
    name, _ = os.path.splitext(filename)

    if 'diff-cover' in name:
        # Coverage mode: compare coverage.xml against a branch and fail
        # if the diff coverage percentage is below fail_under.
        arg_dict = parse_coverage_args(argv[1:])
        GitPathTool.set_cwd(directory)
        fail_under = arg_dict.get('fail_under')
        percent_covered = generate_coverage_report(
            arg_dict['coverage_xml'],
            arg_dict['compare_branch'],
            html_report=arg_dict['html_report'],
            css_file=arg_dict['external_css_file'],
            ignore_staged=arg_dict['ignore_staged'],
            ignore_unstaged=arg_dict['ignore_unstaged'],
            exclude=arg_dict['exclude'],
        )

        if percent_covered >= fail_under:
            return 0
        else:
            LOGGER.error("Failure. Coverage is below {0}%.".format(fail_under))
            return 1

    elif 'diff-quality' in name:
        # Quality mode: run a lint/quality tool over the diff.
        arg_dict = parse_quality_args(argv[1:])
        GitPathTool.set_cwd(directory)
        fail_under = arg_dict.get('fail_under')
        tool = arg_dict['violations']
        user_options = arg_dict.get('options')
        if user_options:
            # strip quotes if present
            first_char = user_options[0]
            last_char = user_options[-1]
            if first_char == last_char and first_char in ('"', "'"):
                user_options = user_options[1:-1]
        driver = QUALITY_DRIVERS.get(tool)

        if driver is not None:
            # If we've been given pre-generated reports,
            # try to open the files
            input_reports = []

            for path in arg_dict['input_reports']:
                try:
                    input_reports.append(open(path, 'rb'))
                except IOError:
                    LOGGER.warning("Could not load '{0}'".format(path))
            try:
                reporter = QualityReporter(driver, input_reports, user_options)
                percent_passing = generate_quality_report(
                    reporter,
                    arg_dict['compare_branch'],
                    html_report=arg_dict['html_report'],
                    css_file=arg_dict['external_css_file'],
                    ignore_staged=arg_dict['ignore_staged'],
                    ignore_unstaged=arg_dict['ignore_unstaged'],
                    exclude=arg_dict['exclude'],
                )
                if percent_passing >= fail_under:
                    return 0
                else:
                    LOGGER.error(
                        "Failure. Quality is below {0}%.".format(fail_under))
                    return 1

            except (ImportError, EnvironmentError):
                LOGGER.error("Quality tool not installed: '{0}'".format(tool))
                return 1
            # Close any reports we opened
            finally:
                for file_handle in input_reports:
                    file_handle.close()

        else:
            LOGGER.error("Quality tool not recognized: '{0}'".format(tool))
            return 1

    else:
        assert False, 'Expect diff-cover or diff-quality in {0}'.format(name)
Example #36
0
    cd(here)


# ----------------------------------------------------------------------------
# Script starts
#
if __name__ == '__main__':

    # Determine the commit message that will be used on the branch gh-pages.
    # NOTE(review): msg is not used in this visible section; presumably the
    # commit step further down (not shown here) consumes it — confirm.
    try:
        msg = sys.argv[1]
    except IndexError:
        msg = 'Updated documentation.'

    # Check out the repository.
    startdir = os.getcwdu()
    if not os.path.exists(pages_dir):
        # Initialize the repository.
        init_repo(pages_dir)
    else:
        # If the repository exists make sure it is on the right branch and
        # is up-to-date.
        cd(pages_dir)
        sh('git checkout gh-pages')
        sh('git pull')
        cd(startdir)

    # Copy the built documentation to the gh-pages directory.
    sh('rm -rf {}/*'.format(pages_dir))
    sh('cp -r {}/* {}/'.format(html_dir, pages_dir))
Example #37
0
        "getBacktrace = function() { };\n",
        "getLcovInfo = function() { };\n",
        "isAsmJSCompilationAvailable = function() { };\n",
        "offThreadCompileScript = function() { };\n",
        "oomTest = function() { };\n",
        "printProfilerEvents = function() { };\n",
        "saveStack = function() { };\n",
        "wasmIsSupported = function() { return true; };\n",
        "// DDBEGIN\n"
    ]
    with io.open(str(jsfunfuzzOutputFilename), "r", encoding="utf-8", errors="replace") as f:
        for line in f:
            if line.startswith(marker):
                sline = line[len(marker):]
                # We only override wasmIsSupported above for the main global.
                # Hopefully, any imported tests that try to use wasmIsSupported within a newGlobal
                # will do so in a straightforward way where everything is on one line.
                if not ("newGlobal" in sline and "wasmIsSupported" in sline):
                    lines.append(sline)
    lines += [
        "\ntry{print(uneval(this));}catch(e){}\n",
        "// DDEND\n"
    ]
    return lines


if __name__ == "__main__":
    # pylint: disable=no-member
    many_timed_runs(None, os_ops.make_wtmp_dir(Path(os.getcwdu() if sys.version_info.major == 2 else os.getcwd())),
                    sys.argv[1:], create_collector.make_collector(), False)
Example #38
0
 def _abspath(path):
     """Return an absolute version of *path*, resolving relative paths
     against the unicode current working directory.  On Windows, UNC
     paths are absolutized directly rather than joined onto the cwd."""
     unc_prefix = os.path.splitunc(path)[0] if WINDOWS else None
     if unc_prefix:
         return os.path.abspath(path)
     base = os.getcwdu()
     return os.path.abspath(os.path.join(base, path))
Example #39
0
                if p is not run[-1]:
                    style.apply_between_border()
            if has_visible_border:
                border_style.margin_left, border_style.margin_right = max_left, max_right
                self.block_runs.append((border_style, run))

        run = []
        for p in paras:
            if run and self.frame_map.get(p) == self.frame_map.get(run[-1]):
                style = self.styles.resolve_paragraph(p)
                last_style = self.styles.resolve_paragraph(run[-1])
                if style.has_identical_borders(last_style):
                    run.append(p)
                    continue
            if len(run) > 1:
                process_run(run)
            run = [p]
        if len(run) > 1:
            process_run(run)


if __name__ == '__main__':
    import shutil
    from calibre.utils.logging import default_log
    default_log.filter_level = default_log.DEBUG
    # Convert the DOCX given as the last CLI argument into ./docx_input,
    # replacing any output left over from a previous run.
    dest_dir = os.path.join(os.getcwdu(), 'docx_input')
    if os.path.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.mkdir(dest_dir)
    Convert(sys.argv[-1], dest_dir=dest_dir, log=default_log)()
def run_code(code, code_path, ns=None, function_name=None):
    """
    Import a Python module from a path, and run the function given by
    name, if function_name is not None.

    The code is executed in namespace *ns* (created if None) with the
    working directory, sys.path, sys.argv and stdout temporarily
    adjusted; all of that global state is restored afterwards and *ns*
    is returned.  Raises PlotError if the executed code fails.
    """

    # Change the working directory to the directory of the example, so
    # it can get at its data files, if any.  Add its path to sys.path
    # so it can import any helper modules sitting beside it.
    if six.PY2:
        pwd = os.getcwdu()
    else:
        pwd = os.getcwd()
    old_sys_path = list(sys.path)
    if setup.config.plot_working_directory is not None:
        try:
            os.chdir(setup.config.plot_working_directory)
        except OSError as err:
            raise OSError(str(err) + '\n`plot_working_directory` option in'
                          'Sphinx configuration file must be a valid '
                          'directory path')
        except TypeError as err:
            raise TypeError(str(err) + '\n`plot_working_directory` option in '
                            'Sphinx configuration file must be a string or '
                            'None')
        sys.path.insert(0, setup.config.plot_working_directory)
    elif code_path is not None:
        dirname = os.path.abspath(os.path.dirname(code_path))
        os.chdir(dirname)
        sys.path.insert(0, dirname)

    # Reset sys.argv
    old_sys_argv = sys.argv
    sys.argv = [code_path]

    # Redirect stdout
    stdout = sys.stdout
    if six.PY3:
        sys.stdout = io.StringIO()
    else:
        sys.stdout = cStringIO.StringIO()

    # Assign a do-nothing print function to the namespace.  There
    # doesn't seem to be any other way to provide a way to (not) print
    # that works correctly across Python 2 and 3.
    def _dummy_print(*arg, **kwarg):
        pass

    try:
        try:
            code = unescape_doctest(code)
            if ns is None:
                ns = {}
            if not ns:
                # Fresh namespace: run the configured (or default) setup code.
                if setup.config.plot_pre_code is None:
                    six.exec_(six.text_type("import numpy as np\n" +
                    "from matplotlib import pyplot as plt\n"), ns)
                else:
                    six.exec_(six.text_type(setup.config.plot_pre_code), ns)
            ns['print'] = _dummy_print
            if "__main__" in code:
                six.exec_("__name__ = '__main__'", ns)
            code = remove_coding(code)
            six.exec_(code, ns)
            if function_name is not None:
                six.exec_(function_name + "()", ns)
        except (Exception, SystemExit) as err:
            raise PlotError(traceback.format_exc())
    finally:
        # Always restore process-global state, even on failure.
        os.chdir(pwd)
        sys.argv = old_sys_argv
        sys.path[:] = old_sys_path
        sys.stdout = stdout
    return ns
Example #41
0
class NotFoundError(NameError):
    """Raised when something requested cannot be located.
    NOTE(review): presumably a missing dependency/module in the deps
    loader below — confirm against the raisers elsewhere in the file."""
    pass


class IgnoreFileException(Exception):
    """Control-flow exception signalling that the current file should be
    skipped.  NOTE(review): semantics inferred from the name — confirm
    against the raisers/handlers elsewhere in the file."""
    pass


logger = getLogger('deps')

# Paths from which libraries may be loaded: every non-empty sys.path
# entry plus the bundled 'packages' directories.
LIBS_AUTHORIZED_PATHS = [x for x in sys.path if x != ''
                         ] + [os.path.join(ROOT, 'packages'), 'packages']

# Candidate locations for library patches, searched in this order
# (current working directory first, then the project tree).
PATCHES_PATHS = [
    os.path.abspath(os.path.join(os.getcwdu(), 'packages', 'patches')),
    os.path.abspath(os.path.join(ROOT, 'packages', 'patches')),
    os.path.abspath(os.path.join(ROOT, 'library_patches'))
]

# ../libs - for windows bundles, to use simple zip command
# site-packages/win32 - for pywin32
COMMON_SEARCH_PREFIXES = ('', 'site-packages/win32/lib', 'site-packages/win32',
                          'site-packages/pywin32_system32', 'site-packages',
                          'lib-dynload')

# Suffixes tried when resolving a module name to a concrete file/dir.
COMMON_MODULE_ENDINGS = ('/', '.py', '.pyo', '.pyc', '.pyd', '.so', '.dll')

# dependencies to load for each modules
WELL_KNOWN_DEPS = {
    'pupwinutils.memexec': {
Example #42
0
def obtainShell(shell, updateToRev=None, updateLatestTxt=False):  # pylint: disable=invalid-name,missing-param-doc
    # pylint: disable=missing-raises-doc,missing-type-doc,too-many-branches,too-complex,too-many-statements
    """Obtain a js shell. Keep the objdir for now, especially .a files, for symbols."""
    assert sm_compile_helpers.get_lock_dir_path(Path.home(), shell.build_opts.repo_dir).is_dir()
    # A ".busted" file marks a revision whose compilation previously failed.
    cached_no_shell = shell.get_shell_cache_js_bin_path().with_suffix(".busted")

    if shell.get_shell_cache_js_bin_path().is_file():
        # Don't remove the comma at the end of this line, and thus remove the newline printed.
        # We would break JSBugMon.
        print("Found cached shell...")
        # Assuming that since the binary is present, everything else (e.g. symbols) is also present
        if platform.system() == "Windows":
            sm_compile_helpers.verify_full_win_pageheap(shell.get_shell_cache_js_bin_path())
        return
    elif cached_no_shell.is_file():
        raise Exception("Found a cached shell that failed compilation...")
    elif shell.get_shell_cache_dir().is_dir():
        print("Found a cache dir without a successful/failed shell...")
        sps.rm_tree_incl_readonly(shell.get_shell_cache_dir())

    shell.get_shell_cache_dir().mkdir()
    hg_helpers.destroyPyc(shell.build_opts.repo_dir)

    # Before compiling locally, try fetching a previously-built shell
    # (or a .busted marker) from the S3 cache.
    s3cache_obj = s3cache.S3Cache(S3_SHELL_CACHE_DIRNAME)
    use_s3cache = s3cache_obj.connect()

    if use_s3cache:
        if s3cache_obj.downloadFile(str(shell.get_shell_name_without_ext() + ".busted"),
                                    str(shell.get_shell_cache_js_bin_path()) + ".busted"):
            raise Exception("Found a .busted file for rev " + shell.get_hg_hash())

        if s3cache_obj.downloadFile(str(shell.get_shell_name_without_ext() + ".tar.bz2"),
                                    str(shell.get_s3_tar_with_ext_full_path())):
            print("Extracting shell...")
            with tarfile.open(str(shell.get_s3_tar_with_ext_full_path()), "r") as f:
                f.extractall(str(shell.get_shell_cache_dir()))
            # Delete tarball after downloading from S3
            shell.get_s3_tar_with_ext_full_path().unlink()
            if platform.system() == "Windows":
                sm_compile_helpers.verify_full_win_pageheap(shell.get_shell_cache_js_bin_path())
            return

    try:
        if updateToRev:
            # Print *with* a trailing newline to avoid breaking other stuff
            print("Updating to rev %s in the %s repository..." % (
                updateToRev,
                str(shell.build_opts.repo_dir)))
            subprocess.run(["hg", "-R", str(shell.build_opts.repo_dir),
                            "update", "-C", "-r", updateToRev],
                           check=True,
                           # pylint: disable=no-member
                           cwd=os.getcwdu() if sys.version_info.major == 2 else os.getcwd(),
                           stderr=subprocess.DEVNULL,
                           timeout=999)
        if shell.build_opts.patch_file:
            hg_helpers.patch_hg_repo_with_mq(shell.build_opts.patch_file, shell.get_repo_dir())

        cfgJsCompile(shell)
        if platform.system() == "Windows":
            sm_compile_helpers.verify_full_win_pageheap(shell.get_shell_cache_js_bin_path())
    except KeyboardInterrupt:
        sps.rm_tree_incl_readonly(shell.get_shell_cache_dir())
        raise
    except (subprocess.CalledProcessError, OSError) as ex:
        # Remove the cache dir, but recreate it with only the .busted file.
        sps.rm_tree_incl_readonly(shell.get_shell_cache_dir())
        shell.get_shell_cache_dir().mkdir()
        sm_compile_helpers.createBustedFile(cached_no_shell, ex)
        if use_s3cache:
            s3cache_obj.uploadFileToS3(str(shell.get_shell_cache_js_bin_path()) + ".busted")
        raise
    finally:
        # Any applied MQ patch must be popped whether the build succeeded or not.
        if shell.build_opts.patch_file:
            hg_helpers.qpop_qrm_applied_patch(shell.build_opts.patch_file, shell.get_repo_dir())

    if use_s3cache:
        s3cache_obj.compressAndUploadDirTarball(str(shell.get_shell_cache_dir()),
                                                str(shell.get_s3_tar_with_ext_full_path()))
        if updateLatestTxt:
            # So js-dbg-64-dm-darwin-cdcd33fd6e39 becomes js-dbg-64-dm-darwin-latest.txt with
            # js-dbg-64-dm-darwin-cdcd33fd6e39 as its contents.
            txt_info = "-".join(str(shell.get_s3_tar_name_with_ext()).split("-")[:-1] + ["latest"]) + ".txt"
            s3cache_obj.uploadStrToS3("", txt_info, str(shell.get_s3_tar_name_with_ext()))
        shell.get_s3_tar_with_ext_full_path().unlink()
Example #43
0
 def copy_src_to_mount_point(self):
     """Recursively copy the current working directory (the build
     context) into this build's directory under the mount point."""
     source = os.getcwdu()
     destination = os.path.join(self.get_mount_point(), self._build_id)
     shutil.copytree(source, destination)
import collections
from datetime import datetime
from datetime import timedelta
from time import strptime
import time
import operator
import sys

# EXTRACT MATCHES WITH EMBEDDED STATISTICS

# ---- PARAMETERS ----
# --------------------
#List of players to work with

playersListDirectory = 'D:\\OneDrive\\Projects\\BettingSerivce\\FootballDataCollection\\footballData\\DATA\\players_list\\'
playersListDirectory = os.getcwdu() + '..\\..\\DATA\\players_list\\'
#Matches to work with
matchDirectory = 'D:\\OneDrive\\Projects\\BettingSerivce\\FootballDataCollection\\footballData\\footballData\\matches\\'
#Player files to work with
playersFileDirectory = 'D:\\OneDrive\\Projects\\BettingSerivce\\FootballDataCollection\\footballData\\DATA\\players\\'
#Where to output the match files with embedded statistics
output = 'D:\\OneDrive\\Projects\\BettingSerivce\\FootballDataCollection\\footballData\\DATA\\'
#Where to output the missing players list
missingPlayerFileName = 'missingPlayers.txt'
#How many missing players to output, ordered by their importance (i.e: the number of match they prevent to retrieve)
missingPlayerExtractSize = 1000
writeMatchFiles = True  #Write the match files or just run an analysis ?
validMatch = set(
)  #Matches that we can parse and for which we have the full lineup stats
existingPlayersFileDict = collections.OrderedDict()
missingPlayersDict = dict()
Example #45
0
                        help='file(s) with test data in csv format.')
    args = parser.parse_args()

    #initialize the database
    db = sqlite3.connect(':memory:')
    db_cursor = db.cursor()
    init_sequence_table(db_cursor)

    #read in test data from input file(s)
    for data_filename in args.data_file:
        data_file = open(data_filename, 'rb')
        populate_sequence_table(db_cursor, data_file)
        data_file.close()

    #prepare tmp directory for degen_primer output files
    wdir = os.getcwdu()
    tmp_dir = mkdtemp(prefix='degen_primer_test_')
    os.chdir(tmp_dir)
    print 'Working directory is now:', tmp_dir

    #output columns and regexps for searching corresponding values in report files
    output = [('SID', 'sequence', 'GC%', 'length', 'Tm', 'Tm-predicted',
               'delta-Tm', 'Dimers', 'Dimer dG', 'Hairpins', 'Hairpin dG')]
    values = {
        'Tm': [re.compile(' *(Tm|Tm mean): *(\d{1,3}\.\d{1}) C$'), None],
        'Dimers': [re.compile(' *(Dimers count): *(\d{1,3})$'), None],
        'Dimer_dG': [
            re.compile(' *(Most stable dimer): *(-\d{1,3}\.\d{2}) kcal/mol$'),
            None
        ],
        'Hairpins': [re.compile(' *(Hairpins count): *(\d{1,3})$'), None],
Example #46
0
def _codecov_submit():
    """Build a codecov.io upload from coverage.xml and submit it.

    Detects the CI environment (Travis, AppVeyor, CircleCI) from
    environment variables, or falls back to reading local git state plus
    codecov.json; then POSTs the query to codecov.io and PUTs the
    payload to the returned S3 upload URL.  Returns early (without
    uploading) when local git state is missing or dirty.
    """
    if os.getenv('CI') == 'true' and os.getenv('TRAVIS') == 'true':
        # http://docs.travis-ci.com/user/environment-variables/#Default-Environment-Variables
        build_url = 'https://travis-ci.org/%s/jobs/%s' % (
            os.getenv('TRAVIS_REPO_SLUG'), os.getenv('TRAVIS_JOB_ID'))
        query = {
            'service': 'travis',
            'branch': os.getenv('TRAVIS_BRANCH'),
            'build': os.getenv('TRAVIS_JOB_NUMBER'),
            'pr': os.getenv('TRAVIS_PULL_REQUEST'),
            'job': os.getenv('TRAVIS_JOB_ID'),
            'tag': os.getenv('TRAVIS_TAG'),
            'slug': os.getenv('TRAVIS_REPO_SLUG'),
            'commit': os.getenv('TRAVIS_COMMIT'),
            'build_url': build_url,
        }
        root = os.getenv('TRAVIS_BUILD_DIR')

    elif os.getenv('CI') == 'True' and os.getenv('APPVEYOR') == 'True':
        # http://www.appveyor.com/docs/environment-variables
        build_url = 'https://ci.appveyor.com/project/%s/build/%s' % (os.getenv(
            'APPVEYOR_REPO_NAME'), os.getenv('APPVEYOR_BUILD_VERSION'))
        query = {
            'service':
            "appveyor",
            'branch':
            os.getenv('APPVEYOR_REPO_BRANCH'),
            'build':
            os.getenv('APPVEYOR_JOB_ID'),
            'pr':
            os.getenv('APPVEYOR_PULL_REQUEST_NUMBER'),
            'job':
            '/'.join((os.getenv('APPVEYOR_ACCOUNT_NAME'),
                      os.getenv('APPVEYOR_PROJECT_SLUG'),
                      os.getenv('APPVEYOR_BUILD_VERSION'))),
            'tag':
            os.getenv('APPVEYOR_REPO_TAG_NAME'),
            'slug':
            os.getenv('APPVEYOR_REPO_NAME'),
            'commit':
            os.getenv('APPVEYOR_REPO_COMMIT'),
            'build_url':
            build_url,
        }
        root = os.getenv('APPVEYOR_BUILD_FOLDER')

    elif os.getenv('CI') == 'true' and os.getenv('CIRCLECI') == 'true':
        # https://circleci.com/docs/environment-variables
        query = {
            'service':
            'circleci',
            'branch':
            os.getenv('CIRCLE_BRANCH'),
            'build':
            os.getenv('CIRCLE_BUILD_NUM'),
            'pr':
            os.getenv('CIRCLE_PR_NUMBER'),
            'job':
            os.getenv('CIRCLE_BUILD_NUM') + "." +
            os.getenv('CIRCLE_NODE_INDEX'),
            'tag':
            os.getenv('CIRCLE_TAG'),
            'slug':
            os.getenv('CIRCLE_PROJECT_USERNAME') + "/" +
            os.getenv('CIRCLE_PROJECT_REPONAME'),
            'commit':
            os.getenv('CIRCLE_SHA1'),
            'build_url':
            os.getenv('CIRCLE_BUILD_URL'),
        }
        # os.getcwdu() exists only on Python 2 (unicode cwd).
        if sys.version_info < (3, ):
            root = os.getcwdu()
        else:
            root = os.getcwd()
    else:
        # Local run: require a clean git checkout and a codecov.json with
        # the repo slug and upload token.
        root = package_root
        if not os.path.exists(os.path.join(root, '.git')):
            print('git repository not found, not submitting coverage data')
            return
        git_status = _git_command(['status', '--porcelain'], root)
        if git_status != '':
            print(
                'git repository has uncommitted changes, not submitting coverage data'
            )
            return

        slug = None
        token = None
        try:
            with open(os.path.join(root, 'codecov.json'), 'rb') as f:
                json_data = json.loads(f.read().decode('utf-8'))
                slug = json_data['slug']
                token = json_data['token']
        except (OSError, ValueError, UnicodeDecodeError, KeyError):
            print('error reading codecov.json')
            return

        branch = _git_command(['rev-parse', '--abbrev-ref', 'HEAD'], root)
        commit = _git_command(['rev-parse', '--verify', 'HEAD'], root)
        tag = _git_command(['name-rev', '--tags', '--name-only', commit], root)
        impl = _plat.python_implementation()
        major, minor = _plat.python_version_tuple()[0:2]
        build_name = '%s %s %s.%s' % (_platform_name(), impl, major, minor)
        query = {
            'branch': branch,
            'commit': commit,
            'slug': slug,
            'token': token,
            'build': build_name,
        }
        if tag != 'undefined':
            query['tag'] = tag

    # Assemble the multi-section upload payload: environment info, the
    # file list, then the coverage XML itself.
    payload = 'PLATFORM=%s\n' % _platform_name()
    payload += 'PYTHON_VERSION=%s %s\n' % (_plat.python_version(),
                                           _plat.python_implementation())
    if 'oscrypto' in sys.modules:
        payload += 'OSCRYPTO_BACKEND=%s\n' % sys.modules['oscrypto'].backend()
    payload += '<<<<<< ENV\n'

    for path in _list_files(root):
        payload += path + '\n'
    payload += '<<<<<< network\n'

    payload += '# path=coverage.xml\n'
    with open(os.path.join(root, 'coverage.xml'), 'r', encoding='utf-8') as f:
        payload += f.read() + '\n'
    payload += '<<<<<< EOF\n'

    url = 'https://codecov.io/upload/v4'
    headers = {'Accept': 'text/plain'}
    # Drop empty/None values from the query string.
    filtered_query = {}
    for key in query:
        value = query[key]
        if value == '' or value is None:
            continue
        filtered_query[key] = value

    print('Submitting coverage info to codecov.io')
    info = _do_request('POST', url, headers, query_params=filtered_query)

    # The response body contains the S3 upload URL on its second line/token.
    encoding = info[1] or 'utf-8'
    text = info[2].decode(encoding).strip()
    parts = text.split()
    upload_url = parts[1]

    headers = {
        'Content-Type': 'text/plain',
        'x-amz-acl': 'public-read',
        'x-amz-storage-class': 'REDUCED_REDUNDANCY'
    }

    print('Uploading coverage data to codecov.io S3 bucket')
    _do_request('PUT', upload_url, headers, data=payload.encode('utf-8'))
Example #47
0
''' E-book management software'''
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <*****@*****.**>'
__docformat__ = 'restructuredtext en'

import sys, os, re, time, random, __builtin__, warnings

# Inject dynamic_property into the builtins namespace so every calibre module
# can use it without importing it (py2-only: __builtin__ is `builtins` in py3).
__builtin__.__dict__['dynamic_property'] = lambda func: func(None)
from math import floor
from functools import partial

warnings.simplefilter('ignore', DeprecationWarning)
# Probe the current working directory; if it no longer exists, getcwdu()
# raises and we fall back to the user's home directory. The bare except is
# deliberate best-effort here (any failure means "cwd unusable").
try:
    os.getcwdu()
except:
    os.chdir(os.path.expanduser('~'))

from calibre.constants import (iswindows, isosx, islinux, isfrozen, isbsd,
                               preferred_encoding, __appname__, __version__,
                               __author__, win32event, win32api, winerror,
                               fcntl, filesystem_encoding, plugins, config_dir)
from calibre.startup import winutil, winutilerror

# NOTE: the leading `False and` permanently disables this workaround; the
# block is kept only as documentation of the gentoo bug it once addressed.
if False and islinux and not getattr(sys, 'frozen', False):
    # Imported before PyQt to workaround PyQt util-linux conflict discovered on gentoo
    # See http://bugs.gentoo.org/show_bug.cgi?id=317557
    # Importing uuid is slow so get rid of this at some point, maybe in a few
    # years when even Debian has caught up
    # Also remember to remove it from site.py in the binary builds
    import uuid
    uuid.uuid4()
Example #48
0
def setup_locale(cli_lang):
    """Sets up wx and Wrye Bash locales, ensuring they match or falling back
    to English if that is impossible. Also considers cli_lang as an override,
    installs the gettext translation and remembers the locale we end up with
    as bass.active_locale.

    bolt.deprint must be set up and ready to use (i.e. hooked up to the
    BashBugDump if debug mode is enabled) and the working directory must be
    correct (otherwise detection of translation files will not work and this
    method will always set locale to English).

    :param cli_lang: The language the user specified on the command line, or
        None.
    :return: The wx.Locale object we ended up using."""
    # We need a throwaway wx.App so that the calls below work
    import wx as _wx
    _temp_app = _wx.App(False)
    # Set the wx language - otherwise we will crash when loading any images
    cli_target = cli_lang and _wx.Locale.FindLanguageInfo(cli_lang)
    if cli_target:
        # The user specified a language that wx recognizes
        target_language = cli_target.Language
    else:
        # Fall back on the default language
        target_language = _wx.LANGUAGE_DEFAULT
    # We now have a language that wx supports, but we don't know if WB supports
    # it - so check that next
    target_locale = _wx.Locale(target_language)
    target_name = target_locale.GetCanonicalName()
    # Translation catalogs live under bash/l10n relative to the working dir
    # (py2-only os.getcwdu returns a unicode path).
    trans_path = os.path.join(os.getcwdu(), u'bash', u'l10n')
    if not os.path.exists(trans_path):
        # HACK: the CI has to run tests from the top dir, which causes us to
        # have a non-Mopy working dir here. Real fix is ditching the fake
        # startup and adding a real headless mode to WB (see #568 and #554)
        trans_path = os.path.join(os.getcwdu(), u'Mopy', u'bash', u'l10n')
    # Supported languages == the stems of the *.po files we ship
    supported_l10ns = [
        l[:-3] for l in os.listdir(trans_path) if l[-3:] == u'.po'
    ]
    if target_name not in supported_l10ns:
        # We don't support this exact language. Check if we support any similar
        # languages (i.e. same prefix)
        wanted_prefix = target_name.split(u'_', 1)[0]
        for l in supported_l10ns:
            if l.split(u'_', 1)[0] == wanted_prefix:
                bolt.deprint(u"No translation file for language '%s', "
                             u"using similar language with translation file "
                             u"'%s' instead" % (target_name, l))
                target_name = l
                # Try switching wx to this locale as well
                lang_info = _wx.Locale.FindLanguageInfo(target_name)
                if lang_info:
                    target_locale = _wx.Locale(lang_info.Language)
                else:
                    # Didn't work, try the prefix to get a similar language
                    lang_info = _wx.Locale.FindLanguageInfo(wanted_prefix)
                    if lang_info:
                        target_locale = _wx.Locale(lang_info.Language)
                        bolt.deprint(
                            u"wxPython does not support language "
                            u"'%s', using supported language '%s' "
                            u"instead" %
                            (target_name, target_locale.GetCanonicalName()))
                    else:
                        # If even that didn't work, all we can do is complain
                        # about it and fall back to English
                        bolt.deprint(u"wxPython does not support the language "
                                     u"family '%s', will fall back to "
                                     u"English" % wanted_prefix)
                break
    po, mo = (os.path.join(trans_path, target_name + ext)
              for ext in (u'.po', u'.mo'))
    # English is the default, so it doesn't have a translation file
    # For all other languages, check if we have a translation
    if not target_name.startswith(u'en_') and not os.path.isfile(po):
        # WB does not support the default language, use English instead
        target_locale = _wx.Locale(_wx.LANGUAGE_ENGLISH)
        fallback_name = target_locale.GetCanonicalName()
        bolt.deprint(u"No translation file for language '%s', falling back to "
                     u"'%s'" % (target_name, fallback_name))
        target_name = fallback_name
    bolt.deprint(u"Set wxPython language to '%s'" %
                 target_locale.GetCanonicalName())
    bolt.deprint(u"Set Wrye Bash language to '%s'" % target_name)
    # Next, set the Wrye Bash locale based on the one we grabbed from wx
    if not os.path.isfile(po) and not os.path.isfile(mo):
        # We're using English or don't have a translation file - either way,
        # prepare the English translation
        trans = gettext.NullTranslations()
    else:
        try:
            # We have a translation file, check if it has to be compiled
            # (i.e. no .mo yet, or the .po is newer than the compiled .mo)
            if not os.path.isfile(mo) or (os.path.getmtime(po) >
                                          os.path.getmtime(mo)):
                # Try compiling - have to do it differently if we're a
                # standalone build
                # First element is a dummy argv[0]; the rest are msgfmt's
                # usual "-o <out> <in>" arguments
                args = [u'm', u'-o', mo, po]
                if bass.is_standalone:
                    # Delayed import, since it's only present on standalone
                    import msgfmt
                    # msgfmt.main() reads sys.argv, so swap it out temporarily
                    old_argv = sys.argv[:]
                    sys.argv = args
                    msgfmt.main()
                    sys.argv = old_argv
                else:
                    # msgfmt is only in Tools, so call it explicitly
                    from .env import python_tools_dir
                    m = os.path.join(python_tools_dir(), u'i18n', u'msgfmt.py')
                    subprocess.call([sys.executable, m, u'-o', mo, po])
            # We've successfully compiled the translation, read it into memory
            with open(mo, u'rb') as trans_file:
                trans = gettext.GNUTranslations(trans_file)
        except (UnicodeError, OSError):
            bolt.deprint(u'Error loading translation file:')
            traceback.print_exc()
            trans = gettext.NullTranslations()
    # Everything has gone smoothly, install the translation and remember what
    # we ended up with as the final locale
    # PY3: drop the unicode=True, gone in py3 (this is always unicode now)
    trans.install(unicode=True)
    bass.active_locale = target_name
    # The throwaway app is no longer needed once the locale is configured
    del _temp_app
    return target_locale
Example #49
0
def _start_commandline():  # pylint: disable=R0912,R0915
    """Start the command line interface to the program.

    The root logger is configured to write DEBUG+ messages into the
    destination directory if not configured otherwise. A handler that
    writes INFO+ messages to :data:`sys.stderr` is added to
    :const:`.LOGGER`.

    The interface usage is::

        usage: prog [-h] {setup,build} ...

        Test the DNF stack.

        positional arguments:
          {setup,build}  the action to be performed

        optional arguments:
          -h, --help     show this help message and exit

        If an error occurs the exit status is non-zero.

    The usage of the "setup" command is::

        usage: prog setup [-h] [--add-repository URL]
                          CHROOT [CHROOT ...] PROJECT

        Create a new Copr project.

        positional arguments:
          CHROOT                the chroots to be used in the project
                                ("22" adds "fedora-22-i386,
                                fedora-22-x86_64", "23" adds
                                "fedora-23-i386, fedora-23-x86_64",
                                "rawhide" adds "fedora-rawhide-i386,
                                fedora-rawhide-x86_64")
          PROJECT               the name of the project

        optional arguments:
          -h, --help            show this help message and exit
          --add-repository URL  the URL of an additional repository
                                that is required

    The usage of the "build" command is::

        usage: prog build [-h] PROJECT {tito,librepo,libcomps} ...

        Build RPMs of a project from the checkout in the current working
        directory in Copr.

        positional arguments:
          PROJECT               the name of the Copr project
          {tito,librepo,libcomps}
                                the type of the project

        optional arguments:
          -h, --help            show this help message and exit

    The usage for "tito" projects is::

        usage: prog build PROJECT tito [-h]

        Build a tito-enabled project.

        optional arguments:
          -h, --help  show this help message and exit

        The "tito" executable must be available.

    The usage for "librepo" projects is::

        usage: prog build PROJECT librepo [-h] [--release RELEASE] SPEC

        Build a librepo project fork.

        positional arguments:
          SPEC               the ID of the Fedora Git
                             revision of the spec file

        optional arguments:
          -h, --help         show this help message and exit
          --release RELEASE  a custom release number of the
                             resulting RPMs

        The "git", "rpmbuild", "sh" and "xz" executables must be
        available.

    The usage for "libcomps" projects is::

        usage: prog build PROJECT libcomps [-h] [--release RELEASE]

        Build a libcomps project fork.

        optional arguments:
          -h, --help         show this help message and exit
          --release RELEASE  a custom release number of the
                             resulting RPMs

        The "python" and "rpmbuild" executables must be available.

    :raises exceptions.SystemExit: with a non-zero exit status if an
       error occurs

    """
    # Short chroot aliases accepted on the command line -> full Copr chroots
    chroot2chroots = {
        '22': {'fedora-22-i386', 'fedora-22-x86_64'},
        '23': {'fedora-23-i386', 'fedora-23-x86_64'},
        'rawhide': {'fedora-rawhide-i386', 'fedora-rawhide-x86_64'}
    }
    argparser = argparse.ArgumentParser(
        description='Test the DNF stack.',
        epilog='If an error occurs the exit status is non-zero.')
    cmdparser = argparser.add_subparsers(dest='command',
                                         help='the action to be performed')
    setupparser = cmdparser.add_parser(
        'setup', description='Create a new Copr project.')
    setupparser.add_argument(
        '--add-repository',
        action='append',
        default=[],
        type=unicode,
        help='the URL of an additional repository that is required',
        metavar='URL')
    setupparser.add_argument(
        'chroot',
        nargs='+',
        choices=sorted(chroot2chroots),
        metavar='CHROOT',
        help='the chroots to be used in the project ({})'.format(', '.join(
            '"{}" adds "{}"'.format(key, ', '.join(sorted(value)))
            for key, value in sorted(chroot2chroots.items()))))
    setupparser.add_argument('project',
                             type=unicode,
                             metavar='PROJECT',
                             help='the name of the project')
    buildparser = cmdparser.add_parser(
        'build',
        description='Build RPMs of a project from the checkout in the current '
        'working directory in Copr.')
    buildparser.add_argument('copr',
                             type=unicode,
                             metavar='PROJECT',
                             help='the name of the Copr project')
    projparser = buildparser.add_subparsers(dest='project',
                                            help='the type of the project')
    projparser.add_parser('tito',
                          description='Build a tito-enabled project.',
                          epilog='The "tito" executable must be available.')
    # Shared "--release" option for the librepo/libcomps sub-parsers
    commonparser = argparse.ArgumentParser(add_help=False)
    commonparser.add_argument(
        '--release', help='a custom release number of the resulting RPMs')
    repoparser = projparser.add_parser(
        'librepo',
        description='Build a librepo project fork.',
        epilog='The "git", "rpmbuild", "sh" and "xz" executables must be '
        'available.',
        parents=[commonparser])
    repoparser.add_argument(
        'fedrev',
        type=unicode,
        metavar='SPEC',
        help='the ID of the Fedora Git revision of the spec file')
    projparser.add_parser(
        'libcomps',
        description='Build a libcomps project fork.',
        epilog='The "python" and "rpmbuild" executables must be available.',
        parents=[commonparser])
    options = argparser.parse_args()
    # DEBUG+ messages go into <NAME>.log in the working directory
    # (py2-only os.getcwdu)
    logfn = os.path.join(os.getcwdu(), '{}.log'.format(NAME))
    try:
        logging.basicConfig(
            filename=logfn,
            filemode='wt',  # truncate any previous log
            format='%(asctime)s.%(msecs)03d:%(levelname)s:%(name)s:'
            '%(message)s',
            datefmt='%Y%m%dT%H%M%S',
            level=logging.DEBUG)
    except IOError:
        # FIX: the original message read "A log file ({}) be created or
        # overwritten." -- the "cannot" was missing.
        sys.exit(
            'A log file ({}) cannot be created or overwritten.'.format(logfn))
    # INFO+ messages are mirrored to stderr via LOGGER
    handler = logging.StreamHandler()
    handler.setLevel(logging.INFO)
    handler.setFormatter(logging.Formatter('%(levelname)s %(message)s'))
    LOGGER.addHandler(handler)
    # NOTE: bytes literals match py2 argparse output; on py2 'setup' == b'setup'
    if options.command == b'setup':
        chroots = set(
            itertools.chain.from_iterable(chroot2chroots[chroot]
                                          for chroot in options.chroot))
        try:
            _create_copr(options.project, chroots, options.add_repository)
        except ValueError:
            LOGGER.debug('An exception have occurred during setup.',
                         exc_info=True)
            sys.exit('Copr have failed to create the project.')
    elif options.command == b'build':
        # Build into a throwaway directory that is removed in the finally
        destdn = decode_path(tempfile.mkdtemp())
        try:
            if options.project == b'tito':
                try:
                    _build_tito(destdn, last_tag=False)
                except ValueError:
                    LOGGER.debug(
                        'An exception have occurred during the tito build.',
                        exc_info=True)
                    sys.exit(
                        'The build have failed. Hopefully the executables '
                        'have created an output in the destination '
                        'directory.')
                except OSError:
                    LOGGER.debug(
                        'An exception have occurred during the tito build.',
                        exc_info=True)
                    sys.exit('The destination directory cannot be overwritten '
                             'or the executable cannot be executed.')
            elif options.project == b'librepo':
                try:
                    _build_librepo(options.fedrev, destdn, options.release)
                except (IOError, urllib.ContentTooShortError, ValueError):
                    LOGGER.debug(
                        'An exception have occurred during the librepo build.',
                        exc_info=True)
                    sys.exit('The build have failed.')
                except OSError:
                    LOGGER.debug(
                        'An exception have occurred during the librepo build.',
                        exc_info=True)
                    sys.exit('The destination directory cannot be overwritten '
                             'or some of the executables cannot be executed.')
            elif options.project == b'libcomps':
                try:
                    _build_libcomps(destdn, options.release)
                except (IOError, ValueError):
                    LOGGER.debug(
                        'An exception have occurred during the libcmps build.',
                        exc_info=True)
                    sys.exit(
                        'The build have failed. Hopefully the executables have'
                        ' created an output in the destination directory.')
                except OSError:
                    LOGGER.debug(
                        'An exception have occurred during the libcmps build.',
                        exc_info=True)
                    sys.exit('The destination directory cannot be overwritten '
                             'or some of the executables cannot be executed.')
            # Whatever was built locally is now submitted to Copr
            try:
                _build_in_copr(destdn, options.copr)
            except ValueError:
                LOGGER.debug('Copr have failed to build the RPMs.',
                             exc_info=True)
                sys.exit('The build could not be requested or the build have '
                         'failed. Hopefully Copr provides some details.')
        finally:
            shutil.rmtree(destdn)
 def extractall(self, path=None):
     # Extract every member of the archive held in self.stream.
     # The stream is rewound first so repeated calls work; when ``path`` is
     # falsy the current working directory is used (py2-only os.getcwdu).
     self.stream.seek(0)
     _extractall(self.stream, path=(path or os.getcwdu()))
Example #51
0
#group by cluster

# Plot
fig, ax = plt.subplots(figsize=(22, 12))  #set plot size
ax.margins(0.03)  # Optional, adds 3% padding to the autoscaling
fig.subplots_adjust(right=0.7)
memoFig = []  # remembered figure artists (one per cluster/group)
memoCoul = []  # remembered colors
memoLab = []
memoLab2 = []  # remembered labels
#iterate through groups to layer the plot
#note that I use the cluster_name and cluster_color dicts with the 'name' lookup to return the appropriate color/label
ClickNfo = []
tempoLab = []
tempoFic = []
# Build a forward-slash base URL/path for the result data
# (py2-only os.getcwdu; backslashes are normalized for web use)
Here = os.getcwdu().replace('\\', '//')
# assumes the cwd contains 'Patent2Net' and ResultPathContentAug contains
# 'DATA' -- TODO confirm; .index() raises ValueError otherwise
Here = Here.replace(
    'Patent2Net',
    ResultPathContentAug[ResultPathContentAug.index('DATA'):]) + '//'

#tooltip

#leg=ax.legend(numpoints=1, bbox_to_anchor=(1.60, 0), loc='lower right') #show legend with only one dot
#leg.draggable(state=True)
#
#interactive_legend = plugins.InteractiveLegendPlugin(memoFig,  memoLab ,
#                                                          alpha_unsel=0.2, alpha_over=1.5, start_visible=True)
memoFig2 = []
memoLab2 = []
tempoFic = []
MemoPoints = dict()
Example #52
0
def uwgeodynamics_fname():
    """
    Get the location of the config file.

    The file location is determined in the following order

    - `$PWD/uwgeodynamicsrc`

    - `$UWGEODYNAMICSRC` if it is a file

    - `$UWGEODYNAMICSRC/uwgeodynamicsrc`

    - `$MPLCONFIGDIR/uwgeodynamicsrc`

    - On Linux,

          - `$HOME/.uwgeodynamics/uwgeodynamicsrc`, if it exists

          - or `$XDG_CONFIG_HOME/uwgeodynamics/uwgeodynamicsrc` (if
            $XDG_CONFIG_HOME is defined)

          - or `$HOME/.config/uwgeodynamics/uwgeodynamicsrc` (if
            $XDG_CONFIG_HOME is not defined)

    - On other platforms,

         - `$HOME/.uwgeodynamics/uwgeodynamicsrc` if `$HOME` is defined.

    - Lastly, it looks in `$UWGEODYNAMICSDATA/uwgeodynamicsrc` for a
      system-defined copy.
    """
    # os.getcwdu only exists on py2 (returns a unicode path)
    if six.PY2:
        cwd = os.getcwdu()
    else:
        cwd = os.getcwd()
    fname = os.path.join(cwd, 'uwgeodynamicsrc')
    if os.path.exists(fname):
        return fname

    # $UWGEODYNAMICSRC may point directly at the file or at its directory
    if 'UWGEODYNAMICSRC' in os.environ:
        path = os.environ['UWGEODYNAMICSRC']
        if os.path.exists(path):
            if os.path.isfile(path):
                return path
            fname = os.path.join(path, 'uwgeodynamicsrc')
            if os.path.exists(fname):
                return fname

    configdir = _get_configdir()
    if configdir is not None:
        fname = os.path.join(configdir, 'uwgeodynamicsrc')
        if os.path.exists(fname):
            home = get_home()
            # Legacy ~/.uwgeodynamics location wins on Linux, with a
            # deprecation warning pointing at the XDG location
            if (sys.platform.startswith('linux') and home is not None
                    and os.path.exists(
                        os.path.join(home, '.uwgeodynamics',
                                     'uwgeodynamicsrc'))):
                warnings.warn(
                    "Found UWGeodynamics configuration in ~/.uwgeodynamics/. "
                    "To conform with the XDG base directory standard, "
                    "this configuration location has been deprecated "
                    "on Linux, and the new location is now %s/uwgeodynamics/. "
                    "Please move your configuration there to ensure that "
                    "UWGeodynamics will continue to find it in the future." %
                    _get_xdg_config_dir())
                return os.path.join(home, '.uwgeodynamics', 'uwgeodynamicsrc')
            return fname

    path = get_data_path()  # guaranteed to exist or raise
    fname = os.path.join(path, 'uwgeodynamicsrc')
    if not os.path.exists(fname):
        # FIX: the warning previously said "matplotlibrc" -- a leftover from
        # the matplotlib code this function was adapted from.
        warnings.warn('Could not find uwgeodynamicsrc; using defaults')

    return fname
Example #53
0
        "wasmIsSupported = function() { return true; };\n",
        "// DDBEGIN\n",
    ]
    with io.open(str(jsfunfuzzOutputFilename),
                 "r",
                 encoding="utf-8",
                 errors="replace") as f:
        for line in f:
            if line.startswith(marker):
                sline = line[len(marker):]
                # We only override wasmIsSupported above for the main global.
                # Hopefully, any imported tests that try to use wasmIsSupported within a newGlobal
                # will do so in a straightforward way where everything is on one line.
                if not ("newGlobal" in sline and "wasmIsSupported" in sline):
                    lines.append(sline)
    lines += [
        "\ntry{print(uneval(this));}catch(e){}\n",
        "// DDEND\n",
    ]
    return lines


if __name__ == "__main__":
    # pylint: disable=no-member
    # Run the fuzzing loop forever, working in a wtmp dir created under the
    # current working directory (os.getcwdu on py2, os.getcwd on py3).
    many_timed_runs(
        None,
        os_ops.make_wtmp_dir(
            Path(os.getcwdu() if sys.version_info.major ==
                 2 else os.getcwd())), sys.argv[1:],
        create_collector.make_collector(), False)
Example #54
0
# Quick reference of common os-module calls; these lines reference
# placeholder names (fd, file, path, ...) and are not meant to be executed.
os.dup2(fd, fd2)    # Duplicate file descriptor fd to fd2
os.fstat(fd)        # Return status for file descriptor fd, like stat().
os.fsync(fd)        # Force write of file with filedescriptor fd to disk
os.isatty(fd)       # Return True if the file descriptor fd is open and connected to a tty(-like) device, else False.
os.lseek(fd, pos, how)  # Set the current position of file descriptor fd to position pos, modified by how: SEEK_SET or 0
os.open(file, flags[, mode])    # Open the file file and set various flags according to flags
os.pipe()           # Create a pipe. Return a pair of file descriptors (r, w)
os.read(fd, n)      # Read at most n bytes from file descriptor fd
os.write(fd, str)   # Write the string str to file descriptor fd

################################ Files and Directories ################################

os.access(path, mode)       # Use the real uid/gid to test for access to path
os.chdir(path)              # Change the current working directory to path.
os.getcwd()                 # Return a string representing the current working directory.
os.getcwdu()                # Return a Unicode object representing the current working directory. (Python 2 only)
os.chmod(path, mode)        # Change the mode of path to the numeric mode
os.listdir(path)            # Return a list containing the names of the entries in the directory given by path
os.lstat(path)              # Perform the equivalent of an lstat() system call on the given path
os.mkdir(path[, mode])      # Create a directory named path with numeric mode mode
os.makedirs(path[, mode])   # Recursive directory creation function.
                            # Like mkdir(), but makes all intermediate-level directories needed to contain the leaf directory
os.remove(path)             # Remove (delete) the file path. If path is a directory, OSError is raised; see rmdir() below to remove a directory
os.removedirs(path)         # Remove directories recursively. Works like rmdir() except that, if the leaf directory is successfully removed
os.rename(src, dst)         # Rename the file or directory src to dst. If dst is a directory, OSError will be raised
os.renames(old, new)        # Recursive directory or file renaming function
os.rmdir(path)              # Remove (delete) the directory path. Only works when the directory is empty, otherwise, OSError is raised
os.stat(path)               # Perform the equivalent of a stat() system call on the given path
os.utime(path, times)       # Set the access and modified times of the file specified by path

# Generate the file names in a directory tree by walking the tree either top-down or bottom-up
Example #55
0
    def __call__(self, redirect_output=True, cwd=None, priority=None):
        '''
        Launch the worker child process.

        If redirect_output is True, output from the child is redirected
        to a file on disk and this method returns the path to that file.
        Otherwise None is returned.

        :param cwd: working directory passed to the child (None = inherit)
        :param priority: 'high'/'normal'/'low'; defaults to the
            'worker_process_priority' preference
        '''
        exe = self.gui_executable if self.gui else self.executable
        env = self.env
        # Record the original working directory for the child, pickled and
        # hex-encoded so it survives as an environment variable value.
        try:
            env[b'ORIGWD'] = binascii.hexlify(
                cPickle.dumps(cwd or os.path.abspath(os.getcwdu())))
        except EnvironmentError:
            # cwd no longer exists
            env[b'ORIGWD'] = binascii.hexlify(
                cPickle.dumps(cwd or os.path.expanduser(u'~')))

        _cwd = cwd
        if priority is None:
            priority = prefs['worker_process_priority']
        cmd = [exe]
        args = {
            'env': env,
            'cwd': _cwd,
        }
        # Map the abstract priority onto the platform mechanism: Windows
        # process creation flags vs. POSIX nice values.
        if iswindows:
            priority = {
                'high': win32process.HIGH_PRIORITY_CLASS,
                'normal': win32process.NORMAL_PRIORITY_CLASS,
                'low': win32process.IDLE_PRIORITY_CLASS
            }[priority]
            args['creationflags'] = win32process.CREATE_NO_WINDOW | priority
        else:
            niceness = {
                'normal': 0,
                'low': 10,
                'high': 20,
            }[priority]
            args['preexec_fn'] = partial(renice, niceness)
        ret = None
        if redirect_output:
            # Child stdout/stderr go to a persistent temp log file whose
            # path is returned to the caller.
            self._file = PersistentTemporaryFile('_worker_redirect.log')
            args['stdout'] = self._file._fd
            args['stderr'] = subprocess.STDOUT
            if iswindows:
                args['stdin'] = subprocess.PIPE
            ret = self._file.name

        if iswindows and 'stdin' not in args:
            # On windows when using the pythonw interpreter,
            # stdout, stderr and stdin may not be valid
            args['stdin'] = subprocess.PIPE
            args['stdout'] = _windows_null_file
            args['stderr'] = subprocess.STDOUT

        if not iswindows:
            # Close inherited file descriptors in worker
            # On windows, this is done in the worker process
            # itself
            args['close_fds'] = True

        self.child = subprocess.Popen(cmd, **args)
        if 'stdin' in args:
            # The child gets no input; close the pipe immediately
            self.child.stdin.close()

        self.log_path = ret
        return ret
Example #56
0
    def test_abspath_expanduser_unicode(self):
        """Exercise fileutil.abspath_expanduser_unicode: unicode-only input,
        Windows long-path (\\\\?\\) handling, tilde expansion and behavior
        under non-ASCII working directories."""
        # Byte strings must be rejected outright
        self.failUnlessRaises(AssertionError,
                              fileutil.abspath_expanduser_unicode,
                              "bytestring")

        saved_cwd = os.path.normpath(os.getcwdu())
        abspath_cwd = fileutil.abspath_expanduser_unicode(u".")
        abspath_cwd_notlong = fileutil.abspath_expanduser_unicode(
            u".", long_path=False)
        self.failUnless(isinstance(saved_cwd, unicode), saved_cwd)
        self.failUnless(isinstance(abspath_cwd, unicode), abspath_cwd)
        if sys.platform == "win32":
            self.failUnlessReallyEqual(
                abspath_cwd, fileutil.to_windows_long_path(saved_cwd))
        else:
            self.failUnlessReallyEqual(abspath_cwd, saved_cwd)
        self.failUnlessReallyEqual(abspath_cwd_notlong, saved_cwd)

        # Long-path conversion: already-prefixed paths pass through, UNC and
        # drive paths get the \\?\ (or \\?\UNC\) prefix
        self.failUnlessReallyEqual(
            fileutil.to_windows_long_path(u"\\\\?\\foo"), u"\\\\?\\foo")
        self.failUnlessReallyEqual(
            fileutil.to_windows_long_path(u"\\\\.\\foo"), u"\\\\.\\foo")
        self.failUnlessReallyEqual(
            fileutil.to_windows_long_path(u"\\\\server\\foo"),
            u"\\\\?\\UNC\\server\\foo")
        self.failUnlessReallyEqual(fileutil.to_windows_long_path(u"C:\\foo"),
                                   u"\\\\?\\C:\\foo")
        self.failUnlessReallyEqual(
            fileutil.to_windows_long_path(u"C:\\foo/bar"),
            u"\\\\?\\C:\\foo\\bar")

        # adapted from <http://svn.python.org/view/python/branches/release26-maint/Lib/test/test_posixpath.py?view=markup&pathrev=78279#test_abspath>

        foo = fileutil.abspath_expanduser_unicode(u"foo")
        self.failUnless(foo.endswith(u"%sfoo" % (os.path.sep, )), foo)

        foobar = fileutil.abspath_expanduser_unicode(u"bar", base=foo)
        self.failUnless(
            foobar.endswith(u"%sfoo%sbar" % (os.path.sep, os.path.sep)),
            foobar)

        if sys.platform == "win32":
            # This is checking that a drive letter is added for a path without one.
            baz = fileutil.abspath_expanduser_unicode(u"\\baz")
            self.failUnless(baz.startswith(u"\\\\?\\"), baz)
            self.failUnlessReallyEqual(baz[5:], u":\\baz")

            bar = fileutil.abspath_expanduser_unicode(u"\\bar", base=baz)
            self.failUnless(bar.startswith(u"\\\\?\\"), bar)
            self.failUnlessReallyEqual(bar[5:], u":\\bar")
            # not u":\\baz\\bar", because \bar is absolute on the current drive.

            self.failUnlessReallyEqual(baz[4], bar[4])  # same drive

            baz_notlong = fileutil.abspath_expanduser_unicode(u"\\baz",
                                                              long_path=False)
            self.failIf(baz_notlong.startswith(u"\\\\?\\"), baz_notlong)
            self.failUnlessReallyEqual(baz_notlong[1:], u":\\baz")

            bar_notlong = fileutil.abspath_expanduser_unicode(u"\\bar",
                                                              base=baz_notlong,
                                                              long_path=False)
            self.failIf(bar_notlong.startswith(u"\\\\?\\"), bar_notlong)
            self.failUnlessReallyEqual(bar_notlong[1:], u":\\bar")
            # not u":\\baz\\bar", because \bar is absolute on the current drive.

            self.failUnlessReallyEqual(baz_notlong[0],
                                       bar_notlong[0])  # same drive

        # "~" must always be expanded away
        self.failIfIn(u"~", fileutil.abspath_expanduser_unicode(u"~"))
        self.failIfIn(
            u"~", fileutil.abspath_expanduser_unicode(u"~", long_path=False))

        # Repeat a basic round of checks from inside an ASCII cwd and, when
        # the filesystem encoding allows it, a non-ASCII cwd
        cwds = ['cwd']
        try:
            cwds.append(u'\xe7w\xf0'.encode(sys.getfilesystemencoding()
                                            or 'ascii'))
        except UnicodeEncodeError:
            pass  # the cwd can't be encoded -- test with ascii cwd only

        for cwd in cwds:
            try:
                os.mkdir(cwd)
                os.chdir(cwd)
                for upath in (u'', u'fuu', u'f\xf9\xf9', u'/fuu', u'U:\\',
                              u'~'):
                    uabspath = fileutil.abspath_expanduser_unicode(upath)
                    self.failUnless(isinstance(uabspath, unicode), uabspath)

                    uabspath_notlong = fileutil.abspath_expanduser_unicode(
                        upath, long_path=False)
                    self.failUnless(isinstance(uabspath_notlong, unicode),
                                    uabspath_notlong)
            finally:
                # Always restore the original cwd, even if an assertion fails
                os.chdir(saved_cwd)
Example #57
0
def atomic_write(raw, name):
    """Atomically replace the file *name* with the bytes *raw*.

    The data is written to a temporary file in the current working
    directory (so the final rename stays on one filesystem), forced to
    disk, and only then renamed over *name*.  A reader therefore sees
    either the old contents or the complete new contents, never a
    partial write.

    :param raw: bytes to write.
    :param name: destination path; replaced atomically.
    """
    # os.getcwdu() exists only on Python 2; fall back to os.getcwd().
    getcwd = getattr(os, "getcwdu", os.getcwd)
    with tempfile.NamedTemporaryFile(dir=getcwd(), delete=False) as f:
        f.write(raw)
        # Widen NamedTemporaryFile's default 0600 mode to 0644-style.
        os.fchmod(f.fileno(),
                  stat.S_IREAD | stat.S_IWRITE | stat.S_IRGRP | stat.S_IROTH)
        # Flush Python's buffer and force the data to disk BEFORE the
        # rename; otherwise a crash could publish a truncated file.
        f.flush()
        os.fsync(f.fileno())
    # Rename only after the handle is closed: renaming an open file
    # fails on Windows, and the close guarantees the data is complete.
    os.rename(f.name, name)
Example #58
0
#		- ~/.local (on Ubuntu 16.04)
# - The user site-packages folder:
#		- ~/.local/lib/python[X]/site-packages (on Ubuntu 16.04)
# - The global site-packages folders (on Ubuntu 16.04):
#		- /usr/local/lib/python3.5/dist-packages
#		- /usr/lib/python3/dist-packages
#		- /usr/lib/python3.5/dist-packages
# - /usr/local/share
# - /usr/share
# - The share folder within an Anaconda/ Miniconda environment

# Collect every folder that may hold a `share` resource folder: the
# working directory, this package's parent folder, the per-user base and
# site-packages locations (when the `site` module exposes them), the
# global site-packages folders, and the two fixed system prefixes.
cwd = os.getcwd() if py3 else os.getcwdu()
parent_folder = safe_decode(os.path.dirname(os.path.dirname(__file__)),
	enc=filesystem_encoding())
base_folders = [cwd, parent_folder]
# Older Pythons / virtualenvs may lack these site helpers, hence hasattr.
for getter in (u'getuserbase', u'getusersitepackages'):
	if hasattr(site, getter):
		base_folders.append(os.path.join(
			safe_decode(getattr(site, getter)(), enc=filesystem_encoding()),
			u'share'))
if hasattr(site, u'getsitepackages'):
	for folder in site.getsitepackages():
		base_folders.append(os.path.join(
			safe_decode(folder, enc=filesystem_encoding()), u'share'))
base_folders.append(u'/usr/local/share')
base_folders.append(u'/usr/share')
Example #59
0
def getCPythonResults(cpython_cmd, cpython_cached, force_update, send_kill):
    """Run (or replay) a CPython test command and return its outcome.

    When caching is enabled, the result is keyed on an MD5 digest of the
    command line, the contents of any command element that is a file, an
    optional salt from NUITKA_HASH_SALT, and the current directory.  A
    valid cache hit is returned directly; otherwise the command is
    executed and the outcome stored for next time.

    Returns a tuple (time, stdout, stderr, exit_code).
    """
    # Many details, pylint: disable=too-many-locals

    have_cached_result = False
    if cpython_cached:
        # TODO: Hashing stuff and creating cache filename is duplicate code
        # and should be shared.
        digest_input = " -- ".join(cpython_cmd)
        if str is not bytes:
            digest_input = digest_input.encode("utf8")
        digest = hashlib.md5(digest_input)

        # Fold the contents of any on-disk command element into the key,
        # so edited test files invalidate the cache.
        for part in cpython_cmd:
            if os.path.exists(part):
                with open(part, "rb") as part_file:
                    digest.update(part_file.read())

        salt = os.environ.get("NUITKA_HASH_SALT", "")
        if str is not bytes:
            salt = salt.encode("utf8")
        digest.update(salt)

        # Need the unicode cwd on Windows Python 2, plain cwd elsewhere.
        if os.name == "nt" and python_version < 0x300:
            working_dir = os.getcwdu()
        else:
            working_dir = os.getcwd()
        digest.update(working_dir.encode("utf8"))

        cache_filename = os.path.join(
            getTestingCPythonOutputsCacheDir(), digest.hexdigest())

        if os.path.exists(cache_filename) and not force_update:
            try:
                with open(cache_filename, "rb") as cache_file:
                    (
                        cpython_time,
                        stdout_cpython,
                        stderr_cpython,
                        exit_cpython,
                    ) = pickle.load(cache_file)
            except (IOError, EOFError):
                # Broken cache content, fall through to a fresh run.
                pass
            else:
                have_cached_result = True

    if not have_cached_result:
        cpython_time, stdout_cpython, stderr_cpython, exit_cpython = _getCPythonResults(
            cpython_cmd=cpython_cmd, send_kill=send_kill)

        if cpython_cached:
            with open(cache_filename, "wb") as cache_file:
                pickle.dump(
                    (cpython_time, stdout_cpython, stderr_cpython,
                     exit_cpython),
                    cache_file,
                )

    return cpython_time, stdout_cpython, stderr_cpython, exit_cpython
Example #60
0
import ConfigParser
import base64
import threading
import shutil
import errno

# Coverage support: when the test run itself is being measured, forked
# children must run under `coverage run` (in parallel mode) instead of a
# plain `python`, optionally pointed at a shared .coveragerc.
if "COVERAGE_FILE" in os.environ:
    FORK_ARGS = ["coverage", "run", "--parallel-mode"]
    if "COVERAGE_DIR" in os.environ:
        FORK_ARGS.append(
            "--rcfile=" + os.environ["COVERAGE_DIR"] + "/.coveragerc")
else:
    FORK_ARGS = ["python"]

# Useful constants for the test
KEYLIME_DIR=os.getcwdu()+"/../keylime/"

# Custom imports
sys.path.insert(0, KEYLIME_DIR)
import common
import tornado_requests
import registrar_client
import tenant
import crypto
import tpm_initialize
import tpm_quote
import tpm_random
import user_data_encrypt
import secure_mount