Example #1
def check(modules):
	checked = []
	module_names = {}
	main_module = 0
	for module in modules:
		try:
			if module.name in module_names:
				raise CheckError("redefined")
			module_names[module.name] = module
			_check_module(module, checked)
			checked.append(module)
		except config.BaseError as e:
			raise CheckError("Module %s: %s" % (module.name, str(e)))
		if module.is_main:
			main_module += 1
	if main_module == 0:
		utils.warn("Main module missing")
	for module in modules:
		def check_dep(lst):
			for r in lst:
				if r.is_main:
					utils.warn("Main module %s: Should be the root module (required by %s)" % (r.name, module.name))
		check_dep(module.public_required)
		check_dep(module.private_required)
	for module in modules:
		check_include_loop(module, [])
	return True
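
Most examples in this listing call utils.warn (or a bare warn) with a single message string. The helpers themselves are not part of the listing, so as a point of reference, here is a minimal hypothetical sketch compatible with the single-string call sites (not taken from any of the quoted projects):

import sys

def warn(message):
    # Hypothetical minimal helper: tag the message and write it to
    # stderr so warnings stay separate from normal program output.
    sys.stderr.write("WARNING: %s\n" % message)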
Example #2
 def commit(self):
     if self.__cursor:
         self.__cursor.close()
         self.__conn.commit()
         self.__cursor = None
     else:
         utils.warn('Nothing committed!!!')
Example #3
    def verify_plugin_settings(self):
        puts("Verifying settings requested by plugins...")

        missing_settings = False
        missing_setting_error_messages = []
        with indent(2):
            for name, meta in self.required_settings_from_plugins.items():
                if not hasattr(settings, name):
                    error_message = (
                        "%(setting_name)s is missing. It's required by the"
                        "%(plugin_name)s plugin's '%(function_name)s' method."
                    ) % meta
                    puts(colored.red("✗ %(setting_name)s" % meta))
                    missing_setting_error_messages.append(error_message)
                    missing_settings = True
                else:
                    show_valid("%(setting_name)s" % meta)

            if missing_settings:
                puts("")
                warn(
                    "Will is missing settings required by some plugins. "
                    "He's starting up anyway, but you will run into errors"
                    " if you try to use those plugins!"
                )
                self.add_startup_error("\n".join(missing_setting_error_messages))
            else:
                puts("")
Example #4
def Minimize(func, h, points):
    n = len(points)
    if n > 2000:
        #raise ValueError("Too many points for Cross Validation, limit is 2000")
        msg = "Too many points for Cross Validation, limit is 2000, using 0.7 * hRef."
        utils.warn(msg)
        return 0.7*h
    
    allSquaredDistances = DistancesSquared(points)

    min_percent = 0.05
    max_percent = 2.00
    step_percent = 0.10
    h1 = Search(func, allSquaredDistances, n, h, min_percent, max_percent, step_percent)
    if h1 <= min_percent * h or h1 >= max_percent * h:
        # then it is the min or max value checked
        msg = "Cross Validation using "+func.__name__+" failed to minimize, using 0.7 * hRef."
        utils.warn(msg)
        return 0.7*h
#    return h1
    #print h1
    h2 = Search(func, allSquaredDistances, n, h1, 0.89, 1.11, 0.01)
    #print h2
    h3 = Search(func, allSquaredDistances, n, h2, 0.989, 1.011, 0.001)
    return h3
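
Search is not shown in this example. Judging from the calls above, it evaluates func at bandwidth multiples of h between a minimum and maximum fraction, stepping by step_percent, and returns the candidate with the smallest score. A sketch under that assumption (a hypothetical reconstruction, not the original helper):

def Search(func, distances, n, h, lo, hi, step):
    # Grid search: try candidate bandwidths c * h for c in [lo, hi]
    # and keep the one that minimizes func.
    best_h, best_score = None, None
    c = lo
    while c <= hi:
        candidate = c * h
        score = func(distances, n, candidate)
        if best_score is None or score < best_score:
            best_h, best_score = candidate, score
        c += step
    return best_h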
Example #5
    def __no_cvs_check_user_override(self):
        """Return True iff pre-commit-checks are turned off by user override...

        ... via the ~/.no_cvs_check file.

        This function also performs all necessary debug traces, warnings,
        etc.
        """
        no_cvs_check_fullpath = expanduser('~/.no_cvs_check')
        # Make sure the tilde expansion worked.  Since we are only using
        # "~" rather than "~username", the expansion should really never
        # fail...
        assert (not no_cvs_check_fullpath.startswith('~'))

        if not isfile(no_cvs_check_fullpath):
            return False

        # The no_cvs_check file exists.  Verify its age.
        age = time.time() - getmtime(no_cvs_check_fullpath)
        one_day_in_seconds = 24 * 60 * 60

        if (age > one_day_in_seconds):
            warn('%s is too old and will be ignored.' % no_cvs_check_fullpath)
            return False

        debug('%s found - pre-commit checks disabled' % no_cvs_check_fullpath)
        syslog('Pre-commit checks disabled for %(rev)s on %(repo)s by user'
               ' %(user)s using %(no_cvs_check_fullpath)s'
               % {'rev': self.new_rev,
                  'repo': self.email_info.project_name,
                  'user': get_user_name(),
                  'no_cvs_check_fullpath': no_cvs_check_fullpath,
                  })
        return True
Example #6
def readDXF(filename):
    """Read a DXF file and extract the recognized entities.

    `filename`: name of a .DXF file.

    Returns a multiline string with one line for each recognized entity,
    in a format that can directly be used by :func:`convertDXF`.
    
    This function requires the external program `dxfparser` which comes
    with the pyFormex distribution. It currently recognizes entities of
    type 'Arc', 'Line', 'Polyline', 'Vertex'.
    """
    import utils,commands
    print(filename)
    if utils.hasExternal('dxfparser'):
        cmd = 'pyformex-dxfparser %s 2>/dev/null' % filename
        print(cmd)
        sta,out = utils.runCommand(cmd)
        if sta==0:
            return out
        else:
            return ''
    else:
        utils.warn('warn_no_dxfparser')
        return ''
Example #7
def track_module(module):
	dependency_map = {}
	include_stack = collections.OrderedDict()
	included_dirs = set()
	for source, ext, _ in module.source_files():
		dependency_map[source] = (track_file(module, source, included_dirs, include_stack), ext)
	for dep in module.private_required:
		ok = False
		for d in dep.public_includes:
			if d in included_dirs:
				ok = True
				break
		if not ok:
			utils.warn("Module %s: Useless dependency '%s'" % (module.name, dep.name))
	included_dirs = set()
	for header, ext, _ in module.header_files():
		dependency_map[header] = (track_file(module, header, included_dirs, include_stack), ext)
	for dep in module.public_required:
		ok = False
		for d in dep.public_includes:
			if d in included_dirs:
				ok = True
				break
		if not ok:
			utils.warn("Module %s: Dependency '%s' should be private" % (module.name, dep.name))
	return dependency_map
Example #8
def tvnamer(paths):
    """Main tvnamer function, takes an array of paths, does stuff.
    """
    # Warn about move_files function
    if Config['move_files_enable']:
        import warnings
        warnings.warn("The move_files feature is still under development. "
            "Be very careful with it.\n"
            "It has not been heavily tested, and is not recommended for "
            "general use yet.")

    if Config['force_name'] is not None:
        import warnings
        warnings.warn("The --name argument is a temporary solution, and will"
            "be removed at some point in the future. Do no depend on it")

    p("#" * 20)
    p("# Starting tvnamer")

    episodes_found = []

    for cfile in findFiles(paths):
        parser = FileParser(cfile)
        try:
            episode = parser.parse()
        except InvalidFilename, e:
            warn("Invalid filename: %s" % e)
        else:
            if episode.seriesname is None:
                warn("Parsed filename did not contain series name, skipping: %s" % cfile)
            else:
                episodes_found.append(episode)
Example #9
def tvnamer(paths):
    """Main tvnamer function, takes an array of paths, does stuff.
    """
    print "#" * 20
    print "# Starting tvnamer"

    episodes_found = []

    for cfile in findFiles(paths):
        parser = FileParser(cfile)
        try:
            episode = parser.parse()
        except InvalidFilename:
            warn("Invalid filename %s" % cfile)
        else:
            episodes_found.append(episode)

    if len(episodes_found) == 0:
        raise NoValidFilesFoundError()

    print "# Found %d episodes" % len(episodes_found)

    tvdb_instance = Tvdb(
        interactive=not Config['selectfirst'],
        debug = Config['verbose'],
        search_all_languages = Config['search_all_languages'],
        language = Config['language'])

    for episode in episodes_found:
        processFile(tvdb_instance, episode)
        print

    print "#" * 20
    print "# Done"
Example #10
    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True

            self.timestamp = time.strftime("%Y%m%d%H%M%S")

            cnf = Config()
            if cnf.has_key("Dir::UrgencyLog"):
                # Create the log directory if it doesn't exist
                self.log_dir = cnf["Dir::UrgencyLog"]

                if not os.path.exists(self.log_dir) or not os.access(self.log_dir, os.W_OK):
                    warn("UrgencyLog directory %s does not exist or is not writeable, using /srv/ftp.debian.org/tmp/ instead" % (self.log_dir))
                    self.log_dir = '/srv/ftp.debian.org/tmp/'

                # Open the logfile
                self.log_filename = "%s/.install-urgencies-%s.new" % (self.log_dir, self.timestamp)
                self.log_file = open_file(self.log_filename, 'w')

            else:
                self.log_dir = None
                self.log_filename = None
                self.log_file = None

            self.writes = 0
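
The self.__dict__ = self.__shared_state assignment is the Borg (shared-state) pattern: every instance shares one attribute dictionary, which is why the initialised flag survives repeated construction. A minimal illustration of the idiom this example relies on:

class Borg(object):
    __shared_state = {}

    def __init__(self):
        # All instances share the same __dict__, so an attribute set
        # on one instance is visible on every other instance.
        self.__dict__ = self.__shared_state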
Example #11
def post_receive_one(ref_name, old_rev, new_rev, refs, submitter_email):
    """post-receive treatment for one reference.

    PARAMETERS
        ref_name: The name of the reference.
        old_rev: The SHA1 of the reference before the update.
        new_rev: The SHA1 of the reference after the update.
        refs: A dictionary containing all references, as described
            in git_show_ref.
        submitter_email: Same as AbstractUpdate.__init__.
    """
    debug('post_receive_one(ref_name=%s\n'
          '                        old_rev=%s\n'
          '                        new_rev=%s)'
          % (ref_name, old_rev, new_rev))

    update = new_update(ref_name, old_rev, new_rev, refs, submitter_email)
    if update is None:
        # We emit a warning, rather than trigger an assertion, because
        # it gives the script a chance to process any other reference
        # that was updated, but not processed yet.
        warn("post-receive: Unsupported reference update: %s (ignored)."
             % ref_name,
             "              old_rev = %s" % old_rev,
             "              new_rev = %s" % new_rev)
        return
    update.send_email_notifications()
Example #12
def doMoveFile(cnamer, destDir = None, destFilepath = None, getPathPreview = False):
    """Moves file to destDir, or to destFilepath
    """

    if (destDir is None and destFilepath is None) or (destDir is not None and destFilepath is not None):
        raise ValueError("Specify only destDir or destFilepath")

    if not (Config['move_files_enable'] or Config['link_files_enable']):
        raise ValueError("move_files feature is disabled but doMoveFile was called")

    if Config['move_files_destination'] is None:
        raise ValueError("Config value for move_files_destination cannot be None if move_files_enabled is True")

    try:
        if Config['link_files_enable'] and not Config['move_files_enable']:
            return cnamer.linkPath(
                new_path = destDir,
                new_fullpath = destFilepath,
                always_move = Config['always_move'],
                getPathPreview = getPathPreview,
                force = Config['overwrite_destination_on_move'])
        else:
            return cnamer.newPath(
                new_path = destDir,
                new_fullpath = destFilepath,
                always_move = Config['always_move'],
                getPathPreview = getPathPreview,
                force = Config['overwrite_destination_on_move'],
                linkBack = Config['link_files_enable'])

    except OSError, e:
        warn(e)
Example #13
def syslog(message, tag='cvs_check', priority='local0.warn'):
    """Add the given entry to the syslog file.

    PARAMETERS
        message: The message to file.
        tag: Mark every line in the log with the specified tag.
        priority: Enter the message with the specified priority.
    """
    logger_exe = 'logger'
    if 'GIT_HOOKS_LOGGER' in environ:
        logger_exe = environ['GIT_HOOKS_LOGGER']

    p = Popen([logger_exe, '-t', tag, '-p', priority, message],
              stdout=PIPE, stderr=STDOUT)
    out, _ = p.communicate()
    if p.returncode != 0:
        info = (['Failed to file the following syslog entry:',
                 '  - message: %s' % message,
                 '  - tag: %s' % tag,
                 '  - priority: %s' % priority,
                 '',
                 'logger returned with error code %d:' % p.returncode]
                + out.splitlines())
        warn(*info)

    elif out.rstrip():
        print out.rstrip()
Example #14
def processFile(tvdb_instance, episode):
    """Gets episode name, prompts user for input
    """
    p("#" * 20)
    p("# Processing file: %s" % episode.fullfilename)

    if len(Config['input_filename_replacements']) > 0:
        replaced = applyCustomInputReplacements(episode.fullfilename)
        p("# With custom replacements: %s" % (replaced))

    # Use force_name option. Done after input_filename_replacements so
    # it can be used to skip the replacements easily
    if Config['force_name'] is not None:
        episode.seriesname = Config['force_name']

    p("# Detected series: %s (%s)" % (episode.seriesname, episode.number_string()))

    try:
        episode.populateFromTvdb(tvdb_instance, force_name=Config['force_name'], series_id=Config['series_id'])
    except (DataRetrievalError, ShowNotFound), errormsg:
        if Config['always_rename'] and Config['skip_file_on_error'] is True:
            warn("Skipping file due to error: %s" % errormsg)
            return
        else:
            warn(errormsg)
Example #15
    def get(self, request, *args, **kwargs):
        # get base
        base = kwargs['base']
        base_model = get_object_or_404(Base, name=base)

        # exec id
        exec_id = kwargs['id']

        # get exec from database
        try:
            exec_model = base_model.apys.get(name=exec_id)
        except Apy.DoesNotExist:
            warn(channel_name_for_user(request), "404 on %s" % request.META['PATH_INFO'])
            return HttpResponseNotFound("404 on %s"     % request.META['PATH_INFO'])

        user = channel_name_for_user(request)
        debug(user, "%s-Request received, URI %s" % (request.method, request.path))

        apy_data = serializers.serialize("json", [exec_model], fields=('base_id', 'name'))
        struct = json.loads(apy_data)
        apy_data = json.dumps(struct[0])
        rpc_request_data = {}
        rpc_request_data.update({'model': apy_data, 
                'base_name': base_model.name,
            })
        get_dict = copy.deepcopy(request.GET)
        post_dict = copy.deepcopy(request.POST)
        for key in ["json", "shared_key"]:
            if request.method == "GET":
                if get_dict.has_key(key): del get_dict[key]
            if request.method == "POST":
                if post_dict.has_key(key): del post_dict[key]
        rpc_request_data.update({'request': 
                { 
                'method': request.method,
                'GET': get_dict.dict(),
                'POST': post_dict.dict(),
                #'session': request.session.session_key,
                'user': {'username': request.user.username},
                'REMOTE_ADDR': request.META.get('REMOTE_ADDR')
                }
            })
        logger.debug("REQUEST-data: %s" % rpc_request_data)
        try:
            # _do on remote
            start = int(round(time.time() * 1000))
            response_data = call_rpc_client(json.dumps(rpc_request_data), 
                generate_vhost_configuration(base_model.user.username, base_model.name), 
                base_model.name, 
                base_model.executor.password)
            end = int(round(time.time() * 1000))
            ms=str(end-start)

            logger.info("RESPONSE-time: %sms" %  str(ms))
            logger.debug("RESPONSE-data: %s" % response_data[:120])
            data = json.loads(response_data)
        except Exception, e:
            logger.exception(e)
            return HttpResponseServerError(e)
Example #16
def debugConnection(sock, addr, port):
    # Prints the details of a connection
    warn("connection timed out, plesae check listener status")
    info("detailed Report:")
    info("IP_ADDR: "%addr)
    info("PORT: "%port)
    if not sock.gettimeout(): return
    info("connection timed out after %.3f seconds"%sock.gettimeout())
Example #17
def doRenameFile(cnamer, newName):
    """Renames the file. cnamer should be Renamer instance,
    newName should be string containing new filename.
    """
    try:
        cnamer.newName(newName, force = Config['overwrite_destination_on_rename'])
    except OSError, e:
        warn(e)
Example #18
def doRenameFile(cnamer, newName):
    """Renames the file. cnamer should be Renamer instance,
    newName should be string containing new filename.
    """
    try:
        cnamer.newName(newName)
    except OSError, e:
        warn(unicode(e))
Example #19
    def init(self):
        if os.path.exists(self.repo_path):
            utils.warn("Homefiles repo already exists at '%s'"
                       % self.repo_path)
            return

        utils.mkdir(self.repo_path)
        self.git.init()
Example #20
def BuildFixesFromPoints(features, shapeFieldName, dateField, groupingFields,
                         locationVarianceField, mobilityVarianceField, spatialReference):
    
    fieldNames = [f.name for f in arcpy.ListFields(features)]
    if not dateField or dateField not in fieldNames:
        raise ValueError, "date field is not found in the dataset"

    #FIXME - verify field types   
    dateFieldDelimited = arcpy.AddFieldDelimiters(features, dateField)
    sort = dateField  + " A"
    fields = shapeFieldName + ";" + dateField
    if locationVarianceField and locationVarianceField in fieldNames:
        fields += ";" + arcpy.AddFieldDelimiters(features, locationVarianceField)
    else:
        locationVarianceField = None
    if mobilityVarianceField and mobilityVarianceField in fieldNames:
        fields += ";" + arcpy.AddFieldDelimiters(features, mobilityVarianceField)
    else:
        mobilityVarianceField = None
    if spatialReference.factoryCode != arcpy.Describe(features).spatialReference.factoryCode:
        #FIXME - ESRI BUG - reprojection does not work if the data is in a FGDB and a sort order is given.
        sort = ''
        msg = "Due to a bug in ArcGIS 10, data cannot be both sorted and projected on the fly. "
        msg += "Since a projection is required for your data sorting is turned off. "
        msg += "This is OK for data in the Animal Movements Database, however other data "
        msg += "must be correctly pre-sorted by date or you will get incorrect results. "
        msg += "If you can't guarantee pre-sorted data, then reproject your data first."
        utils.warn(msg)
    
    results = {}
    #print groupingFields, dateFieldDelimited
    for groupName, whereClaus in GetGroupings(groupingFields, dateFieldDelimited).iteritems():
        #print groupName, whereClaus
        #utils.info("Where = " + where + " Fields = " + fields + " Sort = " + sort)
        #FIXME - ESRI BUG - reprojection does not work if the data is in a FGDB and a sort order is given.
        #utils.info("Spatial Ref = " + spatialRef.Name)
        fixes = []
        firstTime = None
        #print whereClaus, spatialRef, fields, sort
        points = arcpy.SearchCursor(features, whereClaus, spatialReference, fields, sort)
        for point in points:
            fix = [0,0,0,0,0]
            newTime = point.getValue(dateField)
            if firstTime is None:
                firstTime = newTime
            fix[0] = GetMinutes(newTime, firstTime)
            fix[1] = point.getValue(shapeFieldName).getPart().X
            fix[2] = point.getValue(shapeFieldName).getPart().Y
            if locationVarianceField:
                fix[3] = point.getValue(locationVarianceField)
            if mobilityVarianceField:
                fix[4] = point.getValue(mobilityVarianceField)
            fixes.append(fix)
            
        results[groupName] = fixes
        utils.info("fixes "+ str(len(fixes)) +" first fix: " + str(fixes[0]))
    return results
Example #21
    def update(self, stop_event):
        # Mobot hardware update loop
        # self.alphacv() # Removed: Image Processing doesn't happen here anymore
        while not stop_event.is_set():
            self.touchcount = abs(self.touchcount - 1)
            BrickPi.MotorSpeed[L] = self.vL
            BrickPi.MotorSpeed[L1] = self.vL
            BrickPi.MotorSpeed[R] = self.vR
            BrickPi.MotorSpeed[R1] = self.vR
            result = BrickPiUpdateValues()
            if not result:
                # Successfully updated values
                # Read touch sensor values
                if BrickPi.Sensor[S2]:
                    # Prevent signal disturbances
                    threshold = int(28 - self.values['TCHS'] * 20)
                    self.touchcount += 2
                    if self.touchcount > threshold:
                        # Increment gates count
                        self.status['GATC'] += 1
                        # Reset signal strength
                        self.touchcount = 0
                # Update encoder values
                self.encL = BrickPi.Encoder[L]
                self.encR = BrickPi.Encoder[R]

            speeds = self.calculateMobotMovement()
            self._setMotorSpeed(speeds[0], speeds[1])
            self._updateStatus()

            # Emergency Abort
            if (not GPIO.input(ABORT_BTN)):
                warn('manually aborting mission!!!')
                self.stopMission()
                CAMERA.close()
                sys.exit(0)

            # Ramp Adaptation
            incline_btn = not GPIO.input(RAMP_BTN)
            if ((not self.incline_btn_prev) and incline_btn):
                # Button is pressed, set inclined
                self.setInclined()

            elif ((self.incline_btn_prev) and not incline_btn):
                # Button is released, unset inclined
                self.unsetInclined()

            self.incline_btn_prev = incline_btn

            # Update Terminal Feedback
            # Should be disabled for safety considerations -> STABLE_MODE
            if term2 is not None and not STABLE_MODE:
                c = term2.scr.getch()
                if c == 410:
                    info("@terminal: resize event")
                    term2.resizeAll()
                term2.refreshAll()
Example #22
    def handle_entity_decisions(self, entity, decision):
        """Handles the entity's decision by resetting it if there is an error"""

        entity_memento = utils.Memento(entity)
        try:
            entity.perform_action(*decision)

        except utils.OutOfBoundsError:
            utils.warn('{}.handle_entity_decisions'.format(self), '{} tried to go out of bounds...little bugger'.format(entity))
            entity_memento.restore()
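
utils.Memento is not defined in this listing; the usage above implies it snapshots the entity's state at construction and that restore() rolls the entity back. A hypothetical minimal version consistent with that usage:

class Memento(object):
    def __init__(self, obj):
        # Snapshot the object's attributes at construction time.
        self._obj = obj
        self._state = dict(obj.__dict__)

    def restore(self):
        # Roll the object back to the snapshot.
        self._obj.__dict__.clear()
        self._obj.__dict__.update(self._state)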
Example #23
 def exposed_startVideoStream(self, port):
     # Establishes a TCP connection with the interface for video streaming
     if self.videothd is not None:
         warn("video streaming already running")
         return
     self.videothd = threading.Thread(target=startVideoStream_H264,
         args=(port, self.videostop))
     self.videothd.daemon = True
     info("starting video stream")
     self.videothd.start()
Example #24
	def main_by_names():
		mains = []
		for name in main_names:
			if name not in main_map:
				utils.warn("Unknown main module '%s'" % name)
				continue
			if main_map[name] is None:
				utils.warn("Main module '%s' specified twice" % name)
				continue
			mains.append(main_map[name])
			main_map[name] = None
		return mains
Example #25
def doMoveFile(cnamer, destDir, getPathPreview = False):
    """Moves file to destDir"""
    if not Config['move_files_enable']:
        raise ValueError("move_files feature is disabled but doMoveFile was called")

    if Config['move_files_destination'] is None:
        raise ValueError("Config value for move_files_destination cannot be None if move_files_enabled is True")

    try:
        return cnamer.newPath(destDir, getPathPreview = getPathPreview)
    except OSError, e:
        warn(unicode(e))
Example #26
def BuildNormalizedRaster(subsetIdentifier, uniqueValues, locationLayer, hList, saveRasters, rasterFolder, sr = None, cellSize = None):
    n = 0
    layer = "subsetSelectionForRaster"
    savedState, searchRadius = UD_Raster.SetRasterEnvironment(locationLayer, max(hList), sr, cellSize)
    try:
        hDict = {}
        for k,v in zip(uniqueValues,hList):
            hDict[k]=v
        for value in uniqueValues:
            query = UD_Isopleths.BuildQuery(locationLayer,subsetIdentifier,value)
            utils.info("Creating KDE raster for " + query)
            if arcpy.Exists(layer):
                arcpy.Delete_management(layer)
            arcpy.MakeFeatureLayer_management(locationLayer, layer, query)
            try:
                searchRadius = 2 * hDict[value]
                gotRaster, probRaster = UD_Raster.GetNormalizedKernelRaster(layer, searchRadius)
                if gotRaster:
                    lines, polys, donuts = None, None, None
                    if isoplethLines:
                        lines = isoplethLines + "_" + str(value)
                    if isoplethPolys:
                        polys = isoplethPolys + "_" + str(value)
                    if isoplethDonuts:
                        donuts = isoplethDonuts + "_" + str(value)
                    UD_Isopleths.CreateIsopleths(isoplethList, probRaster, lines, polys, donuts)
                    if saveRasters:
                        # Save individual probability rasters
                        name = os.path.join(rasterFolder,"praster_"+str(value)+".tif")
                        probRaster.save(name)
                    if n:
                        raster = raster + probRaster
                        n = n + 1
                    else:
                        raster = probRaster
                        n = 1
                else:
                    errorMsg = str(probRaster) # only if gotRaster is False
                    utils.warn("  Raster creation failed, not included in total. " + errorMsg)                
            finally:
                arcpy.Delete_management(layer)
    finally:
        UD_Raster.RestoreRasterEnvironment(savedState)

    if n == 0:
        return False, None
    #renormalize from 1 to 100
    raster = arcpy.sa.Slice(raster,100,"EQUAL_INTERVAL")
    if saveRasters:
        name = os.path.join(rasterFolder,"_praster_TOTAL.tif")
        raster.save(name)
    return True, raster
Example #27
    def on_disconnect(self):
        warn("connection lost")
        self._connected = False
        self.done = True
        self.loopstop.set()
        self.hardwarestop.set()
        self.exposed_stopVideoStream()

        # Shut down the processors orderly
        while self.pool:
            with self.lock:
                processor = self.pool.pop()
            processor.terminated = True
Example #28
def doMoveFile(cnamer, destDir):
    """Moves file to destDir"""
    if not Config['move_files_enable']:
        raise ValueError("move_files feature is disabled but doMoveFile was called")

    if Config['move_files_destination'] is None:
        raise ValueError("Config value for move_files_destination cannot be None if move_files_enabled is True")

    p("New directory:", destDir)
    try:
        cnamer.newPath(destDir)
    except OSError, e:
        warn(unicode(e))
Example #29
    def __init__(self, **kwargs):
        if "template_dirs" in kwargs:
            warn("template_dirs is now depreciated")
        if "plugin_dirs" in kwargs:
            warn("plugin_dirs is now depreciated")

        log_level = getattr(settings, 'LOGLEVEL', logging.ERROR)
        logging.basicConfig(
            level=log_level,
            format='%(levelname)-8s %(message)s'
        )

        # Find all the PLUGINS modules
        plugins = settings.PLUGINS
        self.plugins_dirs = {}

        # Set template dirs.
        full_path_template_dirs = []
        for t in settings.TEMPLATE_DIRS:
            full_path_template_dirs.append(os.path.abspath(t))

        # Add will's templates_root
        if TEMPLATES_ROOT not in full_path_template_dirs:
            full_path_template_dirs += [TEMPLATES_ROOT, ]

        # Add this project's templates_root
        if PROJECT_TEMPLATE_ROOT not in full_path_template_dirs:
            full_path_template_dirs += [PROJECT_TEMPLATE_ROOT, ]

        # Convert those to dirs
        for plugin in plugins:
            path_name = None
            for mod in plugin.split('.'):
                if path_name is not None:
                    path_name = [path_name]
                file_name, path_name, description = imp.find_module(mod, path_name)

            # Add, uniquely.
            self.plugins_dirs[os.path.abspath(path_name)] = plugin

            if os.path.exists(os.path.join(os.path.abspath(path_name), "templates")):
                full_path_template_dirs.append(
                    os.path.join(os.path.abspath(path_name), "templates")
                )

        # Key by module name
        self.plugins_dirs = dict(zip(self.plugins_dirs.values(), self.plugins_dirs.keys()))

        # Storing here because storage hasn't been bootstrapped yet.
        os.environ["WILL_TEMPLATE_DIRS_PICKLED"] =\
            ";;".join(full_path_template_dirs)
Example #30
def processFile(tvdb_instance, episode):
    """Gets episode name, prompts user for input
    """
    print "#" * 20
    print "# Processing file: %s" % episode.fullfilename
    print "# Detected series: %s (season: %s, episode: %s)" % (
        episode.seriesname,
        episode.seasonnumber,
        episode.episodenumber)

    try:
        correctedSeriesName, epName = getEpisodeName(tvdb_instance, episode)
    except (DataRetrievalError, ShowNotFound), errormsg:
        warn(errormsg)
Example #31
def layer():
    lt = time.time()
    global NC_LAYER, NC_IMAGE  #, NC_SCORE

    #print(utils.ribb("==", sep="="))
    #print(utils.ribb("[%d] LAYER " % NC_LAYER, sep="="))
    #print(utils.ribb("==", sep="="), "\n")

    # --- 1 step --- find all possible lines (that makes sense) ----------------
    print("Starting new round")
    lt = time.time()
    segments = pSLID(NC_IMAGE['main'])
    raw_lines = SLID(NC_IMAGE['main'], segments)
    lines = slid_tendency(raw_lines)

    # --- 2 step --- find interesting intersections (potentially a mesh grid) --
    print(utils.clock(),
          time.time() - lt, "--- 1 step --- found all lines", len(lines))
    v[0] += time.time() - lt
    lt = time.time()
    points = LAPS(NC_IMAGE['main'], lines)

    print(utils.clock(),
          time.time() - lt, "--- 2 step --- find all intersections",
          len(points))
    v[1] += time.time() - lt
    lt = time.time()
    four_points, mat_pts = hldet.getGridFromPoints(
        points, padding=0 if NC_LAYER == 2 else .25)
    re = four_points
    oim = NC_IMAGE['main'].copy()
    for pt in mat_pts:
        cv2.circle(oim, (int(pt[0]), int(pt[1])), 6, (255, 0, 0), 3)

    print(utils.clock(),
          time.time() - lt, "--- 3 step --- fit grid from points")
    v[2] += time.time() - lt
    lt = time.time()
    try:
        NC_IMAGE.crop(four_points)
    except Exception:
        utils.warn("Error on crop")

    print(utils.clock(), time.time() - lt, "--- 4 step --- post crop")
    return re
Example #32
    def loadGraphicsV3(self, graphics_name, graphic_index):
        utils.warn("graphics v3 is not fully supported")

        if self.__graphics['header']['魔数'] == 'RD':
            # Run-length decode
            self.__graphics['rle'] = self.extractGraphics(
                self.__graphics['raw'], self.__graphics['header'])

            # Verify that the decompressed data length is correct
            rle_length = self.__graphics['header']['宽度'] * self.__graphics[
                'header']['高度'] + self.__graphics['header']['调色板长度']
            if len(self.__graphics['rle']) == rle_length:

                # Slice out the embedded palette
                inpal = self.__graphics['rle'][
                    -self.__graphics['header']['调色板长度'] - 1:-1]
                # Reshape the embedded palette into a flat list
                inpal_np = np.array(inpal).astype('uint8')
                inpal_np = inpal_np.reshape(-1, 3)
                self.__graphics['inpal'] = inpal_np.tolist()

                # Slice out the image data
                pp = self.__graphics['rle'][0:(
                    self.__graphics['header']['宽度'] *
                    self.__graphics['header']['高度'])]
                self.__graphics['bytes'] = []
                for pixel in pp:
                    if pixel == 0xF0:
                        self.__graphics['bytes'].append([0, 0, 0, 0])
                    else:
                        [r, g, b] = self.__graphics['inpal'][pixel]
                        a = 255
                        self.__graphics['bytes'].append([r, g, b, a])

                # Convert to pixmap data and return it
                p = np.array(self.__graphics['bytes']).astype('uint8')
                p = p.reshape(self.__graphics_info['dict']['高度'], -1, 4)
                # Flip the array vertically, since pixel rows are stored bottom-up
                p = np.flipud(p)
                self.__graphics['pixmap'] = Image.fromarray(
                    p, mode='RGBA').toqpixmap()

            else:
                utils.warn("rle数据长度(%d)不正确,应为%d" %
                           (len(self.__graphics['rle']), rle_length))
Example #33
    def perform_segmentation(self):
        '''
        Worker function, runs as code in ScanFile._segmentation_thread
        Calculates and sets the segmentation on the image.
        Do not return anything from this method.
        '''
        segmentation_array = None
        try:
            # change stage title
            self.instructions.setText('<html><head/><body><p align="center">Stage 2:</p><p align="center">'
                                      'Calculating </p><p>Segmentation...</p><p '
                                      'align="center">(please wait)</p></body></html>')

            segmentation_array = self._all_scans[self._segmentation_running].perform_segmentation()
        except Exception as ex:
            print('perform_segmentation', ex)

        if segmentation_array is None:
            utils.warn('An error occurred while computing the segmentation. Please perform better markings, '
                 'and try again.')
            self.instructions.setText(
                '<html><head/><body><p align="center">Stage 1 [retry]: Boundary Marking...</p><p '
                'align="center">(hover for instructions)</p></body></html>')

            # reset status in workspace table
            item = QtWidgets.QTableWidgetItem('')
            self.tableWidget.setItem(self._segmentation_running, 1, item)

            self.segmentation_finished.emit()
            return

        # update finished status in workspace table
        item = QtWidgets.QTableWidgetItem(SEGMENTED)
        self.tableWidget.setItem(self._segmentation_running, 1, item)

        # show hidden features which are now relevant to work on segmentation
        self.verticalFrame.show()

        self.instructions.setText('<html><head/><body><p align="center">Stage 3: Review Segmentation...</p><p '
                                  'align="center">(hover for instructions)</p></body></html>')
        self.instructions.setToolTip('Use paintbrush and eraser to fix result segmentation.\nWhen finished, '
                                     'save segmentation.')

        self.save_seg_btn.setEnabled(True)
        self.segmentation_finished.emit()  # next segmentation will be run
Example #34
def maybe_post_receive_hook(post_receive_data):
    """Call the post-receive-hook is required.

    This function implements supports for the hooks.post-receive-hook
    config variable, by calling this function if the config variable
    is defined.
    """
    result = maybe_call_thirdparty_hook('hooks.post-receive-hook',
                                        hook_input=post_receive_data)
    if result is not None:
        hook_exe, p, out = result
        sys.stdout.write(out)
        # Flush stdout now, to make sure the script's output is printed
        # ahead of the warning below, which is directed to stderr.
        sys.stdout.flush()
        if p.returncode != 0:
            warn('!!! WARNING: %s returned code: %d.' %
                 (hook_exe, p.returncode))
Example #35
    def __init__(self, **kwargs):
        if "template_dirs" in kwargs:
            warn("template_dirs is now depreciated")
        if "plugin_dirs" in kwargs:
            warn("plugin_dirs is now depreciated")

        log_level = getattr(settings, 'LOGLEVEL', logging.ERROR)
        logging.basicConfig(level=log_level,\
            format='%(levelname)-8s %(message)s')

        
        # Find all the PLUGINS modules
        plugins = settings.PLUGINS
        self.plugins_dirs = {}

        # Convert those to dirs
        for plugin in plugins:
            path_name = None
            for mod in plugin.split('.'):
                if path_name is not None:
                    path_name=[path_name]
                file_name, path_name, description = imp.find_module(mod, path_name)

            # Add, uniquely.
            self.plugins_dirs[os.path.abspath(path_name)] = plugin

        # Key by module name
        self.plugins_dirs = dict(zip(self.plugins_dirs.values(),self.plugins_dirs.keys()))

        full_path_template_dirs = []
        for t in settings.TEMPLATE_DIRS:
            full_path_template_dirs.append(os.path.abspath(t))
        
        # Add will's templates_root
        if not TEMPLATES_ROOT in full_path_template_dirs:
            full_path_template_dirs += [TEMPLATES_ROOT, ]

        # Add this project's templates_root
        if not PROJECT_TEMPLATE_ROOT in full_path_template_dirs:
            full_path_template_dirs += [PROJECT_TEMPLATE_ROOT, ]

        # Storing here because storage hasn't been bootstrapped yet.
        os.environ["WILL_TEMPLATE_DIRS_PICKLED"] =\
            ";;".join(full_path_template_dirs)
Example #36
def warn_about_tag_update(tag_name, old_rev, new_rev):
    """Emit a warning about tag updates.

    PARAMETER
        tag_name: The name of the tag being updated.
        old_rev: The old revision referenced by the tag.
        new_rev: The new revision referenced by the tag.
    """
    warn('---------------------------------------------------------------',
         '--  IMPORTANT NOTICE:',
         '--',
         '--  You just updated the tag %s as follows:' % tag_name,
         '--    old SHA1: %s' % old_rev,
         '--    new SHA1: %s' % new_rev,
         '--',
         '-- Other developers pulling from this repository will not',
         '-- get the new tag. Assuming this update was deliberate,',
         '-- notifying all known users of the update is recommended.',
         '---------------------------------------------------------------')
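
Examples #11, #36, and #46 pass each output line as a separate positional argument, and Example #46 additionally passes a prefix keyword. A hypothetical variadic warn compatible with those call sites (the default prefix shown is an assumption):

import sys

def warn(*lines, **kwargs):
    # Hypothetical sketch: each positional argument is one output line;
    # 'prefix' is prepended to every line before it is written to stderr.
    prefix = kwargs.get('prefix', '*** ')
    for line in lines:
        sys.stderr.write('%s%s\n' % (prefix, line))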
Example #37
def comicnamer(paths):
    """Main comicnamer function, takes an array of paths, does stuff.
    """
    # Warn about move_files function
    if Config['move_files_enable']:
        import warnings
        warnings.warn(
            "The move_files feature is still under development. "
            "Be very careful with it.\n"
            "It has not been heavily tested, and is not recommended for "
            "general use yet.")

    p("#" * 20)
    p("# Starting comicnamer")

    issues_found = []

    for cfile in findFiles(paths):
        cfile = cfile.decode("utf-8")
        parser = FileParser(cfile)
        try:
            issue = parser.parse()
        except InvalidFilename:
            warn("Invalid filename %s" % cfile)
        else:
            issues_found.append(issue)

    if len(issues_found) == 0:
        raise NoValidFilesFoundError()

    p("# Found %d issue" % len(issues_found) + ("s" * (len(issues_found) > 1)))

    # Sort issues by series name and issue number
    issues_found.sort(key=lambda x: (x.seriesname, x.issuenumbers))

    comicvine_instance = Comicvine(interactive=not Config['select_first'])

    for issue in issues_found:
        processFile(comicvine_instance, issue)
        p('')

    p("#" * 20)
    p("# Done")
Example #38
def BCV2(allDistancesSquared, n, h):
    term1 = 1.0 / (4 * math.pi * h * h * (n - 1))
    term2 = 8 * (n - 1) * (n - 2) * h * h * math.pi
    #print "h",h,"n",n,"term1",term1,"term2",term2
    total = 0.0
    for d in allDistancesSquared:
        if d == 0.0:
            utils.warn(
                "Warning duplicate locations found, results may be invalid.")
            utils.warn("        Separating the locations by 1 unit.")
            d = 1
        D = d / (h * h)
        D2 = D * D
        term3 = (D2 - 8 * D + 8) * math.exp(-D2 / 2) / term2
        total = total + term3
        #print "d",d,"D",D,"D2",D2,"term3",term3,"total", total
    result = term1 + total
    #print "BCV2", h, result
    return result
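
In formula form, the score computed by BCV2 is

    BCV2(h) = \frac{1}{4\pi h^2 (n-1)} + \sum_{i \ne j} \frac{(D_{ij}^2 - 8 D_{ij} + 8)\, e^{-D_{ij}^2/2}}{8\pi (n-1)(n-2) h^2},
    \qquad D_{ij} = \frac{d_{ij}^2}{h^2},

where the d_{ij}^2 are the squared pairwise distances (with zeros clamped to 1, as in the loop above).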
Example #39
def DistancesSquared(points):
    small = 1e-3
    allSquaredDistances = []
    n = len(points)
    for i in range(n):
        #for j in range(i+1, n):  #unique set of distances dij = dji is faster; however produces different LSCV
        for j in range(n):
            if i == j:
                continue
            dx = points[j][0] - points[i][0]
            dy = points[j][1] - points[i][1]
            d2 = dx * dx + dy * dy
            if d2 < small:
                utils.warn(
                    "Distance from %d to %d is too small (%g).  Using %g" %
                    (i, j, d2, small))
                d2 = small
            allSquaredDistances.append(d2)
    return allSquaredDistances
Example #40
def main() -> None:
    failed = False

    nonmatching_fns_with_dump = {
        p.stem: p.read_bytes()
        for p in (root / "expected").glob("*.bin")
    }

    for func in utils.get_functions():
        if not func.decomp_name:
            continue

        try:
            get_fn_from_my_elf(func.decomp_name, 0)
        except KeyError:
            utils.warn(
                f"couldn't find {utils.format_symbol_name_for_msg(func.decomp_name)}"
            )
            continue

        if func.status == utils.FunctionStatus.Matching:
            if not check_function(func.addr, func.size, func.decomp_name):
                utils.print_error(
                    f"function {utils.format_symbol_name_for_msg(func.decomp_name)} is marked as matching but does not match"
                )
                failed = True
        elif func.status == utils.FunctionStatus.Equivalent or func.status == utils.FunctionStatus.NonMatching:
            if check_function(func.addr, func.size, func.decomp_name):
                utils.print_note(
                    f"function {utils.format_symbol_name_for_msg(func.decomp_name)} is marked as non-matching but matches"
                )

            fn_dump = nonmatching_fns_with_dump.get(func.decomp_name, None)
            if fn_dump is not None and not check_function(
                    func.addr, len(fn_dump), func.decomp_name, fn_dump):
                utils.print_error(
                    f"function {utils.format_symbol_name_for_msg(func.decomp_name)} does not match expected output"
                )
                failed = True

    if failed:
        sys.exit(1)
Example #41
def clear_expiry():
    utils.log(f" Running clear task.")

    clear_queue = []
    for channel_name in fetched:
        for video_id in fetched[channel_name]:
            for m3u8_id in fetched[channel_name][video_id]["fregments"]:
                if time.time() - fetched[channel_name][video_id]["fregments"][
                        m3u8_id]["create_time"] > const.EXPIRY_TIME:
                    utils.log(
                        f"[{channel_name}] {m3u8_id} has expired. Clearing...")

                    clear_queue.append({
                        "channel_name": channel_name,
                        "video_id": video_id,
                        "m3u8_id": m3u8_id
                    })
    for x in clear_queue:
        try:
            os.remove(fetched[x["channel_name"]][x["video_id"]]["fregments"][
                x["m3u8_id"]]["file"])
        except OSError:
            utils.warn(
                f"[{x['channel_name']}] Error occurs when deleting {x['m3u8_id']}. Ignoring..."
            )
        fetched[x["channel_name"]][x["video_id"]]["fregments"].pop(
            x["m3u8_id"])

    clear_queue = []
    for channel_name in fetched:
        for video_id in fetched[channel_name]:
            if not fetched[channel_name][video_id]["fregments"]:
                clear_queue.append({
                    "channel_name": channel_name,
                    "video_id": video_id
                })
    for x in clear_queue:
        utils.log(
            f"[{x['channel_name']}] {x['video_id']} has all gone. Clearing...")
        fetched[x['channel_name']].pop(x['video_id'])

    save()
Example #42
 def insert(self, db_connection):
     sql = """
         INSERT INTO opinions (
             docket_number,
             type_id,
             authoring_justice,
             no_concurrences_flag,
             effective_type_flag
         )
         VALUES (?, ?, ?, ?, ?);
     """
     if self._sql_tuple is None:
         msg = "Encountered unknown authoring justice '{}' in {}"
         utils.warn(msg, self.authoring_justice, repr(self))
         return False
     tup = self._sql_tuple + (self.has_no_concurrences,
                              self.needs_effective_type)
     cur = self._insert(db_connection, sql, tup)
     self._fetch_id(cur)
     return True
Example #43
def urlopen(url, retry=0):
    try:
        return urllib.request.urlopen(url)
    except urllib.error.HTTPError as e:
        if e.code == 503:
            if retry < const.HTTP_RETRY:
                utils.warn(
                    f" Get {e.code} Error. Trying {retry+1}/{const.HTTP_RETRY}..."
                )
                time.sleep(1)
                return urlopen(url, retry + 1)
            else:
                raise e
        else:
            raise e
    except urllib.error.URLError as e:
        if retry < const.HTTP_RETRY:
            return urlopen(url, retry + 1)
        else:
            raise e
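
A usage sketch for the retrying wrapper (the URL is hypothetical; const.HTTP_RETRY must already be defined):

# Fetch a resource, retrying on 503 responses and transient URL errors.
resp = urlopen("https://example.com/stream/playlist.m3u8")
body = resp.read().decode("utf-8")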
Example #44
def insert_case(db_connection, case_filing):
    # Begin and commit transaction for inserting the case
    # filing and its opinions.
    inserted_opinions = []
    try:
        with db_connection:
            case_filing.insert(db_connection)
            for opinion in case_filing.opinions:
                # Case Filing has no opinions.
                if opinion is None:
                    break
                if opinion.insert(db_connection):
                    inserted_opinions.append(opinion)
    except (apsw.Error, sqlite3.Error) as e:
        msg = 'Unable to insert {}: {}'
        utils.warn(msg, case_filing.docket_number, e)
        # Case filing and opinions not inserted, so no
        # concurrences to insert.
        return None
    return inserted_opinions
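
Examples #42 and #44 hand utils.warn a '{}' template plus its arguments, implying the helper formats the message itself. A hypothetical variant consistent with those call sites:

import sys

def warn(msg, *args):
    # Hypothetical sketch: treat msg as a str.format template and fill
    # it with the remaining positional arguments.
    sys.stderr.write('WARNING: %s\n' % msg.format(*args))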
Example #45
def get_emails_from_script(script_filename, changed_files):
    """The list of emails addresses for the given list of changed files.

    This list is obtained by running the given script, and passing it
    the list of changed files via stdin (one file per line). By
    convention, passing nothing via stdin (no file changed) should
    trigger the script to return all email addresses.

    PARAMETERS
        script_filename: The name of the script to execute.
        changed_files: A list of files to pass to the script (via stdin).
            None is also accepted in place of an empty list.
    """
    input_str = '' if changed_files is None else '\n'.join(changed_files)

    p = Popen([script_filename], stdin=PIPE, stdout=PIPE)
    (output, _) = p.communicate(input=input_str)
    if p.returncode != 0:
        warn('!!! %s failed with error code: %d.'
             % (script_filename, p.returncode))
    return output.splitlines()
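
The docstring above describes a small protocol: the script receives changed file names on stdin, one per line, and empty input means "return all addresses". A hypothetical script honoring that convention (the mapping is invented for illustration):

#!/usr/bin/env python
import sys

# Invented example mapping from path prefix to notification address.
EMAIL_MAP = {'src/': 'src-team@example.com', 'doc/': 'doc-team@example.com'}

changed = [line.strip() for line in sys.stdin if line.strip()]
if not changed:
    # No input: by convention, return every known address.
    addresses = set(EMAIL_MAP.values())
else:
    addresses = {email for prefix, email in EMAIL_MAP.items()
                 if any(f.startswith(prefix) for f in changed)}
print('\n'.join(sorted(addresses)))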
Example #46
 def send_email_notifications(self):
     """Send all email notifications associated to this update.
     """
     no_email_re = self.search_config_option_list('hooks.no-emails')
     if no_email_re is not None:
         warn(*['-' * 70,
                "--  The hooks.no-emails config option contains `%s',"
                % no_email_re,
                '--  which matches the name of the reference being'
                ' updated ',
                '--  (%s).' % self.ref_name,
                '--',
                '--  Commit emails will therefore not be sent.',
                '-' * 70,
                ], prefix='')
         return
     # This phase needs all added commits to have certain attributes
     # to be computed.  Do it now.
     self.__set_send_email_p_attr(self.new_commits_for_ref)
     self.__email_ref_update()
     self.__email_new_commits()
Example #47
    def verify_plugin_settings(self):
        puts("Verifying settings requested by plugins...")

        missing_settings = False
        missing_setting_error_messages = []
        with indent(2):
            for name, meta in self.required_settings_from_plugins.items():
                if not hasattr(settings, name):
                    error_message = "%(setting_name)s is missing. It's required by the %(plugin_name)s plugin's '%(function_name)s' method." % meta
                    puts(colored.red("✗ %(setting_name)s" % meta))
                    missing_setting_error_messages.append(error_message)
                    missing_settings = True
                else:
                    show_valid("%(setting_name)s" % meta)

            if missing_settings:
                puts("")
                warn("Will is missing settings required by some plugins. He's starting up anyway, but you will run into errors if you try to use those plugins!")
                self.add_startup_error("\n".join(missing_setting_error_messages))
            else:
                puts("")
Example #48
def processFile(comicvine_instance, issue):
    """Gets issue name, prompts user for input
    """
    p("#" * 20)
    p("# Processing file: %s" % issue.fullfilename)

    if len(Config['input_filename_replacements']) > 0:
        replaced = applyCustomInputReplacements(issue.fullfilename)
        p("# With custom replacements: %s" % (replaced))

    p("# Detected series: %s (issue: %s)" %
      (issue.seriesname, ", ".join([str(x) for x in issue.issuenumbers])))

    try:
        correctedSeriesName, issName = getIssueName(comicvine_instance, issue)
    except (DataRetrievalError, SeriesNotFound), errormsg:
        if Config['always_rename'] and Config['skip_file_on_error'] is True:
            warn("Skipping file due to error: %s" % errormsg)
            return
        else:
            warn(errormsg)
Example #49
def processFile(tvdb_instance, episode):
    """Gets episode name, prompts user for input
    """
    p("#" * 20)
    p("# Processing file: %s" % episode.fullfilename)

    if len(Config['input_filename_replacements']) > 0:
        replaced = applyCustomInputReplacements(episode.fullfilename)
        p("# With custom replacements: %s" % (replaced))

    p("# Detected series: %s (%s)" %
      (episode.seriesname, episode.number_string()))

    try:
        episode.populateFromTvdb(tvdb_instance)
    except (DataRetrievalError, ShowNotFound), errormsg:
        if Config['always_rename'] and Config['skip_file_on_error'] is True:
            warn("Skipping file due to error: %s" % errormsg)
            return
        else:
            warn(errormsg)
Example #50
def tvnamer(paths):
    """Main tvnamer function, takes an array of paths, does stuff.
    """

    p("#" * 20)
    p("# Starting tvnamer")

    episodes_found = []

    for cfile in findFiles(paths):
        parser = FileParser(cfile)
        try:
            episode = parser.parse()
        except InvalidFilename, e:
            warn("Invalid filename: %s" % e)
        else:
            if episode.seriesname is None and Config['force_name'] is None and Config['series_id'] is None:
                warn("Parsed filename did not contain series name (and --name or --series-id not specified), skipping: %s" % cfile)

            else:
                episodes_found.append(episode)
Example #51
def cli(question_file, answers_dir, answer_file, test):
    """Given a set of questions and the answers directory with top 100
    documents for each question, generate the answer file
    """
    success('---NLP Project Three: Question Answer---')

    question_filepath = os.path.realpath(question_file)
    answers_dirpath = os.path.realpath(answers_dir)
    answer_filepath = os.path.realpath(answer_file)

    log('Answering: {}\n Using: {}\n Into: {}'.format(question_filepath,
                                                      answers_dirpath,
                                                      answer_filepath))

    if test:
        warn('Testing, not normal execution...')
        _test_endpoint(question_filepath, answers_dirpath, answer_filepath)
        return

    try:
        questions = get_questions(question_filepath)
        if len(questions) != 232:
            warn('devset has 232 questions (Got {})'.format(len(questions)))

        answers = {}
        for question in questions:
            answers[question.qid] = get_answers(question, answers_dirpath)
        if len(answers) != len(questions):
            warn('Got {} answers for {} questions'.format(
                len(answers), len(questions)))

        write_answers(answers, answer_filepath)
        success('Wrote answers to {}'.format(answer_filepath))
    except NotImplementedError as e:
        error('TODO: {}'.format(e))
Example #52
def order(input_dir, output, manifest, debug):
    try:
        paths = sanitize_paths(input_dir, output, manifest)
        input_dirpath, output_filepath, manifest_filepath = paths
    except ValueError as e:
        warn('Error: {}. Exiting.'.format(e.message))
        return

    graph_d3_data, manifest_data = process(input_dirpath, debug)

    if os.path.exists(output_filepath):
        warn('Overwriting the existing file at {}'.format(output_filepath))

    if os.path.exists(manifest_filepath):
        warn('Overwriting the existing file at {}'.format(manifest_filepath))

    with open(output_filepath, 'w') as output_file:
        output_file.truncate()
        output_file.write(json.dumps(graph_d3_data, indent=2))

    with open(manifest_filepath, 'w') as manifest_file:
        manifest_file.truncate()
        manifest_file.write(json.dumps(manifest_data, indent=2))

    success('Wrote partial ordering graph of {} to {}'.format(input_dirpath, output_filepath))
    success('Created manifest file at {}'.format(manifest_filepath))
Example #53
def cli(input_dir, output_dir):
    success("---NLP Project Three---")
    input_dirpath = os.path.realpath(input_dir)
    output_dirpath = os.path.realpath(output_dir)

    if input_dirpath == output_dirpath:
        raise ValueError('Input and output directories must be different')

    log("{} --> {}".format(input_dirpath, output_dirpath))

    questions = os.listdir(input_dirpath)
    for question_dirname in questions:
        input_question_dirpath = os.path.join(input_dirpath, question_dirname)

        output_question_dirpath = os.path.join(output_dirpath,
                                               question_dirname)
        os.makedirs(output_question_dirpath)

        log('Processing question {}...'.format(question_dirname))

        filenames = os.listdir(input_question_dirpath)
        for filename in filenames:
            input_filepath = os.path.join(input_question_dirpath, filename)

            file_content = get_content(input_filepath)

            if not file_content:
                warn('Found no content in document {}:{}'.format(
                    question_dirname, filename))

            tokens = string_to_tokens(file_content)
            ner_chunks = process_ner(tokens)

            output_filepath = os.path.join(output_question_dirpath, filename)
            output_data = '\n'.join(ner_chunks)
            with open(output_filepath, 'w') as output_file:
                output_file.write(output_data)

        log('Wrote to directory {}'.format(output_question_dirpath))
Example #54
0
def process(path_to_dir, debug):
    g = Graph()
    programs = []
    program_to_vertex = {}
    for filename in os.listdir(path_to_dir):
        _, extension = os.path.splitext(filename)
        if extension != '.p2':
            continue

        filepath = os.path.join(path_to_dir, filename)
        try:
            meta = get_program_info(filepath)
        except SyntaxError as e:
            warn('Error processing "{}" at {}:{}\n{}'.format(
                filename, e.lineno, e.offset, e.text))
            continue

        programs.append(meta)

    programs = decompose_to_scc(programs)
    for program in programs:
        vertex_id = g.add_vertex(program)
        program_to_vertex[program.pid] = vertex_id

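    # Pairwise containment check; the `in` test (assumed to be a custom
    # __contains__ on the program metadata) decides each dependency edge's direction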
    for i in xrange(len(programs)):
        for j in xrange(i + 1, len(programs)):
            first, second = programs[i], programs[j]
            first_vid = program_to_vertex[first.pid]
            second_vid = program_to_vertex[second.pid]

            if first in second:
                g.add_edge((first_vid, second_vid))
            if second in first:
                g.add_edge((second_vid, first_vid))

    if debug:
        debug_dump(g)

    return graph_to_d3_dict(g)
Example #55
0
def main(args: argparse.Namespace) -> None:
    """
    Main method for building tf examples from individual book (.npy) files

    :param args: ArgumentParser-parsed arguments
    :return: None
    """
    utils.log_args(args)

    if args.sent_per_book != -1:
        utils.warn("Using a max number of sentences per book")

    # Initialize the list of output files to write the examples to
    output_files = []
    for i_tf_ex in range(args.num_example_files):
        cur_tf_file_name = "%d_TfExample.tfrecord" % i_tf_ex
        output_files.append(os.path.join(args.output_dir, cur_tf_file_name))

    # Generate examples
    with tf_example_utils.WriteAsTfExample(output_files, args.vocab_file,
                                           args.max_num_tokens) as writer:
        generate_tf_example(args, writer)
Example #56
0
def findFiles(paths):
    """Takes an array of paths, returns all files found
    """
    valid_files = []

    for cfile in paths:
        cur = FileFinder(cfile,
                         with_extension=Config['valid_extensions'],
                         recursive=Config['recursive'])

        try:
            valid_files.extend(cur.findFiles())
        except InvalidPath:
            warn("Invalid path: %s" % cfile)

    if len(valid_files) == 0:
        raise NoValidFilesFoundError()

    # Remove duplicate files (all paths from FileFinder are absolute)
    valid_files = list(set(valid_files))

    return valid_files
Example #57
0
def begin_battle() -> bool:
    if not MODE_FIGHT:  # enable auto in auto mode
        for auto in [Btn.auto, Btn.auto_sub]:
            auto.click(utils.screenshot())  # enable auto

    # use heals
    if not Btn.zero_heals.on_screen(utils.screenshot()):
        utils.click(47, 255, 25, 25, 2.0)
        Btn.use_heal.click(utils.screenshot())
        Btn.cancel_heal.click(utils.screenshot())

    # Check mood
    screen = utils.screenshot()
    if Btn.mood.on_screen(screen):
        log("Ships in bad mood. Wait 60 min")
        utils.warn("mood", screen)
        time.sleep(60 * 60)
        log("Continue")
        return False

    Btn.battle.click(utils.screenshot())  # begin battle

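    # If the enhance prompt appears instead of the battle, enhance ships,
    # back out of the menu, and restart the whole battle sequence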
    if Btn.enhance_start.click(utils.screenshot()):
        menu.enhance_ships()
        for i in range(2):
            adb.back()
            time.sleep(1.5)
        return begin_battle()

    if Btn.battle.on_screen(utils.screenshot()):  # check if battle started
        return False

    if MODE_FIGHT:
        fight()
    else:
        log("Waiting 30s")
        time.sleep(30.0)
    return True
Example #58
0
    def load(self, load_ckpt_paths, options, epoch=200):
        """Load checkpoints.
    """
        assert len(load_ckpt_paths) == len(self.embeds)
        for i in range(len(self.embeds)):
            ckpt_path = load_ckpt_paths[i]
            load_opt = options[i]
            if len(ckpt_path) == 0:
                utils.info('{}: training from scratch'.format(
                    self.modalities[i]))
                continue

            if load_opt == 0:  # load teacher model (visual + sequence)
                path = os.path.join(ckpt_path, 'embed_{}.pth'.format(epoch))
                ckpt = torch.load(path)
                try:
                    self.embeds[i].load_state_dict(ckpt)
                except Exception:
                    utils.warn(
                        'Check that the "modalities" argument is correct.')
                    exit(1)
                utils.info('{}: ckpt {} loaded'.format(self.modalities[i],
                                                       path))
            elif load_opt == 1:  # load pretrained visual encoder
                ckpt = torch.load(ckpt_path)
                # Change keys in the ckpt
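                # (the 'module.' prefix is added to checkpoints saved under
                # torch.nn.DataParallel)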
                new_state_dict = OrderedDict()
                for key in list(ckpt.keys())[:-2]:  # exclude fc weights
                    new_key = key[7:]  # Remove 'module.'
                    new_state_dict[new_key] = ckpt[key]
                # update state_dict
                state_dict = self.embeds[i].module.embed.state_dict()
                state_dict.update(new_state_dict)
                self.embeds[i].module.embed.load_state_dict(state_dict)
                utils.info('{}: visual encoder from {} loaded'.format(
                    self.modalities[i], ckpt_path))
            else:
                raise NotImplementedError
Example #59
0
def track_file(module, file_name, included_dirs, include_stack):
	dependencies = set()
	include_stack[file_name] = True
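	# include_stack doubles as the current DFS path, used to detect include cycles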
	for inc in module.include_map()[file_name]:
		ok = False
		for m, inc_dir in module.private_dirs():
			abs_header = os.path.abspath(os.path.join(inc_dir, inc))
			if os.path.isfile(abs_header):
				ok = True
				dependencies.add(abs_header)
				included_dirs.add(inc_dir)
				if abs_header not in m.include_map():
					utils.warn("Unknown file '%s' of module %s (included from '%s')" % (abs_header, m.name, file_name))
					break
				if abs_header in include_stack:
					raise config.BaseError("File include loop ('%s' included by '%s')" % (
						os.path.relpath(abs_header), " --> ".join([os.path.relpath(i) for i in include_stack])))
				dependencies |= track_file(m, abs_header, included_dirs, include_stack)
				break
		if not ok:
			utils.warn("Cannot found '%s' included from '%s'" % (inc, os.path.relpath(file_name)))
	del include_stack[file_name]
	return dependencies
Example #60
0
def LSCV(allDistancesSquared, n, h):
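    # Least-squares cross-validation score for a 2-D Gaussian kernel with
    # bandwidth h: a constant 1/(pi*n*h^2) term plus, for every pairwise
    # squared distance d,
    #     (exp(-d/(4*h*h))/(4*pi) - exp(-d/(2*h*h))/pi) / (n*n*h*h)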
    term1 = 1.0 / (math.pi * n * h * h)
    term2a = 1.0 / (4.0 * math.pi)
    term2b = -1.0 / (4.0 * h * h)
    term3a = 1.0 / math.pi
    term3b = -1.0 / (2.0 * h * h)
    term4 = n * n * h * h
    #print "term1",term1,"term2a",term2a,"term3a",term3a,"term2b",term2b,"term3b",term3b,"term4",term4
    total = 0.0
    for d in allDistancesSquared:
        if d == 0:
            utils.warn(
                "Duplicate locations found; results may be invalid.")
            utils.warn("Separating the locations by 1 unit.")
            d = 1
        term2 = term2a * math.exp(d * term2b)
        term3 = term3a * math.exp(d * term3b)
        term5 = (term2 - term3) / term4
        total = total + term5
        #print "d",d,"term2",term2,"term3",term3,"term5",term5, "total", total
    result = term1 + total
    #print "CV", h, result
    return result