Example #1
File: sitemap.py Project: hibozzy/mediatum
 def create_sitemap_index(self, sitemaps, cur_time):
     """
     Creates a sitemap index file from the sitemaps passed in
     @param sitemaps: a list of strings of sitemap names
     """
     if os.path.isfile(self.path):
         logging.getLogger("everything").info("%s already exists" % self.path)
     else:
         root = etree.Element("sitemapindex", xmlns="http://www.sitemaps.org/schemas/sitemap/0.9")
         if sitemaps:
             for i in sitemaps:
                 sm = etree.SubElement(root, "sitemap")
                 loc = etree.SubElement(sm, "loc")
                 loc.text = "/".join(["http:/", self.host, i])
                 lastmod = etree.SubElement(sm, "lastmod")
                 lastmod.text = cur_time
             try:
                 with open(self.path, "w") as f:
                     f.write("""<?xml version="1.0" encoding="UTF-8"?>\n""")
                     f.write(etree.tostring(root))
             except IOError:
                 logging.getLogger("error").error("Error creating %s" % self.path)
Example #2
def getLogger(name, goodLooking=True, logLevel=logging.INFO):
 
  logger = logging.getLogger(name)
  logger.setLevel(logLevel)
  
  ch = logging.StreamHandler()
  ch.setLevel(logLevel)

  if goodLooking:
    formatter = PsiopicFormatter()
  else:
    formatter = logging.Formatter()

  ch.setFormatter(formatter)

  logger.addHandler(ch)

  # setup requests lib logger if debug is on
  if logLevel == logging.DEBUG: 
    requests_logger = logging.getLogger('requests.packages.urllib3')
    requests_logger.setLevel(logging.DEBUG)
    requests_logger.propagate = True
    requests_logger.addHandler(ch)


  return logger 
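
A hypothetical call site for the factory above (the logger name and level are illustrative; passing goodLooking=False sidesteps the project-specific PsiopicFormatter):

log = getLogger('psiopic.worker', goodLooking=False, logLevel=logging.DEBUG)
log.debug('stream handler and formatter are attached')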
Example #3
 def init_logger(self, pth):
     handler = logging.FileHandler(os.path.join(pth, "log.txt"), 'a')
     formatter = logging.Formatter(
         "%(levelname)-8s | %(asctime)s | %(name)-10s | %(message)s")
     handler.setFormatter(formatter)
     handler.setLevel(logging.DEBUG)
     logging.getLogger("").addHandler(handler)
Example #4
def _init_python():
    if PY2 or is_release():
        MinVersions.PYTHON2.check(sys.version_info)
    else:
        # for non release builds we allow Python3
        MinVersions.PYTHON3.check(sys.version_info)

    if is_osx():
        # We build our own openssl on OSX and need to make sure that
        # our own ca file is used in all cases as the non-system openssl
        # doesn't use the system certs
        install_urllib2_ca_file()

    if is_windows():
        # Not really needed on Windows as pygi-aio seems to work fine, but
        # wine doesn't have certs which we use for testing.
        install_urllib2_ca_file()

    if is_windows() and os.sep != "\\":
        # In the MSYS2 console MSYSTEM is set, which breaks os.sep/os.path.sep
        # If you hit this do a "setup.py clean -all" to get rid of the
        # bytecode cache then start things with "MSYSTEM= ..."
        raise AssertionError("MSYSTEM is set (%r)" % environ.get("MSYSTEM"))

    if is_windows():
        # gdbm is broken under msys2, this makes shelve use another backend
        sys.modules["gdbm"] = None
        sys.modules["_gdbm"] = None

    logging.getLogger().addHandler(PrintHandler())
Example #5
def set_logger_level(logger_name, log_level='error'):
    '''
    Tweak a specific logger's logging level
    '''
    logging.getLogger(logger_name).setLevel(
        LOG_LEVELS.get(log_level.lower(), logging.ERROR)
    )
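
The LOG_LEVELS table is defined elsewhere in that module; a plausible sketch of it, assumed from how it is used above:

import logging

# Assumed shape of the LOG_LEVELS mapping consumed by set_logger_level().
LOG_LEVELS = {
    'debug': logging.DEBUG,
    'info': logging.INFO,
    'warning': logging.WARNING,
    'error': logging.ERROR,
    'critical': logging.CRITICAL,
}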
Example #6
    def do_bugin(self, args):
        """bugin [ <logger> ]  - add a console logging handler to a logger"""
        args = args.split()
        if _debug: ConsoleCmd._debug("do_bugin %r", args)

        # get the logger name and logger
        if args:
            loggerName = args[0]
            if loggerName in logging.Logger.manager.loggerDict:
                logger = logging.getLogger(loggerName)
            else:
                logger = None
        else:
            loggerName = '__root__'
            logger = logging.getLogger()

        # add a logging handler
        if not logger:
            self.stdout.write("not a valid logger name\n")
        elif loggerName in self.handlers:
            self.stdout.write("%s already has a handler\n" % loggerName)
        else:
            handler = ConsoleLogHandler(logger)
            self.handlers[loggerName] = handler
            self.stdout.write("handler to %s added\n" % loggerName)
        self.stdout.write("\n")
Example #7
    def do_bugout(self, args):
        """bugout [ <logger> ]  - remove a console logging handler from a logger"""
        args = args.split()
        if _debug: ConsoleCmd._debug("do_bugout %r", args)

        # get the logger name and logger
        if args:
            loggerName = args[0]
            if loggerName in logging.Logger.manager.loggerDict:
                logger = logging.getLogger(loggerName)
            else:
                logger = None
        else:
            loggerName = '__root__'
            logger = logging.getLogger()

        # remove the logging handler
        if not logger:
            self.stdout.write("not a valid logger name\n")
        elif not loggerName in self.handlers:
            self.stdout.write("no handler for %s\n" % loggerName)
        else:
            handler = self.handlers[loggerName]
            del self.handlers[loggerName]

            # see if this (or its parent) is a module level logger
            if hasattr(logger, 'globs'):
                logger.globs['_debug'] -= 1
            elif hasattr(logger.parent, 'globs'):
                logger.parent.globs['_debug'] -= 1

            # remove it from the logger
            logger.removeHandler(handler)
            self.stdout.write("handler to %s removed\n" % loggerName)
        self.stdout.write("\n")
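
Both do_bugin and do_bugout validate names against logging.Logger.manager.loggerDict, the registry of every logger created so far. A minimal sketch of that existence check (the logger name is illustrative):

import logging

logging.getLogger('bacpypes.app')  # creating a logger registers it
print('bacpypes.app' in logging.Logger.manager.loggerDict)    # True
print('no.such.logger' in logging.Logger.manager.loggerDict)  # False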
Example #8
def start_server(langcodes,
                 host,
                 port,
                 use_reloader,
                 verbose=False,
                 logformat='[%(asctime)-15s][%(levelname)s][%(module)s][%(pathname)s:%(lineno)d]: %(message)s',
                 use_features=False,
                 debug=False):
    """
    Start a SemanticizerFlaskServer with all processors loaded into the
    pipeline.

    @param verbose: Set whether the Flask server should be verbose
    @param logformat: The logformat used by the Flask server
    """
    # Initialize the pipeline
    pipeline = procpipeline.build(langcodes, use_features, debug=debug)
    # Create the FlaskServer
    logging.getLogger().info("Setting up server")
    server = Server()
    server.set_debug(verbose, logformat)
    # Setup all available routes / namespaces for the HTTP server
    server.setup_all_routes(pipeline, langcodes)
    logging.getLogger().info("Done setting up server, now starting...")
    # And finally, start the thing
    server.start(host, port, use_reloader)
Example #9
def main():
    # Init the logger
    init_logging(config_get(('logging', 'path'), 'log.txt'),
                 config_get(('logging', 'verbose'), False),
                 config_get(('logging', 'format'), None))

    # Set the datasource and init it
    wpmlangs = config_get(('wpm', 'languages'))
    settings = config_get(('settings'), {})
    init_datasource(wpmlangs, settings)

    # Start the server
    try:
        start_server(config_get(('wpm', 'languages')).keys(),
                     config_get(('server', 'host'), '0.0.0.0'),
                     config_get(('server', 'port'), 5000),
                     config_get(('server', 'use_reloader'), False),
                     config_get(('logging', 'verbose'), False),
                     config_get(('logging', 'format'), None),
                     config_get(('linkprocs', 'includefeatures'), False),
                     config_get(('server', 'debug'), False))
    except ValueError as e:
        logging.getLogger().fatal("Error running Semanticizer server: %s" \
                                  % e.message)
        raise
Example #10
 def __init__(self):
     self.log = logging.getLogger("pyzord")
     self.usage_log = logging.getLogger("pyzord-usage")
     self.log.addHandler(logging.NullHandler())
     self.usage_log.addHandler(logging.NullHandler())
     self.forwarder = None
     self.one_step = False
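
Attaching NullHandler as above is the standard guard for library code: it suppresses the "no handlers could be found" warning while leaving handler policy to the application. The effect in miniature:

import logging

lib_log = logging.getLogger('pyzord')
lib_log.addHandler(logging.NullHandler())
lib_log.warning('dropped silently unless the application adds real handlers')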
Example #11
def configure_logging():
    formatter = logging.Formatter('%(asctime)s :: PID %(process)d :: %(name)s (%(levelname)s) :: %(message)s')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    handler.setLevel(logging.DEBUG)
    logging.getLogger().addHandler(handler)
    logging.getLogger().setLevel(logging.DEBUG)
Example #12
File: files.py Project: devdej/OctoPrint
def _create_lastmodified(path, recursive):
	if path.endswith("/files"):
		# all storages involved
		lms = [0]
		for storage in fileManager.registered_storages:
			try:
				lms.append(fileManager.last_modified(storage, recursive=recursive))
			except:
				logging.getLogger(__name__).exception("There was an error retrieving the last modified data from storage {}".format(storage))
				lms.append(None)

		if filter(lambda x: x is None, lms):
			# we return None if ANY of the involved storages returned None
			return None

		# if we reach this point, we return the maximum of all dates
		return max(lms)

	elif path.endswith("/files/local"):
		# only local storage involved
		try:
			return fileManager.last_modified(FileDestinations.LOCAL, recursive=recursive)
		except:
			logging.getLogger(__name__).exception("There was an error retrieving the last modified data from storage {}".format(FileDestinations.LOCAL))
			return None

	else:
		return None
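
One portability caveat in the snippet above: the truthiness test on filter() only works on Python 2, where filter() returns a list. On Python 3 it returns a lazy iterator that is always truthy. A version-agnostic form of the same guard:

lms = [0, None, 1499990000]  # illustrative values
if any(x is None for x in lms):
    print('at least one storage failed to report a last-modified time')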
Example #13
    def __call__(self, *args, **kwargs):

        self._reply_arrived_event.clear()
        self.result = None

        wait = kwargs.get('wait', False)

        if self.door is None:
            self.init_device()

        logging.getLogger('HWR').debug("Executing sardana macro: %s" % self.macro_format)
        
        try:
            fullcmd = self.macro_format % args 
        except:
            logging.getLogger('HWR').info("  - Wrong format for macro arguments. Macro is %s / args are (%s)" % (self.macro_format, str(args)))
            return
   
        try:
            import time
            self.t0 = time.time()
            if (self.doorstate in ["ON","ALARM"]):
                self.door.runMacro( (fullcmd).split()  )
                self.macrostate = SardanaMacro.STARTED
                self.emit('commandBeginWaitReply', (str(self.name()), ))
            else:
                logging.getLogger('HWR').error("%s. Cannot execute. Door is not READY", str(self.name()) )
                self.emit('commandFailed', (-1, self.name()))
        except TypeError:
            logging.getLogger('HWR').error("%s. Cannot properly format macro code. Format is: %s, args are %s", str(self.name()), self.macro_format, str(args)) 
            self.emit('commandFailed', (-1, self.name()))
        except DevFailed, error_dict:
            logging.getLogger('HWR').error("%s: Cannot run macro. %s", str(self.name()), error_dict) 
            self.emit('commandFailed', (-1, self.name()))
Example #14
File: __init__.py Project: djace/imdbpy
def IMDb(accessSystem=None, *arguments, **keywords):
    """Return an instance of the appropriate class.
    The accessSystem parameter is used to specify the kind of
    the preferred access system."""
    if accessSystem is None or accessSystem in ('auto', 'config'):
        try:
            cfg_file = ConfigParserWithCase(*arguments, **keywords)
            # Parameters set by the code take precedence.
            kwds = cfg_file.getDict('imdbpy')
            if 'accessSystem' in kwds:
                accessSystem = kwds['accessSystem']
                del kwds['accessSystem']
            else:
                accessSystem = 'http'
            kwds.update(keywords)
            keywords = kwds
        except Exception, e:
            import logging
            logging.getLogger('imdbpy').warn('Unable to read configuration' \
                                            ' file; complete error: %s' % e)
            # It just LOOKS LIKE a bad habit: we tried to read config
            # options from some files, but something is gone horribly
            # wrong: ignore everything and pretend we were called with
            # the 'http' accessSystem.
            accessSystem = 'http'
Example #15
def init_logger(location, config):
    """ Initialize the logger with settings from config. """

    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

    if get_conf(config, 'Logging.enabled', False) == False:
        handler = NullHandler()
        logging.getLogger("dagobah").addHandler(handler)
        return

    if get_conf(config, 'Logging.logfile', 'default') == 'default':
        path = os.path.join(location, 'dagobah.log')
    else:
        path = config['Logging']['logfile']

    level_string = get_conf(config, 'Logging.loglevel', 'info').upper()
    numeric_level = getattr(logging, level_string, None)

    logging.basicConfig(filename=path, level=numeric_level)

    root = logging.getLogger()
    stdout_logger = logging.StreamHandler(sys.stdout)
    stdout_logger.setLevel(logging.INFO)
    root.addHandler(stdout_logger)

    print 'Logging output to %s' % path
    logging.info('Logger initialized at level %s' % level_string)
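
Incidentally, the hand-rolled NullHandler above predates wide adoption of the one the standard library has shipped since Python 2.7/3.1; the modern equivalent is simply:

import logging
logging.getLogger("dagobah").addHandler(logging.NullHandler())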
Example #16
File: config.py Project: Extensis/Burton
    def readfp(self, fp, platform):
        """The readfp method reads configuration data from a file or file-like
        object for a specific platform.
        """
        parser = ConfigParser.SafeConfigParser(self._config_file_defaults)
        parser.readfp(fp)

        if not parser.has_section(platform):
            logger = logging.getLogger(burton.logger_name)
            logger.error("Unable to parse config file")
            logger.error("Platform " + str(platform) + " does not exist")
            return False

        sections = [ parser.defaults(), dict(parser.items(platform)) ]

        for section in sections:
            for key in section:
                if key not in self._config_file_defaults:
                    logger = logging.getLogger(burton.logger_name)
                    logger.error("Unable to parse config file")
                    logger.error(key + " is not a valid option")
                    return False
                else:
                    value = section[key]
                    if value is not None and value != "None" and len(value) > 0:
                        self.set(key, self._parse_value(value))

        return self._validate_config_file(self._config_file_defaults)
Example #17
    def propertyChanged(self, property_name, old_value, new_value):
        """
        Overriding BaseComponents.BlissWidget (propertyChanged object) 
        run method.
        """
        if property_name == 'beamline_setup':
            self.beamline_setup_hwobj = self.getHardwareObject(new_value)

            if self.beamline_setup_hwobj:
                self.diffractometer_hwobj = self.beamline_setup_hwobj.diffractometer_hwobj
                
                if self.diffractometer_hwobj:
                    self.diffractometer_hwobj.connect("minidiffStateChanged",
                                                      self.diffractometer_changed)
                    
                self.shape_history = self.beamline_setup_hwobj.shape_history_hwobj

                if self.queue_model_hwobj:
                    self.beamline_setup_hwobj.queue_model_hwobj = self.queue_model_hwobj
                    self.task_tool_box_widget.set_beamline_setup(self.beamline_setup_hwobj)
            else:
                logging.getLogger('user_level_log').error('Could not load beamline setup, '+\
                                                          'check configuration!')
        elif property_name == 'queue_model':
            self.queue_model_hwobj = self.getHardwareObject(new_value)

            if self.beamline_setup_hwobj:
                self.beamline_setup_hwobj.queue_model_hwobj = self.queue_model_hwobj
                self.task_tool_box_widget.set_beamline_setup(self.beamline_setup_hwobj)
Example #18
File: build_mozc.py Project: faxinba/mozc
def main():
  logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
  logging.getLogger().addFilter(ColoredLoggingFilter())

  if len(sys.argv) < 2:
    ShowHelpAndExit()

  # Move to the Mozc root source directory only once since os.chdir
  # affects functions in os.path and that causes troublesome errors.
  os.chdir(MOZC_ROOT)

  command = sys.argv[1]
  args = sys.argv[2:]

  if command == 'gyp':
    (cmd_opts, cmd_args) = ParseGypOptions(args)
    GypMain(cmd_opts, cmd_args)
  elif command == 'build':
    (cmd_opts, cmd_args) = ParseBuildOptions(args)
    BuildMain(cmd_opts, cmd_args)
  elif command == 'runtests':
    (cmd_opts, cmd_args) = ParseRunTestsOptions(args)
    RunTestsMain(cmd_opts, cmd_args)
  elif command == 'clean':
    (cmd_opts, cmd_args) = ParseCleanOptions(args)
    CleanMain(cmd_opts, cmd_args)
  else:
    logging.error('Unknown command: %s', command)
    ShowHelpAndExit()
Example #19
    def setup(self, verbose_level, error_level, logdir):
        self.presetup()
        logger_dnf = logging.getLogger("dnf")

        # setup file logger
        logfile = os.path.join(logdir, dnf.const.LOG)
        handler = _create_filehandler(logfile)
        logger_dnf.addHandler(handler)
        # temporarily turn off stdout/stderr handlers:
        self.stdout_handler.setLevel(SUPERCRITICAL)
        self.stderr_handler.setLevel(SUPERCRITICAL)
        # put the marker in the file now:
        _paint_mark(logger_dnf)
        # bring std handlers to the preferred level
        self.stdout_handler.setLevel(verbose_level)
        self.stderr_handler.setLevel(error_level)

        # setup Python warnings
        logging.captureWarnings(True)
        logger_warnings = logging.getLogger("py.warnings")
        logger_warnings.addHandler(self.stderr_handler)
        logger_warnings.addHandler(handler)

        # setup RPM callbacks logger
        logger_rpm = logging.getLogger("dnf.rpm")
        logger_rpm.propagate = False
        logger_rpm.setLevel(SUBDEBUG)
        logfile = os.path.join(logdir, dnf.const.LOG_RPM)
        handler = _create_filehandler(logfile)
        logger_rpm.addHandler(handler)
        _paint_mark(logger_rpm)
Example #20
    def new_html(self, html_path, image_prefix, run_number):
        logging.getLogger().debug('got a new html page: %s, prefix: %r, run number: %s', html_path, image_prefix, run_number)

        # prepend the time and date to the path we just got so
        # the history is more readable
        time_string = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())

        index = (time_string, str(image_prefix), str(run_number))
        self.history_map[index] = html_path
        # synchronize the history prop
        if self.current_user is not None:
            whole_history = pickle.loads(self.getProperty('history').getValue())
            whole_history[self.current_user] = self.history_map
            self.getProperty('history').setValue(pickle.dumps(whole_history))
                
        self.history.insertRows(self.history.numRows())
        logging.debug('numRows() is %d', self.history.numRows())
        rows = self.history.numRows() - 1

        self.history.setText(rows, 0, QString(time_string))
        self.history.setText(rows, 1, QString(str(image_prefix)))
        self.history.setText(rows, 2, QString(str(run_number)))

        logging.debug('numRows() is %d', self.history.numRows())

        self.load_file(html_path)
Example #21
def addFeaturesAuthorFreqInReview(ctx, outFeaturesMaps):
    logging.getLogger("Features").info( "author frequency" )
    multiCommentKey="M-C"
    isAuthorKey="I-A"
    reviewStarsKey="R-S"
    reviewStarsDeviationKey = "R-SD"
    for itrComment, (reviewId,author) in enumerate( ctx.mAuthorReviewPerComment ):
        if(ctx.mAuthorFreqPerReview[reviewId][author]>5):
            outFeaturesMaps[ itrComment ][multiCommentKey]=2
        elif(ctx.mAuthorFreqPerReview[reviewId][author]>1):
            outFeaturesMaps[ itrComment ][multiCommentKey]=1
        else:
            outFeaturesMaps[ itrComment ][multiCommentKey]=0
        if(ctx.mReviewAuthorMap[reviewId]==author):
            outFeaturesMaps[ itrComment ][isAuthorKey]=1
        else:
            outFeaturesMaps[ itrComment ][isAuthorKey]=0
            
        outFeaturesMaps[ itrComment ][reviewStarsKey]=float(ctx.mReviewStarMap[reviewId])
        
        if(ctx.mReviewStarMap[reviewId]<ctx.productAvgStars):
            outFeaturesMaps[ itrComment ][reviewStarsDeviationKey]=-1
        elif(ctx.mReviewStarMap[reviewId]>ctx.productAvgStars):
            outFeaturesMaps[ itrComment ][reviewStarsDeviationKey]=1
        else:
            outFeaturesMaps[ itrComment ][reviewStarsDeviationKey]=0
Example #22
def main(argv):
  """Runs the development application server."""
  args, option_dict = ParseArguments(argv)

  if len(args) != 1:
    print >>sys.stderr, 'Invalid arguments'
    PrintUsageExit(1)

  root_path = args[0]

  if '_DEFAULT_ENV_AUTH_DOMAIN' in option_dict:
    auth_domain = option_dict['_DEFAULT_ENV_AUTH_DOMAIN']
    dev_appserver.DEFAULT_ENV['AUTH_DOMAIN'] = auth_domain
  if '_ENABLE_LOGGING' in option_dict:
    enable_logging = option_dict['_ENABLE_LOGGING']
    dev_appserver.HardenedModulesHook.ENABLE_LOGGING = enable_logging

  log_level = option_dict[ARG_LOG_LEVEL]

  option_dict['root_path'] = os.path.realpath(root_path)

  logging.getLogger().setLevel(log_level)

  default_partition = option_dict[ARG_DEFAULT_PARTITION]
  appinfo = None
  try:
    appinfo, _, _ = dev_appserver.LoadAppConfig(
        root_path, {}, default_partition=default_partition)
  except yaml_errors.EventListenerError, e:
    logging.error('Fatal error when loading application configuration:\n%s', e)
    return 1
Example #23
 def isLocalProxy(self):
     # I'm a server if:
     # order_in_proxy is set (ie I have chance to become a server)
     # fk_use_as_proxy is equal to my id (ie the proxy server is me)
     result = (self.order_in_proxy != None)
     logging.getLogger().debug("isLocalProxy(#%s): %s" % (self.getId(), result))
     return result
Example #24
 def isStateStopped(self):
     if self.getCommandStatut() == 'stop': # 'stop' deprecated a while ago, but may still be present, so we take the opportunity to fix it here
         logging.getLogger().warn("Detected command #%s in deprecated state 'stop', setting it to 'stopped'" % self.getId())
         self.setStateStopped()
     result = (self.getCommandStatut() == 'stop' or self.getCommandStatut() == 'stopped')
     logging.getLogger().debug("isStateStopped(#%s): %s" % (self.getId(), result))
     return result
Example #25
 def isProxyClient(self):
     # I'm a client if:
     # fk_use_as_proxy is set (ie I found a proxy server)
     # fk_use_as_proxy is not equal to my id (ie the proxy server is not me)
     result = (self.fk_use_as_proxy != None and self.fk_use_as_proxy != self.id)
     logging.getLogger().debug("isProxyClient(#%s): %s" % (self.getId(), result))
     return result
Example #26
    def test_notifier(self):
        self.config(notification_driver=['log'])

        transport = _FakeTransport(self.conf)

        notifier = messaging.Notifier(transport, 'test.localhost')

        message_id = uuid.uuid4()
        self.mox.StubOutWithMock(uuid, 'uuid4')
        uuid.uuid4().AndReturn(message_id)

        timeutils.set_time_override()

        message = {
            'message_id': str(message_id),
            'publisher_id': 'test.localhost',
            'event_type': 'test.notify',
            'priority': 'INFO',
            'payload': 'bar',
            'timestamp': str(timeutils.utcnow.override_time),
        }

        logger = self.mox.CreateMockAnything()

        self.mox.StubOutWithMock(logging, 'getLogger')
        logging.getLogger('oslo.messaging.notification.test.notify').\
            AndReturn(logger)

        logger.info(jsonutils.dumps(message))

        self.mox.ReplayAll()

        notifier.info({}, 'test.notify', 'bar')
Example #27
    def __init__(self):
        from django.conf import settings
        from django.core.exceptions import ImproperlyConfigured
        import logging

        if hasattr(settings, "LOGGING"):
            for module, properties in settings.LOGGING.items():
                logger = logging.getLogger(module)

                if "level" in properties:
                    logger.setLevel(properties["level"])
                elif hasattr(settings, "GLOBAL_LOG_LEVEL") and "handlers" not in properties:
                    logger.setLevel(settings.GLOBAL_LOG_LEVEL)
                else:
                    raise ImproperlyConfigured(
                        "A logger in settings.LOGGING doesn't have its log level set. "
                        + "Either set a level on that logger, or set GLOBAL_LOG_LEVEL."
                    )

                handlers = []
                if "handler" in properties:
                    handlers = [properties["handler"]]
                elif "handlers" in properties:
                    handlers = properties["handlers"]
                elif hasattr(settings, "GLOBAL_LOG_HANDLERS"):
                    handlers = settings.GLOBAL_LOG_HANDLERS

                self.add_handlers(logger, handlers)

        elif hasattr(settings, "GLOBAL_LOG_LEVEL") and hasattr(settings, "GLOBAL_LOG_HANDLERS"):
            logger = logging.getLogger("")
            logger.setLevel(settings.GLOBAL_LOG_LEVEL)
            handlers = settings.GLOBAL_LOG_HANDLERS

            self.add_handlers(logger, handlers)
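
A hypothetical settings.py fragment this initializer would accept (every key below is read by the code above; the module name and handler choices are illustrative):

import logging

GLOBAL_LOG_LEVEL = logging.INFO
GLOBAL_LOG_HANDLERS = [logging.StreamHandler()]

LOGGING = {
    'myapp.views': {
        'level': logging.DEBUG,
        'handler': logging.StreamHandler(),
    },
}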
Example #28
 def setUp(self):
     yield super(TestServerScan, self).setUp()
     yield self.get_client()
     yield self.do_create_lots_of_files('_pre')
     self.handler = handler = MementoHandler()
     handler.setLevel(logging.DEBUG)
     logging.getLogger('fsyncsrvr.SyncDaemon').addHandler(handler)
Example #29
def configureBasicLogger(logDir,logName=""):
    # start logger:
    fileLogPath = "sim_" + strftime("%H-%M", gmtime()) + ".log" if len(logName) == 0 else logName
    fileLogPath = os.path.join(logDir, fileLogPath)
    if not os.path.exists(logDir):
        os.makedirs(logDir)
    #     flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY
    #     os.open(fileLogPath, flags)
    #     os.close(fileLogPath)
    # set up logging to file - see previous section for more details
    logging.basicConfig(level=logging.INFO,
                        format="%(asctime)s [%(processName)-12.12s] [%(levelname)-5.5s]  %(message)s",
                        datefmt='%m-%d %H:%M:%S',
                        filename=fileLogPath,
                        filemode='w')
    # define a Handler which writes INFO messages or higher to the sys.stderr
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    # set a format which is simpler for console use
    formatter = logging.Formatter('%(asctime)s [%(processName)-12.12s] [%(levelname)-5.5s] %(message)s',
                                  datefmt='%m-%d %H:%M:%S')
    # tell the handler to use this format
    console.setFormatter(formatter)
    # add the handler to the root logger
    logging.getLogger().addHandler(console)
Example #30
 def setLogBase(self, logBase):
     self.logBase = logBase
     if logBase:
         self.stdoutLog = logging.getLogger(logBase + '.stdout')
         self.stderrLog = logging.getLogger(logBase + '.stderr')
     else:
         self.stdoutLog = self.stderrLog = None
Example #31
"""

from __future__ import unicode_literals, division, absolute_import
from collections import MutableMapping, defaultdict
from datetime import datetime
import logging
import pickle
from sqlalchemy import Column, Integer, String, DateTime, Unicode, select, Index
from flexget import db_schema
from flexget.event import event
from flexget.manager import Session
from flexget.utils import json
from flexget.utils.database import json_synonym
from flexget.utils.sqlalchemy_utils import table_schema, create_index, table_add_column

log = logging.getLogger('util.simple_persistence')
Base = db_schema.versioned_base('simple_persistence', 4)

# Used to signify that a given key should be deleted from simple persistence on flush
DELETE = object()


@db_schema.upgrade('simple_persistence')
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table, better attempt in ver 1
        ver = 0
    if ver == 0:
        # Remove any values that are not loadable.
        table = table_schema('simple_persistence', session)
        for row in session.execute(
Example #32
import logging

import yaml

from . import GECKO
from .actions import render_actions_json
from .create import create_tasks
from .generator import TaskGraphGenerator
from .parameters import Parameters, get_version, get_app_version
from .taskgraph import TaskGraph
from .try_option_syntax import parse_message
from .util.schema import validate_schema, Schema
from taskgraph.util.hg import get_hg_revision_branch
from taskgraph.util.partials import populate_release_history
from taskgraph.util.yaml import load_yaml
from voluptuous import Required, Optional

logger = logging.getLogger(__name__)

ARTIFACTS_DIR = 'artifacts'

# For each project, this gives a set of parameters specific to the project.
# See `taskcluster/docs/parameters.rst` for information on parameters.
PER_PROJECT_PARAMETERS = {
    'try': {
        'target_tasks_method': 'try_tasks',
    },
    'try-comm-central': {
        'target_tasks_method': 'try_tasks',
    },
    'ash': {
        'target_tasks_method': 'ash_tasks',
        'optimize_target_tasks': True,
Example #33
SIDES = {
    SLIDE_1_VALUE: 1,
    SLIDE_2_VALUE: 2,
    SLIDE_3_VALUE: 3,
    SLIDE_4_VALUE: 4,
    SLIDE_5_VALUE: 5,
    SLIDE_6_VALUE: 6,
    KNOCK_1_VALUE: 1,
    KNOCK_2_VALUE: 2,
    KNOCK_3_VALUE: 3,
    KNOCK_4_VALUE: 4,
    KNOCK_5_VALUE: 5,
    KNOCK_6_VALUE: 6,
}

_LOGGER = logging.getLogger(__name__)


def extend_dict(dictionary, value, ranges):
    """Extend a dict."""
    for item in ranges:
        dictionary[item] = value


extend_dict(MOVEMENT_TYPE, FLIP, range(FLIP_BEGIN, FLIP_END))


class CubeAQGL01(XiaomiCustomDevice):
    """Aqara magic cube device."""

    def __init__(self, *args, **kwargs):
Example #34
 def putQ(cls, queue, f, logQ, configurer):
     configurer(logQ)
     logger = logging.getLogger()
     logger.info('Queuing: %s' % f)
     queue.put(f)
Example #35
 def worker_configure(cls, queue):
     h = QueueHandler(queue)
     root = logging.getLogger()
     root.addHandler(h)
     root.setLevel(logging.DEBUG)
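
Examples #34 and #35 are the two halves of the usual multiprocessing logging pattern: workers push records onto a shared queue and a single listener drains it. A minimal end-to-end sketch using the standard library's QueueHandler/QueueListener (assuming the custom QueueHandler above plays the same role):

import logging
import logging.handlers
import multiprocessing

def worker(queue):
    # Equivalent of worker_configure(): route all records to the queue.
    root = logging.getLogger()
    root.addHandler(logging.handlers.QueueHandler(queue))
    root.setLevel(logging.DEBUG)
    logging.getLogger(__name__).info('queued from a child process')

if __name__ == '__main__':
    queue = multiprocessing.Queue()
    listener = logging.handlers.QueueListener(queue, logging.StreamHandler())
    listener.start()
    proc = multiprocessing.Process(target=worker, args=(queue,))
    proc.start()
    proc.join()
    listener.stop()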
Example #36
                        "--dict",
                        action="store",
                        dest="dict",
                        default="",
                        help='dictionary')
    parser.add_argument("-p",
                        "--punc",
                        action="store",
                        dest="punc",
                        default="puncs.list",
                        help='punctuation lists')

    args = parser.parse_args()
    options = vars(args)

    logger = logging.getLogger()
    formatter = logging.Formatter(
        '[%(asctime)s][*%(levelname)s*][%(filename)s:%(lineno)d|%(funcName)s] - %(message)s',
        '%Y%m%d-%H:%M:%S')
    file_handler = logging.FileHandler('LOG-selectWord.txt',
                                       'w',
                                       encoding='utf-8')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)
    logger.setLevel(logging.INFO)

    allStartTP = time.time()
Example #37
class Application(tornado.web.Application):
    def __init__(self):
        app_settings = {
            'default_handler_args': dict(status_code=404),
            'static_path': os.path.join(os.path.dirname(__file__), 'static')
        }

        app_handlers = [
            (r'^/$', WritingHandler),
            (r'^/writing$', WritingHandler),
            (r'^/projects$', ProjectHandler),
            (r'^/book$', BookHandler),
        ]

        self.logger_client = LoggerClient()
        super(Application, self).__init__(app_handlers, **app_settings)


if __name__ == "__main__":
    port = 8888
    address = '0.0.0.0'
    logging_level = logging.getLevelName('INFO')
    logging.getLogger().setLevel(logging_level)
    logging.info('starting foo_web_ui on %s:%d', address, port)

    http_server = tornado.httpserver.HTTPServer(request_callback=Application(),
                                                xheaders=True)
    http_server.listen(port, address=address)

    tornado.ioloop.IOLoop.instance().start()
Example #38
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""
.. module:: TODO
   :platform: Unix
   :synopsis: TODO.

.. moduleauthor:: Aljosha Friemann <*****@*****.**>

"""

import click, os, subprocess, logging, re

CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])

log = logging.getLogger('builder')

def tokenize(string):
    return re.split(r'\s+', string)

def valid_docker_namespace(string):
    # [a-z0-9-_]{4,30} docker v1.5.0
    return string.lstrip('*').split('/')[-1].strip().lower()[:30]

def get_git_root(directory):
    cwd = os.getcwd()
    os.chdir(directory)

    proc = subprocess.Popen('git rev-parse --show-toplevel', shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    out, err = proc.communicate()
Example #39
    pipeline_options = PipelineOptions(pipeline_args)
    pipeline_options.view_as(
        SetupOptions).save_main_session = save_main_session
    with beam.Pipeline(options=pipeline_options) as p:

        # Read the text file[pattern] into a PCollection.
        lines = p | ReadFromText(known_args.input)

        # Count the occurrences of each word.
        counts = (lines
                  | 'Split' >> (beam.FlatMap(lambda x: re.findall(
                      r'[A-Za-z\']+', x)).with_output_types(str))
                  | 'PairWithOne' >> beam.Map(lambda x: (x, 1))
                  | 'GroupAndSum' >> beam.CombinePerKey(sum))

        # Format the counts into a PCollection of strings.
        def format_result(word_count):
            (word, count) = word_count
            return '%s: %s' % (word, count)

        output = counts | 'Format' >> beam.Map(format_result)

        # Write the output using a "Write" transform that has side effects.
        # pylint: disable=expression-not-assigned
        output | WriteToText(known_args.output)


if __name__ == '__main__':
    logging.getLogger().setLevel(logging.INFO)
    run()
Example #40
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""Import / Database Processing / Import repository-source hierarchies from scvs file """

import os
import sys
import csv
import time
import logging
LOG = logging.getLogger()
LOG.setLevel(logging.INFO)

from gramps.gen.errors import GrampsImportError
from gramps.gen.db import DbTxn
from gramps.gen.lib import Note, NoteType, Repository, RepoRef, RepositoryType, Source, Tag
# from gramps.gui.utils import ProgressMeter
# from gramps.gen.plug.utils import OpenFileOrStdin
from gramps.gen.config import config as configman

from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext

# LOG = logging.getLogger(".importSources")
from gramps.gen.utils.libformatting import ImportInfo
Example #41
import logging
from enum import Enum
from telegram_helper import send_message

logging.basicConfig()
logger = logging.getLogger('message_processor')


class Commands(Enum):
    echo = 1


def process_incoming_message(text):
    try:
        if text.startswith('/' + Commands.echo.name):
            payload = text.split(Commands.echo.name)[1].strip()
            send_message(payload)

    except KeyError:
        logger.error('Command Not Found')
Example #42
 def log(self):
     # Guard on a backing attribute; checking `self.log` here would
     # always be true, because that name refers to this method itself.
     if self._log:
         return self._log
     else:
         return logging.getLogger(self.name)
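
The same lazy-logger idea reads more naturally as a property; a sketch with illustrative class and attribute names:

import logging

class Worker(object):
    def __init__(self, name, log=None):
        self.name = name
        self._log = log  # optionally injected logger

    @property
    def log(self):
        # Fall back to a logger named after the instance when none was injected.
        return self._log or logging.getLogger(self.name)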
Example #43
import hashlib
import io
import itertools
import json
import logging
import os
import tarfile
import tempfile

from os.path import exists, join, normpath, relpath, splitext

import nbformat
from ipython_genutils import text

log = logging.getLogger('rsconnect_jupyter')
log.setLevel(logging.DEBUG)


def make_source_manifest(entrypoint, environment, appmode):
    package_manager = environment['package_manager']

    manifest = {
        "version": 1,
        "metadata": {
            "appmode": appmode,
            "entrypoint": entrypoint
        },
        "locale": environment['locale'],
        "python": {
            "version": environment['python'],
Example #44
    def __init__(self, *args, **kwargs):
        logger = logging.getLogger('chardet.charsetprober')
        logger.setLevel(logging.INFO)
        super(CitoSpider, self).__init__(*args, **kwargs)

        # Use values of 'urrlist' or 'seedpath' key to set start_urls spider attribute
        if kwargs.get('urllist'):
            self.start_urls = kwargs['urllist']
        elif kwargs.get('seedpath'):
            with open(kwargs['seedpath'], "r") as fd:
                self.start_urls = [
                    x.strip('\n') for x in fd.readlines()
                ]  # taken from https://stackoverflow.com/questions/3277503/how-do-i-read-a-file-line-by-line-into-a-list
        else:
            print 'Seed URLS missing in configuration file!\n' \
                  'Please set a value for either the "seedpath" or "urllist" key in configuration file.\n' \
                  'Note: if value for "urllist" is not empty, it will have precedence over value for "seedpath".'
            os._exit(1)
        print "start_urls:", self.start_urls

        # Use value of 'csvpath' to set crawled_urls_dict spider attribute
        if kwargs.get('csvpath'):
            self.csvpath = kwargs['csvpath']
        else:
            self.csvpath = os.path.join(
                main_dir, 'crawledPages.csv')  # set a default value
        print "csvpath is {0}".format(self.csvpath)

        self.crawled_urls_dict = {}  # set a default value
        if os.path.isfile(self.csvpath):
            with open(self.csvpath, "r") as f:
                reader = csv.DictReader(f, fieldnames=('url', 'date'))
                try:
                    for row in reader:
                        self.crawled_urls_dict[row['url']] = row['date']
                except csv.Error as e:
                    print "Something went wrong trying to read csv file"
                    sys.exit('file %s, line %d: %s' %
                             (kwargs['csvpath'], reader.line_num,
                              e))  # https://docs.python.org/2/library/csv.html
            print "\nLoaded crawled_urls_dict from {0}".format(self.csvpath)
        else:  # file csvpath may not exist when script is run for the first time
            print "{0} does not exist. crawled_urls_dict = {1}".format(
                self.csvpath, self.crawled_urls_dict)

        self.maxhours = 720  # set a default value
        if kwargs.get('maxhours'):
            self.maxhours = kwargs[
                'maxhours']  # change default value for maxhours to the one passed in
        print "maxhours is:", self.maxhours

        # Use value of 'querypath' to set sparql_query spider attribute
        self.sparql_query = """CONSTRUCT { ?s ?p ?o . }
                               WHERE {
                                    ?s ?p ?o .
                                    FILTER regex(str(?p), "^http://purl.org/spar/cito/.*")
                                    }"""  # set a default value
        if kwargs.get('querypath'):
            try:
                with open(kwargs['querypath'], "r") as fd:
                    self.sparql_query = fd.read()
            except Exception as e:
                print "\n"
                traceback.print_exc(file=sys.stdout)
                print "\n\n"
                info_str = u"Ooops: {0}: {1}".format(e.__class__.__name__,
                                                     str(e))
                print info_str, "\n...shutting down."
                os._exit(1)
        print "sparql_query is:", self.sparql_query

        # Use value of 'allowedlinks' to set allowedlinks spider attribute
        self.allowedlinks = []  # set a default value
        if kwargs.get('allowedlinks'):
            self.allowedlinks = kwargs['allowedlinks']
        print "allowedlinks is:", self.allowedlinks

        # Use value of 'prioritylinks' to set prioritylinks spider attribute
        self.prioritylinks = []  # set a default value
        if kwargs.get('prioritylinks'):
            self.prioritylinks = kwargs['prioritylinks']
        print "prioritylinks is:", self.prioritylinks

        # Use value of 'samedomain' to set samedomain spider attribute
        self.samedomain = False  # set a default value
        if kwargs.get(
                'samedomain'
        ):  # i.e., samedomain key is found and value associated to key evaluates to True
            self.samedomain = True
        print "samedomain is:", self.samedomain

        # Use value of 'visitonly' to set visitonly spider attribute
        self.visitonly = False  # set a default value
        if kwargs.get(
                'visitonly'
        ):  # i.e., visitonly key is found and value associated to key evaluates to True
            self.visitonly = True
        print "visitonly is:", self.visitonly

        # Use value of 'debug' to set debug spider attribute
        self.debug = False  # set a default value
        if kwargs.get(
                'debug'
        ):  # i.e., debug key is found and value associated to key evaluates to True
            self.debug = True
        print "debug is:", self.debug
Example #45
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

from .mininode import *
from .blockstore import BlockStore, TxStore
from .util import p2p_port

import logging

logger = logging.getLogger("TestFramework.comptool")

'''
This is a tool for comparing two or more bitcoinds to each other
using a script provided.

To use, create a class that implements get_tests(), and pass it in
as the test generator to TestManager.  get_tests() should be a python
generator that returns TestInstance objects.  See below for definition.
'''

# TestNode behaves as follows:
# Configure with a BlockStore and TxStore
# on_inv: log the message but don't request
# on_headers: log the chain tip
# on_pong: update ping response map (for synchronization)
# on_getheaders: provide headers via BlockStore
# on_getdata: provide blocks via BlockStore

global mininode_lock
Example #46
class Chain(object):
    """
    A Chain is a combination of one or more VM classes.  Each VM is associated
    with a range of blocks.  The Chain class acts as a wrapper around these other
    VM classes, delegating operations to the appropriate VM depending on the
    current block number.
    """
    logger = logging.getLogger("evm.chain.chain.Chain")
    header = None
    network_id = None
    vms_by_range = None

    def __init__(self, chaindb, header=None):
        if not self.vms_by_range:
            raise ValueError(
                "The Chain class cannot be instantiated with an empty `vms_by_range`"
            )

        self.chaindb = chaindb
        self.header = header
        if self.header is None:
            self.header = self.create_header_from_parent(self.get_canonical_head())

    @classmethod
    def configure(cls, name, vm_configuration, **overrides):
        if 'vms_by_range' in overrides:
            raise ValueError("Cannot override vms_by_range.")

        for key in overrides:
            if not hasattr(cls, key):
                raise TypeError(
                    "The Chain.configure cannot set attributes that are not "
                    "already present on the base class.  The attribute `{0}` was "
                    "not found on the base class `{1}`".format(key, cls)
                )

        # Organize the Chain classes by their starting blocks.
        overrides['vms_by_range'] = generate_vms_by_range(vm_configuration)

        return type(name, (cls,), overrides)

    #
    # Convenience and Helpers
    #
    def get_block(self):
        """
        Passthrough helper to the current VM class.
        """
        return self.get_vm().block

    def create_transaction(self, *args, **kwargs):
        """
        Passthrough helper to the current VM class.
        """
        return self.get_vm().create_transaction(*args, **kwargs)

    def create_unsigned_transaction(self, *args, **kwargs):
        """
        Passthrough helper to the current VM class.
        """
        return self.get_vm().create_unsigned_transaction(*args, **kwargs)

    def create_header_from_parent(self, parent_header, **header_params):
        """
        Passthrough helper to the VM class of the block descending from the
        given header.
        """
        return self.get_vm_class_for_block_number(
            block_number=parent_header.block_number + 1,
        ).create_header_from_parent(parent_header, **header_params)

    #
    # Chain Operations
    #
    def get_vm_class_for_block_number(self, block_number):
        """
        Return the vm class for the given block number.
        """
        validate_block_number(block_number)
        for n in reversed(self.vms_by_range.keys()):
            if block_number >= n:
                return self.vms_by_range[n]
        else:
            raise VMNotFound("No vm available for block #{0}".format(block_number))

    def get_vm(self, header=None):
        """
        Return the vm instance for the given block number.
        """
        if header is None:
            header = self.header

        vm_class = self.get_vm_class_for_block_number(header.block_number)
        return vm_class(header=header, chaindb=self.chaindb)

    #
    # Header/Block Retrieval
    #
    def get_block_header_by_hash(self, block_hash):
        """
        Returns the requested block header as specified by block hash.

        Raises BlockNotFound if there's no block header with the given hash in the db.
        """
        validate_word(block_hash, title="Block Hash")
        return self.chaindb.get_block_header_by_hash(block_hash)

    def get_canonical_head(self):
        """
        Returns the block header at the canonical chain head.

        Raises CanonicalHeadNotFound if there's no head defined for the canonical chain.
        """
        return self.chaindb.get_canonical_head()

    def get_canonical_block_by_number(self, block_number):
        """
        Returns the block with the given number in the canonical chain.

        Raises BlockNotFound if there's no block with the given number in the
        canonical chain.
        """
        validate_uint256(block_number, title="Block Number")
        return self.get_block_by_hash(self.chaindb.lookup_block_hash(block_number))

    def get_block_by_hash(self, block_hash):
        """
        Returns the requested block as specified by block hash.
        """
        validate_word(block_hash, title="Block Hash")
        block_header = self.get_block_header_by_hash(block_hash)
        vm = self.get_vm(block_header)
        return vm.get_block_by_header(block_header)

    #
    # Chain Initialization
    #
    @classmethod
    def from_genesis(cls,
                     chaindb,
                     genesis_params,
                     genesis_state=None):
        """
        Initialize the Chain from a genesis state.
        """
        state_db = chaindb.get_state_db(BLANK_ROOT_HASH, read_only=False)

        if genesis_state is None:
            genesis_state = {}

        for account, account_data in genesis_state.items():
            state_db.set_balance(account, account_data['balance'])
            state_db.set_nonce(account, account_data['nonce'])
            state_db.set_code(account, account_data['code'])

            for slot, value in account_data['storage'].items():
                state_db.set_storage(account, slot, value)

        if 'state_root' not in genesis_params:
            # If the genesis state_root was not specified, use the value
            # computed from the initialized state database.
            genesis_params = assoc(genesis_params, 'state_root', state_db.root_hash)
        elif genesis_params['state_root'] != state_db.root_hash:
            # If the genesis state_root was specified, validate that it matches
            # the computed state from the initialized state database.
            raise ValidationError(
                "The provided genesis state root does not match the computed "
                "genesis state root.  Got {0}.  Expected {1}".format(
                    state_db.root_hash,
                    genesis_params['state_root'],
                )
            )

        genesis_header = BlockHeader(**genesis_params)
        genesis_chain = cls(chaindb, genesis_header)
        chaindb.persist_block_to_db(genesis_chain.get_block())
        return cls.from_genesis_header(chaindb, genesis_header)

    @classmethod
    def from_genesis_header(cls, chaindb, genesis_header):
        chaindb.persist_header_to_db(genesis_header)
        return cls(chaindb)

    #
    # Mining and Execution API
    #
    def apply_transaction(self, transaction):
        """
        Apply the transaction to the current head block of the Chain.
        """
        vm = self.get_vm()
        return vm.apply_transaction(transaction)

    def import_block(self, block, perform_validation=True):
        """
        Import a complete block.
        """
        if block.number > self.header.block_number:
            raise ValidationError(
                "Attempt to import block #{0}.  Cannot import block with number "
                "greater than current block #{1}.".format(
                    block.number,
                    self.header.block_number,
                )
            )

        parent_chain = self.get_chain_at_block_parent(block)
        imported_block = parent_chain.get_vm().import_block(block)

        # Validate the imported block.
        if perform_validation:
            ensure_imported_block_unchanged(imported_block, block)
            self.validate_block(imported_block)

        self.chaindb.persist_block_to_db(imported_block)
        self.header = self.create_header_from_parent(self.get_canonical_head())
        self.logger.debug(
            'IMPORTED_BLOCK: number %s | hash %s',
            imported_block.number,
            encode_hex(imported_block.hash),
        )
        return imported_block

    def mine_block(self, *args, **kwargs):
        """
        Mines the current block.
        """
        mined_block = self.get_vm().mine_block(*args, **kwargs)

        self.validate_block(mined_block)

        self.chaindb.persist_block_to_db(mined_block)
        self.header = self.create_header_from_parent(self.get_canonical_head())
        return mined_block

    def get_chain_at_block_parent(self, block):
        """
        Returns a `Chain` instance with the given block's parent at the chain head.
        """
        try:
            parent_header = self.get_block_header_by_hash(block.header.parent_hash)
        except BlockNotFound:
            raise ValidationError("Parent ({0}) of block {1} not found".format(
                block.header.parent_hash,
                block.header.hash
            ))

        init_header = self.create_header_from_parent(parent_header)
        return type(self)(self.chaindb, init_header)

    @to_tuple
    def get_ancestors(self, limit):
        lower_limit = max(self.header.block_number - limit, 0)
        for n in reversed(range(lower_limit, self.header.block_number)):
            yield self.get_canonical_block_by_number(n)

    #
    # Validation API
    #
    def validate_block(self, block):
        """
        Performs validation on a block that is either being mined or imported.

        Since block validation (specifically the uncle validation) must have
        access to the ancestor blocks, this validation must occur at the
        Chain level.

        TODO: move the `seal` validation down into the vm.
        """
        self.validate_seal(block.header)
        self.validate_uncles(block)

    def validate_uncles(self, block):
        recent_ancestors = dict(
            (ancestor.hash, ancestor)
            for ancestor in self.get_ancestors(MAX_UNCLE_DEPTH + 1)
        )
        recent_uncles = []
        for ancestor in recent_ancestors.values():
            recent_uncles.extend([uncle.hash for uncle in ancestor.uncles])
        recent_ancestors[block.hash] = block
        recent_uncles.append(block.hash)

        for uncle in block.uncles:
            if uncle.hash in recent_uncles:
                raise ValidationError(
                    "Duplicate uncle: {0}".format(encode_hex(uncle.hash)))
            recent_uncles.append(uncle.hash)

            if uncle.hash in recent_ancestors:
                raise ValidationError(
                    "Uncle {0} cannot be an ancestor of {1}".format(
                        encode_hex(uncle.hash), encode_hex(block.hash)))

            if uncle.parent_hash not in recent_ancestors or (
               uncle.parent_hash == block.header.parent_hash):
                raise ValidationError(
                    "Uncle's parent {0} is not an ancestor of {1}".format(
                        encode_hex(uncle.parent_hash), encode_hex(block.hash)))

            self.validate_seal(uncle)

    def validate_seal(self, header):
        check_pow(
            header.block_number, header.mining_hash,
            header.mix_hash, header.nonce, header.difficulty)
Example #47
import hashlib
import logging
from logging.handlers import TimedRotatingFileHandler
import os
import sys
import random
from exchange.poloniex import poloniexUtil
from exchange.okex import okexUtil
from exchange.bitfinex import bitfinexUtil
from exchange.huobi import huobiUtil
from datetime import datetime
import sqlite3
import math

SUPPORT_PAIR = 'ETC_USDT'
if 'pair' in os.environ:
    SUPPORT_PAIR = os.environ['pair']
logger = logging.getLogger("deal")
logger.setLevel(logging.DEBUG)
ch = TimedRotatingFileHandler('cal_' + SUPPORT_PAIR + '.log',
                              when='D',
                              interval=1,
                              backupCount=3)
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)

logger.info('BEGIN monitor {}'.format(SUPPORT_PAIR))
okexUtil = okexUtil(SUPPORT_PAIR)
poloniexUtil = poloniexUtil(SUPPORT_PAIR)
bitfinexUtil = bitfinexUtil(SUPPORT_PAIR)
huobiUtil = huobiUtil(SUPPORT_PAIR)
Example #48
import subprocess
#import CalendarCredentials
import logging
# Changes
# 1 Rearranged output of "python Settings.py"
# 09/08/2020 - Added daily alarm volumes & debug level
# 02/09/2020 - Added minimum volume level
# 02/11/2020 - Added proper volume percentage calls (used by web.py)
#              Added class functions for alarmFromPercent, alarmToPercent and remap
#              Should keep volume and volumepercent in step rounding permitting
# 03/11/2020 - Changed default volume and volumepercent to minimum
# 20/04/2021 - SetVolumePercent nolonger converts its parameter to a percentage as it already should be one
# 27/04/2021 - Got a better remap range to range routine
# 08/05/2021 - Fixed minor syntax errors. Added mqtt broker

log = logging.getLogger('root')

import threading
lock = threading.Lock()

# Radio stations we can play through mplayer
# http://www.radiofeeds.co.uk/bbcradio1.pls
# http://www.radiofeeds.co.uk/bbcradio2.pls
# From http://www.listenlive.eu/uk.html
# http://www.listenlive.eu/bbcradio2.m3u
# Ones From http://www.suppertime.co.uk/blogmywiki/2015/04/updated-list-of-bbc-network-radio-urls/
# dont work :(
#STATIONS = [
#   {'name':'BBC Radio 1', 'url':'http://open.live.bbc.co.uk/mediaselector/5/select/version/2.0/mediaset/http-icy-mp3-a/vpid/bbc_radio_one/format/pls.pls'},
#   {'name':'BBC Radio 2', 'url':'http://open.live.bbc.co.uk/mediaselector/5/select/version/2.0/mediaset/http-icy-mp3-a/vpid/bbc_radio_two/format/pls.pls'},
#   {'name':'BBC Radio London', 'url':'http://www.radiofeeds.co.uk/bbclondon.pls'},
Example #49
import logging
import time

import numpy as np
import tensorflow as tf  # requires TensorFlow 1.x (tf.placeholder / tf.summary)

start_time = time.time()

# set up logging
formatter = logging.Formatter('%(asctime)s : %(name)s :: %(levelname)s : %(message)s')
logger = logging.getLogger('main')
logger.setLevel(logging.DEBUG)
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
console_handler.setLevel(logging.DEBUG)
logger.debug('started')

random_seed = 47
np.random.seed(seed=random_seed)
raw_data = np.random.normal(10, 1, 100)
alpha = tf.constant(0.05)
beta = tf.Variable(1.0 - alpha)
current_value = tf.placeholder(tf.float32)
previous_average = tf.Variable(0.0)
update_average = alpha * current_value + tf.multiply(beta, previous_average)  # exponential moving average

average_history = tf.summary.scalar('running average', update_average)
value_history = tf.summary.scalar('incoming values', current_value)
merged = tf.summary.merge_all()
log_folder = './logs/'
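
The graph above implements an exponential moving average; a plain-Python sketch of the same update, for comparison:

alpha = 0.05
running = 0.0
for x in raw_data:  # raw_data as generated above
    running = alpha * x + (1 - alpha) * running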
Example #50
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA

import re, socket, urllib, urlparse, urllib2
from Cookie import SimpleCookie
import logging
from logging.handlers import *

__version__ = '0.1.1'
__author__ = 'Greg Albrecht <*****@*****.**>'

logger = logging.getLogger('weboutlook')
logger.setLevel(logging.INFO)
consolelogger = logging.StreamHandler()
consolelogger.setLevel(logging.INFO)
logger.addHandler(consolelogger)

socket.setdefaulttimeout(15)

class InvalidLogin(Exception):
    pass

class RetrievalError(Exception):
    pass

def create_opener(base_url, username, password):
    logger.debug(locals())
Example #51
import logging
import logging.handlers
import requests
import re
import math
import splunk.Intersplunk
import splunklib.client as client
from splunklib.searchcommands import Configuration, Option, StreamingCommand, validators
import os

LOG_ROTATION_LOCATION = os.environ[
    'SPLUNK_HOME'] + "/var/log/splunk/gmap_api.log"
LOG_ROTATION_BYTES = 1 * 1024 * 1024
LOG_ROTATION_LIMIT = 5

logger = logging.getLogger("geocoding")
logger.setLevel(logging.DEBUG)
handler = logging.handlers.RotatingFileHandler(LOG_ROTATION_LOCATION,
                                               maxBytes=LOG_ROTATION_BYTES,
                                               backupCount=LOG_ROTATION_LIMIT)
handler.setFormatter(
    logging.Formatter("[%(levelname)s] (%(threadName)-10s) %(message)s"))
logger.addHandler(handler)

URL_BASE = "https://maps.googleapis.com/maps/api/geocode/json"


@Configuration()
class geocodingCommand(StreamingCommand):
    threads = Option(require=False, default=8, validate=validators.Integer())
    null_value = Option(require=False, default="")
Example #52
import socket
import tqdm
import os
import sys
import pickle
import logging
from sys import platform
import time

logging.basicConfig(level=logging.WARNING)  # Global logging configuration
logger = logging.getLogger("UMT - Client Socket")  # Logger for this module
logger.setLevel(logging.INFO)  # INFO level for this module, overriding the global WARNING

# --- Server Network Information
HOST = "192.168.1.236"  # the ip address or hostname of the server, the receiver
PORT = 5001  # the port, let's use 5001
SEPARATOR = "<SEPARATOR>"
BUFFER_SIZE = 4096  # send 4096 bytes each time step

# --- Changing directory URL depending on the platform
if platform == 'linux' or platform == 'linux2':
    DETECTIONS = 'detections.ssg'
if platform == 'darwin':
    DETECTIONS = 'rpi-urban-mobility-tracker/umt/boundries.ssg'


# --- Attempts to connect to the server; if the connection fails it'll retry every
# --- 60 seconds until a successful connection is made.
def connectToServer(host, port):
    sent = False
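
The function body is cut off here; a minimal sketch of the retry loop the comment describes (everything beyond the 60-second retry noted above is an assumption):

def connect_to_server(host, port):
    # Hypothetical reconstruction: retry every 60 seconds until connect() succeeds.
    while True:
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.connect((host, port))
            logger.info("Connected to %s:%s", host, port)
            return sock
        except OSError as exc:
            logger.warning("Connection failed (%s), retrying in 60 seconds", exc)
            time.sleep(60)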
Example #53
import unittest
import logging
from webcamlib.LightSensor import LightSensor
from webcamlib.Config import Config  # assumed import path for Config within webcamlib
"""
 "unit" test (more of a functional test) to make sure the lightsensor works
"""


class TestLightSensorMethods(unittest.TestCase):
    def setUp(self):
        self.configFile = Config('tests/config-test-bh1750.json')

    def tearDown(self):
        self.configFile.dispose()

    def runTest(self):
        sensor = LightSensor(self.configFile.sensors.light)
        self.assertNotEqual(0, sensor.lightlevel)


if __name__ == '__main__':
    root_logger = logging.getLogger('')
    # Setup logging to the screen
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    # create formatter and add it to the handlers
    formatter = logging.Formatter(
        '[%(asctime)s] [%(name)-15.15s] [%(levelname)-7.7s] %(message)s')
    ch.setFormatter(formatter)
    # add the handlers to logger
    root_logger.addHandler(ch)
    unittest.main()
Example #54
    }
}
CONFIGURABLE_SETTINGS = DEFAULTS['viewer']
CONFIGURABLE_SETTINGS['use_24_hour_clock'] = DEFAULTS['main']['use_24_hour_clock']

PORT = int(getenv('PORT', 8080))
LISTEN = getenv('LISTEN', '127.0.0.1')

# Initiate logging
logging.basicConfig(level=logging.INFO,
                    format='%(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S')

# Silence urllib info messages ('Starting new HTTP connection')
# that are triggered by the remote url availability check in view_web
requests_log = logging.getLogger("requests")
requests_log.setLevel(logging.WARNING)

logging.debug('Starting viewer.py')


class ScreenlySettings(IterableUserDict):
    """Screenly OSE's Settings."""

    def __init__(self, *args, **kwargs):
        IterableUserDict.__init__(self, *args, **kwargs)
        self.home = getenv('HOME')
        self.conf_file = self.get_configfile()

        if not path.isfile(self.conf_file):
            logging.error('Config-file %s missing', self.conf_file)
Example #55
# a.error('d')
# a.critical('e')

'''
    Handler classes
        A handler object routes log output to a destination. Python ships more than a
        dozen useful handlers; the most common are:
        Type                        Description
        StreamHandler               writes to the console
        FileHandler                 writes to a file
        BaseRotatingHandler         rotates output across log files, e.g. one file per day, suffixed with the date
        SocketHandler               sends log records over a TCP connection
        DatagramHandler             sends log records over UDP
        SMTPHandler                 emails log records
'''
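
For the rotation row in the table, a minimal sketch using the stock TimedRotatingFileHandler (file name and retention are illustrative):

from logging.handlers import TimedRotatingFileHandler

# Start a new log file each day and keep the last 7 days.
rotating = TimedRotatingFileHandler('app.log', when='D', interval=1, backupCount=7)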
fh = logging.FileHandler('D:/test1.txt')              # create a handler object fh that writes to the given file
a = logging.getLogger("MyRoot")
fh.setLevel(logging.INFO)                             # set the level this handler will process
a.addHandler(fh)                                      # attach the handler to the logger
# a.info("this log message is a bit silly")

'''
    Common handler methods
        Handler.setLevel()                              set the minimum severity a message must have for this handler to process it
        Handler.setFormatter()                          attach a formatter object to this handler
        Handler.addFilter() and Handler.removeFilter()  add or remove a filter object on this handler
    Formatter class
        Field/attribute     Format                  Description
        asctime             %(asctime)s             when the log event occurred, human-readable, e.g. 2003-07-08 16:49:45,896
        created             %(created)f             when the log event occurred, as a timestamp (the value time.time() returned at the call)
        relativeCreated     %(relativeCreated)d     milliseconds between the logging module being loaded and the log event
        msecs               %(msecs)d               millisecond portion of the event time
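
A minimal sketch tying the handler methods and Formatter fields above together (logger name and format string are illustrative):

import logging

demo = logging.getLogger("demo")
ch = logging.StreamHandler()
ch.setLevel(logging.WARNING)  # Handler.setLevel(): drop records below WARNING
ch.setFormatter(logging.Formatter(
    '%(asctime)s +%(relativeCreated)dms %(levelname)s %(message)s'))
demo.addHandler(ch)
demo.warning('formatted with the fields described above')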
Example #56
import logging
import re
import lxml.etree

from virt_backup.exceptions import DiskNotFoundError

logger = logging.getLogger("virt_backup")


def get_domain_disks_of(dom_xml, *filter_dev):
    """
    Get disks from the domain xml

    :param dom_xml: domain xml to extract the disks from
    :param filter_dev: return only disks for which the dev name matches
                       with one in filter_dev. If no parameter, will return
                       every disks.
    """
    if isinstance(dom_xml, str):
        dom_xml = lxml.etree.fromstring(
            dom_xml, lxml.etree.XMLParser(resolve_entities=False))
    filter_dev = sorted(list(filter_dev))
    disks = {}
    for elem in dom_xml.xpath("devices/disk"):
        try:
            if elem.get("device", None) != "disk":
                continue

            if elem.get("type", None) != "file":
                logger.debug(
                    "Disk %s is not a file, which is not compatible with virt-backup",
Example #57
class CreateProject(show.ShowOne):
    """Create new project"""

    log = logging.getLogger(__name__ + '.CreateProject')

    def get_parser(self, prog_name):
        parser = super(CreateProject, self).get_parser(prog_name)
        parser.add_argument(
            'name',
            metavar='<project-name>',
            help='New project name',
        )
        parser.add_argument(
            '--domain',
            metavar='<project-domain>',
            help='Domain owning the project (name or ID)',
        )
        parser.add_argument(
            '--description',
            metavar='<project-description>',
            help='New project description',
        )
        enable_group = parser.add_mutually_exclusive_group()
        enable_group.add_argument(
            '--enable',
            action='store_true',
            help='Enable project',
        )
        enable_group.add_argument(
            '--disable',
            action='store_true',
            help='Disable project',
        )
        parser.add_argument(
            '--property',
            metavar='<key=value>',
            action=parseractions.KeyValueAction,
            help='Property to add for this project '
                 '(repeat option to set multiple properties)',
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)', parsed_args)
        identity_client = self.app.client_manager.identity

        if parsed_args.domain:
            domain = common.find_domain(identity_client,
                                        parsed_args.domain).id
        else:
            domain = None

        enabled = True
        if parsed_args.disable:
            enabled = False
        kwargs = {}
        if parsed_args.property:
            kwargs = parsed_args.property.copy()

        project = identity_client.projects.create(
            name=parsed_args.name,
            domain=domain,
            description=parsed_args.description,
            enabled=enabled,
            **kwargs
        )

        project._info.pop('links')
        return zip(*sorted(six.iteritems(project._info)))
Example #58
)
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
    CodeNotaryError,
    CodeNotaryUntrusted,
    DockerAPIError,
    DockerError,
    DockerNotFound,
    DockerRequestError,
    DockerTrustError,
)
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..utils import process_lock
from .stats import DockerStats

_LOGGER: logging.Logger = logging.getLogger(__name__)

IMAGE_WITH_HOST = re.compile(r"^((?:[a-z0-9]+(?:-[a-z0-9]+)*\.)+[a-z]{2,})\/.+")
DOCKER_HUB = "hub.docker.com"


class DockerInterface(CoreSysAttributes):
    """Docker Supervisor interface."""

    def __init__(self, coresys: CoreSys):
        """Initialize Docker base wrapper."""
        self.coresys: CoreSys = coresys
        self._meta: Optional[dict[str, Any]] = None
        self.lock: asyncio.Lock = asyncio.Lock()

    @property
Example #59
class SetProject(command.Command):
    """Set project properties"""

    log = logging.getLogger(__name__ + '.SetProject')

    def get_parser(self, prog_name):
        parser = super(SetProject, self).get_parser(prog_name)
        parser.add_argument(
            'project',
            metavar='<project>',
            help='Project to change (name or ID)',
        )
        parser.add_argument(
            '--name',
            metavar='<new-project-name>',
            help='New project name',
        )
        parser.add_argument(
            '--domain',
            metavar='<project-domain>',
            help='New domain owning the project (name or ID)',
        )
        parser.add_argument(
            '--description',
            metavar='<project-description>',
            help='New project description',
        )
        enable_group = parser.add_mutually_exclusive_group()
        enable_group.add_argument(
            '--enable',
            action='store_true',
            help='Enable project',
        )
        enable_group.add_argument(
            '--disable',
            action='store_true',
            help='Disable project',
        )
        parser.add_argument(
            '--property',
            metavar='<key=value>',
            action=parseractions.KeyValueAction,
            help='Property to add for this project '
                 '(repeat option to set multiple properties)',
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)', parsed_args)
        identity_client = self.app.client_manager.identity

        if (not parsed_args.name
                and not parsed_args.description
                and not parsed_args.domain
                and not parsed_args.enable
                and not parsed_args.property
                and not parsed_args.disable):
            return

        project = utils.find_resource(
            identity_client.projects,
            parsed_args.project,
        )

        kwargs = {}
        if parsed_args.name:
            kwargs['name'] = parsed_args.name
        if parsed_args.domain:
            kwargs['domain'] = common.find_domain(identity_client,
                                                  parsed_args.domain).id
        if parsed_args.description:
            kwargs['description'] = parsed_args.description
        if parsed_args.enable:
            kwargs['enabled'] = True
        if parsed_args.disable:
            kwargs['enabled'] = False
        if parsed_args.property:
            kwargs.update(parsed_args.property)

        identity_client.projects.update(project.id, **kwargs)
        return
Example #60
File: main.py Project: joe2hpimn/kafkatos3
def main(argv):
    """Program entry point.

    :param argv: command-line arguments
    :type argv: :class:`list`
    """
    global config
    global logger
    author_strings = []
    for name, email in zip(metadata.authors, metadata.emails):
        author_strings.append('Author: {0} <{1}>'.format(name, email))

    epilog = '''{project} {version}

{authors}
URL: <{url}>
'''.format(project=metadata.project,
           version=metadata.version,
           authors='\n'.join(author_strings),
           url=metadata.url)

    arg_parser = argparse.ArgumentParser(
        prog=argv[0],
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=metadata.description,
        epilog=epilog)
    arg_parser.add_argument('-V',
                            '--version',
                            action='version',
                            version='{0} {1}'.format(metadata.project,
                                                     metadata.version))
    arg_parser.add_argument('configfile', help='kafkatos3 config file to use')

    args = arg_parser.parse_args(args=argv[1:])

    config = parse_config(args.configfile)

    logger = logging.getLogger('kafkatos3')
    formatter = logging.Formatter(
        '%(asctime)s - [%(levelname)s/%(processName)s] - %(message)s')
    ch = logging.StreamHandler()
    ch.setFormatter(formatter)
    logger.setLevel(logging.INFO)
    logger.addHandler(ch)

    logger.info(
        "===============================================================")
    logger.info(epilog)
    logger.info(
        "===============================================================")

    for x in range(0, int(config.get("consumer", "consumer_processes"))):
        p = Process(target=consumer_process, args=(str(x), ))
        p.start()
        processes.append(p)

    p = Process(target=compression_process)
    p.start()
    processes.append(p)

    p = Process(target=s3_process)
    p.start()
    processes.append(p)

    setproctitle("[mainprocess] " + getproctitle())

    for p in processes:
        p.join()

    return 0