Example #1
def compare_namelists(case, baseline_name, baseline_root, logfile_name):
###############################################################################
    log_lvl = logging.getLogger().getEffectiveLevel()
    logging.disable(logging.CRITICAL)
    success = case_cmpgen_namelists(case, compare=True, compare_name=baseline_name, baseline_root=baseline_root, logfile_name=logfile_name)
    logging.getLogger().setLevel(log_lvl)
    return success
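A common companion pattern to the example above is to silence all logging around a single call and then clear the global switch with logging.disable(logging.NOTSET). A minimal, self-contained sketch of that idiom (noisy_operation is a hypothetical placeholder, not part of the example above):

import logging

def noisy_operation():
    logging.getLogger(__name__).warning("lots of output")

def run_quietly():
    # Suppress every record of severity CRITICAL and below for the duration
    # of the call, then clear the module-level disable switch.
    logging.disable(logging.CRITICAL)
    try:
        return noisy_operation()
    finally:
        logging.disable(logging.NOTSET)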
Example #2
def test_stderr_to_file_with_append(test, capfd):
    """Tests redirection of stderr to to a file with appending."""

    logging.disable(logging.CRITICAL)

    try:
        with tempfile.NamedTemporaryFile() as temp_file:
            temp_file.write(b"orig\n")
            temp_file.flush()

            process = sh.sh("-c", "echo test1; echo test2 >&2; echo тест3; echo тест4 >&2;",
                _stderr=File(temp_file.name, append=True))
            process.execute()

            assert process.stdout() == "test1\nтест3\n"
            assert process.stderr() == ""

            stdout, stderr = capfd.readouterr()
            assert stdout == ""
            assert stderr == ""

            with open(temp_file.name, "rb") as stderr:
                assert psys.u(stderr.read()) == "orig\ntest2\nтест4\n"
    finally:
        logging.disable(logging.NOTSET)
Example #3
def configure_logger(log_level):
    """Configure the program's logger.

    :param log_level: Log level for configuring logging
    :type log_level: str
    :rtype: None
    """

    if log_level is None:
        logging.disable(logging.CRITICAL)
        return None

    if log_level in constants.VALID_LOG_LEVEL_VALUES:
        logging.basicConfig(
            format=('%(threadName)s: '
                    '%(asctime)s '
                    '%(pathname)s:%(funcName)s:%(lineno)d - '
                    '%(message)s'),
            stream=sys.stderr,
            level=log_level.upper())
        return None

    msg = 'Log level set to an unknown value {!r}. Valid values are {!r}'
    raise DCOSException(
        msg.format(log_level, constants.VALID_LOG_LEVEL_VALUES))
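A short usage sketch under the assumptions stated in the function itself (constants.VALID_LOG_LEVEL_VALUES and DCOSException come from the surrounding project and are not defined here; "debug" is assumed to be a valid value):

configure_logger(None)      # silences everything via logging.disable(logging.CRITICAL)
configure_logger("debug")   # configures stderr output at DEBUG via logging.basicConfig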
Example #4
def logger_init(crash_repo_path, stdout_level, file_level, disabled=False):
    log_levels = {
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARNING,
        "ERROR": logging.ERROR,
        "CRITICAL": logging.CRITICAL,
    }
    stdout_level = log_levels[stdout_level]
    file_level = log_levels[file_level]

    # create formatter for crash file logging
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    logger = logging.getLogger()

    # create file handler which logs even debug messages
    if crash_repo_path:
        crash_logger = logging.FileHandler(crash_repo_path)
        crash_logger.setLevel(file_level)
        crash_logger.setFormatter(formatter)
        logger.addHandler(crash_logger)
    # create console handler with a low log level
    print_logger = logging.StreamHandler()
    print_logger.setLevel(stdout_level)
    # add the handlers to logger
    logger.addHandler(print_logger)
    if disabled:
        logging.disable(logging.CRITICAL)
Example #5
    def setUp(self):
        # Start container

        logging.disable(logging.ERROR)
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # simulate preloading
        #preload_ion_params(self.container)
        logging.disable(logging.NOTSET)

        #Instantiate a process to represent the test
        process=VisualizationServiceTestProcess()

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceProcessClient(node=self.container.node, process=process)
        self.damsclient = DataAcquisitionManagementServiceProcessClient(node=self.container.node, process=process)
        self.pubsubclient =  PubsubManagementServiceProcessClient(node=self.container.node, process=process)
        self.ingestclient = IngestionManagementServiceProcessClient(node=self.container.node, process=process)
        self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)
        self.dataproductclient = DataProductManagementServiceProcessClient(node=self.container.node, process=process)
        self.dataprocessclient = DataProcessManagementServiceProcessClient(node=self.container.node, process=process)
        self.datasetclient =  DatasetManagementServiceProcessClient(node=self.container.node, process=process)
        self.process_dispatcher = ProcessDispatcherServiceProcessClient(node=self.container.node, process=process)
        self.data_retriever = DataRetrieverServiceProcessClient(node=self.container.node, process=process)
        self.vis_client = VisualizationServiceProcessClient(node=self.container.node, process=process)

        self.ctd_stream_def = SBE37_CDM_stream_definition()
Example #6
def main():
  """Run the program."""
  # Check for --loglevel and --debug; we handle them ourselves because the
  # option parser also uses logging.
  loglevel = None
  for idx, arg in enumerate(sys.argv):
    if '--loglevel' in arg:
      if '=' in arg:
        loglevel = arg.split('=')[1]
      else:
        loglevel = sys.argv[idx + 1]
      level = getattr(logging, loglevel.upper(), None)
      if level is None:
        raise ValueError('Invalid log level: %s' % loglevel)
      loglevel = level
  else:
    if '--debug' in sys.argv or '-d' in sys.argv:
      loglevel = logging.DEBUG
  logging.basicConfig(
      level=loglevel,
      format='%(filename)s [%(lineno)d]: %(levelname)s %(message)s')
  if loglevel is None:
    # Disabling warning, info, debug messages
    logging.disable(logging.WARNING)

  opts = create_options()
  desc = 'Usage: %prog [Options...]'
  opts.parse_args(desc, sys.argv)

  keymon = KeyMon(opts)
  try:
    gtk.main()
  except KeyboardInterrupt:
    keymon.quit_program()
Example #7
    def tearDownClass(cls):
        logging.disable(logging.NOTSET)

        # Pop the connection to Redis
        testconn = pop_connection()
        assert testconn == cls.testconn, 'Wow, something really nasty ' \
                'happened to the Redis connection stack. Check your setup.'
Example #8
    def test_last_ditch_entity_replacement(self):
        # This is a UTF-8 document that contains bytestrings
        # completely incompatible with UTF-8 (ie. encoded with some other
        # encoding).
        #
        # Since there is no consistent encoding for the document,
        # Unicode, Dammit will eventually encode the document as UTF-8
        # and encode the incompatible characters as REPLACEMENT
        # CHARACTER.
        #
        # If chardet is installed, it will detect that the document
        # can be converted into ISO-8859-1 without errors. This happens
        # to be the wrong encoding, but it is a consistent encoding, so the
        # code we're testing here won't run.
        #
        # So we temporarily disable chardet if it's present.
        doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?>
<html><b>\330\250\330\252\330\261</b>
<i>\310\322\321\220\312\321\355\344</i></html>"""
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            bs4.dammit.chardet_dammit = noop
            dammit = UnicodeDammit(doc)
            self.assertEqual(True, dammit.contains_replacement_characters)
            self.assertTrue("\ufffd" in dammit.unicode_markup)

            soup = BeautifulSoup(doc, "html.parser")
            self.assertTrue(soup.contains_replacement_characters)
        finally:
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet
Example #9
    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.collection_manager = CollectionManager()
        self.rest_app = RestApp(self.temp_dir, collection_manager=self.collection_manager)

        # disable all but critical errors!
        logging.disable(logging.CRITICAL)
Example #10
    def setUp(self):
        from letsencrypt.account import Account

        logging.disable(logging.CRITICAL)

        self.accounts_dir = tempfile.mkdtemp("accounts")
        self.account_keys_dir = os.path.join(self.accounts_dir, "keys")
        os.makedirs(self.account_keys_dir, 0o700)

        self.config = mock.MagicMock(
            spec=configuration.NamespaceConfig, accounts_dir=self.accounts_dir,
            account_keys_dir=self.account_keys_dir, rsa_key_size=2048,
            server="letsencrypt-demo.org")

        key_file = pkg_resources.resource_filename(
            "acme.jose", os.path.join("testdata", "rsa512_key.pem"))
        key_pem = pkg_resources.resource_string(
            "acme.jose", os.path.join("testdata", "rsa512_key.pem"))

        self.key = le_util.Key(key_file, key_pem)
        self.email = "*****@*****.**"
        self.regr = messages2.RegistrationResource(
            uri="uri",
            new_authzr_uri="new_authzr_uri",
            terms_of_service="terms_of_service",
            body=messages2.Registration(
                recovery_token="recovery_token", agreement="agreement")
        )

        self.test_account = Account(
            self.config, self.key, self.email, None, self.regr)
Example #11
def process_email(quiet=False):
    for q in Queue.objects.filter(
            email_box_type__isnull=False,
            allow_email_submission=True):

        logger = logging.getLogger('django.helpdesk.queue.' + q.slug)
        if not q.logging_type or q.logging_type == 'none':
            logging.disable(logging.CRITICAL)  # disable all messages
        elif q.logging_type == 'info':
            logger.setLevel(logging.INFO)
        elif q.logging_type == 'warn':
            logger.setLevel(logging.WARN)
        elif q.logging_type == 'error':
            logger.setLevel(logging.ERROR)
        elif q.logging_type == 'crit':
            logger.setLevel(logging.CRITICAL)
        elif q.logging_type == 'debug':
            logger.setLevel(logging.DEBUG)
        if quiet:
            logger.propagate = False  # do not propagate to root logger that would log to console
        logdir = q.logging_dir or '/var/log/helpdesk/'
        handler = logging.FileHandler(join(logdir, q.slug + '_get_email.log'))
        logger.addHandler(handler)

        if not q.email_box_last_check:
            q.email_box_last_check = timezone.now() - timedelta(minutes=30)

        queue_time_delta = timedelta(minutes=q.email_box_interval or 0)

        if (q.email_box_last_check + queue_time_delta) < timezone.now():
            process_queue(q, logger=logger)
            q.email_box_last_check = timezone.now()
            q.save()
Example #12
def main():
    args = sys.argv[1:]

    # The only shared option is '--addons-path=' needed to discover additional
    # commands from modules
    if len(args) > 1 and args[0].startswith('--addons-path=') and not args[1].startswith("-"):
        # parse only the addons-path, do not setup the logger...
        odoo.tools.config._parse_config([args[0]])
        args = args[1:]

    # Default legacy command
    command = "server"

    # TODO: find a way to properly discover addons subcommands without importing the world
    # Subcommand discovery
    if len(args) and not args[0].startswith("-"):
        logging.disable(logging.CRITICAL)
        for module in get_modules():
            if isdir(joinpath(get_module_path(module), 'cli')):
                __import__('odoo.addons.' + module)
        logging.disable(logging.NOTSET)
        command = args[0]
        args = args[1:]

    if command in commands:
        o = commands[command]()
        o.run(args)
Example #13
    def detach(self):
        try:
            # Re-attach to the process original namespaces before attaching the
            # first time to self.pid namespaces.
            attach_to_process_namespaces(self.host_ns_fds,
                                         self.namespaces)
        except Exception as e:
            logging.disable(logging.NOTSET)
            logger.error('Could not move back to the host: %s' % e)
            # XXX can't recover from this one. But it would be better to
            # bubble up the error.
            sys.exit(1)

        # We are now in host context

        try:
            os.chdir(self.host_cwd)
        except Exception as e:
            logger.error('Could not move to the host cwd: %s' % e)
            raise
        logging.disable(logging.NOTSET)
        try:
            close_process_namespaces(self.container_ns_fds,
                                     self.namespaces)
            close_process_namespaces(self.host_ns_fds, self.namespaces)
        except Exception as e:
            logger.warning('Could not close the namespaces: %s' % e)
Example #14
def setupLogging(agentConfig):
    """Configure logging to use syslog whenever possible.
    Also controls debug_mode."""
    if agentConfig['debug_mode']:
        logFile = "/tmp/dd-agent.log"
        logging.basicConfig(filename=logFile, filemode='w', level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        logging.info("Logging to %s" % logFile)
    else:
        try:
            from logging.handlers import SysLogHandler
            rootLog = logging.getLogger()
            rootLog.setLevel(logging.INFO)

            sys_log_addr = "/dev/log"

            # Special-case macs
            if sys.platform == 'darwin':
                sys_log_addr = "/var/run/syslog"

            handler = SysLogHandler(address=sys_log_addr, facility=SysLogHandler.LOG_DAEMON)
            formatter = logging.Formatter("dd-agent - %(name)s - %(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            rootLog.addHandler(handler)
            logging.info('Logging to syslog is set up')
        except Exception,e:
            sys.stderr.write("Error while setting up syslog logging (%s). No logging available" % str(e))
            logging.disable(logging.ERROR)
Example #15
def migrateSingleFile(fileName, options, neededUpdates):

    if not os.path.isfile(fileName):
        print """
ERROR: The file '%s' could not be found.
""" % fileName
        sys.exit(1)

    #turn off logging
    setupLogging()
    #logging.getLogger().setLevel(logging.NOTSET)
    logging.disable(logging.ERROR)


    #backup file
    shutil.copyfile(fileName, fileName + ".migration.bak")

    try:
        for version in neededUpdates:
            migrate([fileName], options, getNormalizedVersion(version), ["utf-8"])
    finally:
        # print migrated file
        for line in open(fileName):
            print line,
        #restore file
        shutil.copyfile(fileName + ".migration.bak", fileName)
Example #16
    def test_log_file_location_can_be_set_from_env_variable(self, logs):
        logging.disable(logging.NOTSET)

        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        now = u(int(time.time()))

        with utils.TemporaryDirectory() as tempdir:
            entity = 'tests/samples/codefiles/python.py'
            shutil.copy(entity, os.path.join(tempdir, 'python.py'))
            entity = os.path.realpath(os.path.join(tempdir, 'python.py'))
            config = 'tests/samples/configs/good_config.cfg'
            shutil.copy(config, os.path.join(tempdir, '.wakatime.cfg'))
            config = os.path.realpath(os.path.join(tempdir, '.wakatime.cfg'))
            expected_logfile = os.path.realpath(os.path.join(tempdir, '.wakatime.log'))

            with utils.mock.patch('wakatime.main.os.environ.get') as mock_env:
                mock_env.return_value = tempdir

                args = ['--file', entity, '--config', config, '--time', now]

                execute(args)

                retval = execute(args)
                self.assertEquals(retval, 102)
                self.assertNothingPrinted()

                self.assertEquals(logging.WARNING, logging.getLogger('WakaTime').level)
                logfile = os.path.realpath(logging.getLogger('WakaTime').handlers[0].baseFilename)
                self.assertEquals(logfile, expected_logfile)
                logs.check()
Example #17
    def test_membership_checked_against_decision_id(self):
        """
        Ensure that when creating feedback and comments by email
        against a decision, the users membership is tested against
        that decision and not (just) the email representing the
        organization.
        """
        # create an organization decision that Betty shouldn't be able to access
        logging.disable(logging.CRITICAL)
        non_member_organization = Organization.objects.exclude(users=self.user).latest("id")
        self.make_decision(organization=non_member_organization)

        # Betty tries to spoof something onto that decision
        # by posting to her own organization but using the original decision id
        email = getattr(mail, "outbox")[-1]
        mail_to = "*****@*****.**>" % self.bettysorg.slug
        poplib.POP3.mailbox = (
            [""],
            [
                str("From: %s <%s>" % (self.betty.email, self.betty.email)),
                str("To: %s <%s>" % (mail_to, mail_to)),
                str("Subject: Re: %s" % email.subject),
                "",
                "Danger: This is a bad idea",
                "",
            ],
            [""],
        )
        try:
            management.call_command("process_email")
        except:
            self.fail("Exception was raised when processing legitimate email.")

        # the email should be rejected and no feedback should be created.
        self.assertFalse(Feedback.objects.all())
Example #18
    def test_verbose_flag_enables_verbose_logging(self, logs):
        logging.disable(logging.NOTSET)

        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        now = u(int(time.time()))
        entity = 'tests/samples/codefiles/python.py'
        config = 'tests/samples/configs/has_regex_errors.cfg'
        args = ['--file', entity, '--config', config, '--time', now, '--verbose']

        retval = execute(args)
        self.assertEquals(retval, 102)
        self.assertNothingPrinted()

        self.assertEquals(logging.DEBUG, logging.getLogger('WakaTime').level)
        logfile = os.path.realpath(os.path.expanduser('~/.wakatime.log'))
        self.assertEquals(logfile, logging.getLogger('WakaTime').handlers[0].baseFilename)
        output = [u(' ').join(x) for x in logs.actual()]

        expected = u('WakaTime WARNING Regex error (unbalanced parenthesis) for include pattern: \\(invalid regex)')
        if self.isPy35OrNewer:
            expected = u('WakaTime WARNING Regex error (unbalanced parenthesis at position 15) for include pattern: \\(invalid regex)')
        self.assertEquals(output[0], expected)
        expected = u('WakaTime WARNING Regex error (unbalanced parenthesis) for exclude pattern: \\(invalid regex)')
        if self.isPy35OrNewer:
            expected = u('WakaTime WARNING Regex error (unbalanced parenthesis at position 15) for exclude pattern: \\(invalid regex)')
        self.assertEquals(output[1], expected)
        self.assertEquals(output[2], u('WakaTime DEBUG Sending heartbeats to api at https://api.wakatime.com/api/v1/users/current/heartbeats.bulk'))
        self.assertIn('Python', output[3])
        self.assertIn('response_code', output[4])
Example #19
    def test_default_log_file_used(self, logs):
        logging.disable(logging.NOTSET)

        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        now = u(int(time.time()))
        entity = 'tests/samples/codefiles/python.py'
        config = 'tests/samples/configs/has_regex_errors.cfg'
        args = ['--file', entity, '--config', config, '--time', now]

        retval = execute(args)
        self.assertEquals(retval, 102)
        self.assertNothingPrinted()

        self.assertEquals(logging.WARNING, logging.getLogger('WakaTime').level)
        logfile = os.path.realpath(os.path.expanduser('~/.wakatime.log'))
        self.assertEquals(logfile, logging.getLogger('WakaTime').handlers[0].baseFilename)
        output = [u(' ').join(x) for x in logs.actual()]
        expected = u('WakaTime WARNING Regex error (unbalanced parenthesis) for include pattern: \\(invalid regex)')
        if self.isPy35OrNewer:
            expected = u('WakaTime WARNING Regex error (unbalanced parenthesis at position 15) for include pattern: \\(invalid regex)')
        self.assertEquals(output[0], expected)
        expected = u('WakaTime WARNING Regex error (unbalanced parenthesis) for exclude pattern: \\(invalid regex)')
        if self.isPy35OrNewer:
            expected = u('WakaTime WARNING Regex error (unbalanced parenthesis at position 15) for exclude pattern: \\(invalid regex)')
        self.assertEquals(output[1], expected)
Example #20
def get_logger(name=None):
    """
    Make a singleton logger
    """
    level = logging.CRITICAL
    if DEBUG:
        logging.disable(logging.NOTSET)
        level = logging.DEBUG

    if name is None:
        name = "<unknown>"

    log = logging.getLogger(name=name)
    log.setLevel( level )
    console = logging.StreamHandler()
    console.setLevel( level )
    log_format = ('[%(asctime)s] [%(levelname)s] [%(module)s:%(lineno)d] (' + str(os.getpid()) + '.%(thread)d) %(message)s' if DEBUG else '%(message)s')
    formatter = logging.Formatter( log_format )
    console.setFormatter(formatter)
    log.propagate = False

    if len(log.handlers) > 0:
        for i in xrange(0, len(log.handlers)):
            log.handlers.pop(0)
    
    log.addHandler(console)
    return log
Example #21
    def test_update_project_when_default_role_does_not_exist(self):
        project = self.tenants.first()
        quota = self.quotas.first()

        api.keystone.get_default_role(IsA(http.HttpRequest)) \
            .AndReturn(None)  # Default role doesn't exist
        api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id,
                                admin=True) \
            .AndReturn(project)
        quotas.get_tenant_quota_data(IsA(http.HttpRequest),
                                     tenant_id=self.tenant.id) \
            .AndReturn(quota)
        self.mox.ReplayAll()

        url = reverse('horizon:admin:projects:update',
                      args=[self.tenant.id])

        try:
            # Avoid the log message in the test output when the workflow's
            # step action cannot be instantiated
            logging.disable(logging.ERROR)
            with self.assertRaises(exceptions.NotFound):
                res = self.client.get(url)
        finally:
            logging.disable(logging.NOTSET)
Example #22
    def testDuplicateReviews(self):
        """Testing consolidation of duplicate reviews"""

        body_top = "This is the body_top."
        body_bottom = "This is the body_bottom."
        comment_text_1 = "Comment text 1"
        comment_text_2 = "Comment text 2"
        comment_text_3 = "Comment text 3"

        # Some objects we need.
        user = User.objects.get(username="******")

        review_request = ReviewRequest.objects.get(
            summary="Add permission checking for JSON API")
        filediff = \
            review_request.diffset_history.diffsets.latest().files.all()[0]

        # Create the first review.
        review = Review(review_request=review_request, user=user)
        review.body_top = body_top
        review.save()
        master_review = review

        comment = review.comments.create(filediff=filediff, first_line=1)
        comment.text = comment_text_1
        comment.num_lines = 1
        comment.save()

        # Create the second review.
        review = Review(review_request=review_request, user=user)
        review.save()

        comment = review.comments.create(filediff=filediff, first_line=1)
        comment.text = comment_text_2
        comment.num_lines = 1
        comment.save()

        # Create the third review.
        review = Review(review_request=review_request, user=user)
        review.body_bottom = body_bottom
        review.save()

        comment = review.comments.create(filediff=filediff, first_line=1)
        comment.text = comment_text_3
        comment.num_lines = 1
        comment.save()

        # Now that we've made a mess, see if we get a single review back.
        logging.disable(logging.WARNING)
        review = review_request.get_pending_review(user)
        self.assert_(review)
        self.assertEqual(review.id, master_review.id)
        self.assertEqual(review.body_top, body_top)
        self.assertEqual(review.body_bottom, body_bottom)

        comments = list(review.comments.all())
        self.assertEqual(len(comments), 3)
        self.assertEqual(comments[0].text, comment_text_1)
        self.assertEqual(comments[1].text, comment_text_2)
        self.assertEqual(comments[2].text, comment_text_3)
Example #23
def detach_volume(attached_servers, volume_id):
    LOG.info('Detaching volume "%s" .' % volume_id)
    volume_bootable = get_volume_info(volume_id).bootable
    if volume_bootable == 'false':
        # detach volume from instance by python sdk first
        logging.disable(logging.INFO)
        for server_id in attached_servers:
            pc.nova_delete_server_volume(server_id, volume_id)
        logging.disable(logging.NOTSET)
        t = 0
        while t <= 14:
            volume_status = get_volume_info(volume_id).status
            if volume_status == 'available':
                break
            time.sleep(3)
            t+=3
        # if timeout, detach-disk by virsh on compute node & update database
        if get_volume_info(volume_id).status != 'available':
            if detach_disk_on_compute_node(attached_servers, volume_id):
                # update database
                LOG.info('   Updating database.')
                # NOTE use UTC time
                detach_at = time.strftime('%Y-%m-%d %X', time.gmtime())
                sql_update_cinder_db = 'UPDATE volumes SET status="available",attach_status="detached" WHERE id="%s";' % volume_id
                cinder_db.connect(sql_update_cinder_db)
                for server_id in attached_servers:
                    sql_update_nova_db = 'UPDATE block_device_mapping SET deleted_at="%s",deleted=id WHERE instance_uuid="%s" and volume_id="%s" and deleted=0;' % (detach_at, server_id, volume_id)
                    nova_db.connect(sql_update_nova_db)
        if get_volume_info(volume_id).status == 'available':
            return True
    else:
        LOG.warn('Can not detach root device. Please delete instance "%s" first.' % attached_servers)
        return False
Example #24
def action_dumpdata(params):
    import logging

    logging.disable(logging.WARNING)
    from nailgun.db.sqlalchemy import fixman
    fixman.dump_fixture(params.model)
    sys.exit(0)
Example #25
def setVerbosityLevel(verbosityLevel):
	"""
	This definition defines logging verbosity level.

	Available verbosity levels::

		0: Critical.
		1: Error.
		2: Warning.
		3: Info.
		4: Debug.

	:param verbosityLevel: Verbosity level. ( Integer )
	:return: Definition success. ( Boolean )
	"""

	if verbosityLevel == 0:
		LOGGER.setLevel(logging.CRITICAL)
		logging.disable(logging.ERROR)
	elif verbosityLevel == 1:
		LOGGER.setLevel(logging.ERROR)
		logging.disable(logging.WARNING)
	elif verbosityLevel == 2:
		LOGGER.setLevel(logging.WARNING)
		logging.disable(logging.INFO)
	elif verbosityLevel == 3:
		LOGGER.setLevel(logging.INFO)
		logging.disable(logging.DEBUG)
	elif verbosityLevel == 4:
		LOGGER.setLevel(logging.DEBUG)
		logging.disable(logging.NOTSET)
	return True
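The docstring above maps an integer verbosity to a logger level plus a matching logging.disable() threshold. A minimal sketch of what that pairing means in practice (the module-level LOGGER here is a stand-in for the one the snippet references); logging.disable(X) suppresses every record of severity X and below:

import logging

logging.basicConfig(format="%(levelname)s %(message)s")
LOGGER = logging.getLogger(__name__)

# Verbosity level 1 ("Error") from the table above:
LOGGER.setLevel(logging.ERROR)
logging.disable(logging.WARNING)

LOGGER.warning("suppressed: at or below the disabled threshold")
LOGGER.error("emitted")
LOGGER.critical("emitted")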
Example #26
    def test_ioerror_when_reading_mercurial_branch(self, logs):
        logging.disable(logging.NOTSET)

        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        with mock.patch('wakatime.projects.git.Git.process') as mock_git:
            mock_git.return_value = False

            now = u(int(time.time()))
            entity = 'tests/samples/projects/hg/emptyfile.txt'
            config = 'tests/samples/configs/good_config.cfg'

            args = ['--file', entity, '--config', config, '--time', now]

            with mock.patch('wakatime.projects.mercurial.open') as mock_open:
                mock_open.side_effect = IOError('')
                execute(args)

            self.assertEquals('hg', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
            self.assertEquals('default', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['branch'])

            self.assertNothingPrinted()
            actual = self.getLogOutput(logs)
            expected = 'OSError' if self.isPy33OrNewer else 'IOError'
            self.assertIn(expected, actual)
Example #27
	def setUp(self):
		logging.disable(logging.CRITICAL)

		if not os.path.exists(self.fullText_dir):
			os.makedirs(self.fullText_dir)
		if not os.path.exists(self.metadata_dir):
			os.makedirs(self.metadata_dir)	
Example #28
    def setUp(self):
        super(TestBotSearch, self).setUp()
        # suppress logging
        logging.disable(logging.CRITICAL)
        # run the database migrations
        Migrate(self.app, self.db)
        upgrade()
Example #29
    def setUp(self):
        super(CreateOrderViewTests, self).setUp()

        # Override all loggers, suppressing logging calls of severity CRITICAL and below
        logging.disable(logging.CRITICAL)

        self.product_class = factories.ProductClassFactory(
            name=u'𝕿𝖗𝖎𝖆𝖑',
            requires_shipping=False,
            track_stock=False
        )
        self.courthouse = factories.ProductFactory(
            structure='parent',
            title=u'𝑩𝒆𝒓𝒏𝒂𝒍𝒊𝒍𝒍𝒐 𝑪𝒐𝒖𝒏𝒕𝒚 𝑨𝒏𝒏𝒆𝒙',
            product_class=self.product_class,
            stockrecords=None,
        )
        self.expensive_trial = factories.ProductFactory(
            structure='child',
            parent=self.courthouse,
            title=u'𝕋𝕣𝕚𝕒𝕝 𝕨𝕚𝕥𝕙 ℙ𝕣𝕚𝕧𝕒𝕥𝕖 𝔸𝕥𝕥𝕠𝕣𝕟𝕖𝕪',
            product_class=self.product_class,
            stockrecords__partner_sku=self.EXPENSIVE_TRIAL_SKU,
            stockrecords__price_excl_tax=D('999.99'),
        )

        # Remove logger override
        self.addCleanup(logging.disable, logging.NOTSET)
Example #30
    def setUp(self, **kwargs):
        self.client = Client()
        self.factory = RequestFactory()
        self.experiment = self.load_experiment(**kwargs)
        self.add_participants(**kwargs)
        self.logger = logger
        logging.disable(settings.DISABLED_TEST_LOGLEVEL)
Example #31
# See the file 'docs/LICENSE' for copying permission.

import os
import logging
import pkgutil
import subprocess
import xmlrpclib
import time
from lib.core.packages import choose_package
from lib.common.exceptions import CuckooError, CuckooPackageError
from lib.common.abstracts import Package, Auxiliary
from lib.common.constants import PATHS
from lib.core.config import Config
from lib.core.startup import init_logging
from modules import auxiliary
logging.disable(level=logging.DEBUG)
log = logging.getLogger()


class Analyzer(object):
    def __init__(self):
        self.config = None
        self.target = None

    def complete(self):
        """End analysis."""
        log.info("Analysis completed")

    def get_options(self):
        """Get analysis options.
        @return: options dict.
Example #32
from urllib.request import urlopen
from bs4 import BeautifulSoup
import re
import logging

logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s - %(levelname)s - %(message)s')
logging.disable(logging.CRITICAL)


def quitarPrimerParentesis(cadena):
    cParentesisDer = 0
    cParentesisIzq = 0
    bandera = False
    lista = cadena.split()
    for elemento in lista[:7]:  # Check that the parenthesis actually appears before a certain number of words
        if '(' in elemento:
            bandera = True
            break
    if not bandera:
        return None, None
    for i in range(len(cadena)):
        if cadena[i] == '(':
            if cParentesisIzq == 0:
                indiceIzq = i
            cParentesisIzq += 1
        elif cadena[i] == ')':
            cParentesisDer += 1
        if cParentesisIzq != 0 and cParentesisIzq == cParentesisDer:
            indiceDer = i
Example #33
import argparse
from model import GRAM_BIGRU_CRF
from test import Test
from pretrained import Parser

import logging, os

logging.disable(logging.WARNING)
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"

import tensorflow as tf

print(tf.contrib.util.constant_value(tf.ones([1])))


def run(args):
    if args.train == 1:
        model = GRAM_BIGRU_CRF(params=args)
        model.run(336)
    else:
        Test(args).run()
    return 1


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="Aspect Term Extraction With GRAM-BIGRU-CRF")
    parser.add_argument("-ds_name",
                        type=str,
                        default="Laptop",
                        help="dataset name")
Example #34
def test_solver_cases(*args):
    """
    A function for accessing _test_solver_cases as global state
    """
    if len(_test_solver_cases) == 0:
        logging.disable(logging.WARNING)

        #
        # MOSEK
        #

        _mosek_capabilities = set([
            'linear', 'integer', 'quadratic_objective', 'quadratic_constraint'
        ])

        _test_solver_cases['mosek', 'python'] = initialize(
            name='mosek',
            io='python',
            capabilities=_mosek_capabilities,
            import_suffixes=['dual', 'rc', 'slack'])
        #
        # CPLEX
        #

        _cplex_capabilities = set([
            'linear', 'integer', 'quadratic_objective', 'quadratic_constraint',
            'sos1', 'sos2'
        ])

        _test_solver_cases['cplex', 'lp'] = initialize(
            name='cplex',
            io='lp',
            capabilities=_cplex_capabilities,
            import_suffixes=['slack', 'dual', 'rc'])

        _test_solver_cases['cplex', 'mps'] = initialize(
            name='cplex',
            io='mps',
            capabilities=_cplex_capabilities,
            import_suffixes=['slack', 'dual', 'rc'])

        _test_solver_cases['cplex',
                           'nl'] = initialize(name='cplex',
                                              io='nl',
                                              capabilities=_cplex_capabilities,
                                              import_suffixes=['dual'])

        _test_solver_cases['cplex', 'python'] = initialize(
            name='cplex',
            io='python',
            capabilities=_cplex_capabilities,
            import_suffixes=['slack', 'dual', 'rc'])

        #
        # CPLEX PERSISTENT
        #

        _test_solver_cases['cplex_persistent', 'python'] = initialize(
            name='cplex_persistent',
            io='python',
            capabilities=_cplex_capabilities,
            import_suffixes=['slack', 'dual', 'rc'])

        #
        # GAMS
        #

        _gams_capabilities = set([
            'linear', 'integer', 'quadratic_objective', 'quadratic_constraint'
        ])

        _test_solver_cases['gams',
                           'gms'] = initialize(name='gams',
                                               io='gms',
                                               capabilities=_gams_capabilities,
                                               import_suffixes=['dual', 'rc'])

        _test_solver_cases['gams', 'python'] = initialize(
            name='gams',
            io='python',
            capabilities=_gams_capabilities,
            import_suffixes=['dual', 'rc'])

        #
        # GUROBI
        #
        # **NOTE: Gurobi does not handle quadratic constraints before
        #         Major Version 5
        #
        _gurobi_capabilities = set([
            'linear', 'integer', 'quadratic_objective', 'quadratic_constraint',
            'sos1', 'sos2'
        ])

        _test_solver_cases['gurobi', 'lp'] = initialize(
            name='gurobi',
            io='lp',
            capabilities=_gurobi_capabilities,
            import_suffixes=['slack', 'dual', 'rc'])

        _test_solver_cases['gurobi', 'mps'] = initialize(
            name='gurobi',
            io='mps',
            capabilities=_gurobi_capabilities,
            import_suffixes=['slack', 'dual', 'rc'])

        _test_solver_cases['gurobi', 'nl'] = initialize(
            name='gurobi',
            io='nl',
            capabilities=_gurobi_capabilities,
            options={
                'qcpdual': 1,
                'simplex': 1
            },
            import_suffixes=['dual'])

        _test_solver_cases['gurobi', 'python'] = initialize(
            name='gurobi',
            io='python',
            capabilities=_gurobi_capabilities,
            import_suffixes=['slack', 'dual', 'rc'])

        #
        # Gurobi PERSISTENT
        #

        _test_solver_cases['gurobi_persistent', 'python'] = initialize(
            name='gurobi_persistent',
            io='python',
            capabilities=_gurobi_capabilities,
            import_suffixes=['slack', 'dual', 'rc'])

        #
        # GLPK
        #
        _glpk_capabilities = set(['linear', 'integer'])

        if 'GLPKSHELL_old' in str(
                pyomo.solvers.plugins.solvers.GLPK.GLPK().__class__):
            glpk_import_suffixes = ['dual']
        else:
            glpk_import_suffixes = ['rc', 'dual']

        _test_solver_cases['glpk', 'lp'] = initialize(
            name='glpk',
            io='lp',
            capabilities=_glpk_capabilities,
            import_suffixes=glpk_import_suffixes)

        _test_solver_cases['glpk', 'mps'] = initialize(
            name='glpk',
            io='mps',
            capabilities=_glpk_capabilities,
            import_suffixes=glpk_import_suffixes,
            io_options={"skip_objective_sense": True})

        _test_solver_cases['glpk', 'python'] = initialize(
            name='glpk',
            io='python',
            capabilities=_glpk_capabilities,
            import_suffixes=[])

        #
        # CBC
        #
        _cbc_lp_capabilities = set(['linear', 'integer'])

        _test_solver_cases['cbc', 'lp'] = initialize(
            name='cbc',
            io='lp',
            capabilities=_cbc_lp_capabilities,
            import_suffixes=['dual', 'rc'])

        _cbc_nl_capabilities = set(['linear', 'integer', 'sos1', 'sos2'])

        _test_solver_cases['cbc', 'nl'] = initialize(
            name='cbc',
            io='nl',
            capabilities=_cbc_nl_capabilities,
            import_suffixes=['dual'])

        #_cbc_mps_capabilities = set(['linear', 'integer', 'sos1', 'sos2'])

        #_test_solver_cases['cbc', 'mps'] = initialize(
        #name='cbc',
        #io='mps',
        #capabilities=_cbc_mps_capabilities,
        #import_suffixes=['dual', 'rc'])

        #
        # PICO
        #
        _pico_capabilities = set(['linear', 'integer'])

        _test_solver_cases['pico',
                           'lp'] = initialize(name='pico',
                                              io='lp',
                                              capabilities=_pico_capabilities,
                                              import_suffixes=['dual'])

        _test_solver_cases['pico',
                           'nl'] = initialize(name='pico',
                                              io='nl',
                                              capabilities=_pico_capabilities,
                                              import_suffixes=['dual'])

        #
        # XPRESS
        #
        _xpress_capabilities = set([
            'linear', 'integer', 'quadratic_objective', 'quadratic_constraint',
            'sos1', 'sos2'
        ])

        _test_solver_cases['xpress', 'lp'] = initialize(
            name='xpress',
            io='lp',
            capabilities=_xpress_capabilities,
            import_suffixes=['dual', 'rc', 'slack'])

        _test_solver_cases['xpress', 'mps'] = initialize(
            name='xpress',
            io='mps',
            capabilities=_xpress_capabilities,
            import_suffixes=['dual', 'rc', 'slack'])

        _test_solver_cases['xpress', 'nl'] = initialize(
            name='xpress',
            io='nl',
            capabilities=_xpress_capabilities,
            import_suffixes=['dual'])

        #
        # IPOPT
        #
        _ipopt_capabilities = set(
            ['linear', 'quadratic_objective', 'quadratic_constraint'])

        _test_solver_cases['ipopt',
                           'nl'] = initialize(name='ipopt',
                                              io='nl',
                                              capabilities=_ipopt_capabilities,
                                              import_suffixes=['dual'])

        #
        # SCIP
        #
        _scip_capabilities = set([
            'linear', 'integer', 'quadratic_objective', 'quadratic_constraint',
            'sos1', 'sos2'
        ])

        _test_solver_cases['scip',
                           'nl'] = initialize(name='scip',
                                              io='nl',
                                              capabilities=_scip_capabilities,
                                              import_suffixes=[])

        #
        # CONOPT
        #
        _conopt_capabilities = set([
            'linear', 'integer', 'quadratic_objective', 'quadratic_constraint',
            'sos1', 'sos2'
        ])
        _test_solver_cases['conopt', 'nl'] = initialize(
            name='conopt',
            io='nl',
            capabilities=_conopt_capabilities,
            import_suffixes=[])

        #
        # BARON
        #
        _baron_capabilities = set([
            'linear', 'integer', 'quadratic_objective', 'quadratic_constraint'
        ])

        _test_solver_cases['baron', 'bar'] = initialize(
            name='baron',
            io='bar',
            capabilities=_baron_capabilities,
            import_suffixes=['rc', 'dual'])

        #
        # KNITROAMPL
        #
        _knitroampl_capabilities = set([
            'linear', 'integer', 'quadratic_objective', 'quadratic_constraint'
        ])

        _test_solver_cases['knitroampl', 'nl'] = initialize(
            name='knitroampl',
            io='nl',
            capabilities=_knitroampl_capabilities,
            import_suffixes=['dual'])

        logging.disable(logging.NOTSET)

        #
        # Error Checks
        #
        for sc in six.itervalues(_test_solver_cases):
            if sc.capabilities is None:
                sc.capabilities = set([])
            if sc.export_suffixes is None:
                sc.export_suffixes = []
            if sc.import_suffixes is None:
                sc.import_suffixes = []
            if sc.options is None:
                sc.options = {}
            if sc.io_options is None:
                sc.io_options = {}
            assert (sc.io is not None) and (type(sc.io) is str)
            assert type(sc.export_suffixes) in [list, tuple]
            assert type(sc.import_suffixes) in [list, tuple]
            assert type(sc.options) is dict
            for tag in sc.export_suffixes:
                assert type(tag) is str
            for tag in sc.import_suffixes:
                assert type(tag) is str
            assert type(sc.capabilities) is set
            for tag in sc.capabilities:
                assert type(tag) is str

    if len(args) == 0:
        return _test_solver_cases.keys()
    return _test_solver_cases[args]
Example #35
import logging, os
import math

logging.disable(logging.INFO)
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
from rlbot.agents.base_agent import BaseAgent, SimpleControllerState
from rlbot.messages.flat.QuickChatSelection import QuickChatSelection
from rlbot.utils.structures.game_data_struct import GameTickPacket
import tensorflow as tf

from util.ball_prediction_analysis import find_slice_at_time
from util.boost_pad_tracker import BoostPadTracker
from util.drive import steer_toward_target
from util.sequence import Sequence, ControlStep
from util.vec import Vec3

from rlbot.utils import public_utils, logging_utils
from queue import Empty
import os
import sys
sys.path.append('C:\\Users\\John\\Desktop\\stuff\\RLBots\\learning')
from rl_environments.game_values import OutputOptions, InputOptions
from rl_environments.kickoff_env import KickoffEnvironment
from rl_environments.rl_env import RLEnvironment
from tensorforce import Agent, Environment

MODEL = None
# MODEL = 'models/kickoff_no_boost_standalone'

class MyBot(BaseAgent):
    def __init__(self, name, team, index):
Example #36
    def setUp(self):
        logging.disable(logging.INFO)
        logging.disable(logging.WARNING)
        utils_net.VirtIface.LASTBYTE = -1  # Restart count at zero
        # These warnings are annoying during testing
        utils_net.VMNet.DISCARD_WARNINGS -= 1
Example #37
    def setUp(self):
        logging.disable(logging.INFO)
        logging.disable(logging.WARNING)
Example #38
# Before go-live check if your settings are suitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
"""

from __future__ import absolute_import, unicode_literals

import logging
from pathlib import Path

logging.basicConfig(
    format=
    '%(asctime)s %(levelname)-7s %(thread)-5d %(filename)s:%(lineno)s | %(funcName)s | %(message)s',
    datefmt='%H:%M:%S')
logging.getLogger().setLevel(logging.DEBUG)
logging.disable(logging.NOTSET)

logging.debug("Settings loading: %s" % __file__)

# ╭────────────────────────────────────────────────────────────────────────────
# │ This is a composite strategy for setting up django website instance.
# │ We import default component settings and customise them below in this file
# │ To disable or enable particular component just comment or uncomment a component import

# noinspection PyUnresolvedReferences
from .components.debug_toolbar import *  # noqa: F402 F403 isort:skip
from .components.django_assets import *  # noqa: F402 F403 isort:skip
# from .components.celery import *  # noqa: F402 F403 isort:skip
from .components.import_export import *  # noqa: F402 F403 isort:skip
from .components.pycountry import *  # noqa: F402 F403 isort:skip
from .components.sentry import *  # noqa: F402 F403 isort:skip
Example #39
def setup():
    allArtists = {}
    allAlbums = {}
    allSongs = {}

    artistID = albumID = songID = None

    fieldnames = [
        'Date', 'Moment', 'ArtistFrom', 'AlbumFrom', 'SongFrom', 'ArtistTo',
        'AlbumTo', 'SongTo', 'RootNode', 'AlbumSong', 'Repetitions', 'Device',
        'isStreaming', 'isFirstListeningAlbum', 'Notes'
    ]

    parser = ArgumentParser()

    #parser.add_argument("-q", "--quiet", default=True, dest="quiet", help="Disable log")
    #parser.add_argument("-v", "--verbose", dest="verbose", help="Enable logging in stdout")
    #parser.add_argument("-vF", "--verboseFile", dest="verboseFile", help="Enable logging into logFile.log")

    parser.add_argument("-q",
                        "--quiet",
                        action='store_true',
                        default=True,
                        help="Disable log")
    parser.add_argument("-v",
                        "--verbose",
                        action='store_true',
                        help="Enable logging in stdout")
    parser.add_argument("-vF",
                        "--verboseFile",
                        action='store_true',
                        help="Enable logging into logFile.log")

    args = parser.parse_args()
    print(args)

    if (args.verbose == True):
        logging.basicConfig(level=logging.INFO,
                            format='%(asctime)s - %(levelname)s - %(message)s')
    elif (args.verboseFile == True):
        logging.basicConfig(filename='./logFile.log',
                            level=logging.INFO,
                            format='%(asctime)s - %(levelname)s - %(message)s')
    else:
        logging.disable(level=logging.INFO)
    '''
    args = parser.parse_args()
    print(args)

    rawLogLevel = 2 + (args.verbose or 0) - (args.quiet or 0)
    if rawLogLevel <= 0: 
        logLevel = logging.CRITICAL
    elif rawLogLevel == 1:
        logLevel = logging.ERROR
    elif rawLogLevel == 2:     # default
        logLevel = logging.WARNING
    elif rawLogLevel == 3: 
        logLevel = logging.INFO
    else:         
        logLevel = logging.DEBUG

    logging.basicConfig(level=logLevel, format='%(asctime)s - %(levelname)s - %(message)s')
    '''

    return (allArtists, allAlbums, allSongs, artistID, albumID, songID,
            fieldnames)
Example #40
    def tearDown(self):
        # re-enable logging
        logging.disable(logging.DEBUG)
Example #41
    def tearDown(self):
        logging.disable(logging.NOTSET)
Example #42
import logging, random

logging.basicConfig(level=logging.DEBUG, format='%(levelname)s: %(message)s')
logging.disable(logging.DEBUG)


class Box():
    all_boxes = []

    def __init__(self,
                 on_left=None,
                 on_right=None,
                 on_top=None,
                 on_bottom=None,
                 mark=' '):
        if len(Box.all_boxes) == 9:
            # To delete the items in class attribute all_boxes in the
            # case that a fresh Board is instantiated.
            Box.all_boxes.clear()

        self.on_left = on_left
        self.on_right = on_right
        self.on_top = on_top
        self.on_bottom = on_bottom
        self.mark = mark

        self.all_boxes.append(self)
        logging.debug(self.all_boxes)

    def set_attributes(self, **kwargs):
        """This method is used to set the attributes of a Box class
Example #43
    def setUp(self):
        """Disable logging output messages"""
        logging.disable(logging.ERROR)
Example #44
OIDC_RP_SIGN_ALGO = 'RS256'
OIDC_USERNAME_ALGO = ''
OIDC_RP_SCOPES = 'openid email profile'
LOGOUT_REDIRECT_URL = '/'

OIDC_RP_CLIENT_ID = '1327'
OIDC_RP_CLIENT_SECRET = '1327-secret'

OIDC_OP_AUTHORIZATION_ENDPOINT = "https://example.com/auth"
OIDC_OP_TOKEN_ENDPOINT = "https://example.com/token"
OIDC_OP_USER_ENDPOINT = "https://example.com/me"
OIDC_OP_JWKS_ENDPOINT = "https://example.com/certs"

if TESTING:
	DATABASES['default'] = {'ENGINE': 'django.db.backends.sqlite3'}  # use sqlite to speed tests up
	logging.disable(logging.CRITICAL)  # disable logging, primarily to prevent console spam
	LANGUAGE_CODE = 'en-US'  # force language to be English while testing

# Create a localsettings.py to override settings per machine or user, e.g. for
# development or different settings in deployments using multiple servers.
_LOCAL_SETTINGS_FILENAME = os.path.join(BASE_DIR, "localsettings.py")
if os.path.exists(_LOCAL_SETTINGS_FILENAME):
	with open(_LOCAL_SETTINGS_FILENAME, "rb") as f:
		exec(compile(f.read(), _LOCAL_SETTINGS_FILENAME, 'exec'))
del _LOCAL_SETTINGS_FILENAME

# Django debug toolbar settings
if ENABLE_DEBUG_TOOLBAR:
	INSTALLED_APPS += ['debug_toolbar']
	MIDDLEWARE = ['debug_toolbar.middleware.DebugToolbarMiddleware'] + MIDDLEWARE
	INTERNAL_IPS = ['127.0.0.1']
Example #45
def set_log_level_off():
    """Disabled logging"""
    import logging
    logging.disable(logging.CRITICAL)
Example #46
    def setUp(self):
        logging.disable(logging.CRITICAL)
        time.sleep = mock.Mock()
Example #47
File: tests.py Project: tbson/24ho
    def setUp(self):
        logging.disable(logging.CRITICAL)
        self.client = APIClient()
Example #48
    def setUp(self):
        logging.disable(logging.FATAL)
Example #49
    def tearDownClass(cls):
        logging.disable(logging.NOTSET)
        super(WatchdogBaseTestCase, cls).tearDownClass()
Example #50
    def setUp(self):
        logging.disable(logging.ERROR)
Example #51
    def handle(self, *args, **options):
        if options["verbosity"] < 2:
            logging.disable(logging.WARNING)

        uploader = playlist = None
        if options["playlist"] or options["create_playlist"]:
            if options["rotator_assets"] or options[
                    "prerecorded_broadcast_assets"]:
                print("Can't add that type of asset to a playlist")
                return

            name = options["playlist"] or options["create_playlist"]

            try:
                playlist = Playlist.objects.get(name__iexact=name)
            except Playlist.DoesNotExist:
                if options["playlist"]:
                    print(f"No playlist exists with name {name}. Exiting.")
                    print("Try one of: ")
                    for name in Playlist.objects.values_list(
                            "name", flat=True).order_by("name"):
                        print(f" * {name}")
                else:
                    print(f"Playlist {name} does not exist. Creating it.")
                    playlist = Playlist.objects.create(name=name)

        if options["username"]:
            try:
                uploader = User.objects.get(username=options["username"])
            except User.DoesNotExist:
                print(
                    f'No user exists with username {options["username"]}. Exiting.'
                )
                return

        if options["rotator_assets"]:
            asset_cls = RotatorAsset
        elif options["scheduled_broadcast_assets"]:
            asset_cls = BroadcastAsset
        else:
            asset_cls = AudioAsset

        asset_paths = []

        for path in options["paths"]:
            if path in (".", "imports"):
                path = ""

            imports_root_path = f"{settings.AUDIO_IMPORTS_ROOT}{path}"
            if path.startswith(
                    "imports/") and not os.path.exists(imports_root_path):
                imports_root_path = f'{settings.AUDIO_IMPORTS_ROOT}{path.removeprefix("imports/")}'

            if os.path.isfile(imports_root_path):
                asset_paths.append(imports_root_path)

            elif os.path.isdir(imports_root_path):
                imports_root_path = imports_root_path.removesuffix("/")
                for root, dirs, files in os.walk(imports_root_path):
                    for file in files:
                        full_path = f"{root}/{file}"
                        if os.path.isfile(
                                full_path) and not os.path.islink(full_path):
                            asset_paths.append(full_path)

        asset_paths.sort()

        if asset_paths:
            print(
                f"Found {len(asset_paths)} potential asset files in paths under imports/. Running."
            )
        else:
            print(
                "Found no potential assets found with the supplied paths under imports/. Exiting."
            )
            return

        for path in asset_paths:
            delete_str = "and deleting " if options["delete"] else ""
            print(
                f"Importing {delete_str}{path.removeprefix(settings.AUDIO_IMPORTS_ROOT)}",
                end="",
                flush=True,
            )

            asset = asset_cls(uploader=uploader,
                              file_basename=os.path.basename(path))
            asset.file.save(f"imported/{asset.file_basename}",
                            File(open(path, "rb")),
                            save=False)

            try:
                asset.clean()
            except ValidationError as e:
                print(f"... skipping, validation error: {e}")
            else:
                asset.save()
                if playlist:
                    asset.playlists.add(playlist)
                print("... done!")
            finally:
                if options["delete"]:
                    os.remove(path)
Example #52
    def setUp(self):
        logging.disable(logging.WARNING)
        self.redis_connector = Redis(host='localhost', port=6380)
Example #53
from gevent import monkey
monkey.patch_all()

import logging

from create_app import create_app
"""
runs the flask app
"""
app = create_app('etc.settings')
logging.root.setLevel(logging.DEBUG)
logging.disable(0)
Example #54
    def setUpClass(cls):
        super(WatchdogBaseTestCase, cls).setUpClass()
        logging.disable(logging.ERROR)
Example #55
def setUpModule():  # pylint: disable=invalid-name
    """ Setup to ignore chromecast errors. """
    logging.disable(logging.CRITICAL)
Example #56
#!/usr/bin/python3
import logging

from hotsos.core.config import HotSOSConfig

logging.disable(level=logging.CRITICAL)
log = logging.getLogger()


def setup_logging(debug_mode=False):
    format = ("%(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] "
              "%(message)s")
    log.name = HotSOSConfig.PLUGIN_NAME
    logging.basicConfig(format=format)
    if debug_mode:
        logging.disable(logging.NOTSET)
        log.setLevel(logging.DEBUG)
Example #57
    def setUp(self):
        random.seed(0xdeadbeef)
        self.outdir = tempfile.mkdtemp()
        import logging
        logging.disable(logging.CRITICAL)
Example #58
def disable(level):
    # disables any level leq to :attr:`level`
    logging.disable(level)
    yield
    logging.disable(logging.NOTSET)
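The generator above reads like the body of a context manager: it flips the global disable switch on entry and clears it after the yield. A self-contained sketch assuming it is wrapped with contextlib.contextmanager (the decorator is not shown in the snippet itself), with a try/finally so the switch is cleared even if the block raises:

import contextlib
import logging

@contextlib.contextmanager
def disable(level=logging.CRITICAL):
    # Suppress every record of severity `level` and below inside the block.
    logging.disable(level)
    try:
        yield
    finally:
        logging.disable(logging.NOTSET)

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
with disable(logging.INFO):
    logging.info("suppressed inside the block")
logging.info("visible again after the block")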
Example #59
    def setUp(self) -> None:
        logging.disable()
Example #60
    def tearDown(self):
        shutil.rmtree(self.outdir)
        logging.disable(logging.NOTSET)