Example #1
1
def payment(request):
    """Handle the payment form: process a submitted transaction on POST,
    otherwise display a blank form.

    Args:
        request: the incoming HttpRequest.

    Returns:
        HttpResponseRedirect to the "payments" view after a valid POST,
        or the rendered payment page otherwise.
    """
    # if this is a POST request we need to process the form data
    if request.method == "POST":
        # create a form instance and populate it with data from the request:
        form = TransactionForm(request.POST)
        # check whether it's valid:
        if form.is_valid():
            try:
                # Build the transaction without persisting it yet; the
                # command pipeline decides whether it is committed.
                transaction = form.save(commit=False)
                factory = CommandFactory()
                # Try the transaction, if something goes wrong, an exception will be raised
                _make_transaction(request, transaction, factory)
                # Notify the transaction
                _notify_transaction(transaction, factory)

            except CommandError as ce:
                # BUG FIX: corrected spelling of "unsuccessful" in the
                # user-facing message.
                messages.error(request, ("Transfer unsuccessful: %s" % ce))
            except Exception as e:
                # Unexpected failure: log details, show a generic message.
                logging.error(e)
                messages.error(request, "Something went wrong, please try again")
            # Redirect so a refresh does not re-submit the payment.
            return HttpResponseRedirect(reverse("payments"))

    # if a GET (or any other method) we'll create a blank form
    else:
        form = TransactionForm()

    return render(request, "accounts/payment.html", {"form": form})
Example #2
1
    def file_operation_test(session, guest_cdrom_device, max_times):
        """
        Cdrom file operation test.

        Copies a file off the mounted cdrom, verifies the copy's md5sum
        against the original, removes the copy, then mounts/unmounts the
        cdrom repeatedly to stress the device.

        :param session: guest shell session used to execute the commands.
        :param guest_cdrom_device: cdrom device identifier in the guest.
        :param max_times: upper bound for the mount/unmount loop.
        """
        filename = "new"
        # All command templates come from the enclosing test's `params`.
        mount_point = get_cdrom_mount_point(session, guest_cdrom_device, params)
        mount_cmd = params["mount_cdrom_cmd"] % (guest_cdrom_device, mount_point)
        umount_cmd = params["umount_cdrom_cmd"] % guest_cdrom_device
        src_file = params["src_file"] % (mount_point, filename)
        dst_file = params["dst_file"] % filename
        copy_file_cmd = params["copy_file_cmd"] % (mount_point, filename)
        remove_file_cmd = params["remove_file_cmd"] % filename
        show_mount_cmd = params["show_mount_cmd"]
        md5sum_cmd = params["md5sum_cmd"]

        # Windows auto-mounts the cdrom; an explicit mount is only needed
        # on other OS types.
        if params["os_type"] != "windows":
            error.context("Mounting the cdrom under %s" % mount_point, logging.info)
            session.cmd(mount_cmd, timeout=30)
        error.context("File copying test", logging.info)
        session.cmd(copy_file_cmd)
        # Compare md5sums of the on-disk copy and the cdrom original.
        f1_hash = session.cmd(md5sum_cmd % dst_file).split()[0].strip()
        f2_hash = session.cmd(md5sum_cmd % src_file).split()[0].strip()
        if f1_hash != f2_hash:
            raise error.TestFail("On disk and on cdrom files are different, " "md5 mismatch")
        session.cmd(remove_file_cmd)
        error.context("Mount/Unmount cdrom for %s times" % max_times, logging.info)
        # NOTE(review): range(1, max_times) performs max_times - 1 cycles —
        # confirm whether exactly max_times iterations were intended.
        for _ in range(1, max_times):
            try:
                session.cmd(umount_cmd)
                session.cmd(mount_cmd)
            except aexpect.ShellError, detail:
                # On failure, log current mount state to aid debugging and
                # re-raise so the test fails.
                logging.error("Mount/Unmount fail, detail: '%s'", detail)
                logging.debug(session.cmd(show_mount_cmd))
                raise
Example #3
1
    def Log(self, format_str, *args):
        """Logs the message using the flow's standard logging.

    Args:
      format_str: Format string
      *args: arguments to the format string
    Raises:
      RuntimeError: on parent missing logs_collection
    """
        # Normalize the format string to unicode before interpolation.
        format_str = utils.SmartUnicode(format_str)

        try:
            # The status message is always in unicode
            status = format_str % args
        except TypeError:
            # Mismatched format arguments: fall back to the raw string
            # rather than failing the flow over a bad log call.
            logging.error("Tried to log a format string with the wrong number " "of arguments: %s", format_str)
            status = format_str

        logging.info("%s: %s", self.session_id, status)

        self.SetStatus(utils.SmartUnicode(status))

        # Mirror the message into the flow's persistent logs collection.
        with self.OpenLogsCollection(self.args.logs_collection_urn, mode="w") as logs_collection:
            logs_collection.Add(
                rdf_flows.FlowLog(
                    client_id=self.args.client_id,
                    urn=self.session_id,
                    flow_name=self.flow_obj.__class__.__name__,
                    log_message=status,
                )
            )
Example #4
1
 def _resume(self, data, reto=None):
     """ resume a party line connection after reboot.

     :param data: persisted state dict with a "partyline" list of
         connection descriptors (botname, fileno, nick, userhost,
         channel, silent).
     :param reto: optional flag; when set, tell each resumed nick that
         the reboot finished.
     """
     fleet = getfleet()
     for i in data["partyline"]:
         logging.warn("partyline - resuming %s" % i)
         bot = fleet.byname(i["botname"])
         if not bot:
             # BUG FIX: the original checked `if not bot` twice (once with a
             # vague message, once after the socket was already created);
             # check once, up front, with the informative message.
             logging.error("partyline - can't find %s bot in fleet" % i["botname"])
             continue
         # Recreate the socket object from the inherited file descriptor.
         sock = socket.fromfd(i["fileno"], socket.AF_INET, socket.SOCK_STREAM)
         sock.setblocking(1)
         nick = i["nick"]
         userhost = i["userhost"]
         channel = i["channel"]
         self.socks.append(
             {
                 "bot": bot,
                 "sock": sock,
                 "nick": nick,
                 "userhost": userhost,
                 "channel": channel,
                 "silent": i["silent"],
             }
         )
         # Hand the revived connection back to the bot.
         bot._dccresume(sock, nick, userhost, channel)
         if reto:
             self.say_nick(nick, "rebooting done")
Example #5
1
def _ExtractDefaultMetrics(loading_trace):
    """Extracts all the default metrics from a given trace.

  Args:
    loading_trace: loading_trace.LoadingTrace.

  Returns:
    Dictionary with all trace extracted fields set.
  """
    # (metric name, start event, end event) triples; each metric is the
    # elapsed msec between the two named page events.
    END_REQUEST_EVENTS = [
        ("first_layout", "requestStart", "firstLayout"),
        ("first_contentful_paint", "requestStart", "firstContentfulPaint"),
        ("total_load", "requestStart", "loadEventEnd"),
        ("js_onload_event", "loadEventStart", "loadEventEnd"),
    ]
    tracked = _GetWebPageTrackedEvents(loading_trace.tracing_track)
    extracted = {}
    for metric_name, begin_name, finish_name in END_REQUEST_EVENTS:
        try:
            begin_msec = tracked[begin_name].start_msec
            finish_msec = tracked[finish_name].start_msec
            extracted[metric_name] = finish_msec - begin_msec
        except KeyError as error:
            # A missing event makes the metric uncomputable; record the
            # failure sentinel instead.
            logging.error("could not extract metric %s: missing trace event: %s" % (metric_name, str(error)))
            extracted[metric_name] = _FAILED_CSV_VALUE
    return extracted
    def resend_waveform(self, channel, w=None, m1=None, m2=None, clock=None):
        """
        Resends the last sent waveform for the designated channel
        Overwrites only the parameters specified

        Input: (mandatory)
            channel (int) : 1 to 4, the number of the designated channel

        Input: (optional)
            w (float[numpoints]) : waveform
            m1 (int[numpoints])  : marker1
            m2 (int[numpoints])  : marker2
            clock (int) : frequency

        Output:
            None
        """
        filename = self._values["recent_channel_%s" % channel]["filename"]
        logging.debug(__name__ + " : Resending %s to channel %s" % (filename, channel))

        # BUG FIX: the previous signature used mutable default arguments
        # (w=[], m1=[], ...), which are shared across calls. None is used
        # as the sentinel now; an explicitly-passed empty list still falls
        # back to the cached value, preserving the old behavior.
        if w is None or w == []:
            w = self._values["recent_channel_%s" % channel]["w"]
        if m1 is None or m1 == []:
            m1 = self._values["recent_channel_%s" % channel]["m1"]
        if m2 is None or m2 == []:
            m2 = self._values["recent_channel_%s" % channel]["m2"]
        if clock is None or clock == []:
            clock = self._values["recent_channel_%s" % channel]["clock"]

        # Warn (but proceed) when any waveform length disagrees with the
        # configured number of points.
        if not ((len(w) == self._numpoints) and (len(m1) == self._numpoints) and (len(m2) == self._numpoints)):
            logging.error(__name__ + " : one (or more) lengths of waveforms do not match with numpoints")

        self.send_waveform(w, m1, m2, filename, clock)
        self.set_filename(filename, channel)
Example #7
1
def delete_bugex_result(request, delete_token):
    """Delete the results data for a specific user request.

    Args:
        request: the incoming HttpRequest.
        delete_token: token identifying the UserRequest (404 when unknown).

    Returns:
        A rendered status page describing the outcome.
    """

    # get user request
    user_request = get_object_or_404(UserRequest, delete_token=delete_token)

    # check if this request already has been deleted
    if user_request.status == UserRequestStatus.DELETED:
        message = "This BugEx result has already been deleted."
    else:
        try:
            # Deleting underlying archive file
            user_request.codearchive.archive_file.delete()
            # Deleting BugExResult, CodeArchive, all Facts, all SourceFiles,
            # all ClassFiles, all Folders, all Lines
            if user_request.result:
                # only try to delete result, if there actually is one
                user_request.result.delete()
                user_request.result = None  # manually set relation to null
                user_request.save()
            user_request.codearchive.delete()
            # Delete the entire directory where the archive file was stored
            shutil.rmtree(user_request.folder)
            # Set user request status to DELETED
            user_request.update_status(UserRequestStatus.DELETED)

            message = "Your BugEx result has been deleted successfully."

        except Exception as e:
            # something unexpected, we have to log this
            message = "Sorry, we could not delete this result."
            logging.error(message + " Exception: " + str(e))

    # render status page with appropriate content
    return render(request, "bugex_webapp/status.html", {"message": message, "pagetitle": "Delete result"})
    def parse_one(item, preferences_map):
        """Resolve the parser class for `item`, instantiate it with
        `preferences_map` and run it.

        Returns 1 when no parser class could be loaded; otherwise returns
        None (errors during parsing are logged, not propagated).
        """
        def get_parser_class_for(item_name):
            """
            Thanks to http://stackoverflow.com/a/452981

            Builds the dotted path idp.parser.<item>parser.<Item>Parser and
            walks attributes from the imported root package to resolve the
            class object.
            """
            kls = "idp.parser." + item_name + "parser." + item_name.title() + "Parser"
            parts = kls.split(".")
            module = ".".join(parts[:-1])
            # __import__ returns the top-level package; getattr walks down.
            m = __import__(module)
            for comp in parts[1:]:
                m = getattr(m, comp)
            return m

        try:
            ParserClass = get_parser_class_for(item)
        except Exception as e:
            logging.error("No parser found for: " + item + "\n\tException is: " + str(e))
            return 1
        logging.info("___________________")
        logging.info("Parsing " + item + "...")
        parser = ParserClass(preferences_map)
        try:
            parser.start_processing()
        except Exception as e:
            # Keep going even if one parser fails; print the traceback for
            # diagnosis.
            logging.error("Exception occured while parsing item: " + item + "\n\tException is: " + str(e))
            traceback.print_exc()
        logging.info("Parsing finished for item: " + item)
Example #9
0
    def addInfoToOutputFilesForStep(self, stepName, step):
        """
        _addInfoToOutputFilesForStep_

        Add the information missing from output files to the files
        This requires the WMStep to be passed in

        Returns None both on success and when the step report or a file
        entry cannot be found.
        """

        stepReport = self.retrieveStep(step=stepName)
        fileInfo = FileInfo()

        if not stepReport:
            return None

        # BUG FIX: getattr(..., None) could return None, which would raise
        # TypeError in the loop below; default to an empty list instead.
        listOfModules = getattr(stepReport, "outputModules", None) or []

        for module in listOfModules:
            outputMod = getattr(stepReport.output, module, None)
            for n in range(outputMod.files.fileCount):
                # Renamed local from `file` to avoid shadowing the builtin.
                fileRef = getattr(outputMod.files, "file%i" % (n), None)
                if not fileRef:
                    msg = "Could not find file%i in module" % (n)
                    logging.error(msg)
                    return None
                fileInfo(fileReport=fileRef, step=step, outputModule=module)

        return
Example #10
0
 def import_custom_report_metadata(self, ignore_cache=False, collection=None):
     """
     Load custom report metadata from source specified for the collection (in local_config)

     ignore_cache: when True, bypass the cache when fetching the data.
     collection: collection whose CUSTOM_REPORTS source is used.

     Returns the count from import_data_to_ndb, or None when no custom
     reports source is configured for the collection. Raises Exception
     when the source yields no rows.
     """
     custom_reports_source = self.get_collection_metadata("CUSTOM_REPORTS", collection=collection)
     if not custom_reports_source:
         logging.error("no custom reports available for collection %s" % collection)
         return
     crdata = self.get_data(custom_reports_source, ignore_cache=ignore_cache)["data"]
     logging.info("crdata = %s" % crdata)
     if len(crdata) == 0:
         msg = (
             "[custom_reports.import_custom_report_metadata] ERROR!  No data loaded for custom report from %s"
             % custom_reports_source
         )
         logging.error(msg)
         raise Exception(msg)
     # Overwrite any existing CustomReport entities for this collection.
     cnt = self.import_data_to_ndb(
         crdata,
         "CustomReport",
         overwrite=True,
         overwrite_query=[CustomReport.collection == collection],
         extra_params={"collection": collection},
         date_fields=["date"],
     )
     return cnt
Example #11
0
def to_cygwin_path(path):
    """Converts an absolute standard Windows path to a cygwin path."""
    # A well-formed win32 absolute path starts with "<drive letter>:".
    has_drive = len(path) >= 2 and path[1] == ":"
    if not has_drive:
        # TODO(maruel): Accept \\?\ and \??\ if necessary.
        logging.error("%s is not a win32 path", path)
        return None
    drive = path[0].lower()
    # Skip the "X:\" prefix and flip the separators.
    tail = path[3:].replace("\\", "/")
    return "/cygdrive/%s/%s" % (drive, tail)
Example #12
0
    def SaveCoverageData(self, test):
        """Saves the Emma coverage data before it's overwritten by the next test.

    Args:
      test: the test whose coverage data is collected.
    """
        # No-op unless coverage collection is enabled.
        if not self.coverage:
            return
        if not self.adb.Adb().Pull(TestRunner._COVERAGE_RESULT_PATH, constants.CHROME_DIR):
            # NOTE(review): the pull failure is only logged; execution
            # continues and may operate on a stale/missing local file —
            # confirm that is intended.
            logging.error(
                "ERROR: Unable to find file " + TestRunner._COVERAGE_RESULT_PATH + " on the device for test " + test
            )
        pulled_coverage_file = os.path.join(constants.CHROME_DIR, TestRunner._COVERAGE_FILENAME)
        # Merge into the accumulated coverage file if one exists;
        # otherwise seed it with the freshly pulled file.
        if os.path.exists(TestRunner._COVERAGE_MERGED_FILENAME):
            cmd = [
                "java",
                "-classpath",
                TestRunner._EMMA_JAR,
                "emma",
                "merge",
                "-in",
                pulled_coverage_file,
                "-in",
                TestRunner._COVERAGE_MERGED_FILENAME,
                "-out",
                TestRunner._COVERAGE_MERGED_FILENAME,
            ]
            cmd_helper.RunCmd(cmd)
        else:
            shutil.copy(pulled_coverage_file, TestRunner._COVERAGE_MERGED_FILENAME)
        # The per-test file is consumed; only the merged file is kept.
        os.remove(pulled_coverage_file)
Example #13
0
    def _mal_api_request(self, url, **kwargs):
        """Issue an authenticated XML request against the MAL API."""
        # Both credentials must be present in the config before we can talk
        # to MAL at all.
        has_credentials = "username" in self.config and "password" in self.config
        if not has_credentials:
            error("Username and password required for MAL requests")
            return None

        credentials = (self.config["username"], self.config["password"])
        return self.request(url, auth=credentials, xml=True, **kwargs)
    def set_hardware(self):
        """
        Retrieves the hardware configuration parsing it from the Config Parser as necessary.

        Populates self.hardware (a Section) with devices, gpio pins, pin
        modes, pwm range and active-low mode, and sets self.gpio_len.
        """
        hrdwr = dict()
        devices = dict()
        try:
            devices = json.loads(self.config.get("hardware", "devices"))
        except Exception as error:
            # Fall back to an empty device map when the option is missing
            # or malformed; the problem is logged for diagnosis.
            logging.error("devices not defined or not in JSON format." + str(error))
        hrdwr["devices"] = devices

        # Convert device setting strings to ints; string values are parsed
        # as hexadecimal (base 16). (Python 2: dict.iteritems.)
        for device_type, settings in hrdwr["devices"].iteritems():
            for count in range(len(settings)):
                for k, v in settings[count].iteritems():
                    settings[count][k] = v if not isinstance(v, str) else int(v, 16)

        hrdwr["gpio_pins"] = map(int, self.config.get("hardware", "gpio_pins").split(","))
        self.gpio_len = len(hrdwr["gpio_pins"])

        hrdwr["gpio_len"] = len(hrdwr["gpio_pins"])

        # A single configured pin mode is replicated across every gpio pin.
        temp = self.config.get("hardware", "pin_modes").split(",")
        if len(temp) != 1:
            hrdwr["pin_modes"] = temp
        else:
            hrdwr["pin_modes"] = [temp[0] for _ in range(self.gpio_len)]

        hrdwr["pwm_range"] = int(self.config.get("hardware", "pwm_range"))
        hrdwr["active_low_mode"] = self.config.getboolean("hardware", "active_low_mode")

        self.hardware = Section(hrdwr)
Example #15
0
 def log(self, msg, debug=False):
     """Write msg to the logfile (if open) and echo it via logging.

     Non-debug messages are echoed at error level when screen logging is
     enabled; everything else goes to the debug level.
     """
     if self._logfile:
         self._logfile.write(msg + "\n")
     screen_worthy = not debug and LOG_TO_SCREEN
     if screen_worthy:
         logging.error(msg)
     else:
         logging.debug(msg)
Example #16
0
def check_input_file(parser, options, attr_name, config):
    """f(OptionParser, string, string, {}) -> None

	Checks if the filename is a valid file. If not, the process quits. Pass the
	index string of the koma panel to show the user error.

	The config parameter will be used as the default dictionary to grab paths
	from in case the normal parameter doesn't work.
	"""
    # Try the param filename.
    filename = getattr(options, attr_name)
    if filename and os.path.isfile(filename):
        return

    # Try the configuration file.
    # BUG FIX: the original bare `except: pass` also swallowed SystemExit
    # and KeyboardInterrupt; only a missing key is expected here.
    try:
        filename = config[attr_name]
    except KeyError:
        filename = ""

    if filename and os.path.isfile(filename):
        setattr(options, attr_name, filename)
        return

    # Neither source produced a usable file: report and quit.
    logging.error("Specify a valid image for the %s koma panel", attr_name)
    parser.print_help()
    sys.exit(1)
Example #17
0
def get_options_from_ini():
    """f() -> {}

	Tries to read the INI_FILE and returns the sections as python dictionaries.
	If the INI_FILE doesn't exist, it will be populated with defaults for the
	user.
	"""
    ret = {}
    if not os.path.isfile(INI_FILE):
        logging.info("No %r file, creating from template", INI_FILE)
        with closing(open(INI_FILE, "wt")) as out:
            out.write(INI_TEMPLATE)
        return ret

    try:
        c = ConfigParser.SafeConfigParser()
        c.read(INI_FILE)
        # Retrieve configuration sets along with expanded paths.
        for section in c.sections():
            ret[section] = dict([(x, os.path.expanduser(y)) for x, y in c.items(section)])
            # Convert width option to integer.
            if KEY_WIDTH in ret[section]:
                try:
                    ret[section][KEY_WIDTH] = int(ret[section][KEY_WIDTH])
                except ValueError, e:
                    logging.error("Couldn't parse int for [%s]%s", section, KEY_WIDTH)
                    del ret[section][KEY_WIDTH]
    except ConfigParser.NoSectionError, e:
        pass
    def Execute(params, location=None):
        """Executes the closure-builder with the given parameters.

    Args:
      params: Arguments supplied to the builder. (list of strings)
      location: Optional location to search for the tool. (string)

    Raises:
      ClosureBuilderError: Execution fails.
    """
        if not ClosureBuilder.IsInstalled(location):
            logging.error("Compile failed; closure-builder not installed.")
            raise ClosureBuilderError

        # Resolve all tool paths relative to the install location.
        base_dir = location or ""
        builder_path = os.path.join(base_dir, BUILDER_PATH_NAME)
        builder_jar = os.path.join(builder_path, BUILDER_JAR_NAME)
        library_path = os.path.join(builder_path, LIBRARY_PATH_NAME)
        builder = os.path.join(library_path, "closure", "bin", "build", "closurebuilder.py")
        params.extend(["--root=%s" % library_path, "--builder_jar=%s" % builder_jar])
        command = " ".join([sys.executable, builder] + params)

        try:
            base.Tool._ExecuteCommand(command)
        except base.ToolError:
            # Surface the exact command line so failures can be reproduced.
            logging.error("closure-builder failed to execute:\n    %s." % command)
            raise ClosureBuilderError
Example #19
0
File: dist.py Project: louiz/botan
    def content_rewriter():
        """Generator that yields `contents` line by line, substituting the
        release metadata placeholders in botan_version.py.

        Closes over contents, target_version, version_re, version_parts,
        rev_id, rel_date and args from the enclosing scope.
        """
        for line in contents:

            if target_version != "HEAD":
                match = version_re.match(line)
                if match:
                    # Check that each version component in the file agrees
                    # with the version encoded in the release tag.
                    name_to_idx = {"major": 0, "minor": 1, "patch": 2}
                    in_tag = int(version_parts[name_to_idx[match.group(1)]])
                    in_file = int(match.group(2))

                    if in_tag != in_file:
                        logging.error(
                            'Version number part "%s" in botan_version.py does not match tag %s'
                            % (match.group(1), target_version)
                        )
                        raise Exception("Bad botan_version.py")

            # Substitute the release placeholders; all other lines pass
            # through unchanged.
            if line == "release_vc_rev = None\n":
                yield "release_vc_rev = 'git:%s'\n" % (rev_id)
            elif line == "release_datestamp = 0\n":
                yield "release_datestamp = %d\n" % (rel_date)
            elif line == "release_type = 'unreleased'\n":
                if args[0] == "snapshot":
                    yield "release_type = 'snapshot'\n"
                else:
                    yield "release_type = 'released'\n"
            else:
                yield line
    def get(self):
        """The get handler method is called from a cron job.

    It expects no parameters and has no output. It checks all current bisect try
    jobs and send comments to an issue on the issue tracker if a bisect job has
    completed.
    """
        # Build an issue-tracker client using the default Rietveld config.
        credentials = rietveld_service.Credentials(
            rietveld_service.GetDefaultRietveldConfig(), rietveld_service.PROJECTHOSTING_SCOPE
        )
        issue_tracker = issue_tracker_service.IssueTrackerService(additional_credentials=credentials)

        # Only jobs still marked "started" need to be checked.
        jobs_to_check = try_job.TryJob.query(try_job.TryJob.status == "started").fetch()
        all_successful = True
        for job in jobs_to_check:
            try:
                if job.use_buildbucket:
                    logging.info(
                        "Checking job %s with Buildbucket job ID %s.",
                        job.key.id(),
                        getattr(job, "buildbucket_job_id", None),
                    )
                else:
                    logging.info(
                        "Checking job %s with Rietveld issue ID %s.",
                        job.key.id(),
                        getattr(job, "rietveld_issue_id", None),
                    )
                _CheckJob(job, issue_tracker)
            except Exception as e:  # pylint: disable=broad-except
                # One failing job must not prevent the rest from being
                # checked; record the failure and continue.
                logging.error("Caught Exception %s: %s\n%s", type(e).__name__, e, traceback.format_exc())
                all_successful = False
        # Tick the monitoring metric only when every job checked cleanly.
        if all_successful:
            utils.TickMonitoringCustomMetric("UpdateBugWithResults")
def configure_network(hostname, interfaces):
    """Regenerate network configuration files and apply the hostname.

    hostname: hostname to write into the network/hosts files.
    interfaces: interface descriptions used to build the config files.

    Returns (500, message) when sethostname fails.
    NOTE(review): no explicit success return is visible in this chunk —
    confirm whether a trailing success value was truncated.
    """
    if os.path.exists(NETWORK_FILE):
        infile = open(NETWORK_FILE)
    else:
        # No existing file: start from an empty buffer.
        infile = StringIO()

    update_files, remove_files = process_interface_files(infile, interfaces)

    # Generate new hostname file
    infile = StringIO(update_files.get(NETWORK_FILE, infile))

    data = get_hostname_file(infile, hostname)
    update_files[NETWORK_FILE] = data

    # Generate new /etc/hosts file
    filepath, data = commands.network.get_etc_hosts(interfaces, hostname)
    update_files[filepath] = data

    # Write out new files
    commands.network.update_files(update_files, remove_files)

    pipe = subprocess.PIPE

    # Set hostname
    try:
        commands.network.sethostname(hostname)
    except Exception, e:
        logging.error("Couldn't sethostname(): %s" % str(e))
        return (500, "Couldn't set hostname: %s" % str(e))
Example #22
0
 def send(self, cmd_name, args=None):
     """Encode cmd_name/args as a json message and send it over zmq.

     Returns a list whose first element is a status code: 200 on success,
     400 on failure (with an error message appended), 408 when acks are
     pending before the attempt.
     """
     # BUG FIX: `args=[]` was a shared mutable default; use a None
     # sentinel and substitute a fresh list per call.
     if args is None:
         args = []
     logging.debug("Will try to send.")
     ans = [400]
     if self.pending_acks > 0:
         # NOTE(review): despite the "will not send" message, the original
         # code proceeded to send anyway; behavior kept as-is — confirm.
         logging.warn("Acks pending, will not send.")
         ans = [408]
     try:
         logging.debug("Creating json message.")
         msg = shared.json_client_enc(cmd_name, args)
         logging.debug(json.dumps(msg, indent=2))
         try:
             logging.debug("Will zmq send()")
             self.sender.send(msg, copy=True)
             self.pending_acks += 1
             ans = [200]
             logging.debug("zmq send() ok")
         except Exception as e:
             ans = [400]
             err_msg = 'Failed to send "%s" via zmq! Exception:%s' % (msg, e.__str__())
             ans.append(err_msg)
             logging.error(err_msg)
     except Exception as e:
         ans = [400]
         err_msg = "Failed encode json msg. Exception:%s" % e.__str__()
         ans.append(err_msg)
         logging.error(err_msg)
     return ans
    def set_backup(self):
        """ Set the current router to backup """
        # Only meaningful for redundant router setups.
        if not self.cl.is_redundant():
            logging.error("Set backup called on non-redundant router")
            return

        self.set_lock()
        logging.debug("Setting router to backup")
        # Bring every public interface down, touching each device once.
        ads = [o for o in self.address.get_ips() if o.is_public()]
        dev = ""
        for o in ads:
            if dev == o.get_device():
                continue
            logging.info("Bringing public interface %s down" % o.get_device())
            cmd2 = "ip link set %s down" % o.get_device()
            CsHelper.execute(cmd2)
            dev = o.get_device()
        # Switch conntrackd to backup (-d) mode and stop VPN services.
        cmd = "%s -C %s" % (self.CONNTRACKD_BIN, self.CONNTRACKD_CONF)
        CsHelper.execute("%s -d" % cmd)
        CsHelper.service("ipsec", "stop")
        CsHelper.service("xl2tpd", "stop")
        # Stop the password service on every VRRP gateway address.
        ads = [o for o in self.address.get_ips() if o.needs_vrrp()]
        for o in ads:
            CsPasswdSvc(o.get_gateway()).stop()
        CsHelper.service("dnsmasq", "stop")

        # Persist the non-master state before releasing the lock.
        self.cl.set_master_state(False)
        self.cl.save()
        self.release_lock()
        logging.info("Router switched to backup mode")
Example #24
0
 def recv(self, timeout=5000):
     """Poll the zmq socket for up to `timeout` ms and decode the reply.

     Returns [407, reason] on timeout, [cmd_res, cmd_code, data, dec] on
     a decoded answer, or an empty list on unexpected errors.
     """
     ans = []
     try:
         socks = dict(self.poller.poll(timeout))
         if not socks or not socks.get(self.sender, False):
             # did not get answer from server
             logging.error("Timed out waiting for answer from server")
             ans = [407]
             ans.append(shared.r_codes[ans[0]])
             return ans
         logging.debug("Will recv()")
         ack = self.sender.recv()
         logging.debug("Raw data:%s" % ack)
         # One outstanding ack has now been answered.
         self.pending_acks -= 1
         try:
             cmd_code, cmd_res, data, dec = shared.json_client_dec(ack)
             ans = [cmd_res, cmd_code, data, dec]
             if logging.root.level == logging.DEBUG:
                 logging.debug("will json.dumps to print data on screen")
                 print "DECODED:", json.dumps(dec, indent=2)
                 if data:
                     print "DATA:", data
         except:
             # NOTE(review): bare except hides the decode error type —
             # consider narrowing; behavior left unchanged here.
             print "ERROR: failed to decode json. Raw data:\n%s" % ack
     except Exception as e:
         print e
         logging.error("recv(): %s" % e.__str__())
     return ans
Example #25
0
    def cleanup_ports(self):
        """
        Clean state of all ports and set port to default state.
        Default state:
           No data on port or in port buffer.
           Read mode = blocking.
        """
        # Check if python is still alive
        match, tmp = self._cmd("is_alive()", 10)
        if (match is None) or (match != 0):
            # The guest-side worker is unresponsive; attempt recovery.
            logging.error("Python died/is stuck/have remaining threads")
            logging.debug(tmp)
            try:
                self.vm.verify_kernel_crash()

                match, tmp = self._cmd("guest_exit()", 10)
                if (match is None) or (match == 0):
                    # Worker did not exit cleanly: reconnect the session.
                    self.session.close()
                    self.session = utils_test.wait_for_login(self.vm)
                self.cmd(
                    "killall -9 python " "&& echo -n PASS: python killed" "|| echo -n PASS: python was already dead", 10
                )

                # Restart the worker and reinitialize guest port state.
                self._execute_worker()
                self._init_guest()
                self._cleanup_ports()

            except Exception, inst:
                # Recovery failed: the driver is considered unrecoverable.
                logging.error(inst)
                raise VirtioPortFatalException(
                    "virtio-console driver is " "irreparably blocked, further tests might FAIL."
                )
def get_cache_stats():
    """Return a dictionary containing information on the current cache stats.

    This only supports memcache.

    Returns:
        A list of (hostname, stats_dict) tuples, or None when no memcached
        hosts are configured. Unreachable hosts are silently skipped.
    """
    hostnames = get_memcached_hosts()

    if not hostnames:
        return None

    all_stats = []

    for hostname in hostnames:
        try:
            # "unix:<path>" selects a unix domain socket; otherwise the
            # entry is a "<host>:<port>" TCP endpoint.
            host, port = hostname.split(":")
            if host == "unix":
                socket_af = socket.AF_UNIX
                connect_param = port
            else:
                socket_af = socket.AF_INET
                connect_param = (host, int(port))

        except ValueError:
            logging.error('Invalid cache hostname "%s"' % hostname)
            continue

        s = socket.socket(socket_af, socket.SOCK_STREAM)
        try:
            s.connect(connect_param)
        except socket.error:
            # Host is down: skip it rather than failing the whole report.
            s.close()
            continue

        # Issue the memcached text-protocol "stats" command.
        # NOTE(review): assumes the entire response fits in one 2048-byte
        # recv; a larger reply would be truncated — confirm.
        s.send(b"stats\r\n")
        data = s.recv(2048).decode("ascii")
        s.close()

        stats = {}

        # Each response line is "STAT <name> <value>"; keep the value as
        # an int when it parses, a string otherwise.
        for line in data.splitlines():
            info = line.split(" ")

            if info[0] == "STAT":
                try:
                    value = int(info[2])
                except ValueError:
                    value = info[2]

                stats[info[1]] = value

        # Derive hit/miss rates, guarding against division by zero.
        if stats["cmd_get"] == 0:
            stats["hit_rate"] = 0
            stats["miss_rate"] = 0
        else:
            stats["hit_rate"] = 100 * stats["get_hits"] / stats["cmd_get"]
            stats["miss_rate"] = 100 * stats["get_misses"] / stats["cmd_get"]

        all_stats.append((hostname, stats))

    return all_stats
Example #27
0
File: main.py Project: ipfire/pbs
    def cleanup_files(self):
        """Remove queued package files from disk and prune any parent
        directories left empty, never escaping PACKAGES_DIR."""
        query = self.db.query("SELECT * FROM queue_delete")

        for row in query:
            if not row.path:
                continue

            path = os.path.join(PACKAGES_DIR, row.path)

            try:
                logging.debug("Removing %s..." % path)
                os.unlink(path)
            except OSError, e:
                # File may already be gone; log and keep going.
                logging.error("Could not remove %s: %s" % (path, e))

            # Walk upwards, removing now-empty parent directories.
            while True:
                path = os.path.dirname(path)

                # Stop if we are running outside of the tree.
                if not path.startswith(PACKAGES_DIR):
                    break

                # If the directory is not empty, we cannot remove it.
                if os.path.exists(path) and os.listdir(path):
                    break

                try:
                    logging.debug("Removing %s..." % path)
                    os.rmdir(path)
                except OSError, e:
                    logging.error("Could not remove %s: %s" % (path, e))
                    break
Example #28
0
def main():
    """f() -> None

	Main entry point of the application.
	"""
    options = process_arguments(sys.argv)
    # Prepare a target size which is taller than the expected width, so
    # -resize only constrains the width.
    target_size = "%dx%d" % (options.strip_width, 10 * options.strip_width)
    # Build the ImageMagick command: each of the four panels is resized,
    # then all are appended vertically into the output strip.
    cmd = [options.convert_path]
    for panel in (options.panels[0], options.panels[1], options.panels[2], options.panels[3]):
        cmd.extend([panel, "-resize", target_size])
    cmd.extend(["-quality", "85", "-append", options.output])
    if run_command(cmd):
        logging.error("Unexpected return value from %r", options.convert_path)
    else:
        logging.info("Done.")
Example #29
0
    def Error(self, backtrace, client_id=None, status=None):
        """Kills this flow with an error.

    Args:
      backtrace: optional traceback string recorded with the error.
      client_id: client the flow ran on; defaults to self.args.client_id.
      status: optional GrrStatus status value; GENERIC_ERROR when None.
    """
        client_id = client_id or self.args.client_id
        if self.IsRunning():
            # Set an error status
            reply = rdf_flows.GrrStatus()
            if status is None:
                reply.status = rdf_flows.GrrStatus.ReturnedStatus.GENERIC_ERROR
            else:
                reply.status = status

            if backtrace:
                reply.error_message = backtrace

            # Terminate first so the flow stops scheduling work, then mark
            # the context as errored.
            self.Terminate(status=reply)

            self.context.state = rdf_flows.Flow.State.ERROR

            if backtrace:
                logging.error("Error in flow %s (%s). Trace: %s", self.session_id, client_id, backtrace)
                self.context.backtrace = backtrace
            else:
                logging.error("Error in flow %s (%s).", self.session_id, client_id)

            # Notify the user/UI that the flow terminated due to an error.
            self.Notify("FlowStatus", client_id, "Flow (%s) terminated due to error" % self.session_id)
Example #30
0
 def handle_data(self, data):
     """Extract split dates/ratios from <td> cell text and emit csv rows
     via the module-level _splt_writer/_splt_row globals."""
     # Set the reg_exps
     tag = self.get_starttag_text()
     if tag is None:
         return None
     logging.debug('tag={0}, line="{1}"'.format(tag, data))
     # Only data inside <td> cells is of interest.
     if not tag.startswith("<td"):
         return None
     global _splt_row
     global _splt_writer
     for desc, rec in _compiled_reg_exps:
         ss = rec.search(data)
         if ss is None:
             continue
         if desc == "date":
             # Normalize "Mon DD YYYY" to ISO "YYYY-MM-DD".
             date = datetime.datetime.strptime(" ".join(ss.groups()), "%b %d %Y")
             date = datetime.datetime.strftime(date, "%Y-%m-%d")
             _splt_row["Date"] = date
         elif desc == "splt":
             denom = float(ss.group("denom"))
             if denom <= 0.0:
                 # BUG FIX: the original logged the bad split and then
                 # divided anyway, raising ZeroDivisionError when the
                 # denominator was 0; skip the bad row instead.
                 logging.error("Found bad split: line={0}".format(data))
                 continue
             splt = float(ss.group("numer")) / denom
             _splt_row["Splits"] = splt
             _splt_writer.writerow(_splt_row)