示例#1
0
def signal_handler(signum, frame):
    """Handle SIGTERM: log a translated notice and exit with code 143."""
    LOGGER.info(strip_html(translate_object("sigterm")))
    # 143 = 128 + 15 (SIGTERM), the conventional exit code for this signal.
    exit_handler(143, "signal_handler called")
示例#2
0
    def wait_sleep(self):
        """Sleep for a randomly chosen duration between 60 and 180 minutes.

        Feeds the systemd watchdog before sleeping, records the pre-sleep
        unixtime in ``self.unixtime_before_sleep``, and performs the actual
        sleep via an external ``sleep`` process (stored in the module-level
        ``sleep_process`` global so it can be interrupted from elsewhere).
        """
        # If we make the sleep period configurable one day, we need to
        # advise the user to also adjust WatchdogSec= in sdwdate's systemd
        # unit file.
        # minimum sleep time: 60 minutes
        sleep_time_minimum_seconds = 60 * 60
        # maximum sleep time: 180 minutes
        sleep_time_maximum_seconds = 180 * 60

        # Deliberately secrets (CSPRNG), not random.randint, so the sleep
        # schedule is unpredictable. secrets.choice accepts any sequence,
        # including range(), so there is no need to materialize a list.
        self.sleep_time_seconds = secrets.choice(
            range(sleep_time_minimum_seconds, sleep_time_maximum_seconds))

        sleep_time_minutes = self.sleep_time_seconds / 60
        sleep_time_minutes_rounded = round(sleep_time_minutes)

        message = (translate_object("sleeping") +
                   str(sleep_time_minutes_rounded) +
                   translate_object("minutes"))
        stripped_message = strip_html(message)
        LOGGER.info(stripped_message)

        SDNOTIFY_OBJECT.notify("WATCHDOG=1")

        # CSPRNG replacement for randint(0, self.range_nanoseconds).
        nanoseconds = secrets.choice(self.range_nanoseconds)

        if self.sleep_time_seconds >= 10:
            # Touch the marker file. A context manager guarantees the file
            # handle is closed even if an exception interrupts us.
            with open(sleep_long_file_path, "w"):
                pass

        self.unixtime_before_sleep = int(time.time())

        # Using sh sleep in place of
        # python's time.sleep(self.sleep_time_seconds).
        # The latter uses the system clock for its inactive state time.
        # It becomes utterly confused when sclockadj is running.
        sleep_cmd = ("sleep" + " " + str(self.sleep_time_seconds) + "." +
                     str(nanoseconds))
        message = "running command: " + sleep_cmd
        LOGGER.info(message)

        # Avoid Popen shell=True.
        sleep_cmd = shlex.split(sleep_cmd)

        global sleep_process
        sleep_process = Popen(sleep_cmd)
        sleep_process.wait()
示例#3
0
    def sdwdate_fetch_loop(self) -> str:
        """
        Check remotes.
        Pick a random url in each pool, check the returned stdout.
        Append valid urls if time is returned, otherwise restart a cycle
        with a new random url, until every pool has a time value.

        Returns a status string: "success" when every pool obtained a
        valid time, "error" otherwise (a pool exhausted its urls, no urls
        returned values, a general timeout, or too many failed urls).
        As a side effect, writes the status/icon via write_status() and
        fills the per-url bookkeeping dicts and lists on self.
        """
        fetching_msg = translate_object("fetching")
        restricted_msg = translate_object("restricted")

        # message = "restricted_msg: " + restricted_msg
        # LOGGER.info(message)
        # restricted_msg: Initial time fetching in progress...

        # Marker file; presumably created after the first-ever successful
        # fetch — TODO confirm where it is written.
        status_first_success = os.path.exists(status_first_success_path)

        if not status_first_success:
            icon = "busy"
            write_status(icon, restricted_msg)
            message = strip_html(restricted_msg)
            LOGGER.info(message)
        else:
            icon = "success"
            write_status(icon, fetching_msg)
            message = strip_html(fetching_msg)
            LOGGER.info(message)

        # Main fetch cycle: repeats until every pool is done or an error
        # path returns early.
        while True:
            self.iteration += 1
            message = "Running sdwdate fetch loop. iteration: %s" % self.iteration
            LOGGER.info(message)

            # Clear the lists.
            self.list_of_urls_returned[:] = []
            self.list_of_url_random_requested[:] = []

            # Pick one not-yet-tried url per unfinished pool, using the
            # secrets CSPRNG (see the commented-out randrange it replaced).
            for pool in self.pools:
                if pool.done:
                    continue
                pool_size = len(pool.url)
                while True:
                    # url_index = random.randrange(0, pool_size)
                    values = list(range(0, pool_size))
                    url_index = secrets.choice(values)
                    # print("pool_size: " + str(pool_size))
                    if url_index not in pool.already_picked_index:
                        # print("AAA str(len(pool.already_picked_index)): " \
                        # + \
                        # str(len(pool.already_picked_index)))
                        pool.already_picked_index.append(url_index)
                        break
                    # Duplicate pick: if every index has been tried already,
                    # this pool cannot yield a time — give up with "error".
                    if len(pool.already_picked_index) >= pool_size:
                        # print("BBB str(len(pool.already_picked_index)): " \
                        # + \
                        # str(len(pool.already_picked_index)))
                        pool_number = self.pools.index(pool)
                        message = ("pool " + str(pool_number) + ": " +
                                   translate_object("no_valid_time") +
                                   translate_object("restart"))
                        stripped_message = strip_html(message)
                        icon = "error"
                        status = "error"
                        LOGGER.error(stripped_message)
                        write_status(icon, message)
                        return status
                    # if url_index in pool.already_picked_index:
                    # print("CCC str(len(pool.already_picked_index)): " \
                    # + \
                    # str(len(pool.already_picked_index)))
                already_picked_number = len(pool.already_picked_index)

                message = ("pool " + str(self.pools.index(pool)) +
                           ": pool_size: " + str(pool_size) + " url_index: " +
                           str(url_index) + " already_picked_number: " +
                           str(already_picked_number) +
                           " already_picked_index: " +
                           str(pool.already_picked_index))
                LOGGER.info(message)

                pool.url_random_pool.append(pool.url[url_index])
                self.list_of_url_random_requested.append(pool.url[url_index])

            if len(self.list_of_url_random_requested) <= 0:
                message = translate_object(
                    "list_not_built") + translate_object("restart")
                stripped_message = strip_html(message)
                icon = "error"
                status = "error"
                LOGGER.error(stripped_message)
                write_status(icon, message)
                return status

            message = "requested urls %s" % self.list_of_url_random_requested
            LOGGER.info(message)

            # Query all requested urls. The seven returned lists are
            # parallel: index i of each refers to the same request.
            self.list_of_urls_returned, \
                self.list_of_status, \
                self.list_of_unixtimes, \
                self.list_of_took_time, \
                self.list_of_half_took_time, \
                self.list_off_time_diff_raw_int, \
                self.list_off_time_diff_lag_cleaned_float, \
                = get_time_from_servers(
                    self.pools,
                    self.list_of_url_random_requested,
                    proxy_ip,
                    proxy_port
                )

            if self.list_of_urls_returned == []:
                message = translate_object(
                    "no_value_returned") + translate_object("restart")
                stripped_message = strip_html(message)
                icon = "error"
                status = "error"
                LOGGER.error(stripped_message)
                write_status(icon, message)
                return status

            message = 'returned urls "%s"' % self.list_of_urls_returned
            LOGGER.info(message)
            LOGGER.info("")

            # Sort each returned url into the valid/failed buckets and
            # record its timing data keyed by url.
            for i in range(len(self.list_of_urls_returned)):
                returned_url_item_url = self.list_of_urls_returned[i]
                returned_url_item_unixtime = self.list_of_unixtimes[i]
                returned_url_item_took_time = self.list_of_took_time[i]
                returned_url_item_took_status = self.list_of_status[i]

                # Example returned_url_item_url:
                # http://tinhat233xymse34.onion

                if returned_url_item_took_status == "ok":
                    self.request_unixtimes[
                        returned_url_item_url] = returned_url_item_unixtime
                    self.request_took_times[
                        returned_url_item_url] = returned_url_item_took_time
                    self.valid_urls.append(returned_url_item_url)
                    self.unixtimes.append(returned_url_item_unixtime)
                    self.half_took_time_float[
                        returned_url_item_url] = self.list_of_half_took_time[i]
                    self.time_diff_raw_int[
                        returned_url_item_url] = self.list_off_time_diff_raw_int[
                            i]
                    self.time_diff_lag_cleaned_float[
                        returned_url_item_url] = self.list_off_time_diff_lag_cleaned_float[
                            i]
                else:
                    self.failed_urls.append(returned_url_item_url)

            # From the second iteration on, three or more statuses that all
            # look like a general timeout abort the whole fetch.
            if self.iteration >= 2:
                if len(self.list_of_status) >= 3:
                    if self.general_timeout_error(self.list_of_status):
                        message = translate_object("general_timeout_error")
                        stripped_message = strip_html(message)
                        icon = "error"
                        status = "error"
                        LOGGER.error(stripped_message)
                        write_status(icon, message)
                        return status

            message = ""
            message += "failed_urls: "
            message += str(len(self.failed_urls))
            message += " allowed_failures: "
            message += str(self.allowed_failures)
            LOGGER.info(message)
            if len(self.failed_urls) > self.allowed_failures:
                message = "Maximum allowed number of failures. Giving up."
                stripped_message = strip_html(message)
                icon = "error"
                status = "error"
                LOGGER.error(stripped_message)
                write_status(icon, message)
                return status

            old_unixtime = time.time()

            # Mark pools whose picked url returned a valid time and log the
            # per-pool time differences.
            for pool in self.pools:
                if pool.done:
                    continue
                for url in pool.url_random_pool:
                    # NOTE(review): pool.done is reassigned for every url in
                    # url_random_pool, so a later url that is not in
                    # valid_urls clears a done flag set by an earlier one —
                    # confirm this is intended.
                    pool.done = url in self.valid_urls
                    if pool.done:
                        pool_number = self.pools.index(pool)

                        # Values are returned randomly. Get the index of the
                        # url.
                        web_unixtime = self.request_unixtimes[url]
                        web_unixtime = int(web_unixtime)
                        request_took_time_item = self.request_took_times[url]
                        web_time = time_human_readable(web_unixtime)

                        pool_diff = self.time_diff_raw_int[url]
                        self.list_of_pools_raw_diff.append(pool_diff)

                        # Rounding. Nanoseconds accuracy is impossible.
                        # It is unknown if the time (seconds) reported by
                        # remote servers was a
                        # "early second" (0.000000000) or
                        # "late second" (0.999999999).
                        time_diff_lag_cleaned_int = round(
                            self.time_diff_lag_cleaned_float[url])
                        self.pools_lag_cleaned_diff.append(
                            time_diff_lag_cleaned_int)

                        message = ""
                        message += "pool " + str(pool_number)
                        message += ": " + url + ", "
                        message += "web_time: " + web_time + ","
                        message += " took_time: "
                        message += str(request_took_time_item)
                        message += " seconds,"
                        message += " time_diff_raw: " + str(pool_diff)
                        message += " seconds,"
                        message += " time_diff_lag_cleaned: "
                        message += str(time_diff_lag_cleaned_int) + " seconds"
                        LOGGER.info(message)

            # message = "len(self.valid_urls): " + str(len(self.valid_urls))
            # LOGGER.info(message)
            if len(self.valid_urls) >= self.number_of_pools:
                break

        message = "End fetching remote times."
        LOGGER.info(message)
        LOGGER.info("")

        message = translate_object("success")
        stripped_message = strip_html(message)
        icon = "success"
        status = "success"
        LOGGER.info(stripped_message)
        write_status(icon, message)
        return status
示例#4
0
    def preparation(self):
        """Run onion-time-pre-script in a loop until it succeeds.

        Executes /usr/lib/helper-scripts/onion-time-pre-script repeatedly,
        feeding the systemd watchdog on every iteration. Returns True as
        soon as the script exits 0. On failure, waits with a linearly
        growing back-off (1..10 seconds); the back-off is reset to 1
        whenever the script's output changes, on the assumption that new
        output means progress was made.
        """
        message = ""
        previous_message = ""
        loop_counter = 0
        loop_max = 10000
        preparation_sleep_seconds = 0
        # Loop-invariant: hoisted out of the retry loop.
        preparation_path = "/usr/lib/helper-scripts/onion-time-pre-script"
        while True:
            SDNOTIFY_OBJECT.notify("WATCHDOG=1")
            if loop_counter >= loop_max:
                loop_counter = 0
            loop_counter += 1
            # Adjacent string literals instead of a backslash continuation:
            # the continuation embedded a long run of indentation spaces
            # into the notification text.
            msg = ("STATUS=Running sdwdate preparation loop. "
                   "preparation_sleep_seconds: " +
                   str(preparation_sleep_seconds) +
                   " iteration: " + str(loop_counter) + " / " + str(loop_max))
            SDNOTIFY_OBJECT.notify(msg)

            # Wait one second after first failure. Two at second failure etc.
            # Up to a maximum of ten seconds wait between attempts.
            # The rationale behind this is to be quick at boot time while not
            # stressing the system.
            preparation_sleep_seconds += 1
            if preparation_sleep_seconds >= 10:
                preparation_sleep_seconds = 10

            preparation_status = subprocess.Popen(preparation_path,
                                                  stdout=subprocess.PIPE,
                                                  stderr=subprocess.PIPE)

            stdout, stderr = preparation_status.communicate()
            preparation_status.kill()
            output_stdout = stdout.decode("UTF-8")
            output_stderr = stderr.decode("UTF-8")
            joint_message = output_stderr + "\n" + output_stdout

            if preparation_status.returncode == 0:
                LOGGER.info("PREPARATION:")
                message = joint_message.strip()
                LOGGER.info(strip_html(message))
                LOGGER.info("PREPARATION RESULT: SUCCESS.")
                LOGGER.info("\n")
                return True

            if joint_message == previous_message:
                # No new message. No status changes.
                # Therefore do not reset wait counter and wait
                # preparation_sleep_seconds.
                time.sleep(preparation_sleep_seconds)
                continue

            previous_message = joint_message

            LOGGER.info("PREPARATION:")
            message = joint_message.strip()
            # Log the stripped message, as the success branch does;
            # previously the unstripped joint_message was logged here.
            LOGGER.info(strip_html(message))

            if preparation_status.returncode == 1:
                icon = "error"
                LOGGER.info("PREPARATION RESULT: (known) ERROR.")
            elif preparation_status.returncode == 2:
                icon = "busy"
                LOGGER.info("PREPARATION RESULT: WAIT.")
            else:
                icon = "error"
                LOGGER.info("PREPARATION RESULT: (unknown) ERROR.")

            LOGGER.info("\n")
            # https://phabricator.whonix.org/T534#15429
            # Fixed "More more information" typo and removed the embedded
            # indentation spaces from the user-facing message.
            main_message = ("Preparation not done yet. For more information, "
                            "see: sdwdate-gui -> right click -> "
                            "Open sdwdate's log")

            write_status(icon, main_message)

            # Different message. Probably progress was made.
            # More progress to be expected.
            # Therefore reset wait counter to just wait a short time.
            preparation_sleep_seconds = 1
            time.sleep(preparation_sleep_seconds)