Example #1
 def __init__(self, start=4000, stop=6000):
     self.lock = None
     self.bind = None
     self.port = None
     from fasteners.process_lock import InterProcessLock
     import time
     while True:
         bind = BindFreePort(start, stop)
         if bind.port in self.used_ports:
             bind.release()
             continue
         '''
         Since we cannot be certain that the user will bind the port immediately (in fact that is
         not possible with this flow), we must ensure that the port cannot be reacquired even while
         it is not bound to anything.
         '''
         lock = InterProcessLock(
             path='/tmp/socialdna/port_{}_lock'.format(bind.port))
         success = lock.acquire(blocking=False)
         if success:
             self.lock = lock
             self.port = bind.port
             self.used_ports.add(bind.port)
             bind.release()
             break
         bind.release()
         time.sleep(0.01)
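The snippet above relies on a BindFreePort helper and a used_ports set that are defined elsewhere in the original project. As a rough illustration only (an assumption, not the project's actual code), such a helper might bind a throwaway socket to a random port in the requested range and hold it until release() is called:

import random
import socket

class BindFreePort:
    """Hypothetical helper: reserve a free port by binding a temporary socket."""

    def __init__(self, start, stop):
        self.sock = socket.socket()
        while True:
            port = random.randint(start, stop - 1)
            try:
                # Binding succeeds only if the port is currently free.
                self.sock.bind(('127.0.0.1', port))
                self.port = port
                break
            except OSError:
                continue

    def release(self):
        # Close the socket so the caller can bind the port itself.
        self.sock.close()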
Example #2
 def __init__(self, path):
     # Create lock file if it doesn't exist and set permissions for
     # all users to lock/unlock
     if not exists(path):
         f = open(path, 'w+')
         f.close()
         chmod(path, 0o777)
     self.plock = InterProcessLock(path)
     self.tlock = Lock()
Example #3
class ComboLock:
    """ A combined process and thread lock.

    Arguments:
        path (str): path to the lockfile for the lock
    """
    def __init__(self, path):
        self.path = path
        self.plock = InterProcessLock(self.path)
        self.tlock = Lock()

    def acquire(self, blocking=True):
        """ Acquire lock, locks thread and process lock.

        Arguments:
            blocking(bool): Sets blocking mode of the acquire operation.
                            Default True.

        Returns: True if the lock was acquired, otherwise False
        """
        # Create lock file if it doesn't exist and set permissions for
        # all users to lock/unlock
        if not exists(self.path):
            f = open(self.path, 'w+')
            f.close()
            chmod(self.path, 0o777)
        if not blocking:
            # Lock thread
            tlocked = self.tlock.acquire(blocking=False)
            if not tlocked:
                return False
            # Lock process
            plocked = self.plock.acquire(blocking=False)
            if not plocked:
                # Release thread lock if process couldn't be locked
                self.tlock.release()
                return False
        else:  # blocking, just wait and acquire ALL THE LOCKS!!!
            self.tlock.acquire()
            self.plock.acquire()
        return True

    def release(self):
        """ Release acquired lock. """
        self.plock.release()
        self.tlock.release()

    def __enter__(self):
        """ Context handler, acquires lock in blocking mode. """
        self.acquire()
        return self

    def __exit__(self, _type, value, traceback):
        """ Releases the lock. """
        self.release()
Example #4
class ComboLock:
    """ A combined process and thread lock.

    Arguments:
        path (str): path to the lockfile for the lock
    """
    def __init__(self, path):
        # Create lock file if it doesn't exist and set permissions for
        # all users to lock/unlock
        if not exists(path):
            f = open(path, 'w+')
            f.close()
            chmod(path, 0o777)
        self.plock = InterProcessLock(path)
        self.tlock = Lock()

    def acquire(self, blocking=True):
        """ Acquire lock, locks thread and process lock.

        Arguments:
            blocking(bool): Sets blocking mode of the acquire operation.
                            Default True.

        Returns: True if the lock was acquired, otherwise False
        """
        if not blocking:
            # Lock thread
            tlocked = self.tlock.acquire(blocking=False)
            if not tlocked:
                return False
            # Lock process
            plocked = self.plock.acquire(blocking=False)
            if not plocked:
                # Release thread lock if process couldn't be locked
                self.tlock.release()
                return False
        else:  # blocking, just wait and acquire ALL THE LOCKS!!!
            self.tlock.acquire()
            self.plock.acquire()
        return True

    def release(self):
        """ Release acquired lock. """
        self.plock.release()
        self.tlock.release()

    def __enter__(self):
        """ Context handler, acquires lock in blocking mode. """
        self.acquire()
        return self

    def __exit__(self, _type, value, traceback):
        """ Releases the lock. """
        self.release()
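A brief usage sketch (illustrative, not taken from the original project): since ComboLock implements __enter__ and __exit__, callers can hold both the thread lock and the process lock with a with statement, or poll it without blocking; the lock path and the critical-section function below are hypothetical.

lock = ComboLock('/tmp/example-combo.lock')  # illustrative path

# Blocking use: both locks are held for the duration of the block.
with lock:
    do_exclusive_work()  # hypothetical critical section

# Non-blocking use: skip the work if another thread or process holds the lock.
if lock.acquire(blocking=False):
    try:
        do_exclusive_work()
    finally:
        lock.release()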
Example #5
 def get_next(self):
     a_lock = None
     locked = False
     try:
         a_lock = InterProcessLock(self.uid_pidlock_filepath)
         locked = a_lock.acquire(blocking=True)
         if locked:
             v = self.load_value()
             v_next = v + 1
             self.save_value(v_next)
             return v_next
     finally:
         if locked and a_lock is not None:
             a_lock.release()
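get_next() depends on load_value() and save_value() helpers that are not shown here. A minimal sketch of what they could look like, assuming the counter is stored as plain text in a file (the attribute name uid_value_filepath is an assumption, not part of the original code):

 def load_value(self):
     # Assumed helper: read the current counter, defaulting to 0 if the file is missing or empty.
     try:
         with open(self.uid_value_filepath, 'r') as fp:
             return int(fp.read().strip() or 0)
     except FileNotFoundError:
         return 0

 def save_value(self, value):
     # Assumed helper: persist the new counter value.
     with open(self.uid_value_filepath, 'w') as fp:
         fp.write(str(value))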
Example #6
    def crash_signature(self):
        """Create CrashSignature object from CrashInfo.

        Args:
            None

        Returns:
            CrashSignature: CrashSignature based on log data.
        """
        if self._signature is None:
            collector = Collector()
            with InterProcessLock(str(Path(grz_tmp()) / "fm_sigcache.lock")):
                if collector.sigCacheDir:
                    cache_sig, _ = collector.search(self.crash_info)
                    if cache_sig:
                        LOG.debug("signature loaded from cache file %r", cache_sig)
                        self._signature = CrashSignature.fromFile(cache_sig)
            # if cache lookup failed generate a crash signature
            if self._signature is None:
                self._signature = self.crash_info.createCrashSignature(
                    maxFrames=self.crash_signature_max_frames(self.crash_info)
                )
            if self._signature is None:
                LOG.debug("failed to create FM signature")
        return self._signature
Example #7
 def __enter__(self):
     self._lock = InterProcessLock("%s.lock" % (self._file, ))
     self._lock.acquire()
     try:
         with open(self._file, "r") as in_fp:
             data = load(in_fp)
         self.error = data["error"]
         self.failed = data["failed"]
         self.passed = data["passed"]
     except KeyError:
         LOG.debug("invalid status data in %r", self._file)
     except OSError:
         LOG.debug("%r does not exist", self._file)
     except ValueError:
         LOG.debug("failed to load stats from %r", self._file)
     return self
Example #8
def _get_unused_port(start, end=65536):
    """Get an unused port in the range [start, end) .

    Args:
        start (int) : port range start
        end (int): port range end
    """
    process_lock = None
    unused_port = None
    try:
        for port in range(start, end):
            process_lock = InterProcessLock(
                path='/tmp/socialbot/{}.lock'.format(port))
            if not process_lock.acquire(blocking=False):
                process_lock.lockfile.close()
                process_lock = None
                continue
            try:
                with contextlib.closing(socket.socket()) as sock:
                    sock.bind(('', port))
                    unused_port = port
                    break
            except socket.error:
                process_lock.release()
                process_lock = None
        if unused_port is None:
            raise socket.error("No unused port in [{}, {})".format(start, end))
        yield unused_port
    finally:
        if process_lock is not None:
            process_lock.release()
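Because _get_unused_port() yields the port while still holding the per-port file lock, it is presumably wrapped with contextlib.contextmanager in the original module. A usage sketch under that assumption:

import contextlib

get_unused_port = contextlib.contextmanager(_get_unused_port)  # assumed wiring

with get_unused_port(8000) as port:
    # The lock file for `port` stays acquired until the block exits, so other
    # processes using the same helper cannot hand out the same port.
    print('using port', port)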
Example #9
class ReducerStats(object):
    """ReducerStats holds stats for the Grizzly reducer.
    """
    FILE = "reducer-stats.json"
    PATH = grz_tmp("status")

    __slots__ = ("_file", "_lock", "error", "failed", "passed")

    def __init__(self):
        self._file = pathjoin(self.PATH, self.FILE)
        self._lock = None
        self.error = 0
        self.failed = 0
        self.passed = 0

    def __enter__(self):
        self._lock = InterProcessLock("%s.lock" % (self._file, ))
        self._lock.acquire()
        try:
            with open(self._file, "r") as in_fp:
                data = load(in_fp)
            self.error = data["error"]
            self.failed = data["failed"]
            self.passed = data["passed"]
        except KeyError:
            LOG.debug("invalid status data in %r", self._file)
        except OSError:
            LOG.debug("%r does not exist", self._file)
        except ValueError:
            LOG.debug("failed to load stats from %r", self._file)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            with open(self._file, "w") as out_fp:
                dump(
                    {
                        "error": self.error,
                        "failed": self.failed,
                        "passed": self.passed
                    }, out_fp)
        finally:
            if self._lock:
                self._lock.release()
                self._lock = None
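Usage sketch (illustrative, not from the source): __enter__ loads the persisted counters under the inter-process lock and __exit__ writes them back and releases the lock, so updating a counter is simply:

with ReducerStats() as stats:
    stats.passed += 1  # written back to reducer-stats.json when the block exits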
Example #10
 def __init__(self, data_file, start_time=None):
     assert ".json" in data_file
     assert start_time is None or isinstance(start_time, float)
     self._lock = InterProcessLock("%s.lock" % (data_file, ))
     self._results = defaultdict(int)
     # if data_file is None the status report is read only (no reporting)
     self.data_file = data_file
     self.ignored = 0
     self.iteration = 0
     self.log_size = 0
     self.start_time = start_time
     self.test_name = None
     self.timestamp = start_time
Example #11
 def __init__(self, data_file, enable_profiling=False, start_time=None):
     assert data_file.endswith(".json")
     assert start_time is None or isinstance(start_time, float)
     self._lock = InterProcessLock("%s.lock" % (data_file, ))
     self._enable_profiling = enable_profiling
     self._profiles = dict()
     self._results = defaultdict(int)
     # if data_file is None the status report is read only (no reporting)
     self.data_file = data_file
     self.ignored = 0
     self.iteration = 0
     self.log_size = 0
     self.pid = None
     self.start_time = start_time
     self.test_name = None
     self.timestamp = start_time
Example #12
    def _submit_report(self, report, test_cases):
        collector = Collector()

        if not self.force_report:
            # search for a cached signature match
            with InterProcessLock(str(Path(grz_tmp()) / "fm_sigcache.lock")):
                _, cache_metadata = collector.search(report.crash_info)

            # check if signature has been marked as frequent in FM
            if cache_metadata is not None and cache_metadata["frequent"]:
                LOG.info(
                    "Frequent crash matched existing signature: %s",
                    cache_metadata["shortDescription"],
                )
                return None

        if self._ignored(report):
            LOG.info("Report is in ignore list")
            return None

        if report.is_hang:
            self.add_extra_metadata("is_hang", True)

        # dump test cases and the contained files to working directory
        test_case_meta = []
        for test_number, test_case in enumerate(test_cases):
            test_case_meta.append([test_case.adapter_name, test_case.input_fname])
            dump_path = report.path / ("%s-%d" % (report.prefix, test_number))
            dump_path.mkdir(exist_ok=True)
            test_case.dump(dump_path, include_details=True)
        report.crash_info.configuration.addMetadata(
            {"grizzly_input": repr(test_case_meta)}
        )
        if test_cases:
            environ_string = " ".join(
                "=".join(kv) for kv in test_cases[0].env_vars.items()
            )
            report.crash_info.configuration.addMetadata(
                {"recorded_envvars": environ_string}
            )
        else:
            self.quality = Quality.NO_TESTCASE
        report.crash_info.configuration.addMetadata(self._extra_metadata)

        # TODO: this should likely move to ffpuppet
        # grab screen log (used in automation)
        if getenv("WINDOW") is not None:
            screen_log = Path.cwd() / ("screenlog.%s" % (getenv("WINDOW"),))
            if screen_log.is_file():
                target_log = report.path / "screenlog.txt"
                copyfile(str(screen_log), str(target_log))
                Report.tail(target_log, 10240)  # limit to last 10K

        with TemporaryDirectory(prefix="fm-zip", dir=grz_tmp()) as tmp_dir:
            # add results to a zip file
            zip_name = Path(tmp_dir) / ("%s.zip" % (report.prefix,))
            with ZipFile(zip_name, mode="w", compression=ZIP_DEFLATED) as zip_fp:
                # add test files
                for entry in report.path.rglob("*"):
                    if entry.is_file():
                        zip_fp.write(
                            str(entry), arcname=str(entry.relative_to(report.path))
                        )
            # override tool name if specified
            if self.tool is not None:
                collector.tool = self.tool

            # submit results to the FuzzManager server
            new_entry = collector.submit(
                report.crash_info, testCase=zip_name, testCaseQuality=self.quality.value
            )
        LOG.info("Logged %d (%s)", new_entry["id"], self.quality.name)

        return new_entry["id"]
Example #13
def main():
    """Command line interface for the ``executor`` program."""
    # Enable logging to the terminal and system log.
    coloredlogs.install(syslog=True)
    # Command line option defaults.
    command_timeout = 0
    exclusive = False
    fudge_factor = 0
    lock_name = None
    lock_timeout = 0
    # Parse the command line options.
    try:
        options, arguments = getopt.getopt(sys.argv[1:], 'eT:l:t:f:vqh', [
            'exclusive', 'lock-timeout=', 'lock-file=', 'timeout=',
            'fudge-factor=', 'verbose', 'quiet', 'help',
        ])
        for option, value in options:
            if option in ('-e', '--exclusive'):
                exclusive = True
            elif option in ('-T', '--lock-timeout'):
                lock_timeout = parse_timespan(value)
            elif option in ('-l', '--lock-file'):
                lock_name = value
            elif option in ('-t', '--timeout'):
                command_timeout = parse_timespan(value)
            elif option in ('-f', '--fudge-factor'):
                fudge_factor = parse_timespan(value)
            elif option in ('-v', '--verbose'):
                coloredlogs.increase_verbosity()
            elif option in ('-q', '--quiet'):
                coloredlogs.decrease_verbosity()
            elif option in ('-h', '--help'):
                usage(__doc__)
                sys.exit(0)
            else:
                assert False, "Unhandled option!"
        # Make sure the operator provided a program to execute.
        if not arguments:
            usage(__doc__)
            sys.exit(0)
        # Make sure the program actually exists.
        program_name = arguments[0]
        if not os.path.isfile(program_name):
            # Only search the $PATH if the given program name
            # doesn't already include one or more path segments.
            if program_name == os.path.basename(program_name):
                matching_programs = which(program_name)
                if matching_programs:
                    program_name = matching_programs[0]
        # The subprocess.Popen() call later on doesn't search the $PATH so we
        # make sure to give it the absolute pathname to the program.
        arguments[0] = program_name
    except Exception as e:
        warning("Failed to parse command line arguments: %s", e)
        sys.exit(1)
    # Apply the requested fudge factor.
    apply_fudge_factor(fudge_factor)
    # Run the requested command.
    try:
        if exclusive:
            # Select a default lock file name?
            if not lock_name:
                lock_name = os.path.basename(arguments[0])
                logger.debug("Using base name of command as lock file name (%s).", lock_name)
            lock_file = get_lock_path(lock_name)
            lock = InterProcessLock(path=lock_file, logger=logger)
            logger.debug("Trying to acquire exclusive lock: %s", lock_file)
            if lock.acquire(blocking=(lock_timeout > 0), max_delay=lock_timeout):
                logger.info("Successfully acquired exclusive lock: %s", lock_file)
                run_command(arguments, timeout=command_timeout)
            else:
                logger.error("Failed to acquire exclusive lock: %s", lock_file)
                sys.exit(1)
        else:
            run_command(arguments, timeout=command_timeout)
    except ExternalCommandFailed as e:
        logger.error("%s", e.error_message)
        sys.exit(e.command.returncode)
Example #15
    def _submit_report(self, report, test_cases):
        # search for a cached signature match and if the signature
        # is already in the cache and marked as frequent, don't bother submitting
        with InterProcessLock(pathjoin(grz_tmp(), "fm_sigcache.lock")):
            collector = Collector()
            cache_sig_file, cache_metadata = collector.search(report.crash_info)
            if cache_metadata is not None:
                if cache_metadata["frequent"]:
                    LOG.info("Frequent crash matched existing signature: %s",
                             cache_metadata["shortDescription"])
                    if not self.force_report:
                        return
                elif "bug__id" in cache_metadata:
                    LOG.info("Crash matched existing signature (bug %s): %s",
                             cache_metadata["bug__id"],
                             cache_metadata["shortDescription"])
                    # we will still report this one, but no more
                    cache_metadata["frequent"] = True
                # there is already a signature, initialize count
                cache_metadata.setdefault("_grizzly_seen_count", 0)
            else:
                # there is no signature, create one locally so we can count
                # the number of times we've seen it
                max_frames = report.crash_signature_max_frames(report.crash_info)
                cache_sig_file = collector.generate(report.crash_info, numFrames=max_frames)
                cache_metadata = {
                    "_grizzly_seen_count": 0,
                    "frequent": False,
                    "shortDescription": report.crash_info.createShortSignature()}
            if cache_sig_file is None:
                if self._ignored(report):
                    LOG.info("Report is unsupported and is in ignore list")
                    return
                LOG.warning("Report is unsupported by FM, saved to %r", report.path)
                # TODO: we should check if stackhasher failed too
                raise RuntimeError("Failed to create FM signature")
            # limit the number of times we report per cycle
            cache_metadata["_grizzly_seen_count"] += 1
            if cache_metadata["_grizzly_seen_count"] >= self.MAX_REPORTS:
                # we will still report this one, but no more
                cache_metadata["frequent"] = True
            metadata_file = cache_sig_file.replace(".signature", ".metadata")
            with open(metadata_file, "w") as meta_fp:
                dump(cache_metadata, meta_fp)

        # dump test cases and the contained files to working directory
        test_case_meta = []
        for test_number, test_case in enumerate(test_cases):
            test_case_meta.append([test_case.adapter_name, test_case.input_fname])
            dump_path = pathjoin(report.path, "%s-%d" % (report.prefix, test_number))
            if not isdir(dump_path):
                mkdir(dump_path)
            test_case.dump(dump_path, include_details=True)
        report.crash_info.configuration.addMetadata({"grizzly_input": repr(test_case_meta)})
        if test_cases:
            environ_string = " ".join("=".join(kv) for kv in test_cases[0].env_vars.items())
            report.crash_info.configuration.addMetadata({"recorded_envvars": environ_string})
        else:
            self.quality = self.QUAL_NO_TESTCASE
        report.crash_info.configuration.addMetadata(self._extra_metadata)

        # grab screen log (used in automation)
        if getenv("WINDOW") is not None:
            screen_log = pathjoin(getcwd(), ".".join(["screenlog", getenv("WINDOW")]))
            if isfile(screen_log):
                target_log = pathjoin(report.path, "screenlog.txt")
                copyfile(screen_log, target_log)
                Report.tail(target_log, 10240)  # limit to last 10K

        # add results to a zip file
        zip_name = "%s.zip" % (report.prefix,)
        with ZipFile(zip_name, mode="w", compression=ZIP_DEFLATED) as zip_fp:
            # add test files
            for dir_name, _, dir_files in walk(report.path):
                arc_path = relpath(dir_name, report.path)
                for file_name in dir_files:
                    zip_fp.write(
                        pathjoin(dir_name, file_name),
                        arcname=pathjoin(arc_path, file_name))

        # override tool name if specified
        if self.tool is not None:
            collector.tool = self.tool

        # announce shortDescription if crash is not in a bucket
        if cache_metadata["_grizzly_seen_count"] == 1 and not cache_metadata["frequent"]:
            LOG.info("Submitting new crash %r", cache_metadata["shortDescription"])
        # submit results to the FuzzManager server
        new_entry = collector.submit(report.crash_info, testCase=zip_name, testCaseQuality=self.quality)
        LOG.info("Logged %d with quality %d", new_entry["id"], self.quality)

        # remove zipfile
        if isfile(zip_name):
            unlink(zip_name)
Example #16
    if do_collect:
        try:
            subprocess.check_output((bin_path('collect'), '--commands'),
                                    stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            log.error('collect --commands failed:\n{}',
                      e.output.decode('utf8'))

        # We don't want to hose the machine, so even when collecting fails, we
        # still wait for the configured interval.
        update_stamp(collect_stamp_file)

        try:
            subprocess.check_output((bin_path('submit'),),
                                    stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            log.error('submit failed:\n{}', e.output.decode('utf8'))
        else:
            update_stamp(submit_stamp_file)


if __name__ == '__main__':
    lock_file = os.path.join(var_dir, 'client-cron.lock')
    lock = InterProcessLock(lock_file)
    # Note: Not necessary to explicitly release the lock, since fasteners
    # guarantees that the lock is released when the process exits.
    if not lock.acquire(timeout=59, delay=1, max_delay=1):
        log.warn('Failed to acquire lock')
        sys.exit(1)
    main()
Example #17
 def __init__(self, path):
     self.path = path
     self.plock = InterProcessLock(self.path)
     self.tlock = Lock()
Example #18
    port = 0
    if len(sys.argv) > 1:
        port = sys.argv[1]

    for i in range(2, len(sys.argv)):
        if sys.argv[i] == '--shutdown_on_idle':
            shutdown_on_idle = True
        if sys.argv[i] == '--launch_client':
            launch_client = True
        if sys.argv[i] == '--debug':
            debug = True

    app_data_path = Dirs.app_data_dir()
    lock_path = Dirs.app_data_dir() + '/lock'
    lock = Lock(lock_path)

    if lock.acquire(blocking=False):

        left_over_lock_files = glob.glob(app_data_path + '/*.lock')
        for left_over in left_over_lock_files:
            os.remove(left_over)

        server = Server(port, shutdown_on_idle=shutdown_on_idle, debug=debug)
        server.add_ports_opened_listener(_ports_opened)
        server.start()

    else:

        print('Server already running')