示例#1
0
    def __init__(self, dir, maxlen=20, num_samples=20):
        """
        Initialise the real-time CSV dataset: events, frame buffers and the
        transform pipeline applied to incoming gesture frames.

        :param dir: dataset's path
        :param maxlen: indicates the dimension of the circular buffer
                       (its size should be big enough to contain all the frame of a gesture)
        :param num_samples: number of points each sequence is resampled to
        """
        # Events
        self.start = Event()  # Start to fire a sequence
        self.fire = Event()  # Fire a frame
        self.end = Event()  # End of sequence
        # Variables
        self.__buffer = collections.deque(maxlen=maxlen)  # Circular buffer
        self.buffer = []  # Plain unbounded list -- NOTE(review): original comment called this a circular buffer too; confirm intent
        # Call super method
        super(CsvDatasetRealTime, self).__init__(dir)

        #### Transforms ####
        # Settings items for transforming
        transform1 = NormaliseLengthTransform(axisMode=True)
        transform2 = ScaleDatasetTransform(scale=100)
        transform3 = CenteringTransform()
        transform5 = ResampleInSpaceTransform(samples=num_samples)
        # Apply transforms
        self.addTransform(transform1)
        self.addTransform(transform2)
        self.addTransform(transform3)
        self.addTransform(transform5)
示例#2
0
    def __init__(self, comport):
        """Remember the COM port and create this monitor's event hooks."""
        # Port identifier this monitor listens on.
        self.comport = comport
        # Dictionary buffer (contents are managed elsewhere).
        self.buffer = {}

        # Events raised towards subscribers.
        self.packet_recv = Event()
        self.onerror = Event()
        self.controllerdisconnected = Event()
示例#3
0
    def __init__(self, packet_parser):
        """Wire a PreviewMonitor into this controller wrapper."""
        # Parser used to decode raw packets coming from the monitor.
        self.packet_parser = packet_parser

        # Public events re-raised towards our own subscribers.
        self.controllerstate = Event()
        self.controllerdisconnected = Event()
        self.onerror = Event()

        # Source of raw packets; forward its packets to our handler.
        self.monitor = PreviewMonitor()
        self.monitor.packet_recv += self.packetrecv
示例#4
0
    def __init__(self):
        """
        Initialise the object: logger, running flag and the two events
        (data_rx, connection_changed) that subscribers can attach to.
        """
        self.__logger = logging.getLogger(__name__)

        # True while running; toggled elsewhere in the class.
        self.__is_running = False

        # Fired on received data / on connection-state changes.
        self.data_rx = Event()
        self.connection_changed = Event()
示例#5
0
    def __init__(self, portname, packet_parser):
        """Attach a SerialMonitor on *portname* and route its events here."""
        # Decoder for raw serial packets.
        self.packet_parser = packet_parser

        # Events exposed to our subscribers.
        self.controllerstate = Event()
        self.controllerdisconnected = Event()
        self.onerror = Event()

        # Underlying serial monitor; relay its packet/disconnect events.
        self.serial = SerialMonitor(serial_port=portname)
        self.serial.packet_recv += self.serialmonitor_packetrecv
        self.serial.disconnected += self.serialmonitor_disconnected
示例#6
0
    def __init__(self, comport):
        """Create a keyboard-backed monitor for *comport*."""
        self.comport = comport
        # Dictionary buffer (contents are managed elsewhere).
        self.buffer = {}

        # Events raised towards subscribers.
        self.packet_recv = Event()
        self.onerror = Event()
        self.controllerdisconnected = Event()

        # In legacy mode every system-wide key event is hooked here.
        if comport == 'keyboard_legacy':
            keyboard.on_press(callback=self.on_down)
            keyboard.on_release(callback=self.on_release)
示例#7
0
class Adapter(DbusObjectWrapper):
    '''
    Wrapper for a bluetooth adapter.

    Re-raises the BlueZ adapter signals (DeviceFound, DeviceDisappeared,
    DeviceCreated, DeviceRemoved) through the corresponding Event
    attributes so callers can subscribe to them.
    '''
    def __init__(self, object_path):
        '''
        Initialize for the hci device at the given object path.
        '''
        super(Adapter, self).__init__('org.bluez', object_path,
                                      'org.bluez.Adapter')
        self.interface.connect_to_signal('DeviceFound', self.__device_found)
        self.interface.connect_to_signal('DeviceDisappeared',
                                         self.__device_disappeared)
        self.interface.connect_to_signal('DeviceCreated',
                                         self.__device_created)
        self.interface.connect_to_signal('DeviceRemoved',
                                         self.__device_removed)

    # D-Bus properties of org.bluez.Adapter.
    address = DbusProperty('Address', read_only=True)
    name = DbusProperty('Name', read_only=False)
    device_class = DbusProperty('Class', read_only=True)
    powered = DbusProperty('Powered', read_only=False)
    discoverable = DbusProperty('Discoverable', read_only=False)
    pairable = DbusProperty('Pairable', read_only=False)
    pairable_timeout = DbusProperty('PairableTimeout', read_only=False)
    discoverable_timeout = DbusProperty('DiscoverableTimeout', read_only=False)
    discovering = DbusProperty('Discovering', read_only=True)
    uuids = DbusProperty('UUIDs', read_only=True)

    # Events re-raised from the BlueZ signals above.
    device_found = Event()
    device_disappeared = Event()
    device_created = Event()
    device_removed = Event()

    def get_devices(self):
        '''
        Get all devices associated with this adapter.
        '''
        return get_objects_from_property(Device, self.interface, 'Devices')

    def __device_found(self, address, values):
        # Forward the signal to subscribers (debug print removed).
        self.device_found(address, values)

    def __device_disappeared(self, address):
        # Was a stub: forward the signal so subscribers are notified,
        # mirroring __device_found.
        self.device_disappeared(address)

    def __device_created(self, device):
        # Was a stub: forward the signal so subscribers are notified.
        self.device_created(device)

    def __device_removed(self, device):
        # Was a stub: forward the signal so subscribers are notified.
        self.device_removed(device)
示例#8
0
    def __init__(self, comport, packet_parser):
        """Create a controller fed by a KeyboardMonitor on *comport*."""
        # Decoder for raw packets and the port the monitor watches.
        self.packet_parser = packet_parser
        self.comport = comport

        # Events exposed to our subscribers.
        self.controllerstate = Event()
        self.controllerdisconnected = Event()
        self.onerror = Event()

        # Worker thread handle; created later.
        self.t = None

        # Packet source; route its packets and disconnects to our handlers.
        self.monitor = KeyboardMonitor(comport)
        self.monitor.packet_recv += self.packetrecv
        self.monitor.controllerdisconnected += self.finish
示例#9
0
class RadioList(_RadioList):
    """Radio-list widget extended with selection/check helpers and a
    check_event fired when the checked value changes."""

    def __init__(self, values: Sequence[Tuple[_T, AnyFormattedText]]) -> None:
        super().__init__(values)
        # Subscriber callbacks (managed by callers).
        self.handlers = []
        # Fired as check_event.fire(old_value, new_value) on Enter.
        self.check_event = Event()

    def up(self) -> None:
        """Move the highlight one row up, clamped at the top."""
        self._selected_index = max(0, self._selected_index - 1)

    def down(self) -> None:
        """Move the highlight one row down, clamped at the bottom."""
        self._selected_index = min(
            len(self.values) - 1, self._selected_index + 1)

    def get_selected_index(self) -> int:
        """Return the index of the highlighted row."""
        return self._selected_index

    def get_selected_item(self) -> Tuple[_T, AnyFormattedText]:
        """Return the highlighted (value, label) pair."""
        return self.values[self.get_selected_index()]

    def get_selected_value(self):
        """Return the value of the highlighted row."""
        return self.get_selected_item()[0]

    def get_checked_index(self):
        """Return the index of the checked value, or -1 if none matches."""
        for idx, value in enumerate(self.values):
            if value[0] == self.current_value:
                return idx
        return -1

    def get_checked_value(self):
        """Return the currently checked value."""
        return self.current_value

    def get_checked_item(self):
        # NOTE(review): when nothing is checked get_checked_index() returns
        # -1, which silently selects the LAST item here -- confirm intent.
        return self.values[self.get_checked_index()]

    def set_checked_index(self, index: int):
        """Highlight *index* and mark its value as checked."""
        self._selected_index = index
        self.current_value = self.values[self._selected_index][0]

    def set_selected_index(self, index: int):
        """Highlight *index* without changing the checked value."""
        self._selected_index = index

    def _handle_enter(self) -> None:
        """Check the highlighted row, then fire check_event(old, new)."""
        old_value = None
        for value in self.values:
            if value[0] == self.current_value:
                old_value = value
        new_value = self.values[self._selected_index]
        super()._handle_enter()
        self.check_event.fire(old_value, new_value)
    def __init__(self, service_type, port):
        '''Create a TCP client for the given zeroconf service type/port.'''
        self.__logger = logging.getLogger(__name__)

        # Events fired on connection changes / on received data.
        self.connection_changed = Event(sender='client')
        self.data_rx = Event(sender='client')

        # Discovery / connection state.
        self.__service_type = service_type
        self.__port = port
        self.__loop = None
        self.__browser = None
        self.__server_connection = None
        self.__shutdown_in_progress = False
        self.__queue = asyncio.Queue()
示例#11
0
    def __init__(self, service_type, port):
        '''Create a TCP server'''
        self.__logger = logging.getLogger(__name__)

        # Fired whenever a client connects or disconnects.
        self.connection_changed = Event(sender='server')

        # this keeps track of all the clients that connected to our
        # server.  It can be useful in some cases, for instance to
        # kill client connections or to broadcast some data to all
        # clients...
        self.__clients = {} # task -> (reader, writer)
        self.__port = port
        self.__server = None
        self.__loop = None
        self.__shutdown_in_progress = False
        self.__queue = asyncio.Queue()
        # Zeroconf service record advertising this server.
        # NOTE(review): positional args appear to follow the legacy
        # ServiceInfo signature (type, name, address, port, weight,
        # priority, properties, server) -- confirm against the installed
        # zeroconf version.
        self.__info = ServiceInfo(
            service_type,
            'TTC-%s.%s' % (
                uuid.uuid3(uuid.NAMESPACE_DNS, socket.gethostname()),
                service_type),
            socket.inet_aton(
                socket.gethostbyname(socket.gethostname())),
            self.__port,
            0,
            0,
            {},
            socket.gethostname() + '.')
示例#12
0
    def _do_subscribe(self, name, handler):
        """
        Act as a server side event. Add the handler to the list of callbacks.

        Parameters
        ----------
        name : string
            Name of the event to subscribe to.
        handler : method
            Method that handles event notifications.

        Raises
        ------
        MessageHandleError
            An error occurred handling the message.
        ConnectionError
            An error occurred in the connection to the server.
        TimeoutError
            A timeout occurred in the request to the server.
        """
        #FIXME: This approach does not work if there is more than one instance
        # of the service interface. Then one will lose updates
        if name not in self._events:
            # First subscriber for this event: create it and register the
            # subscription with the server.
            self._events[name] = Event()
            self._client_factory.subscribe(self._service_name, name,
                                           self._events[name])
        self._events[name] += handler
    def __init__(self, servo, gpo_provider, main_angle, diverging_angle,
                 speed):
        '''
        Create a turnout.

        This is only able to create electro frog turnouts.
        '''
        self.__logger = logging.getLogger('turnout')

        # Servo end position for each transition state.
        self.__end_angles = dict(transition_main=main_angle,
                                 transition_diverging=diverging_angle)
        self.__servo = servo
        self.__gpo = gpo_provider
        self.__speed = speed
        self.__motion_time = 0

        # Angle bounds derived from the two end positions.
        self.__min_angle = min(main_angle, diverging_angle)
        self.__max_angle = max(main_angle, diverging_angle)

        self.__init_state_machine()

        # Fired whenever the turnout changes state.
        self.state_changed = Event()
示例#14
0
class VideoAnalyzerBase:
    """Base class for video analyzers."""

    # Asynchronous event fired once a video has been analyzed.
    video_analyzed_event = Event(asynch=True, threads=1)

    def analyze_video(self, video):
        """Analyze *video* and notify subscribers (stub: result is None)."""
        analysis_result = None
        self.video_analyzed_event(analysis_result)
示例#15
0
    def __init__(self,
                 loop,
                 client,
                 server=None,
                 discovery_timeout=DISCOVERY_TIMEOUT_S,
                 randomize_timeout=True):
        """
        Create a network manager.

        :param loop: event loop used for network activity
        :param client: client service (required)
        :param server: optional server service
        :param discovery_timeout: timeout used during service discovery
        :param randomize_timeout: randomize the timeout if True
        :raises AttributeError: if no client is supplied
        """
        self.__logger = logging.getLogger(__name__)

        self.discovery_timeout = discovery_timeout
        self.connection_changed = Event()

        self.__loop = loop

        self.__threshold = 0

        # Per-service connection counters.
        self.__connection_count = {'client': 0, 'server': 0}

        self.__init_state_machine(randomize_timeout)

        # Required to provide a client (identity check is the idiomatic
        # None test; original used `None == client`).
        if client is None:
            raise AttributeError('Client must be specified')

        self.__service_list = {'client': client, 'server': server}

        # Connections each service needs before it is considered up.
        self.__SERVICE_CONNECTION_THRESHOLD = {'client': 1, 'server': 2}

        # Expose the client's data-received event directly.
        self.data_rx = self.__service_list['client'].data_rx
示例#16
0
class RepositoryBase:
    """Base class for repositories."""

    # Fired when new repository data is available.
    new_repo_data_event = Event()

    def save_analyzed_video(self, analyzed_video):
        """Persist *analyzed_video* and notify subscribers (stub: data is None)."""
        data = None
        self.new_repo_data_event(data)
示例#17
0
 def __init__(self, hmms, plot_result=False):
     """Store the HMM set and plotting preference, and create the
     managing_frame event raised once a file has been completely fired
     (used for sending the results)."""
     self.hmms = hmms
     self.plot_result = plot_result
     self.managing_frame = Event()
示例#18
0
class VideoRecorderBase:
    """Base class for video recorders."""

    # Fired when a recording completes.
    video_recorded_event = Event()
    # Frame count after which the event should trigger.
    num_of_frames_for_trigger = 0

    def set_video_frames_to_trigger_event(self, num_of_frames):
        """Set how many frames must be recorded before triggering."""
        self.num_of_frames_for_trigger = num_of_frames

    def start_recording(self):
        """Start recording; no-op in the base class."""
示例#19
0
    def __init__(self,
                 serial_port='/dev/ttyACM0',
                 baud_rate=115200,
                 timer_ms=10):
        """
        Open the serial port and create this monitor's events.

        :param serial_port: device path of the serial port
        :param baud_rate: serial baud rate
        :param timer_ms: value passed as serial.Serial's read timeout.
                         NOTE(review): pyserial's timeout is in SECONDS,
                         but the name suggests milliseconds -- confirm units.
        """
        self.baud_rate = baud_rate
        self.serial_port = serial_port
        self.timer_ms = timer_ms

        # array of bytes
        self._localBuffer = []

        #  establish connection
        self._datPort = serial.Serial(serial_port, baud_rate, timeout=timer_ms)
        # self._datPort.timeout = timer_ms

        # Events raised towards subscribers.
        self.packet_recv = Event()
        self.disconnected = Event()
        self.onerror = Event()
示例#20
0
    def _setup_axel_events(self):
        """
        Create the axel Event objects this object exposes.
        """
        # Raised with a message describing simulator output:
        #   sim_output_message(msg)
        self.sim_output_message = Event()

        # Raised when a team may deploy members:
        #   event_choose_deployments(team_id, non_fielded_team, empty_positions)
        # non_fielded_team: team members who are not on the field but are
        #                   conscious
        # empty_positions: empty field positions into which an oeo could
        #                  be deployed
        self.event_choose_deployments = Event()

        # Raised when fielded oeo must pick an action for the current turn:
        #   event_choose_actions(team_id, oeo_requiring_actions)
        self.event_choose_actions = Event()
示例#21
0
    def __init__(self,
                 url: str = "",
                 mode: str = MODE_BRANCH,
                 webroot: str = "",
                 git_repo_path: str = "",
                 max_remote_threads: int = 10,
                 max_local_threads: int = 5,
                 verify_ssl: bool = True,
                 debug: bool = False,
                 session: requests.Session = None,
                 user_agent: str = None):
        """
        Scanner constructor
        :param url: Base url of the remote host
        :param mode: Scan mode (tag, commit, branch)
        :param webroot: Local webroot in the git path
        :param git_repo_path: Local git repository path
        :param max_remote_threads: Maximum amount of threads to use while connecting to the remote host
        :param max_local_threads: Maximum amount of local worker threads
        :param verify_ssl: Should SSL certificates be verified?
        :param debug: Enable extra debug logging
        :param session: Optional requests object, used to fetch data
        :param user_agent: Optional User-Agent override (defaults to DEFAULT_USER_AGENT)
        """
        self.__url = url.rstrip("/") + "/"
        self._mode = mode
        self.__root = webroot.lstrip("\\/")
        # Per-head {file: hash} trees, local and remote.
        self._files_local = OrderedDefaultDict(dict)
        self.__files_remote = OrderedDefaultDict(dict)
        self.__max_remote_threads = max_remote_threads
        self._max_local_threads = max_local_threads
        # Fall back to the requests module itself when no session is given.
        self.__session = session or requests
        self.__verify_ssl = verify_ssl
        # Force a clean checkout of master before scanning.
        self.__repo = Repo(path=git_repo_path)
        self.__repo.heads.master.checkout("-f")
        self.__base = join(self.git_path, self.__root)
        self.__request_count = 0
        self._hashing_algorithm = "sha256"
        self.__hashing_interrupted = False
        self.__debug = debug
        self.__user_agent = user_agent or self.DEFAULT_USER_AGENT

        # Events for log messages and progress reporting.
        self.on_log = Event()
        self.on_progress = Event()
示例#22
0
def remote_event(*args, **kwargs):
    """
    Build an Event flagged for exposure through a remote service.
    The owning class must additionally be decorated as
    :func:`remote_service`.  Warning: this is a factory, _not_ a decorator.

    Parameters
    ----------
    *args
        Positional arguments forwarded to the Event constructor.
    **kwargs
        Keyword arguments forwarded to the Event constructor.

    Returns
    -------
    E : Event
        Event with remote access enabled.
    """
    event = Event(*args, **kwargs)
    event._remote_event = True
    return event
示例#23
0
    def __init__(self, config):
        """
        Build the integrity checker.

        :param config: Configuration object
        :type config: config.Config
        """
        self.config = config

        # Server record; populated later.
        self.server = None
        # When True the server was just created and no events will be fired.
        self.server_is_new = False
        # Collected events awaiting dispatch.
        self.events = []

        self.on_events_detected = EventHandler()
示例#24
0
def remote_event(*args, **kwargs):
    """
    Generate an event that should be exposed through a remote service.
    Also requires the class to be decorated as :func:`remote_service`.
    Warning: this is _not_ a decorator.

    Parameters
    ----------
    *args
        Positional arguments to pass to the Event constructor.
    **kwargs
        Keyword arguments to pass to the Event constructor.

    Returns
    -------
    E : Event
        Event with remote access enabled.
    """
    remote = Event(*args, **kwargs)
    # Mark the event so the remote-service machinery picks it up.
    remote._remote_event = True
    return remote
示例#25
0
File: base.py — Project: Flid/hmcs
 def __init__(self):
     """Create system events and plugin bookkeeping structures."""
     self.system_init = Event()
     self.socket_event_received = Event()
     # Route every raw socket event through our dispatcher first.
     self.socket_event_received += self._on_socket_event_received
     # Maps socket event names to their subscriber lists.
     self._socket_event_subscribers = defaultdict(list)
     self._plugins = []
示例#26
0
 def __init__(self):
     # Event fired whenever a packet is received.
     self.packet_recv = Event()
示例#27
0
File: key.py — Project: zqbxx/key-manager
        key_list = cache.get_key_list()
        for key in key_list:
            last_tm = key.last_access
            now = time.time()
            duration = now - last_tm
            if key.timeout:
                continue
            if duration > KEY_CHECKER['timeout']:
                key.timeout = True
                KEY_CHECKER['invalidate'](key)
        time.sleep(1)


KEY_CACHE = KeyCache()
# Global key-checker state: the invalidation Event, the background
# checker thread handle, and the inactivity timeout in seconds.
KEY_CHECKER = {
    'invalidate': Event(),
    'thread': None,
    'timeout': 60 * 5
}


def start_check_key_thread():
    """Start the background key-expiry checker thread (at most once).

    Records the thread in KEY_CHECKER['thread'] so repeated calls do not
    spawn duplicate checker threads -- the original assigned the thread
    only to a local name, leaving the guard permanently True.
    """
    if KEY_CHECKER['thread'] is None:
        check_thread = threading.Thread(target=_check_key, daemon=True)
        KEY_CHECKER['thread'] = check_thread
        check_thread.start()


def add_key_invalidate_callback(cb: Callable):
    """Subscribe *cb* to the key-invalidation event."""
    KEY_CHECKER['invalidate'] += cb

示例#28
0
class Scanner:
    """
    Core scanner class
    Handles iteration and keeps track of all files
    """
    # Scan modes.
    MODE_TAG = "tag"
    MODE_COMMIT = "commit"
    MODE_BRANCH = "branch"

    # Log-level prefixes used with the on_log event.
    LOG_LEVEL_INFO = "*"
    LOG_LEVEL_ERROR = "!"
    LOG_LEVEL_WARNING = "-"
    LOG_LEVEL_SUCCESS = "+"

    DEFAULT_USER_AGENT = f"GitFingerprint/{__version__}"

    def __init__(self,
                 url: str = "",
                 mode: str = MODE_BRANCH,
                 webroot: str = "",
                 git_repo_path: str = "",
                 max_remote_threads: int = 10,
                 max_local_threads: int = 5,
                 verify_ssl: bool = True,
                 debug: bool = False,
                 session: requests.Session = None,
                 user_agent: str = None):
        """
        Scanner constructor
        :param url: Base url of the remote host
        :param mode: Scan mode (tag, commit, branch)
        :param webroot: Local webroot in the git path
        :param git_repo_path: Local git repository path
        :param max_remote_threads: Maximum amount of threads to use while connecting to the remote host
        :param max_local_threads: Maximum amount of local hashing threads
        :param verify_ssl: Should SSL certificates be verified?
        :param debug: Enable extra debug logging
        :param session: Optional requests object, used to fetch data
        :param user_agent: Optional User-Agent override
        """
        self.__url = url.rstrip("/") + "/"
        self._mode = mode
        self.__root = webroot.lstrip("\\/")
        # Per-head {file: hash} trees, local and remote.
        self._files_local = OrderedDefaultDict(dict)
        self.__files_remote = OrderedDefaultDict(dict)
        self.__max_remote_threads = max_remote_threads
        self._max_local_threads = max_local_threads
        # Fall back to the requests module itself when no session is given.
        self.__session = session or requests
        self.__verify_ssl = verify_ssl
        # Force a clean checkout of master before scanning.
        self.__repo = Repo(path=git_repo_path)
        self.__repo.heads.master.checkout("-f")
        self.__base = join(self.git_path, self.__root)
        self.__request_count = 0
        self._hashing_algorithm = "sha256"
        self.__hashing_interrupted = False
        self.__debug = debug
        self.__user_agent = user_agent or self.DEFAULT_USER_AGENT

        # Events for log messages and progress reporting.
        self.on_log = Event()
        self.on_progress = Event()

    @property
    def hashing_algorithm(self):
        """
        Get the currently used hashing algorithm
        :return: String
        """
        return self._hashing_algorithm

    @hashing_algorithm.setter
    def hashing_algorithm(self, algorithm: str):
        """
        Set the hashing algorithm to use
        :param algorithm: String
        :return: None
        :raises GitFingerprintException: for algorithms hashlib does not guarantee
        """
        if algorithm not in hashlib.algorithms_guaranteed:
            raise GitFingerprintException(
                f"Invalid hashing algorithm '{algorithm}', accepted algorithms: "
                f"{', '.join(hashlib.algorithms_guaranteed)}.")

        self._hashing_algorithm = algorithm

    @property
    def cache(self):
        """
        Get the local cache (hashing algorithm + local file trees)
        :return: Ordered dict, 
        """
        return {
            "hashing_algorithm": self._hashing_algorithm,
            "files": list(self._files_local.items())
        }

    @cache.setter
    def cache(self, cache):
        """
        Restore local files from a previous session (validate if the cache is valid)
        :return: None
        :raises GitFingerprintException: if the cached hashing algorithm differs
        """
        if cache.get("hashing_algorithm") != self._hashing_algorithm:
            raise GitFingerprintException(
                f"Refused to overwrite cache, hashing algorithm "
                f"'{cache.get('hashing_algorithm')}' does not match hashing "
                f"algorithm '{self._hashing_algorithm}'.")

        for head, files in cache.get("files", []):
            self._files_local[head] = files

    # NOTE(review): @lru_cache on an instance method keys on self and keeps
    # the instance alive for the cache's lifetime; each property below is
    # computed once per instance.
    @property
    @lru_cache()
    def results(self):
        """
        Calculate all results and sort them
        :return: Sorted results by amount of valid hashes and total successful hits
        """
        results = defaultdict(Counter)
        remote = {}

        # Only keep remote files that actually exist (non-404).
        for file, details in self.__files_remote.items():
            if details.get("status") != 404:
                remote[file] = details

        total_hits = len(remote)

        for head, files in self.__get_hashes_per_head():
            # Hit percentage relative to this head's file count, capped at 100.
            # NOTE(review): raises ZeroDivisionError when a head has no
            # files -- confirm heads are never empty here.
            results[head]["hits"] = total_hits / (len(files) / 100)
            results[head]["hits"] = results[head][
                "hits"] if results[head]["hits"] < 100 else 100
            results[head]["hashes"] = 0

            # Count remote files whose hash matches this head's version.
            for file, details in remote.items():
                if file in files and details.get("hash") == files[file]:
                    results[head]["hashes"] += 1

            results[head]["hashes"] = results[head]["hashes"] / (
                (total_hits / 100) or 1)

        return sorted(results.items(),
                      key=lambda x: (x[1]["hashes"], x[1]["hits"]),
                      reverse=True)

    @property
    @lru_cache()
    def all_heads(self):
        """
        Get a list of all local heads
        :return: List of unique heads
        """
        return set(self._files_local.keys())

    @property
    @lru_cache()
    def all_files(self) -> Set[str]:
        """
        Unique list of all files
        :return: List of all unique file names
        """
        return set(self.__all_files())

    def __all_files(self):
        """
        Get all files used in the tree structure
        :return: Iterator of strings
        """
        for head, files in self._files_local.items():
            for file in files.keys():
                yield file

    def __get_hashes_per_head(self):
        """
        Get all hashes per head, prevents us from having to copy deep structures
        into memory, this method creates the dictionary as the head moves from
        commit to commit.

        NOTE(review): the SAME tracker dict object is yielded for every head
        and mutated in place -- consumers must use it before advancing.

        :return: Iterator 
        """
        tracker = {}

        for head, files in self._files_local.items():
            for file, file_hash in files.items():
                if file_hash is not None:
                    tracker[file] = file_hash
                # A None hash marks a deletion in this head.
                if file_hash is None and file in tracker:
                    del tracker[file]

            yield head, tracker

    @property
    @lru_cache()
    def mode(self) -> str:
        """
        Mode getter, converts all half
        (matches any MODE_* constant by its first letter, case-insensitive)
        :return: String
        :raises GitFingerprintException: when _mode matches no constant
        """
        for key in dir(self):
            if key.startswith("MODE_"):
                if getattr(self, key)[0].lower() == self._mode[0].lower():
                    return getattr(self, key)

        raise GitFingerprintException(f"Invalid scan mode: {self._mode}.")

    @property
    def git_path(self) -> str:
        """
        Getter for the absolute git path
        :return: String
        """
        return self.__repo.working_dir

    def log(self, message, level: str = LOG_LEVEL_INFO):
        """
        Shorthand for firing the log event
        :param message: 
        :param level: 
        :return: 
        """
        self.on_log.fire(message, level)

    def scan_local(self):
        """
        Build the local file tree
        :return: None
        """
        self.log(f"Building {self.mode} tree with hashing "
                 f"algorithm {self.hashing_algorithm}..")

        # Dispatch on the resolved scan mode.
        if self.mode == self.MODE_BRANCH:
            self.__build_file_tree(self.__branch_iterator())
        if self.mode == self.MODE_TAG:
            self.__build_file_tree(self.__tag_iterator())
        if self.mode == self.MODE_COMMIT:
            self.__build_commit_tree()

    def scan_remote(self):
        """
        Start scanning the remote host and return the results
        :return: None
        """
        self.log(f"Scanning remote host ({len(self.all_files)} files "
                 f"over {len(self._files_local)} {self.mode}s)..")

        # Fan out one HTTP request per known file.
        pool = ThreadPool(self.__max_remote_threads)
        pool.map_async(self.request, self.all_files)
        pool.close()
        pool.join()

    def __build_commit_tree(self):
        """Hash every commit's files using a pool of worker threads, each
        operating on its own temporary clone."""
        def on_error(e):
            # NOTE(review): exit() here raises SystemExit in the pool's
            # callback thread, not the main thread -- confirm intended.
            print(str(e) + "\n" + "".join(format_tb(e.__traceback__)))
            exit(1)

        # Resume after the last commit already present in the cache.
        cache = next(reversed(self._files_local), None)
        total = self.__repo.head.commit.count()

        commits = []

        for commit in self.__repo.iter_commits(reverse=True):
            if commit.hexsha not in self._files_local:
                commits.append(commit.hexsha)

        # One chunk of commits per local worker thread.
        commits = chunks(commits, self._max_local_threads)
        commit_chunk_size = len(next(iter(commits), []))

        builder = partial(self.__build_commit_tree_thread, total)

        if cache:
            self.log(f"Starting at cached commit: {cache}",
                     self.LOG_LEVEL_INFO)

        pool = ThreadPool(processes=self._max_local_threads + 1)

        try:
            self.log(
                f"Setting up {self._max_local_threads} temporary repositories "
                f"and assigning {commit_chunk_size} commits per thread (total "
                f"commits: {total}).")

            pool.map_async(builder, commits, error_callback=on_error)
            pool.close()
            pool.join()

        except KeyboardInterrupt:
            # Signal workers to stop, then wait for them.
            self.log("Interrupt detected, halting threads..",
                     self.LOG_LEVEL_WARNING)
            self.__hashing_interrupted = True
            pool.join()
            raise

        finally:
            self.log("Cleaning up temporary repositories..")

        self.log("Finished building commit tree..")

    @temporary_directory
    def __build_commit_tree_thread(self, temp: str, total: int,
                                   commits: List[str]):
        """
        Build a part of the commit tree in a thread 
        :param temp: temporary directory supplied by the decorator
        :param total: total number of commits (for progress reporting)
        :param commits: chunk of commit hexshas this thread processes
        :return: 
        """
        if self.__debug:
            self.log(f"Setting up temporary repository in: {temp}")

        # Clone only the .git directory into the temp dir, then check out.
        copytree(join(self.git_path, ".git"), join(temp, ".git"))
        temp_repo = Repo(temp)
        temp_repo.git.checkout("master", "-f")

        for index, commit in enumerate(commits):
            if self.__hashing_interrupted:
                break

            # Report progress every 5 commits.
            if index % 5 == 0:
                self.on_progress.fire(current=len(self._files_local) + 1,
                                      total=total)

            try:
                temp_repo.git.checkout(commit, "-f")
                files = temp_repo.head.commit.stats.files
                self._files_local[commit] = {
                }  # Ensures it gets skipped next time round

            except (GitCommandError, OSError, ValueError):
                break

            for file in files:
                # Only hash files under the configured webroot.
                if not file.startswith(self.__root):
                    continue

                path = join(temp, file)
                file = relpath(file, self.__root)

                if isfile(path):
                    with open(path, "rb") as buff:
                        self._files_local[commit][file] = self.hash(buff)
                else:
                    # File was deleted in this commit; record None.
                    self._files_local[commit][file] = None

    def __branch_iterator(self):
        """
        Walk over all branches in the head and check them out
        :return: Head
        """
        for branch in self.__repo.git.branch("-a").splitlines(keepends=False):
            # Skip symbolic refs like "HEAD -> origin/master".
            if " -> " in branch:
                continue

            branch = branch.strip("* ")
            self.__repo.git.checkout(branch)
            yield from self.__walk_local_files(branch)

    def __tag_iterator(self):
        """
        Walk over all tags individually
        :return: Head
        """
        for tag in self.__repo.tags:
            self.log(f"Scanning tag '{tag.name}'..")
            self.__repo.git.checkout(tag.name)
            yield from self.__walk_local_files(tag.name)

    def __walk_local_files(self, head) -> Tuple[str, str]:
        """
        Method that walks over all files, and joins their relative paths
        :return: Tuple of (head, relative path, hash)
        """
        for path, _, files in walk(self.__base):
            for file in files:
                with open(join(path, file), "rb") as buff:
                    yield head, relpath(join(path, file),
                                        self.__base), self.hash(buff)

    def __build_file_tree(self, iterator):
        """
        Generic 'build file tree' method
        :param iterator: yields (head, file, file_hash) triples
        :return: 
        """
        for head, file, file_hash in iterator:
            self._files_local[head][file] = file_hash

    def request(self, file):
        """
        Make a (threaded) request to the server
        :param file: File we're going to request
        """
        resp = self.__session.get(url=self.__url + file,
                                  allow_redirects=False,
                                  verify=self.__verify_ssl,
                                  headers={"User-Agent": self.__user_agent})

        # Record the response hash and status for later matching.
        self.__files_remote[file]["hash"] = self.hash(BytesIO(resp.content))
        self.__files_remote[file]["status"] = resp.status_code
        self.__request_count += 1

        # Report progress every 10 requests.
        if self.__request_count % 10 == 0:
            self.on_progress.fire(count=self.__request_count)

    def hash(self, file, block_size=65536) -> str:
        """
        Hash a file efficiently using the algorithm specified
        :param file: File like object to hash 
        :param block_size: Buffer size
        :return: Hash as a string
        """
        algo = getattr(hashlib, self._hashing_algorithm)()

        # Stream in fixed-size blocks to bound memory usage.
        for block in iter(lambda: file.read(block_size), b""):
            algo.update(block)

        return str(algo.hexdigest())
示例#29
0
class VideoCreatorBase:
    """Base class for video creators."""

    # Fired when a video has been created.
    video_created_event = Event()
    # Frame-count threshold (NOTE(review): unused within this base class).
    num_of_frames_for_trigger = 0

    def use_if_suited(self, data):
        """Stub; no-op in the base class."""
        pass
示例#30
0
def fire_event(name, *args, **kwargs):
    """
    Fire the event *name*, invoking every registered handler.

    Control keywords are popped out of **kwargs before being forwarded to
    the handlers:
      is_after_event -- this call is itself a '<name>.after' follow-up
      on_complete    -- callable invoked after everything else has run
      single         -- return only the first non-None handler result
      merge          -- merge dict/list results from all handlers
      in_order       -- serialize handlers behind a shared lock

    :param name: Event name; unknown names are silently ignored
    :return: The handler result(s) — possibly rewritten by a
             'result.modify.<name>' event — or None
    """
    if name not in events: return

    try:

        # Defaults for the control options; actual values are popped from
        # kwargs so the handlers never see them.
        options = {
            'is_after_event': False,  # Fire after event
            'on_complete': False,  # onComplete event
            'single': False,  # Return single handler
            'merge': False,  # Merge items
            'in_order':
            False,  # Fire them in specific order, waits for the other to finish
        }

        # Do options
        for option in options:
            options[option] = kwargs.pop(option, options[option])

        if len(events[name]) == 1:

            # Single handler: call it inline, no thread machinery needed.
            single = None
            try:
                single = events[name][0]['handler'](*args, **kwargs)
            except Exception:
                log.error('Failed running single event: %s',
                          traceback.format_exc())

            # Don't load thread for single event
            result = {
                'single': (single is not None, single),
            }

        else:

            e = Event(name=name, threads=10, exc_info=True, traceback=True)

            for event in events[name]:
                e.handle(event['handler'], priority=event['priority'])

            # Make sure only 1 event is fired at a time when order is wanted
            kwargs['event_order_lock'] = (
                threading.RLock()
                if options['in_order'] or options['single'] else None)
            kwargs['event_return_on_result'] = options['single']

            # Fire
            result = e(*args, **kwargs)

        result_keys = sorted(result.keys(), key=nat_sort_key)

        if options['single'] and not options['merge']:
            results = None

            # Loop over results, stop when first not None result is found.
            for r_key in result_keys:
                r = result[r_key]
                if r[0] is True and r[1] is not None:
                    results = r[1]
                    break
                elif r[1]:
                    error_handler(r[1])
                else:
                    log.debug('Assume disabled eventhandler for: %s', name)

        else:
            results = []
            for r_key in result_keys:
                r = result[r_key]
                if r[0] is True and r[1]:
                    results.append(r[1])
                elif r[1]:
                    error_handler(r[1])

            # Merge
            if options['merge'] and len(results) > 0:

                # Dict results: reversed so earlier handlers take precedence.
                if isinstance(results[0], dict):
                    results.reverse()

                    merged = {}
                    for item in results:
                        merged = merge_dictionaries(merged,
                                                    item,
                                                    prepend_list=True)

                    results = merged
                # List results: concatenate, skipping duplicate sublists.
                elif isinstance(results[0], list):
                    merged = []
                    for item in results:
                        if item not in merged:
                            merged += item

                    results = merged

        # Give listeners a chance to rewrite the final result.
        modified_results = fire_event('result.modify.%s' % name,
                                      results,
                                      single=True)
        if modified_results:
            log.debug('Return modified results for %s', name)
            results = modified_results

        if not options['is_after_event']:
            fire_event('%s.after' % name, is_after_event=True)

        if options['on_complete']:
            options['on_complete']()

        return results
    except Exception:
        # BUG FIX: the original passed a single tuple to two %s placeholders,
        # which broke the formatting of this error log itself.
        log.error('%s: %s', name, traceback.format_exc())
示例#31
0
class Integrity:
    """
    Class that handles the integrity check process
    If the current server does not exist in the database, a new record will be added
    """
    def __init__(self, config):
        """
        Integrity constructor
        :param config: Configuration object
        :type config: config.Config
        """
        self.config = config
        self.server = None  # Set by load_database()
        self.server_is_new = False  # If set to true, no events will be fired
        self.events = []  # Event records collected during identify()

        self.on_events_detected = EventHandler()

    def load_database(self):
        """
        Loads and initializes the database if necessary
        :return: None
        """
        if not database_exists():
            print("[+] No database found, creating database '{}'".format(
                DATABASE_PATH))
            create_database()

        if self._server_exists():
            self._load_server()
        else:
            print(
                "[+] First run detected for server '{}', setting up tracker.".
                format(self.config.server_name))
            print(
                "[?] Note: No changes will be able to be detected this session"
            )
            self._add_server()

    def _server_exists(self):
        """
        Check if this server already is being tracked
        :return: Returns true if the server already exists
        """
        return Server.exists(name=self.config.server_name)

    def _add_server(self):
        """
        Add the current server to the table of servers
        The server_is_new flag will also be set for this session
        :return: None
        """
        self.server = Server.create(name=self.config.server_name)
        self.server_is_new = True

    def _load_server(self):
        """
        Load the server from the database
        :return: None
        """
        self.server = Server.get(name=self.config.server_name)

    def identify(self, output):
        """
        Compare the server output against the stored checksums and record
        file addition, modification and removal events
        :param output: Server output as (path, checksum) pairs
        :type output: list
        :return: None
        """
        for path, checksum in output:

            # Find the checksum record
            checksum_record = self.server.get_related_checksum(path, checksum)

            # New (unknown) file detected
            if checksum_record is None:
                checksum_record = Checksum.create(path=path,
                                                  checksum=checksum,
                                                  server=self.server)
                self._handle_file_added(checksum_record)
                continue

            # Check if the file was modified
            if checksum_record.checksum != checksum:
                self._handle_file_modified(checksum_record, checksum)
                # NOTE(review): the new checksum is assigned but never
                # explicitly saved here — confirm the ORM persists it.
                checksum_record.checksum = checksum
                continue

        # Any known record that no longer appears in the output was removed.
        # NOTE(review): records are deleted while iterating the collection —
        # confirm server.checksums yields a stable snapshot.
        for checksum_record in self.server.checksums:
            if not any(o[0] == checksum_record.path for o in output):
                self._handle_file_removed(checksum_record)
                checksum_record.delete()

        # On events detected
        if self.events:
            self.on_events_detected.fire(
                self._get_events_as_anonymous_obj_list())

    def _get_events_as_anonymous_obj_list(self):
        """
        Convert the collected events to a list of anonymous objects
        :return: List of anonymous objects
        :rtype: list[object]
        """
        return [e.to_anonymous_object() for e in self.events]

    def _handle_file_added(self, checksum_record):
        """
        An unknown new file was detected, log the event
        (suppressed on a brand-new server, where every file would be "new")
        :param checksum_record: Checksum record which the event will be related to
        :type checksum_record: models.Checksum
        :return: None
        """
        if not self.server_is_new:
            description = "A new file was detected at '{path}'".format(
                path=checksum_record.path)
            event = Event.create(event=Event.FILE_ADDED,
                                 description=description,
                                 checksum=checksum_record)
            self.events.append(event)

    def _handle_file_modified(self, checksum_record, checksum):
        """
        A known file was modified, log the event
        :param checksum_record: Checksum record which the event will be related to
        :param checksum: The new checksum that was detected
        :type checksum_record: models.Checksum
        :type checksum: str
        :return: None
        """
        description = "File modification was detected at '{path}'".format(
            path=checksum_record.path)
        event = Event.create(event=Event.FILE_MODIFIED,
                             description=description,
                             checksum=checksum_record)
        self.events.append(event)

    def _handle_file_removed(self, checksum_record):
        """
        A known file was removed, log the event
        :param checksum_record: Checksum record which the event will be related to
        :type checksum_record: models.Checksum
        :return: None
        """
        description = "File removal was detected at '{path}'".format(
            path=checksum_record.path)
        event = Event.create(event=Event.FILE_REMOVED,
                             description=description,
                             checksum=checksum_record)
        self.events.append(event)

    def print_statistics(self):
        """
        Print the statistics of all events that were found
        :return: None
        """
        print("[+] Integrity statistics")
        print("    |-- Files added:    {}".format(
            self._get_addition_event_count()))
        print("    |-- Files removed:  {}".format(
            self._get_removal_event_count()))
        print("    `-- Files modified: {}".format(
            self._get_modified_event_count()))

    def _count_events(self, event_type):
        """
        Count collected events of the given type
        :param event_type: One of the Event.FILE_* constants
        :return: Number of matching events
        :rtype: int
        """
        return len([e for e in self.events if e.event == event_type])

    def _get_addition_event_count(self):
        """
        Get the amount of events related to file addition
        :return: Amount of addition events
        :rtype: int
        """
        return self._count_events(Event.FILE_ADDED)

    def _get_removal_event_count(self):
        """
        Get the amount of events related to file removal
        :return: Amount of removal events
        :rtype: int
        """
        return self._count_events(Event.FILE_REMOVED)

    def _get_modified_event_count(self):
        """
        Get the amount of events related to file modification
        :return: Amount of modification events
        :rtype: int
        """
        return self._count_events(Event.FILE_MODIFIED)