Example #1
def load_watch_data():
    data_path = os.path.join(sublime.packages_path(), 'User', S.FILE_WATCH_DATA)
    data = []
    try:
        # Read the raw watch data; the 'with' block guarantees the file is closed
        with open(data_path, 'rb') as data_file:
            raw = data_file.read()
    except:
        e = sys.exc_info()[1]
        info('Failed to open %s.' % data_path)
        debug(e)
    else:
        try:
            data = json.loads(H.data_read(raw))
        except:
            e = sys.exc_info()[1]
            info('Failed to parse %s.' % data_path)
            debug(e)

    # Check if expression is not already defined
    duplicates = []
    for index, entry in enumerate(data):
        matches = [x for x in S.WATCH if x['expression'] == entry['expression']]
        if matches:
            duplicates.append(entry)
        else:
            # Unset any previous value
            data[index]['value'] = None
    for duplicate in duplicates:
        data.remove(duplicate)

    if not isinstance(S.WATCH, list):
        S.WATCH = []

    # Set watch data
    S.WATCH.extend(data)
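
The watch file that Example #1 reads is a JSON list of objects, each carrying an 'expression' key; on load the 'value' field is reset and only expressions not already present in S.WATCH are appended. A minimal sketch of producing such a file with the standard json module (an illustration and an assumption, not part of the plugin; the path and the extra 'enabled' field are made up):

import json

watch_entries = [
    {'expression': '$request->user', 'enabled': True, 'value': None},  # 'enabled' is illustrative
    {'expression': 'count($items)', 'enabled': True, 'value': None},
]
with open('/tmp/watch_data.json', 'w') as f:  # illustrative path
    json.dump(watch_entries, f)
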
Example #2
def load_breakpoint_data():
    data_path = os.path.join(sublime.packages_path(), 'User',
                             S.FILE_BREAKPOINT_DATA)
    data = {}
    try:
        # Read the raw breakpoint data; the 'with' block guarantees the file is closed
        with open(data_path, 'rb') as data_file:
            raw = data_file.read()
    except:
        e = sys.exc_info()[1]
        info('Failed to open %s.' % data_path)
        debug(e)
    else:
        try:
            data = json.loads(H.data_read(raw))
        except:
            e = sys.exc_info()[1]
            info('Failed to parse %s.' % data_path)
            debug(e)

    # Do not use deleted files or entries without breakpoints
    if data:
        for filename, breakpoint_data in data.copy().items():
            if not breakpoint_data or not os.path.isfile(filename):
                del data[filename]

    if not isinstance(S.BREAKPOINT, dict):
        S.BREAKPOINT = {}

    # Set breakpoint data
    S.BREAKPOINT.update(data)
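
The breakpoint file read by Example #2 is a JSON object mapping file paths to that file's breakpoint data; entries for deleted files or files without breakpoints are discarded before merging into S.BREAKPOINT. A minimal sketch of producing such a file (illustrative only; the per-line layout shown is an assumption, not taken from the plugin):

import json

breakpoints = {
    '/var/www/project/index.php': {'12': {'enabled': True}},  # line number -> settings (assumed layout)
    '/var/www/project/removed.php': {},                       # empty entry: dropped on load
}
with open('/tmp/breakpoint_data.json', 'w') as f:  # illustrative path
    json.dump(breakpoints, f)
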
Example #3
def load_watch_data():
    data_path = os.path.join(sublime.packages_path(), 'User', S.FILE_WATCH_DATA)
    data = []
    try:
        # Read the raw watch data; the 'with' block guarantees the file is closed
        with open(data_path, 'rb') as data_file:
            raw = data_file.read()
    except:
        e = sys.exc_info()[1]
        info('Failed to open %s.' % data_path)
        debug(e)
    else:
        try:
            data = json.loads(H.data_read(raw))
        except:
            e = sys.exc_info()[1]
            info('Failed to parse %s.' % data_path)
            debug(e)

    # Skip expressions that are already defined; iterate over a copy so that
    # removing entries does not skip the element after each removal
    for entry in data[:]:
        matches = [x for x in S.WATCH if x['expression'] == entry['expression']]
        if matches:
            data.remove(entry)

    if not isinstance(S.WATCH, list):
        S.WATCH = []

    # Set watch data
    S.WATCH.extend(data)
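
Example #3 iterates over data[:] (a copy) because removing items from the very list being iterated skips the element that follows each removal. A minimal demonstration of the pitfall the copy avoids:

items = [1, 1, 2, 1]
for x in items:        # iterating the same list we mutate
    if x == 1:
        items.remove(x)
print(items)           # [1, 2] -- the second 1 survived because it was skipped after the first removal
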
Example #4
def load_breakpoint_data():
    data_path = os.path.join(sublime.packages_path(), 'User', S.FILE_BREAKPOINT_DATA)
    data = {}
    try:
        # Read the raw breakpoint data; the 'with' block guarantees the file is closed
        with open(data_path, 'rb') as data_file:
            raw = data_file.read()
    except:
        e = sys.exc_info()[1]
        info('Failed to open %s.' % data_path)
        debug(e)
    else:
        try:
            data = json.loads(H.data_read(raw))
        except:
            e = sys.exc_info()[1]
            info('Failed to parse %s.' % data_path)
            debug(e)

    # Do not use deleted files or entries without breakpoints
    if data:
        for filename, breakpoint_data in data.copy().items():
            if not breakpoint_data or not os.path.isfile(filename):
                del data[filename]

    if not isinstance(S.BREAKPOINT, dict):
        S.BREAKPOINT = {}

    # Set breakpoint data
    S.BREAKPOINT.update(data)
Example #5
def load_watch_data():
    data_path = os.path.join(sublime.packages_path(), 'User', S.FILE_WATCH_DATA)
    data = []
    try:
        # Read the raw watch data; the 'with' block guarantees the file is closed
        with open(data_path, 'rb') as data_file:
            raw = data_file.read()
    except:
        e = sys.exc_info()[1]
        info('Failed to open %s.' % data_path)
        debug(e)
    else:
        try:
            data = json.loads(H.data_read(raw))
        except:
            e = sys.exc_info()[1]
            info('Failed to parse %s.' % data_path)
            debug(e)

    # Check if expression is not already defined
    duplicates = []
    for index, entry in enumerate(data):
        matches = [x for x in S.WATCH if x['expression'] == entry['expression']]
        if matches:
            duplicates.append(entry)
        else:
            # Unset any previous value
            data[index]['value'] = None
    for duplicate in duplicates:
        data.remove(duplicate)

    if not isinstance(S.WATCH, list):
        S.WATCH = []

    # Set watch data
    S.WATCH.extend(data)
Example #6
 def read_until_null(self):
     """
     Get response data from debugger engine.
     """
     # Check socket connection
     if self.connected:
         # Get result data from debugger engine
         while '\x00' not in self.buffer:
             self.buffer += H.data_read(self.socket.recv(self.read_size))
         data, self.buffer = self.buffer.split('\x00', 1)
         return data
     else:
         raise ProtocolConnectionException("Xdebug is not connected")
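
read_until_null relies on the DBGp framing used by Xdebug: the engine sends a payload length and then an XML document, each terminated by a NUL byte, so the method is typically called once for the length and once for the payload, and any bytes after the terminator stay in self.buffer for the next call. A minimal standalone sketch of that split logic (the payload is illustrative):

buffer = '25\x00<response command="run"/>\x00<resp'
length, buffer = buffer.split('\x00', 1)   # length == '25'
xml, buffer = buffer.split('\x00', 1)      # xml == '<response command="run"/>'
# buffer still holds '<resp'; the next socket read completes that message
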
Example #7
 def read_until_null(self):
     """
     Get response data from debugger engine.
     """
     # Check socket connection
     if self.connected:
         # Get result data from debugger engine
         while '\x00' not in self.buffer:
             self.buffer += H.data_read(self.socket.recv(self.read_size))
         data, self.buffer = self.buffer.split('\x00', 1)
         return data
     else:
         raise ProtocolConnectionException("Xdebug is not connected")
Example #8
 def read_until_null(self):
     """
     Get response data from debugger engine.
     """
     # Check socket connection
     if self.connected:
         # Get result data from debugger engine
         try:
             while '\x00' not in self.buffer:
                 self.buffer += H.data_read(self.socket.recv(self.read_size))
             data, self.buffer = self.buffer.split('\x00', 1)
             return data
         except:
             e = sys.exc_info()[1]
             raise ProtocolConnectionException(e)
     else:
         raise ProtocolConnectionException('Xdebug is not connected')
Example #9
 def read_until_null(self):
     """
     Get response data from debugger engine.
     """
     # Check socket connection
     if self.connected:
         # Get result data from debugger engine
         try:
             while not "\x00" in self.buffer:
                 self.buffer += H.data_read(self.socket.recv(self.read_size))
             data, self.buffer = self.buffer.split("\x00", 1)
             return data
         except:
             e = sys.exc_info()[1]
             raise ProtocolConnectionException(e)
     else:
         raise ProtocolConnectionException("Xdebug is not connected")
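
Examples #8 and #9 differ from #6 and #7 only in wrapping the socket read in a try/except and re-raising any failure as ProtocolConnectionException, so a caller can treat read errors and a missing connection uniformly. A hypothetical caller sketch (the protocol instance is assumed; info and debug are the logging helpers used in the earlier examples):

try:
    response = protocol.read_until_null()
except ProtocolConnectionException:
    e = sys.exc_info()[1]
    info('Connection to Xdebug lost.')
    debug(e)
    response = None
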
Example #10
class Protocol(object):
    """
    Class for connecting with the debugger engine (GRLD; no longer the DBGp protocol).
    """

    # Maximum amount of data to be received at once by socket
    read_size = 1024

    def __init__(self, on_connection_lost_cb=None):
        # Set port number to listen for response
        self.port = get_value(S.KEY_PORT, S.DEFAULT_PORT)

        # Responses pulled from the Lua client (replies to messages we sent)
        self.messages = []

        self.command_cbs = {}

        self.socket = None
        self.lock = threading.RLock()
        self.locked = 0
        self.listening_canceled_event = threading.Event()

        self.on_connection_lost_cb = on_connection_lost_cb

        with self as s:
            s.clear()

    def __enter__(self):
        self.lock.acquire()
        self.locked += 1
        return self

    def __exit__(self, err_type, error_obj, traceback):
        self.lock.release()
        self.locked = max(self.locked - 1, 0)

    def is_locked(self):
        return self.locked > 0

    def register_command_cb(self, cmd_name, cb):
        cbs = self.command_cbs.setdefault(cmd_name, [])
        cbs.append(cb)

    def transaction_id():
        """
        Standard argument for sending commands: a unique numerical ID.
        """
        def fget(self):
            self._transaction_id += 1
            return self._transaction_id

        def fset(self, value):
            self._transaction_id = value

        def fdel(self):
            self._transaction_id = 0

        return locals()

    # Transaction ID property
    transaction_id = property(**transaction_id())

    @assert_locked
    def clear(self):
        """
        Clear variables, reset transaction_id, close socket connection.
        """
        self.buffer = ''
        self.connected = False
        self.listening = False
        del self.transaction_id

        if self.socket:
            self.socket.close()

        self.socket = None

        #self.lock = threading.RLock()
        #self.locked = 0

        self.listening_canceled_event.clear()

    def stop_listening_for_incoming_connections(self):
        self.listening_canceled_event.set()

    @assert_locked
    def is_command(self, message):
        if not message:
            return False

        deserialized_message = deserialize(message)

        return deserialized_message in ('break', 'synchronize')

    @assert_locked
    def handle_command(self, message):
        command_name = deserialize(message)

        if command_name == 'break':
            filename = deserialize(self.read_next_message())
            line = deserialize(self.read_next_message())

            cbargs = (filename, line)

        elif command_name == 'synchronize':
            cbargs = tuple()

        cbs = self.command_cbs.get(command_name)
        if cbs and len(cbs) > 0:
            for cb in cbs:
                cb(*cbargs)

    @assert_locked
    def check_connection(self):
        # Rate-limit keep-alives so we don't flood the network
        t = time.clock()
        if not hasattr(self, 'last_check_time') or (t - self.last_check_time) >= 1:
            self.last_check_time = t
            self.send('', 'ka')  # keep-alive channel

    @assert_locked
    def update(self):
        self.check_connection()

        # read in messages (these might be commands which will be handled immediately)
        message = self.read_next_message(async=True)

        # if it's not a command, we need to keep it around for the next read request
        if message:
            self.messages.append(message)

    @assert_locked
    def parse_grld_message(self, message):
        """
        returns parsed_message, remaining_buffer_data
        """
        if len(message) == 0:
            return '', ''

        if message.count("\n") < 2:
            raise ProtocolException("Tried to parse malformed GRLD data")

        channel, messageSize, remaining = message.split("\n", 2)
        data = remaining[:int(messageSize)]
        remaining = remaining[int(messageSize):]

        return data, remaining

    @assert_locked
    def read_socket_into_buffer(self, async=False):
        """
        Get response data from debugger engine.
        """
        # Check socket connection
        if self.connected:
            # Get result data from debugger engine
            try:
                if async:
                    r, _, _ = select.select([self.socket], [], [], 0)
                    if not r:
                        return

                rawSockData = ''
                while True:
                    rawSockData = self.socket.recv(self.read_size)
                    self.buffer += H.data_read(rawSockData)

                    r, _, _ = select.select([self.socket], [], [], 0)

                    if not r or not rawSockData:
                        break

            except:
                e = sys.exc_info()[1]
                raise ProtocolConnectionException(e)
        else:
            raise ProtocolConnectionException("GRLD is not connected")
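
parse_grld_message expects each GRLD frame in the buffer to be laid out as '<channel>\n<payload length>\n<payload>', with frames packed back to back; it returns one payload plus whatever remains. A minimal sketch with illustrative channel names and payloads (protocol is an assumed Protocol instance; the lock is acquired first because the method is decorated with assert_locked):

with protocol as p:
    buf = 'default\n5\nhellodefault\n3\nfoo'
    first, buf = p.parse_grld_message(buf)    # first == 'hello', buf == 'default\n3\nfoo'
    second, buf = p.parse_grld_message(buf)   # second == 'foo', buf == ''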