Example No. 1
def main():
    G.__VERSION__ = '0.11'
    G.__PLUGIN_VERSION__ = None

    parser = optparse.OptionParser(usage='%prog [options]')
    parser.add_option("-p",
                      "--port",
                      dest="port",
                      default=0,
                      help="The port to listen on. Useful for debugging.")
    parser.add_option("--set-version", dest="version")

    options, args = parser.parse_args()
    port = int(options.port)
    G.__PLUGIN_VERSION__ = options.version
    if not G.__PLUGIN_VERSION__:
        print('--set-version is a required argument')
        print('args: %s' % ' '.join(args))
        sys.exit(1)

    utils.reload_settings()

    try:
        utils.normalize_persistent_data()
    except Exception:
        pass

    emacs = emacs_handler.EmacsHandler()
    G.emacs = emacs
    _, port = reactor.reactor.listen(emacs, port=port)
    utils.set_timeout(cb, 100, port)
    reactor.reactor.block()
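Note: every snippet on this page schedules work through the plugin's utils.set_timeout(cb, timeout_ms, *args) helper and cancels it with utils.cancel_timeout. The Floobits implementation is not reproduced here; the sketch below is only a minimal, thread-based stand-in with an assumed-compatible signature, so the call patterns in these examples can be exercised in isolation. The real plugin dispatches callbacks on the editor's main loop rather than on worker threads.

import threading
import time

# Hypothetical stand-in for Floobits' utils.set_timeout / utils.cancel_timeout.
# Timers are tracked in a registry keyed by an integer id so they can be cancelled.
_timeouts = {}
_next_id = 0
_lock = threading.Lock()


def set_timeout(func, timeout_ms, *args, **kwargs):
    """Call func(*args, **kwargs) after timeout_ms milliseconds. Returns a cancel id."""
    global _next_id
    with _lock:
        _next_id += 1
        timeout_id = _next_id

    def _run():
        with _lock:
            # A cancelled timeout has already been removed from the registry.
            if _timeouts.pop(timeout_id, None) is None:
                return
        func(*args, **kwargs)

    timer = threading.Timer(timeout_ms / 1000.0, _run)
    timer.daemon = True
    with _lock:
        _timeouts[timeout_id] = timer
    timer.start()
    return timeout_id


def cancel_timeout(timeout_id):
    """Cancel a pending timeout. Safe to call with None or an expired id."""
    if timeout_id is None:
        return
    with _lock:
        timer = _timeouts.pop(timeout_id, None)
    if timer is not None:
        timer.cancel()


if __name__ == '__main__':
    # Mirrors the usage in the examples: the callback and its arguments are passed positionally.
    set_timeout(print, 100, 'port is ready')
    cancel_timeout(set_timeout(print, 200, 'never printed'))
    time.sleep(0.5)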
Example No. 2
def plugin_loaded():
    global called_plugin_loaded
    if called_plugin_loaded:
        return
    called_plugin_loaded = True
    print('Floobits: Called plugin_loaded.')

    if not os.path.exists(G.FLOORC_JSON_PATH):
        migrations.migrate_floorc()
    utils.reload_settings()

    # TODO: one day this can be removed (once all our users have updated)
    old_colab_dir = os.path.realpath(
        os.path.expanduser(os.path.join('~', '.floobits')))
    if os.path.isdir(old_colab_dir) and not os.path.exists(G.BASE_DIR):
        print('Renaming %s to %s' % (old_colab_dir, G.BASE_DIR))
        os.rename(old_colab_dir, G.BASE_DIR)
        os.symlink(G.BASE_DIR, old_colab_dir)

    try:
        utils.normalize_persistent_data()
    except Exception as e:
        print('Floobits: Error normalizing persistent data:', str_e(e))
        # Keep on truckin' I guess

    d = utils.get_persistent_data()
    G.AUTO_GENERATED_ACCOUNT = d.get('auto_generated_account', False)

    # Sublime plugin API stuff can't be called right off the bat
    if not utils.can_auth():
        utils.set_timeout(create_or_link_account, 1)

    utils.set_timeout(global_tick, 1)
Example No. 3
def plugin_loaded():
    global called_plugin_loaded
    if called_plugin_loaded:
        return
    called_plugin_loaded = True
    print('Floobits: Called plugin_loaded.')

    utils.reload_settings()

    # TODO: one day this can be removed (once all our users have updated)
    old_colab_dir = os.path.realpath(os.path.expanduser(os.path.join('~', '.floobits')))
    if os.path.isdir(old_colab_dir) and not os.path.exists(G.BASE_DIR):
        print('renaming %s to %s' % (old_colab_dir, G.BASE_DIR))
        os.rename(old_colab_dir, G.BASE_DIR)
        os.symlink(G.BASE_DIR, old_colab_dir)

    d = utils.get_persistent_data()
    G.AUTO_GENERATED_ACCOUNT = d.get('auto_generated_account', False)

    can_auth = (G.USERNAME or G.API_KEY) and G.SECRET
    # Sublime plugin API stuff can't be called right off the bat
    if not can_auth:
        utils.set_timeout(create_or_link_account, 1)

    utils.set_timeout(global_tick, 1)
Example No. 4
def main():
    G.__VERSION__ = "0.11"
    G.__PLUGIN_VERSION__ = None

    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("-p", "--port", dest="port", default=0, help="The port to listen on. Useful for debugging.")
    parser.add_option("--set-version", dest="version")

    options, args = parser.parse_args()
    port = int(options.port)
    G.__PLUGIN_VERSION__ = options.version
    if not G.__PLUGIN_VERSION__:
        print("--set-version is a required argument")
        print("args: %s" % " ".join(args))
        sys.exit(1)

    utils.reload_settings()

    try:
        utils.normalize_persistent_data()
    except Exception:
        pass

    emacs = emacs_handler.EmacsHandler()
    G.emacs = emacs
    _, port = reactor.reactor.listen(emacs, port=port)
    utils.set_timeout(cb, 100, port)
    reactor.reactor.block()
Example No. 5
def plugin_loaded():
    global called_plugin_loaded
    if called_plugin_loaded:
        return
    called_plugin_loaded = True
    print('Floobits: Called plugin_loaded.')

    if not os.path.exists(G.FLOORC_JSON_PATH):
        migrations.migrate_floorc()
    utils.reload_settings()

    # TODO: one day this can be removed (once all our users have updated)
    old_colab_dir = os.path.realpath(os.path.expanduser(os.path.join('~', '.floobits')))
    if os.path.isdir(old_colab_dir) and not os.path.exists(G.BASE_DIR):
        print('Renaming %s to %s' % (old_colab_dir, G.BASE_DIR))
        os.rename(old_colab_dir, G.BASE_DIR)
        os.symlink(G.BASE_DIR, old_colab_dir)

    try:
        utils.normalize_persistent_data()
    except Exception as e:
        print('Floobits: Error normalizing persistent data:', str_e(e))
        # Keep on truckin' I guess

    d = utils.get_persistent_data()
    G.AUTO_GENERATED_ACCOUNT = d.get('auto_generated_account', False)

    # Sublime plugin API stuff can't be called right off the bat
    if not utils.can_auth():
        utils.set_timeout(create_or_link_account, 1)

    utils.set_timeout(global_tick, 1)
Example No. 6
def start_event_loop():
    global ticker

    if G.TIMERS:
        msg.debug('Your Vim was compiled with +timer support. Awesome!')
        return

    if not bool(int(vim.eval('has("clientserver")'))):
        return fallback_to_feedkeys('This VIM was not compiled with clientserver support. You should consider using a different vim!')

    exe = getattr(G, 'VIM_EXECUTABLE', None)
    if not exe:
        return fallback_to_feedkeys('Your vim was compiled with clientserver, but I don\'t know the name of the vim executable. '
                                    'Please define it in your ~/.floorc using the vim_executable directive. e.g. \'vim_executable mvim\'.')

    servername = vim.eval('v:servername')
    if not servername:
        return fallback_to_feedkeys('I can not identify the servername of this vim. You may need to pass --servername to vim at startup.')

    evaler = ticker_python.format(binary=exe, servername=servername, sleep='1.0')
    ticker = subprocess.Popen(['python', '-c', evaler],
                              stderr=subprocess.PIPE,
                              stdout=subprocess.PIPE)
    ticker.poll()
    utils.set_timeout(ticker_watcher, 500, ticker)
Example No. 7
def start_event_loop():
    global ticker

    if G.TIMERS:
        msg.debug('Your Vim was compiled with +timer support. Awesome!')
        return

    if not bool(int(vim.eval('has("clientserver")'))):
        return fallback_to_feedkeys(
            'This VIM was not compiled with clientserver support. You should consider using a different vim!'
        )

    exe = getattr(G, 'VIM_EXECUTABLE', None)
    if not exe:
        return fallback_to_feedkeys(
            'Your vim was compiled with clientserver, but I don\'t know the name of the vim executable. '
            'Please define it in your ~/.floorc using the vim_executable directive. e.g. \'vim_executable mvim\'.'
        )

    servername = vim.eval('v:servername')
    if not servername:
        return fallback_to_feedkeys(
            'I can not identify the servername of this vim. You may need to pass --servername to vim at startup.'
        )

    evaler = ticker_python.format(binary=exe,
                                  servername=servername,
                                  sleep='1.0')
    ticker = subprocess.Popen(['python', '-c', evaler],
                              stderr=subprocess.PIPE,
                              stdout=subprocess.PIPE)
    ticker.poll()
    utils.set_timeout(ticker_watcher, 500, ticker)
Example No. 8
    def _uploader(self, paths_iter, cb, total_bytes, bytes_uploaded=0.0):
        reactor.tick()
        if len(self.proto) > 0:
            return utils.set_timeout(self._uploader, 10, paths_iter, cb,
                                     total_bytes, bytes_uploaded)

        bar_len = 20
        try:
            p = next(paths_iter)
            size = self._upload(p)
            bytes_uploaded += size
            try:
                percent = (bytes_uploaded / total_bytes)
            except ZeroDivisionError:
                percent = 0.5
            bar = '   |' + ('|' * int(bar_len * percent)) + (' ' * int(
                (1 - percent) * bar_len)) + '|'
            editor.status_message('Uploading... %2.2f%% %s' %
                                  (percent * 100, bar))
        except StopIteration:
            editor.status_message('Uploading... 100% ' + ('|' * bar_len) +
                                  '| complete')
            msg.log('All done uploading')
            return cb and cb()
        return utils.set_timeout(self._uploader, 50, paths_iter, cb,
                                 total_bytes, bytes_uploaded)
Example No. 9
def on_room_info_msg():
    who = 'Your friends'
    anon_perms = G.AGENT.workspace_info.get('anon_perms')
    if 'get_buf' in anon_perms:
        who = 'Anyone'
    _msg = 'You are sharing:\n\n%s\n\n%s can join your workspace at:\n\n%s' % (G.PROJECT_PATH, who, G.AGENT.workspace_url)
    # Workaround for horrible Sublime Text bug
    utils.set_timeout(sublime.message_dialog, 0, _msg)
Example No. 10
def on_room_info_msg():
    who = 'Your friends'
    anon_perms = G.AGENT.workspace_info.get('anon_perms')
    if 'get_buf' in anon_perms:
        who = 'Anyone'
    _msg = 'You are sharing:\n\n%s\n\n%s can join your workspace at:\n\n%s' % (
        G.PROJECT_PATH, who, G.AGENT.workspace_url)
    # Workaround for horrible Sublime Text bug
    utils.set_timeout(sublime.message_dialog, 0, _msg)
Example No. 11
def ticker_watcher(ticker):
    global ticker_errors
    if not G.AGENT:
        return
    ticker.poll()
    if ticker.returncode is None:
        return
    msg.warn('respawning new ticker')
    ticker_errors += 1
    if ticker_errors > 10:
        return fallback_to_feedkeys('Too much trouble with the floobits external ticker.')
    start_event_loop()
    utils.set_timeout(ticker_watcher, 2000, ticker)
Example No. 12
def main():
    G.__VERSION__ = '0.11'
    G.__PLUGIN_VERSION__ = '1.2.0'
    utils.reload_settings()

    migrations.rename_floobits_dir()
    migrations.migrate_symlinks()
    utils.normalize_persistent_data()

    emacs = emacs_handler.EmacsHandler()
    G.emacs = emacs
    _, port = reactor.reactor.listen(emacs)
    utils.set_timeout(cb, 100, port)
    reactor.reactor.block()
Example No. 13
def ticker_watcher(ticker):
    global ticker_errors
    if not G.AGENT:
        return
    ticker.poll()
    if ticker.returncode is None:
        return
    msg.warn('respawning new ticker')
    ticker_errors += 1
    if ticker_errors > 10:
        return fallback_to_feedkeys(
            'Too much trouble with the floobits external ticker.')
    start_event_loop()
    utils.set_timeout(ticker_watcher, 2000, ticker)
Example No. 14
def is_modifiable(name_to_check=None):
    if not agent or not agent.protocol:
        return
    vim_buf = vim.current.buffer
    name = vim_buf.name
    if not name:
        return
    if name_to_check and name_to_check != name:
        msg.warn('Can not call readonly on file: %s' % name)
    if not utils.is_shared(name):
        return
    if 'patch' not in agent.protocol.perms:
        vim.command("call g:FlooSetReadOnly()")
        utils.set_timeout(is_modifiable, 0, name)
Example No. 15
    def _connect(self, attempts=0):
        if attempts > 500:
            msg.error('Connection attempt timed out.')
            return self._reconnect()
        if not self._sock:
            msg.debug('_connect: No socket')
            return
        try:
            self._sock.connect((self.host, self.port))
            select.select([self._sock], [self._sock], [], 0)
        except socket.error as e:
            if e.errno == iscon_errno:
                pass
            elif e.errno in connect_errno:
                return utils.set_timeout(self._connect, 20, attempts + 1)
            else:
                msg.error('Error connecting:', e)
                return self._reconnect()
        if self.secure:
            sock_debug('SSL-wrapping socket')
            self._sock = ssl.wrap_socket(self._sock,
                                         ca_certs=self._cert_path,
                                         cert_reqs=ssl.CERT_REQUIRED,
                                         do_handshake_on_connect=False)

        self._q.clear()
        self.reconnect_delay = self.INITIAL_RECONNECT_DELAY
        self.retries = self.MAX_RETRIES
        self.emit("connect")
        self.connected = True
Example No. 16
    def _connect(self, host, port, attempts=0):
        if attempts > (self.proxy and 500 or 500):
            msg.error('Connection attempt timed out.')
            return self.reconnect()
        if not self._sock:
            msg.debug('_connect: No socket')
            return
        try:
            self._sock.connect((host, port))
            select.select([self._sock], [self._sock], [], 0)
        except socket.error as e:
            if e.errno == iscon_errno:
                pass
            elif e.errno in connect_errno:
                msg.debug('connect_errno: ', str_e(e))
                return utils.set_timeout(self._connect, 20, host, port, attempts + 1)
            else:
                msg.error('Error connecting: ', str_e(e))
                return self.reconnect()
        if self._secure:
            sock_debug('SSL-wrapping socket')
            self._sock = ssl.wrap_socket(self._sock, ca_certs=self._cert_path, cert_reqs=ssl.CERT_REQUIRED, do_handshake_on_connect=False)

        self._q.clear()
        self._buf_out = bytes()
        self.emit('connect')
        self.connected = True
Example No. 17
    def _connect(self, attempts=0):
        if attempts > (self.proxy and 500 or 500):
            msg.error("Connection attempt timed out.")
            return self.reconnect()
        if not self._sock:
            msg.debug("_connect: No socket")
            return
        try:
            self._sock.connect((self._host, self._port))
            select.select([self._sock], [self._sock], [], 0)
        except socket.error as e:
            if e.errno == iscon_errno:
                pass
            elif e.errno in connect_errno:
                return utils.set_timeout(self._connect, 20, attempts + 1)
            else:
                msg.error("Error connecting:", e)
                return self.reconnect()
        if self._secure:
            sock_debug("SSL-wrapping socket")
            self._sock = ssl.wrap_socket(
                self._sock, ca_certs=self._cert_path, cert_reqs=ssl.CERT_REQUIRED, do_handshake_on_connect=False
            )

        self._q.clear()
        self.reconnect_delay = self.INITIAL_RECONNECT_DELAY
        self.retries = self.MAX_RETRIES
        self.emit("connect")
        self.connected = True
Example No. 18
    def _connect(self, host, port, attempts=0):
        if attempts > (self.proxy and 500 or 500):
            msg.error('Connection attempt timed out.')
            return self.reconnect()
        if not self._sock:
            msg.debug('_connect: No socket')
            return
        try:
            self._sock.connect((host, port))
            select.select([self._sock], [self._sock], [], 0)
        except socket.error as e:
            if e.errno == iscon_errno:
                pass
            elif e.errno in connect_errno:
                msg.debug('connect_errno: ', str_e(e))
                return utils.set_timeout(self._connect, 20, host, port,
                                         attempts + 1)
            else:
                msg.error('Error connecting: ', str_e(e))
                return self.reconnect()
        if self._secure:
            sock_debug('SSL-wrapping socket')
            self._sock = ssl.wrap_socket(self._sock,
                                         ca_certs=self._cert_path,
                                         cert_reqs=ssl.CERT_REQUIRED,
                                         do_handshake_on_connect=False)

        self._q.clear()
        self._buf_out = bytes()
        self.emit('connect')
        self.connected = True
Example No. 19
def wait_empty_window(i):
    if i > 10:
        print('Too many failures trying to find an empty window. Using active window.')
        return finish(sublime.active_window())
    w = get_empty_window()
    if w:
        return finish(w)
    return utils.set_timeout(wait_empty_window, 50, i + 1)
Example No. 20
def get_workspace_window():
    w = sublime.active_window()
    if w is None:
        return utils.set_timeout(get_workspace_window, 50)
    sublime.message_dialog(
        'Thank you for installing the Floobits plugin!\n\nLet\'s set up your editor to work with Floobits.'
    )
    w.show_quick_panel(opts, cb)
Example No. 21
def main():
    G.__VERSION__ = '0.03'
    G.__PLUGIN_VERSION__ = '1.0'
    utils.reload_settings()

    floo_log_level = 'msg'
    if G.DEBUG:
        floo_log_level = 'debug'
    msg.LOG_LEVEL = msg.LOG_LEVELS.get(floo_log_level.upper(),
                                       msg.LOG_LEVELS['MSG'])
    migrations.rename_floobits_dir()
    migrations.migrate_symlinks()

    emacs = emacs_handler.EmacsHandler()
    G.emacs = emacs
    _, port = reactor.reactor.listen(emacs)
    utils.set_timeout(cb, 100, port)
    reactor.reactor.block()
Example No. 22
def main():
    G.__VERSION__ = '0.11'
    G.__PLUGIN_VERSION__ = '1.1.1'
    utils.reload_settings()

    floo_log_level = 'msg'
    if G.DEBUG:
        floo_log_level = 'debug'
    msg.LOG_LEVEL = msg.LOG_LEVELS.get(floo_log_level.upper(), msg.LOG_LEVELS['MSG'])
    migrations.rename_floobits_dir()
    migrations.migrate_symlinks()
    utils.normalize_persistent_data()

    emacs = emacs_handler.EmacsHandler()
    G.emacs = emacs
    _, port = reactor.reactor.listen(emacs)
    utils.set_timeout(cb, 100, port)
    reactor.reactor.block()
Example No. 23
def wait_empty_window(i):
    if i > 10:
        print(
            'Too many failures trying to find an empty window. Using active window.'
        )
        return finish(sublime.active_window())
    w = get_empty_window()
    if w:
        return finish(w)
    return utils.set_timeout(wait_empty_window, 50, i + 1)
Example No. 24
    def reconnect(self):
        if self._reconnect_timeout:
            return
        self.cleanup()
        self._reconnect_delay = min(10000, int(1.5 * self._reconnect_delay))

        if self._retries > 0:
            msg.log('Floobits: Reconnecting in %sms' % self._reconnect_delay)
            self._reconnect_timeout = utils.set_timeout(self.connect, self._reconnect_delay)
        elif self._retries == 0:
            editor.error_message('Floobits Error! Too many reconnect failures. Giving up.')
        self._retries -= 1
Example No. 25
    def _uploader(self, paths_iter, cb, total_bytes, bytes_uploaded=0.0):
        reactor.tick()
        if len(self.proto) > 0:
            return utils.set_timeout(self._uploader, 10, paths_iter, cb, total_bytes, bytes_uploaded)

        bar_len = 20
        try:
            p = next(paths_iter)
            size = self._upload(p)
            bytes_uploaded += size
            try:
                percent = (bytes_uploaded / total_bytes)
            except ZeroDivisionError:
                percent = 0.5
            bar = '   |' + ('|' * int(bar_len * percent)) + (' ' * int((1 - percent) * bar_len)) + '|'
            editor.status_message('Uploading... %2.2f%% %s' % (percent * 100, bar))
        except StopIteration:
            editor.status_message('Uploading... 100% ' + ('|' * bar_len) + '| complete')
            msg.log('All done uploading')
            return cb and cb()
        return utils.set_timeout(self._uploader, 50, paths_iter, cb, total_bytes, bytes_uploaded)
Example No. 26
def main():
    G.__VERSION__ = '0.11'
    G.__PLUGIN_VERSION__ = '1.5.10'
    utils.reload_settings()

    if not os.path.exists(G.FLOORC_JSON_PATH):
        migrations.migrate_floorc()
        utils.reload_settings()

    migrations.rename_floobits_dir()
    migrations.migrate_symlinks()

    try:
        utils.normalize_persistent_data()
    except Exception:
        pass

    emacs = emacs_handler.EmacsHandler()
    G.emacs = emacs
    _, port = reactor.reactor.listen(emacs)
    utils.set_timeout(cb, 100, port)
    reactor.reactor.block()
Example No. 27
def get_or_create_chat(cb=None):
    if G.DEBUG:
        msg.LOG_LEVEL = msg.LOG_LEVELS['DEBUG']

    def return_view():
        G.CHAT_VIEW_PATH = G.CHAT_VIEW.file_name()
        G.CHAT_VIEW.set_read_only(True)
        if cb:
            return cb(G.CHAT_VIEW)

    def open_view():
        if not G.CHAT_VIEW:
            p = os.path.join(G.BASE_DIR, 'msgs.floobits.log')
            G.CHAT_VIEW = G.WORKSPACE_WINDOW.open_file(p)
        utils.set_timeout(return_view, 0)

    # Can't call open_file outside main thread
    if G.LOG_TO_CONSOLE:
        if cb:
            return cb(None)
    else:
        utils.set_timeout(open_view, 0)
Example No. 28
def main():
    G.__VERSION__ = '0.11'
    G.__PLUGIN_VERSION__ = '1.5.6'
    utils.reload_settings()

    if not os.path.exists(G.FLOORC_JSON_PATH):
        migrations.migrate_floorc()
        utils.reload_settings()

    migrations.rename_floobits_dir()
    migrations.migrate_symlinks()

    try:
        utils.normalize_persistent_data()
    except Exception:
        pass

    emacs = emacs_handler.EmacsHandler()
    G.emacs = emacs
    _, port = reactor.reactor.listen(emacs)
    utils.set_timeout(cb, 100, port)
    reactor.reactor.block()
Example No. 29
def get_or_create_chat(cb=None):
    if G.DEBUG:
        msg.LOG_LEVEL = msg.LOG_LEVELS['DEBUG']

    def return_view():
        G.CHAT_VIEW_PATH = G.CHAT_VIEW.file_name()
        G.CHAT_VIEW.set_read_only(True)
        if cb:
            return cb(G.CHAT_VIEW)

    def open_view():
        if not G.CHAT_VIEW:
            p = os.path.join(G.BASE_DIR, 'msgs.floobits.log')
            G.CHAT_VIEW = G.WORKSPACE_WINDOW.open_file(p)
        utils.set_timeout(return_view, 0)

    # Can't call open_file outside main thread
    if G.LOG_TO_CONSOLE:
        if cb:
            return cb(None)
    else:
        utils.set_timeout(open_view, 0)
Example No. 30
def main():
    G.__VERSION__ = '0.11'
    G.__PLUGIN_VERSION__ = None

    parser = optparse.OptionParser(usage='%prog [options]')
    parser.add_option("-p", "--port",
                      dest="port",
                      default=0,
                      help="The port to listen on. Useful for debugging.")
    parser.add_option("--set-version",
                      dest="version")

    options, args = parser.parse_args()
    port = int(options.port)
    G.__PLUGIN_VERSION__ = options.version
    if not G.__PLUGIN_VERSION__:
        print('--set-version is a required argument')
        print('args: %s' % ' '.join(args))
        sys.exit(1)

    utils.reload_settings()

    if not os.path.exists(G.FLOORC_JSON_PATH):
        migrations.migrate_floorc()
        utils.reload_settings()

    migrations.rename_floobits_dir()
    migrations.migrate_symlinks()

    try:
        utils.normalize_persistent_data()
    except Exception:
        pass

    emacs = emacs_handler.EmacsHandler()
    G.emacs = emacs
    _, port = reactor.reactor.listen(emacs, port=port)
    utils.set_timeout(cb, 100, port)
    reactor.reactor.block()
Example No. 31
    def _rate_limited_upload(self, paths_iter, total_bytes, bytes_uploaded=0.0, upload_func=None):
        reactor.tick()
        upload_func = upload_func or (lambda x: self._upload(utils.get_full_path(x)))
        if len(self.proto) > 0:
            self.upload_timeout = utils.set_timeout(self._rate_limited_upload, 10, paths_iter, total_bytes, bytes_uploaded, upload_func)
            return

        bar_len = 20
        try:
            p = next(paths_iter)
            size = upload_func(p)
            bytes_uploaded += size
            try:
                percent = (bytes_uploaded / total_bytes)
            except ZeroDivisionError:
                percent = 0.5
            bar = '   |' + ('|' * int(bar_len * percent)) + (' ' * int((1 - percent) * bar_len)) + '|'
            editor.status_message('Uploading... %2.2f%% %s' % (percent * 100, bar))
        except StopIteration:
            editor.status_message('Uploading... 100% ' + ('|' * bar_len) + '| complete')
            msg.log('All done uploading')
            return
        self.upload_timeout = utils.set_timeout(self._rate_limited_upload, 50, paths_iter, total_bytes, bytes_uploaded, upload_func)
Example No. 32
    def _rate_limited_upload(self, paths_iter, total_bytes, bytes_uploaded=0.0, upload_func=None):
        reactor.tick()
        upload_func = upload_func or (lambda x: self._upload(utils.get_full_path(x)))
        if len(self.proto) > 0:
            self.upload_timeout = utils.set_timeout(self._rate_limited_upload, 10, paths_iter, total_bytes, bytes_uploaded, upload_func)
            return

        bar_len = 20
        try:
            p = next(paths_iter)
            size = upload_func(p)
            bytes_uploaded += size
            try:
                percent = (bytes_uploaded / total_bytes)
            except ZeroDivisionError:
                percent = 0.5
            bar = '   |' + ('|' * int(bar_len * percent)) + (' ' * int((1 - percent) * bar_len)) + '|'
            editor.status_message('Uploading... %2.2f%% %s' % (percent * 100, bar))
        except StopIteration:
            editor.status_message('Uploading... 100% ' + ('|' * bar_len) + '| complete')
            msg.log('All done uploading')
            return
        self.upload_timeout = utils.set_timeout(self._rate_limited_upload, 50, paths_iter, total_bytes, bytes_uploaded, upload_func)
Example No. 33
    def reconnect(self):
        if self._reconnect_timeout:
            return
        self.cleanup()
        self._reconnect_delay = min(10000, int(1.5 * self._reconnect_delay))

        if self._retries > 0:
            msg.log('Floobits: Reconnecting in %sms' % self._reconnect_delay)
            self._reconnect_timeout = utils.set_timeout(self.connect, self._reconnect_delay)
        elif self._retries == 0:
            editor.error_message('Floobits Error! Too many reconnect failures. Giving up.')

        # Only use proxy.floobits.com if we're trying to connect to floobits.com
        G.OUTBOUND_FILTERING = self.host == 'floobits.com' and self._retries % 4 == 0
        self._retries -= 1
Example No. 34
    def reconnect(self):
        if self._reconnect_timeout:
            return
        self.cleanup()
        self._reconnect_delay = min(10000, int(1.5 * self._reconnect_delay))

        if self._retries > 0:
            msg.log('Floobits: Reconnecting in %sms' % self._reconnect_delay)
            self._reconnect_timeout = utils.set_timeout(
                self.connect, self._reconnect_delay)
        elif self._retries == 0:
            editor.error_message(
                'Floobits Error! Too many reconnect failures. Giving up.')

        # Only use proxy.floobits.com if we're trying to connect to floobits.com
        G.OUTBOUND_FILTERING = self.host == 'floobits.com' and self._retries % 4 == 0
        self._retries -= 1
Example No. 35
    def _run(self, edit, selections, r, data, view=None):
        global ignore_modified_timeout

        if not getattr(self, 'view', None):
            return selections

        G.IGNORE_MODIFIED_EVENTS = True
        utils.cancel_timeout(ignore_modified_timeout)
        ignore_modified_timeout = utils.set_timeout(unignore_modified_events,
                                                    2)
        start = max(int(r[0]), 0)
        stop = min(int(r[1]), self.view.size())
        region = sublime.Region(start, stop)

        if stop - start > 10000:
            self.view.replace(edit, region, data)
            G.VIEW_TO_HASH[self.view.buffer_id()] = hashlib.md5(
                listener.get_text(self.view).encode('utf-8')).hexdigest()
            return transform_selections(selections, start, stop - start)

        existing = self.view.substr(region)
        i = 0
        data_len = len(data)
        existing_len = len(existing)
        length = min(data_len, existing_len)
        while (i < length):
            if existing[i] != data[i]:
                break
            i += 1
        j = 0
        while j < (length - i):
            if existing[existing_len - j - 1] != data[data_len - j - 1]:
                break
            j += 1
        region = sublime.Region(start + i, stop - j)
        replace_str = data[i:data_len - j]
        self.view.replace(edit, region, replace_str)
        G.VIEW_TO_HASH[self.view.buffer_id()] = hashlib.md5(
            listener.get_text(self.view).encode('utf-8')).hexdigest()
        new_offset = len(replace_str) - ((stop - j) - (start + i))
        return transform_selections(selections, start + i, new_offset)
Example No. 36
    def _run(self, edit, selections, r, data, view=None):
        global ignore_modified_timeout

        if not getattr(self, 'view', None):
            return selections

        G.IGNORE_MODIFIED_EVENTS = True
        utils.cancel_timeout(ignore_modified_timeout)
        ignore_modified_timeout = utils.set_timeout(unignore_modified_events, 2)
        start = max(int(r[0]), 0)
        stop = min(int(r[1]), self.view.size())
        region = sublime.Region(start, stop)

        if stop - start > 10000:
            self.view.replace(edit, region, data)
            G.VIEW_TO_HASH[self.view.buffer_id()] = hashlib.md5(listener.get_text(self.view).encode('utf-8')).hexdigest()
            return transform_selections(selections, start, stop - start)

        existing = self.view.substr(region)
        i = 0
        data_len = len(data)
        existing_len = len(existing)
        length = min(data_len, existing_len)
        while (i < length):
            if existing[i] != data[i]:
                break
            i += 1
        j = 0
        while j < (length - i):
            if existing[existing_len - j - 1] != data[data_len - j - 1]:
                break
            j += 1
        region = sublime.Region(start + i, stop - j)
        replace_str = data[i:data_len - j]
        self.view.replace(edit, region, replace_str)
        G.VIEW_TO_HASH[self.view.buffer_id()] = hashlib.md5(listener.get_text(self.view).encode('utf-8')).hexdigest()
        new_offset = len(replace_str) - ((stop - j) - (start + i))
        return transform_selections(selections, start + i, new_offset)
Example No. 37
def get_active_window(cb):
    win = sublime.active_window()
    if not win:
        return utils.set_timeout(get_active_window, 50, cb)
    cb(win)
Example No. 38
def open_view():
    if not G.CHAT_VIEW:
        p = os.path.join(G.BASE_DIR, 'msgs.floobits.log')
        G.CHAT_VIEW = G.WORKSPACE_WINDOW.open_file(p)
    utils.set_timeout(return_view, 0)
Example No. 39
def set_workspace_window(cb):
    workspace_window = get_workspace_window()
    if workspace_window is None:
        return utils.set_timeout(set_workspace_window, 50, cb)
    G.WORKSPACE_WINDOW = workspace_window
    cb()
Example No. 40
def open_view():
    if not G.CHAT_VIEW:
        p = os.path.join(G.BASE_DIR, 'msgs.floobits.log')
        G.CHAT_VIEW = G.WORKSPACE_WINDOW.open_file(p)
    utils.set_timeout(return_view, 0)
Example No. 41
    def _on_room_info(self, data):
        self.joined_workspace = True
        self.workspace_info = data
        G.PERMS = data['perms']

        self.proto.reset_retries()

        if G.OUTBOUND_FILTERING:
            msg.error(
                'Detected outbound port blocking! See https://floobits.com/help/network for more info.'
            )

        read_only = False
        if 'patch' not in data['perms']:
            read_only = True
            no_perms_msg = '''You don't have permission to edit this workspace. All files will be read-only.'''
            msg.log('No patch permission. Setting buffers to read-only')
            if 'request_perm' in data['perms']:
                should_send = yield self.ok_cancel_dialog, no_perms_msg + '\nDo you want to request edit permission?'
                # TODO: wait for perms to be OK'd/denied before uploading or bailing
                if should_send:
                    self.send({
                        'name': 'request_perms',
                        'perms': ['edit_room']
                    })
            else:
                if G.EXPERT_MODE:
                    editor.status_message(no_perms_msg)
                else:
                    editor.error_message(no_perms_msg)

        floo_json = {
            'url':
            utils.to_workspace_url({
                'owner': self.owner,
                'workspace': self.workspace,
                'host': self.proto.host,
                'port': self.proto.port,
                'secure': self.proto.secure,
            })
        }
        utils.update_floo_file(os.path.join(G.PROJECT_PATH, '.floo'),
                               floo_json)
        utils.update_recent_workspaces(self.workspace_url)

        ig = ignore.create_ignore_tree(G.PROJECT_PATH)
        G.IGNORE = ig
        for buf_id, buf in data['bufs'].items():
            buf_id = int(buf_id)  # json keys must be strings
            self.bufs[buf_id] = buf
            self.paths_to_ids[buf['path']] = buf_id
        changed_bufs, missing_bufs, new_files = self._scan_dir(
            data['bufs'], ig, read_only)

        ignored = []
        for p, buf_id in self.paths_to_ids.items():
            if p not in new_files:
                ignored.append(p)
            new_files.discard(p)

        if self.action == utils.JOIN_ACTION.UPLOAD:
            yield self._initial_upload, ig, missing_bufs, changed_bufs
            # TODO: maybe use org name here
            who = 'Your friends'
            anon_perms = G.AGENT.workspace_info.get('anon_perms')
            if 'get_buf' in anon_perms:
                who = 'Anyone'
            _msg = 'You are sharing:\n\n%s\n\n%s can join your workspace at:\n\n%s' % (
                G.PROJECT_PATH, who, G.AGENT.workspace_url)
            # Workaround for horrible Sublime Text bug
            utils.set_timeout(editor.message_dialog, 0, _msg)
            # Don't auto-upload again on reconnect
            self.action = utils.JOIN_ACTION.PROMPT
        elif changed_bufs or missing_bufs or new_files:
            # TODO: handle readonly here
            if self.action == utils.JOIN_ACTION.PROMPT:
                stomp_local = yield self.stomp_prompt, changed_bufs, missing_bufs, list(
                    new_files), ignored
                if stomp_local not in [0, 1]:
                    self.stop()
                    return
            elif self.action == utils.JOIN_ACTION.DOWNLOAD:
                stomp_local = True
            else:
                # This should never happen
                assert False
                return

            if stomp_local:
                for buf in changed_bufs:
                    self.get_buf(buf['id'], buf.get('view'))
                    self.save_on_get_bufs.add(buf['id'])
                for buf in missing_bufs:
                    self.get_buf(buf['id'], buf.get('view'))
                    self.save_on_get_bufs.add(buf['id'])
            else:
                yield self._initial_upload, ig, missing_bufs, changed_bufs

        success_msg = '%s@%s/%s: Joined!' % (self.username, self.owner,
                                             self.workspace)
        msg.log(success_msg)
        editor.status_message(success_msg)

        data = utils.get_persistent_data()
        data['recent_workspaces'].insert(0, {"url": self.workspace_url})
        utils.update_persistent_data(data)
        utils.add_workspace_to_persistent_json(self.owner, self.workspace,
                                               self.workspace_url,
                                               G.PROJECT_PATH)

        temp_data = data.get('temp_data', {})
        hangout = temp_data.get('hangout', {})
        hangout_url = hangout.get('url')
        if hangout_url:
            self.prompt_join_hangout(hangout_url)

        if data.get('repo_info'):
            msg.log('Repo info:', data.get('repo_info'))
            # TODO: check local repo info and update remote (or prompt?)
        else:
            repo_info = repo.get_info(self.workspace_url, G.PROJECT_PATH)
            if repo_info and 'repo' in G.PERMS:
                self.send({
                    'name': 'repo',
                    'action': 'set',
                    'data': repo_info,
                })

        self.emit("room_info")
Example No. 42
def get_active_window(cb):
    win = sublime.active_window()
    if not win:
        return utils.set_timeout(get_active_window, 50, cb)
    cb(win)
Example No. 43
def global_tick():
    Listener.push()
    if G.AGENT and G.AGENT.sock:
        G.AGENT.select()
    utils.set_timeout(global_tick, G.TICK_TIME)
Example No. 44
    def _on_room_info(self, data):
        self.joined_workspace = True
        self.workspace_info = data
        G.PERMS = data['perms']

        self.proto.reset_retries()

        if G.OUTBOUND_FILTERING:
            msg.error('Detected outbound port blocking! See https://floobits.com/help/network for more info.')

        read_only = False
        if 'patch' not in data['perms']:
            read_only = True
            no_perms_msg = '''You don't have permission to edit this workspace. All files will be read-only.'''
            msg.log('No patch permission. Setting buffers to read-only')
            if 'request_perm' in data['perms']:
                should_send = yield self.ok_cancel_dialog, no_perms_msg + '\nDo you want to request edit permission?'
                # TODO: wait for perms to be OK'd/denied before uploading or bailing
                if should_send:
                    self.send({'name': 'request_perms', 'perms': ['edit_room']})
            else:
                if G.EXPERT_MODE:
                    editor.status_message(no_perms_msg)
                else:
                    editor.error_message(no_perms_msg)

        floo_json = {
            'url': utils.to_workspace_url({
                'owner': self.owner,
                'workspace': self.workspace,
                'host': self.proto.host,
                'port': self.proto.port,
                'secure': self.proto.secure,
            })
        }
        utils.update_floo_file(os.path.join(G.PROJECT_PATH, '.floo'), floo_json)
        utils.update_recent_workspaces(self.workspace_url)

        ig = ignore.create_ignore_tree(G.PROJECT_PATH)
        G.IGNORE = ig
        for buf_id, buf in data['bufs'].items():
            buf_id = int(buf_id)  # json keys must be strings
            self.bufs[buf_id] = buf
            self.paths_to_ids[buf['path']] = buf_id
        changed_bufs, missing_bufs, new_files = self._scan_dir(data['bufs'], ig, read_only)

        ignored = []
        for p, buf_id in self.paths_to_ids.items():
            if p not in new_files:
                ignored.append(p)
            new_files.discard(p)

        if self.action == utils.JOIN_ACTION.UPLOAD:
            yield self._initial_upload, ig, missing_bufs, changed_bufs
            # TODO: maybe use org name here
            who = 'Your friends'
            anon_perms = G.AGENT.workspace_info.get('anon_perms')
            if 'get_buf' in anon_perms:
                who = 'Anyone'
            _msg = 'You are sharing:\n\n%s\n\n%s can join your workspace at:\n\n%s' % (G.PROJECT_PATH, who, G.AGENT.workspace_url)
            # Workaround for horrible Sublime Text bug
            utils.set_timeout(editor.message_dialog, 0, _msg)
            # Don't auto-upload again on reconnect
            self.action = utils.JOIN_ACTION.PROMPT
        elif changed_bufs or missing_bufs or new_files:
            # TODO: handle readonly here
            if self.action == utils.JOIN_ACTION.PROMPT:
                stomp_local = yield self.stomp_prompt, changed_bufs, missing_bufs, list(new_files), ignored
                if stomp_local not in [0, 1]:
                    self.stop()
                    return
            elif self.action == utils.JOIN_ACTION.DOWNLOAD:
                stomp_local = True
            else:
                # This should never happen
                assert False
                return

            if stomp_local:
                for buf in changed_bufs:
                    self.get_buf(buf['id'], buf.get('view'))
                    self.save_on_get_bufs.add(buf['id'])
                for buf in missing_bufs:
                    self.get_buf(buf['id'], buf.get('view'))
                    self.save_on_get_bufs.add(buf['id'])
            else:
                yield self._initial_upload, ig, missing_bufs, changed_bufs

        success_msg = '%s@%s/%s: Joined!' % (self.username, self.owner, self.workspace)
        msg.log(success_msg)
        editor.status_message(success_msg)

        data = utils.get_persistent_data()
        data['recent_workspaces'].insert(0, {"url": self.workspace_url})
        utils.update_persistent_data(data)
        utils.add_workspace_to_persistent_json(self.owner, self.workspace, self.workspace_url, G.PROJECT_PATH)

        temp_data = data.get('temp_data', {})
        hangout = temp_data.get('hangout', {})
        hangout_url = hangout.get('url')
        if hangout_url:
            self.prompt_join_hangout(hangout_url)

        if data.get('repo_info'):
            msg.log('Repo info:', data.get('repo_info'))
            # TODO: check local repo info and update remote (or prompt?)
        else:
            repo_info = repo.get_info(self.workspace_url, G.PROJECT_PATH)
            if repo_info and 'repo' in G.PERMS:
                self.send({
                    'name': 'repo',
                    'action': 'set',
                    'data': repo_info,
                })

        self.emit("room_info")
Example No. 45
def global_tick():
    Listener.push()
    if G.AGENT and G.AGENT.sock:
        G.AGENT.select()
    utils.set_timeout(global_tick, G.TICK_TIME)
Example No. 46
    def _on_room_info(self, data):
        self.reset()
        self.joined_workspace = True
        self.workspace_info = data
        G.PERMS = data['perms']

        self.proto.reset_retries()

        if G.OUTBOUND_FILTERING:
            msg.error('Detected outbound port blocking! See https://floobits.com/help/network for more info.')

        read_only = False
        if 'patch' not in data['perms']:
            read_only = True
            no_perms_msg = '''You don't have permission to edit this workspace. All files will be read-only.'''
            msg.log('No patch permission. Setting buffers to read-only')
            if 'request_perm' in data['perms']:
                should_send = yield self.ok_cancel_dialog, no_perms_msg + '\nDo you want to request edit permission?'
                # TODO: wait for perms to be OK'd/denied before uploading or bailing
                if should_send:
                    self.send({'name': 'request_perms', 'perms': ['edit_room']})
            else:
                if G.EXPERT_MODE:
                    editor.status_message(no_perms_msg)
                else:
                    editor.error_message(no_perms_msg)

        floo_json = {
            'url': utils.to_workspace_url({
                'owner': self.owner,
                'workspace': self.workspace,
                'host': self.proto.host,
                'port': self.proto.port,
                'secure': self.proto.secure,
            })
        }
        utils.update_floo_file(os.path.join(G.PROJECT_PATH, '.floo'), floo_json)
        utils.update_recent_workspaces(self.workspace_url)

        changed_bufs = []
        missing_bufs = []
        new_files = set()
        ig = ignore.create_ignore_tree(G.PROJECT_PATH)
        G.IGNORE = ig
        if not read_only:
            new_files = set([utils.to_rel_path(x) for x in ig.list_paths()])

        for buf_id, buf in data['bufs'].items():
            buf_id = int(buf_id)  # json keys must be strings
            buf_path = utils.get_full_path(buf['path'])
            new_dir = os.path.dirname(buf_path)
            utils.mkdir(new_dir)
            self.bufs[buf_id] = buf
            self.paths_to_ids[buf['path']] = buf_id

            view = self.get_view(buf_id)
            if view and not view.is_loading() and buf['encoding'] == 'utf8':
                view_text = view.get_text()
                view_md5 = hashlib.md5(view_text.encode('utf-8')).hexdigest()
                buf['buf'] = view_text
                buf['view'] = view
                G.VIEW_TO_HASH[view.native_id] = view_md5
                if view_md5 == buf['md5']:
                    msg.debug('md5 sum matches view. not getting buffer ', buf['path'])
                else:
                    changed_bufs.append(buf)
                    buf['md5'] = view_md5
                continue

            try:
                if buf['encoding'] == 'utf8':
                    if io:
                        buf_fd = io.open(buf_path, 'Urt', encoding='utf8')
                        buf_buf = buf_fd.read()
                    else:
                        buf_fd = open(buf_path, 'rb')
                        buf_buf = buf_fd.read().decode('utf-8').replace('\r\n', '\n')
                    md5 = hashlib.md5(buf_buf.encode('utf-8')).hexdigest()
                else:
                    buf_fd = open(buf_path, 'rb')
                    buf_buf = buf_fd.read()
                    md5 = hashlib.md5(buf_buf).hexdigest()
                buf_fd.close()
                buf['buf'] = buf_buf
                if md5 == buf['md5']:
                    msg.debug('md5 sum matches. not getting buffer ', buf['path'])
                else:
                    msg.debug('md5 differs. possibly getting buffer later ', buf['path'])
                    changed_bufs.append(buf)
                    buf['md5'] = md5
            except Exception as e:
                msg.debug('Error calculating md5 for ', buf['path'], ', ', str_e(e))
                missing_bufs.append(buf)

        ignored = []
        for p, buf_id in self.paths_to_ids.items():
            if p not in new_files:
                ignored.append(p)
            new_files.discard(p)

        if self.action == utils.JOIN_ACTION.UPLOAD:
            yield self._initial_upload, ig, missing_bufs, changed_bufs
            # TODO: maybe use org name here
            who = 'Your friends'
            anon_perms = G.AGENT.workspace_info.get('anon_perms')
            if 'get_buf' in anon_perms:
                who = 'Anyone'
            _msg = 'You are sharing:\n\n%s\n\n%s can join your workspace at:\n\n%s' % (G.PROJECT_PATH, who, G.AGENT.workspace_url)
            # Workaround for horrible Sublime Text bug
            utils.set_timeout(editor.message_dialog, 0, _msg)
        elif changed_bufs or missing_bufs or new_files:
            # TODO: handle readonly here
            if self.action == utils.JOIN_ACTION.PROMPT:
                stomp_local = yield self.stomp_prompt, changed_bufs, missing_bufs, list(new_files), ignored
                if stomp_local not in [0, 1]:
                    self.stop()
                    return
            elif self.action == utils.JOIN_ACTION.DOWNLOAD:
                stomp_local = True
            else:
                # This should never happen
                assert False
                return

            if stomp_local:
                for buf in changed_bufs:
                    self.get_buf(buf['id'], buf.get('view'))
                    self.save_on_get_bufs.add(buf['id'])
                for buf in missing_bufs:
                    self.get_buf(buf['id'], buf.get('view'))
                    self.save_on_get_bufs.add(buf['id'])
            else:
                yield self._initial_upload, ig, missing_bufs, changed_bufs

        success_msg = 'Successfully joined workspace %s/%s' % (self.owner, self.workspace)
        msg.log(success_msg)
        editor.status_message(success_msg)

        data = utils.get_persistent_data()
        data['recent_workspaces'].insert(0, {"url": self.workspace_url})
        utils.update_persistent_data(data)
        utils.add_workspace_to_persistent_json(self.owner, self.workspace, self.workspace_url, G.PROJECT_PATH)

        temp_data = data.get('temp_data', {})
        hangout = temp_data.get('hangout', {})
        hangout_url = hangout.get('url')
        if hangout_url:
            self.prompt_join_hangout(hangout_url)

        self.emit("room_info")
Example No. 47
    def _on_room_info(self, data):
        self.joined_workspace = True
        self.workspace_info = data
        G.PERMS = data['perms']

        self.proto.reset_retries()

        if G.OUTBOUND_FILTERING:
            msg.error(
                'Detected outbound port blocking! See https://floobits.com/help/network for more info.'
            )

        read_only = False
        if 'patch' not in data['perms']:
            read_only = True
            no_perms_msg = '''You don't have permission to edit this workspace. All files will be read-only.'''
            msg.log('No patch permission. Setting buffers to read-only')
            if 'request_perm' in data['perms']:
                should_send = yield self.ok_cancel_dialog, no_perms_msg + '\nDo you want to request edit permission?'
                # TODO: wait for perms to be OK'd/denied before uploading or bailing
                if should_send:
                    self.send({
                        'name': 'request_perms',
                        'perms': ['edit_room']
                    })
            else:
                if G.EXPERT_MODE:
                    editor.status_message(no_perms_msg)
                else:
                    editor.error_message(no_perms_msg)

        floo_json = {
            'url':
            utils.to_workspace_url({
                'owner': self.owner,
                'workspace': self.workspace,
                'host': self.proto.host,
                'port': self.proto.port,
                'secure': self.proto.secure,
            })
        }
        utils.update_floo_file(os.path.join(G.PROJECT_PATH, '.floo'),
                               floo_json)
        utils.update_recent_workspaces(self.workspace_url)

        changed_bufs = []
        missing_bufs = []
        new_files = set()
        ig = ignore.create_ignore_tree(G.PROJECT_PATH)
        G.IGNORE = ig
        if not read_only:
            new_files = set([utils.to_rel_path(x) for x in ig.list_paths()])

        for buf_id, buf in data['bufs'].items():
            buf_id = int(buf_id)  # json keys must be strings
            buf_path = utils.get_full_path(buf['path'])
            new_dir = os.path.dirname(buf_path)
            utils.mkdir(new_dir)
            self.bufs[buf_id] = buf
            self.paths_to_ids[buf['path']] = buf_id

            view = self.get_view(buf_id)
            if view and not view.is_loading() and buf['encoding'] == 'utf8':
                view_text = view.get_text()
                view_md5 = hashlib.md5(view_text.encode('utf-8')).hexdigest()
                buf['buf'] = view_text
                buf['view'] = view
                G.VIEW_TO_HASH[view.native_id] = view_md5
                if view_md5 == buf['md5']:
                    msg.debug('md5 sum matches view. not getting buffer ',
                              buf['path'])
                else:
                    changed_bufs.append(buf)
                    buf['md5'] = view_md5
                continue

            try:
                if buf['encoding'] == 'utf8':
                    if io:
                        buf_fd = io.open(buf_path, 'Urt', encoding='utf8')
                        buf_buf = buf_fd.read()
                    else:
                        buf_fd = open(buf_path, 'rb')
                        buf_buf = buf_fd.read().decode('utf-8').replace(
                            '\r\n', '\n')
                    md5 = hashlib.md5(buf_buf.encode('utf-8')).hexdigest()
                else:
                    buf_fd = open(buf_path, 'rb')
                    buf_buf = buf_fd.read()
                    md5 = hashlib.md5(buf_buf).hexdigest()
                buf_fd.close()
                buf['buf'] = buf_buf
                if md5 == buf['md5']:
                    msg.debug('md5 sum matches. not getting buffer ',
                              buf['path'])
                else:
                    msg.debug('md5 differs. possibly getting buffer later ',
                              buf['path'])
                    changed_bufs.append(buf)
                    buf['md5'] = md5
            except Exception as e:
                msg.debug('Error calculating md5 for ', buf['path'], ', ',
                          str_e(e))
                missing_bufs.append(buf)

        ignored = []
        for p, buf_id in self.paths_to_ids.items():
            if p not in new_files:
                ignored.append(p)
            new_files.discard(p)

        if self.action == utils.JOIN_ACTION.UPLOAD:
            yield self._initial_upload, ig, missing_bufs, changed_bufs
            # TODO: maybe use org name here
            who = 'Your friends'
            anon_perms = G.AGENT.workspace_info.get('anon_perms')
            if 'get_buf' in anon_perms:
                who = 'Anyone'
            _msg = 'You are sharing:\n\n%s\n\n%s can join your workspace at:\n\n%s' % (
                G.PROJECT_PATH, who, G.AGENT.workspace_url)
            # Workaround for horrible Sublime Text bug
            utils.set_timeout(editor.message_dialog, 0, _msg)
        elif changed_bufs or missing_bufs or new_files:
            # TODO: handle readonly here
            if self.action == utils.JOIN_ACTION.PROMPT:
                stomp_local = yield self.stomp_prompt, changed_bufs, missing_bufs, list(
                    new_files), ignored
                if stomp_local not in [0, 1]:
                    self.stop()
                    return
            elif self.action == utils.JOIN_ACTION.DOWNLOAD:
                stomp_local = True
            else:
                # This should never happen
                assert False
                return

            if stomp_local:
                for buf in changed_bufs:
                    self.get_buf(buf['id'], buf.get('view'))
                    self.save_on_get_bufs.add(buf['id'])
                for buf in missing_bufs:
                    self.get_buf(buf['id'], buf.get('view'))
                    self.save_on_get_bufs.add(buf['id'])
            else:
                yield self._initial_upload, ig, missing_bufs, changed_bufs

        success_msg = '%s@%s/%s: Joined!' % (self.username, self.owner,
                                             self.workspace)
        msg.log(success_msg)
        editor.status_message(success_msg)

        data = utils.get_persistent_data()
        data['recent_workspaces'].insert(0, {"url": self.workspace_url})
        utils.update_persistent_data(data)
        utils.add_workspace_to_persistent_json(self.owner, self.workspace,
                                               self.workspace_url,
                                               G.PROJECT_PATH)

        temp_data = data.get('temp_data', {})
        hangout = temp_data.get('hangout', {})
        hangout_url = hangout.get('url')
        if hangout_url:
            self.prompt_join_hangout(hangout_url)

        self.emit("room_info")
Example No. 48
def global_tick():
    reactor.tick()
    utils.set_timeout(global_tick, G.TICK_TIME)
Example No. 49
    def _on_patch(self, data):
        buf_id = data['id']
        buf = self.bufs[buf_id]
        if 'buf' not in buf:
            msg.debug('buf %s not populated yet. not patching' % buf['path'])
            return

        if buf['encoding'] == 'base64':
            # TODO apply binary patches
            return self.get_buf(buf_id, None)

        if len(data['patch']) == 0:
            msg.debug('wtf? no patches to apply. server is being stupid')
            return

        msg.debug('patch is', data['patch'])
        dmp_patches = DMP.patch_fromText(data['patch'])
        # TODO: run this in a separate thread
        old_text = buf['buf']

        view = self.get_view(buf_id)
        if view and not view.is_loading():
            view_text = view.get_text()
            if old_text == view_text:
                buf['forced_patch'] = False
            elif not buf.get('forced_patch'):
                patch = utils.FlooPatch(view_text, buf)
                # Update the current copy of the buffer
                buf['buf'] = patch.current
                buf['md5'] = hashlib.md5(patch.current.encode('utf-8')).hexdigest()
                buf['forced_patch'] = True
                msg.debug('forcing patch for %s' % buf['path'])
                self.send(patch.to_json())
                old_text = view_text
            else:
                msg.debug('forced patch is true. not sending another patch for buf %s' % buf['path'])
        md5_before = hashlib.md5(old_text.encode('utf-8')).hexdigest()
        if md5_before != data['md5_before']:
            msg.warn('starting md5s don\'t match for %s. this is dangerous!' % buf['path'])

        t = DMP.patch_apply(dmp_patches, old_text)

        clean_patch = True
        for applied_patch in t[1]:
            if not applied_patch:
                clean_patch = False
                break

        if G.DEBUG:
            if len(t[0]) == 0:
                try:
                    msg.debug('OMG EMPTY!')
                    msg.debug('Starting data:', buf['buf'])
                    msg.debug('Patch:', data['patch'])
                except Exception as e:
                    print(e)

            if '\x01' in t[0]:
                msg.debug('FOUND CRAZY BYTE IN BUFFER')
                msg.debug('Starting data:', buf['buf'])
                msg.debug('Patch:', data['patch'])

        timeout_id = buf.get('timeout_id')
        if timeout_id:
            utils.cancel_timeout(timeout_id)
            del buf['timeout_id']

        if not clean_patch:
            msg.log('Couldn\'t patch %s cleanly.' % buf['path'])
            return self.get_buf(buf_id, view)

        cur_hash = hashlib.md5(t[0].encode('utf-8')).hexdigest()
        if cur_hash != data['md5_after']:
            buf['timeout_id'] = utils.set_timeout(self.get_buf, 2000, buf_id, view)

        buf['buf'] = t[0]
        buf['md5'] = cur_hash

        if not view:
            msg.debug('No view. Saving buffer %s' % buf_id)
            utils.save_buf(buf)
            return

        view.apply_patches(buf, t, data['username'])
Example No. 50
def global_tick():
    # XXX: A couple of sublime 2 users have had reactor == None here
    reactor.tick()
    utils.set_timeout(global_tick, G.TICK_TIME)
Example No. 51
def set_workspace_window(cb):
    workspace_window = get_workspace_window()
    if workspace_window is None:
        return utils.set_timeout(set_workspace_window, 50, cb)
    G.WORKSPACE_WINDOW = workspace_window
    cb()
Example No. 52
def get_workspace_window():
    w = sublime.active_window()
    if w is None:
        return utils.set_timeout(get_workspace_window, 50)
    sublime.message_dialog('Thank you for installing the Floobits plugin!\n\nLet\'s set up your editor to work with Floobits.')
    w.show_quick_panel(opts, cb)
Example No. 53
def global_tick():
    # XXX: A couple of sublime 2 users have had reactor == None here
    reactor.tick()
    utils.set_timeout(global_tick, G.TICK_TIME)
Example No. 54
    def _on_patch(self, data):
        buf_id = data['id']
        buf = self.bufs[buf_id]
        if 'buf' not in buf:
            msg.debug('buf ', buf['path'], ' not populated yet. not patching')
            return

        if buf['encoding'] == 'base64':
            # TODO apply binary patches
            return self.get_buf(buf_id, None)

        if len(data['patch']) == 0:
            msg.debug('wtf? no patches to apply. server is being stupid')
            return

        msg.debug('patch is', data['patch'])
        dmp_patches = DMP.patch_fromText(data['patch'])
        # TODO: run this in a separate thread
        old_text = buf['buf']

        view = self.get_view(buf_id)
        if view and not view.is_loading():
            view_text = view.get_text()
            if old_text == view_text:
                buf['forced_patch'] = False
            elif not buf.get('forced_patch'):
                patch = utils.FlooPatch(view_text, buf)
                # Update the current copy of the buffer
                buf['buf'] = patch.current
                buf['md5'] = hashlib.md5(
                    patch.current.encode('utf-8')).hexdigest()
                buf['forced_patch'] = True
                msg.debug('forcing patch for ', buf['path'])
                self.send(patch.to_json())
                old_text = view_text
            else:
                msg.debug(
                    'forced patch is true. not sending another force patch for buf ',
                    buf['path'])

        md5_before = hashlib.md5(old_text.encode('utf-8')).hexdigest()
        if md5_before != data['md5_before']:
            msg.warn('starting md5s don\'t match for ', buf['path'],
                     '. this is dangerous!')

        t = DMP.patch_apply(dmp_patches, old_text)

        clean_patch = True
        for applied_patch in t[1]:
            if not applied_patch:
                clean_patch = False
                break

        if G.DEBUG:
            if len(t[0]) == 0:
                try:
                    msg.debug('OMG EMPTY!')
                    msg.debug('Starting data:', buf['buf'])
                    msg.debug('Patch:', data['patch'])
                except Exception as e:
                    msg.error(e)

            if '\x01' in t[0]:
                msg.debug('FOUND CRAZY BYTE IN BUFFER')
                msg.debug('Starting data:', buf['buf'])
                msg.debug('Patch:', data['patch'])

        timeout_id = buf.get('timeout_id')
        if timeout_id:
            utils.cancel_timeout(timeout_id)
            del buf['timeout_id']

        if not clean_patch:
            msg.log('Couldn\'t patch ', buf['path'], ' cleanly.')
            return self.get_buf(buf_id, view)

        cur_hash = hashlib.md5(t[0].encode('utf-8')).hexdigest()
        if cur_hash != data['md5_after']:
            msg.debug('Ending md5s don\'t match for ', buf['path'],
                      ' Setting get_buf timeout.')
            buf['timeout_id'] = utils.set_timeout(self.get_buf, 2000, buf_id,
                                                  view)

        buf['buf'] = t[0]
        buf['md5'] = cur_hash

        if not view:
            msg.debug('No view. Not saving buffer ', buf_id)

            def _on_load():
                v = self.get_view(buf_id)
                if v and 'buf' in buf:
                    v.update(buf, message=False)

            self.on_load[buf_id]['patch'] = _on_load
            return

        view.apply_patches(buf, t, data['username'])
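Note: the _on_patch examples above apply remote edits with a diff-match-patch instance named DMP (DMP.patch_fromText and DMP.patch_apply). Assuming the standard diff_match_patch library, which is what those names suggest, the round trip looks like the sketch below; patch_apply returns the patched text plus one success flag per patch, which is what the t[0] / t[1] handling in the examples relies on.

from diff_match_patch import diff_match_patch

dmp = diff_match_patch()

old_text = 'def main():\n    print("hello")\n'
new_text = 'def main():\n    print("hello, world")\n'

# A sender turns its edit into patches and serializes them to text.
patches = dmp.patch_make(old_text, new_text)
patch_text = dmp.patch_toText(patches)

# The receiver parses and applies them; results is a list of booleans, one per patch.
parsed = dmp.patch_fromText(patch_text)
patched_text, results = dmp.patch_apply(parsed, old_text)

assert all(results), 'patch did not apply cleanly; the examples re-fetch the buffer in that case'
assert patched_text == new_text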