Example #1
0
 def update_view(buf, view=None):
     """Replace the entire contents of view with buf['buf'], preserving
     the scroll position and selections.

     buf -- buffer dict; must carry 'id' and 'buf' keys.
     view -- target Sublime view; looked up via buf['id'] when omitted.
     """
     view = view or get_view(buf['id'])
     # Snapshot scroll/selection state so it can be restored afterwards.
     visible_region = view.visible_region()
     viewport_position = view.viewport_position()
     region = sublime.Region(0, view.size())
     # deep copy
     selections = [x for x in view.sel()]
     # Tell the modification listener to ignore the event we're about to cause.
     MODIFIED_EVENTS.put(1)
     # Acquire the edit handle BEFORE the try block: if begin_edit() raised
     # inside it, `edit` would be unbound and the finally clause would die
     # with a NameError, masking the original exception.
     edit = view.begin_edit()
     try:
         view.replace(edit, region, buf['buf'])
     except Exception as e:
         msg.error('Exception updating view: %s' % e)
     finally:
         view.end_edit(edit)
     # Restore viewport on the next tick; the replace may have moved it.
     sublime.set_timeout(lambda: view.set_viewport_position(viewport_position, False), 0)
     view.sel().clear()
     view.show(visible_region, False)
     for sel in selections:
         view.sel().add(sel)
     if 'patch' in G.PERMS:
         view.set_read_only(False)
     else:
         view.set_status('Floobits', 'You don\'t have write permission. Buffer is read-only.')
         view.set_read_only(True)
Example #2
0
    def __check_ise_version(self):
        """Determine the installed Xilinx ISE version.

        First looks for a version number in the path of the ``xst`` binary,
        then falls back to parsing ``xst -h`` output. Returns the version as
        a string such as "13.1", or None when it can't be parsed; exits the
        process when xst is not on the PATH.
        """
        import subprocess
        import re
        xst = subprocess.Popen('which xst', shell=True,
            stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True)
        lines = xst.stdout.readlines()
        if not lines:
            p.error("Xilinx binaries are not in the PATH variable\n"
                "Can't determine ISE version")
            quit()

        xst = str(lines[0].strip())
        # Raw strings for the regexes: "\d" in a plain literal is an invalid
        # escape sequence on modern Python (DeprecationWarning, then error).
        version_pattern = re.compile(r".*?(\d\d\.\d).*")  # First check if we have version in path
        match = re.match(version_pattern, xst)
        if match:
            ise_version = match.group(1)
        else:  # If it is not the case call the "xst -h" to get version
            xst_output = subprocess.Popen('xst -h', shell=True,
            stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True)
            xst_output = xst_output.stdout.readlines()[0]
            xst_output = xst_output.strip()
            version_pattern = \
                    re.compile(r'Release\s(?P<major>\d|\d\d)[^\d](?P<minor>\d|\d\d)\s.*')
            match = re.match(version_pattern, xst_output)
            if match:
                ise_version = ''.join((match.group('major'), '.', match.group('minor')))
            else:
                p.error("xst output is not in expected format: " + xst_output + "\n"
                        "Can't determine ISE version")
                return None

        p.vprint("ISE version: " + ise_version)
        return ise_version
Example #3
0
 def update_view(buf, view=None):
     """Replace view's entire contents with buf['buf'], preserving the
     scroll position and selections.

     buf -- buffer dict; must carry 'id' and 'buf' keys.
     view -- target view; looked up via buf['id'] when omitted.
     """
     view = view or get_view(buf['id'])
     # Snapshot scroll/selection state so it can be restored afterwards.
     visible_region = view.visible_region()
     viewport_position = view.viewport_position()
     # deep copy
     selections = [x for x in view.sel()]
     # Tell the modification listener to ignore the event we're about to cause.
     MODIFIED_EVENTS.put(1)
     try:
         view.run_command('floo_view_replace_region', {
             'r': [0, view.size()],
             'data': buf['buf']
         })
     except Exception as e:
         msg.error('Exception updating view: %s' % e)
     # Restore viewport on the next tick; the replace may have moved it.
     utils.set_timeout(view.set_viewport_position, 0, viewport_position,
                       False)
     view.sel().clear()
     view.show(visible_region, False)
     for sel in selections:
         view.sel().add(sel)
     if 'patch' in G.PERMS:
         view.set_read_only(False)
     else:
         view.set_status(
             'Floobits',
             'You don\'t have write permission. Buffer is read-only.')
         view.set_read_only(True)
Example #4
0
 def connect(self, cb=None):
     """Open the TCP (optionally SSL-wrapped) connection and send auth.

     cb -- optional callable invoked after auth has been sent.
     On socket errors, schedules a reconnect and returns.
     """
     self.empty_selects = 0
     self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     if self.secure:
         if ssl:  # ST2 on linux doesn't have the ssl module. Not sure about windows
             self.sock = ssl.wrap_socket(self.sock, ca_certs=CERT, cert_reqs=ssl.CERT_REQUIRED)
         else:
             # Fall back to the plaintext port when SSL isn't available.
             msg.debug("No SSL module found. Connection will not be encrypted.")
             if self.port == G.DEFAULT_PORT:
                 self.port = 3148  # plaintext port
     msg.debug("Connecting to %s:%s" % (self.host, self.port))
     try:
         self.sock.connect((self.host, self.port))
         if self.secure and ssl:
             self.sock.do_handshake()
     except socket.error as e:
         msg.error("Error connecting:", e)
         self.reconnect()
         return
     # Non-blocking from here on; the select loop drives further I/O.
     self.sock.setblocking(0)
     msg.debug("Connected!")
     self.reconnect_delay = G.INITIAL_RECONNECT_DELAY
     self.send_auth()
     if cb:
         cb()
Example #5
0
def getBuild(tag, date, path):
    """Download and extract a Jenkins build artifact into *path*.

    tag -- build tag
    date -- build date string
    path -- destination directory
    Returns the build directory name; exits the process when the
    artifact is not available on the server.
    """
    # get build with defined tag and date and save in path
    buildName = "genie_" + tag + "_buildmaster_" + date
    # check if build aready exists
    if os.path.isdir(path + "/" + buildName):
        msg.warning(
            path + "/" + buildName + " already exists ... " + msg.BOLD +
            "skipping jenkins:getBuild\n", 1)
        return buildName
    # no build
    tarball = buildName + ".tgz"
    # check it build available
    if tarball not in getBuildList():
        msg.error("There is no artifact for " + msg.BOLD + tarball + "\n")
        print "Available artifacts:\n"
        for artifact in getBuildList():
            print "\t" + artifact + "\n"
        sys.exit(1)
    # download build
    msg.info("Downloading " + msg.BOLD + tarball)
    urllib.urlretrieve(url + "/artifact/genie_builds/" + tarball,
                       path + "/" + tarball)
    # extract the build
    msg.info("Extracting to " + msg.BOLD + path + "/" + buildName + "\n")
    tarfile.open(path + "/" + tarball, 'r').extractall(path + "/" + buildName)
    # return buildName
    return buildName
Example #6
0
def generate_filelist(prefix, old_size, new_size):
    """Generate filenames for output files
    and return as a dict (file: [begin, end]).

    Keyword arguments:
    prefix -- common path/to/basename
    old_size -- size of input hdf5 files
    new_size -- requested size for output hdf5 files
    """

    if new_size >= old_size:
        msg.error("Use splitter wisely...")
        sys.exit(1)

    # divmod gives explicit floor division, which also keeps this correct
    # on Python 3 where `/` on ints returns a float (and breaks range()).
    nof_files, leftover = divmod(old_size, new_size)

    files = OrderedDict()

    # One extra file picks up the leftover entries when the split is uneven.
    for i in range(nof_files + int(leftover > 0)):
        filename = "%(prefix)s_%(id)03d.hdf5" % {"prefix": prefix, "id": i}
        begin = i * new_size
        end = (i + 1) * new_size if i < nof_files else i * new_size + leftover
        files[filename] = [begin, end]

    return files
Example #7
0
 def delete_buf(path):
     """Delete the workspace buffer(s) backing *path*.

     Directories recurse into every non-hidden file; a single file is
     matched against BUFS by relative path and a delete_buf event is
     queued on the agent.
     """
     if not utils.is_shared(path):
         msg.error('Skipping deleting %s because it is not in shared path %s.' % (path, G.PROJECT_PATH))
         return
     if os.path.isdir(path):
         for dirpath, dirnames, filenames in os.walk(path):
             # TODO: rexamine this assumption
             # Don't care about hidden stuff
             dirnames[:] = [d for d in dirnames if d[0] != '.']
             for f in filenames:
                 f_path = os.path.join(dirpath, f)
                 if f[0] == '.':
                     msg.log('Not deleting buf for hidden file %s' % f_path)
                 else:
                     Listener.delete_buf(f_path)
         return
     # Single file: find its buffer by relative path.
     buf_to_delete = None
     rel_path = utils.to_rel_path(path)
     for buf_id, buf in BUFS.items():
         if rel_path == buf['path']:
             buf_to_delete = buf
             break
     if buf_to_delete is None:
         msg.error('%s is not in this room' % path)
         return
     msg.log('deleting buffer ', rel_path)
     event = {
         'name': 'delete_buf',
         'id': buf_to_delete['id'],
     }
     Listener.agent.put(event)
Example #8
0
 def create(self):
     """
     create() prototype
     :return: boolean
     """
     # Space added before "doesn't": the two concatenated literals previously
     # rendered as "Service <module>doesn't implement...". Stray trailing "?"
     # dropped to match the update() prototype's message style.
     msg.error("Service " + str(self.__module__) + " doesn't implement create() function")
     return False
 def delete_buf(path):
     """Delete the workspace buffer(s) backing *path*.

     Directories recurse into every non-hidden file; a single file is
     matched against BUFS by relative path and a delete_buf event is
     queued on the agent.
     """
     if not utils.is_shared(path):
         msg.error('Skipping deleting %s because it is not in shared path %s.' % (path, G.PROJECT_PATH))
         return
     if os.path.isdir(path):
         for dirpath, dirnames, filenames in os.walk(path):
             # TODO: rexamine this assumption
             # Don't care about hidden stuff
             dirnames[:] = [d for d in dirnames if d[0] != '.']
             for f in filenames:
                 f_path = os.path.join(dirpath, f)
                 if f[0] == '.':
                     msg.log('Not deleting buf for hidden file %s' % f_path)
                 else:
                     Listener.delete_buf(f_path)
         return
     # Single file: find its buffer by relative path.
     buf_to_delete = None
     rel_path = utils.to_rel_path(path)
     for buf_id, buf in BUFS.items():
         if rel_path == buf['path']:
             buf_to_delete = buf
             break
     if buf_to_delete is None:
         msg.error('%s is not in this room' % path)
         return
     msg.log('deleting buffer ', rel_path)
     event = {
         'name': 'delete_buf',
         'id': buf_to_delete['id'],
     }
     Listener.agent.put(event)
Example #10
0
    def make_list_of_modules(self):
        """Collect this module plus all transitively-referenced submodules.

        Walks local/git/svn references of every fetched module. Returns the
        flattened list (always containing self); exits the process when an
        unfetched module is encountered.
        """
        p.vprint("Making list of modules for " + str(self))
        new_modules = [self]
        modules = [self]
        while len(new_modules) > 0:
            cur_module = new_modules.pop()

            if not cur_module.isfetched:
                p.error("Unfetched module in modules list: " + str(cur_module))
                quit()
            # `is None` instead of `== None`: identity is the correct
            # (and PEP 8 mandated) way to test for None.
            if cur_module.manifest is None:
                p.vprint("No manifest in " + str(cur_module))
                continue
            cur_module.parse_manifest()

            for module in cur_module.local:
                modules.append(module)
                new_modules.append(module)

            for module in cur_module.git:
                modules.append(module)
                new_modules.append(module)

            for module in cur_module.svn:
                modules.append(module)
                new_modules.append(module)

        if len(modules) == 0:
            p.vprint("No modules were found in " + self.fetchto)
        return modules
Example #11
0
def get_info(workspace_url, project_dir):
    """Detect the repo in project_dir and return {'type': ..., 'url': ...}.

    Returns None when no repo is detected or the url can't be determined.
    """
    repo_type = detect_type(project_dir)
    if not repo_type:
        return
    msg.debug('Detected ', repo_type, ' repo in ', project_dir)
    data = {
        'type': repo_type,
    }
    cmd = REPO_MAPPING[repo_type]['cmd']
    try:
        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                cwd=project_dir)
        stdout_data, stderr_data = proc.communicate()
        repo_url = stdout_data.decode('utf-8').strip()
        # svn reports its info as XML; pull the url out of it.
        if repo_type == 'svn':
            repo_url = parse_svn_xml(repo_url)
        msg.log(repo_type, ' url is ', repo_url)
        if not repo_url:
            msg.error('Error getting ', repo_type, ' url:', stderr_data)
            return
    except Exception as e:
        msg.error('Error getting ', repo_type, ' url:', str_e(e))
        return

    data['url'] = repo_url
    return data
Example #12
0
 def connect(self, cb=None):
     """Open the TCP (optionally SSL-wrapped) connection and send auth.

     cb -- optional callable invoked after auth has been sent.
     On socket errors, schedules a reconnect and returns.
     """
     self.empty_selects = 0
     self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     if self.secure:
         if ssl:  # ST2 on linux doesn't have the ssl module. Not sure about windows
             self.sock = ssl.wrap_socket(self.sock,
                                         ca_certs=CERT,
                                         cert_reqs=ssl.CERT_REQUIRED)
         else:
             # Fall back to the plaintext port when SSL isn't available.
             msg.debug(
                 'No SSL module found. Connection will not be encrypted.')
             if self.port == G.DEFAULT_PORT:
                 self.port = 3148  # plaintext port
     msg.debug('Connecting to %s:%s' % (self.host, self.port))
     try:
         self.sock.connect((self.host, self.port))
         if self.secure and ssl:
             self.sock.do_handshake()
     except socket.error as e:
         msg.error('Error connecting:', e)
         self.reconnect()
         return
     # Non-blocking from here on; the select loop drives further I/O.
     self.sock.setblocking(0)
     msg.debug('Connected!')
     self.reconnect_delay = G.INITIAL_RECONNECT_DELAY
     self.send_auth()
     if cb:
         cb()
Example #13
0
def partcheck(data1, data2, key, n=100):
    """Do full comparison entry by entry.

    Keyword arguments:
    data1 -- numpy array with data
    data2 -- numpy array with data
    key -- dataset being checked
    n -- number of entries to compare in each part of the file
    """

    msg.warning("%(key)s dataset too big to fit in memory. "
                "Comparing first / last / random %(n)d entries." % {
                    "key": key,
                    "n": n
                })

    N = len(data1)
    # list() keeps this working on Python 3, where range() is a lazy
    # object with no extend() method.
    entries = list(range(n))
    entries.extend(range(N - n, N))
    entries.extend(np.random.randint(low=n, high=N - n, size=n))

    for i in entries:
        if not np.array_equal(data1[i], data2[i]):
            msg.error("Different entry %(id)d in %(key)s dataset." % {
                "key": key,
                "id": i
            })
            sys.exit(1)
Example #14
0
def generate_filelist(prefix, old_size, new_size):
    """Generate filenames for output files
    and return as a dict (file: [begin, end]).

    Keyword arguments:
    prefix -- common path/to/basename
    old_size -- size of input hdf5 files
    new_size -- requested size for output hdf5 files
    """
    # (leftover debug print statements removed)
    if new_size >= old_size:
        msg.error("Use splitter wisely...")
        sys.exit(1)

    # divmod gives explicit floor division, which also keeps this correct
    # on Python 3 where `/` on ints returns a float (and breaks range()).
    nof_files, leftover = divmod(old_size, new_size)

    files = OrderedDict()
    # One extra file picks up the leftover entries when the split is uneven.
    for i in range(nof_files + int(leftover > 0)):
        filename = "%(prefix)s_%(id)03d.hdf5" % {"prefix": prefix, "id": i}
        begin = i * new_size
        end = (i + 1) * new_size if i < nof_files else i * new_size + leftover
        files[filename] = [begin, end]

    return files
Example #15
0
    def make_list_of_modules(self):
        """Collect this module plus all transitively-referenced submodules.

        Walks local/git/svn references of every fetched module. Returns the
        flattened list (always containing self); exits the process when an
        unfetched module is encountered.
        """
        p.vprint("Making list of modules for " + str(self))
        new_modules = [self]
        modules = [self]
        while len(new_modules) > 0:
            cur_module = new_modules.pop()

            if not cur_module.isfetched:
                p.error("Unfetched module in modules list: " + str(cur_module))
                quit()
            # `is None` instead of `== None`: identity is the correct
            # (and PEP 8 mandated) way to test for None.
            if cur_module.manifest is None:
                p.vprint("No manifest in " + str(cur_module))
                continue
            cur_module.parse_manifest()

            for module in cur_module.local:
                modules.append(module)
                new_modules.append(module)

            for module in cur_module.git:
                modules.append(module)
                new_modules.append(module)

            for module in cur_module.svn:
                modules.append(module)
                new_modules.append(module)

        if len(modules) == 0:
            p.vprint("No modules were found in " + self.fetchto)
        return modules
Example #16
0
 def add_file(self, p):
     """Classify directory entry *p*: recurse into directories, track
     regular files, and skip hidden/ignored/oversized entries.
     """
     p_path = os.path.join(self.path, p)
     if p[0] == '.' and p not in HIDDEN_WHITELIST:
         msg.log('Ignoring hidden path %s' % p_path)
         return
     is_ignored = self.is_ignored(p_path)
     if is_ignored:
         msg.log(is_ignored)
         return
     try:
         s = os.stat(p_path)
     except Exception as e:
         # Message previously said lstat() but the call is os.stat();
         # keep the two in sync (the sibling copy of this method in the
         # file already says stat()).
         msg.error('Error stat()ing path %s: %s' % (p_path, unicode(e)))
         return
     if stat.S_ISDIR(s.st_mode):
         # Directories become child Ignore nodes; their size rolls up.
         ig = Ignore(self, p_path)
         self.children.append(ig)
         self.size += ig.size
         return
     elif stat.S_ISREG(s.st_mode):
         if s.st_size > (MAX_FILE_SIZE):
             # Too-big files are tracked under a synthetic ignore list.
             self.ignores['/TOO_BIG/'].append(p)
             msg.log(self.is_ignored_message(p_path, p, '/TOO_BIG/'))
         else:
             self.size += s.st_size
             self.files.append(p)
Example #17
0
def update_data(data, keys, skip=None):
    """Remove not requested datasets.

    Keyword arguments:
    data -- dictionary with data
    keys -- user-requested keys
    skip -- the key not to delete
    """

    # Iterate over a snapshot of the keys: deleting from a dict while
    # iterating its live key view raises RuntimeError on Python 3.
    for key in list(data.keys()):
        if key == skip:
            continue
        if key not in keys:
            del data[key]

    if not len(data):
        msg.error("No datasets to process.")
        sys.exit(1)

    check.get_size(data)

    for key in keys:
        if key not in data.keys():
            msg.warning("%s requested, but not found." % key)
Example #18
0
 def is_ignored(self, path, is_dir=None):
     """Return an ignore message when *path* matches a pattern, else False.

     path -- path to test.
     is_dir -- whether path is a directory; stat()ed lazily when a
               trailing-slash (directory-only) pattern requires it.
     Checks this node's patterns first, then delegates to the parent.
     """
     rel_path = os.path.relpath(path, self.path)
     for ignore_file, patterns in self.ignores.items():
         for pattern in patterns:
             base_path, file_name = os.path.split(rel_path)
             if pattern[0] == '/':
                 # Only match immediate children
                 # NOTE(review): base_path is relative while self.path looks
                 # absolute -- confirm unfuck_path makes these comparable.
                 if utils.unfuck_path(base_path) == self.path and fnmatch.fnmatch(file_name, pattern[1:]):
                     return self.is_ignored_message(path, pattern, ignore_file)
             else:
                 if len(pattern) > 0 and pattern[-1] == '/':
                     # Trailing-slash patterns match directories only.
                     if is_dir is None:
                         try:
                             s = os.stat(path)
                         except Exception as e:
                             msg.error('Error lstat()ing path %s: %s' % (path, unicode(e)))
                             continue
                         is_dir = stat.S_ISDIR(s.st_mode)
                     if is_dir:
                         pattern = pattern[:-1]
                 if fnmatch.fnmatch(file_name, pattern):
                     return self.is_ignored_message(path, pattern, ignore_file)
                 if fnmatch.fnmatch(rel_path, pattern):
                     return self.is_ignored_message(path, pattern, ignore_file)
     if self.parent:
         return self.parent.is_ignored(path)
     return False
Example #19
0
    def __init__(self, parent, path):
        """Build an ignore node for *path*.

        parent -- parent Ignore node, or None at the root.
        path -- directory (or single file) to scan.
        """
        self.parent = parent
        self.size = 0
        self.children = []
        self.files = []
        self.ignores = {
            '/TOO_BIG/': []
        }
        self.path = utils.unfuck_path(path)

        try:
            paths = os.listdir(self.path)
        except OSError as e:
            if e.errno != errno.ENOTDIR:
                raise
            # path was a file, not a directory: index it as an entry of
            # its parent directory instead.
            self.path = os.path.dirname(self.path)
            self.add_file(os.path.basename(path))
            return
        except Exception as e:
            msg.error('Error listing path %s: %s' % (path, unicode(e)))
            return

        msg.log('Initializing ignores for %s' % path)
        for ignore_file in IGNORE_FILES:
            try:
                self.load(ignore_file)
            except Exception:
                # Missing/unreadable ignore files are expected. The previous
                # bare `except:` also swallowed SystemExit/KeyboardInterrupt;
                # `except Exception` matches the sibling version of this
                # constructor elsewhere in the file.
                pass

        for p in paths:
            self.add_file(p)
Example #20
0
    def __init__(self, parent, path, recurse=True):
        """Build an ignore node for *path*.

        parent -- parent Ignore node, or None at the root.
        path -- directory (or single file) to scan.
        recurse -- when True, add every directory entry as a child/file.
        """
        self.parent = parent
        self.size = 0
        self.children = []
        self.files = []
        self.ignores = {
            '/TOO_BIG/': []
        }
        self.path = utils.unfuck_path(path)

        try:
            paths = os.listdir(self.path)
        except OSError as e:
            if e.errno != errno.ENOTDIR:
                msg.error('Error listing path %s: %s' % (path, unicode(e)))
                return
            # path was a file, not a directory: index it as an entry of
            # its parent directory instead.
            self.path = os.path.dirname(self.path)
            self.add_file(os.path.basename(path))
            return
        except Exception as e:
            msg.error('Error listing path %s: %s' % (path, unicode(e)))
            return

        msg.debug('Initializing ignores for %s' % path)
        for ignore_file in IGNORE_FILES:
            try:
                self.load(ignore_file)
            except Exception:
                # Missing/unreadable ignore files are expected; skip them.
                pass

        if recurse:
            for p in paths:
                self.add_file(p)
Example #21
0
 def add_file(self, p):
     """Classify directory entry *p*: recurse into directories, track
     regular files, and skip hidden/ignored/oversized entries.
     """
     p_path = os.path.join(self.path, p)
     if p[0] == '.' and p not in HIDDEN_WHITELIST:
         msg.log('Ignoring hidden path %s' % p_path)
         return
     is_ignored = self.is_ignored(p_path)
     if is_ignored:
         msg.log(is_ignored)
         return
     try:
         s = os.stat(p_path)
     except Exception as e:
         msg.error('Error stat()ing path %s: %s' % (p_path, unicode(e)))
         return
     if stat.S_ISDIR(s.st_mode):
         # Directories become child Ignore nodes; their size rolls up.
         ig = Ignore(self, p_path)
         self.children.append(ig)
         self.size += ig.size
         return
     elif stat.S_ISREG(s.st_mode):
         if s.st_size > (MAX_FILE_SIZE):
             # Too-big files are tracked under a synthetic ignore list.
             self.ignores['/TOO_BIG/'].append(p)
             msg.log(self.is_ignored_message(p_path, p, '/TOO_BIG/'))
         else:
             self.size += s.st_size
             self.files.append(p)
Example #22
0
def get_info(workspace_url, project_dir):
    """Detect the repo in project_dir and return {'type': ..., 'url': ...}.

    Returns None when no repo is detected or the url can't be determined.
    """
    repo_type = detect_type(project_dir)
    if not repo_type:
        return
    msg.debug('Detected ', repo_type, ' repo in ', project_dir)
    data = {
        'type': repo_type,
    }
    cmd = REPO_MAPPING[repo_type]['cmd']
    try:
        p = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             cwd=project_dir)
        # result is (stdout, stderr) byte strings.
        result = p.communicate()
        repo_url = result[0].decode('utf-8').strip()
        # svn reports its info as XML; pull the url out of it.
        if repo_type == 'svn':
            repo_url = parse_svn_xml(repo_url)
        msg.log(repo_type, ' url is ', repo_url)
        if not repo_url:
            msg.error('Error getting ', repo_type, ' url:', result[1])
            return
    except Exception as e:
        msg.error('Error getting ', repo_type, ' url:', str_e(e))
        return

    data['url'] = repo_url
    return data
Example #23
0
 def connect(self):
     """Connect to the server, optionally over verified SSL, and start auth."""
     self.empty_selects = 0
     self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     if self.secure:
         if ssl:
             # Write our bundled CA cert out so wrap_socket can verify the
             # server certificate against it.
             cert_path = os.path.join(G.COLAB_DIR, 'startssl-ca.pem')
             with open(cert_path, 'wb') as cert_fd:
                 cert_fd.write(cert.CA_CERT.encode('utf-8'))
             self.sock = ssl.wrap_socket(self.sock,
                                         ca_certs=cert_path,
                                         cert_reqs=ssl.CERT_REQUIRED)
         else:
             # Fall back to the plaintext port when SSL isn't available.
             msg.log(
                 'No SSL module found. Connection will not be encrypted.')
             if self.port == G.DEFAULT_PORT:
                 self.port = 3148  # plaintext port
     msg.log('Connecting to %s:%s' % (self.host, self.port))
     try:
         self.sock.settimeout(30)  # Seconds before timing out connecting
         self.sock.connect((self.host, self.port))
         if self.secure and ssl:
             self.sock.do_handshake()
     except socket.error as e:
         msg.error('Error connecting:', e)
         self.reconnect()
         return
     # Non-blocking from here on; the select loop drives further I/O.
     self.sock.setblocking(False)
     msg.log('Connected!')
     self.reconnect_delay = G.INITIAL_RECONNECT_DELAY
     utils.set_timeout(self.select, 0)
     self.auth()
Example #24
0
    def delete_buf(self, path):
        """deletes a path"""

        if not path:
            return

        path = utils.get_full_path(path)

        if not self.is_shared(path):
            msg.error("Skipping deleting %s because it is not in shared path %s." % (path, G.PROJECT_PATH))
            return

        if os.path.isdir(path):
            # Recurse into every non-hidden file under the directory.
            for dirpath, dirnames, filenames in os.walk(path):
                # Don't care about hidden stuff
                dirnames[:] = [d for d in dirnames if d[0] != "."]
                for f in filenames:
                    f_path = os.path.join(dirpath, f)
                    if f[0] == ".":
                        msg.log("Not deleting buf for hidden file %s" % f_path)
                    else:
                        self.delete_buf(f_path)
            return
        # Single file: find its buffer by relative path.
        buf_to_delete = None
        rel_path = utils.to_rel_path(path)
        for buf_id, buf in self.FLOO_BUFS.items():
            if rel_path == buf["path"]:
                buf_to_delete = buf
                break
        if buf_to_delete is None:
            msg.error("%s is not in this room" % path)
            return
        msg.log("deleting buffer ", rel_path)
        event = {"name": "delete_buf", "id": buf_to_delete["id"]}
        self.agent.put(event)
Example #25
0
    def on_patch(self, data):
        """Apply a diff-match-patch payload from the server to a local buffer.

        data -- dict with 'id', 'patch', 'md5_before' and 'md5_after' keys.
        Re-fetches the whole buffer from the server whenever the patch does
        not apply cleanly or a checksum disagrees.
        """
        buf_id = data['id']
        buf = self.FLOO_BUFS[buf_id]
        view = self.get_view(buf_id)
        DMP = dmp.diff_match_patch()
        if len(data['patch']) == 0:
            msg.error('wtf? no patches to apply. server is being stupid')
            return
        dmp_patches = DMP.patch_fromText(data['patch'])
        # TODO: run this in a separate thread
        # Prefer the live view's text; fall back to our cached copy.
        if view:
            old_text = view.get_text()
        else:
            old_text = buf.get('buf', '')
        md5_before = hashlib.md5(old_text.encode('utf-8')).hexdigest()
        if md5_before != data['md5_before']:
            msg.debug('maybe vim is lame and discarded a trailing newline')
            old_text += '\n'
        # Re-hash after the possible newline fixup; warn (but continue) if we
        # still don't match the server's starting checksum.
        md5_before = hashlib.md5(old_text.encode('utf-8')).hexdigest()
        if md5_before != data['md5_before']:
            msg.warn('starting md5s don\'t match for %s. ours: %s patch: %s this is dangerous!' %
                    (buf['path'], md5_before, data['md5_before']))

        t = DMP.patch_apply(dmp_patches, old_text)

        # t[0] is the patched text, t[1] a list of per-patch success flags.
        clean_patch = True
        for applied_patch in t[1]:
            if not applied_patch:
                clean_patch = False
                break

        if G.DEBUG:
            if len(t[0]) == 0:
                msg.debug('OMG EMPTY!')
                msg.debug('Starting data:', buf['buf'])
                msg.debug('Patch:', data['patch'])
            if '\x01' in t[0]:
                msg.debug('FOUND CRAZY BYTE IN BUFFER')
                msg.debug('Starting data:', buf['buf'])
                msg.debug('Patch:', data['patch'])

        if not clean_patch:
            msg.error('failed to patch %s cleanly. re-fetching buffer' % buf['path'])
            return self.agent.send_get_buf(buf_id)

        cur_hash = hashlib.md5(t[0].encode('utf-8')).hexdigest()
        if cur_hash != data['md5_after']:
            msg.warn(
                '%s new hash %s != expected %s. re-fetching buffer...' %
                (buf['path'], cur_hash, data['md5_after'])
            )
            return self.agent.send_get_buf(buf_id)

        # Commit the patched text and checksum, then sync the open view.
        buf['buf'] = t[0]
        buf['md5'] = cur_hash

        if not view:
            self.save_buf(buf)
            return
        view.apply_patches(buf, t)
Example #26
0
def update_data(data, keys, skip=None):
    """Remove not requested datasets.

    Keyword arguments:
    data -- dictionary with data
    keys -- user-requested keys
    skip -- the key not to delete
    """

    # Decide what to drop against a snapshot of the keys, then delete.
    doomed = [k for k in data.keys() if k != skip and k not in keys]
    for k in doomed:
        del data[k]

    if not len(data):
        msg.error("No datasets to process.")
        sys.exit(1)

    check.get_size(data)

    # Tell the user about any requested dataset that wasn't present.
    for key in keys:
        if key not in data.keys():
            msg.warning("%s requested, but not found." % key)
 def connect(self):
     """Connect to the server, optionally over verified SSL, and start auth."""
     self.empty_selects = 0
     self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     if self.secure:
         if ssl:
             # Write our bundled CA cert out so wrap_socket can verify the
             # server certificate against it.
             cert_path = os.path.join(G.COLAB_DIR, 'startssl-ca.pem')
             with open(cert_path, 'wb') as cert_fd:
                 cert_fd.write(cert.CA_CERT.encode('utf-8'))
             self.sock = ssl.wrap_socket(self.sock, ca_certs=cert_path, cert_reqs=ssl.CERT_REQUIRED)
         else:
             # Fall back to the plaintext port when SSL isn't available.
             msg.log('No SSL module found. Connection will not be encrypted.')
             if self.port == G.DEFAULT_PORT:
                 self.port = 3148  # plaintext port
     msg.log('Connecting to %s:%s' % (self.host, self.port))
     try:
         self.sock.settimeout(30)  # Seconds before timing out connecting
         self.sock.connect((self.host, self.port))
         if self.secure and ssl:
             self.sock.do_handshake()
     except socket.error as e:
         msg.error('Error connecting:', e)
         self.reconnect()
         return
     # Non-blocking from here on; the select loop drives further I/O.
     self.sock.setblocking(False)
     msg.log('Connected!')
     self.reconnect_delay = G.INITIAL_RECONNECT_DELAY
     utils.set_timeout(self.select, 0)
     self.auth()
Example #28
0
def mkdir(path):
    """Create *path* (and any missing parents); an existing path is fine.

    Other OSErrors are logged and re-raised.
    """
    import errno
    try:
        os.makedirs(path)
    except OSError as e:
        # errno.EEXIST instead of the magic number 17: same value, but
        # portable and self-documenting.
        if e.errno != errno.EEXIST:
            msg.error('Can not create directory {0}.\n{1}'.format(path, e))
            raise
Example #29
0
def mkdir(path):
    """Create *path*, including parents; an already-existing path is a no-op."""
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno == 17:  # EEXIST: someone beat us to it, nothing to do
            return
        msg.error('Can not create directory {0}.\n{1}'.format(path, e))
        raise
Example #30
0
 def is_ignored(self, path, is_dir=None):
     """Return an ignore message when *path* matches a pattern, else False.

     path -- path to test.
     is_dir -- whether path is a directory; stat()ed lazily when a
               trailing-slash (directory-only) pattern requires it.
     Checks this node's patterns first, then delegates to the parent.
     """
     rel_path = os.path.relpath(path, self.path)
     for ignore_file, patterns in self.ignores.items():
         for pattern in patterns:
             base_path, file_name = os.path.split(rel_path)
             if pattern[0] == '/':
                 # Only match immediate children
                 # NOTE(review): base_path is relative while self.path looks
                 # absolute -- confirm unfuck_path makes these comparable.
                 if utils.unfuck_path(base_path) == self.path and fnmatch.fnmatch(file_name, pattern[1:]):
                     return self.is_ignored_message(path, pattern, ignore_file)
             else:
                 if len(pattern) > 0 and pattern[-1] == '/':
                     # Trailing-slash patterns match directories only.
                     if is_dir is None:
                         try:
                             s = os.stat(path)
                         except Exception as e:
                             msg.error('Error lstat()ing path %s: %s' % (path, unicode(e)))
                             continue
                         is_dir = stat.S_ISDIR(s.st_mode)
                     if is_dir:
                         pattern = pattern[:-1]
                 if fnmatch.fnmatch(file_name, pattern):
                     return self.is_ignored_message(path, pattern, ignore_file)
                 if fnmatch.fnmatch(rel_path, pattern):
                     return self.is_ignored_message(path, pattern, ignore_file)
     if self.parent:
         return self.parent.is_ignored(path)
     return False
Example #31
0
 def update(self, interval):
     """
     update() prototype
     :param interval: int
     :return: boolean
     """
     # Space added before "doesn't": the two concatenated literals previously
     # rendered as "Service <module>doesn't implement...".
     msg.error("Service " + str(self.__module__) + " doesn't implement update() function")
     return False
Example #32
0
 def create(self):
     """
     create() prototype
     :return: boolean
     """
     # Space added before "doesn't" (the message previously ran the module
     # name and the verb together); stray trailing "?" dropped to match the
     # update() prototype's message style.
     msg.error("Service " + str(self.__module__) +
               " doesn't implement create() function")
     return False
Example #33
0
 def run_local_synthesis(self):
     """Run local synthesis for the top module via Xilinx xtclsh."""
     tm = self.modules_pool.get_top_module()
     if tm.target == "xilinx":
         # Generate the TCL driver script on first run, then execute it.
         if not os.path.exists("run.tcl"):
             self.__generate_tcl()
         os.system("xtclsh run.tcl")
     else:
         p.error("Target " + tm.target + " is not synthesizable")
Example #34
0
 def set_text(self, text):
     """Replace the whole vim buffer with *text*, split into utf-8 lines."""
     msg.debug("\n\nabout to patch %s %s" % (str(self), self.vim_buf.name))
     try:
         msg.debug("now buf is loadedish? %s" % vim.eval("bufloaded(%s)" % self.native_id))
         # Vim buffers are line lists; slice-assign to replace contents
         # while keeping the buffer object itself.
         self.vim_buf[:] = text.encode("utf-8").split("\n")
     except Exception as e:
         msg.error("couldn't apply patches because: %s!\nThe unencoded text was: %s" % (str(e), text))
         raise
Example #35
0
def load_settings():
    """Load ~/.floorc settings into the G globals module.

    Also ensures the share directory (G.COLAB_DIR) exists.
    """
    settings = load_floorc()
    if not settings:
        msg.error('you should probably define some stuff in your ~/.floorc file')
    G.COLAB_DIR = os.path.expanduser(settings.get('share_dir', '~/.floobits/share/'))
    mkdir(G.COLAB_DIR)
    # Every floorc key becomes an attribute on the globals module.
    for name, val in settings.items():
        setattr(G, name, val)
Example #36
0
 def run_local_synthesis(self):
     """Run local synthesis for the top module via Xilinx xtclsh."""
     tm = self.modules_pool.get_top_module()
     # Guard clause: anything but a xilinx target can't be synthesized here.
     if tm.target != "xilinx":
         p.error("Target " + tm.target + " is not synthesizable")
         return
     # Generate the TCL driver script on first run, then execute it.
     if not os.path.exists("run.tcl"):
         self.__generate_tcl()
     os.system("xtclsh run.tcl")
Example #37
0
def rm(path):
    """removes path and dirs going up until a OSError"""
    import errno
    os.remove(path)
    try:
        os.removedirs(os.path.split(path)[0])
    except OSError as e:
        # The magic number 66 is ENOTEMPTY only on BSD/macOS; on Linux
        # ENOTEMPTY is 39, so the old check treated a perfectly normal
        # "parent directory still has files" as a fatal error. Use the
        # symbolic constant so the intent is portable.
        if e.errno != errno.ENOTEMPTY:
            msg.error('Can not delete directory {0}.\n{1}'.format(path, e))
            raise
Example #38
0
def rm(path):
    """Remove *path*, then prune now-empty parent directories.

    os.removedirs() deletes empty ancestors upward until one is not
    empty, which is the normal, silent stopping condition.

    :param path: file path to delete
    :raises OSError: re-raised when pruning fails for any reason other
        than a non-empty directory
    """
    import errno
    os.remove(path)
    try:
        os.removedirs(os.path.split(path)[0])
    except OSError as e:
        # Literal 66 is ENOTEMPTY only on BSD/macOS (39 on Linux);
        # errno.ENOTEMPTY is correct on every platform.
        if e.errno != errno.ENOTEMPTY:
            msg.error('Can not delete directory {0}.\n{1}'.format(path, e))
            raise
Example #39
0
 def update(self, interval):
     """
     update() prototype
     :param interval: int
     :return: boolean -- always False for this unimplemented stub
     """
     # Stub: concrete services must override update(). A leading space
     # before "doesn't" keeps the module name from running into the
     # rest of the message in the log.
     msg.error("Service " + str(self.__module__) +
               " doesn't implement update() function")
     return False
Example #40
0
 def is_ignored(self, path, is_dir=None, log=False):
     """Return True when *path* should be ignored.

     :param path: path to test
     :param is_dir: whether path is a directory; stat()ed when None
     :param log: forwarded to the internal matcher
     :return: True when ignored, or when the path cannot be stat()ed
     """
     if is_dir is None:
         try:
             s = os.stat(path)
         except Exception as e:
             # NOTE(review): message says lstat() but os.stat() is used
             # above -- confirm which call is intended.
             msg.error('Error lstat()ing path ', path, ': ', str_e(e))
             return True
         is_dir = stat.S_ISDIR(s.st_mode)
     # The matcher expects forward-slash paths relative to this root.
     rel_path = os.path.relpath(path, self.path).replace(os.sep, '/')
     return self._is_ignored(rel_path, is_dir, log)
Example #41
0
 def is_ignored(self, path, is_dir=None, log=False):
     """Return True when *path* should be ignored.

     :param path: path to test
     :param is_dir: whether path is a directory; stat()ed when None
     :param log: forwarded to the internal matcher
     :return: True when ignored, or when the path cannot be stat()ed
     """
     if is_dir is None:
         try:
             s = os.stat(path)
         except Exception as e:
             # NOTE(review): message says lstat() but os.stat() is used
             # above -- confirm which call is intended.
             msg.error('Error lstat()ing path %s: %s' % (path, str_e(e)))
             return True
         is_dir = stat.S_ISDIR(s.st_mode)
     # The matcher expects forward-slash paths relative to this root.
     rel_path = os.path.relpath(path, self.path).replace(os.sep, '/')
     return self._is_ignored(rel_path, is_dir, log)
Example #42
0
def create_flooignore(path):
    """Create a default .flooignore in *path* if one does not exist.

    Writes DEFAULT_IGNORES joined by newlines; failures are logged
    rather than raised.
    """
    target = os.path.join(path, '.flooignore')
    # A very short race condition, but whatever.
    if os.path.exists(target):
        return
    content = '\n'.join(DEFAULT_IGNORES)
    try:
        with open(target, 'w') as fd:
            fd.write(content)
    except Exception as e:
        msg.error('Error creating default .flooignore: %s' % str_e(e))
Example #43
0
def create_flooignore(path):
    """Create a default .flooignore in *path* if one does not exist.

    Writes DEFAULT_IGNORES joined by newlines (UTF-8, binary mode);
    failures are logged rather than raised.
    """
    flooignore = os.path.join(path, '.flooignore')
    # A very short race condition, but whatever.
    if os.path.exists(flooignore):
        return
    try:
        with open(flooignore, 'wb') as fd:
            fd.write('\n'.join(DEFAULT_IGNORES).encode('utf-8'))
    except Exception as e:
        msg.error('Error creating default .flooignore: %s' % str(e))
Example #44
0
def same_sizes(data1, data2):
    """Check if files have the same #entries per dataset.

    Exits with status 1 when the entry counts differ.

    Keyword arguments:
    data1 -- first file
    data2 -- second file
    """

    size1, size2 = get_size(data1), get_size(data2)
    if size1 == size2:
        return
    msg.error("Files must have the same number of entries to be combined.")
    sys.exit(1)
Example #45
0
def check_keys(data1, data2):
    """Check that both files have the same datasets.

    Exits with status 1 when the key sets differ.

    Keyword arguments:
    data1 -- current data dictionary
    data2 -- data dictionary to be added
    """
    # Compare as sets: on Python 2, dict.keys() returns lists whose
    # comparison is order-sensitive, so identical key sets in a
    # different iteration order would be flagged as different.
    if set(data1.keys()) != set(data2.keys()):
        msg.error("Files have different datasets.")
        sys.exit(1)
Example #46
0
 def set_text(self, text):
     """Replace the whole vim buffer with *text*.

     :param text: unicode string; encoded to UTF-8 and split on
         newlines before being assigned to the buffer
     :raises: re-raises any failure from the buffer assignment after
         logging it along with the original text
     """
     msg.debug('\n\nabout to patch %s %s' % (str(self), self.vim_buf.name))
     try:
         msg.debug("now buf is loadedish? %s" %
                   vim.eval('bufloaded(%s)' % self.native_id))
         # NOTE(review): bytes.split('\n') is Python 2 only; Python 3
         # would require b'\n'. Presumably Python 2 plugin code -- confirm.
         self.vim_buf[:] = text.encode('utf-8').split('\n')
     except Exception as e:
         msg.error(
             "couldn't apply patches because: %s!\nThe unencoded text was: %s"
             % (str(e), text))
         raise
Example #47
0
def same_sizes(data1, data2):

    """Check if files have the same #entries per dataset.

    Exits with status 1 when the entry counts differ.

    Keyword arguments:
    data1 -- first file
    data2 -- second file
    """

    if get_size(data1) != get_size(data2):
        msg.error("Files must have the same number of entries to be combined.")
        sys.exit(1)
Example #48
0
def check_shapes(data1, data2):
    """Check if shapes of datasets are the same.

    Only the per-entry shape (axes after the first) is compared; exits
    with status 1 on the first mismatch.

    Keyword arguments:
    data1 -- current data dictionary
    data2 -- data dictionary to be added
    """

    # shape[1:] ignores the leading entry-count axis, which may differ.
    mismatched = (k for k in data1.keys()
                  if data1[k].shape[1:] != data2[k].shape[1:])
    for key in mismatched:
        msg.error("Different shapes for dataset: %s. " % key)
        sys.exit(1)
Example #49
0
def check_keys(data1, data2):
    """Check that both files have the same datasets.

    Exits with status 1 when the key sets differ.

    Keyword arguments:
    data1 -- current data dictionary
    data2 -- data dictionary to be added
    """
    # Compare as sets: on Python 2, dict.keys() returns lists whose
    # comparison is order-sensitive, so equal key sets in a different
    # iteration order would be flagged as different.
    if set(data1.keys()) != set(data2.keys()):
        msg.error("Files have different datasets.")
        sys.exit(1)
Example #50
0
def check_duplicates(keys1, keys2):
    """Check if given files have different (except skip) datasets.

    Exits with status 1 on the first key that appears in both lists.

    Keyword arguments:
    keys1 -- the list of keys to be copied from file1
    keys2 -- the list of keys to be copied from file2
    """

    seen = set(keys2)
    for key in keys1:
        if key not in seen:
            continue
        msg.error("Duplicated dataset: %s in input files." % key)
        sys.exit(1)
Example #51
0
    def select(self):
        """One iteration of the socket event loop.

        Uses select() on the single connection socket, drains incoming
        data into self.protocol, flushes queued outgoing patches, and
        reschedules itself via utils.set_timeout. Any socket-level
        failure hands control to self.reconnect().
        """
        if not self.sock:
            msg.error('select(): No socket.')
            return self.reconnect()

        try:
            # this blocks until the socket is readable or writeable
            _in, _out, _except = select.select([self.sock], [self.sock],
                                               [self.sock])
        except (select.error, socket.error, Exception) as e:
            msg.error('Error in select(): %s' % str(e))
            return self.reconnect()

        if _except:
            msg.error('Socket error')
            return self.reconnect()

        if _in:
            buf = ''.encode('utf-8')
            while True:
                try:
                    # Drain everything currently available; an empty
                    # recv() or a socket error ends the read loop.
                    d = self.sock.recv(4096)
                    if not d:
                        break
                    buf += d
                except (socket.error, TypeError):
                    break

            if buf:
                self.empty_selects = 0
                self.protocol(buf)
            else:
                # Readable but no data -- presumably the peer closed;
                # give up after more than 10 consecutive empty reads.
                self.empty_selects += 1
                if self.empty_selects > 10:
                    msg.error('No data from sock.recv() {0} times.'.format(
                        self.empty_selects))
                    return self.reconnect()

        if _out:
            for p in self.get_patches():
                if p is None:
                    # None entries carry no payload but still need their
                    # queue slot acknowledged.
                    SOCKET_Q.task_done()
                    continue
                try:
                    msg.debug('writing patch: %s' % p)
                    self.sock.sendall(p.encode('utf-8'))
                    SOCKET_Q.task_done()
                except Exception as e:
                    msg.error('Couldn\'t write to socket: %s' % str(e))
                    return self.reconnect()

        utils.set_timeout(self.select, 100)
Example #52
0
def check_shapes(data1, data2):

    """Check if shapes of datasets are the same.

    Only the per-entry shape (axes after the first) is compared; exits
    with status 1 on the first mismatch.

    Keyword arguments:
    data1 -- current data dictionary
    data2 -- data dictionary to be added
    """

    for key in data1.keys():
        # shape[1:] ignores the leading entry-count axis, which may differ.
        if data1[key].shape[1:] != data2[key].shape[1:]:
            msg.error("Different shapes for dataset: %s. " % key)
            sys.exit(1)
Example #53
0
def key_exists(key, data, filename):
    """Check if given dataset is included in the file.

    Exits with status 1 when the key is missing.

    Keyword arguments:
    key -- key to look for
    data -- data dictionary to check
    filename -- file name used in the error message
    """

    if key in data.keys():
        return
    msg.error("'%(key)s' key is missing in %(file)s."
              % {"key": key, "file": filename})
    sys.exit(1)
Example #54
0
def get_git_excludesfile():
    """Return the resolved path of git's core.excludesfile setting.

    :return: absolute path string, or None when the setting is absent
        or the git invocation fails
    """
    global_ignore = None
    try:
        # Each argument must be a separate list element: passing
        # 'config -z --get core.excludesfile' as one element makes git
        # see a single unknown subcommand and always fail.
        p = subprocess.Popen(
            ['git', 'config', '-z', '--get', 'core.excludesfile'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        result = p.communicate()
        global_ignore = result[0]
        if not global_ignore:
            return
        # -z terminates the value with NUL; strip it (and any newline)
        # before resolving the path.
        global_ignore = global_ignore.decode('utf-8').rstrip('\0\n')
        global_ignore = os.path.realpath(os.path.expanduser(global_ignore))
        msg.log('git core.excludesfile is ', global_ignore)
    except Exception as e:
        msg.error('Error getting git core.excludesfile:', str_e(e))
    return global_ignore
Example #55
0
    def generate_fetch_makefile(self):
        """Generate the fetch makefile for all fetchable modules.

        Logs an error and quits when there is nothing fetchable, or
        when some module has not yet been fetched (fetching must happen
        before makefile generation).
        """
        pool = self.modules_pool

        # Idiomatic emptiness test instead of comparing against [].
        if not pool.get_fetchable_modules():
            p.error("There are no fetchable modules. "
                    "No fetch makefile is produced")
            quit()

        if not pool.is_everything_fetched():
            p.error("A module remains unfetched. "
                    "Fetching must be done prior to makefile generation")
            quit()
        self.make_writer.generate_fetch_makefile(pool)
Example #56
0
    def generate_fetch_makefile(self):
        """Generate the fetch makefile for all fetchable modules.

        Logs an error and quits when there is nothing fetchable, or
        when some module has not yet been fetched (fetching must happen
        before makefile generation).
        """
        pool = self.modules_pool

        if pool.get_fetchable_modules() == []:
            p.error("There are no fetchable modules. "
                    "No fetch makefile is produced")
            quit()

        if not pool.is_everything_fetched():
            p.error("A module remains unfetched. "
                    "Fetching must be done prior to makefile generation")
            quit()
        self.make_writer.generate_fetch_makefile(pool)
Example #57
0
def check_duplicates(keys1, keys2):

    """Check if given files have different (except skip) datasets.

    Exits with status 1 on the first key present in both lists.

    Keyword arguments:
    keys1 -- the list of keys to be copied from file1
    keys2 -- the list of keys to be copied from file2
    """

    for key in keys1:
        if key in keys2:
            msg.error("Duplicated dataset: %s in input files." % key)
            sys.exit(1)
Example #58
0
def get_git_excludesfile():
    """Return the resolved path of git's core.excludesfile setting.

    :return: absolute path string, or None when the setting is absent
        or the git invocation fails
    """
    global_ignore = None
    try:
        # Each argument must be a separate list element: passing
        # 'config -z --get core.excludesfile' as one element makes git
        # see a single unknown subcommand and always fail.
        p = subprocess.Popen(
            ['git', 'config', '-z', '--get', 'core.excludesfile'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        result = p.communicate()
        global_ignore = result[0]
        if not global_ignore:
            return
        # Decode instead of str(): on Python 3, str(bytes) yields the
        # literal "b'...'" representation, not the path. Also strip the
        # NUL terminator that -z appends to the value.
        global_ignore = global_ignore.decode('utf-8').rstrip('\0\n')
        global_ignore = os.path.realpath(os.path.expanduser(global_ignore))
        msg.log('git core.excludesfile is ', global_ignore)
    except Exception as e:
        msg.error('Error getting git core.excludesfile:', str_e(e))
    return global_ignore