Пример #1
0
    def getUpdateElements(self, valueMap):
        '''
        Build a string of XML elements, one per entry in *valueMap*.

        @param valueMap: dict mapping either a (namespace, name) tuple or a
            bare string (interpreted in the default namespace) to the
            element's value; values may be qp_xml._element nodes or text.
        @return: concatenated "<tag>value</tag>" fragments.
        '''
        parts = []
        for name, value in valueMap.items():
            fullname = name
            # Bare string keys live in the default namespace.
            if isinstance(name, types.StringType):
                fullname = (self.defaultNameSpace, name)
            if not fullname[0]:
                tag = fullname[1]
            else:
                tag = self.shortcuts[fullname[0]] + ':' + fullname[1]
            if value:
                if isinstance(value, qp_xml._element):
                    # Serialize the XML node; the readline() skips the
                    # leading "<?xml ...?>" declaration qp_xml emits.
                    tmpFile = TemporaryFile('w+')
                    try:
                        qp_xml.dump(tmpFile, value)
                        tmpFile.flush()
                        tmpFile.seek(0)
                        tmpFile.readline()
                        value = tmpFile.read()
                    finally:
                        tmpFile.close()  # was leaked before
                else:
                    value = "<![CDATA[%s]]>" % value
            else:
                value = ""
            parts.append("<%s>%s</%s>" % (tag, value, tag))
        # join once instead of quadratic string +=
        return "".join(parts)
Пример #2
0
 def getUpdateElements(self, valueMap):
     '''
     Render each valueMap entry as an XML element and return the
     concatenated markup.

     @param valueMap:
     '''
     result = ""
     for key in valueMap.keys():
         qualified = key
         # plain string keys are placed in the default namespace
         if isinstance(key, types.StringType):
             qualified = (self.defaultNameSpace, key)
         if qualified[0]:
             tag = self.shortcuts[qualified[0]] + ':' + qualified[1]
         else:
             tag = qualified[1]
         content = valueMap[key]
         if content:
             if isinstance(content, qp_xml._element):
                 # dump the node to a scratch file, dropping the first
                 # line (the XML declaration) before reading it back
                 scratch = TemporaryFile('w+')
                 qp_xml.dump(scratch, content)
                 scratch.flush()
                 scratch.seek(0)
                 scratch.readline()
                 content = scratch.read()
             else:
                 content = "<![CDATA[%s]]>" % content
         else:
             content = ""
         result += "<%s>%s</%s>" % (tag, content, tag)
     return result
Пример #3
0
    def addValues(self, values, experiment):
        """Substitute *values* for this protocol's variables and feed the
        resulting protocol lines into *experiment*.

        @param values: replacement strings, one per entry in
            self.variables; on a length mismatch nothing happens
            (original silent-skip behavior preserved).
        @param experiment: receives BEGIN/END comments (or '$' markers on
            the 'human' platform) and each parsed line via LineToList.
        """
        if len(self.variables) != len(values):
            return  # arity mismatch: silently skip, as before
        newProtocol = []
        for template in self.info:
            rendered = template
            for variable, value in zip(self.variables, values):
                rendered = rendered.replace(variable, value)
            newProtocol.append(rendered)
        from tempfile import TemporaryFile

        # context manager guarantees the temp file is closed (it leaked before)
        with TemporaryFile(mode='r+') as protocolFile:
            protocolFile.writelines(newProtocol)
            protocolFile.seek(0)
            line = protocolFile.readline()
            if experiment.platform != 'human':
                experiment.addComment('------ BEGIN PROTOCOL ' + self.name + ', variables: ' + ' '.join(self.variables) + '; values: ' + ' '.join(values) + ' ------')
            else:
                experiment.addComment('$')
            while line != '':
                splitline = line.split()
                LineToList(splitline, protocolFile, experiment)
                line = protocolFile.readline()
            if experiment.platform != 'human':
                experiment.addComment('------ END PROTOCOL ' + self.name + ' ------')
            else:
                experiment.addComment('$')
Пример #4
0
def dividing_file_and_sorting_by_lines(
        file):  # recursive external merge sort over a file's lines
    # (split into two temporary halves, sort each, merge back)
    """Sort *file* line-by-line in place with an external merge sort.

    Files above ~25 MB are split into two temporary halves, each sorted
    recursively, then merged back into *file*; smaller files are sorted
    in memory with merge_sort. Uses the module-level helpers/globals
    merge_sort, current_progress and final_progress (defined elsewhere)
    and prints a progress percentage.
    """
    file.seek(0)
    # NOTE(review): tempfile.TemporaryFile has no filesystem path on POSIX
    # (.name is a bare file-descriptor number there), so getsize(file.name)
    # only works where the platform gives temp files real names -- this
    # looks like it wants NamedTemporaryFile; TODO confirm.
    size = os.path.getsize(file.name)

    if size > 26214400:  # split threshold: 25 MiB

        first_help_file, second_help_file = TemporaryFile(
            mode="w+t"), TemporaryFile(mode="w+t")

        # route lines to the first half until it reaches half the size on
        # disk; NOTE(review): getsize lags unflushed buffered writes, so
        # the split point is approximate -- confirm this is acceptable
        for line in file:
            if os.path.getsize(first_help_file.name) <= size // 2:
                first_help_file.write(line)
            else:
                second_help_file.write(line)

        dividing_file_and_sorting_by_lines(first_help_file)
        dividing_file_and_sorting_by_lines(second_help_file)

        # rewind all three files before the merge pass
        file.seek(0)
        first_help_file.seek(0)
        second_help_file.seek(0)

        first_file_line = first_help_file.readline()
        second_file_line = second_help_file.readline()

        while (first_file_line != "") and (second_file_line !=
                                           ""):  # standard 2-way merge
            if first_file_line > second_file_line:
                file.write(second_file_line)
                second_file_line = second_help_file.readline(
                )  # advance the consumed side
            else:
                file.write(first_file_line)
                first_file_line = first_help_file.readline()
        # drain whichever half still has lines left
        while first_file_line != "":
            file.write(first_file_line)
            first_file_line = first_help_file.readline()  # drain first half
        while second_file_line != "":
            file.write(second_file_line)
            second_file_line = second_help_file.readline()  # drain second half

    else:  # base case: small enough to sort entirely in memory
        global current_progress
        info = file.readlines()
        merge_sort(info)
        file.seek(0)
        current_progress += len(info)
        info = "".join(info)
        file.write(info)
        # progress line is overwritten in place via "\r"; the sort phase
        # accounts for the upper 50% of overall progress
        print("\r",
              "Прогресс программы: ",
              50 + round((current_progress / final_progress) * 50),
              "%",
              end="")
Пример #5
0
    def import_lang(self, cr, uid, ids, context):
        """
            Import Language
            @param cr: the current row, from the database cursor.
            @param uid: the current user’s ID for security checks.
            @param ids: the ID or list of IDs
            @param context: A standard dictionary
        """
        import_data = self.browse(cr, uid, ids)[0]
        fileobj = TemporaryFile('w+')
        try:
            fileobj.write(base64.decodestring(import_data.data))

            # now we determine the file format: a CSV export starts with the
            # canonical column header, anything else is gettext .po
            fileobj.seek(0)
            first_line = fileobj.readline().strip().replace('"', '').replace(' ', '')
            fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po'
            fileobj.seek(0)

            tools.trans_load_data(cr,
                                  fileobj,
                                  fileformat,
                                  import_data.code,
                                  lang_name=import_data.name)
            tools.trans_update_res_ids(cr)
        finally:
            # release the temp file even when decoding/loading raises
            # (previously leaked on error)
            fileobj.close()
        return {}
    def import_lang(self, cr, uid, ids, context=None):
        """
            Import Language
            @param cr: the current row, from the database cursor.
            @param uid: the current user’s ID for security checks.
            @param ids: the ID or list of IDs
            @param context: A standard dictionary
        """
        if context is None:
            context = {}
        import_data = self.browse(cr, uid, ids)[0]
        if import_data.overwrite:
            context.update(overwrite=True)
        fileobj = TemporaryFile('w+')
        try:
            fileobj.write(base64.decodestring(import_data.data))

            # now we determine the file format: a CSV export starts with the
            # canonical column header, anything else is gettext .po
            fileobj.seek(0)
            first_line = fileobj.readline().strip().replace('"', '').replace(' ', '')
            fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po'
            fileobj.seek(0)

            tools.trans_load_data(cr, fileobj, fileformat, import_data.code, lang_name=import_data.name, context=context)
        finally:
            # release the temp file even when decoding/loading raises
            # (previously leaked on error)
            fileobj.close()
        return {}
Пример #7
0
    def import_lang(self, cr, uid, ids, context=None):
        """Load the uploaded translation file (.po or .csv) for a language."""
        context = {} if context is None else context
        record = self.browse(cr, uid, ids[0])
        if record.overwrite:
            context = dict(context, overwrite=True)
        buf = TemporaryFile('w+')
        try:
            buf.write(base64.decodestring(record.data))

            # Sniff the format from the header line: a CSV export starts
            # with the canonical column list, anything else is gettext .po.
            buf.seek(0)
            header = buf.readline().strip().replace('"', '').replace(' ', '')
            fileformat = 'csv' if header.endswith("type,name,res_id,src,value") else 'po'
            buf.seek(0)

            tools.trans_load_data(cr, buf, fileformat, record.code,
                                  lang_name=record.name, context=context)
        finally:
            buf.close()
        return True
Пример #8
0
def fpost(
    myHeaders={
        'User-Agent': 'PcGroup Util Client',
        'content-type': 'application/json; charset=UTF-8',
        'Accept-Encoding': '*/*'
    }):
    url = 'http://192.168.12.81:9200/_bulk'
    data = "{ \"index\" : { \"_index\" : \"test2\", \"_type\" : \"log\"} }\n"
    data = data + "{\"ok\":\"a\"," + "\"ok2\":1}\n"
    data = data + "{\"ok\":\"b\"," + "\"ok2\":2}"
    temp = TemporaryFile()

    temp.writelines('{"index" : { "_index" : "test2", "_type" : "log"}}')
    temp.writelines('{"ok":"a","ok2":1}')
    #temp.write('{"ok":"a","ok2":1}')
    #temp.write(data)

    #temp.flush()
    temp.seek(0)
    print temp.readline()
    print temp.readline()
    print temp.mode

    files = {
        'file':
        open('/Users/sky/Documents/company/esManager/pcPython/logs/test.log',
             "rb")
    }

    #files = {'file': temp}

    r = requests.post(url,
                      base64.b64encode(data.encode("utf8")).decode("ascii"),
                      headers=myHeaders)

    print r.request.headers
    print r.headers

    #r = requests.request("POST" , url , files = files, headers = myHeaders)

    print r.text
Пример #9
0
    def addValues(self, values, experiment):
        """Substitute *values* for the protocol variables and replay the
        protocol into *experiment*.

        @param values: replacement strings, one per entry in
            self.variables; on a length mismatch nothing happens
            (original silent-skip behavior preserved).
        @param experiment: receives BEGIN/END comments and each parsed
            protocol line via LineToList.
        """
        if len(self.variables) != len(values):
            return  # arity mismatch: silently skip, as before
        newProtocol = []
        for template in self.info:
            rendered = template
            for variable, value in zip(self.variables, values):
                rendered = rendered.replace(variable, value)
            newProtocol.append(rendered)
        from tempfile import TemporaryFile

        # context manager closes the temp file (it leaked previously)
        with TemporaryFile(mode='r+') as protocolFile:
            protocolFile.writelines(newProtocol)
            protocolFile.seek(0)
            line = protocolFile.readline()
            experiment.addComment('------ BEGIN PROTOCOL ' + self.name + ', variables: ' + ' '.join(self.variables) + '; values: ' + ' '.join(values) + ' ------')
            while line != '':
                splitline = line.split()
                LineToList(splitline, protocolFile, experiment)
                line = protocolFile.readline()
            experiment.addComment('------ END PROTOCOL ' + self.name + ' ------')
Пример #10
0
def disk(passes=10, blocks=1024 * 8):
    """Write dummy data to a temporary file.

    Each pass writes *blocks* one-KiB lines, then reads the file back
    line by line. Defaults reproduce the original workload (10 passes of
    8 MiB each).

    @param passes: number of write-then-read cycles.
    @param blocks: 1 KiB lines written per cycle.
    """
    # 'w+' (text) instead of the default 'w+b' so the str buffer works on
    # both Python 2 and 3; range replaces the Py2-only xrange.
    f = TemporaryFile('w+')
    try:
        buf = '0' * 1023 + '\n'
        for _ in range(passes):
            for _ in range(blocks):
                f.write(buf)
            f.seek(0)
            while f.readline():
                pass
    finally:
        # close even if a write/read raises
        f.close()
Пример #11
0
def disk():
    """Write dummy data to a temporary file."""
    # NOTE(review): xrange is Python 2 only, and TemporaryFile() defaults to
    # binary mode, so the str buffer would need to be bytes on Python 3.
    f = TemporaryFile()
    buf = '0' * 1023 + '\n'  # one 1 KiB line of zeros
    # 10 passes: write 8 MiB (8192 lines), then read the file back fully
    for _ in xrange(10):
        for _ in xrange(1024 * 8):
            f.write(buf)
        f.seek(0)
        while f.readline():
            pass

    f.close()
Пример #12
0
    def _import_lang(self, cr, uid, data, context):
        """Import a translation file uploaded through the wizard form.

        @param cr: database cursor.
        @param uid: current user id (unused here).
        @param data: wizard payload; data['form'] holds 'data' (base64
            file content) plus the language 'code' and 'name'.
        @param context: standard context dictionary (unused here).
        """
        form = data['form']
        fileobj = TemporaryFile('w+')
        try:
            fileobj.write(base64.decodestring(form['data']))

            # now we determine the file format: a CSV export starts with the
            # canonical column header, anything else is gettext .po
            fileobj.seek(0)
            first_line = fileobj.readline().strip().replace('"', '').replace(' ', '')
            fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po'
            fileobj.seek(0)

            tools.trans_load_data(cr.dbname, fileobj, fileformat, form['code'], lang_name=form['name'])
        finally:
            # release the temp file even when decoding/loading raises
            # (previously leaked on error)
            fileobj.close()
        return {}
Пример #13
0
 def import_lang(self, cr, uid, ids, context=None):
     """Load the uploaded .po/.csv translation file for a language."""
     if context is None:
         context = {}
     record = self.browse(cr, uid, ids[0])
     if record.overwrite:
         context = dict(context, overwrite=True)
     handle = TemporaryFile('w+')
     try:
         handle.write(base64.decodestring(record.data))

         # Detect the format from the first line: CSV exports begin with
         # the fixed column header, everything else is treated as .po.
         handle.seek(0)
         header = handle.readline().strip().replace('"', '').replace(' ', '')
         fileformat = 'csv' if header.endswith("type,name,res_id,src,value") else 'po'
         handle.seek(0)

         tools.trans_load_data(cr, handle, fileformat, record.code, lang_name=record.name, context=context)
     finally:
         handle.close()
     return True
Пример #14
0
class S3File(io.IOBase):
    """File like proxy for s3 files, manages upload and download of locally managed temporary file
    """

    def __init__(self, bucket, key, mode='w+b', *args, **kwargs):
        # Open a local temp buffer and, for read/append modes, pre-populate
        # it with the current S3 object's content.
        super(S3File, self).__init__(*args, **kwargs)
        self.bucket = bucket
        self.key = key
        self.mode = mode
        self.path = self.bucket + '/' + self.key

        # converts mode to readable/writable to enable the temporary file to have S3 data
        # read or written to it even if the S3File is read/write/append
        # i.e. "r" => "r+", "ab" => "a+b"
        updatable_mode = re.sub(r'^([rwa]+)(b?)$', r'\1+\2', mode)
        self._tempfile = TemporaryFile(updatable_mode)

        try:
            with s3errors(self.path):
                if 'a' in mode:
                    # File is in an appending mode, start with the content in file
                    s3.Object(bucket, key).download_fileobj(self._tempfile)
                    self.seek(0, os.SEEK_END)
                elif 'a' not in mode and 'w' not in mode and 'x' not in mode:
                    # file is not in a create mode, so it is in read mode
                    # start with the content in the file, and seek to the beginning
                    s3.Object(bucket, key).download_fileobj(self._tempfile)
                    self.seek(0, os.SEEK_SET)
        except Exception:
            # don't leak the temp file if the initial download fails;
            # NOTE(review): close() may also attempt an upload here when the
            # mode is writable -- confirm that is intended on init failure
            self.close()
            raise

    def __enter__(self):
        """Context-manager entry: return self."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context-manager exit: flush to S3 (if writable) and close."""
        self.close()

    def close(self):
        """Upload the buffered content to S3 (when writable), then close.

        NOTE(review): the upload runs whenever the mode is writable, even
        if nothing was written -- confirm this is intended.
        """
        try:
            if self.writable():
                self.seek(0)
                with s3errors(self.path):
                    s3.Object(self.bucket, self.key).upload_fileobj(self._tempfile)
        finally:
            self._tempfile.close()

    @property
    def closed(self):
        # mirrors the state of the backing temp file
        return self._tempfile.closed

    def fileno(self):
        """Return the file descriptor of the local temp file."""
        return self._tempfile.fileno()

    def flush(self):
        """Flush the local temp file (does not push to S3)."""
        return self._tempfile.flush()

    def isatty(self):
        return self._tempfile.isatty()

    def readable(self):
        """True when the S3File mode permits reading."""
        return 'r' in self.mode or '+' in self.mode

    def read(self, n=-1):
        if not self.readable():
            raise IOError('not open for reading')
        return self._tempfile.read(n)

    def readinto(self, b):
        # NOTE(review): unlike read/readline(s), no readable() guard here
        return self._tempfile.readinto(b)

    def readline(self, limit=-1):
        if not self.readable():
            raise IOError('not open for reading')
        return self._tempfile.readline(limit)

    def readlines(self, hint=-1):
        if not self.readable():
            raise IOError('not open for reading')
        return self._tempfile.readlines(hint)

    def seek(self, offset, whence=os.SEEK_SET):
        """Seek the local buffer; return the new absolute position."""
        self._tempfile.seek(offset, whence)
        return self.tell()

    def seekable(self):
        return True

    def tell(self):
        return self._tempfile.tell()

    def writable(self):
        """True when the S3File mode permits writing."""
        return 'w' in self.mode or 'a' in self.mode or '+' in self.mode or 'x' in self.mode

    def write(self, b):
        if not self.writable():
            raise IOError('not open for writing')
        self._tempfile.write(b)
        return len(b)

    def writelines(self, lines):
        if not self.writable():
            raise IOError('not open for writing')
        return self._tempfile.writelines(lines)

    def truncate(self, size=None):
        # defaults to truncating at the current position, like io.IOBase
        if not self.writable():
            raise IOError('not open for writing')

        if size is None:
            size = self.tell()

        self._tempfile.truncate(size)
        return size
        if line:
            element = line.split()[5]
            if element.startswith('/') and 'cgroup' not in element:
                mountpoints.append(element)
    return mountpoints


# Probe each mountpoint for basic read/write health: write a marker string
# to a temp file on the mount and read it back; value 1 signals failure.
for path in get_all_mountpoint():
    rstr = '@8qJnD&Y'
    value = 0
    try:
        fd = TemporaryFile(dir=path)
        try:
            fd.write(rstr)
            fd.flush()
            fd.seek(0)
            content = fd.readline()
        finally:
            # close even when a write/read raises (was leaked on error)
            fd.close()

        if rstr != content:
            value = 1
    except (OSError, IOError):
        # BUGFIX: was "except OSError, IOError:", which (Python 2) bound
        # the caught instance to the name IOError and never caught IOError
        value = 1

    hostname = read_endpoint_value()

    # NOTE(review): record is built but not sent within this fragment --
    # presumably pushed to a monitoring agent further down; TODO confirm.
    record = {}
    record['metric'] = 'sys.disk.rw'
    record['endpoint'] = hostname
    record['timestamp'] = int(time.time())
    record['step'] = 60
    record['value'] = value
  def configure(self, env):
    """Generate Trafodion's sqconfig and traf-cluster-env.sh, install them
    on every node, and run sqgen.

    Side effects only: shells out via Ambari's Execute/File resources and
    assumes passwordless ssh/scp as params.traf_user to every host in
    params.traf_node_list.
    """
    import params

    # generate sqconfig file
    # lscpu prints "CPU(s) N" and "Socket(s) N"; awk takes the second column
    cmd = "lscpu|grep -E '(^CPU\(s\)|^Socket\(s\))'|awk '{print $2}'"
    ofile = TemporaryFile()
    Execute(cmd,stdout=ofile)
    ofile.seek(0) # read from beginning
    core, processor = ofile.read().split('\n')[:2]
    ofile.close()

    # highest usable 0-based core id, capped at 255
    core = int(core)-1 if int(core) <= 256 else 255

    lines = ['begin node\n']
    loc_node_list = []
    for node_id, node in enumerate(params.traf_node_list):
        # find the local hostname for each node
        cmd = "ssh -q %s hostname" % node
        ofile = TemporaryFile()
        Execute(cmd,user=params.traf_user,stdout=ofile)
        ofile.seek(0) # read from beginning
        localhn = ofile.readline().rstrip()
        ofile.close()
        cmd = "ssh %s 'echo success'" % localhn
        Execute(cmd,user=params.traf_user) # verify we can use this hostname to communicate

        line = 'node-id=%s;node-name=%s;cores=0-%d;processors=%s;roles=connection,aggregation,storage\n' \
                 % (node_id, localhn, core, processor)
        lines.append(line)
        loc_node_list.append(localhn)

    lines.append('end node\n')
    lines.append('\n')
    lines.append('begin overflow\n')
    # one scratch ("overflow") disk entry per configured location
    for scratch_loc in params.traf_scratch.split(','):
        line = 'hdd %s\n' % scratch_loc
        lines.append(line)
    lines.append('end overflow\n')

    # write sqconfig in trafodion home dir
    trafhome = os.path.expanduser("~" + params.traf_user)
    File(os.path.join(trafhome,"sqconfig"),
         owner = params.traf_user,
         group = params.traf_user,
         content=''.join(lines),
         mode=0644)

    # install sqconfig
    Execute('source ~/.bashrc ; mv -f ~/sqconfig $TRAF_HOME/sql/scripts/',user=params.traf_user)

    # write cluster-env in trafodion home dir
    traf_nodes = ' '.join(loc_node_list)
    traf_w_nodes = '-w ' + ' -w '.join(loc_node_list)
    traf_node_count = len(loc_node_list)
    if traf_node_count != len(params.traf_node_list):
      print "Error cannot determine local hostname for all Trafodion nodes"
      exit(1)

    cl_env_temp = os.path.join(trafhome,"traf-cluster-env.sh")
    File(cl_env_temp,
         owner = params.traf_user,
         group = params.traf_user,
         content=InlineTemplate(params.traf_clust_template,
                                traf_nodes=traf_nodes,
                                traf_w_nodes=traf_w_nodes,
                                traf_node_count=traf_node_count,
                                cluster_name=params.cluster_name),
         mode=0644)

    # install cluster-env on all nodes
    for node in params.traf_node_list:
        cmd = "scp %s %s:%s/" % (cl_env_temp, node, params.traf_conf_dir)
        Execute(cmd,user=params.traf_user)
    cmd = "rm -f %s" % (cl_env_temp)
    Execute(cmd,user=params.traf_user)

    # Execute SQ gen
    Execute('source ~/.bashrc ; rm -f $TRAF_HOME/sql/scripts/sqconfig.db; sqgen',user=params.traf_user)
    def configure(self, env):
        """Generate Trafodion's sqconfig and traf-cluster-env.sh, install
        them across the cluster, and run sqgen.

        Side effects only: shells out via Ambari's Execute/File resources
        and assumes passwordless ssh/scp as params.traf_user to every host
        in params.traf_node_list.
        """
        import params

        # generate sqconfig file
        # lscpu prints CPU(s)/Socket(s) counts; awk takes the second column
        cmd = "lscpu|grep -E '(^CPU\(s\)|^Socket\(s\))'|awk '{print $2}'"
        ofile = TemporaryFile()
        Execute(cmd, stdout=ofile)
        ofile.seek(0)  # read from beginning
        core, processor = ofile.read().split('\n')[:2]
        ofile.close()

        # highest usable 0-based core id, capped at 255
        core = int(core) - 1 if int(core) <= 256 else 255

        lines = ['begin node\n']
        loc_node_list = []
        for node_id, node in enumerate(params.traf_node_list):
            # find the local hostname for each node
            cmd = "ssh -q %s hostname" % node
            ofile = TemporaryFile()
            Execute(cmd, user=params.traf_user, stdout=ofile)
            ofile.seek(0)  # read from beginning
            localhn = ofile.readline().rstrip()
            ofile.close()
            cmd = "ssh %s 'echo success'" % localhn
            Execute(cmd, user=params.traf_user
                    )  # verify we can use this hostname to communicate

            line = 'node-id=%s;node-name=%s;cores=0-%d;processors=%s;roles=connection,aggregation,storage\n' \
                     % (node_id, localhn, core, processor)
            lines.append(line)
            loc_node_list.append(localhn)

        lines.append('end node\n')
        lines.append('\n')
        lines.append('begin overflow\n')
        # one scratch ("overflow") disk entry per configured location
        for scratch_loc in params.traf_scratch.split(','):
            line = 'hdd %s\n' % scratch_loc
            lines.append(line)
        lines.append('end overflow\n')

        # write sqconfig in trafodion home dir
        trafhome = os.path.expanduser("~" + params.traf_user)
        File(os.path.join(trafhome, "sqconfig"),
             owner=params.traf_user,
             group=params.traf_user,
             content=''.join(lines),
             mode=0644)

        # install sqconfig
        Execute('source ~/.bashrc ; mv -f ~/sqconfig $TRAF_HOME/sql/scripts/',
                user=params.traf_user)

        # write cluster-env in trafodion home dir
        traf_nodes = ' '.join(loc_node_list)
        traf_w_nodes = '-w ' + ' -w '.join(loc_node_list)
        traf_node_count = len(loc_node_list)
        if traf_node_count != len(params.traf_node_list):
            print "Error cannot determine local hostname for all Trafodion nodes"
            exit(1)

        cl_env_temp = os.path.join(trafhome, "traf-cluster-env.sh")
        File(cl_env_temp,
             owner=params.traf_user,
             group=params.traf_user,
             content=InlineTemplate(params.traf_clust_template,
                                    traf_nodes=traf_nodes,
                                    traf_w_nodes=traf_w_nodes,
                                    traf_node_count=traf_node_count,
                                    cluster_name=params.cluster_name),
             mode=0644)

        # install cluster-env on all nodes
        for node in params.traf_node_list:
            cmd = "scp %s %s:%s/" % (cl_env_temp, node, params.traf_conf_dir)
            Execute(cmd, user=params.traf_user)
        cmd = "rm -f %s" % (cl_env_temp)
        Execute(cmd, user=params.traf_user)

        # Execute SQ gen
        Execute(
            'source ~/.bashrc ; rm -f $TRAF_HOME/sql/scripts/sqconfig.db; sqgen',
            user=params.traf_user)
Пример #18
0
        if line:
            element = line.split()[5]
            if element.startswith('/'):
                mountpoints.append(element)
    return mountpoints


for path in get_all_mountpoint():
    rstr = '@8qJnD&Y'
    value = 0
    try:
        fd = TemporaryFile(dir=path)
        fd.write(rstr)
        fd.flush()
        fd.seek(0)
        content = fd.readline()
        fd.close()

        if rstr != content:
            value = 1
    except OSError, IOError:
        value = 1

    record = {}
    record['metric'] = 'sys.disk.rw'
    record['endpoint'] = os.uname()[1]
    record['timestamp'] = int(time.time())
    record['step'] = 60
    record['value'] = value
    record['counterType'] = 'GAUGE'
    record['tags'] = 'mount=%s' % path