示例#1
0
def _get_bat_status_win():
    """Query battery state and charge on Windows via ``wmic``.

    Returns a ``(status, charge)`` tuple where ``status`` is
    ``powerline.discharge``/``powerline.charge`` and ``charge`` is the
    remaining charge percentage.  Raises ``BatteryException`` when the
    machine has no battery or the wmic output cannot be parsed.
    """
    # Cheap probe first: wmic writes "No Instance(s)..." to stderr on
    # battery-less machines.
    probe = Popen('wmic path win32_battery get batterystatus',
                  shell=True, stdout=PIPE, stderr=PIPE)
    stderr_line = probe.stderr.readline().decode('utf8')
    if "No Instance" in stderr_line:
        raise BatteryException("No battery in system")

    def _last_int(command):
        # Run `command` and parse the final line of its stdout as an int.
        text = Popen(command, shell=True,
                     stdout=PIPE).stdout.read().decode('utf8').strip()
        return int(text.split('\n')[-1])

    try:
        state = _last_int('wmic path win32_battery get batterystatus')
        charge = _last_int('wmic path win32_battery get estimatedchargeremaining')
    except ValueError:
        # One of the commands didn't yield an int; probably no battery
        # after all (e.g. a desktop machine).
        raise BatteryException("Couldn't parse charge state")

    status = powerline.discharge if state == 1 else powerline.charge

    return (status, charge)
示例#2
0
def system_info():
    """Collect version strings of the external tools this project shells out to.

    Returns a list of ``(tool_name, version_line)`` tuples for pdfimages,
    tesseract, hocr2pdf (when detectable), gs, identify and convert.
    The first three print their version to stderr.
    """
    deps = []

    pdfimages = Popen(['pdfimages', '-v'], stderr=PIPE).communicate()[1]
    deps.append(('pdfimages', pdfimages.split('\n')[0]))

    tesseract = Popen(['tesseract', '-v'], stderr=PIPE).communicate()[1]
    deps.append(('tesseract', tesseract.split('\n')[0]))

    # hocr2pdf doesn't support a --version flag but prints the version to
    # stderr if called without arguments
    hocr2pdf = Popen(['hocr2pdf', '--help'], stdout=PIPE, stderr=PIPE)
    hocr2pdf = hocr2pdf.communicate()[1]
    # BUG FIX: iterating the raw output string walked it character by
    # character, so 'version' could never match a single char and the
    # hocr2pdf entry was never appended.  Split into lines first.
    version_lines = [line for line in hocr2pdf.split('\n') if 'version' in line]
    if version_lines:
        deps.append(('hocr2pdf', version_lines[0]))

    gs = check_output(['gs', '--version']).split('\n')[0]
    deps.append(('gs', gs))

    identify = check_output(['identify', '--version']).split('\n')[0]
    deps.append(('identify', identify))

    convert = check_output(['convert', '--version']).split('\n')[0]
    deps.append(('convert', convert))

    return deps
示例#3
0
 def test_ignoring(self):
     """
     Tests for ignoring invalid files.
     """
     result = Popen(['do_cleanup', '1_data/1'], stdout=PIPE, cwd=test_folder).communicate()[0]
     # Split once instead of once per assertion.
     lines = result.split('\n')
     self.assertTrue(lines[0].startswith('Ignoring file '), 'First file should be ignored')
     # assertEqual: assertEquals is a deprecated alias.
     self.assertEqual(len(lines), 11, 'Invalid message printed')
示例#4
0
 def test_clean(self):
     """
     Tests for clean directory.
     """
     result = Popen(['do_cleanup', '2_data/2'], stdout=PIPE, cwd=test_folder).communicate()[0]
     # Split once instead of once per assertion.
     lines = result.split('\n')
     # assertEqual: assertEquals is a deprecated alias.
     self.assertEqual(lines[0], 'Directory 2_data/2 clean', 'Clean directory reported as dirty')
     self.assertEqual(len(lines), 2, 'Invalid message printed')
示例#5
0
def find_cal(obs_id):
    """Return the path to a matching ``*.cal`` calibration file for ``obs_id``.

    Runs the external ``find_calibrator.py`` helper to pick a recommended
    calibrator observation, then scans /short/ek6/CALS/ for a file whose
    first 10 characters match that calibrator id.  Returns None when no
    file is found.  NOTE: Python 2 code (print statements).
    """
    # Keep only the first line in case obs_id arrives with trailing newlines.
    obs_id = obs_id.split('\n')[0]
    obs_id = obs_id.strip()
    cal_pref = 'HydA'
    # Hard-coded off: the --source preference branch is currently disabled.
    do_cal_pref = False
    if do_cal_pref:
       cal_output = Popen(["python", "/short/ek6/MWA_Code/MWA_extras/find_calibrator.py", "-v","--matchproject","--source="+cal_pref,  str(obs_id)], stdout=PIPE).communicate()[0]
    else:
       cal_output = Popen(["python", "/short/ek6/MWA_Code/MWA_extras/find_calibrator.py", "-v","--matchproject", str(obs_id)], stdout=PIPE).communicate()[0]
    try:
       # Tokens 7 and 10 of the helper's output hold the calibrator id and
       # name.  NOTE(review): assumes a fixed output layout, and the bare
       # except below hides any other failure — confirm against
       # find_calibrator.py.
       cal_id = cal_output.split()[7]
       cal_name = cal_output.split()[10]
       print str(obs_id)+": recommended cal is "+str(cal_id)+' = '+str(cal_name)
    except:
       print str(obs_id)+": cannot find suitable cal "
       cal_id=None
       pass;
    # Scan the calibration directory for a file starting with cal_id.
    os.chdir('/short/ek6/CALS/')
    cal_num = None
    return_cal = None
    for cal in glob.glob('*.cal'):
        # Calibration files are expected to begin with a 10-char obs id.
        cal_num = cal[0:10]
        if cal_id == cal_num:
           print "Found calibration file "+cal
           return_cal = '/short/ek6/CALS/'+cal
    if return_cal == None:
           print "No calibrator file found, please generate it"
    return return_cal
    def get_binetflowinfos(self):
        """Populate ``self.binetflowinfo`` for binetflow files.

        Computes the capture duration (timestamp of the last flow minus
        timestamp of the first flow) and the total line count by shelling
        out to head/tail/wc.  The dict is built only while
        ``binetflowinfo`` is still False, so results are cached.
        Always returns True.
        """
        if self.binetflowinfo == False and self.get_type() == 'binetflow':
            # Get the time in the first line, ignoring the header.
            # NOTE(review): the file name is interpolated into a shell
            # command — names containing spaces or shell metacharacters
            # will break (or abuse) these pipelines.
            binetflow_first_flow = Popen('head -n 2 '+self.get_name()+'|tail -n 1', shell=True, stdin=PIPE, stdout=PIPE).communicate()[0]
            # First comma-separated column is assumed to be the timestamp.
            first_flow_date = parser.parse(binetflow_first_flow.split(',')[0])

            # Get the time in the last line
            binetflow_last_flow = Popen('tail -n 1 '+self.get_name(), shell=True, stdin=PIPE, stdout=PIPE).communicate()[0]
            last_flow_date = parser.parse(binetflow_last_flow.split(',')[0])

            # Compute the difference
            time_diff = last_flow_date - first_flow_date
            self.set_duration(time_diff)

            # Now fill the data for binetflows
            self.binetflowinfo = {}
            # Duration
            self.binetflowinfo['Duration'] = self.get_duration()

            # Amount of flows: first field of `wc -l` output (the count).
            amount_of_flows = Popen('wc -l '+self.get_name(), shell=True, stdin=PIPE, stdout=PIPE).communicate()[0].split()[0]
            self.binetflowinfo['Amount of flows'] = amount_of_flows


        # Always return true
        return True
示例#7
0
文件: boxplot.py 项目: afh/pdfledger
def main(output_loc, parameters):
    """Render a boxplot of this year's monthly expenses per account.

    ``parameters`` holds extra ledger CLI arguments and is treated as
    read-only.  The finished figure is written to
    ``<output_loc>budgetboxplot.pdf``.
    """
    moneyFmt = ticker.FuncFormatter(price)
    # Arguments for pulling per-month amounts for a single account.
    data_parameters = ['-F', '%(amount)\n', '-E', '--budget', '-p', 'this year', '-d', 'd < [this month]', '-M', 'reg'] + parameters
    # BUG FIX: build a new list instead of `parameters += [...]`, which
    # mutated the list object passed in by the caller.
    balance_parameters = parameters + ['-F', '%(account)\n', '-p', 'this month', '--flat', '--no-total', '--budget', '-M', 'bal', '^exp']

    output = Popen(["ledger"] + balance_parameters, stdout=PIPE).communicate()[0]
    accounts = [acct for acct in output.split('\n') if acct != ""]

    data = []
    labels = []
    for acct in accounts:
        output = Popen(["ledger"] + data_parameters + ["^" + acct], stdout=PIPE).communicate()[0]
        values = []
        for value in output.split('\n'):
            if value == "":
                continue
            # Strip the currency symbol before converting to float.
            value = value.replace('$', '')
            value = float(value.strip())
            values.append(value)
        data.append(values)
        labels.append(acct.split(':')[-1])

    fig = plt.figure()
    ax = fig.add_subplot(111)

    boxplot(data)

    title('Boxplot of expenses by month this year')
    ax.yaxis.set_major_formatter(moneyFmt)
    ax.format_ydata = price

    fig.autofmt_xdate()
    ax.set_xticklabels(labels)

    savefig(output_loc+"budgetboxplot.pdf")
示例#8
0
 def test_help(self):
     """
     Tests for printing usage msg if -h is given
     """
     result = Popen(['do_cleanup', '-rh'], stdout=PIPE).communicate()[0]
     # Split once instead of once per assertion.
     lines = result.split('\n')
     # assertEqual: assertEquals is a deprecated alias.
     self.assertEqual(lines[0], 'Usage:', 'No help printed')
     self.assertEqual(len(lines), 10, 'Invalid message printed')
示例#9
0
 def test_nofiles(self):
     """
     Tests for directories with no repozos files
     """
     result = Popen(['do_cleanup', '1_data'], stdout=PIPE, cwd=test_folder).communicate()[0]
     # Split once instead of once per assertion.
     lines = result.split('\n')
     # assertEqual: assertEquals is a deprecated alias.
     self.assertEqual(lines[0], 'Directory 1_data contains no repozos files')
     self.assertEqual(len(lines), 2, 'Invalid message printed')
示例#10
0
 def system_driver(self,sysname):
     """Run TINKER on system ``sysname`` and return (energy, rmsd).

     Optionally geometry-optimizes first (when sys_opts enables it), then
     reads the total potential energy from `analyze` output.  Energy is
     returned in kilocalories_per_mole, rmsd in angstrom.
     NOTE: Python 2 code (print statements).
     """
     sysopt = self.sys_opts[sysname]
     rmsd = 0.0
     # This line actually runs TINKER
     xyzfnm = sysname+".xyz"
     if 'optimize' in sysopt and sysopt['optimize'] == True:
         # Both branches currently run the identical optimizer command;
         # presumably the split is kept for rigid-water-specific key files
         # (see the commented cp) — TODO confirm.
         if self.FF.rigid_water:
             #os.system("cp rigid.key %s" % os.path.splitext(xyzfnm)[0] + ".key")
             o, e = Popen(["./%s" % self.optprog,xyzfnm,"1e-4"],stdout=PIPE,stderr=PIPE).communicate()
         else:
             o, e = Popen(["./%s" % self.optprog,xyzfnm,"1e-4"],stdout=PIPE,stderr=PIPE).communicate()
         # Scan the optimizer log for a normal-termination marker.
         cnvgd = 0
         for line in o.split('\n'):
             if "Normal Termination" in line:
                 cnvgd = 1
         if not cnvgd:
             print o
             print "The system %s did not converge in the geometry optimization - printout is above." % sysname
             #warn_press_key("The system %s did not converge in the geometry optimization" % sysname)
         # Analyze the optimized coordinates (written as <name>.xyz_2).
         o, e = Popen(["./analyze",xyzfnm+'_2',"E"],stdout=PIPE,stderr=PIPE).communicate()
         if self.FF.rigid_water:
             oo, ee = Popen(['./superpose', xyzfnm, xyzfnm+'_2', '1', 'y', 'u', 'n', '0'], stdout=PIPE, stderr=PIPE).communicate()
         else:
             oo, ee = Popen(['./superpose', xyzfnm, xyzfnm+'_2', '1', 'y', 'u', 'n', '0'], stdout=PIPE, stderr=PIPE).communicate()
         # Extract the RMSD between original and optimized geometries.
         for line in oo.split('\n'):
             if "Root Mean Square Distance" in line:
                 rmsd = float(line.split()[-1])
         os.system("rm %s" % xyzfnm+'_2')
     else:
         o, e = Popen(["./analyze",xyzfnm,"E"],stdout=PIPE,stderr=PIPE).communicate()
     # Read the TINKER output. 
     for line in o.split('\n'):
         if "Total Potential Energy" in line:
             # TINKER prints Fortran-style exponents ('D'); convert to 'e'.
             return float(line.split()[-2].replace('D','e')) * kilocalories_per_mole, rmsd * angstrom
     warn_press_key("Total potential energy wasn't encountered for system %s!" % sysname)
示例#11
0
 def test_rec(self):
     """
     Tests for recursive option handling.
     """
     result = Popen(['do_cleanup', '-r', '1_data'], stdout=PIPE, cwd=test_folder).communicate()[0]
     # Split once instead of once per assertion.
     lines = result.split('\n')
     # assertEqual: assertEquals is a deprecated alias.
     self.assertEqual(lines[0], 'Directory 1_data contains no repozos files')
     self.assertEqual(len(lines), 12, 'Invalid message printed')
示例#12
0
 def test_noargs(self):
     """
     Tests for printing usage msg if no args are given
     """
     result = Popen(['do_cleanup'], stdout=PIPE).communicate()[0]
     # Split once instead of once per assertion.
     lines = result.split('\n')
     # assertEqual: assertEquals is a deprecated alias.
     self.assertEqual(lines[0], 'Usage:', 'No help printed')
     self.assertEqual(len(lines), 10)
def find_limit(value,clevel):
    """Run `combine` at coupling ``value`` and compare CLs+b to 1-clevel.

    Caches results in the module-level ``val_cache`` so repeated calls with
    the same ``value`` skip the (slow) toy generation.  Returns a tuple
    ``(above_alpha, [central_val, error])``.
    NOTE: Python 2 code (print statement).
    """
    alpha = 1-clevel
    if value not in val_cache:
        combine_args = ['combine',
                        'Example_AQGC2_workspace.root',
                        '-M','HybridNew',
                        '--freq',
                        '--testStat','PL',
                        '--rule','CLsplusb',
                        '--toysH','500',
                        '--clsAcc','0.002',                        
                        '--singlePoint','a0W=0,aCW=%e'%value]
        
        result = Popen(combine_args,stdout=PIPE,stderr=PIPE).communicate()[0]
        # NOTE(review): assumes the third-from-last stdout line looks like
        # "... = <central> +/- <error>" — brittle against combine version
        # changes; confirm against the combine tool's output format.
        result = result.split('\n')[-3]
        result = result.split('=')[-1]
        result = result.split('+/-')
        central_val = float(result[0])
        error       = float(result[1])
        val_cache[value] = [central_val,error]    
       
    print 'limit at %e: %f +/- %e'%(value,
                                    val_cache[value][0],
                                    val_cache[value][1])
    return val_cache[value][0] >= alpha, val_cache[value]
示例#14
0
	def genPackageDict(self):
		"""Build ``self.packageDict`` mapping each package entry to the two
		size tokens reported by `pacman -Si` (fields 3 and 8 of the
		grep-filtered "Size" lines)."""
		self.packageDict = {}
		self.totalSize = 0
		for pkg in self.packageList:
			if not pkg:
				continue
			# pacman -Si <name> piped through grep to keep only Size lines.
			pacman_proc = Popen(['pacman', '-Si', pkg.split()[0]], stdout=PIPE)
			size_output = Popen(['grep', 'Size'], stdin=pacman_proc.stdout,
			                    stdout=PIPE).communicate()[0]
			fields = size_output.split()
			self.packageDict[pkg] = [fields[3], fields[8]]
def execute_files(cl_argument=""):
    """Run the generated Python (tempB.py) and C programs with the same
    command-line argument and return both stdouts as lists of lines.

    Relies on the module-level ``c_file`` name for the compiled C binary.
    NOTE: Python 2 code (print statements).
    """
    from subprocess import Popen, PIPE
    # 5. call python version
    print "running python version: "
    python_result = Popen("python tempB.py "+cl_argument, shell=True, stdout=PIPE).stdout.read()
    print "running c version: "
    c_result = Popen("./"+c_file+" "+cl_argument, shell=True, stdout=PIPE).stdout.read()
    return python_result.split('\n'), c_result.split('\n')
示例#16
0
    def test_rosnode(self):
        """Integration test for the `rosnode` CLI: list / list -a /
        list -u / info, checked against a small talker/listener graph."""
        topics = ['/chatter', '/foo/chatter', '/bar/chatter']
        
        # wait for network to initialize
        rospy.init_node('test')
        nodes = ['/talker', '/foo/talker', '/bar/talker', rospy.get_caller_id()]
        
        for i, t in enumerate(topics):
            rospy.Subscriber(t, std_msgs.msg.String, self.callback, i)
        all = set(range(0, len(topics)))

        # Poll until every subscriber has received something (or 10s pass).
        timeout_t = time.time() + 10.
        while time.time() < timeout_t and self.vals != all:
            time.sleep(0.1)
        self.assertEquals(self.vals, all, "failed to initialize graph correctly")
            

        # network is initialized
        cmd = 'rosnode'

        # list
        # - we aren't matching against the core services as those can make the test suites brittle
        output = Popen([cmd, 'list'], stdout=PIPE).communicate()[0]
        output = output.decode()
        l = set(output.split())
        for t in nodes:
            self.assert_(t in l, "%s not in %s"%(t, l))

        # `list -a` prints address info; each node must appear as a substring.
        output = Popen([cmd, 'list', '-a'], stdout=PIPE).communicate()[0]
        output = output.decode()
        l = set(output.split())
        for t in nodes:
            for e in l:
                if t in e:
                    break
            else:
                self.fail("did not find [%s] in list [%s]"%(t, l))

        # `list -u` prints node URIs only.
        output = Popen([cmd, 'list', '-u'], stdout=PIPE).communicate()[0]
        output = output.decode() 
        l = set(output.split())
        self.assert_(len(l), "list -u is empty")
        for e in l:
            self.assert_(e.startswith('http://'))

        for name in nodes:
            # type
            output = Popen([cmd, 'info', name], stdout=PIPE).communicate()[0]
            output = output.decode()
            # not really validating output as much as making sure it's not broken
            self.assert_(name in output)
            self.assert_('chatter' in output)
            self.assert_('Publications' in output)
            self.assert_('Subscriptions' in output)                        

            # Dead code: ping check disabled.
            if 0:
                #ping
                stdout, stderr = run_for([cmd, 'ping', name], 3.)
示例#17
0
    def moments_driver(self):
        """Run TINKER's analyze (and optionally polarize) and parse moments.

        Returns an OrderedDict with 'dipole' and 'quadrupole' entries (and
        'polarizability' when the reference moments include one), each an
        OrderedDict of tensor components parsed from the text output.
        """
        # This line actually runs TINKER
        if self.optimize_geometry:
            o, e = Popen(["./optimize","input.xyz","1.0e-6"],stdout=PIPE,stderr=PIPE).communicate()
            o, e = Popen(["./analyze","input.xyz_2","M"],stdout=PIPE,stderr=PIPE).communicate()
        else:
            o, e = Popen(["./analyze","input.xyz","M"],stdout=PIPE,stderr=PIPE).communicate()
        # Read the TINKER output.
        # qn remembers the line index of the quadrupole header so the two
        # following lines can be parsed relative to it; ln counts lines.
        qn = -1
        ln = 0
        for line in o.split('\n'):
            s = line.split()
            if "Dipole X,Y,Z-Components" in line:
                dipole_dict = OrderedDict(zip(['x','y','z'], [float(i) for i in s[-3:]]))
            elif "Quadrupole Moment Tensor" in line:
                qn = ln
                quadrupole_dict = OrderedDict([('xx',float(s[-3]))])
            elif qn > 0 and ln == qn + 1:
                quadrupole_dict['xy'] = float(s[-3])
                quadrupole_dict['yy'] = float(s[-2])
            elif qn > 0 and ln == qn + 2:
                quadrupole_dict['xz'] = float(s[-3])
                quadrupole_dict['yz'] = float(s[-2])
                quadrupole_dict['zz'] = float(s[-1])
            ln += 1

        calc_moments = OrderedDict([('dipole', dipole_dict), ('quadrupole', quadrupole_dict)])

        if 'polarizability' in self.ref_moments:
            if self.optimize_geometry:
                o, e = Popen(["./polarize","input.xyz_2"],stdout=PIPE,stderr=PIPE).communicate()
            else:
                o, e = Popen(["./polarize","input.xyz"],stdout=PIPE,stderr=PIPE).communicate()
            # Read the TINKER output.
            # Same header-relative parsing scheme: pn marks the tensor
            # header line, rows follow at fixed offsets.
            pn = -1
            ln = 0
            polarizability_dict = OrderedDict()
            for line in o.split('\n'):
                s = line.split()
                if "Molecular Polarizability Tensor" in line:
                    pn = ln
                elif pn > 0 and ln == pn + 2:
                    polarizability_dict['xx'] = float(s[-3])
                    polarizability_dict['yx'] = float(s[-2])
                    polarizability_dict['zx'] = float(s[-1])
                elif pn > 0 and ln == pn + 3:
                    polarizability_dict['xy'] = float(s[-3])
                    polarizability_dict['yy'] = float(s[-2])
                    polarizability_dict['zy'] = float(s[-1])
                elif pn > 0 and ln == pn + 4:
                    polarizability_dict['xz'] = float(s[-3])
                    polarizability_dict['yz'] = float(s[-2])
                    polarizability_dict['zz'] = float(s[-1])
                ln += 1
            calc_moments['polarizability'] = polarizability_dict

        # Clean up TINKER's intermediate/backup files.
        os.system("rm -rf *_* *[0-9][0-9][0-9]*")
        return calc_moments
示例#18
0
 def test_delete(self):
     """
     Tests for deletion (only dry run) of files.
     """
     result = Popen(['do_cleanup', '1_data/1'], stdout=PIPE, cwd=test_folder).communicate()[0]
     # Split once instead of once per use.
     lines = result.split('\n')
     for f in lines[1:]:
         if f:
             self.assertTrue(f.startswith('Will delete '), 'Will not delete deleteable file')
     # assertEqual: assertEquals is a deprecated alias.
     self.assertEqual(len(lines), 11)#, 'Invalid message printed')
示例#19
0
    def remote_origin(self, repo_tmp):
        """Return the URL of the first `git remote -v` entry for the
        repository at ``repo_tmp``.
        """
        # FIX: run git directly in the target directory via Popen(cwd=...)
        # instead of the os.chdir() dance, which left the process in the
        # wrong working directory if Popen raised and was not thread-safe.
        status = Popen(['git', 'remote', '-v'], stdout=PIPE,
                       cwd=repo_tmp).communicate()[0]
        # First line looks like "origin\t<url> (fetch)" — extract the URL.
        first_line = status.split('\n')[0]
        url = first_line.split('\t')[1].split()[0]
        return url
示例#20
0
    def remote_origin(self, repo_tmp):
        """Return the URL of the first `git remote -v` entry for the
        repository at ``repo_tmp``.
        """
        # FIX: run git directly in the target directory via Popen(cwd=...)
        # instead of the os.chdir() dance, which left the process in the
        # wrong working directory if Popen raised and was not thread-safe.
        status = Popen(["git", "remote", "-v"], stdout=PIPE,
                       cwd=repo_tmp).communicate()[0]
        # First line looks like "origin\t<url> (fetch)" — extract the URL.
        first_line = status.split("\n")[0]
        url = first_line.split("\t")[1].split()[0]
        return url
示例#21
0
def fs_display(mount=""):
    """Print usage for the filesystem mounted at ``mount`` via `df -Ph`.

    Emits "<Name>  <used> / <total>" through the module's output()
    helper; the root mount is labelled "Root".
    """
    df_out = Popen(["df", "-Ph", mount], stdout=PIPE).communicate()[0]
    # FIX: parse the df output once (the original re-split it for every
    # field).  The second non-empty line is the data row:
    # filesystem, size, used, avail, capacity, mounted-on.
    fields = [line for line in df_out.split("\n") if line][1].split()
    used = fields[2]
    total = fields[1]
    if mount == "/":
        mount = "/root"
    fs = mount.rpartition("/")[2].title()
    part = "%s / %s" % (used, total)
    output(fs, part)
示例#22
0
def fs_display(mount=''):
	"""Print usage and filesystem type for ``mount`` via `df -TPh`.

	Emits "<Name> FS  <used> / <total> (<fstype>)" through the module's
	output() helper; the root mount is labelled "Root".
	"""
	df_out = Popen(['df', '-TPh', mount], stdout=PIPE).communicate()[0]
	# FIX: parse the df output once.  The second non-empty line is the data
	# row: filesystem, type, size, used, avail, capacity, mounted-on.
	fields = [line for line in df_out.split('\n') if line][1].split()
	used = fields[3]
	total = fields[2]
	# Renamed from `type`, which shadowed the builtin.
	fs_type = fields[1]
	if mount == '/':
		mount = '/root'
	fs = mount.rpartition('/')[2].title() + " FS"
	part = '%s / %s (%s)' % (used, total, fs_type)
	# BUG FIX: the original final line mixed spaces and tabs, which raises
	# TabError under Python 3.
	output(fs, part)
示例#23
0
def get_purch2(lig_name):
    """Look up ZINC15 purchasability for a ligand name containing a ZINC id.

    Extracts the digit-led id substring of ``lig_name`` (up to index 14)
    and queries zinc15.docking.org with curl.  Returns the purchasability
    token, or 'UNKNOWN' when no id is present or the reply is malformed.
    """
    import re
    from subprocess import Popen, PIPE
    match = re.search('[1-9]', lig_name)
    # BUG FIX: re.search returns None when no non-zero digit is present;
    # the original crashed with AttributeError instead of reporting UNKNOWN.
    if match is None:
        return 'UNKNOWN'
    zinc_id = lig_name[match.start():14]
    pre_cmd = 'curl http://zinc15.docking.org/substances.txt:zinc_id+smiles+purchasability -F zinc_id-in='+zinc_id+' -F count=all'
    cmd = pre_cmd.split()
    output, error = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate()
    # Expect exactly "<id> <smiles> <purchasability>".
    if len(output.split()) == 3:
        return output.split()[2]
    else:
        return 'UNKNOWN'
示例#24
0
def get_zonation_info():
    '''Retrieve the Zonation version as a tuple of number strings.

    NOTE: Zonation (the ``zig3`` binary) must be in PATH.

    @return tuple Zonation version number
    '''
    raw = Popen(['zig3', '-v'], stdout=PIPE).communicate()[0]
    # First output line looks like "<label>: <major>.<minor>...."; keep the
    # part after the colon and split it on dots.
    first_line = raw.split('\n')[0].strip()
    version_text = first_line.split(':')[1].strip()
    return tuple(version_text.split('.'))
示例#25
0
def run(program, program_name, prevout):
    """Write ``program`` to ``program_name``, execute it with gunderscript,
    and echo any output not already shown.

    Returns ``(False, False)`` on a compiler error, otherwise
    ``(True, full_output)`` where full_output is the interpreter output with
    its 3-line header and 2-line footer stripped.  ``prevout`` is the output
    already printed in earlier calls; only the new suffix is printed.
    """
    # FIX: `with` guarantees the file is flushed and closed before the
    # interpreter reads it, even if write() raises (the original used
    # explicit open/write/close).
    with open(program_name, "w") as program_file:
        program_file.write(program)
    output = Popen(["./gunderscript", "main", program_name], stdout = PIPE).stdout.read()
    # Hoisted: the original recomputed this join for the return value.
    full_out = "\n".join(output.split("\n")[3:-2])
    realout = full_out
    if prevout == realout[:len(prevout)]:
        realout = realout[len(prevout):]
    if "Compiler Error" in output:
        print("\n".join(output.split("\n")[5:]))
        return (False, False)
    elif len(realout) > 0:
        print(realout)
    return (True, full_out)
示例#26
0
def structureAnalysis(structure_file, propertyOfInterest="ss"):
    '''
        Given a structure file, return the positions that are either
        single stranded "ss" (ss-count == 1) or double stranded "ds"
        (ss-count == 0).  Any other property yields []; a missing .ct
        file yields None (implicit return, preserved from the original).
    '''

    if path.exists("tmp/structures/"+structure_file+".ct"):
        # ss-count.pl prints "<position> <count>" lines; the awk filter
        # keeps the positions whose count matches the requested
        # strandedness.  The two original branches differed only in this
        # flag, so they are merged.
        flags = {"ds": "0", "ss": "1"}
        if propertyOfInterest not in flags:
            return []
        output = Popen("perl 3rdParty/unafold/ss-count.pl tmp/structures/" + structure_file + ".ct | awk /[[:digit:]][[:blank:]]" + flags[propertyOfInterest] + "/'{print $1}'", stdout=PIPE, shell=True).stdout.read()
        # SECURITY/IDIOM FIX: parse positions with int() instead of
        # eval() on subprocess output.
        return [int(k) for k in output.split()]
示例#27
0
    def test_cmd_list(self):
        """Smoke-test `rosmsg list` and `rossrv list` output membership."""
        # Message types: the msg names must appear, the srv names must not.
        msg_out = Popen([os.path.join(_SCRIPT_FOLDER,'rosmsg'), 'list'], stdout=PIPE).communicate()[0]
        msg_list = [entry.strip() for entry in msg_out.split('\n') if entry.strip()]
        for expected in ['std_msgs/String', 'test_rosmaster/Floats']:
            self.assert_(expected in msg_list)
        for unexpected in ['std_srvs/Empty', 'roscpp/Empty']:
            self.assert_(unexpected not in msg_list)

        # Service types: the inverse expectation.
        srv_out = Popen([os.path.join(_SCRIPT_FOLDER,'rossrv'), 'list'], stdout=PIPE).communicate()[0]
        srv_list = [entry.strip() for entry in srv_out.split('\n') if entry.strip()]
        for expected in ['std_srvs/Empty', 'roscpp/Empty']:
            self.assert_(expected in srv_list)
        for unexpected in ['std_msgs/String', 'test_rosmaster/Floats']:
            self.assert_(unexpected not in srv_list)
示例#28
0
def get_list():
    """
    Get the list of jobs running, with maximum amount of information :)

    Parses the tabular output of the module-level CMD: the first row
    supplies column headers, each subsequent row becomes a
    {header: value} dict keyed by the job id in column 2.
    """
    table = Popen(CMD, shell=True, stdout=PIPE).communicate()[0]
    jobs = {}
    # FIX: split the table/rows once instead of re-splitting per use.
    rows = table.split('\n')
    headers = rows[0].split()
    for line in rows[1:]:
        if not line:
            continue
        fields = line.split()
        jobid = fields[2]
        jobs[jobid] = {}
        for i, val in enumerate(fields):
            jobs[jobid][headers[i]] = val
    return jobs
def main():
    """Print virtualenv and python version.

    Walks every directory under $WORKON_HOME, records the interpreter
    version (from its `python` binary) and the installed packages (from
    `pip freeze`), and appends one rendered template entry per virtualenv
    to a host-specific markdown file.
    """
    workon_home = os.environ.get('WORKON_HOME')
    # FIX: Path(None) raises TypeError; bail out when WORKON_HOME is unset.
    if not workon_home:
        return
    workon_home = Path(workon_home)

    report_name = f'virtualenvs-{os.uname()[1].split(".")[0]}.md'
    for virtualenv in workon_home.iterdir():
        if not virtualenv.is_dir():
            continue
        # FIX: reset per-virtualenv.  The original only assigned these
        # inside the python/pip branches, so a virtualenv missing either
        # binary reused stale values from the previous iteration (or hit
        # NameError on the very first one).
        virtual_environment = str(virtualenv).rpartition('/')[-1]
        python_version = None
        packages = []
        for python_bin in Path(f'{virtualenv}/bin/').iterdir():
            if python_bin.name == 'python':
                command = [f'{python_bin}',
                           '-c',
                           "import sys;print(sys.version.split()[0]);"
                           ]
                stdout, _ = Popen(command, stdout=PIPE).communicate()
                python_version = stdout.decode('utf-8').strip()
            elif python_bin.name == 'pip':
                command = [f'{python_bin}',
                           'freeze'
                           ]
                stdout, _ = Popen(command, stdout=PIPE).communicate()
                packages = [p.strip() for p in stdout.decode('utf-8').split()]
        with open(report_name, 'a') as f:
            f.write(template.render(virtualenv=virtual_environment,
                                    version=python_version,
                                    packages=packages))
示例#30
0
 def load(self, chain=None, ns=None):
     '''
     Load chains of this table from the system
     :param chain: which chain to load, None for all
     :param ns: which ns to load, None for root
     :return:
     '''
     # Optionally restrict to one chain, and wrap in `ip netns exec` when
     # a namespace is requested.
     if chain:
         run_cmd = self.run_cmd + ' ' + chain
     else:
         run_cmd = self.run_cmd
     if ns:
         run_cmd = 'ip netns exec %s %s' % (ns, run_cmd)
     rules, err = Popen(run_cmd, stdout=PIPE, stderr=PIPE,
                        shell=True).communicate()
     if err:
         error("Failed to run %s, err=%s\n" % (run_cmd, err))
         return
     self.chains, self.ns = {}, ns  # cleaning exiting rules
     # Chains are separated by blank lines in the command output.
     chains = rules.split('\n\n')
     for chain in chains:  # some chain
         r = chain.splitlines()  # lines of rule
         #if not r:
         #    continue
         title = r[0].split()  # r[0] is the title row
         if title[0] == 'Chain' and title[1] not in self.chains:  # title
             # A 'DROP' token in the title row marks the chain policy.
             if 'DROP' in title:
                 self.chains[title[1]] = IPchain(title[1], 'DROP')
             else:
                 self.chains[title[1]] = IPchain(title[1])
         # Second line holds the column headers; 'flags' is appended so the
         # per-rule flag column parses too.
         keys = r[1].split()
         keys.append('flags')
         self.chains[title[1]].set_keys(keys)
         self.chains[title[1]].add_rules(r[2:])  # those are real rules
示例#31
0
def get_config(args: SimpleNamespace, extra: SimpleNamespace) -> Configuration:
    """Build a :class:`Configuration` from parsed CLI args plus leftover
    pandoc options.

    Handles the informational flags (--print-pandoc-opts,
    --print-generation-opts) which exit early, applies inclusion/exclusion
    filters, validates the generation types, and finally sanity-checks the
    extra pandoc arguments before attaching them.
    """
    pandoc_path, pandoc_version = pandoc_version_and_path(args.pandoc_path)
    logi(f"Pandoc path is {pandoc_path}")
    if args.print_pandoc_opts:
        out, err = Popen([str(pandoc_path), "--help"],
                         stdout=PIPE,
                         stderr=PIPE).communicate()
        if err:
            loge(f"Pandoc exited with error {err.decode('utf-8')}")
        else:
            loge(f"Pandoc options are \n{out.decode('utf-8')}")
        sys.exit(0)
    config = Configuration(args.watch_dir,
                           args.output_dir,
                           config_file=args.config_file,
                           pandoc_path=pandoc_path,
                           pandoc_version=pandoc_version,
                           no_citeproc=args.no_citeproc,
                           csl_dir=args.csl_dir,
                           templates_dir=args.templates_dir,
                           post_processor=args.post_processor,
                           same_output_dir=args.same_output_dir,
                           dry_run=args.dry_run)
    # FIXME: No other args should be given with this
    if args.print_generation_opts:
        # NOTE(review): sys.exit(0) inside the loop means only the FIRST
        # filetype's options are ever printed, and the for/else reads `ft`
        # which is unbound when the filter yields nothing — confirm intent.
        for ft in filter(None, args.generation.split(",")):  # type: ignore
            opts = config._conf[ft]
            logi(f"Generation options for {ft} are:\n\t{[*opts.items()]}")
            sys.exit(0)
        else:
            loge(f"No generation options for {ft}")
    # NOTE: The program assumes that extensions startwith '.'
    if args.exclude_regexp:
        logi("Excluding files for given filters",
             str(args.exclude_regexp.split(',')))
        config.set_excluded_regexp(args.exclude_regexp.split(','),
                                   args.exclude_ignore_case)
    if args.inclusions:
        inclusions = args.inclusions
        inclusions = inclusions.split(",")
        config.set_included_extensions(
            [value for value in inclusions if value.startswith(".")])
        if args.excluded_files:
            for ef in args.excluded_files.split(','):
                assert type(ef) == str
            config.set_excluded_files(args.excluded_files.split(','))
    if args.exclusions:
        exclusions = args.exclusions
        exclusions = exclusions.split(",")
        # Dot-prefixed entries are extensions; the rest are folder names.
        excluded_extensions = [
            value for value in exclusions if value.startswith(".")
        ]
        excluded_folders = list(set(exclusions) - set(excluded_extensions))
        config.set_excluded_extensions(excluded_extensions)
        config.set_excluded_folders(excluded_folders)
    if not args.generation:
        loge("Generation options cannot be empty")
        sys.exit(1)
    diff = set(args.generation.split(",")) - set(gentypes)
    if diff:
        loge(f"Unknown generation type {diff}")
        loge(f"Choose from {gentypes}")
        sys.exit(1)

    config.log_level = args.log_level
    if config.log_level > 2:
        # NOTE(review): `out` is only bound in the --print-pandoc-opts
        # branch above, which always exits — this block raises NameError
        # whenever log_level > 2.  Needs a fix upstream.
        logi("\n".join(out.split("\n")[:3]))
        logi("-" * len(out.split("\n")[2]))
    if args.log_file:
        config.log_file = args.log_file
        logw("Log file isn't implemented yet. Will output to stdout")
    # TODO: Need Better checks
    # NOTE: These options will override pandoc options in all the sections of
    #       the config file
    for i, arg in enumerate(extra):
        if not arg.startswith('-'):
            # Bare values are only allowed directly after a -V flag.
            if not (i >= 1 and extra[i - 1] == "-V"):
                loge(
                    f"pandoc option {arg} must be preceded with -, e.g. -{arg} or --{arg}=some_val"
                )
                sys.exit(1)
        if arg.startswith('--') and '=' not in arg:
            loge(
                f"pandoc option {arg} must be joined with =. e.g. {arg}=some_val"
            )
            sys.exit(1)
    logbi(f"Will generate for {args.generation.upper()}")
    logbi(f"Extra pandoc args are {extra}")
    config.set_cmdline_opts(args.generation.split(','), extra)
    return config
示例#32
0
                                   modelMapping[feat][0],
                                   modelMapping[feat][1],
                                   modelMapping[feat][2], currIndex, 1000)
                functionMapping[feat](modelMapping[feat][2])
                for q in queryTerms.keys():
                    command = "awk -F \" \" '$1 ~ " + q + "{print}' " + modelMapping[
                        feat][1] + " > " + pathTempFilteredFile
                    os.system(command)
                    for doc in docsInCurrIndex[q]:
                        #print "document => "+doc
                        res = Popen([
                            "grep", "-e", " " + doc + " ", pathTempFilteredFile
                        ],
                                    stdout=subprocess.PIPE).stdout.read()
                        #print "("+res+")"
                        tRes = res.split(" ")
                        if len(tRes) > 1:
                            featuresValues[q][doc][feat] = float(tRes[2])
                        # Pour certains modeles le max = 0 avant normalisation
                        #else :
                        #	featuresValues[q][doc][feat] = 0.0

    for ind in getDependantFeatures(currIndex):
        feat = infoFeatures[ind]["name"]
        if feat in functionMapping:
            for q in queryTerms:
                #for doc in featuresValues[q].keys() :
                for doc in docsInCurrIndex[q]:
                    #print "document => "+doc
                    s = infoFeatures[ind]["dependant"]
                    tab = s.split(",")
示例#33
0
def runTest(testName, siteConfig, testDir, numNodes, fdata):
    """Run one Accumulo scalability test round on *numNodes* tablet servers.

    Stops the running instance, re-initializes Accumulo on the first
    numNodes slaves, launches the test clients via pssh, polls HDFS until
    all clients have written their results, then aggregates the per-client
    timings and appends one summary row to *fdata*.

    NOTE(review): Python 2 only (``0L`` literals, ``long()``, ``print``
    statement).
    """

    log('Stopping accumulo')
    syscall('$ACCUMULO_HOME/bin/stop-all.sh')

    log('Creating slaves file for this test')
    slavesPath = siteConfig.get('SLAVES')
    # Take the first numNodes hosts of the full slaves file for this round.
    nodesPath = testDir + '/nodes/%d' % numNodes
    syscall('head -n %d %s > %s' % (numNodes, slavesPath, nodesPath))

    log('Copying slaves file to accumulo config')
    syscall('cp ' + nodesPath + ' $ACCUMULO_HOME/conf/slaves')

    log('Removing /accumulo directory in HDFS')
    syscall("hadoop fs -rmr /accumulo")

    log('Initializing new Accumulo instance')
    instance = siteConfig.get('INSTANCE_NAME')
    passwd = siteConfig.get('PASSWORD')
    # Feed instance name, overwrite confirmation and password (twice) to
    # the interactive `accumulo init` prompt.
    syscall('printf "%s\nY\n%s\n%s\n" | $ACCUMULO_HOME/bin/accumulo init' %
            (instance, passwd, passwd))

    log('Starting new Accumulo instance')
    syscall('$ACCUMULO_HOME/bin/start-all.sh')

    # Larger clusters need longer to settle before the test starts.
    sleepTime = 30
    if numNodes > 120:
        sleepTime = int(numNodes / 4)
    log('Sleeping for %d seconds' % sleepTime)
    time.sleep(sleepTime)

    log('Setting up %s test' % testName)
    syscall(
        '$ACCUMULO_HOME/bin/accumulo org.apache.accumulo.test.scalability.Run %s setup %s'
        % (testName, numNodes))

    log('Sleeping for 5 seconds')
    time.sleep(5)

    log('Starting %s clients' % testName)
    # Cap the pssh parallelism at 128 regardless of cluster size.
    numThreads = numNodes
    if int(numNodes) > 128:
        numThreads = '128'
    syscall(
        'pssh -P -h %s -p %s "$ACCUMULO_HOME/bin/accumulo org.apache.accumulo.test.scalability.Run %s client %s >/tmp/scale.out 2>/tmp/scale.err &" < /dev/null'
        % (nodesPath, numThreads, testName, numNodes))

    log('Sleeping for 30 sec before checking how many clients started...')
    time.sleep(30)
    output = Popen(["hadoop fs -ls /accumulo-scale/clients"],
                   stdout=PIPE,
                   shell=True).communicate()[0]
    # assumes the first `ls` line is "Found N items" so split()[1] is the
    # count of client marker files — TODO confirm against hadoop version
    num_clients = int(output.split()[1])
    log('%s clients started!' % num_clients)

    log('Waiting until %d clients finish.' % num_clients)
    last = 0
    done = 0
    # Poll HDFS until every client has written a result file.
    while done < num_clients:
        time.sleep(5)
        output = Popen(["hadoop fs -ls /accumulo-scale/results"],
                       stdout=PIPE,
                       shell=True).communicate()[0]
        if not output:
            sys.stdout.write('.')
            sys.stdout.flush()
            continue
        done = int(output.split()[1])
        if done != last:
            sys.stdout.write('.%s' % done)
        else:
            sys.stdout.write('.')
        sys.stdout.flush()
        last = done
        sys.stdout.flush()
    log('\nAll clients are finished!')

    log('Copying results from HDFS')
    resultsDir = "%s/results/%s" % (testDir, numNodes)
    syscall('hadoop fs -copyToLocal /accumulo-scale/results %s' % resultsDir)

    log('Calculating results from clients')
    times = []
    totalMs = 0L
    totalEntries = 0L
    totalBytes = 0L
    # Each client file has ELAPSEDMS lines: "ELAPSEDMS <ms> <entries> <bytes>".
    for fn in os.listdir(resultsDir):
        for line in open('%s/%s' % (resultsDir, fn)):
            words = line.split()
            if words[0] == 'ELAPSEDMS':
                ms = long(words[1].strip())
                totalMs += ms
                times.append(ms)
                totalEntries += long(words[2].strip())
                totalBytes += long(words[3].strip())
    times.sort()

    print times
    numClients = len(times)
    # NOTE(review): min/avg/median/max shadow Python builtins; values are
    # converted from milliseconds to (integer-truncated) seconds.
    min = times[0] / 1000
    avg = (float(totalMs) / numClients) / 1000
    median = times[int(numClients / 2)] / 1000
    max = times[numClients - 1] / 1000

    log('Tservs\tClients\tMin\tAvg\tMed\tMax\tEntries\tMB')
    log('%d\t%d\t%d\t%d\t%d\t%d\t%dM\t%d' %
        (numNodes, numClients, min, avg, median, max, totalEntries / 1000000,
         totalBytes / 1000000))
    fdata.write('%d\t%d\t%d\t%d\t%d\t%d\t%dM\t%d\n' %
                (numNodes, numClients, min, avg, median, max,
                 totalEntries / 1000000, totalBytes / 1000000))
    fdata.flush()

    time.sleep(5)

    log('Tearing down %s test' % testName)
    syscall(
        '$ACCUMULO_HOME/bin/accumulo org.apache.accumulo.test.scalability.Run %s teardown %s'
        % (testName, numNodes))

    time.sleep(10)
示例#34
0
def search_sploitz(vendor):
    """Search exploit-db via ``searchsploit`` for *vendor*.

    Returns the result lines with the two header lines and the trailing
    footer line stripped, or None if no output could be read.
    """
    # Security fix: pass the argument vector directly (shell=False) so a
    # hostile *vendor* string cannot inject shell commands; previously the
    # value was interpolated into a `shell=True` command line.
    out = Popen(['searchsploit', vendor], stdout=PIPE).communicate()[0]
    if out is None:
        return None
    return out.split('\n')[2:-1]
示例#35
0
#!/usr/bin/python

from sys import argv, maxsize, exit
from subprocess import Popen, PIPE

# Usage: script <harness> <exe> <code> [extra "sea pf" args...]
# Three positional arguments are required; everything after argv[3] is
# forwarded to `sea pf`.
if len(argv) < 4:  # bugfix: was `< 3`, but argv[3] is read below, so
    exit(1)        # exactly three argv entries crashed with IndexError
harness_opt, exe_opt, code, pf_args = argv[1], argv[2], argv[3], argv[4:]

# Match the bitness of the running interpreter when compiling/verifying.
m32_or_m64 = "-m64" if maxsize > 2**32 else "-m32"
output, _ = Popen(
    ["sea", "pf", "--cex=%s" % harness_opt, m32_or_m64, code] + pf_args,
    stdout=PIPE).communicate()

# "sat" means a counterexample was found: build an executable harness for
# it and run it to reproduce the expected error.
# NOTE(review): output is decoded text only on Python 2; on Python 3 the
# split would need a .decode() — confirm target interpreter.
if "sat" in output.split("\n"):
    print("sat")
    Popen(["sea", "exe", m32_or_m64, "-g", code, harness_opt, "-o",
           exe_opt]).communicate()
    expected_error, _ = Popen(exe_opt, stdout=PIPE).communicate()
    print(expected_error)
elif "unsat" in output.split("\n"):
    print("unsat")
else:
    # Neither verdict found: surface the raw solver output for debugging.
    print(output)
示例#36
0
def getTip():
    """Return the third colon-separated field of ``hg tip``'s first line.

    The first line of ``hg tip`` is the changeset line, e.g.
    ``changeset:   12:abcdef0123``, so this yields the changeset hash.
    """
    stdout_text = Popen("hg tip", shell=True, stdout=PIPE).communicate()[0]
    first_line = stdout_text.split("\n")[0]
    return first_line.split(":")[2].strip()
示例#37
0
    def runJob(self):
        """Run the athena job. Returns JobReport with the result of the run.
        Can be overridden in derived class, typically for a composite transform,
        running several athena jobs."""

        self.logger().info('Using %s' % (trfenv.trfPath))

        #
        # start with the pre-run actions
        #
        self.doPreRunActions()
        # gather environment only after preRunActions, as they may change the environment
        self.gatherEnvironmentInfo()

        # Prepare for running athena job
        #
        # open the logfile
        logFile = fileutil.Tee(self._logFilename, 'a')

        #
        # get maximum event number, valgrind version and jobOptions to process
        #
        # maxEventsArg = self.getArgumentOfType("MaxEvents")
        # maxEvents = maxEventsArg and maxEventsArg.value()
        # self.logger().info( 'In runJob() %d' % ( maxEvents ) )

        # Collect the (all optional) transform arguments that steer this
        # valgrind run; each `X = XArg and XArg.value()` leaves X falsy
        # when the argument is absent.
        stacktraceDepthArg = self.getArgumentOfType("StacktraceDepth")
        stacktraceDepth = stacktraceDepthArg and stacktraceDepthArg.value()
        self.logger().info(' valgrind monitors up to stacktraceDepth: %s' %
                           (stacktraceDepth))

        valgrindToolArg = self.getArgumentOfType("ValgrindTool")
        valgrindTool = valgrindToolArg and valgrindToolArg.value()
        self.logger().info(' will use valgrind tool %s' % (valgrindTool))

        valgrindToolOptArg = self.getArgumentOfType("ValgrindToolOpt")
        valgrindToolOpt = valgrindToolOptArg and valgrindToolOptArg.value()
        self.logger().info(' will use these valgrind tool options %s' %
                           (valgrindToolOpt))

        valgrindGenSuppressionArg = self.getArgumentOfType(
            "ValgrindGenSuppression")
        valgrindGenSuppression = valgrindGenSuppressionArg and valgrindGenSuppressionArg.value(
        )
        self.logger().info(' will produce suppressions file %s' %
                           (valgrindGenSuppression))

        valVersionArg = self.getArgumentOfType("ValgrindVersion")
        valVersion = valVersionArg and valVersionArg.value()
        self.logger().info(' will use valgrind version %s' % (valVersion))

        jobOptionsArg = self.getArgumentOfType("JobOptions")
        jobOptions = jobOptionsArg and jobOptionsArg.value()
        self.logger().info(' processing these jobOptions: %s' % (jobOptions))

        needInputFileArg = self.getArgumentOfType("NeedInputFile")
        needInputFile = needInputFileArg and needInputFileArg.value()
        self.logger().info(' need some input file: %s' % (needInputFile))

        checkFilesArg = self.getArgumentOfType("CheckFiles")
        checkFiles = checkFilesArg and checkFilesArg.value()
        self.logger().info(' doing checks on produced files: %s' %
                           (checkFiles))

        recoTrfArg = self.getArgumentOfType("RecoTrf")
        recoTrf = recoTrfArg and recoTrfArg.value()
        # '+' encodes a space and '#' encodes a literal '+' in RecoTrf.
        self.logger().info(' calling jobTransform in this way %s' %
                           (recoTrf.replace('+', ' ').replace('#', '+')))

        athenaOptArg = self.getArgumentOfType("AthenaOpt")
        athenaOpt = athenaOptArg and athenaOptArg.value()
        self.logger().info(' calling athena with these optional arguments %s' %
                           (athenaOpt))

        trackOriginsArg = self.getArgumentOfType("TrackOrigins")
        trackOrigins = trackOriginsArg and trackOriginsArg.value()
        self.logger().info(
            ' track origins of uninitialized variables (memory hungry !): %s' %
            (trackOrigins))

        dumpInstrArg = self.getArgumentOfType("DumpInstr")
        dumpInstr = dumpInstrArg and dumpInstrArg.value()
        self.logger().info(' callgrind: collect at instruction level: %s' %
                           (dumpInstr))

        instrAtStartArg = self.getArgumentOfType("InstrAtStart")
        instrAtStart = instrAtStartArg and instrAtStartArg.value()
        self.logger().info(
            ' callgrind: start collecting at start of program: %s' %
            (instrAtStart))

        recExCommonLinksArg = self.getArgumentOfType("RecExCommonLinks")
        recExCommonLinks = recExCommonLinksArg and recExCommonLinksArg.value()
        self.logger().info(' run RecExCommon_links.sh before: %s' %
                           (recExCommonLinks))

        reqCmtConfigArg = self.getArgumentOfType("ReqCmtConfig")
        reqCmtConfig = reqCmtConfigArg and reqCmtConfigArg.value()
        self.logger().info(' requiring CMTCONFIG for this job to be %s' %
                           (reqCmtConfig))

        self.logger().info('Doing some tests before executing valgrind')
        tmp = os.system(' which athena.py > /dev/null ')
        self.logger().info(' Do we have athena setup ? %s' % (tmp))

        # determine the platform from uname and modify for valgrind usage
        platform = os.uname()[4]
        valgrindMachPath = 'ia32'
        self.logger().info(' will use as platform ? %s' % (platform))
        if platform == "x86_64":
            valgrindMachPath = 'amd64'

        gzip_logfile = False
        # $CMTCONFIG is expected to have exactly four '-'-separated fields
        # (e.g. arch-os-compiler-buildtype); bail out otherwise.
        tmp = os.environ['CMTCONFIG']
        mach = tmp.split('-')
        if len(mach) != 4:
            self.logger().error(
                'Cannot determine linux versions and compiler from $CMTCONFIG !! It has %d instead of 4 elements !'
                % (len(mach)))
            exit(0)

        self.logger().info(' will also use : %s %s' % (mach[1], mach[2]))
        # valgrindBasePath='/afs/cern.ch/sw/lcg/external/valgrind/3.4.1/' + mach[1] + '_' + valgrindMachPath + '_' + mach[2]
        # Select the valgrind installation for the requested version
        # (default 3.8.1); non-default versions gzip the large logfile.
        valgrindBasePath = '/afs/cern.ch/user/r/rig/sw/vg-3.8.1'
        if valVersion == "3.8.0":
            valgrindBasePath = '/afs/cern.ch/user/r/rig/sw/vg-3.8.0'
            gzip_logfile = True
        if valVersion == "3.6.1":
            valgrindBasePath = '/afs/cern.ch/user/r/rig/sw/vg-3.6.1'
            gzip_logfile = True
        if valVersion == "trunk":
            valgrindBasePath = '/afs/cern.ch/user/r/rig/sw/vg-trunk'
            gzip_logfile = True

        self.logger().info('Setting up valgrind from located at %s' %
                           (valgrindBasePath))

        proName = os.environ['AtlasProject']
        tmp = os.path.split(os.getcwd())[0]
        jobName = os.path.split(tmp)[1]
        relName = os.environ['AtlasVersion']

        vetoed = False

        #fll=os.environ["FRONTIER_LOG_LEVEL"]
        #fsv=os.environ["FRONTIER_SERVER"]
        #os.environ.pop("FRONTIER_LOG_LEVEL")
        #os.environ.pop("FRONTIER_SERVER")

        # A job may require a specific CMTCONFIG substring; when the
        # current CMTCONFIG does not match, the job is vetoed and exits
        # with code 2 (leaving a 'vetoed' marker file behind).
        if (reqCmtConfig != 'None'):
            self.logger().info(' specific CMTCONFIG required : %s ' %
                               reqCmtConfig)
            vetoed = True
            tmp = os.environ['CMTCONFIG']
            if (tmp.find(reqCmtConfig) > -1):
                vetoed = False

            # if ( tmp.find("x86_64") > -1 ):
            #     vetoed = True

            if vetoed:
                self.logger().info(' Vetoed: Yes, we will NOT run valgrind !')
                os.system('touch vetoed')
                self.logger().info(' Exiting !')
                sys.exit(2)
            else:
                self.logger().info(' Not vetoed, so we will run valgrind !')

        if os.access(valgrindBasePath, os.F_OK):
            self.logger().info(
                ' Everything looks OK, get now the jobOptions %s ' %
                jobOptions)
            get_files(jobOptions,
                      fromWhere='JOBOPTSEARCHPATH',
                      depth=1,
                      sep=',')

            self.logger().info(
                ' Everything looks OK, get now the suppression file(s)')
            get_files('valgrindRTT.supp')
            get_files('Gaudi.supp/Gaudi.supp')
            get_files('root.supp/root.supp')
            get_files('newSuppressions.supp')
            get_files('oracleDB.supp')
            # get_files ( 'valgrind-python.supp/valgrind-python.supp' )

            # add valgrind to PATH and LD_LIBRARY_PATH
            os.environ[
                'PATH'] = valgrindBasePath + '/bin' + ':' + os.environ['PATH']
            os.environ[
                'LD_LIBRARY_PATH'] = valgrindBasePath + '/lib' + ':' + os.environ[
                    'LD_LIBRARY_PATH']

            tmp = os.system('which valgrind > /dev/null')
            self.logger().info(' do we have valgrind setup correctly ? %s' %
                               (tmp))

            # somebody compiles in tmp directory, as valgrind also follows this compilation, we
            # need to supply absolute paths to all suppression files
            currentWorkArea = os.getcwd()

            if (recExCommonLinks):
                self.logger().info(' executing RecExCommon_links.sh')
                tmp = os.system('RecExCommon_links.sh > /dev/null')

            # tmp = os.system('setupLocalDBReplica_CERN.sh COMP200')
            # self.logger().info( ' executing RecExCommon_links.sh' )

            # toolOptions     = ' --leak-check=yes --show-reachable=yes --log-file=valgrind.out.process.\%p '
            # %p in --log-file expands to the pid of each traced process.
            toolOptions = ' --leak-check=yes --log-file=valgrind.out.process.\%p '

            valgrindOptions = ' --trace-children=yes --track-fds=yes '
            # valgrindOptions = ' --trace-children=yes --track-fds=yes '
            # only in 3.5.0:
            if valVersion == "3.6.1":
                valgrindOptions = valgrindOptions + ' --read-var-info=yes'

            valgrindOptions = valgrindOptions + ' --num-callers=' + str(
                stacktraceDepth)
            valgrindOptions = valgrindOptions + ' --suppressions=' + currentWorkArea + '/valgrindRTT.supp '
            valgrindOptions = valgrindOptions + ' --suppressions=' + currentWorkArea + '/newSuppressions.supp '
            valgrindOptions = valgrindOptions + ' --suppressions=' + currentWorkArea + '/oracleDB.supp '

            # add other suppressions files:
            valgrindOptions += '--suppressions=' + currentWorkArea + '/Gaudi.supp/Gaudi.supp '
            valgrindOptions += '--suppressions=' + currentWorkArea + '/root.supp/root.supp '

            # use suppression file shipped with root, if it exists
            # probably 99.99% overlap with ATLAS own's but might be a bit newer
            root_supp_path = os.path.expandvars(
                '$ROOTSYS/etc/valgrind-root.supp')
            if os.path.exists(root_supp_path):
                valgrindOptions += '--suppressions=$ROOTSYS/etc/valgrind-root.supp '
            # --trace-children-skip=/bin/sh,cmt.exe

            athenaOptions = ' `which python` `which athena.py` '

            # Optional athena '-c' preamble: `pre`/`post` track whether the
            # quoted -c "... " section is open and needs closing.
            pre = ' -c "'
            post = ' '
            if (athenaOpt != 'None'):
                athenaOptions += pre + athenaOpt
                pre = ';'
                post = '" '

            if (needInputFile == "EVGEN"):
                self.logger().info(' EVGEN file requested, now copying')
                tmp = os.system(
                    'cp /afs/cern.ch/atlas/maxidisk/d33/referencefiles/Sim/EVGEN.pool.root EVGEN.pool.root'
                )

            # Two modes: run a job transform (recoTrf given) or plain athena
            # with jobOptions. Either way only a config-only run is done here
            # to produce the rec.pkg.pkl pickle that valgrind will consume.
            if (recoTrf != "None"):

                if (needInputFile != 'None'):
                    self.logger().info(
                        ' optionally using input file of type: %s ' %
                        needInputFile)
                    if (needInputFile == "ESD"):
                        # tmp = os.system('cp /afs/cern.ch/atlas/maxidisk/d33/releases/latest.ESD.pool.root ESD.pool.root')
                        tmp = os.system(
                            'cp /afs/cern.ch/atlas/project/rig/referencefiles/dataStreams_ESD.AOD_50Events/data10_7TeV.00167607.physics_JetTauEtmiss.recon.ESD.f298._lb0087._SFO-4._0001.1_50Events_rel.16.0.3.8_rereco ESD.pool.root'
                        )

                    if (needInputFile == "AOD"):
                        tmp = os.system(
                            'cp /afs/cern.ch/atlas/maxidisk/d33/releases/latest.AOD.pool.root AOD.pool.root'
                        )

                from subprocess import Popen, PIPE
                cmd = recoTrf.replace('+', ' ').replace('#', '+')

                # first check, that all arguments are allowed (outputfiles often change ...)
                output = Popen([cmd.split()[0] + ' -h'],
                               stdout=PIPE,
                               shell=True).communicate()[0]
                # self.logger().info( 'Here is the output : ' )
                # for outline in output.split('\n'):
                #  self.logger().info( 'PIPE >' + outline + '<' )
                # Keep only arguments that the transform's -h output knows
                # about (plus append_pre* ones); drop the rest.
                allfound = True
                newcmd = ""
                sep = ""
                for args in cmd.split():
                    # self.logger().info( 'CMD >' + args + '<' )
                    thisargstr = args
                    if (args.find("=") >= 0):
                        thisargstr = args.split(
                            "=")[0] + '< with value >' + args.split("=")[1]

                    if (output.find(args.split("=")[0]) > 0
                            or args.split("=")[0].startswith("append_pre")):
                        self.logger().info('FOUND ARG >' + thisargstr + '<')
                        newcmd += sep + args
                        sep = " "
                    else:
                        self.logger().info(' NOT FOUND ARG >' + thisargstr +
                                           '<')
                        # an argument for the job transform wasn;t found !
                        # remove it
                        allfound = False

                self.logger().info('ARG CMP old>' + cmd + '<')
                self.logger().info('ARG CMP new>' + newcmd + '<')
                if (cmd != newcmd):
                    cmd = newcmd

                self.logger().info(
                    'running dummy jobTransform to create pickle file')
                cmd += ' --athenaopts="--config-only=rec.pkg --keep-configuration" --omitvalidation=ALL '
                self.logger().info(' cmd : ' + cmd)
                output = Popen([cmd], stdout=PIPE, shell=True).communicate()[0]
                self.logger().info('Here is the output :')
                for outline in output.split('\n'):
                    self.logger().info('TRF >' + outline + '<')

                os.system('ls -la >& dir.log')

            else:

                if (needInputFile != 'None'):
                    self.logger().info(
                        ' reco_trf will use input file of type: %s ' %
                        needInputFile)
                    if (needInputFile == "ESD"):
                        athenaOptions += pre + 'from AthenaCommon.AthenaCommonFlags import athenaCommonFlags;athenaCommonFlags.FilesInput=[\'/afs/cern.ch/atlas/maxidisk/d33/releases/latest.ESD.pool.root\']'
                        pre = ';'
                        post = '" '

                    if (needInputFile == "AOD"):
                        athenaOptions += pre + 'from AthenaCommon.AthenaCommonFlags import athenaCommonFlags;athenaCommonFlags.FilesInput=[\'/afs/cern.ch/atlas/maxidisk/d33/releases/latest.AOD.pool.root\']'
                        pre = ';'
                        post = '" '

                cmd = athenaOptions + post + jobOptions
                cmd += ' --config-only=rec.pkg --keep-configuration'

                from subprocess import Popen, PIPE
                self.logger().info(
                    'running dummy athena job to create pickle file')
                self.logger().info(' cmd : ' + cmd)
                output = Popen([cmd], stdout=PIPE, shell=True).communicate()[0]
                self.logger().info('Here is the output :')
                for outline in output.split('\n'):
                    self.logger().info('TRF >' + outline + '<')

                os.system('ls -la >& dir.log')

            athenaOptions += post

            # The real (valgrind-traced) run replays the pickled config.
            athenaOptions += ' --stdcmalloc rec.pkg.pkl > out.valgrind.log 2>&1 '

            toolName = 'memcheck'

            trackOriginsStr = ''
            if (trackOrigins):
                trackOriginsStr = '--track-origins=yes'

            # toolOptions += " --malloc-fill=0xF0 --free-fill=0xF --error-limit=no --main-stacksize=268435456 --max-stackframe=33554432 " + trackOriginsStr
            toolOptions += " --malloc-fill=0xF0 --free-fill=0xF --error-limit=no " + trackOriginsStr

            # Non-memcheck tools replace the default toolOptions wholesale.
            if (valgrindTool == "callgrind"):
                toolOptions = ' --tool=callgrind --callgrind-out-file=callgrind.out.process.\%p '
                if dumpInstr:
                    toolOptions += ' --dump-instr=yes '
                if not instrAtStart:
                    toolOptions += ' --instr-atstart=no '
                toolName = valgrindTool

            if (valgrindTool == "massif"):
                toolOptions = ' --tool=massif --massif-out-file=massif.out.process.\%p --detailed-freq=1 --max-snapshots=300'
                toolName = valgrindTool

            if (valgrindTool == "dhat"):
                toolOptions = ' --tool=exp-dhat --show-top-n=2500 '
                toolName = valgrindTool

            if (valgrindTool == "ptrcheck"):
                toolOptions = ' --tool=exp-ptrcheck --error-limit=no '
                # to speed up ptrcheck, one can add --enable-sg-checks=no
                toolName = valgrindTool

            if (valgrindGenSuppression == "yes"):
                toolOptions += ' --gen-suppressions=all'

            if (valgrindToolOpt != 'None'):
                toolOptions += valgrindToolOpt

            self.logger().info(' now starting TopObserver !')
            thread.start_new_thread(TopObserver, (toolName, ))

            if not vetoed:
                self.logger().info(' now starting valgrind !')
                string = ' /usr/bin/time valgrind ' + toolOptions + valgrindOptions + athenaOptions
                self.logger().info(' now calling valgrind with : <%s>' %
                                   string)
                tmp = os.system(string)
                if gzip_logfile:
                    tmp = os.system("gzip out.valgrind.log")
                # tmp = os.system( athenaOptions )
            else:
                self.logger().info(' job was vetoed ! based on %s %s' %
                                   (proName, jobName))
                tmp = os.system('touch veto.log')

            # testing, no valgrind ...
            # tmp = os.system( ' athena.py --stdcmalloc ' + jobOptions + ' > out.valgrind.log 2>&1 ' )

            self.logger().info(' valgrind returned with code %s' % (tmp))
            # 'done' marker file signals completion to the TopObserver thread.
            tmp = os.system('touch done')
            time.sleep(5)

            if (valgrindTool == "callgrind"):
                tmp = os.system(
                    "mkdir t; cp callgrind.out.process.`grep 'ValgrindHelperAlg\ *INFO ValgrindHelperAlg:' out.valgrind.log | gawk '{ print $NF }'` t; gzip callgrind.out.process.*"
                )

            # clean up the local copies of the input files
            if (recoTrf != "None"):
                if (needInputFile != 'None'):
                    self.logger().info(
                        ' cleaning up local copies of the input file of type: %s '
                        % needInputFile)
                    if (needInputFile == "ESD"):
                        tmp = os.system('rm -f ESD.pool.root')

                    if (needInputFile == "AOD"):
                        tmp = os.system('rm -f AOD.pool.root')

            if (needInputFile == "EVGEN"):
                self.logger().info(' removing local copy of EVGEN file')
                tmp = os.system('rm -f EVGEN.pool.root')

            #if ( checkFiles == 'diff' and needInputFile != 'None' ):
            #    tmp = os.system('diffPoolFiles.py -r ' + needInputFile + '.pool.root -f copy_' + needInputFile + '.pool.root > diffFiles.log 2>&1')
            #
            #if ( checkFiles.beginsWith('check:') ):
            #    tmp = os.system('checkFile.py ' + checkFiles.rpartition(':') + ' > checkFile.log 2>&1')

            athenaReport = JobReport()

            thread.exit()

        else:
            self.logger().error(
                ' PATH does not point to a valid valgrind installation ! Cannot run !'
            )
            self.logger().error(' PATH: %s' % valgrindBasePath)

        # overwrite producer for new errors that are added
        # NOTE(review): if the valgrind path check above failed, athenaReport
        # was never assigned and the next line raises NameError.
        athenaReport.setProducer(self.name(), self.version())

        #os.environ["FRONTIER_LOG_LEVEL"]=fll
        #os.environ["FRONTIER_SERVER"]=fsv

        return athenaReport
    def test_rosparam(self):
        """Exercise the `rosparam` command-line tool end to end.

        Covers list, get (plain and pretty-printed), typed set (int,
        float, bool, string, list, dict) and delete, checking each result
        against the parameter server state.
        """
        ps = get_param_server()

        # network is initialized
        cmd = 'rosparam'
        names = ['/chatter', 'foo/chatter']

        # list
        params = [
            '/string',
            '/int',
            '/float',
            '/g1/string',
            '/g1/int',
            '/g1/float',
            '/g2/string',
            '/g2/int',
            '/g2/float',
        ]
        # - we aren't matching against the core services as those can make the test suites brittle
        output = Popen([cmd, 'list'], stdout=PIPE).communicate()[0]
        l = set(output.split())
        for t in params:
            self.assert_(t in l)

        # get
        # - strings
        output = Popen([cmd, 'get', "string"], stdout=PIPE).communicate()[0]
        self.assertEquals('foo-value', output.strip())
        # -- pretty
        output = Popen([cmd, 'get', '-p', "string"],
                       stdout=PIPE).communicate()[0]
        self.assertEquals('foo-value', output.strip())
        output = Popen([cmd, 'get', "/string"], stdout=PIPE).communicate()[0]
        self.assertEquals('foo-value', output.strip())
        output = Popen([cmd, 'get', "g1/string"], stdout=PIPE).communicate()[0]
        self.assertEquals('g1-foo-value', output.strip())
        output = Popen([cmd, 'get', "/g1/string"],
                       stdout=PIPE).communicate()[0]
        self.assertEquals('g1-foo-value', output.strip())
        output = Popen([cmd, 'get', "/g2/string"],
                       stdout=PIPE).communicate()[0]
        self.assertEquals('g2-foo-value', output.strip())
        # - ints
        output = Popen([cmd, 'get', "int"], stdout=PIPE).communicate()[0]
        self.assertEquals('1', output.strip())
        # -- pretty
        output = Popen([cmd, 'get', '-p', "int"], stdout=PIPE).communicate()[0]
        self.assertEquals('1', output.strip())
        output = Popen([cmd, 'get', "/int"], stdout=PIPE).communicate()[0]
        self.assertEquals('1', output.strip())
        output = Popen([cmd, 'get', "g1/int"], stdout=PIPE).communicate()[0]
        self.assertEquals('10', output.strip())
        output = Popen([cmd, 'get', "/g1/int"], stdout=PIPE).communicate()[0]
        self.assertEquals('10', output.strip())
        output = Popen([cmd, 'get', "/g2/int"], stdout=PIPE).communicate()[0]
        self.assertEquals('20', output.strip())
        # - floats
        output = Popen([cmd, 'get', "float"], stdout=PIPE).communicate()[0]
        self.assertEquals('1.0', output.strip())
        # -- pretty
        output = Popen([cmd, 'get', '-p', "float"],
                       stdout=PIPE).communicate()[0]
        self.assertEquals('1.0', output.strip())
        output = Popen([cmd, 'get', "/float"], stdout=PIPE).communicate()[0]
        self.assertEquals('1.0', output.strip())
        output = Popen([cmd, 'get', "g1/float"], stdout=PIPE).communicate()[0]
        self.assertEquals('10.0', output.strip())
        output = Popen([cmd, 'get', "/g1/float"], stdout=PIPE).communicate()[0]
        self.assertEquals('10.0', output.strip())
        output = Popen([cmd, 'get', "/g2/float"], stdout=PIPE).communicate()[0]
        self.assertEquals('20.0', output.strip())
        # - dictionary
        output = Popen([cmd, 'get', "g1"], stdout=PIPE).communicate()[0]
        import yaml
        # NOTE(review): yaml.load without an explicit Loader is deprecated
        # (and unsafe) on PyYAML >= 5; acceptable here for trusted test output.
        d = yaml.load(output)
        self.assertEquals(d['float'], 10.0)
        self.assertEquals(d['int'], 10.0)
        self.assertEquals(d['string'], "g1-foo-value")
        self.assertEquals(set(['float', 'int', 'string']), set(d.keys()))

        # -- don't bother parsing pretty output of dictionary, but check for no errors
        check_call([cmd, 'get', '-p', "g1"])
        # --- with verbose
        check_call([cmd, 'get', '-pv', "g1"])

        # set
        # - integers
        Popen([cmd, 'set', "/set/test1", "1"], stdout=PIPE).communicate()[0]
        self.assertEquals(1, ps.getParam('/', '/set/test1')[2])
        # -- verbose
        Popen([cmd, 'set', '-v', "/set/test1", "1"],
              stdout=PIPE).communicate()[0]
        self.assertEquals(1, ps.getParam('/', '/set/test1')[2])
        Popen([cmd, 'set', "set/test1", "2"], stdout=PIPE).communicate()[0]
        self.assertEquals(2, ps.getParam('/', '/set/test1')[2])
        # - floats
        Popen([cmd, 'set', "/set/test2", "1.0"], stdout=PIPE).communicate()[0]
        self.assertEquals(1, ps.getParam('/', '/set/test2')[2])
        Popen([cmd, 'set', "set/test2", "2.0"], stdout=PIPE).communicate()[0]
        self.assertEquals(2, ps.getParam('/', '/set/test2')[2])
        # - booleans
        Popen([cmd, 'set', "/set/testbool", "true"],
              stdout=PIPE).communicate()[0]
        self.assertEquals(True, ps.getParam('/', '/set/testbool')[2])
        Popen([cmd, 'set', "set/testbool", "false"],
              stdout=PIPE).communicate()[0]
        self.assertEquals(False, ps.getParam('/', '/set/testbool')[2])
        # - strings
        #   TODO: test more interesting encodings, like multi-line
        Popen([cmd, 'set', "/set/teststr", "hi"], stdout=PIPE).communicate()[0]
        self.assertEquals("hi", ps.getParam('/', '/set/teststr')[2])
        Popen([cmd, 'set', "set/teststr", "hello world"],
              stdout=PIPE).communicate()[0]
        self.assertEquals("hello world", ps.getParam('/', '/set/teststr')[2])
        # Quoted 'true' must stay a string, not become a boolean.
        Popen([cmd, 'set', "set/teststr", "'true'"],
              stdout=PIPE).communicate()[0]
        self.assertEquals("true", ps.getParam('/', '/set/teststr')[2])
        # - list
        Popen([cmd, 'set', "set/testlist", "[]"], stdout=PIPE).communicate()[0]
        self.assertEquals([], ps.getParam('/', '/set/testlist')[2])
        Popen([cmd, 'set', "/set/testlist", "[1, 2, 3]"],
              stdout=PIPE).communicate()[0]
        self.assertEquals([1, 2, 3], ps.getParam('/', '/set/testlist')[2])
        # - dictionary
        Popen([cmd, 'set', "/set/testdict", "{a: b, c: d}"],
              stdout=PIPE).communicate()[0]
        self.assertEquals('b', ps.getParam('/', '/set/testdict/a')[2])
        self.assertEquals('d', ps.getParam('/', '/set/testdict/c')[2])
        #   - empty dictionary should be a noop
        Popen([cmd, 'set', "set/testdict", "{}"], stdout=PIPE).communicate()[0]
        self.assertEquals('b', ps.getParam('/', '/set/testdict/a')[2])
        self.assertEquals('d', ps.getParam('/', '/set/testdict/c')[2])
        #   - this should be an update
        Popen([cmd, 'set', "/set/testdict", "{e: f, g: h}"],
              stdout=PIPE).communicate()[0]
        self.assertEquals('b', ps.getParam('/', '/set/testdict/a')[2])
        self.assertEquals('d', ps.getParam('/', '/set/testdict/c')[2])
        self.assertEquals('f', ps.getParam('/', '/set/testdict/e')[2])
        self.assertEquals('h', ps.getParam('/', '/set/testdict/g')[2])
        # -- verbose
        check_call([cmd, 'set', '-v', "/set/testdictverbose", "{e: f, g: h}"])

        # delete
        ps.setParam('/', '/delete/me', True)
        self.assert_(ps.hasParam('/', '/delete/me')[2])
        Popen([cmd, 'delete', "/delete/me"], stdout=PIPE).communicate()[0]
        self.failIf(ps.hasParam('/', '/delete/me')[2])
示例#39
0
"""
This script is to extract all databases from Android device and store extracted db files.
Command used to find common databases - find /data/ -name '*.db'

"""
import hashlib
import os
from subprocess import Popen, PIPE, STDOUT
from scripts.os_check import ADB, SUC, PERM, SEP
from scripts.utils import ROOT_DIR, mkdir
from scripts.file_helper import convert_to_tsv

# Enumerate every *.db file on the attached device by running `find` over
# adb (SUC is the su/shell prefix imported from scripts.os_check).
# NOTE(review): stderr is merged into stdout, so any error text from
# `find` would end up inside the list as well — confirm callers tolerate it.
dbs_list_str = Popen([ADB, 'shell', SUC, 'find', '/data/', '-name', '*.db'], stdout=PIPE, stderr=STDOUT)\
    .stdout.read().decode('UTF-8')
dbs_list_str = dbs_list_str.strip()
# One device-side absolute path per line.
DB_LIST = dbs_list_str.split('\n')

DLLS = []          # NOTE(review): appears unused in this fragment — verify
OUTPUT = ROOT_DIR  # pulled databases are stored under the project root


def download_database(db_path):
    db_name = db_path.split('/')[-1]
    if Popen([ADB, 'shell', SUC, 'ls', db_path], stdout=PIPE, stderr=STDOUT).stdout.read().decode('UTF-8') \
            .replace('\r', '') \
            .replace('\n', '') == db_path:
        # print(db_path + " Exists")
        if 'su' in PERM:
            Popen([
                ADB, 'shell', SUC, 'dd', 'if=' + db_path,
                'of=/data/local/tmp/' + db_name
示例#40
0
 def _get_package_id(cls):
     """Return the Android package id of the application under test.

     In mock mode a fixed dummy id is returned; otherwise the id is
     parsed out of ``aapt dump badging`` output for the configured APK.
     """
     if cls.MOCK_MODE:
         return 'abc'
     badging, _ = Popen(
         ['aapt', 'dump', 'badging', Config.getInstance().app()],
         stdout=PIPE).communicate()
     # aapt prints a line like: package: name='com.example.app' versionCode=...
     tail = badging.split("package: name='")[1]
     return tail.split("'", 1)[0]
示例#41
0
    def run_backup(self, mysql_and_credentials, mysql_dump_and_credentials,
                   DESTINATION, PREFIX, script_prefix, MY_INSTANCE_NAME):
        """Back up every non-system, non-excluded database of one instance.

        For each database the number of non-InnoDB base tables is counted:
        databases holding MYISAM tables are dumped without --master-data=2,
        pure-InnoDB databases with it (plus skip-lock flags).  Each dump is
        gzipped into DESTINATION as
        <PREFIX>_<script_prefix>_<MY_INSTANCE_NAME>_<db>.sql.gz.

        Returns the (stdout, stderr) of the last mysqldump invocation, or
        ("No databases found.", '') when nothing is eligible.
        """
        # Build the repeated --ignore-table=<db.table> switches, if any.
        chain_exclude_tables = ""
        if self.args_list.EXCLUDE_TABLE:
            for table in self.args_list.EXCLUDE_TABLE.split():
                chain_exclude_tables += " --ignore-table=" + table

        # List databases, dropping the header and MySQL's system schemas.
        command1 = mysql_and_credentials + " -e 'show databases' | sed '/Database/d' | grep -v 'information_schema' " \
                                           "| grep -v 'performance_schema' | grep -v 'sys'"
        stdout, stderr = Popen(command1, shell=True, stdout=PIPE,
                               stderr=PIPE).communicate()
        db_all = stdout.split('\n')[:-1]  # drop trailing '' after final newline

        if self.args_list.EXCLUDE_DB:
            list_dbs = self.args_list.EXCLUDE_DB.split()
        else:
            list_dbs = []
        db_include = [db for db in db_all if db not in list_dbs]

        # Detect the MySQL version to pick compatible dump switches.
        mysql_version_command = mysql_and_credentials + " --version"
        stdout_mysql_version, stderr_mysql_version = Popen(
            mysql_version_command, shell=True, stdout=PIPE,
            stderr=PIPE).communicate()
        if '5.7' in stdout_mysql_version:
            mysql_version = '5.7'
        elif '5.6' in stdout_mysql_version:
            mysql_version = '5.6'
        elif '5.5' in stdout_mysql_version:
            mysql_version = '5.5'
        else:
            mysql_version = 'legacy'

        if not db_include:
            return "No databases found.", ''

        for DB_NAME in db_include:
            # SQL counting the database's base tables whose engine is not InnoDB.
            _SQL2 = "\"USE information_schema; SELECT TABLE_NAME FROM TABLES WHERE TABLE_SCHEMA='" + \
                    DB_NAME + "' AND TABLE_TYPE= 'BASE TABLE' AND ENGINE NOT like 'innodb';\""
            initial_command = " --opt --routines --triggers --events --flush-privileges " \
                              "--skip-add-drop-table"
            if mysql_version == '5.7':
                # keep dumps restorable on non-GTID targets
                set_specific_parameters = ' --set-gtid-purged=OFF'
            else:
                set_specific_parameters = ''
            if mysql_version == '5.5' or mysql_version == '5.6' or mysql_version == '5.7':
                set_specific_parameters += ' --single-transaction'
            # MYISAM command, without --master-data=2
            command3 = mysql_dump_and_credentials + initial_command + set_specific_parameters + \
                       " --dump-date --databases " + \
                       DB_NAME + chain_exclude_tables + "| gzip > " + DESTINATION + "/" + PREFIX + "_" + script_prefix + \
                       "_" + MY_INSTANCE_NAME + "_" + DB_NAME + ".sql.gz"
            # InnoDB command, with --master-data=2
            command4 = mysql_dump_and_credentials + initial_command + set_specific_parameters + \
                       " --master-data=2 --skip-add-locks --skip-lock-tables --dump-date " \
                       "--databases " + DB_NAME + chain_exclude_tables + " | gzip > " \
                       + DESTINATION + "/" + PREFIX + "_" + script_prefix + "_" + MY_INSTANCE_NAME + "_" + DB_NAME + ".sql.gz"
            print("---- Backing up Instance: " + MY_INSTANCE_NAME + " Database : " + DB_NAME + " ---- ")
            command5 = mysql_and_credentials + " -e " + _SQL2 + "|grep -v TABLE|wc -l"
            stdout2, stderr = Popen(command5,
                                    shell=True,
                                    stdout=PIPE,
                                    stderr=PIPE).communicate()
            # BUGFIX: stdout2 is the *string* printed by `wc -l`; the old
            # comparison `stdout2 != 0` was always true, so the InnoDB
            # branch below could never be taken.  Compare the stripped text.
            if stdout2.strip() != '0':
                print("---- " + DB_NAME + " has MYISAM TABLES , using DUMP backup method ---- ")
                backup_stdout, backup_stderr = Popen(
                    command3, shell=True, stdout=PIPE,
                    stderr=PIPE).communicate()
            else:
                print("---- " + DB_NAME + " has all InnoDB tables , using InnoDB backup method ---- ")
                backup_stdout, backup_stderr = Popen(
                    command4, shell=True, stdout=PIPE,
                    stderr=PIPE).communicate()
        print("---- Backup Done ---- ")
        return backup_stdout, backup_stderr
示例#42
0
import os, glob, sys
from subprocess import Popen
import subprocess
import numpy as np
from netCDF4 import Dataset, netcdftime, num2date
import dimarray as da
import pandas as pd
import collections

# Work from the project data directory: cluster path first, laptop
# fallback second.  NOTE(review): the bare except also hides real errors
# (e.g. permissions), not just a missing path.
try:
    os.chdir('/p/projects/tumble/carls/shared_folder/gmt')
except:
    os.chdir('/Users/peterpfleiderer/Documents/Projects/gmt')

# Ask cdo for metadata of the first model's tos.nc file.
# NOTE(review): the result of the .split() line below is discarded — this
# reads like exploratory/REPL residue kept for reference.
cdoinfo = Popen('cdo info data_models/rcp85-xxx_rcp85_EC-EARTH_r1i1p1/tos.nc',
                shell=True,
                stdout=subprocess.PIPE).stdout.read()
cdoinfo.split('\n')[1]

# Same probe for a second model run; result also discarded.
# NOTE(review): under Python 3 .read() returns bytes and .split('\n')
# would raise TypeError — this script assumes Python 2.
cdoinfo = Popen('cdo info data_models/EC-EARTH_r1i1p1/tos.nc',
                shell=True,
                stdout=subprocess.PIPE).stdout.read()
cdoinfo.split('\n')[1].split(' 0 ')
示例#43
0
      q = j.queue.name

  # Get permitted_groups or accept the job

  permitted_groups = pbs.server().queue(q).resources_available['permitted_groups']
  if permitted_groups == None:
    e.accept()
  else:
    permitted_groups = permitted_groups.split(',')

  # Build a list of users from all permitted groups
  users = Set([])
  try:
    for g in permitted_groups:
       output = Popen([GETENT_CMD , "group", g], stdout=PIPE).communicate()[0].strip()
       output = output.split(':')[-1].split(',')
       users = users.union(output)
  except:
    pass

  # Check if job submitter is in the list of users
  if who in users:
    e.accept()
  else:
    e.reject('You are not permitted to submit jobs to queue %s.' % q)

  e.accept()

except:
  pbs.event().accept()
  pass
示例#44
0
def main():
    """Build and install TADbit.

    Steps: warn when the optional MCL clustering binary is missing,
    declare the C/C++ extension modules, sync the version number in
    _version.py and README.rst with `git describe` when available, then
    call setup().
    """
    # check if MCL is installed
    if not find_executable('mcl'):
        print('\nWARNING: It is HIGHLY RECOMMENDED to have MCL installed ' +
              '(which do not seems to be).\nIf you are under Debian/Ubuntu' +
              ' just run "apt-get-install mcl".')
        follow = raw_input(
            '\n  You still have the option to follow with the ' +
            'installation. Do you want to follow? [y/N]')
        if follow.upper() != 'Y':
            exit('\n    Wise choice :)\n')

    # c module to find TADs
    pytadbit_module = Extension('pytadbit.tadbit_py',
                                language="c",
                                sources=['src/tadbit_py.c'],
                                extra_compile_args=['-std=c99'])
    # OLD c module to find TADs
    pytadbit_module_old = Extension('pytadbit.tadbitalone_py',
                                    language="c",
                                    sources=['src/tadbit_alone_py.c'],
                                    extra_compile_args=['-std=c99'])
    # c++ module to align and calculate all distances between group of 3D models
    eqv_rmsd_module = Extension('pytadbit.eqv_rms_drms',
                                language="c++",
                                sources=[
                                    'src/3d-lib/eqv_rms_drms_py.cpp',
                                    'src/3d-lib/matrices.cc',
                                    'src/3d-lib/3dStats.cpp',
                                    'src/3d-lib/align.cpp'
                                ],
                                extra_compile_args=["-ffast-math"])
    # c++ module to align a pair of 3D models
    aligner3d_module = Extension('pytadbit.aligner3d',
                                 language="c++",
                                 runtime_library_dirs=['3d-lib/'],
                                 sources=[
                                     'src/3d-lib/align_py.cpp',
                                     'src/3d-lib/matrices.cc',
                                     'src/3d-lib/3dStats.cpp',
                                     'src/3d-lib/align.cpp'
                                 ],
                                 extra_compile_args=["-ffast-math"])
    # c++ module to align and calculate consistency of a group of 3D models
    consistency_module = Extension('pytadbit.consistency',
                                   language="c++",
                                   runtime_library_dirs=['3d-lib/'],
                                   sources=[
                                       'src/3d-lib/consistency_py.cpp',
                                       'src/3d-lib/matrices.cc',
                                       'src/3d-lib/3dStats.cpp',
                                       'src/3d-lib/align.cpp'
                                   ],
                                   extra_compile_args=["-ffast-math"])
    # c++ module to get centroid of a group of 3D models
    centroid_module = Extension('pytadbit.centroid',
                                language="c++",
                                runtime_library_dirs=['3d-lib/'],
                                sources=[
                                    'src/3d-lib/centroid_py.cpp',
                                    'src/3d-lib/matrices.cc',
                                    'src/3d-lib/3dStats.cpp',
                                    'src/3d-lib/align.cpp'
                                ],
                                extra_compile_args=["-ffast-math"])

    # UPDATE version number
    # _version.py first line is of the form: __version__ = "X.Y.R"
    version_full = open(path.join(PATH, '_pytadbit',
                                  '_version.py')).readlines()[0].split('=')[1]
    version_full = version_full.strip().replace('"', '')
    version = '.'.join(version_full.split('.')[:-1])
    revision = version_full.split('.')[-1]
    # try to use git to check if version number matches
    # NOTE(review): on an exact tag `git describe` prints "vX.Y.Z" with no
    # '-', so split('-')[1] below would raise IndexError, which the
    # OSError handler does not catch — confirm intended.
    git_revision = git_version = None
    try:
        git_revision, err = Popen(['git', 'describe'],
                                  stdout=PIPE,
                                  stderr=PIPE).communicate()
        git_status, err2 = Popen(['git', 'diff'], stdout=PIPE,
                                 stderr=PIPE).communicate()
        if err or err2:
            raise OSError('git not found')
        # uncommitted changes bump the revision by one
        plus = git_status != ''
        if plus:
            print '\n\nFOUND changes:\n' + git_status + '.'
        git_version = git_revision.split('-')[0]
        git_revision = str(int(git_revision.split('-')[1]) + plus)
    except OSError:
        git_revision = revision
        git_version = version
    else:
        # NOTE(review): err is falsy here (a truthy err raised OSError
        # above), so this fallback is effectively dead code.
        if err:
            git_revision = revision
            git_version = version
    # update version number and write it to _version.py and README files
    revision = git_revision
    version = git_version
    version_full = '.'.join([version, revision])
    out = open(path.join(PATH, '_pytadbit', '_version.py'), 'w')
    out.write('__version__ = "%s"' % version_full)
    out.close()
    lines = []
    # rewrite the "| Current version:" line in README.rst, padding with
    # spaces to keep the table layout width unchanged
    for line in open(path.join(PATH, 'README.rst')):
        if line.startswith('| Current version: '):
            old_v = sub('.*Current version: ([^ ]+ +).*', '\\1',
                        line).strip('\n')
            line = sub('Current version: [^ ]+ +',
                       ('Current version: ' + version_full + ' ' *
                        (len(old_v) - len(version_full))), line)
        lines.append(line.rstrip())
    out = open(path.join(PATH, 'README.rst'), 'w')
    out.write('\n'.join(lines))
    out.close()

    setup(
        name='TADbit',
        version=version_full,
        author=
        'Davide Bau, Francois Serra, Guillaume Filion and Marc Marti-Renom',
        author_email='*****@*****.**',
        ext_modules=[
            pytadbit_module, pytadbit_module_old, eqv_rmsd_module,
            centroid_module, consistency_module, aligner3d_module
        ],
        package_dir={'pytadbit': PATH + '/_pytadbit'},
        packages=[
            'pytadbit', 'pytadbit.parsers', 'pytadbit.tools',
            'pytadbit.boundary_aligner', 'pytadbit.utils',
            'pytadbit.tad_clustering', 'pytadbit.imp', 'pytadbit.mapping'
        ],
        # py_modules   = ["pytadbit"],
        platforms="OS Independent",
        license="GPLv3",
        description=
        'Identification, analysis and modelling of topologically associating domains from Hi-C data',
        long_description=(open("README.rst").read() +
                          open("doc/source/install.rst").read()),
        classifiers=TAGS,
        provides=["pytadbit"],
        keywords=["testing"],
        url='https://github.com/3DGenomes/tadbit',
        download_url='https://github.com/3DGenomes/tadbit/tarball/master',
        scripts=[
            'scripts/shrec.py', 'scripts/model_and_analyze.py',
            'scripts/tadbit'
        ],
        data_files=[(path.expanduser('~'), ['extras/.bash_completion'])])
示例#45
0
 def setUp(self):
     """Record the installed Inkscape major.minor version.

     Runs ``inkscape --version`` and keeps only the first two version
     components (e.g. ``0.92``) so tests can pick matching DXF fixtures.
     """
     version_out, _ = Popen([find_inkscape_path(), '--version'],
                            stdout=PIPE).communicate()
     # Output looks like "Inkscape 0.92.4 (...)": token 1 is the version.
     major_minor = version_out.split(' ')[1].split('.')[:2]
     self.inkscape_version = '.'.join(major_minor)
    def test_rostopic(self):
        """Exercise the `rostopic` command-line tool end to end.

        Covers: list, type (topic and field), find, echo -n, and pub
        (plain string and YAML-dict form).  The bw/hz/delay checks exist
        but are disabled under `if 0`.
        """
        topics = ['/chatter', '/foo/chatter', '/bar/chatter']

        # wait for network to initialize
        rospy.init_node('test')
        for i, t in enumerate(topics):
            rospy.Subscriber(t, std_msgs.msg.String, self.callback, i)
        # NOTE(review): 'all' shadows the builtin of the same name
        all = set(range(0, len(topics)))

        timeout_t = time.time() + 10.
        while time.time() < timeout_t and self.vals != all:
            time.sleep(0.1)

        # network is initialized
        cmd = 'rostopic'
        names = ['/chatter', 'foo/chatter']

        # list
        # - we aren't matching against the core services as those can make the test suites brittle
        output = Popen([cmd, 'list'], stdout=PIPE).communicate()[0]
        output = output.decode()
        l = set(output.split())
        for t in topics:
            self.assert_(t in l)

        for name in names:
            # type
            output = Popen([cmd, 'type', name], stdout=PIPE).communicate()[0]
            output = output.decode()
            self.assertEquals('std_msgs/String', output.strip())
            # check type of topic field
            output = Popen([cmd, 'type', name + '/data'],
                           stdout=PIPE).communicate()[0]
            output = output.decode()
            self.assertEquals('std_msgs/String data string', output.strip())

            # find
            output = Popen([cmd, 'find', 'std_msgs/String'],
                           stdout=PIPE).communicate()[0]
            output = output.decode()
            values = [n.strip() for n in output.split('\n') if n.strip()]
            self.assertEquals(set(values), set(topics))

            #echo
            # test with -c option to get command to terminate
            count = 3
            output = Popen(
                [cmd, 'echo', name, '-n', str(count)],
                stdout=PIPE).communicate()[0]
            output = output.decode()
            # '---' separators between echoed messages are filtered out
            values = [n.strip() for n in output.split('\n') if n.strip()]
            values = [n for n in values if n != '---']
            self.assertEquals(
                count, len(values),
                "wrong number of echos in output:\n" + str(values))
            for n in values:
                self.assert_('data: "hello world ' in n, n)

            if 0:
                #bw
                stdout, stderr = run_for([cmd, 'bw', name], 3.)
                self.assert_('average:' in stdout,
                             "OUTPUT: %s\n%s" % (stdout, stderr))

                # hz
                stdout, stderr = run_for([cmd, 'hz', name], 2.)
                self.assert_('average rate:' in stdout)

                # delay
                stdout, stderr = run_for([cmd, 'delay', name], 2.)
                self.assert_('average rate:' in stdout)

        # pub
        #  - pub wait until ctrl-C, so we have to wait then kill it
        if 1:
            s = 'hello'
            t = '/pub/chatter'
            key = len(topics)
            rospy.Subscriber(t, std_msgs.msg.String, self.callback, key)

            #TODO: correct popen call
            args = [cmd, 'pub', t, 'std_msgs/String', s]
            popen = Popen(args, stdout=PIPE, stderr=PIPE, close_fds=True)

            # - give rostopic pub 5 seconds to send us a message
            all = set(range(0, key + 1))
            timeout_t = time.time() + 5.
            while time.time() < timeout_t and self.vals != all:
                time.sleep(0.1)
            # - check published value
            msg = self.msgs[key]
            self.assertEquals(s, msg.data)

            os.kill(popen.pid, signal.SIGKILL)

            # test with dictionary
            t = '/pub2/chatter'
            key = len(topics) + 1
            rospy.Subscriber(t, std_msgs.msg.String, self.callback, key)

            args = [cmd, 'pub', t, 'std_msgs/String', "{data: %s}" % s]
            popen = Popen(args, stdout=PIPE, stderr=PIPE, close_fds=True)

            # - give rostopic pub 5 seconds to send us a message
            # NOTE(review): key + 2 here vs key + 1 above — confirm intended
            all = set(range(0, key + 2))
            timeout_t = time.time() + 5.
            while time.time() < timeout_t and self.vals != all:
                time.sleep(0.1)

            # - check published value
            try:
                msg = self.msgs[key]
            except KeyError:
                self.fail("no message received on " + str(key))
            self.assertEquals(s, msg.data)

            os.kill(popen.pid, signal.SIGKILL)
示例#47
0
# Interface status report: name -> is-up, per psutil.
if_status = {x: y.isup for x, y in psutil.net_if_stats().items()}
# wlan0 is blacklisted from the generic listing because it gets its own
# section below with signal strength.
if_blacklist = [
    "lo", "tun0", "wlan0"
]

for iface, status in if_status.items():
    if iface in if_blacklist: continue
    out += f" - {f_light_gray if status else f_dark_gray}{iface}: {if_addresses[iface] if status else 'offline'}{f_dark_gray}\n"

# Dedicated wlan0 section with RF signal strength parsed from iwconfig.
try:
    if if_status['wlan0']:
        stdout, stderr = Popen(["iwconfig", "wlan0"],
                               stdin=PIPE,
                               stdout=PIPE,
                               stderr=PIPE).communicate(timeout=0.1)
        # Fixed-position slice of iwconfig's "Signal level=..." line.
        # NOTE(review): brittle against iwconfig output-format changes.
        power = int(stdout.split(b"\n")[5][43:-6])
        # BUGFIX: this line used if_addresses[iface], i.e. whatever
        # interface the loop above happened to end on — not wlan0.
        out += f" - {f_light_gray}wlan0: {if_addresses['wlan0']} @ RF strength {wifi_strength(power)}{f_dark_gray}\n"
    else:
        out += f" - {f_dark_gray}wlan0: offline{f_dark_gray}\n"
except KeyError:
    # no wlan0, w/e
    pass

# ----------

out += f"\n{f_white}System Monitors{f_dark_gray}\n"

out += f" - {f_light_gray}CPU Frequency: {f_mild_blue}{psutil.cpu_freq().current} MHz{f_dark_gray}\n"
out += f" - {f_light_gray}CPU Usage: "
out += " ".join([get_cpu_text(x) for x in psutil.cpu_percent(percpu=True)])
out += f"{f_dark_gray}\n"
示例#48
0
def Pre_Process(filename):
    """Return the 'start'/'end' lines of *filename*, minus allocation noise.

    Runs ``grep 'start\\|end' <filename>`` piped into ``grep -v 'Allocation'``
    and returns the surviving lines as a list of strings; because the grep
    output ends in a newline, the final list element is ''.
    """
    matcher = Popen(split("grep 'start\\|end' " + filename),
                    stdout=PIPE,
                    stderr=PIPE,
                    universal_newlines=True)
    cleaner = Popen(split("grep -v 'Allocation'"),
                    stdin=matcher.stdout,
                    stdout=PIPE,
                    stderr=PIPE,
                    universal_newlines=True)
    # Close our copy of the pipe so the first grep sees SIGPIPE/EOF
    # correctly if the second one exits early.
    matcher.stdout.close()
    # BUGFIX: the original called .split('\n') on the *bytes* returned by
    # communicate(); universal_newlines=True makes the output str.
    return cleaner.communicate()[0].split('\n')


# if len(sys.argv) == 1:
#     pass
# elif len(sys.argv) == 3:
#     start_datetime = datetime.strptime(sys.argv[1] + ' ' + sys.argv[2], '%Y-%m-%d %H:%M:%S.%f')
# elif len(sys.argv) == 5:
#     start_datetime = datetime.strptime(sys.argv[1] + ' ' + sys.argv[2], '%Y-%m-%d %H:%M:%S.%f')
#     end_datetime = datetime.strptime(sys.argv[3] + ' ' + sys.argv[4], '%Y-%m-%d %H:%M:%S.%f')
# else:
#     print "Incorrect Number of Arguments. Please use one of the following"
#     print "python api_statistics.py"
#     print "python api_statistics.py <Start Date YYYY-MM-DD> <Start Time HH:MM:SS>"
#     print "python api_statistics.py <Start Date YYYY-MM-DD> <Start Time HH:MM:SS> <End Date YYYY-MM-DD> <End Time HH:MM:SS>"
# print "Search from:  %s to %s" % (str(start_datetime), str(end_datetime))

# compute_CSM_Master_stats('csm_resources.txt')
# compute_CSM_Master_stats('csm_master_LL.log')
# compute_CSM_Master_stats('csm_master.log.old.5')

# start_time = time.time()
# compute_CSM_Master_stats('csm_master_OR.log.old.1')
# elapsed_time = time.time() - start_time
# print elapsed_time
# compute_CSM_Master_stats('csm_master.log.old.2')
# elapsed_time = time.time() - elapsed_time
# print elapsed_time
# compute_CSM_Master_stats('csm_master.log.old.3')
# elapsed_time = time.time() - elapsed_time
# print elapsed_time
# compute_CSM_Master_stats('csm_master.log.old.4')
# elapsed_time = time.time() - elapsed_time
# print elapsed_time
# compute_CSM_Master_stats('csm_master.log.old.5')
# elapsed_time = time.time() - elapsed_time
# print elapsed_time

# start_time = time.time()
# compute_CSM_Master_stats('csm_master_LL.log.old.1')
# elapsed_time = time.time() - start_time
# print elapsed_time
# compute_CSM_Master_stats('csm_master_LL.log.old.2')
# elapsed_time = time.time() - elapsed_time
# print elapsed_time
# compute_CSM_Master_stats('csm_master_LL.log.old.3')
# elapsed_time = time.time() - elapsed_time
# print elapsed_time
# compute_CSM_Master_stats('csm_master_LL.log.old.4')
# elapsed_time = time.time() - elapsed_time
# print elapsed_time
# compute_CSM_Master_stats('csm_master_LL.log.old.5')
# elapsed_time = time.time() - elapsed_time
# print elapsed_time
示例#49
0
    hop.daemon = True
    hop.start()

    # Start sniffing
    sniff_thread = Thread(target=sniff_dot11, args=(wj_iface, ))
    sniff_thread.daemon = True
    sniff_thread.start()

    # Main loop.
    try:
        while 1:
            os.system("clear")
            print "Jamming devices: "
            if os.path.isfile('/tmp/wifiphisher-jammer.tmp'):
                proc = check_output(['cat', '/tmp/wifiphisher-jammer.tmp'])
                lines = proc.split('\n')
                lines += ["\n"] * (5 - len(lines))
            else:
                lines = ["\n"] * 5
            for l in lines:
                print l
            print "DHCP Leases: "
            if os.path.isfile('/var/lib/misc/dnsmasq.leases'):
                proc = check_output(['cat', '/var/lib/misc/dnsmasq.leases'])
                lines = proc.split('\n')
                lines += ["\n"] * (5 - len(lines))
            else:
                lines = ["\n"] * 5
            for l in lines:
                print l
            print "HTTP requests: "
示例#50
0
            shutil.rmtree(os.path.join(dist_dir, 'etc'))
        shutil.copytree(os.path.join(gtk_dir, 'etc'),
                        os.path.join(dist_dir, 'etc'))

        from subprocess import Popen, PIPE
        query_loaders = Popen(os.path.join(gtk_dir, 'bin',
                                           'gdk-pixbuf-query-loaders'),
                              stdout=PIPE).stdout.read()
        query_loaders = query_loaders.replace(
            gtk_dir.replace(os.sep, '/') + '/', '')

        loaders_path = os.path.join(dist_dir, 'etc', 'gtk-2.0',
                                    'gdk-pixbuf.loaders')
        with open(loaders_path, 'w') as loaders:
            loaders.writelines(
                [line + "\n" for line in query_loaders.split(os.linesep)])

        if os.path.isdir(os.path.join(dist_dir, 'lib')):
            shutil.rmtree(os.path.join(dist_dir, 'lib'))
        shutil.copytree(os.path.join(gtk_dir, 'lib'),
                        os.path.join(dist_dir, 'lib'))

        for file in glob.iglob(os.path.join(gtk_dir, 'bin', '*.dll')):
            if os.path.isfile(file):
                shutil.copy(file, dist_dir)

        if os.path.isdir(os.path.join(dist_dir, 'share', 'locale')):
            shutil.rmtree(os.path.join(dist_dir, 'share', 'locale'))
        shutil.copytree(os.path.join(gtk_dir, 'share', 'locale'),
                        os.path.join(dist_dir, 'share', 'locale'))
示例#51
0
# Serial no.
# Abort early when adb reports an error (no device attached / unauthorized).
if 'error' in Popen([ADB, 'get-state'], stdout=PIPE,
                    stderr=STDOUT).stdout.read().decode('UTF-8'):
    sys.exit(" No Android device found !")
else:
    ADB_SN = Popen([ADB, 'get-serialno'], stdout=PIPE,
                   stderr=STDOUT).stdout.read().decode('UTF-8')
    print(" ADB serial: " + ADB_SN)
    REPORT.append(["ADB serial", ADB_SN])
''' Build properties '''
# Read the whole build.prop over adb (SUC is the su/shell prefix); stderr
# is merged into stdout, so failures would appear inside BUILDPROP text.
BUILDPROP = Popen([ADB, 'shell', SUC, 'cat', '/system/build.prop'],
                  stdout=PIPE,
                  stderr=STDOUT).stdout.read().decode('UTF-8')
# Manufacturer & Model
# NOTE(review): if these property lines are absent, DEVICE_MANUF /
# DEVICE_MODEL stay unbound and the print below raises NameError — confirm.
for manuf in BUILDPROP.split('\n'):
    if 'ro.product.manufacturer' in manuf:
        DEVICE_MANUF = manuf.strip().split('=')[1]
for model in BUILDPROP.split('\n'):
    if 'ro.product.model' in model:
        DEVICE_MODEL = model.strip().split('=')[1]
print(" Device model: %s %s" % (DEVICE_MANUF, DEVICE_MODEL))
REPORT.append(["Manufacturer", DEVICE_MANUF])
REPORT.append(["Model", DEVICE_MODEL])

# IMEI
IMEI = Popen([ADB, 'shell', SUC, 'dumpsys', 'iphonesubinfo'],
             stdout=PIPE,
             stderr=STDOUT).stdout.read().decode('UTF-8')
try:
    print(" IMEI: " + IMEI)
示例#52
0
#!/usr/bin/python
from time import sleep
from subprocess import Popen, PIPE, call
import syslog

syslog.syslog('Starting /home/pi/watchdog.py')
count = 0
# Poll omxplayer's CPU usage forever; if the process is missing or its CPU
# load stays outside the 10-50% band for more than 5 consecutive samples
# (~10 s at 2 s per sample), restart the videoloop service.
while True:
    ps = Popen(['ps', '-C', 'omxplayer.bin', '-o', '%cpu'],
               stdout=PIPE).communicate()[0]
    # Line 0 is the %CPU header; line 1 is the first matching process
    # ('' when omxplayer.bin is not running).
    # NOTE(review): communicate() returns bytes on Python 3 — this
    # split('\n') assumes Python 2 (shebang is /usr/bin/python).
    a = ps.split('\n')[1]
    if (a == '') or (float(a) < 10) or (float(a) > 50):
        #print ('Out of range')
        count = count + 1
    else:
        #print ('Fine')
        count = 0
    if count > 5:
        syslog.syslog('Restarting service videoloop')
        count = 0
        call(['service', 'videoloop', 'restart'])
    sleep(2)
示例#53
0
def get_dependencies_version(dico=False):
    """
    Check versions of TADbit and all dependencies, as well and retieves system
    info. May be used to ensure reproductibility.

    :param False dico: return the raw {name: version} dictionary instead of
       a formatted multi-line string
    :returns: string with description of versions installed
    """
    versions = {'  TADbit': __version__ + '\n\n'}
    # IMP moved its API between releases, hence the nested try/except.
    try:
        import IMP
        try:
            versions['IMP'] = IMP.kernel.get_module_version()
            IMP.kernel.random_number_generator.seed(1)
            seed = IMP.kernel.random_number_generator()
        except AttributeError:
            versions['IMP'] = IMP.get_module_version()
            IMP.random_number_generator.seed(1)
            seed = IMP.random_number_generator()
        # record the generator's first draw so runs can be reproduced
        versions['IMP'] += ' (random seed indexed at 1 = %s)' % (seed)
    except ImportError:
        versions['IMP'] = 'Not found'
    # Pure-Python dependencies: just read their __version__.
    for lib in ('scipy', 'numpy', 'matplotlib'):
        try:
            versions[lib] = __import__(lib).__version__
        except ImportError:
            versions[lib] = 'Not found'
    from subprocess import Popen, PIPE
    # External tools: best effort, 'Not found' on any failure.
    # (Bare `except:` narrowed to `except Exception:` so Ctrl-C still works.)
    try:
        mcl, _ = Popen(['mcl', '--version'], stdout=PIPE,
                       stderr=PIPE).communicate()
        versions['MCL'] = mcl.split()[1]
    except Exception:
        versions['MCL'] = 'Not found'
    try:
        chi, err = Popen(['chimera', '--version'], stdout=PIPE,
                         stderr=PIPE).communicate()
        versions['Chimera'] = chi.strip()
    except Exception:
        versions['Chimera'] = 'Not found'
    # (a second, byte-identical Chimera probe was removed here)
    try:
        uname, err = Popen(['uname', '-rom'], stdout=PIPE,
                           stderr=PIPE).communicate()
        versions[' Machine'] = uname
    except Exception:
        versions[' Machine'] = 'Not found'

    if dico:
        return versions
    return '\n'.join(
        ['%15s : %s' % (k, versions[k]) for k in sorted(versions.keys())])
    def error(self, pkg, errormsg):
        """Handle a dpkg error report for *pkg*.

        Logs the error; then, only when the NonInteractive/DebugBrokenScripts
        config option is set, identifies which maintainer script failed from
        the wording of *errormsg* and re-runs it under a tracer (sh -ex,
        perl -d, or the debconf frontend) to capture debugging output.
        """
        logging.error("got a error from dpkg for pkg: '%s': '%s'" % (pkg, errormsg))
        # check if re-run of maintainer script is requested
        if not self.config.getWithDefault(
            "NonInteractive","DebugBrokenScripts", False):
            return
        # re-run maintainer script with sh -x/perl debug to get a better 
        # idea what went wrong
        # 
        # FIXME: this is just a approximation for now, we also need
        #        to pass:
        #        - a version after remove (if upgrade to new version)
        #
        #        not everything is a shell or perl script
        #
        # if the new preinst fails, its not yet in /var/lib/dpkg/info
        # so this is inaccurate as well
        environ = copy.copy(os.environ)
        environ["PYCENTRAL"] = "debug"
        cmd = []

        # find what maintainer script failed
        if "post-installation" in errormsg:
            prefix = "/var/lib/dpkg/info/"
            name = "postinst"
            argument = "configure"
            maintainer_script = "%s/%s.%s" % (prefix, pkg, name)
        elif "pre-installation" in errormsg:
            prefix = "/var/lib/dpkg/tmp.ci/"
            #prefix = "/var/lib/dpkg/info/"
            name = "preinst"
            argument = "install"
            maintainer_script = "%s/%s" % (prefix, name)
        elif "pre-removal" in errormsg:
            prefix = "/var/lib/dpkg/info/"
            name = "prerm"
            argument = "remove"
            maintainer_script = "%s/%s.%s" % (prefix, pkg, name)
        elif "post-removal" in errormsg:
            prefix = "/var/lib/dpkg/info/"
            name = "postrm"
            argument = "remove"
            maintainer_script = "%s/%s.%s" % (prefix, pkg, name)
        else:
            print("UNKNOWN (trigger?) dpkg/script failure for %s (%s) " % (pkg, errormsg))
            return

        # find out about the interpreter
        if not os.path.exists(maintainer_script):
            logging.error("can not find failed maintainer script '%s' " % maintainer_script)
            return
        with open(maintainer_script) as f:
            interp = f.readline()[2:].strip().split()[0]
        if ("bash" in interp) or ("/bin/sh" in interp):
            debug_opts = ["-ex"]
        elif ("perl" in interp):
            debug_opts = ["-d"]
            environ["PERLDB_OPTS"] = "AutoTrace NonStop"
        else:
            logging.warning("unknown interpreter: '%s'" % interp)
            # BUGFIX: debug_opts was previously left unbound on this path,
            # so cmd.extend(debug_opts) below raised NameError; re-run the
            # script with no debug switches instead.
            debug_opts = []

        # check if debconf is used and fiddle a bit more if it is
        with open(maintainer_script) as f:
            maintainer_script_text = f.read()
        if ". /usr/share/debconf/confmodule" in maintainer_script_text:
            environ["DEBCONF_DEBUG"] = "developer"
            environ["DEBIAN_HAS_FRONTEND"] = "1"
            interp = "/usr/share/debconf/frontend"
            debug_opts = ["sh","-ex"]

        # build command
        cmd.append(interp)
        cmd.extend(debug_opts)
        cmd.append(maintainer_script)
        cmd.append(argument)

        # check if we need to pass a version
        if name == "postinst":
            version = Popen("dpkg-query -s %s|grep ^Config-Version" % pkg,
                            shell=True, stdout=PIPE,
                            universal_newlines=True).communicate()[0]
            if version:
                cmd.append(version.split(":",1)[1].strip())
        elif name == "preinst":
            pkg = os.path.basename(pkg)
            pkg = pkg.split("_")[0]
            version = Popen("dpkg-query -s %s|grep ^Version" % pkg,
                            shell=True, stdout=PIPE,
                            universal_newlines=True).communicate()[0]
            if version:
                cmd.append(version.split(":",1)[1].strip())

        logging.debug("re-running '%s' (%s)" % (cmd, environ))
        ret = subprocess.call(cmd, env=environ)
        logging.debug("%s script returned: %s" % (name,ret))
示例#55
0
def _find_and_remove(cmd):
    """Run a ``find ... -exec rm`` command tuple and log each output line.

    ``cmd`` is passed to Popen as an argument sequence (no shell), so paths
    with spaces are safe.
    """
    output = Popen(cmd, stdout=PIPE).communicate()[0]
    for line in output.split("\n"):
        log.debug(line)


def gc(test_result=True):
    """Site-wide garbage collections.

    Queues celery tasks that delete stale activity logs, unverified
    contributions and anonymous collections; prunes share-count rows for
    unknown services; and removes expired temporary files from disk.
    """

    days_ago = lambda days: datetime.today() - timedelta(days=days)

    log.debug('Collecting data to delete')

    # Activity-log entries older than 90 days, except the kinds we must keep.
    logs = (ActivityLog.objects.filter(created__lt=days_ago(90)).exclude(
        action__in=amo.LOG_KEEP).values_list('id', flat=True))

    # Paypal only keeps retrying to verify transactions for up to 3 days. If we
    # still have an unverified transaction after 6 days, we might as well get
    # rid of it.
    contributions_to_delete = (Contribution.objects.filter(
        transaction_id__isnull=True,
        created__lt=days_ago(6)).values_list('id', flat=True))

    collections_to_delete = (Collection.objects.filter(
        created__lt=days_ago(2),
        type=amo.COLLECTION_ANONYMOUS).values_list('id', flat=True))

    # Fan the deletions out to background tasks in chunks of 100 ids.
    for chunk in chunked(logs, 100):
        tasks.delete_logs.delay(chunk)
    for chunk in chunked(contributions_to_delete, 100):
        tasks.delete_stale_contributions.delay(chunk)
    for chunk in chunked(collections_to_delete, 100):
        tasks.delete_anonymous_collections.delay(chunk)
    # Incomplete addons cannot be deleted here because when an addon is
    # rejected during a review it is marked as incomplete. See bug 670295.

    log.debug('Cleaning up sharing services.')
    service_names = [s.shortname for s in SERVICES_LIST]
    # Collect the localized service names for every configured language so
    # rows keyed by a translated shortname are not deleted by mistake.
    original_language = translation.get_language()
    for language in settings.LANGUAGES:
        translation.activate(language)
        service_names.extend(
            [unicode(s.shortname) for s in LOCAL_SERVICES_LIST])
    translation.activate(original_language)

    AddonShareCount.objects.exclude(service__in=set(service_names)).delete()

    log.debug('Cleaning up test results extraction cache.')
    # Guard against a mis-configured MEDIA_ROOT of '/' wiping the filesystem.
    if settings.MEDIA_ROOT and settings.MEDIA_ROOT != '/':
        _find_and_remove(
            ('find', settings.MEDIA_ROOT, '-maxdepth', '1', '-name',
             'validate-*', '-mtime', '+7', '-type', 'd', '-exec', 'rm',
             '-rf', "{}", ';'))
    else:
        log.warning('MEDIA_ROOT not defined.')

    if settings.PACKAGER_PATH:
        log.debug('Cleaning up old packaged add-ons.')
        _find_and_remove(
            ('find', settings.PACKAGER_PATH, '-name', '*.zip', '-mtime',
             '+1', '-type', 'f', '-exec', 'rm', '{}', ';'))

    if user_media_path('collection_icons'):
        log.debug('Cleaning up uncompressed icons.')
        _find_and_remove(
            ('find', user_media_path('collection_icons'), '-name',
             '*__unconverted', '-mtime', '+1', '-type', 'f', '-exec', 'rm',
             '{}', ';'))

    USERPICS_PATH = user_media_path('userpics')
    if USERPICS_PATH:
        log.debug('Cleaning up uncompressed userpics.')
        _find_and_remove(
            ('find', USERPICS_PATH, '-name', '*__unconverted', '-mtime',
             '+1', '-type', 'f', '-exec', 'rm', '{}', ';'))
    parser.add_option('--commit',
                      action="store",
                      type='string',
                      dest='commit',
                      default=None,
                      help='Git commit hash.')
    (options, args) = parser.parse_args()

    python_installdir = 'lib\\site-packages\\'
    os.environ['Path'] = os.path.join(os.path.abspath(options.installdir),
                                      'bin') + ';' + os.environ['PATH']
    qt_version = Popen(['openrave-config', '--qt-version'],
                       stdout=PIPE).communicate()[0].strip()
    version = Popen(['openrave-config', '--version'],
                    stdout=PIPE).communicate()[0].strip()
    soversion = '.'.join(version.split('.')[0:2])
    _soversion = '_'.join(version.split('.')[0:2])
    openravepy_dir = os.path.abspath(
        get_python_lib(1, prefix=options.installdir))
    openravepy_reldir = os.path.relpath(openravepy_dir,
                                        os.path.abspath(options.installdir))
    sys.path.insert(0, openravepy_dir)
    openravepy = __import__('openravepy')
    openravepy.examples = __import__('openravepy.examples',
                                     fromlist=['openravepy'])
    assert (openravepy.__version__ == version)
    args = dict()
    args['openrave_version'] = openravepy.__version__
    args['openrave_version_full'] = openravepy.__version__
    args['openrave_soversion'] = soversion
    args['openrave_commit'] = ''
示例#57
0
	def BaseProp(self):
		"""Show the infobase properties dialog and write back any changes.

		Reads the selected tree row, fetches the infobase properties with
		``rac infobase info``, fills the dialog, and on accept applies the
		edits with ``rac infobase update``.  When the server reports missing
		cluster or infobase rights, prompts for credentials and retries.
		"""
		self.dlgBaseProp = QtWidgets.QDialog()
		uic.loadUi('ui/base_prop.ui',self.dlgBaseProp)
		# Tree columns: 0=name, 1=infobase id, 2=host, 3=port, 4=cluster id.
		name = self.tree.currentItem().text(0)
		infobase = self.tree.currentItem().text(1)
		host = self.tree.currentItem().text(2)
		port = self.tree.currentItem().text(3)
		cluster = self.tree.currentItem().text(4)
		# Retry loop: repeats after re-asking credentials on access errors.
		repeat = True
		while(repeat):
			auth_cluster = self.getSessionAuth(cluster,'cluster')
			auth_base = self.getSessionAuth(infobase,'infobase')
			self.Mess('info','Получение свойств базы '+name+' на '+host+':'+port)
			app.processEvents()
			# Query the current infobase properties via the rac console tool.
			cmd = CMD_PREFIX + ' ' + RAC + ' infobase info --cluster=' + cluster + ' --infobase=' + infobase + auth_cluster + auth_base + host + ':' + port
			if DEBUG: print (cmd)
			ret,err = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
			ret = ret.decode('utf-8')
			err = err.decode('utf-8')
			if len(err) > 0:
				if DEBUG: print('Есть ошибки:',err)
				# On a rights error, ask for credentials and retry; if the
				# user cancels the login dialog, stop retrying.
				if ERR_CLUSTER_RIGHTS in err:
					self.Mess('warn',ERR_CLUSTER_RIGHTS + ' ' + host + ':' + port)
					if not self.clusterLogPass(cluster,host,port): repeat = False
				if ERR_BASE_RIGHTS in err:
					self.Mess('warn',ERR_BASE_RIGHTS + ' '+name+' на '+host+':'+port)
					if not self.BaseLogPass(name,host,port): repeat = False
			else:
				repeat = False
				if DEBUG: print('ret:',ret)

				# Parse the "key : value" lines of rac output into a dict.
				ar = ret.split('\n')
				BaseInfo = {}
				for line in ar:
					if line.find(' : ') > -1:
						key = (line.split(' : '))[0].strip()
						value = (line.split(' : '))[1].strip()
						BaseInfo[key]=value
				self.MessOff()
				# Populate the dialog widgets from the parsed properties.
				# NOTE(review): '\ ' below is a literal backslash+space --
				# presumably how rac escapes spaces; confirm.
				self.dlgBaseProp.eName.setText(BaseInfo['name'])
				self.dlgBaseProp.eDescr.setText(BaseInfo['descr'].replace('"','').replace('\ ',' '))
				self.dlgBaseProp.eDBServer.setText(BaseInfo['db-server'])
				for dpt in DB_TYPES: self.dlgBaseProp.cbDBType.addItem(dpt)
				self.dlgBaseProp.cbDBType.setCurrentText(BaseInfo['dbms'])
				self.dlgBaseProp.eDBName.setText(BaseInfo['db-name'])
				self.dlgBaseProp.eDBUser.setText(BaseInfo['db-user'])
				# Check-state 2 is Qt.Checked, 0 is Qt.Unchecked.
				if BaseInfo['license-distribution'] == 'allow': self.dlgBaseProp.cbLic.setCheckState(2)
				else: self.dlgBaseProp.cbLic.setCheckState(0)
				if BaseInfo['sessions-deny'] == 'on': self.dlgBaseProp.gpBlock.setChecked(1)
				else: self.dlgBaseProp.gpBlock.setChecked(0)
				# rac reports ISO dates with a 'T' separator; show a space.
				self.dlgBaseProp.eBlockStart.setText(BaseInfo['denied-from'].replace('T',' '))
				self.dlgBaseProp.eBlockEnd.setText(BaseInfo['denied-to'].replace('T',' '))
				self.dlgBaseProp.eBlockMess.setText(BaseInfo['denied-message'].replace('"','').replace('\ ',' '))
				self.dlgBaseProp.eBlockCode.setText(BaseInfo['permission-code'].replace('"',''))
				if BaseInfo['scheduled-jobs-deny'] == 'on': self.dlgBaseProp.cbRegl.setCheckState(2)
				else: self.dlgBaseProp.cbRegl.setCheckState(0)

				self.dlgBaseProp.setWindowTitle('Свойства базы '+name+' на '+host+':'+port)
				if self.dlgBaseProp.exec():
					self.Mess('info','Запись свойств базы '+name+' на '+host+':'+port)
					app.processEvents()
					# Rebuild the rac update options from the dialog fields,
					# converting the UI representations back ('T' dates,
					# escaped spaces).
					Descr =    " --descr='" + self.dlgBaseProp.eDescr.text().replace(' ','\ ') + "'"
					DBServer = " --db-server=" + self.dlgBaseProp.eDBServer.text()
					DBType =   " --dbms=" + self.dlgBaseProp.cbDBType.currentText()
					DBName =   " --db-name='" + self.dlgBaseProp.eDBName.text() + "'"
					# NOTE(review): the "******" literal looks like a redacted
					# " --db-user='" option prefix -- confirm against VCS.
					DBUser =   "******" + self.dlgBaseProp.eDBUser.text() + "'"
					DBPasswd = " --db-pwd='" +  self.dlgBaseProp.eDBPasswd.text() + "'"
					Lic =      " --license-distribution=deny"
					if self.dlgBaseProp.cbLic.isChecked(): Lic = " --license-distribution=allow"
					Block =    " --sessions-deny=off"
					if self.dlgBaseProp.gpBlock.isChecked(): Block = " --sessions-deny=on"
					BlockStart = " --denied-from='" + self.dlgBaseProp.eBlockStart.text().replace(' ','T') + "'"
					BlockEnd =   " --denied-to='" + self.dlgBaseProp.eBlockEnd.text().replace(' ','T') + "'"
					BlockMess =  " --denied-message='" + self.dlgBaseProp.eBlockMess.text().replace(' ','\ ') + "'"
					BlockCode =  " --permission-code='" + self.dlgBaseProp.eBlockCode.text().replace(' ','\ ') + "'"
					Regl = " --scheduled-jobs-deny=off"
					if self.dlgBaseProp.cbRegl.isChecked(): Regl = " --scheduled-jobs-deny=on"

					# Apply the changes; failures are reported but not fatal.
					cmd = CMD_PREFIX + ' ' + RAC + ' infobase update --cluster=' + cluster +' --infobase=' + infobase + auth_cluster + auth_base + Descr + DBServer + DBType + DBName + DBUser + DBPasswd + Lic + Block + BlockStart + BlockEnd + BlockMess + BlockCode + Regl + ' ' + host + ':' + port
					if DEBUG: print (cmd)
					try: ret = (subprocess.check_output(cmd, shell=True)).decode('utf-8')
					except Exception:
						if DEBUG: print(traceback.format_exc())
						if DEBUG: print("DEBUG: Невозможно записать свойства базы "+name)
						self.Mess('warn','Не записаны свойства базы '+name+' на '+host+':'+port)
					else:
						if DEBUG: print (ret)
		self.MessOff()
示例#58
0
def get_lofar_sw_ver():
    """Return the LOFAR software version as a (major, minor, patch) tuple.

    Runs ``swlevel -V`` and parses the trailing ``maj_min_pat`` token of its
    output (the text after the last '-').
    """
    swlevel_proc = Popen([LOFARBINPATH + 'swlevel', '-V'], stdout=PIPE)
    swlevel_out = swlevel_proc.communicate()[0]
    version_token = swlevel_out.split('-')[-1]
    major, minor, patch = [int(tok.strip()) for tok in version_token.split('_')]
    return major, minor, patch
示例#59
0
def get_dependencies_version(dico=False):
    """
    Check versions of TADbit and all dependencies, as well and retrieves system
    info. May be used to ensure reproducibility.

    :param False dico: return the raw version dictionary instead of a
       formatted string

    :returns: string with description of versions installed (or a dict when
       ``dico`` is True)
    """
    # Leading spaces in keys control the sort order of the report.
    versions = {'  TADbit': __version__ + '\n\n'}
    try:
        import IMP
        try:
            versions['IMP'] = IMP.get_module_version()
            IMP.random_number_generator.seed(1)
            seed = IMP.random_number_generator()
        except AttributeError:
            # Older IMP releases expose everything under IMP.kernel.
            versions['IMP'] = IMP.kernel.get_module_version()
            IMP.kernel.random_number_generator.seed(1)
            seed = IMP.kernel.random_number_generator()
        versions['IMP'] += ' (random seed indexed at 1 = %s)' % (seed)
    except ImportError:
        # IMP is optional; deliberately omitted from the report when absent.
        pass
    try:
        import scipy
        versions['scipy'] = scipy.__version__
    except ImportError:
        versions['scipy'] = 'Not found'
    try:
        from gem import commands
        versions['gemtools'] = commands.__VERSION__
    except Exception:  # narrowed from bare except: don't trap SystemExit etc.
        versions['gemtools'] = 'Not found'
    try:
        from gem import executables
        out = Popen(executables['gem-mapper'],
                    shell=True,
                    stdout=PIPE,
                    stderr=PIPE,
                    universal_newlines=True).communicate()
        # gem-mapper prints "... - build NNN" on stderr.
        versions['gem-mapper'] = out[1].split(' - ')[0].split('build ')[1]
    except Exception:  # narrowed from bare except
        versions['gem-mapper'] = 'Not found'
    try:
        import numpy
        versions['numpy'] = numpy.__version__
    except ImportError:
        versions['numpy'] = 'Not found'
    try:
        import matplotlib
        versions['matplotlib'] = matplotlib.__version__
    except ImportError:
        versions['matplotlib'] = 'Not found'
    try:
        mcl, _ = Popen(['mcl', '--version'],
                       stdout=PIPE,
                       stderr=PIPE,
                       universal_newlines=True).communicate()
        versions['MCL'] = mcl.split()[1]
    except Exception:  # narrowed from bare except
        versions['MCL'] = 'Not found'
    try:
        uname, err = Popen(['uname', '-rom'],
                           stdout=PIPE,
                           stderr=PIPE,
                           universal_newlines=True).communicate()
        versions[' Machine'] = uname
    except Exception:  # narrowed from bare except
        versions[' Machine'] = 'Not found'

    if dico:
        return versions
    else:
        return '\n'.join(
            ['%15s : %s' % (k, versions[k]) for k in sorted(versions.keys())])
示例#60
0
    def collect(self):
        """Run the configured IPMI ``sensor`` command and publish readings.

        Returns False when the configured binary (or sudo, when enabled) is
        not executable; True once all parseable sensor lines are published.
        Lines that fail to parse are skipped silently.
        """
        use_sudo = str_to_bool(self.config['use_sudo'])
        if ((not os.access(self.config['bin'], os.X_OK) or
             (use_sudo and not os.access(self.config['sudo_cmd'], os.X_OK)))):
            return False

        command = [self.config['bin'], 'sensor']

        # Only prepend sudo when requested and we are not already root.
        if use_sudo and getpass.getuser() != 'root':
            command.insert(0, self.config['sudo_cmd'])

        # [:-1] strips the trailing newline from the command output.
        p = Popen(command, stdout=PIPE).communicate()[0][:-1]

        for v in p.split("\n"):
            data = v.split("|")
            try:
                # Complex keys are fun!
                metric_name = data[0].strip()
                metric_name = metric_name.replace(".", "_")
                metric_name = metric_name.replace(" ",
                                                  self.config['delimiter'])
                metrics = []

                # Each sensor line is a column seperated by a | with the
                # following descriptions:
                # 1. Sensor ID
                # 2. Sensor Reading
                # 3. Units
                # 4. Status
                # 5. Lower Non-Recoverable
                # 6. Lower Critical
                # 7. Lower Non-Critical
                # 8. Upper Non-Critical
                # 9. Upper Critical
                # 10. Upper Non-Recoverable

                if not self.config['thresholds']:
                    metrics.append((metric_name, self.parse_value(data[1])))
                else:
                    metrics.append(
                        (metric_name + ".Reading", self.parse_value(data[1])))
                    metrics.append((metric_name + ".Lower.NonRecoverable",
                                    self.parse_value(data[4])))
                    metrics.append((metric_name + ".Lower.Critical",
                                    self.parse_value(data[5])))
                    metrics.append((metric_name + ".Lower.NonCritical",
                                    self.parse_value(data[6])))
                    metrics.append((metric_name + ".Upper.NonCritical",
                                    self.parse_value(data[7])))
                    metrics.append((metric_name + ".Upper.Critical",
                                    self.parse_value(data[8])))
                    metrics.append((metric_name + ".Upper.NonRecoverable",
                                    self.parse_value(data[9])))

                # Plain loop for the side effect -- a list comprehension here
                # would build a throwaway list.
                for metric, value in metrics:
                    if value is not None:
                        self.publish(metric, value)

            except ValueError:
                # Non-numeric reading; skip the line.
                continue
            except IndexError:
                # Short line (missing threshold columns); skip the line.
                continue

        return True