Ejemplo n.º 1
0
def revoke_access(user, ip, cnt, op_user, is_admin):
    """Remove the firewall rule matching (ip, cnt) for *user*.

    Only an admin, or the user acting on himself, may revoke access;
    otherwise an auth-violation Exception is raised.  The matching rule
    is deleted on every delegated firewall first, then from the local
    iptables INPUT chain.  Asserts on any failing shell command.
    """
    # We allow an admin to operate on everyone, or a user on himself.
    if not (is_admin or user == op_user):
        raise Exception("auth violation")
    users = get_users()
    rules, aips = get_fw_rules(users)
    assert "." in ip, "illegal ip %s" % ip
    user_rules = rules[user]
    # print() form works under both Python 2 and 3 for a single argument.
    print("walking rules for user %s" % user)

    # The original shadowed the rules mapping with the loop variable
    # `r`; renamed here for clarity.
    for rule in user_rules:
        if rule["source"] == ip and str(rule["cnt"]) == str(cnt):

            # Delegated erase: forward the delete to each remote firewall.
            for dfw in DELEGATED_FIREWALLS:
                dcmd = "del "
                dcmd += " --user=%s" % escapeshellarg(user)
                dcmd += " %s" % escapeshellarg(rule["source"])
                fcmd = "ssh " + dfw["ssh"] + " " + escapeshellarg(dfw["cmd"] % dcmd)
                print(fcmd)
                st, op = gso(fcmd)
                assert st == 0, "%s => %s" % (fcmd, op)

            # Local erase: delete by rule number in the INPUT chain.
            # NOTE(review): deleting more than one numbered rule shifts
            # later rule counts — confirm at most one rule can match.
            cmd = "sudo iptables -DINPUT %s" % rule["cnt"]
            st, op = gso(cmd)
            assert st == 0, "%s => %s" % (cmd, op)
            print(cmd)
Ejemplo n.º 2
0
def revoke_access(user, ip, cnt, op_user, is_admin):
    """Remove the firewall rule matching (ip, cnt) for *user*.

    Only an admin, or the user acting on himself, may revoke access;
    otherwise an auth-violation Exception is raised.  The matching rule
    is deleted on every delegated firewall, then from the local
    iptables INPUT chain.  (Python 2: uses print statements.)
    """
    # we allow admin to op on all, or a user to operate on himself
    if not (is_admin or user == op_user): raise Exception('auth violation')
    users = get_users()
    r, aips = get_fw_rules(users)
    assert '.' in ip, "illegal ip %s" % ip
    ur = r[user]
    print 'walking rules for user %s' % user

    # NOTE: the loop variable r shadows the rules mapping above; safe
    # only because ur was extracted first.
    for r in ur:
        #print 'comparing %s with ip %s , cnt %s'%(r,ip,cnt)
        if r['source'] == ip and str(r['cnt']) == str(cnt):

            # delegated erase: forward the delete to each remote firewall
            for dfw in DELEGATED_FIREWALLS:
                dcmd = 'del '
                dcmd += ' --user=%s' % escapeshellarg(user)
                dcmd += ' %s' % escapeshellarg(r['source'])
                fcmd = 'ssh ' + dfw['ssh'] + ' ' + escapeshellarg(
                    dfw['cmd'] % dcmd)
                print fcmd
                st, op = gso(fcmd)
                assert st == 0, "%s => %s" % (fcmd, op)

            # local erase: delete by rule number in the INPUT chain
            cmd = 'sudo iptables -DINPUT %s' % r['cnt']
            st, op = gso(cmd)
            assert st == 0
            print cmd

    pass
Ejemplo n.º 3
0
def pack(comment):
    """Archive the run's output files into a folder named YYYYMMDD-<comment>."""
    # Destination folder, e.g. 20010101-comment.
    stamp = time.strftime('%Y%m%d', time.localtime(time.time()))
    des_folder = '%s-%s' % (stamp, comment)
    # Recreate the folder from scratch, then copy binaries and results in.
    for template in ('rm -rf %s',
                     'mkdir -p %s/orbit_results',
                     'cp {orbit,orbit.F} ./%s',
                     'cp {*.plt,orbit.out,configuration.py} ./%s/orbit_results'):
        gso(template % des_folder)
    # Drop leftover job scratch files.
    gso('rm -rf q63887*')
Ejemplo n.º 4
0
 def upload_arp_table(self):
     """Upload this machine's ARP table to the server.

     Dumps ``arp -an`` into a temp file, uploads it, and removes the
     file on success.  Raises UploadException when the upload fails.
     """
     arp_data = run_or_die('arp -an')
     fd, file_name = tempfile.mkstemp(prefix="arp_")
     # Write through the mkstemp descriptor so it is closed exactly
     # once (the original leaked a second open() handle and closed
     # only the raw fd).
     with os.fdopen(fd, 'w') as handle:
         handle.write(arp_data)
     output = self._upload_file(file_name)
     if output == OK:
         gso('rm -f %s' % file_name)
         return OK
     # NOTE(review): the temp file is left behind on failure, matching
     # the original behavior — presumably useful for debugging.
     self.log("Error uploading")
     raise UploadException(self.options.server_url, output)
Ejemplo n.º 5
0
def kill_pro():
    """Kill any running tomcat process found via ``ps``.

    Returns 0 when the kill succeeded, 1 when it failed, and None when
    no tomcat process was found.
    """
    cmd = "ps aux|grep tomcat|grep -v grep|awk '{print $2}'"
    status, result = gso(cmd)
    if result:
        cmd_kill = "kill -9 %s" % result
        status, result = gso(cmd_kill)
        # gso() returns a zero exit status on success; the original
        # branch was inverted and reported success when kill failed.
        if status == 0:
            print(
                '<<<<<<<<<<<<<<<<<<<< tomcat process is dead! >>>>>>>>>>>>>>>>>>>>'
            )
            return 0
        else:
            print('<<<<<<<<<<<<<<<<<<<< killed abort! >>>>>>>>>>>>>>>>>>>>')
            return 1
Ejemplo n.º 6
0
 def hunt_and_explode(self):
     '''
     Find all type-5 package tarballs (*.tar.gz) under the repos
     directory and untar each one into a directory named after the
     tarball, skipping any that are already exploded.
     '''
     base_path = make_path(get_spkg_path(), "repos")
     output = ""
     if sys.platform == "cli":
         # IronPython: shell out through bash and capture via a file.
         tfn = "c:/spkg/tmp/tarballs.txt"
         cmd = 'bash -c "find %s -name \"*.tar.gz\"" > %s' % (base_path, tfn)
         #Logger.info("CMD: (%s)" % cmd)
         os.system(cmd)
         output = open(tfn).read()
     else:
         _status, output = gso('find %s -name "*.tar.gz"' % base_path)

     start_dir = get_slash_cwd()
     for full_tar_file_name in output.split('\n'):
         tmp_list = full_tar_file_name.split('/')
         tar_file_name = tmp_list[-1]
         base_name = tar_file_name.split('.tar.gz')[0]
         # Target directory sits next to the tarball, named after it.
         tar_file_dir = make_path(tmp_list[:-1] + [base_name])
         if not os.path.isdir(tar_file_dir):
             #Logger.info("Exploding %s..." % base_name)
             cmd = 'bash -c "mkdir -p %s"' % tar_file_dir
             #Logger.info("tar_file_dir: %s" % tar_file_dir)
             status = os.system(cmd)
             if status == OK:
                 # tar -m discards mtimes so extracted files look current.
                 cmd = 'bash -c "cd %s && tar -mxzf ../%s"'
                 cmd = cmd % (tar_file_dir, tar_file_name)
                 #Logger.info("Cmd: %s" % cmd)
                 if os.system(cmd) != OK:
                     msg = "could not untar %s" % (tar_file_name)
                     raise Exceptions.BadPackage(full_tar_file_name, msg)
     os.chdir(start_dir)
 def _sync_restore_data(self, package_name, target_list):
     '''
     Prior to a restore command, sends all necessary data to the remote
     machine to $SPKG_DIR/archive
     package_name -- name of package being restored
     target_list -- list of (one) name of a backup dataset
     Raises RestoreFailure when the target is ambiguous, missing on
     the server, or the rsync to the remote machine fails.
     '''
     if len(target_list) != 1:
         msg = "One and only one Restore Target can be specified (received %s)"
         raise RestoreFailure(msg % target_list)
     target = target_list[0]
     remote_dir = os.path.join(self.spkg_dir, "archive", package_name, target)
     # Make sure the destination directory exists on the remote machine.
     self.run_cmd(" mkdir -p %s" % remote_dir)
     archive_dir = os.path.join(self.server_home, "archive",
                                self.machine_name, package_name, target)
     if not os.path.isdir(archive_dir):
         reason = "{0} does not exist".format(archive_dir)
         errmsg = "Cannot restore target {0} for package {1}/machine {2}"\
                  ": {3}"
         errmsg = errmsg.format(target, package_name, self.machine_name, reason)
         self.polling_log.error(errmsg)
         raise RestoreFailure(reason, target, package_name, self.machine_name)

     # Copy the archived dataset into the remote archive directory.
     cmd = "rsync -La {0}/* {1}@{2}:{3}"
     cmd = cmd.format(archive_dir, self.username, self.ip_address,
                      remote_dir)
     self.server_log.info("Running: %s" % cmd)
     self.polling_log.debug(cmd)
     self.polling_log.info("Copying archive to remote machine...")
     status, output = gso(cmd)
     if status != OK:
         msg = "Error rsyncing restore data: %s" % output
         raise RestoreFailure(msg)
Ejemplo n.º 8
0
def dumpDTF(videoPath):
    """Run DenseTrack on *videoPath*, writing features next to the video.

    Raises an Exception containing the tool's output when DenseTrack
    exits with a non-zero status.
    """
    featuresPath = os.path.splitext(videoPath)[0] + '_features.txt'
    status, output = gso('./DenseTrack ' + videoPath + ' > ' + featuresPath)
    if status != 0:
        raise Exception("Error while getting dense trajectory features : " + output)
Ejemplo n.º 9
0
def perform_fastforward(repo,rev,push=False):
    """Fast-forward *repo* (and its configured submodules) to *rev*.

    Uses AutoMerger to merge rev into the Jenkins target branch, then
    updates each submodule listed in JENKINS_REPO_SUBMODULES to the
    commit id recorded in the superproject.  (Python 2: print
    statements.)
    """
    print 'FAST FORWARDING %s TO %s'%(repo,rev)
    am = AutoMerger()
    am.completed = []
    am.args = getargs(JENKINS_TARGET_BRANCH,rev,push)
    am.merge(repo,rev,JENKINS_TARGET_BRANCH,'_')
    assert len(am.completed)==1,"completed %s for %s"%(am.completed,repo)
    for sm,pth in JENKINS_REPO_SUBMODULES.get(repo,{}).items():
        # Ask git which commit the submodule should be at; the leading
        # '-' in `git submodule` output marks an uninitialized submodule.
        st,op = gso('cd repos/%s && git submodule | grep %s'%(repo,pth)) ; assert st==0
        cid = re.compile('^\-([0-9a-f]+) ').search(op).group(1)
        print 'UPDATING SUBMODULE %s at %s with rev %s'%(sm,pth,cid)
        ams = AutoMerger()
        ams.completed = []
        ams.args = getargs(JENKINS_TARGET_BRANCH,cid,push)
        ams.merge(sm,cid,JENKINS_TARGET_BRANCH,'_')
        assert len(ams.completed)==1,"completed %s for %s"%(ams.completed,sm)
        if ams.completed[0]['prev_rev']==ams.completed[0]['new_rev']:
            print 'NO CHANGES DETECTED IN %s'%sm
        else:
            print '%s : SUBMODULE UPDATED COMPLETE'%sm

    if am.completed[0]['prev_rev']==am.completed[0]['new_rev']:
        print 'NO CHANGES DETECTED in %s'%repo
    else:
        print 'SUCCESFULLY UPDATED: %s'%repo
Ejemplo n.º 10
0
def monitor():
    """Poll ``qstat`` until the queue is empty, then report completion.

    The polling interval depends on the global *nplot* setting.  Only
    runs under Python 2 (mirrors the original guard).
    """
    if sys.version[0] == '2':
        # The original used two independent `if`s, so the final `else`
        # overwrote snap_time = 45 whenever nplot == 2; `elif` fixes that.
        if nplot == 2:
            snap_time = 45
        elif nplot in [1, 3]:
            snap_time = 3
        else:
            snap_time = 15
        status, output = gso('qstat')
        while output != '':
            print(output)
            time.sleep(snap_time)
            status, output = gso('qstat')
            print('\n\n\n\n\n')
        print('job is done')
Ejemplo n.º 11
0
def run_or_die(cmd):
    """Run *cmd* through gso() and return its output.

    Raises CommandException when the command exits non-zero or cannot
    be executed at all.
    """
    try:
        status, output = gso(cmd)
        if status != OK:
            raise CommandException(cmd, output)
    except OSError as err:
        # The original stringified the OSError *class* (str(OSError)),
        # losing the actual error; report the caught instance instead.
        raise CommandException(cmd, str(err))
    return output
Ejemplo n.º 12
0
def dumpDTF(videoPath):
    """Run DenseTrack on *videoPath*, redirecting its features to
    <video>_features.txt; raise Exception on a non-zero exit."""
    outputPath = os.path.splitext(videoPath)[0] + '_features.txt'
    command = './DenseTrack ' + videoPath + ' > ' + outputPath
    # gso() returns (status, output); index 0 is the exit status.
    out = gso(command)

    if out[0] != 0:
        raise Exception("Error while getting dense trajectory features : " +
                        out[1])
Ejemplo n.º 13
0
 def _check_dispatcher_running(cls, uri):
     """Return True when a python process is listening on the URI's TCP port."""
     # The port is the digits after the last ':' and before any path.
     tcp_port = uri.split(':')[-1].split('/')[0]
     _status, output = gso('lsof -i tcp:%s | grep python | grep -v grep' % tcp_port)
     return bool(output)
Ejemplo n.º 14
0
 def _build_component(self, component_dict, section_name,
                      svn_user, svn_password, tmp_path, debug, prepare):
     '''Export one item from SVN and create an appropriate TAR file
     component_dict -- dictionary of item data
     section_name -- either 'libs' or 'injectors'
     svn_user -- account to use for logging in to svn
     svn_password -- password for said account
     tmp_path -- scratch directory used for the checkout
     debug -- passed through to the svn checkout helper
     prepare -- when true, always build from HEAD
     Returns (tar_file_path, version), or None when the expected
     tarball already exists.  Raises CnmServerException on failure.
     '''
     tar_file_path = component_dict.get("path", '')
     svn_url = component_dict["svn"]
     if svn_url.endswith('/'):
         svn_url = svn_url[:-1]
     base = svn_url.split('/')[-1]
     if prepare:
         version = "HEAD"
     else:
         version = component_dict.get("version", "HEAD")
     expected_file_path = os.path.join(self.server_home, "repos",
                                       section_name,
                                       "%s-%s.tar.gz" % (base, version))

     # Skip the build when the expected tarball is already in place.
     if tar_file_path == expected_file_path:
         if os.path.isfile(tar_file_path):
             msg = "No need to build %s: already exists" % base
             self.polling_log.info(msg)
             return None
     start_path = os.getcwd()
     checkout_dir = os.path.join(tmp_path, section_name, base)
     if not os.path.isdir(checkout_dir):
         os.system("mkdir -p %s" % checkout_dir)

     msg = "Checking out and building %s..." % base
     self.polling_log.info(msg)
     # The checkout may resolve HEAD to a concrete revision number.
     version = self._svn_checkout(version, svn_user, svn_password,
                        svn_url, checkout_dir, debug)
     os.chdir( checkout_dir )
     section_dir =  os.path.join(self.server_home, "repos", section_name)
     if not os.path.isdir(section_dir):
         status = os.system("mkdir -p %s" % section_dir)
         if status != OK:
             msg = "Could not create required directory: %s" % section_dir
             raise CnmServerException(msg)
         else:
             msg = "Created required directory: %s" % section_dir
             self.server_log.warning(msg)
     # Tar up the checkout from inside the checkout directory.
     tar_file_path = os.path.join(section_dir, 
                                  "%s-%s.tar.gz" % (base, version))
     cmd = "tar -czf %s *" % ( tar_file_path )
     status, output = gso(cmd)
     if status != OK:
         self.server_log.error("Command: %s" % cmd)
         self.server_log.error("Current Directory: %s" % os.getcwd())
         self.server_log.error("Output from command: %s" % output)
         msg = "Could not create tarball"
         os.chdir( start_path )
         raise CnmServerException(msg)
     os.chdir( start_path )
     return tar_file_path, version
Ejemplo n.º 15
0
def get_fw_rules(users=None, by_user=True):
    """Parse the iptables chain into structured rule records.

    Each accepted rule carries base64/JSON metadata in its comment
    ('u' = user, 's' = creation timestamp, 'n' = note).  Returns
    (rules, all_allowed): rules is a dict keyed by user (or a flat
    list when by_user is false); all_allowed lists the currently open
    'source[=>:dport]' strings.
    """
    st, op = gso('sudo iptables -tfilter -nvL %s' % IPT_CHAIN)
    assert st == 0
    if by_user:
        rt = {}
    else:
        rt = []
    all_allowed = []
    cnt = 0  # 1-based rule position within the chain
    for row in op.split('\n'):
        row = row.strip()
        res = matchre.search(row)
        rule_params = strre.search(row)
        if not rule_params: continue
        cnt += 1
        #print 'cnt %s : %s'%(cnt,rule_params.groups())
        if res:
            #print 'searching %s\nwith\n%s'%(strrestr,row)
            # Optional destination-port clause, e.g. "dpt:8080".
            dptre = re.compile('dpt\:(\d+)')
            dptres = dptre.search(row)
            if dptres:
                dport = dptres.group(1)
            else:
                dport = None

            source = rule_params.group('source')
            dtraw = res.group('data')
            # Metadata is stored base64/JSON-encoded in the rule comment.
            dt = json.loads(base64.b64decode(dtraw))
            user = dt['u']
            stamp = dt['s']
            if users and user not in users:
                raise Exception('unknown user %s' % user)
            if by_user and user not in rt:
                #raise Exception('adding user %s because not in %s'%(user,rt.keys()))
                rt[user] = []
            row = {
                'source':
                source,
                'cnt':
                cnt,
                'pkts':
                rule_params.group('pkts'),
                'age': (datetime.datetime.now() -
                        datetime.datetime.fromtimestamp(stamp)),
                'note':
                dt['n'],
                'dport':
                dport
            }
            if by_user:
                rt[user].append(row)
            else:
                row['user'] = user
                rt.append(row)
            aaw = source
            if dport: aaw += '=>:' + dport
            all_allowed.append(aaw)
    return rt, all_allowed
Ejemplo n.º 16
0
def get_version():
    """Write the current git short revision into src/_version.py."""
    status, output = gso("git rev-parse --short HEAD")
    if status != OK:
        # The command is git, not SVN; the old message said "SVN".
        raise BuildConfigurationException("Unable to obtain git version")
    version_info = output.strip()
    print(">>> (%s)" % version_info)
    version_python_code = VERSION_TEMPLATE % version_info
    # `with` closes the handle (the original leaked it).
    with open("src/_version.py", 'w') as version_file:
        version_file.write(version_python_code)
Ejemplo n.º 17
0
def check_pro():
    """Return 0 when a tomcat process is running, 1 otherwise."""
    status, result = gso('ps aux|grep tomcat|grep -v grep')
    if not result:
        print('<<<<<<<<<<<<<<<<<<<< tomcat stopped >>>>>>>>>>>>>>>>>>>>')
        return 1
    print('<<<<<<<<<<<<<<<<<<<< tomcat running >>>>>>>>>>>>>>>>>>>>')
    return 0
Ejemplo n.º 18
0
def monitor():
    """Poll ``qstat`` until the submitted job leaves the queue.

    Extracts the newest job id from the qstat output and keeps
    printing the queue status every 45 seconds while that id is still
    listed.  Only runs under Python 2 (mirrors the original guard).
    """
    if sys.version[0] == '2':
        snap_time = 45
        status, output = gso('qstat')
        # Raw string, escaped dot, \d+ — the old '(\d*).service' could
        # match an empty id and its "." matched any character.
        pattern = r'(\d+)\.service'
        try:
            jobid = re.findall(pattern, output)[-1]
        except IndexError:
            print('no jobid found, check the grammer')
            print(output)
            return
        print('jobid is ', jobid, '\n')
        # jobid is all digits, so it is safe to use as a pattern.
        while re.findall(jobid, output):
            print(output)
            time.sleep(snap_time)
            status, output = gso('qstat')
            print('\n\n\n\n\n')
        print('job is done')
Ejemplo n.º 19
0
def sub_task(comment, submit):
    """Build the orbit executables and optionally submit the PBS job."""
    claim()
    print('making files, please wait')
    print('making equilibrium files')
    # Build and run the equilibrium solver, then build the main code.
    for build_cmd in ('make FC=pgf90 eqs', './eqs', 'make FC=pgf90'):
        status, output = gso(build_cmd)
        print(output)
    # submit == 1 means "go ahead" (kept numeric for batch tests).
    if submit == 1:
        status, output = gso('qsub ./job.pbs')
        print(output)
        use.monitor()
        use.pack(comment)
    else:
        print('not submitted')
Ejemplo n.º 20
0
def get_fw_rules(users=None, by_user=True):
    """Parse the iptables chain into structured rule records.

    Each accepted rule carries base64/JSON metadata in its comment
    ('u' = user, 's' = creation timestamp, 'n' = note).  Returns
    (rules, all_allowed): rules is a dict keyed by user (or a flat
    list when by_user is false); all_allowed lists the currently open
    'source[=>:dport]' strings.
    """
    st, op = gso("sudo iptables -tfilter -nvL %s" % IPT_CHAIN)
    assert st == 0
    if by_user:
        rt = {}
    else:
        rt = []
    all_allowed = []
    cnt = 0  # 1-based rule position within the chain
    for row in op.split("\n"):
        row = row.strip()
        res = matchre.search(row)
        rule_params = strre.search(row)
        if not rule_params:
            continue
        cnt += 1
        # print 'cnt %s : %s'%(cnt,rule_params.groups())
        if res:
            # print 'searching %s\nwith\n%s'%(strrestr,row)
            # Optional destination-port clause, e.g. "dpt:8080".
            dptre = re.compile("dpt\:(\d+)")
            dptres = dptre.search(row)
            if dptres:
                dport = dptres.group(1)
            else:
                dport = None

            source = rule_params.group("source")
            dtraw = res.group("data")
            # Metadata is stored base64/JSON-encoded in the rule comment.
            dt = json.loads(base64.b64decode(dtraw))
            user = dt["u"]
            stamp = dt["s"]
            if users and user not in users:
                raise Exception("unknown user %s" % user)
            if by_user and user not in rt:
                # raise Exception('adding user %s because not in %s'%(user,rt.keys()))
                rt[user] = []
            row = {
                "source": source,
                "cnt": cnt,
                "pkts": rule_params.group("pkts"),
                "age": (datetime.datetime.now() - datetime.datetime.fromtimestamp(stamp)),
                "note": dt["n"],
                "dport": dport,
            }
            if by_user:
                rt[user].append(row)
            else:
                row["user"] = user
                rt.append(row)
            aaw = source
            if dport:
                aaw += "=>:" + dport
            all_allowed.append(aaw)
    return rt, all_allowed
Ejemplo n.º 21
0
def sub_task(comment, pdist, numeric, submit, monitor):
    """Build the orbit executables and optionally submit/monitor the job.

    submit/monitor are 'y'/'n' flags; pdist and numeric are forwarded
    to use.pack() for result packaging.
    """
    # NOTE(review): `ying` is not defined in this snippet — presumably
    # a leftover debug line; confirm before removing.
    print(ying)
    # make file
    print('making files, please wait')
    print('making equilibrium files')
    status, output = gso('make FC=pgf90 eqs')
    print(output)
    status, output = gso('./eqs')
    print(output)
    status, output = gso('make FC=pgf90')
    print(output)
    # let the user choose whether to submit the job
    # make sure it is 'y' when using batch test
    if submit == 'y':
        status, output = gso('qsub ./job.pbs')
        print(output)
        if monitor == 'y':
            use.monitor()
            use.pack(comment, pdist, numeric)
    else:
        print('not submitted')
 def _rsync_repository(self, tmp_path, dest):
     """Rsync all repository files under *tmp_path* to the remote machine.

     Raises MachineUnavailableException when the rsync command fails.
     (Dropped an unused glob.glob() result the original computed and
     ignored.)
     """
     cmd = "rsync -La %s %s@%s:%s" % (tmp_path, self.username,
                                      self.ip_address, dest)
     self.server_log.info("Running: %s" % cmd)
     self.polling_log.debug(cmd)
     status, output = gso(cmd)
     if status != OK:
         msg = "Error rsyncing repository: %s" % output
         raise MachineUnavailableException(self.machine_name, msg)
Ejemplo n.º 23
0
def modem_terminal():
    """Return the lowest tty device name dmesg associates with a GSM modem.

    Raises an Exception when dmesg fails or no GSM modem line is found.
    """
    status, dmesg_out = gso("dmesg")
    if status != 0:
        # status is an int; the original "str + int" concatenation
        # raised a TypeError instead of the intended message.
        raise Exception('dmesg returned %s' % status)

    tty_lines = [line for line in dmesg_out.split('\n')
                 if 'GSM modem' in line]

    if len(tty_lines) == 0:
        raise Exception('No lines for GSM found in dmesg!')

    # The device name is the last whitespace-separated token on the line.
    return sorted([line.split(' ')[-1] for line in tty_lines if 'tty' in line])[0]
Ejemplo n.º 24
0
 def runSvn(self, tokens, action):
     """Run an svn *action* against the file named in tokens[2].

     Returns (OK, output lines) on success, or (FAIL, error lines)
     for a bad command, a missing file, or a failing svn invocation.
     """
     if len(tokens) < 3:
         return FAIL, ["Incomplete command"]
     fileName = os.path.join(mode.serverHome, tokens[2])
     if not os.path.isfile(fileName):
         return FAIL, ["Unknown file: %s" % fileName]
     status, output = gso("%s %s %s" % (self.svnCmd, action, fileName))
     if status == OK:
         return OK, output.split('\n')
     errmsg = ["Repository is corrupt. Please examine it manually"]
     errmsg += output.split('\n')
     return FAIL, errmsg
Ejemplo n.º 25
0
def update_tomcat(filename, version):
    """Replace the deployed webapp with a symlink to the given version.

    Returns 0 on success; exits the process on failure.
    """
    now_project_path = '/usr/local/tomcat/webapps/%s' % ln_name
    # TODO: remove the previous version's unpacked files before relinking.
    shutil.rmtree(now_project_path)
    os.remove('/usr/local/tomcat/webapps/publish.war')
    # NOTE(review): `ln -s` expects target first, link name second —
    # confirm the argument order here matches the intent.
    cmd_ln = 'ln -s /usr/local/tomcat/webapps/publish.war /data/version/%s/%s' % (
        version, filename)
    status, result = gso(cmd_ln)
    # ln prints nothing on success, so the original `if result:` branch
    # treated success as failure; test the exit status instead.
    if status == 0:
        print('SoftConnect create success!')
        return 0
    else:
        print('SoftConnect create failed!')
        exit(1)
Ejemplo n.º 26
0
 def implement_rules(self, cmds):
     """Run the assembled rule commands.

     In --test mode the commands are only printed; otherwise each is
     executed and a DownloadException is raised on the first failure.
     (Python 2: print statements.)
     """
     if self.options.test:
         print "This script would implement the following rules:"
         for cmd in cmds:
             print "   %s" % cmd
     else:
         for cmd in cmds:
             if self.options.verbose: self.log("Running: [%s]..." % cmd)
             status, output = gso(cmd)
             if status != OK:
                 self.log("COMMAND FAILED: [%s]" % cmd)
                 raise DownloadException(self.options.server_url, output)
             if self.options.verbose: print "SUCCESS"
Ejemplo n.º 27
0
 def push(self, *args, **kw):
     """Push RPMs (per channel) and SRPMs to the satellite server.

     Requires -l/--label; pushes each glob-matched RPM to every listed
     channel via rhnpush, then pushes SRPMs once.  Failures are logged
     as warnings, not raised.  (Python 2 raise syntax.)
     """
     config = get_config()
     if not self.cli_opts.label:
         raise SatCLIArgumentError, 'channel -l/--label required'

     channels = self.cli_opts.label.split(',')
     for channel in channels:
         if self.cli_opts.rpms:        
             rpms = glob(str(self.cli_opts.rpms))
             for rpm in rpms:
                 nosig_txt = ''
                 if config['allow_nosig']:
                     nosig_txt = '--nosig'
                 cmd = "%s %s -u %s -p %s --server %s -c %s %s" % \
                     (config['cmd_rhnpush'], rpm, config['user'], 
                      config['password'], config['server'], 
                      channel, nosig_txt)
                 res = gso(cmd)
                 if res[0] != 0:
                     log.warn(res[1])

     # SRPMs are pushed once, outside the per-channel loop.
     if self.cli_opts.srpms:
         srpms = glob(str(self.cli_opts.srpms))
         for srpm in srpms:
             if os.path.exists(srpm):
                 nosig_txt = ''
                 if config['allow_nosig']:
                     nosig_txt = '--nosig'
                 cmd = "%s %s --source -u %s -p %s --server %s %s" % \
                     (config['cmd_rhnpush'], srpm, 
                      config['user'], config['password'], 
                      config['server'], nosig_txt)
                 res = gso(cmd)
                 if res[0] != 0:
                     log.warn(res[1])
             else:
                 log.warn("SRPM '%s' doesn't exist!" % srpm)          
Ejemplo n.º 28
0
def allow_access(user, ip=None, note=None, dport=None):
    """Open firewall access for *user* from *ip* and/or to *dport*.

    Skips the local iptables insert when an equivalent rule already
    exists, then mirrors the grant to every delegated firewall.  Rule
    metadata (user, timestamp, note) is stored base64/JSON-encoded in
    the iptables rule comment.  (Python 2: print statement below.)
    """
    assert ip or dport, "at least ip or dport have to be specified."
    users = get_users()
    rules, all_allowed = get_fw_rules(users)
    # Is an equivalent allowance already present?
    if ip and dport:
        cond = ip + "=>:" + dport in all_allowed
    elif dport:
        cond = "0.0.0.0/0=>:" + dport in all_allowed
    else:
        cond = ip in all_allowed

    if not cond:
        dt = base64.b64encode(json.dumps({"u": user, "s": time.time(), "n": note}))
        cmd = "sudo iptables -IINPUT %s" % IPT_INSPOS
        if ip:
            cmd += " -s %s" % (ip)
        if dport:
            cmd += " -p tcp --dport %s" % dport
        cmd += ' -j ACCEPT -m comment --comment="ServerAccess d:%s"' % (dt)
        # print cmd
        st, op = gso(cmd)
        assert st == 0, "%s => %s" % (cmd, op)

    # Mirror the grant to each delegated firewall over ssh.
    for dfw in DELEGATED_FIREWALLS:
        dcmd = "add "
        if note:
            dcmd += " --note %s" % escapeshellarg(note)
        if dport:
            dcmd += " --dport=%s" % escapeshellarg(dport)
        if user:
            dcmd += " --user=%s" % escapeshellarg(user)
        if ip:
            dcmd += " %s" % escapeshellarg(ip)
        fcmd = "ssh " + dfw["ssh"] + " " + escapeshellarg(dfw["cmd"] % dcmd)
        print fcmd
        st, op = gso(fcmd)
        assert st == 0, "%s => %s" % (fcmd, op)
Ejemplo n.º 29
0
 def runSvnMult(self, action, argString='', parseFunction=statusAndFile):
     """Run an svn *action* over the standard server directories.

     Returns (OK, parsed-output mapping) or (FAIL, error lines) on the
     first failing svn invocation.
     """
     directories = ["include", "bom", "client"]
     cmdOutput = {}
     for directory in directories:
         cmdOutput[directory] = []
         directoryPath = os.path.join(mode.serverHome, directory)
         cmd = "%s %s %s %s" % (self.svnCmd, action, directoryPath, argString)
         status, output = gso(cmd)
         if status != OK:
             errmsg = ["Repository is corrupt. Please examine it manually"]
             errmsg += [cmd]
             errmsg += output.split('\n')
             return FAIL, errmsg
         # NOTE(review): parseFunction's return value replaces the whole
         # cmdOutput mapping each iteration — confirm that is intended.
         cmdOutput = parseFunction(output, cmdOutput, directory)
     return OK, cmdOutput
Ejemplo n.º 30
0
def update_job():
    """Move the uploaded release file into a timestamped version directory.

    Returns (filename, version) on success; exits the process on
    failure.  Returns None when the upload directory had to be created.
    """
    # TODO: copy the version file into the target directory.
    version = time.strftime('%Y-%m-%d-%H', time.localtime(time.time()))
    cmd_mkdir = "mkdir /data/version/%s" % version
    status, result = gso(cmd_mkdir)
    # mkdir prints nothing on success; test the exit status.
    if status == 0:
        print('Create Version of the directory success!')
        # The original tested `file_path` (the constant path string)
        # instead of the os.path.isdir() result, so the mkdir branch
        # could never run.
        if not os.path.isdir(file_path):
            os.mkdir(file_path)
        else:
            file = os.listdir(file_path)
            filename = file[0]
            cmd_cp = 'mv /data/file/%s /data/version/%s' % (filename, version)
            status, result = gso(cmd_cp)
            # mv also prints nothing on success; the original `if result:`
            # treated a silent success as failure.
            if status == 0:
                print('copy file success!')
                return filename, version
            else:
                print('copy file filed!')
                exit(1)
    else:
        print('Create Version of the directory failed!')
        exit(1)
Ejemplo n.º 31
0
def start_pro():
    """Start tomcat via startup.sh; return 0 when it printed output, 1 otherwise."""
    bin_dir = tomcat_pah + '/bin'
    print(bin_dir)
    os.chdir(bin_dir)
    status, result = gso("sh startup.sh")
    print(result)
    # startup.sh normally prints a banner; empty output is taken as failure.
    if result:
        print(
            "<<<<<<<<<<<<<<<<<<<< tomcat startup success >>>>>>>>>>>>>>>>>>>>")
        return 0
    print(
        "<<<<<<<<<<<<<<<<<<<< tomcat startuo failed >>>>>>>>>>>>>>>>>>>>")
    return 1
Ejemplo n.º 32
0
def allow_access(user, ip=None, note=None, dport=None):
    """Open firewall access for *user* from *ip* and/or to *dport*.

    Skips the local iptables insert when an equivalent rule already
    exists, then mirrors the grant to every delegated firewall.  Rule
    metadata (user, timestamp, note) is stored base64/JSON-encoded in
    the iptables rule comment.  (Python 2: print statement below.)
    """
    assert ip or dport, "at least ip or dport have to be specified."
    users = get_users()
    rules, all_allowed = get_fw_rules(users)
    # Is an equivalent allowance already present?
    if ip and dport:
        cond = ip + '=>:' + dport in all_allowed
    elif dport:
        cond = '0.0.0.0/0=>:' + dport in all_allowed
    else:
        cond = ip in all_allowed

    if not cond:
        dt = base64.b64encode(
            json.dumps({
                'u': user,
                's': time.time(),
                'n': note
            }))
        cmd = 'sudo iptables -IINPUT %s' % IPT_INSPOS
        if ip: cmd += ' -s %s' % (ip)
        if dport: cmd += ' -p tcp --dport %s' % dport
        cmd += ' -j ACCEPT -m comment --comment="ServerAccess d:%s"' % (dt)
        #print cmd
        st, op = gso(cmd)
        assert st == 0, "%s => %s" % (cmd, op)

    # Mirror the grant to each delegated firewall over ssh.
    for dfw in DELEGATED_FIREWALLS:
        dcmd = 'add '
        if note: dcmd += ' --note %s' % escapeshellarg(note)
        if dport: dcmd += ' --dport=%s' % escapeshellarg(dport)
        if user: dcmd += ' --user=%s' % escapeshellarg(user)
        if ip: dcmd += ' %s' % escapeshellarg(ip)
        fcmd = 'ssh ' + dfw['ssh'] + ' ' + escapeshellarg(dfw['cmd'] % dcmd)
        print fcmd
        st, op = gso(fcmd)
        assert st == 0, "%s => %s" % (fcmd, op)
 def _process_backup(self, backup_dict, package_name):
     '''
     We've just instructed a package to back itself up. We need to haul the
     backup data back to archive it and collect some statistics on it.
     Returns backup_dict, with '__SYNC__' set to OK/FAIL recording the
     rsync result.
     backup_dict -- Detailed information about the backup
         Example: {'__BACKUP_DIR__': '/tmp/tmp3GHMRw',
                   '__POST_BACKUP__': 'start',
                   '__PRE_BACKUP__': 'stop',
                   '__START_TIME__': 1284766556.062264,
                   'main_file': {'status': 0,
                                 'elapsed_time': 0.0013320446014404297,
                                 'file_name': '/tmp/foogazi/test_type_5',
                                 'size': 18L,
                                 'md5': {'test_type_5': 'a670f8128b02d00725cff2b4973fb47e',
                                         'test_type_5.bz2': '17e6a319e8e0b496dab7eb9b3ce03994'
                                        }
                                }
                  }
     '''
     remote_dir = backup_dict.get("__BACKUP_DIR__")
     if not remote_dir:
         msg = "Package does not request data be copied back from machine."
         self.polling_log.warning(msg)
         self.server_log.warning(msg)
         return backup_dict
     # Archive directory is keyed by the backup's integer start time.
     start_time = str(int(backup_dict.get("__START_TIME__")))
     archive_dir = os.path.join(self.server_home, "archive",
                                self.machine_name, package_name, start_time)
     os.system("mkdir -p %s" % archive_dir)
     cmd = "rsync -La %s@%s:%s/* %s"
     cmd = cmd % (self.username, self.ip_address, remote_dir, archive_dir)
     self.polling_log.debug(cmd)
     status, output = gso(cmd)
     if status != OK:
         msg = "Error rsyncing remote backup: %s" % output
         backup_dict["__SYNC__"] = FAIL
         self.status = FAIL
     else:
         # Success: clean up the remote side and record a summary.
         backup_dict["__SYNC__"] = OK
         self.run_cmd("rm -rf %s" % remote_dir)
         del backup_dict["__BACKUP_DIR__"]
         backup_summary = yaml.dump(backup_dict)
         summary_file = os.path.join(archive_dir, "backup_info.yml")
         open(summary_file, 'w').write(backup_summary)
     return backup_dict
Ejemplo n.º 34
0
def roll_code(ver):
    """Roll the deployment back *ver* versions and restart tomcat.

    Picks the target version directory from `ls -lt` ordering, stops
    any running tomcat, relinks the webapp to that version and starts
    tomcat again.  Exits the process on any failure.
    """
    # NOTE(review): +2 presumably skips the 'total' header line in
    # `ls -l` output plus the 1-based sed offset — verify.
    version = int(ver) + 2
    # TODO sort by time, then rebuild the symlink.
    cmd_sort = "ls -lt /data/version|sed -n %sp|awk '{print $9}'" % version
    print(cmd_sort)
    status, result = gso(cmd_sort)
    filename = os.listdir('/data/version/%s' % result)
    if not status:
        print('准备回滚到版本号为%s的版本' % result)
        status = check_pro()
        if status == 0:
            # Tomcat is running: stop it before swapping versions.
            kill = kill_pro()
            if kill == 0:
                code = update_tomcat(filename, result)
                if code == 0:
                    print(
                        '<<<<<<<<<<<<<<<<<<<< Remote release success! >>>>>>>>>>>>>>>>>>>>'
                    )
                    start_pro()
                else:
                    print(
                        '<<<<<<<<<<<<<<<<<<<< Remote release failed! >>>>>>>>>>>>>>>>>>>>'
                    )
                    exit(1)
            else:
                exit(1)
        else:
            # Tomcat not running: deploy a fresh version instead.
            filename, version = update_job()
            code = update_tomcat(filename, version)
            if code == 0:
                print(
                    '<<<<<<<<<<<<<<<<<<<< Remote release success! >>>>>>>>>>>>>>>>>>>>'
                )
                start_pro()
            else:
                print(
                    '<<<<<<<<<<<<<<<<<<<< Remote release failed! >>>>>>>>>>>>>>>>>>>>'
                )
                exit(1)
    else:
        print('获取版本号失败...')
        exit(1)
Ejemplo n.º 35
0
 def runParallel(self, ncpus) :
     """Run the Gaudi application in multi-process mode on *ncpus* CPUs.

     Logs environment metadata, hands control to GaudiMP's Coord, and
     returns 0 on success or 1 on failure.  Incompatible with a custom
     main loop.
     """
     if self.mainLoop:
         self.log.fatal("Cannot use custom main loop in multi-process mode, check your options")
         return 1
     self.setupParallelLogging( )
     from Gaudi.Configuration import Configurable
     import GaudiMP.GMPBase as gpp
     c = Configurable.allConfigurables
     self.log.info('-'*80)
     self.log.info('%s: Parallel Mode : %i '%(__name__, ncpus))
     from commands import getstatusoutput as gso
     # Record some environment metadata; command failures are non-fatal.
     metadataCommands = [ 'uname -a',
                          'echo $CMTCONFIG',
                          'echo $GAUDIAPPNAME',
                          'echo $GAUDIAPPVERSION']
     for comm in metadataCommands :
         s, o = gso( comm )
         if s :
             o = "Undetermined"
         string = '%s: %30s : %s '%(__name__, comm, o)
         self.log.info( string )
     try :
         events = str(c['ApplicationMgr'].EvtMax)
     except :
         events = "Undetermined"
     self.log.info('%s: Events Specified : %s '%(__name__,events))
     self.log.info('-'*80)
     # Parall = gpp.Coordinator(ncpus, shared, c, self.log)
     Parall = gpp.Coord( ncpus, c, self.log )
     sysStart = time()
     sc = Parall.Go()
     self.log.info('MAIN.PY : received %s from Coordinator'%(sc))
     if sc.isFailure() :
         return 1
     sysTime = time()-sysStart
     self.log.name = 'Gaudi/Main.py Logger'
     self.log.info('-'*80)
     self.log.info('%s: parallel system finished, time taken: %5.4fs', __name__, sysTime)
     self.log.info('-'*80)
     return 0
Ejemplo n.º 36
0
# -*-coding:utf-8 -*-
Ejemplo n.º 37
0
def pack(comment,pdist,numeric):
    """Package run artifacts into a dated folder '<YYYYMMDD>-<comment>'.

    pdist/numeric select which input files are copied alongside the
    outputs; the folder also gets an orbit_results/ subdir with the plot
    outputs and a copy of ./plot_functions.
    """
    # rename the desired file folder as 20010101-comment
    date = time.strftime('%Y%m%d',time.localtime(time.time()))
    # des_folder: destination of output files
    des_folder = date + '-' + comment
    # remove then (re)create the folder
    gso('rm -rf %s'%des_folder)
    gso('mkdir -p %s/orbit_results'%des_folder)
    # cp certain files based on the value of pdist and numeric.
    # NOTE: the original used brace expansion ('cp {a,b,c} dst'), which is a
    # bash-ism; getstatusoutput runs commands through /bin/sh, where POSIX
    # shells (e.g. dash) do not expand braces and the copy silently fails.
    # The file lists are therefore spelled out explicitly.
    if pdist*numeric == 2:
        # numeric balance and distribution
        gso('cp orbit orbit.F spdata fbm_dist.dat job.pbs ./%s'%des_folder)
    elif pdist == 2:
        # numeric distribution
        gso('cp orbit orbit.F fbm_dist.dat job.pbs spdata ./%s'%des_folder)
    else:
        gso('cp orbit orbit.F spdata job.pbs ./%s'%des_folder)
    # a program to package file 
    gso('cp *.plt orbit.out configuration.py ./%s/orbit_results'%(des_folder))
    gso('cp -r ./plot_functions ./%s'%(des_folder))
Ejemplo n.º 38
0
def run(start_date=None,end_date=None,makereport=False):
    """Aggregate per-commit statistics from the GitHub API v2 and optionally
    build an HTML activity report (written to disk or e-mailed).

    Commit lists per configured project are cached under data/ (refreshed
    when older than one hour), then every commit is tallied by user, project,
    story id ('#NNN' in the commit message) and date.

    start_date, end_date -- optional date bounds; when start_date is None the
        range may instead come from sys.argv[1] as 'YYYY-MM-DD:YYYY-MM-DD'.
    makereport -- when False, return {'by_user': ..., 'by_story': ...} raw
        tallies instead of rendering the report.

    NOTE(review): legacy Python 2 code (print statements, cmp()-based sorts)
    against the long-retired github.com/api/v2 endpoints; credentials and
    the project list come from ./config.json.
    """
    usermap = {}


    # config.json supplies credentials, the project list and a usermap of
    # literal-substring -> canonical user name (compiled to regexes here).
    conf = json.loads(open('config.json','r').read().replace('\n',''))
    GITHUB_USER = conf['user'] #open('githubuser.txt','r').read().strip()
    GITHUB_PASSWORD = conf['user']+':'+conf['password'] #open('githubpw.txt','r').read().strip()
    GITHUB_PROJECTS = conf['projects'] #open('githubprojects.txt','r').read().strip().split('\n')
    usermapjson = conf['usermap']
    for k,v in usermapjson.items():
        usermap[re.compile(re.escape(k))]=v

    # stat accumulators; every leaf value is an initarr() record
    by_story={}
    by_user={}
    by_project={}

    by_date={}

    user_project={}
    project_user={}
    user_date={}
    user_project_date={}
    project_date={}

    # --- phase 1: refresh the data/<proj>.json commit-list caches ---
    for proj in GITHUB_PROJECTS:
        ofn = os.path.join('data','%s.json'%proj)
        fetchnew=True

        # reuse the cache file if it is less than one hour old
        if os.path.exists(ofn):
            st = os.stat(ofn)
            fmt = datetime.datetime.fromtimestamp(st.st_mtime)
            oneh = (datetime.datetime.now()-datetime.timedelta(hours=1))
            #print 'fmt = %s; oneh = %s'%(fmt,oneh)
            if fmt>=oneh:
                fetchnew=False
            else:
                os.unlink(ofn)
        if fetchnew:
            # page through the project's commit list via curl until an
            # empty page comes back, accumulating everything into wr
            print 'fetching project %s'%proj
            pagenum=1
            wr={'commits':[]}
            while True:
                print 'taking page %s'%pagenum
                cmd = "curl -s -u '%s' 'http://github.com/api/v2/json/commits/list/%s/%s/master?page=%s'"%(GITHUB_PASSWORD,GITHUB_USER,proj,pagenum)
                st,op=gso(cmd) ; assert st==0
                pagenum+=1
                dt = json.loads(op)
                if 'commits' in dt: print '%s entries'%len(dt['commits'])
                if 'commits' not in dt or not len(dt['commits']): break
                wr['commits']+=dt['commits']

            fp = open(ofn,'w') ; fp.write(json.dumps(wr)); fp.close()

        else:
            print '%s is recent enough'%proj

    def initarr():
        # one stats record: commit count, diff-line total, files
        # removed/added, and the [comid, message, user, date] entries
        return {'times':0,'diff':0,'removed':0,'added':0,'ids':[]}

    # resolve the reporting date range: explicit args beat sys.argv[1]
    if start_date:
        fr,to = start_date,end_date
    else:
        if len(sys.argv)>1:
            fr,to = [datetime.datetime.strptime(it,'%Y-%m-%d').date() for it in sys.argv[1].split(':')]
            print 'report is for range %s - %s'%(fr,to)
        else:
            fr,to = None,None
    # sys.argv[2]: mail recipients, inline or '@file', comma-separated
    if len(sys.argv)>2:
        if sys.argv[2][0]=='@': rcptraw = open(sys.argv[2][1:],'r').read()
        else: rcptraw = rcpt = sys.argv[2]
        rcpt = rcptraw.split(',')
    else:
        rcpt = None

    # --- phase 2: walk the cached project files and tally every commit ---
    # per-commit caches are data/<proj>.<hex-id>.json; the hex pattern below
    # is how commit files are skipped when iterating project files
    comre = re.compile('([a-f0-9]{16})')
    for fn in glob.glob('data/*.json'):
        if comre.search(fn): continue
        obj = json.loads(open(fn,'r').read())

        print 'going over proj %s'%fn
        assert 'commits' in obj,'cannot find commits in %s'%fn
        for c in obj['commits']:
            projfn = os.path.basename(fn).replace('.json','')

            comfn = os.path.join('data','%s.%s.json'%(projfn,c['id']))
            # story ids are '#123' references in the commit message
            storyre = re.compile('#(\d+)')
            stories=[]
            for stres in re.finditer(storyre,c['message']):
                storyid = stres.group(1)
                if storyid not in stories: stories.append(storyid)

            # canonicalize the committer e-mail via the usermap regexes
            user = c['committer']['email']
            for um,uv in usermap.items():
                if um.search(user):
                    user = uv
                    break
            assert user,c
            proj = os.path.basename(fn).replace('.json','')
            date = datetime.datetime.strptime(c['authored_date'][0:-5],'%Y-%m-%dT%H:%M:%S-') #dateutil.parser.parse(c['authored_date'])

            # skip commits outside the requested date range
            if fr and date.date()<fr: continue
            if to and date.date()>to: continue
            #print '%s -> %s on %s'%(user,proj,date)
            comid = '/%s/%s/commit/%s'%(GITHUB_USER,projfn,c['id'])
            commsg = c['message']
            # fetch (and cache) the full commit details for diff stats
            if not os.path.exists(comfn):
                print 'fetching commit %s'%c['id']
                comurl = 'http://github.com/api/v2/json/commits/show/%s/%s/%s'%(GITHUB_USER,projfn,c['id'])
                curlcmd = 'curl -u %s %s > %s'%(GITHUB_PASSWORD,comurl,comfn)
                st,op = gso(curlcmd) ; assert st==0
                assert os.path.exists(comfn)
            try:
                comdt = json.loads(open(comfn).read())
            except:
                raise Exception('could not load from %s'%comfn)

            # dfsum: total diff lines across all modified files
            if 'modified' in comdt['commit']:
                try:
                    dfsum = sum([len(mod['diff'].split('\n')) for mod in comdt['commit']['modified'] if 'diff' in mod])
                except:
                    raise Exception(comdt['commit']['modified'])
            else:
                dfsum =0

            if 'removed' in comdt['commit']:
                removed = len(comdt['commit']['removed'])
            else:
                removed = 0

            if 'added' in comdt['commit']:
                added = len(comdt['commit']['added'])
            else:
                added = 0

            # ensure every accumulator has a record for this commit's keys
            if proj not in project_date: project_date[proj]={}
            if proj not in project_user: project_user[proj]={}
            if user not in project_user[proj]: project_user[proj][user]=initarr()
            if date.date() not in project_date[proj]: project_date[proj][date.date()] = initarr()

            if user not in by_user: 
                user_project_date[user]={}
                user_date[user]={}
                user_project[user]={}
                by_user[user]=initarr()
            # NOTE(review): incr() is called here but its def only executes a
            # few lines below in the same iteration -- the very first commit
            # processed that references a story would hit a NameError; works
            # only because later iterations reuse the binding. Confirm intent.
            if len(stories):
                for storyid in stories:
                    if storyid not in by_story: by_story[storyid]=initarr()
                    incr(by_story[storyid])
            def idsort(i1,i2):
                # chronological order of commit entries (index 3 is the date)
                return cmp(i1[3],i2[3])
            def incr(o):
                # fold the current commit (closed-over dfsum/added/removed/
                # comid/commsg/user/date) into one stats record
                #global dfsum,added,removed,comid,commsg
                o['times']+=1
                o['diff']+=dfsum
                o['removed']+=removed
                o['added']+=added
                o['ids'].append([comid,commsg,user,date])
                o['ids'].sort(idsort)
            incr(by_user[user])
            incr(project_user[proj][user])
            if proj not in by_project: 
                by_project[proj]=initarr()

            if proj not in user_project[user]:
                user_project[user][proj]=initarr()

            incr(user_project[user][proj])
            incr(by_project[proj])

            if date.date() not in by_date: 
                by_date[date.date()]=initarr()

            if date.date() not in user_date[user]:
                user_date[user][date.date()]=initarr()

            if proj not in user_project_date[user]: user_project_date[user][proj]={}
            if date.date() not in user_project_date[user][proj]: user_project_date[user][proj][date.date()]=initarr()

            incr(project_date[proj][date.date()])
            incr(user_project_date[user][proj][date.date()])
            incr(by_date[date.date()])
            incr(user_date[user][date.date()])

    # data-only mode: hand the raw tallies back to the caller
    if not makereport:
        return {'by_user':by_user,'by_story':by_story}
    # cmp()-style sort helpers for the report tables (Python 2 list.sort)
    def srtit(a1,a2):
        return cmp(a1[0],a2[0])
    def srt2(i1,i2):
        return cmp(i1[1]['diff'],i2[1]['diff'])
    def srt3(i1,i2):
        return cmp(i1[0],i2[0])
    import time
    def totimestamp(dt):
        # date -> unix timestamp (microseconds deliberately dropped)
        return time.mktime(dt.timetuple()) + 0/1e6 #dt.microsecond/1e6

    # --- phase 3: render the HTML report ---
    # jsexp feeds plotgraph.js: per-user diff volume over time
    jsexp=[]
    for user in user_date:
        items = user_date[user].items()

        #print([totimestamp(it[0]) for it in items])
        for item in items:
            #if user not in ['*****@*****.**','*****@*****.**']: continue #'*****@*****.**': continue
            tm = int(totimestamp(item[0]))
            curi = item[1]['diff']
            jsexp.append({'action':user,'time':curi,'curitems':int(tm)})
    def srtjsexp(i1,i2):
        return cmp(i1['curitems'],i2['curitems'])

    jsexp.sort(srtjsexp)

    op="""<doctype !html>
    <html>
    <head>
    <script type='text/javascript' src='jquery-1.6.1.min.js'></script>
    <script type='text/javascript' src='raphael-min.js'></script>
    <script type='text/javascript'>
    var data = %s;
    </script>
    <script type='text/javascript' src='plotgraph.js'></script>
    <style type='text/css'>
    #info { width:920px; height:200px; }
    thead { background-color:#abc; }
    </style>
    </head>
    <body>
    <div id='info'></div>
    """%(json.dumps(jsexp))

    # shared table templates for every report section
    dtpat = "<table><thead><tr><th>date<th>commits<th>added<th>removed<th>difflines<th>links</tr></thead><tbody>\n"
    dtendpat = "</tbody></table>"
    rowpat = "<tr><td><nobr>%s</nobr></td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>\n"
    def mkrow(date,dt,commits=True):
        # one table row from a stats record; commits=True adds commit links
        if commits:
            cm = ', '.join(["<a href='https://github.com%s' title='%s by %s on %s'>%s</a>"%(com,commsg,user,stamp,com.split('/')[4][0:4]) for com,commsg,user,stamp in dt['ids']])
        else:
            cm=''
        rt= rowpat%(date,dt['times'],dt['added'],dt['removed'],dt['diff'],cm)
        return rt
    opa=[]
    if fr:opa.append("commits are starting from %s"%(fr))
    if to:opa.append("commits are until %s"%(to))
    opa.append("generated on %s"%datetime.datetime.now())
    op+=' :: '.join(['<small>%s</small>'%ope for ope in opa])+'<br />'

    op+="<h1>user totals</h1>"
    op+=dtpat
    uitems = by_user.items()
    uitems.sort(srt2,reverse=True)
    for user,commits in uitems:
        op+=mkrow(user,commits,commits=False)
    op+=dtendpat
    op+="<h1>project totals by date</h1>"
    for proj,dates in project_date.items():
        op+="<h2>%s</h2>"%proj
        op+=dtpat
        commits = dates.items()
        commits.sort(srt3)
        for date,commit in commits:
            op+=mkrow(date,commit,commits=True)
        op+=dtendpat
    op+="<h1>project totals by commiter</h1>"
    for proj,commiters in  project_user.items():
        op+="<h2>%s</h2>"%proj
        op+=dtpat.replace('date','commiter')
        commits = commiters.items()
        commits.sort(srt2,reverse=True)
        for user,commit in commits:
            op+=mkrow(user,commit,commits=True)

        op+=dtendpat
    # per-user drill-down: totals by date, then per project by date
    for user,projects in user_project_date.items():
        op+="<h1>%s commits by %s into %s project(s)</h1>\n"%(by_user[user]['times'],user,len(user_project[user]))

        op+="<h2>by dates</h2>\n"
        dates = user_date[user].items()
        dates.sort(srtit)
        op+=dtpat
        for date,commits in dates:
            op+=mkrow(date,commits,commits=True)
        op+="</tbody></table>"

        for proj,dates in projects.items():
            op+="<h3>into project %s</h3>"%proj
            commits = dates.items()
            commits.sort(srtit)
            op+=dtpat
            for date,commits in commits:
                op+=mkrow(date,commits)
            op+="</tbody></table>"
    op+="</body></html>"
    # output: write commits[-fr][:to].html locally, or mail it out
    ofn = 'commits'
    if fr: ofn+='-%s'%fr
    if to: ofn+=':%s'%to
    ofn+='.html'
    if not rcpt:
        fp = open(ofn,'w') ; fp.write(op) ; fp.close()
        print 'written to %s'%ofn
    else:
        # SMTP settings come from config.json
        srvr = conf['smtp_server']
        port = conf['smtp_port']
        un = conf['smtp_user']
        pw = conf['smtp_pw']
        sender = conf['smtp_sender']

        import smtplib
        from email.mime.multipart import MIMEMultipart
        from email.mime.text import MIMEText

        fromaddr = sender
        msg = MIMEMultipart('alternative')
        msg['Subject'] = 'project commits for %s - %s'%(fr,to)
        msg['From'] = sender
        #msg['Reply-To'] = sender
        part2 = MIMEText(op, 'html')
        msg.attach(part2)

        # Credentials (if needed)
        username = un
        password = pw

        # The actual mail send
        server = smtplib.SMTP('%s:%s'%(srvr,port))

        server.starttls()
        server.login(username,password)
        for rc in rcpt:
            print 'mailing to %s -> %s'%(fromaddr,rc)
            toaddrs  = rc
            msg['To']=rc
            server.sendmail(fromaddr, toaddrs, msg.as_string())
        server.quit()
Ejemplo n.º 39
0
    def create(self, *args, **kw):
        """Create (and optionally publish) a Satellite errata from CLI options.

        Validates the required options (-a/--advisory, --rpms, a channel
        source, --synopsis, --product, --type, --description), pushes the
        matching RPMs (and the optional SRPM) to the server with rhnpush,
        then builds a model.Errata, submits it through the proxy and
        returns the stored record.

        Returns: dict(errata=<Errata queried back by advisory name>).
        """
        config = get_config()
        
        # collect every option-validation error first, then abort once
        errors = []
        channels = []
        if not self.cli_opts.advisory:
            errors.append(('SatCLIArgumentError', 
                           'errata -a/--advisory required.'))
        
        if not self.cli_opts.rpms:
            errors.append(('SatCLIArgumentError', 
                           'errata --rpms required.'))                   
        
        if not self.cli_opts.channel and not self.cli_opts.channels_file:
            errors.append(('SatCLIArgumentError', 
                           'errata -c/--channel or --channels-file required.'))                   
        
        # channels may come from a comma-separated -c option ...
        if self.cli_opts.channel:
            _channels = self.cli_opts.channel.split(',')
            for _c in _channels:
                channels.append(_c)
        
        # ... and/or from a file with one channel per line
        if self.cli_opts.channels_file:
            if os.path.exists(self.cli_opts.channels_file):
                f = open(self.cli_opts.channels_file, 'r')
                for line in f.readlines():
                    channels.append(line.strip('\n'))
            else:
                log.warn("channels file '%s' doesn't exist!" % \
                         self.cli_opts.channels_file)
        
        if not self.cli_opts.synopsis:
            errors.append(('SatCLIArgumentError', 
                           'errata --synopsis required.'))
        
        if not self.cli_opts.product:
            errors.append(('SatCLIArgumentError', 
                           'errata --product required.'))
        
        if not self.cli_opts.advisory_type:
            errors.append(('SatCLIArgumentError', 
                           'errata --type required.'))
        
        if not self.cli_opts.advisory_type in ['bug', 'enhancement', 'security']:
            errors.append(('SatCLIArgumentError',
                       'errata --type must be one of bug, enhancement, security.'))                   

        if not self.cli_opts.description:
            errors.append(('SatCLIArgumentError', 
                           'errata --description required.'))
                           
        abort_on_error(errors)
                
        # fill in defaults for the optional pieces
        if not self.cli_opts.topic:
            self.cli_opts.topic = "%s update." % self.cli_opts.advisory_type.capitalize()
        
        if not self.cli_opts.solution:
            self.cli_opts.solution = config['errata']['solution']
        
        if self.cli_opts.keywords:
            self.cli_opts.keywords = self.cli_opts.keywords.split(',')
        else:
            self.cli_opts.keywords = []


        # push every RPM matching the --rpms glob, then query its package
        # record back from the server so the ids can be attached below
        rpms = glob(str(self.cli_opts.rpms))
        rpms_data = []
        package_ids = []
        for r in rpms:
            nosig_txt = ''
            if config['allow_nosig']:
                nosig_txt = '--nosig'
            cmd = "%s %s -u %s -p %s --server %s %s" % \
                (config['cmd_rhnpush'], r, config['user'], 
                 config['password'], 
                 config['server'], nosig_txt)
            # NOTE(review): the rhnpush exit status/output is discarded, so
            # a failed push is only detected indirectly by the query below.
            gso(cmd)
            rpm = RPM(file(r))  
            package = g.proxy.query(model.Package, just_one=True,
                                name=rpm[rpmdefs.RPMTAG_NAME], 
                                version=rpm[rpmdefs.RPMTAG_VERSION], 
                                release=rpm[rpmdefs.RPMTAG_RELEASE], 
                                arch=rpm[rpmdefs.RPMTAG_ARCH])
            rpms_data.append(package)
        # optional source RPM push (not attached to the errata's packages)
        if self.cli_opts.srpm:
            if os.path.exists(self.cli_opts.srpm):
                rpm = RPM(file(self.cli_opts.srpm))  
                nosig_txt = ''
                if config['allow_nosig']:
                    nosig_txt = '--nosig'
                cmd = "%s %s --source -u %s -p %s --server %s %s" % \
                    (config['cmd_rhnpush'], self.cli_opts.srpm, 
                     config['user'], config['password'], 
                     config['server'], nosig_txt)
                gso(cmd)
            else:
                log.warn("SRPM '%s' doesn't exist!" % self.cli_opts.srpm)    

        for p in rpms_data:
            package_ids.append(p.id)
        
        # expand the short --type value to the full advisory type label
        if self.cli_opts.advisory_type == 'bug':
            self.cli_opts.advisory_type = 'Bug Fix Advisory'
        elif self.cli_opts.advisory_type == 'enhancement':
            self.cli_opts.advisory_type = 'Product Enhancement Advisory'
        elif self.cli_opts.advisory_type == 'security':
            self.cli_opts.advisory_type = 'Security Advisory'        
            
            
        # assemble and submit the errata, then read it back for the caller
        e = model.Errata()
        e.synopsis = self.cli_opts.synopsis
        e.advisory_name = self.cli_opts.advisory
        e.advisory_release = 1
        e.advisory_type = self.cli_opts.advisory_type
        e.product = self.cli_opts.product
        e.topic = self.cli_opts.topic
        e.description = self.cli_opts.description
        e.references = self.cli_opts.references or ''
        e.notes = self.cli_opts.notes or ''
        e.solution = self.cli_opts.solution
        e.bug_ids = []
        e.keywords = self.cli_opts.keywords or []
        e.package_ids = package_ids
        e.publish = self.cli_opts.publish
        e.channels = channels       
        g.proxy.create(e)
        res = g.proxy.query(model.Errata, just_one=True, all_data=True,
                            advisory=self.cli_opts.advisory)     
        return dict(errata=res)
Ejemplo n.º 40
0
# -*- coding: utf-8 -*-
Ejemplo n.º 41
0
def file_count(path='.'):
    """Return the number of directory entries in *path*, as reported by
    ``ls <path> | wc -l`` (returned as a string, matching the original
    return type).

    path -- directory to count; defaults to the current directory so the
        original zero-argument call keeps working.
    """
    # gso returns (exit_status, output); the status was always ignored here.
    count = gso('ls %s | wc -l' % path)[1]
    return count
Ejemplo n.º 42
0
# -*- coding: utf-8 -*-
Ejemplo n.º 43
0
def insert_1m_worker(request, amt, count):
    """Benchmark MongoDB bulk inserts with optional index/select phases.

    Inserts `amt` documents (an int, or one of the shorthands '1m'/'100k'/
    '10k') into the collection named by ?collection=, repeated `count`
    times, timing each phase and appending every measurement to
    static/<collection>.data.js for the plotting front-end.

    Optional request params: initialindex, noindex, noselect, collection,
    sleep, startfrom, verbose.

    Returns a summary dict: success flag, total elapsed time, final
    collection count and the list of per-phase measurements.
    """
    count = int(count)
    # translate the size shorthands into document counts
    if amt == "1m":
        amt = 1000000
    elif amt == "100k":
        amt = 100000
    elif amt == "10k":
        amt = 10000
    else:
        amt = int(amt)

    def create_index():
        # (re)build the unique index and report how long that took
        log.info("creating index")
        createindex_start = time.time()
        test_collection.ensure_index("indexed_id", unique=True)
        createindex_delta = time.time() - createindex_start
        ins = {"curitems": curitems, "time": createindex_delta, "action": "create_index"}
        return ins

    initialindex = bool(request.params.get("initialindex", False))
    noindex = bool(request.params.get("noindex", False))
    noselect = bool(request.params.get("noselect", False))
    colname = request.params.get("collection", "test_collection")
    sleep = int(request.params.get("sleep", 0))

    log.info("starting off with a count of %s" % count)
    records_per_iter = 100
    count_of_iter = amt / records_per_iter
    log.info("running %s iterations of %s records each. noindex=%s" % (count_of_iter, records_per_iter, noindex))
    start = time.time()
    log.info("getting collection")
    test_collection = get_collection(colname)
    global connection
    inserts = []
    # running item counter; ?startfrom= can offset the starting id
    curitems = int(request.params.get("startfrom", test_collection.count()))

    def insappend(ins):
        # record one measurement both in memory (inserts) and in the
        # on-disk 'var data = [...]' javascript file used by the charts
        ins["stamp"] = datetime.datetime.now().strftime("%s")
        fname = "static/%s.data.js" % colname
        log.info(ins)
        inserts.append(ins)
        if os.path.exists(fname):
            fp = open(fname, "r")
            dt = fp.read()
            fp.close()
        else:
            dt = "var data = [];"
        objres = re.compile("var data = (.*);").search(dt)
        if objres:
            objstr = objres.group(1)
        else:
            objstr = "[]"
        obj = json.loads(objstr)
        obj.append(ins)
        fp = open(fname, "w")
        fp.write("var data = %s;" % json.dumps(obj))
        fp.close()

    # log the run parameters as the first data point
    args = {
        "action": "begin",
        "amt": amt,
        "count": count,
        "initialindex": initialindex,
        "noindex": noindex,
        "noselect": noselect,
        "colname": colname,
        "sleep": sleep,
    }
    insappend(args)

    if initialindex:
        insappend(create_index())

    for cnt in range(count):
        log.info("index section, going %s / %s" % (cnt, count))
        # drop the index first so the insert phase is measured without it
        if not noindex:
            try:
                dropindex_start = time.time()
                log.info("dropping index (if exists)")
                test_collection.drop_index("indexed_id_1")
                dropindex_delta = time.time() - dropindex_start
                ins = {"curitems": curitems, "time": dropindex_delta, "action": "drop_index"}
                insappend(ins)

            except pymongo.errors.OperationFailure:
                log.info("index did not exist previously, lols")
        # get the current amount of items

        log.info("currently have %s items" % curitems)
        log.info("starting off insert")

        # insert phase: count_of_iter batches of records_per_iter docs
        start_ins = time.time()
        log.info("starting ins %s" % cnt)
        for i in range(count_of_iter):
            ins_start = time.time()
            if i % 1000 == 0:
                log.info("batch %s, generating %s docs" % (i, records_per_iter))
            # docs = [get_test_item(1000000*int(count) + i*records_per_iter + e) for e in range(records_per_iter)]
            docs = []
            for e in range(records_per_iter):
                curitems += 1

                ti = get_test_item(curitems)
                if bool(request.params.get("verbose", False)):
                    log.info(ti)
                docs.append(ti)
            test_collection.insert(docs)
            if i % 1000 == 0:
                log.info("inserted, curitems = %s" % curitems)
        ins_delta = time.time() - start_ins
        ins = {"time": ins_delta, "curitems": curitems, "action": "insert"}  #'amt':amt,'count':cnt,
        # log.info('insert %s done in %s: %s'%(cnt,ins_delta,ins))
        insappend(ins)

        # data-size measurement: disk usage for a single node, dataSize()
        # plus chunk-distribution stats for a sharded collection
        if "single" in colname:
            st, op = gso("du -s -m /var/lib/mongodb | cut -f1")
            print "du returned %s,%s" % (op, st)
            assert st == 0
            datasize = int(op)
        elif "sharded" in colname:
            st, op = gso('mongo --eval "db.%s.dataSize()" --quiet localhost:10000/test_database' % colname)
            assert st == 0
            datasize = int(op)
            st, op = gso('mongo --eval "db.printShardingStatus()" --quiet localhost:10000/test_database')
            assert st == 0
            # each '<shard> <chunkcount>' line of printShardingStatus
            cres = re.finditer("^([ \t]+)([\w]+)([ \t]+)([\d]+)", op, re.M)
            tchunks = 0
            shardsamt = 0
            achunks = []
            for r in cres:
                # print r.groups()
                shardname = r.group(2)
                chunks = int(r.group(4))
                tchunks += chunks
                shardsamt += 1
                achunks.append(chunks)
            ins = {"time": tchunks, "curitems": curitems, "action": "chunks_amt"}
            insappend(ins)
            avgdev = sum([abs(chnk - (tchunks / shardsamt)) for chnk in achunks])
            ins = {"time": avgdev, "curitems": curitems, "action": "chunks_deviation"}
            insappend(ins)
            ins = {"time": shardsamt, "curitems": curitems, "action": "shards_amt"}
            insappend(ins)
        # NOTE(review): datasize is only assigned in the 'single'/'sharded'
        # branches above; any other colname raises NameError here.
        ins = {"time": datasize, "curitems": curitems, "action": "data_size"}
        insappend(ins)

        if sleep:
            log.info("sleeping (for sharding pacification purposes) - %s" % sleep)
            time.sleep(sleep)
        log.info("insert phase done")

        # rebuild the unique index; duplicates mean the run has failed
        if not noindex:
            try:
                ins = create_index()
                insappend(ins)
            except pymongo.errors.DuplicateKeyError:
                log.error("FAILURE TO CREATE UNIQUE INDEX")
                return {"success": False, "message": "unique index could not be created", "inserts": inserts}

        # select phase: time 1000 random single-item lookups
        if not noselect and curitems:
            seldelta = 0
            selectnum = 1000
            for i in range(selectnum):
                uid = random.randrange(curitems)
                rt = get_item_worker(uid, colname)
                assert rt["success"], "we were not succesful getting worker %s out of %s  inserted (%s/%s)" % (
                    uid,
                    curitems,
                    i,
                    selectnum,
                )

                seldelta += rt["time"]
            ins = {"curitems": curitems, "time": seldelta, "action": "select%s" % selectnum}
            insappend(ins)

    delta = time.time() - start
    log.info("took us %s" % delta)
    return {"success": True, "time": delta, "count": test_collection.count(), "inserts": inserts}
Ejemplo n.º 44
0
# -*- coding: utf-8 -*-
Ejemplo n.º 45
0
#!/usr/bin/python
# Manual MongoDB chunk balancer: repeatedly finds the emptiest and fullest
# shards (by document count of `colname`) and prepares a moveChunk command
# between them via the mongos router listed first in masters.txt.
# Python 2 script (commands module, print statements).

from commands import getstatusoutput as gso
import json,time

colname = 'sharded_30nodes_nobalancer'
# the first host in masters.txt is the mongos we talk to
mongos = open('masters.txt').read().strip().split('\n')[0]
# NOTE(review): simulate/runonce are never consulted below -- the
# 'while True' loop never terminates, and the final movcmd is built but
# never executed in this excerpt; the script appears truncated here.
simulate=False
runonce=True

while True:
    # per-shard document counts: one '<count> <host>' line each, ascending
    countcmd = """cat slaves.txt | xargs -P0 -n1 -I{}     ssh {} 'annotate-output +{} mongo --eval "db.%s.count()" --quiet localhost:27017/test_database' | egrep -v '(Finished|Started)' | awk '{print $3" "$1}' |sort -n"""%(colname)

    st,op = gso(countcmd)
    assert st==0
    sizes = op.split("\n")
    # emptiest shard is the move target, fullest is the source
    targetshard = sizes[0].split(' ')[1]
    sourceshard = sizes[-1].split(' ')[1]
    print '%s vs %s'%(sizes[0],sizes[-1])
    print 'going to move a chunk %s -> %s'%(sourceshard,targetshard)
    
    # list the source shard's chunks from the config database
    findchunk = """db.chunks.find({shard:'\"'%s'\"',ns:'\"'test_database.%s'\"'})"""%(sourceshard,colname)
    findchunkcmd ="""ssh %s 'echo "%s" | mongo --quiet localhost:10000/config'"""%(mongos,findchunk)
    #print findchunkcmd
    st,op = gso(findchunkcmd)
    assert st==0
    # strip the mongo banner/footer lines, parse the rest as a JSON array
    chunksop = '['+','.join(op.split('\n')[1:-2])+']'
    #print chunksop
    chunks = json.loads(chunksop)
    print 'loaded %s chunks. moving a random one'%len(chunks)
    movcmd = """db.runCommand({moveChunk:'\"'test_database.%s'\"',find:{indexed_id:'\"'%s'\"'},to:'\"'%s'\"'})"""%(colname,chunks[0]['min']['indexed_id'],targetshard)
Ejemplo n.º 46
0
# -*- coding: utf-8 -*-
Ejemplo n.º 47
0
	def parse(self, name):
		"""Run ``self.command % name`` and return the command output.

		Raises OSError (carrying the output as its message) when the
		command exits with a non-zero status.
		"""
		status, output = gso(self.command %(name))
		if status:
			raise OSError(output)
		return output
Ejemplo n.º 48
0
# -*- coding: utf-8 -*-
Ejemplo n.º 49
0
# -*- coding: utf-8 -*-
Ejemplo n.º 50
0
def file_count():
    """Return the entry count of the current directory, as the string
    produced by ``ls | wc -l``."""
    status, output = gso('ls | wc -l')
    return output