Example no. 1
def send_email():
  try:
    myconn = my.connect(host='', port=, db='', user=user_serveradmin,
                        passwd=passwd_serveradmin, charset='utf8')
  except Exception, e:
    print Exception, e
    sys.exit(0)
Example no. 2
def insert_data_mysql(insert_sql,tt):
  try:
    myconn = my.connect(host='', port=, db='', user=user_serveradmin,
                        passwd=passwd_serveradmin, charset='utf8')
  except Exception, e:
    print Exception, e
    sys.exit(0)
Example no. 3
def get_topicMatrix(Matrix, topicNum, fill = True):
    h,w = Matrix.shape
    if(True==fill):
        add = float(np.sum(Matrix))/(h*w)
        tmp = np.zeros((h,w), dtype=np.float32)
        for i in range(h):
            for j in range(w):
                if(0==Matrix[i][j]):
                    tmp[i][j] += add
                else:
                    tmp[i][j] += float(Matrix[i][j])
        Matrix = tmp
    m = min(h,w)
    if (m<topicNum):
        print "topicNum is larger than the minor dimension of the Matrix!"
        sys.exit()
    u,s,v = np.linalg.svd(Matrix)
    l = len(s)
    sm = np.zeros((l,l))
    for i in range(topicNum):
        sm[i][i] = s[i]

    if(h<w):
        Matrix2 = (u.dot(sm)).dot(v[0:h,0:w])
    else:
        Matrix2 = (u[0:h,0:w].dot(sm)).dot(v)

    return Matrix2
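A minimal usage sketch for get_topicMatrix above (illustrative values only; it assumes the function is in scope and that numpy is imported as np, as the snippet already requires):

import numpy as np

# Small illustrative matrix; zeros are smoothed with the global mean because fill=True.
M = np.array([[1, 0, 2, 0],
              [0, 3, 0, 1],
              [4, 0, 0, 2]], dtype=np.float32)

# Keep only the two largest singular values and rebuild a matrix of the same shape.
M2 = get_topicMatrix(M, topicNum=2)
print(M2.shape)  # (3, 4)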
Example no. 4
def get_redis_group_name():
  try:
    myconn = my.connect(host='', port=, db='', user=user_serveradmin,
                        passwd=passwd_serveradmin, charset='utf8')
  except Exception, e:
    print Exception, e
    sys.exit(0)
Example no. 5
def set_array_frec():
    client = PySimpleClient()
    master = client.getComponent("CONTROL/MASTER")
    arrayList = master.getAutomaticArrayComponents() + master.getManualArrayComponents()
    for array in arrayList:
        master.destroyArray(array.ComponentName)
    arrayList = []    
    antennas = master.getAvailableAntennas()
    master.createManualArray(antennas)
    arrayList = master.getAutomaticArrayComponents() +  master.getManualArrayComponents()
    if ( len(arrayList) != 1 ):
        if( len(arrayList) == 0):
            print "Could not create an array!!"
            client.releaseComponent("CONTROL/MASTER")
            sys.exit(0)
        else:
            print "Could not destroy previous arrays and create a new fresh array!"
            client.releaseComponent("CONTROL/MASTER")
            sys.exit(0)
    currentArray = client.getComponent(arrayList[0].ComponentName)
    client.releaseComponent("CONTROL/MASTER")
    setArrayName(currentArray.getArrayName())
    array = getArray()
    tp = array.getTotalPowerObservingMode()
    return tp
Example no. 6
def npy2bin(filename, savepath, overwriteFlag=0):
    file = Path(filename)
    sp = Path(savepath)
    if file.exists():
        if file.suffix == '.npy':
            if not (sp / str(file.stem + '.bin')).exists() or overwriteFlag:
                try:
                    data = np.load(file)
                    normFactor = getInt16ConvFactor(data)
                    dat2 = data2int16(data, normFactor)
                    dat2.tofile(str(sp / file.stem) + '.bin')
                    saveNormFactor(normFactor, file.stem, sp)
                except:
                    print('Error processing {}'.format(file))
                    print(sys.exc_info()[0],
                          sys.exc_info()[1],
                          sys.exc_info()[2].tb_lineno)
                    sys.exit()
                print('Data Conversion completed for {} '.format(file))
            else:
                print('File exists and overwrite = false ')
        else:
            sys.exit('Invalid File')
    else:
        sys.exit('File Not Found {}'.format(file))
Example no. 7
def play(done):
    secret_word=random.choice(word)
    bad_guess=[]
    good_guess=[]
    complete=True       
    while True:
        draw(bad_guess,good_guess,secret_word)
        guess=getguess(bad_guess,good_guess)
        if guess in secret_word:
               good_guess.append(guess)
               complete=True
               for i in range(len(secret_word)):
                     if secret_word[i] not in good_guess:
                            complete=False
               if complete:
                    print('you won')
                    print('your word is',good_guess)
                    done=True
        else:
                bad_guess.append(guess)
                if(len(bad_guess)==7):
                    print('you lost buddy')
                    done=True
        if done:
              ch=input('press n to exit and y to play again')
              if ch=='n':
                    sys.exit()
              else:
                    return play(done=False)
Example no. 8
def main():

    try:
        opts, args = getopt.getopt(sys.argv[1:], "i:", ["input="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)

    f = None

    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit(2)
        elif opt in ("-i", "--input"):
            try:
                f = open(arg, 'r')
            except (OSError, IOError) as e:
                usage()
                sys.exit(2)
            p = parser.Parser()
            ast = p.parse(f.read())
            if ast == None:
                print "ERROR! The file " + arg + " is not valid"
            else:
                print "AST was successfully built. Generating Finite Automata..."
                a = convert(ast)
                a.toHTML()
                print "FiniteAutomata.html was successfully generated! :)"
Example no. 9
def check_hosts_file():
  # Get the additional arguments. It might have been stored there.
  global extra_args

  # The argument for the hosts file.
  hosts_path_arg = '-i'

  # The current dir path the hosts file.
  cur_path_hosts = os.getcwd() + '/hosts'

  # Check if the hosts file argument was specified in command line.
  if hosts_path_arg in extra_args:

    # Make sure the file path is passed in.
    arg_hosts_path_id = extra_args.index(hosts_path_arg) + 1
    if extra_args[arg_hosts_path_id] == None:
      sys.exit(errors('HOSTS_PATH_DECLARATION'))

    # Return the path passed.
    return extra_args[arg_hosts_path_id]

  # If the hosts file exists in the local directory.
  elif os.path.isfile(cur_path_hosts):
    extra_args += [hosts_path_arg, cur_path_hosts]
    return cur_path_hosts

  # If none was passed and there isn't one in the current directory.
  else:
    return None
Example no. 10
def distance(p1, p2):
    if len(p1) != len(p2):
        sys.exit("Vectors have different length")
    sum = 0
    for i in range(len(p1)):
        sum += (p1[i] - p2[i])**2
    return math.sqrt(sum)
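A quick usage sketch for distance above (it assumes the function is defined in a module that imports math and sys, as its body requires):

p1 = [0, 0]
p2 = [3, 4]
print(distance(p1, p2))  # Euclidean distance -> 5.0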
Example no. 11
def uninstall(args):
    makefile = os.path.join(args.build_dir, 'Makefile')
    if not os.path.exists(args.build_dir) or not os.path.exists(makefile):
        sys.exit("May not uninstall Cyclus since it has not yet been built.")
    rtn = subprocess.check_call(['make', 'uninstall'],
                                cwd=args.build_dir,
                                shell=(os.name == 'nt'))
Example no. 12
def install_zookeeper():
	# /mnt/zookeeper/data chown
	config = json.load(open('zookeeper-config.json'));
	data_dir_maked = subprocess.check_call(["sudo", "mkdir", "-p", "/mnt/zookeeper/data"])
	if 0 == data_dir_maked:
		subprocess.call(["sudo", "chown", "-R", "cloud-user", "/mnt/zookeeper"])
	else:
		print("Create directory /mnt/zookeeper/data failed")
		sys.exit(1)
	print("Create directory /mnt/zookeeper/data successfully")
	# myid
	myip = get_ip_address()
	mynode = [node for node in config['nodes'] if node['ip'] == myip][0]
	open("/mnt/zookeeper/data/myid", "w").write(str(mynode['id']))
	print("Set myid for zookeeper successfully")
	# cp zookeeper
	subprocess.call(['sudo', 'rm', '-rf', '/usr/local/zookeeper'])
	subprocess.call(['sudo', 'cp', '-r', './zookeeper', '/usr/local/zookeeper'])
	for node in config['nodes']:
		appendline('/usr/local/zookeeper/conf/zoo.cfg', 'server.'+str(node['id'])+'=zoo'+str(node['id'])+':2888:3888')
	
	subprocess.call(['sudo', 'chown', '-R', 'cloud-user', '/usr/local/zookeeper'])
	# hosts
	for node in config['nodes']:
		appendline('/etc/hosts', node['ip']+'\t'+'zoo'+str(node['id']))
Example no. 13
def read_and_validate_yaml(fd, fname, validate_yaml):
    """
    Get the YAML text from an ASDF formatted file.

    Parameters
    ----------
    fname : str
        Input file name
    fd : GenericFile
        for fname.
    validate_yaml: bool

    Return
    ------
    bytes
        The YAML portion of an ASDF file.
    yaml_version: tuple or None
    """
    YAML_TOKEN = b"%YAML"
    token = fd.read(len(YAML_TOKEN))
    if token != YAML_TOKEN:
        print(f"Error: No YAML in '{fname}'")
        sys.exit(1)

    yaml_version = None
    if validate_yaml:
        yaml_version = get_yaml_version(fd, token)

    # Get YAML reader and content
    reader = fd.reader_until(
        constants.YAML_END_MARKER_REGEX,
        7,
        "End of YAML marker",
        include=True,
        initial_content=token,
    )
    yaml_content = reader.read()

    # YAML validation implies we are reading from a normal YAML file, so
    # should not have any binary blocks.
    if not validate_yaml and not binary_block_exists(fd):
        delim = "!" * 70
        print(delim)
        print(f"No binary blocks exist in {fname}.  This ASDF file can be")
        print("directly edited in any text editor.  Or the file is poorly")
        print("formatted and cannot be corrected with this tool.")
        print(delim)
        sys.exit(1)

    if validate_yaml:
        # Create a YAML tree to validate
        # The YAML text must be converted to a stream.
        tree = yamlutil.load_tree(io.BytesIO(yaml_content))
        if tree is None:
            print("Error: 'yamlutil.load_tree' failed to return a tree.")
            sys.exit(1)

        schema.validate(tree)  # Failure raises an exception.

    return yaml_content, yaml_version
Example no. 14
def groups_master_slave_dffidc():
  try:
    myconn = my.connect(host='', port=, db='', user=user_serveradmin,
                        passwd=passwd_serveradmin, charset='utf8')
  except Exception, e:
    print Exception, e
    sys.exit(0)
Example no. 15
def main():

    try:
        opts, args = getopt.getopt(sys.argv[1:], "i:", ["input="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)

    f = None

    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit(2)
        elif opt in ("-i", "--input"):
            try:
                f = open(arg, 'r')
            except (OSError, IOError) as e:
                usage()
                sys.exit(2)
            p = parser.Parser()
            ast = p.parse(f.read())
            if ast == None:
                print "ERROR! The file " + arg + " is not valid"
            else:
                print "AST was successfully built. Generating Finite Automata..."
                a = convert(ast)
                a.toHTML()
                print "FiniteAutomata.html was successfully generated! :)"
Example no. 16
def group_master_slave_diffidc_instance(thread_id,group_list,return_dict):
  try:
    myconn = my.connect(host='', port=, db='', user=user_serveradmin,
                        passwd=passwd_serveradmin, charset='utf8')
  except Exception, e:
    print Exception, e
    sys.exit(0)
Example no. 17
def install_zookeeper():
    # /mnt/zookeeper/data chown
    config = json.load(open('zookeeper-config.json'))
    data_dir_maked = subprocess.check_call(
        ["sudo", "mkdir", "-p", "/mnt/zookeeper/data"])
    if 0 == data_dir_maked:
        subprocess.call(
            ["sudo", "chown", "-R", "cloud-user", "/mnt/zookeeper"])
    else:
        print("Create directory /mnt/zookeeper/data failed")
        sys.exit(1)
    print("Create directory /mnt/zookeeper/data successfully")
    # myid
    myip = get_ip_address()
    mynode = [node for node in config['nodes'] if node['ip'] == myip][0]
    open("/mnt/zookeeper/data/myid", "w").write(str(mynode['id']))
    print("Set myid for zookeeper successfully")
    # cp zookeeper
    subprocess.call(['sudo', 'rm', '-rf', '/usr/local/zookeeper'])
    subprocess.call(
        ['sudo', 'cp', '-r', './zookeeper', '/usr/local/zookeeper'])
    for node in config['nodes']:
        appendline(
            '/usr/local/zookeeper/conf/zoo.cfg', 'server.' + str(node['id']) +
            '=zoo' + str(node['id']) + ':2888:3888')

    subprocess.call(
        ['sudo', 'chown', '-R', 'cloud-user', '/usr/local/zookeeper'])
    # hosts
    for node in config['nodes']:
        appendline('/etc/hosts', node['ip'] + '\t' + 'zoo' + str(node['id']))
Example no. 18
def read_baits(fn):
    adict = {}
    hasPrimer = True
    new = []
    for line in cmn.file2lines(fn):
        if line.strip() == '':
            continue
        sp, name, seq = line.split()
        if len(seq) != 698:
            hasPrimer = False
            if len(seq) == 658:
                #fixable
                seq = add_primer(seq)
            else:
                print('Error! didn\'t recognize the length of the bait %s %s' %
                      (sp, name))
                sys.exit()
        newline = '%s\t%s\t%s\n' % (sp, name, seq)
        new.append(newline)
        key = '%s_%s' % (sp, name)
        adict[key] = seq

    if not hasPrimer:
        print('revise the input baits to add primer...')
        cmn.write_file(''.join(new), fn)

    return adict
Example no. 19
def get_topicMatrix(Matrix, topicNum, fill=True):
    h, w = Matrix.shape
    if (True == fill):
        add = float(np.sum(Matrix)) / (h * w)
        tmp = np.zeros((h, w), dtype=np.float32)
        for i in range(h):
            for j in range(w):
                if (0 == Matrix[i][j]):
                    tmp[i][j] += add
                else:
                    tmp[i][j] += float(Matrix[i][j])
        Matrix = tmp
    m = min(h, w)
    if (m < topicNum):
        print "topicNum is larger than the minor dimension of the Matrix!"
        sys.exit()
    u, s, v = np.linalg.svd(Matrix)
    l = len(s)
    sm = np.zeros((l, l))
    for i in range(topicNum):
        sm[i][i] = s[i]

    if (h < w):
        Matrix2 = (u.dot(sm)).dot(v[0:h, 0:w])
    else:
        Matrix2 = (u[0:h, 0:w].dot(sm)).dot(v)

    return Matrix2
Example no. 20
 def _checkStatue(self):
     if not os.path.isfile(self._announce):
         print(INIT)
         sys.exit()
     if not os.path.isfile(self._mailList):
         print(INIT)
         sys.exit()
     return
Example no. 21
def install_matomo():
    token = os.environ.get('MATOMO_TOKEN')
    if not token:
        sys.exit('You need to provide $MATOMO_TOKEN env var')
    wget('https://raw.githubusercontent.com/matomo-org/matomo-log-analytics'
         '/master/import_logs.py',
         '/srv/tilery/src/matomo.py')
    cron = template('remote/run-matomo', matomo_token=token)
    put(cron, '/etc/cron.daily/run-matomo')
    run('chmod +x /etc/cron.daily/run-matomo')
Example no. 22
def jksToBks(source, target):
    cmd = "keytool -importkeystore -srckeystore " + source + " -destkeystore " + target + " -srcstoretype JKS -deststoretype BKS " + \
          "-srcstorepass password -deststorepass password -provider org.bouncycastle.jce.provider.BouncyCastleProvider -noprompt"
    if debug:
        print("[debug]", cmd)

    p = subprocess.Popen(cmd,
                         shell=True,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT,
                         bufsize=0)

    while (True):

        line = p.stdout.readline()
        if p.poll() is not None and not line:
            # The process terminated
            break

        sys.stdout.write(line)

        if line.find(
                "java.lang.ClassNotFoundException: org.bouncycastle.jce.provider.BouncyCastleProvider"
        ) != -1:
            print("")
            print(
                "WARNING: BouncyCastleProvider not found cannot export certificates for android demos in BKS format."
            )
            print(
                "         You can download BKS provider from http://www.bouncycastle.org/download/bcprov-jdk15on-146.jar."
            )
            print(
                "         After download copy the JAR to $JAVA_HOME/lib/ext where JAVA_HOME points to your JRE"
            )
            print("         and run this script again.")
            print("")
            sys.exit(1)
        elif line.find(
                "java.security.InvalidKeyException: Illegal key size") != -1:
            print("")
            print(
                "WARNING: You need to install Java Cryptography Extension (JCE) Unlimited Strength."
            )
            print(
                "         You can download it from Additional Resources section in Oracle Java Download page at:"
            )
            print(
                "             http://www.oracle.com/technetwork/java/javase/downloads/index.html."
            )
            print("")
            sys.exit(1)

    if p.poll() != 0:
        sys.exit(1)
Example no. 23
 def getMissing(self, a, b):
     missing = []
     add = missing.append
     for item in a:
         try:
             if not item in b:
                 add(item)
         except:
             print "choked with ", item
             sys.exit()
     return missing
Example no. 24
def send_commands(conn):
    while True:
        cmd = input()
        if cmd == 'quit':
            conn.close()
            s.close()
            sys.exit()
        if len(str.encode(cmd)) > 0:
            conn.send(str.encode(cmd))
            client_response = str(conn.recv(1024), "utf-8")
            print(client_response)
Example no. 25
def read_config(config_file):
    """
    Read configuration YAML file.

    account and password keys are required in the file.
    """

    config = yaml.load(open(config_file).read())
    not_exist = [setting for setting in ("account", "password") if not config.has_key(setting)]
    if not_exist:
        print "Could not read %s setting from configuration file." % ", ".join(not_exist)
        sys.exit(1)
    return config
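A minimal usage sketch for read_config above (file name and contents are illustrative; the snippet is Python 2 style and calls yaml.load without an explicit Loader, so this assumes a Python 2 interpreter with an older PyYAML installed):

# Write a throwaway config with the two required keys.
with open('config.yml', 'w') as fh:
    fh.write('account: alice\npassword: secret\n')

config = read_config('config.yml')
print(config['account'])  # -> alice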
Example no. 26
def xAxisTs(timeseries):
    """ Prompt the user to choose a x-axis representation for the timeseries.
    
    Args:
        timeseries: a timeseries object
        
    Returns:
        x_axis - the values for the x-axis representation, \n
        label - returns either "age", "year", or "depth"
        
    """
    if "depth" in timeseries.keys() and "age" in timeseries.keys() or\
            "depth" in timeseries.keys() and "year" in timeseries.keys():
        print("Do you want to use time or depth?")
        choice = int(input("Enter 0 for time and 1 for depth: "))
        if choice == 0:
            if "age" in timeseries.keys() and "year" in timeseries.keys():
                print("Do you want to use age or year?")
                choice2 = int(input("Enter 0 for age and 1 for year: "))
                if choice2 == 0:
                    x_axis = timeseries["age"]
                    label = "age"
                elif choice2 == 1:
                    x_axis = timeseries["year"]
                    label = "year"
                else:
                    sys.exit("Enter 0 or 1")
            elif "age" in timeseries.keys():
                x_axis = timeseries["age"]
                label = "age"
            elif "year" in timeseries.keys():
                x_axis = timeseries["year"]
                label = "year"
        elif choice == 1:
            x_axis = timeseries["depth"]
            label = "depth"
        else:
            sys.exit("Enter 0 or 1")
    elif "depth" in timeseries.keys():
        x_axis = timeseries["depth"]
        label = "depth"
    elif "age" in timeseries.keys():
        x_axis = timeseries["age"]
        label = "age"
    elif "year" in timeseries.keys():
        x_axis = timeseries["year"]
        label = "year"
    else:
        sys.exit("No age or depth information available")

    return x_axis, label
Example no. 27
def xAxisTs(timeseries):
    """ Prompt the user to choose a x-axis representation for the timeseries.
    
    Args:
        timeseries: a timeseries object
        
    Returns:
        x_axis - the values for the x-axis representation, \n
        label - returns either "age", "year", or "depth"
        
    """
    if "depth" in timeseries.keys() and "age" in timeseries.keys() or\
            "depth" in timeseries.keys() and "year" in timeseries.keys():
        print("Do you want to use time or depth?")
        choice = int(input("Enter 0 for time and 1 for depth: "))
        if choice == 0:
            if "age" in timeseries.keys() and "year" in timeseries.keys():
                print("Do you want to use age or year?")
                choice2 = int(input("Enter 0 for age and 1 for year: "))
                if choice2 == 0:
                    x_axis = timeseries["age"]
                    label = "age"
                elif choice2 == 1:
                    x_axis = timeseries["year"]
                    label = "year"
                else:
                    sys.exit("Enter 0 or 1")
            elif "age" in timeseries.keys():
                x_axis = timeseries["age"]
                label = "age"
            elif "year" in timeseries.keys():
                x_axis = timeseries["year"]
                label = "year"            
        elif choice == 1:
            x_axis = timeseries["depth"]
            label = "depth"
        else: 
            sys.exit("Enter 0 or 1")
    elif "depth" in timeseries.keys():
        x_axis =  timeseries["depth"]
        label = "depth"
    elif "age" in timeseries.keys():
        x_axis = timeseries["age"]
        label = "age"
    elif "year" in timeseries.keys():
        x_axis = timeseries["year"]
        label = "year" 
    else: 
        sys.exit("No age or depth information available")
        
    return x_axis, label  
Example no. 28
def create_subnet_operation(args):
    if parser_handler.is_create_subnet_command(args) is False:
        return
    docker_network_name = args.name
    if docker_network_name is None or len(docker_network_name) == 0:
        utilities.log_error(
            "Must set the docker network name! e.g. tars-network")
        sys.exit(-1)
    subnet_ip_segment = args.subnet
    if subnet_ip_segment is None or len(subnet_ip_segment) == 0:
        utilities.log_error("Must set the subnet! e.g. 172.25.0.0.1")
        sys.exit(-1)
    NetworkManager.create_sub_net(subnet_ip_segment, docker_network_name)
    utilities.print_split_info()
Example no. 29
    def sniffer(self):
        try:
            print '[*] Starting sniffer for %d packets' % self.packet_cnt
            bpf_filter = 'IP host ' + self.target_ip
            packets = sniff(count=self.packet_cnt, iface=self.interface)
            wrpcap('results.pcap', packets)
            self.restore()

        except Scapy_Exception as msg:
            print msg, "Hi there!!"

        except KeyboardInterrupt:
            self.restore()
            sys.exit()
Example no. 30
def gold_room():
    print("This room is full of gold. How much do you take?")

    choice = input("> ")
    if "0" in choice or "1" in choice:
        how_much = int(choice)
    else:
        dead("Man, learn to type a number.")

    if how_much < 50:
        print("Nice, you're not greedy, you win!")
        exit(0)
    else:
        dead("You greedy bastard!")
Example no. 31
 def evaluate(self, test_data):
     """Return the number of test inputs for which the neural
     network outputs the correct result. Note that the neural
     network's output is assumed to be the index of whichever
     neuron in the final layer has the highest activation."""
     test_results = [(np.argmax(self.feedforward(x)), y)
                     for (x, y) in test_data]
     print("test_results.type={0},sub.type={1}".format(
         type(test_results), type(test_results[0])))
     for x, y in test_results:
         print("x={0},y={1}".format(x, y))
     import sys
      sys.exit(0)
     return sum(int(x == y) for (x, y) in test_results)
Example no. 32
def transform_data_lbl(
    prj_dh,
    transform_type,
    type_form='aas',
    data_lbl_col='NiA_norm',
):
    """
    Transformation of counts of mutants in data_lbl table


    :param prj_dh: path to the project directory
    :param transform_type: type of transformation log, plog, glog etc
    :returns data_lbl: data_lbl with transformed counts
    """
    data_lbl_fhs = glob("%s/data_lbl/aas/*" % prj_dh)
    if len(data_lbl_fhs) > 0:
        col_sep = "."
        data_lbl_all = fhs2data_combo(data_lbl_fhs,
                                      cols=[data_lbl_col],
                                      index='mutids',
                                      col_sep=col_sep)
        data_lbl_all_dh = '%s/data_lbl/%s_all' % (prj_dh, type_form)
        if not exists(data_lbl_all_dh):
            makedirs(data_lbl_all_dh)
        data_lbl_all_fh = '%s/%s.csv' % (data_lbl_all_dh, data_lbl_col)
        data_lbl_all.to_csv(data_lbl_all_fh)

        if (transform_type == 'log2') or (transform_type == 'log'):
            data_lbl_all = data_lbl_all.apply(np.log2)
        elif transform_type == 'plog':
            data_lbl_all = data_lbl_all.apply(plog)
        else:
            logging.error("transform_type not valid: %s" % transform_type)
            sys.exit()
        data_lbl_col = 'NiA_tran'
        data_lbl_all_fh = '%s/%s.csv' % (data_lbl_all_dh, data_lbl_col)
        data_lbl_all.to_csv(data_lbl_all_fh)

        for col in data_lbl_all:
            data_lbl_fn, tmp = col.split('.')
            data_lbl_fh = '%s/data_lbl/%s/%s' % (prj_dh, type_form,
                                                 data_lbl_fn)
            data_lbl = pd.read_csv(data_lbl_fh).set_index('mutids')
            if not data_lbl_col in data_lbl:
                data_lbl_cols = data_lbl.columns.tolist()
                data_lbl = pd.concat([data_lbl, data_lbl_all.loc[:, col]],
                                     axis=1)
                data_lbl.columns = data_lbl_cols + [data_lbl_col]
                data_lbl.index.name = 'mutids'
                data_lbl.to_csv(data_lbl_fh)
Example no. 33
def unzipFile(filename):
	"""
	Unzips the file containing translations.
	Returns name of the directory where files were extracted.
	"""
	try:
		file = ZipFile(filename)
		file.extractall(EXTRACTION_DIR)
	except zipfile.BadZipFile as e:
		print("Error while unzipping: {0}".format(e.strerror))
		sys.exit(2)
	except zipfile.LargeZipFile as e:
		print("Error while unzipping: {0}".format(e.strerror))
		sys.exit(2)
	return EXTRACTION_DIR
Example no. 34
def increment_version(version_type):
    version_number = get_version()
    version = semantic_version.Version(version_number, partial=True)
    if version_type == 'major':
        new_version = version.next_major()
    elif version_type == 'minor':
        new_version = version.next_minor()
    elif version_type == 'patch':
        new_version = version.next_patch()
    else:
        print('Valid versions are {}, {}, {}'.format(*VERSION_TYPES))
        sys.exit(1)

    with open(VERSION_FILE, 'wb') as version_file:
        version_file.write("__version__ = '{}'\n".format(new_version))
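For reference, a short sketch of the semantic_version calls that increment_version above relies on (assuming the python-semanticversion package is installed):

import semantic_version

v = semantic_version.Version('1.2.3')
print(v.next_major())  # 2.0.0
print(v.next_minor())  # 1.3.0
print(v.next_patch())  # 1.2.4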
Example no. 35
def unzipFile(filename):
    """
	Unzips the file containing translations.
	Returns name of the directory where files were extracted.
	"""
    try:
        file = ZipFile(filename)
        file.extractall(EXTRACTION_DIR)
    except zipfile.BadZipFile as e:
        print("Error while unzipping: {0}".format(e.strerror))
        sys.exit(2)
    except zipfile.LargeZipFile as e:
        print("Error while unzipping: {0}".format(e.strerror))
        sys.exit(2)
    return EXTRACTION_DIR
Example no. 36
 def get_extension_attribute(self):
     request = httplib.HTTPSConnection(self.server)
     headers = {'Authorization': self.auth, 'Accept': 'application/json'}
     try:
         request.request(
             "GET",
             "/JSSResource/computerextensionattributes/id/{}".format(
                 self.id),
             headers=headers)
         response = request.getresponse()
         json_data = json.loads(response.read())
         return json_data
     except httplib.HTTPException as e:
         print("Exception: %s" % e)
         sys.exit(1)
Example no. 37
    def _cc_calc(self, word, freq, phasePrefer):
        '''
        Calculates cluster condensation ratio (used in apply_cluster_filter)
        
        Parameters
        ----------
        word: string
            vocabulary term for which CC ratio calculated
        freq: float
            normalized frequency value to calculate CC ratio
        columnIn: string
            column name from dfVocab used to calculate CC ratio
        phasePrefer: float
            number to divide CC if multiword phrase
            
        Returns
        -------
        CCratio: float
        '''

        # Define length of corpus
        nCorpus = len(self.dfCorpus)

        # Calculate CC ratio
        nOccWord = freq  # normalized frequency of occurrence
        try:
            # Calculate number of documents in which the term appears
            nOccDoc = len(
                self.dfCorpus.loc[self.dfCorpus[self.colStem].str.contains(
                    word, case=False)])
        except KeyError:
            sys.exit(
                "Cannot find stemmed vocabulary column. Run create_clustering_text first!"
            )

        # Actual number of textual units containing word i (multiple possible per document)
        CCnum = nOccDoc

        # Expected number of textual units containing word i
        CCden = nCorpus * (1 - ((1 - (1 / nCorpus))**nOccWord))
        CCratio = CCnum / CCden

        # Use phrase preference to divide cluster ratio by value for multiword vocabulary
        if (len(word.split()) > 1):
            CCratio = CCratio / phasePrefer

        return CCratio
Example no. 38
def jksToBks(source, target):
    cmd = (
        "keytool -importkeystore -srckeystore "
        + source
        + " -destkeystore "
        + target
        + " -srcstoretype JKS -deststoretype BKS "
        + "-srcstorepass password -deststorepass password -provider org.bouncycastle.jce.provider.BouncyCastleProvider -noprompt"
    )
    if debug:
        print("[debug]", cmd)

    p = subprocess.Popen(
        cmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=0
    )

    while True:

        line = p.stdout.readline()
        if p.poll() is not None and not line:
            # The process terminated
            break

        sys.stdout.write(line)

        if line.find("java.lang.ClassNotFoundException: org.bouncycastle.jce.provider.BouncyCastleProvider") != -1:
            print("")
            print("WARNING: BouncyCastleProvider not found cannot export certificates for android demos in BKS format.")
            print(
                "         You can download BKS provider from http://www.bouncycastle.org/download/bcprov-jdk15on-146.jar."
            )
            print("         After download copy the JAR to $JAVA_HOME/lib/ext where JAVA_HOME points to your JRE")
            print("         and run this script again.")
            print("")
            sys.exit(1)
        elif line.find("java.security.InvalidKeyException: Illegal key size") != -1:
            print("")
            print("WARNING: You need to install Java Cryptography Extension (JCE) Unlimited Strength.")
            print("         You can download it from Additional Resources section in Oracle Java Download page at:")
            print("             http://www.oracle.com/technetwork/java/javase/downloads/index.html.")
            print("")
            sys.exit(1)

    if p.poll() != 0:
        sys.exit(1)
Example no. 39
def run(viewpoints_file):
    if not os.path.exists(viewpoints_file):
        print('Cannot find viewpoints_file!')
        sys.exit(0)

    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)

    viewpoints_data = open(viewpoints_file, 'r').readlines()

    fout = open(model_file, 'w')

    for line in viewpoints_data:
        vp = line.strip()
        direction = model_extract.download(vp.split(','), dst_dir)
        fout.write(vp + ',' + str(direction) + '\n')

    fout.close()
    print('done')
Example no. 40
    def process_one_project(self, process_num, proj_path, FILE_tokens_file, db):
        self.process_logging.info('Starting %s project <%s> (process %s)' % (self.PROJECTS_CONFIGURATION,proj_path,str(process_num)) )
        p_start = dt.datetime.now()

        if self.PROJECTS_CONFIGURATION == 'Leidos':
            if not os.path.isdir(proj_path):
                self.process_logging.warning('Unable to open %s project <%s> (process %s)' % (self.PROJECTS_CONFIGURATION,proj_path,str(process_num)))
            else:
                # Search for tar files with _code in them
                tar_files = [os.path.join(proj_path, f) for f in os.listdir(proj_path) if os.path.isfile(os.path.join(proj_path, f))]
                tar_files = [f for f in tar_files if '_code' in f]
                if(len(tar_files) != 1):
                    self.process_logging.warning('Tar not found on <'+proj_path+'> (process '+str(process_num)+')')
                else:
                    proj_id = db.insert_project(proj_path,None)

                    tar_file = tar_files[0]
                    times = self.process_tgz_ball(process_num, tar_file, proj_path, proj_id, FILE_tokens_file, db)
                    zip_time, file_time, string_time, tokens_time, write_time, hash_time, regex_time = times

                    p_elapsed = dt.datetime.now() - p_start
                    self.process_logging.info('Project finished <%s,%s> (process %s)', proj_id, proj_path, process_num)
                    self.process_logging.info('Process (%s): Total: %smicros | Zip: %s Read: %s Separators: %smicros Tokens: %smicros Write: %smicros Hash: %s regex: %s', 
                        process_num,  p_elapsed, zip_time, file_time, string_time, tokens_time, write_time, hash_time, regex_time)
        else:

            if self.PROJECTS_CONFIGURATION == 'GithubZIP':
                if not zipfile.is_zipfile(proj_path):
                    self.process_logging.warning('Unable to open %s project <%s> (process %s)' % (self.PROJECTS_CONFIGURATION,proj_path,str(process_num)))
                else:
                    proj_id = db.insert_project(proj_path,None)

                    times = self.process_zip_ball(process_num, proj_path, proj_id, FILE_tokens_file, db)
                    zip_time, file_time, string_time, tokens_time, write_time, hash_time, regex_time = times

                    p_elapsed = dt.datetime.now() - p_start
                    self.process_logging.info('Project finished <%s,%s> (process %s)', proj_id, proj_path, process_num)
                    self.process_logging.info('Process (%s): Total: %smicros | Zip: %s Read: %s Separators: %smicros Tokens: %smicros Write: %smicros Hash: %s regex: %s', 
                        process_num,  p_elapsed, zip_time, file_time, string_time, tokens_time, write_time, hash_time, regex_time)

            else:
                self.process_logging.error('Unknown project configuration format:%s' % (self.PROJECTS_CONFIGURATION))
                sys.exit(1)
Example no. 41
def transform_data_lbl(prj_dh,transform_type,
                      type_form='aas',data_lbl_col='NiA_norm',):
    """
    Transformation of counts of mutants in data_lbl table


    :param prj_dh: path to the project directory
    :param transform_type: type of transformation log, plog, glog etc
    :returns data_lbl: data_lbl with transformed counts
    """
    data_lbl_fhs=glob("%s/data_lbl/aas/*" % prj_dh)
    if len(data_lbl_fhs)>0:
        col_sep="."
        data_lbl_all=fhs2data_combo(data_lbl_fhs,cols=[data_lbl_col],index='mutids',col_sep=col_sep)
        data_lbl_all_dh='%s/data_lbl/%s_all' % (prj_dh,type_form)
        if not exists(data_lbl_all_dh):
            makedirs(data_lbl_all_dh)
        data_lbl_all_fh='%s/%s.csv' % (data_lbl_all_dh,data_lbl_col)
        data_lbl_all.to_csv(data_lbl_all_fh)

        if (transform_type=='log2') or (transform_type=='log'):
            data_lbl_all=data_lbl_all.apply(np.log2)
        elif transform_type=='plog':
            data_lbl_all=data_lbl_all.apply(plog)
        else:
            logging.error("transform_type not valid: %s" % transform_type)
            sys.exit()
        data_lbl_col='NiA_tran'
        data_lbl_all_fh='%s/%s.csv' % (data_lbl_all_dh,data_lbl_col)
        data_lbl_all.to_csv(data_lbl_all_fh)
        
        for col in data_lbl_all:
            data_lbl_fn,tmp=col.split('.')
            data_lbl_fh='%s/data_lbl/%s/%s' % (prj_dh,type_form,data_lbl_fn)
            data_lbl=pd.read_csv(data_lbl_fh).set_index('mutids')
            if not data_lbl_col in data_lbl:
                data_lbl_cols=data_lbl.columns.tolist()
                data_lbl=pd.concat([data_lbl,
                                    data_lbl_all.loc[:,col]],axis=1)
                data_lbl.columns=data_lbl_cols+[data_lbl_col]
                data_lbl.index.name='mutids'
                data_lbl.to_csv(data_lbl_fh)
Example no. 42
def checkin_txt_to_db(words):
    global conn
    c = conn.cursor()
    u_id = words[0]
    tweet_id = words[1]
    latitude = words[2]
    longitude = words[3]
    createdat = words[4]
    if len(words) == 7:
        text = unicode(words[5], encoding='UTF-8')
        place_id = unicode(words[6], encoding='UTF-8')
    if len(words) == 6:
        print 6, words
        sys.exit()
        raise Exception("input words should be 4 length")
    sql = sql_insert_words_checkin
    c.execute(sql, (u_id, tweet_id, latitude, longitude, createdat, text, place_id, ))
    #print words
    conn.commit()
    c.close()
Example no. 43
def main():
    parser = ArgumentParser(description='Effective Reader')
    parser.add_argument(
        '-c', '--config', help="Reader config file",
        default=DEFAULT_CONFIG_PATH)
    parser.add_argument(
        'urls', metavar='URL', type=str, nargs='+',
        help="Urls for read")
    parser.add_argument(
        '-v', '--verbosity', choices=VERBOSITY_CHOICES,
        help="Verbose level", default=None)
    parser.add_argument(
        '-s', '--short', action='store_true',
        help="Short view")

    args = parser.parse_args()
    log = get_logger('ereader', args.verbosity)

    if not os.path.isfile(args.config):
        log.error("Config file not found")
        sys.exit(1)

    config = ConfigParser()
    readed = config.read([os.path.expanduser('~/.ereader.ini'), args.config])
    log.debug('Read configs: %s', ', '.join(readed))

    if args.verbosity is not None:
        config.set('main', 'verbosity', str(args.verbosity))
    if args.short is not None:
        config.set('main', 'short', str(args.short))

    app = EReaderApplication(config)

    try:
        app.configure(args.config)
        app.run(args.urls)
    except AssertionError as e:
        log.warning(e)
    except ConfigureError as e:
        log.error('Configure error: %s', e)
        sys.exit(1)
Example no. 44
if not os.path.exists(addressfile):
    """check if there exist the addressBook file"""

    print 'The addressBook has not been created yet.\n\
please add the new contact person first.\n\
Do you want to add(press "y") person or exit(press "n" or any other chars):'
    choose = raw_input()
    if choose == "y":
        pA = info_input()
        flag = pA.add_person(addressdict)
        write_address(addressfile, addressdict)
        if flag != False:
            print "addressBook created successfully."
    else:
        print "Jesus"
        sys.exit(1)

while True:
    print "please assign the argument you want to operate,\nor you can use --help(--h) for help:"
    option = raw_input()
    if option.startswith("--"):
        option = option[2:]
        addressdict = read_address(addressfile)
    elif option == "":
        continue
    else:
        print "error: invalid argument syntax, you can use --help(--h) to get more information"
        continue

    if option == "help" or option == "h":
        print helpinfo
Example no. 45
def main(argv):
	input_file = ""
	is_shown = True
	is_written = False
	log_file = "profile_parser.log"

	try:
		opts, args = getopt.getopt(argv, "hqi:o:", ["help"])
	except getopt.GetoptError:
		print "The given arguments are incorrect"
		sys.exit(2)

	for opt, arg in opts:
		if opt in ("-h", "--help"):
			print ("----------------------")
			print ("Usage: python profile_parser.py")
			print ("")
			print ("\t-h, --help: show the usage")
			print ("\t-q: quiet mode")
			print ("\t-i ...: input profile")
			print ("\t-o ...: output profile")
			print ("----------------------")
			sys.exit()
		elif opt in ("-i"):
			input_file = arg
		elif opt in ("-o"):
			output_file = arg
			is_written = True
		elif opt in ("-q"):
			is_shown = False
		else:
			print "The given arguments are incorrect"
			sys.exit(2)

	if not os.path.exists(input_file):
		print ("The input-file: %s doesn't exist" % input_file)

	# parsing the profile
	try:
		with codecs.open(input_file, "r", "big5") as F:
			header = F.readline().strip()
			if header != "#/obj/user.c":
				if is_shown:
					print ("(header error) The input format incorrect")
					print ("header: %s" % header)
				with open(log_file, "a") as W:
					W.write("%s header_incorrect\n" % input_file)
					sys.exit(2)

			for line in F:
				line_tokens = line.strip().split(" ")
				if line_tokens[0] == "idata":
					# check if the number of line tokens is 2
					if len(line_tokens) != 2:
						merged_line_tokens = " ".join(line_tokens[1:])
						q_patterns = '\"[^\"]*\"'
						q_replacing_token = "KAEQTOKEN"
						replaced_tokens = re.findall(q_patterns, merged_line_tokens)
						data_field = re.sub(q_patterns, q_replacing_token, " ".join(line_tokens[1:]))
					else:
						data_field = line_tokens[1]

					if is_shown:
						print ("idata data-field: %s" % data_field)

					b_patterns = '\(\{[^\)\}]*\}\)'
					b_replacing_token = "KAEBTOKEN"
					q_patterns = '\"[^\"]*\"'
					q_replacing_token = "KAEQTOKEN"
					data_field = re.sub('^\(\{', '', data_field)
					data_field = re.sub('\}\)$', '', data_field)
					merged_data_field = re.sub(b_patterns, b_replacing_token, data_field)
					q_replaced_tokens = re.findall(q_patterns, merged_data_field)
					merged_data_field = re.sub(q_patterns, q_replacing_token, merged_data_field)
					data_tokens = merged_data_field.split(',')

					if is_shown:
						print ("")
						print ("q_replaced_tokens")
						print (",".join(q_replaced_tokens))
						print ("")
						print ("idata data-tokens:\n%s" % merged_data_field)
						print ("idata data-token number: %d" % len(data_tokens))

					def qtoken_recovery(index):
						qIndex = len(filter(lambda x: x == q_replacing_token, data_tokens[:index]))
						try:
							return (q_replaced_tokens[qIndex])
						except IndexError:
							with open(log_file, "a") as W:
								W.write("%s qRecovery_incorrect" % input_file)

					data_token_number = 62
					if len(data_tokens) == data_token_number:
						# fetching the profile info.
						char_id = data_tokens[0]
						if char_id == q_replacing_token:
							char_id = qtoken_recovery(0)
						char_level = data_tokens[5]
						char_race = data_tokens[33]
						char_gender = data_tokens[34]
						char_account = data_tokens[41]
						if char_account == q_replacing_token:
							char_account = qtoken_recovery(41)
						char_profession = data_tokens[48]
						char_class = data_tokens[49]
						char_file = data_tokens[57]
						if char_file == q_replacing_token:
							char_file = qtoken_recovery(57)
						char_family = data_tokens[60]
						if char_family == q_replacing_token:
							char_family = qtoken_recovery(60)

						try:
							int(char_race)
							int(char_level)
							int(char_gender)
						except ValueError:
							with open(log_file, "a") as W:
								W.write("%s race_level_or_gender_may_not_be_number\n" % input_file)

						if is_shown:
							print ("id: %s" % char_id)
							print ("level: %s" % char_level)
							print ("race: %s" % char_race)
							print ("gender: %s" % char_gender)
							print ("account: %s" % char_account)
							print ("profession: %s" % char_profession)
							print ("class: %s" % char_class)
							print ("file: %s" % char_file)
							print ("family: %s" % char_family)
					else:
						if is_shown:
							print ("Warning! The format may incorrect.")
							print ("idata-token number shall be %d instead of %d" % (data_token_number, len(data_tokens)))
						with open(log_file, "a") as W:
							W.write("%s parsed_items_number_is_not_62\n" % input_file)

				elif line_tokens[0] == "dbase":
					data_field = " ".join(line_tokens[1:])
					char_fRank_field = re.findall('\"f_rank\":[^,]*', data_field)
					char_fRank = map(lambda x: x.split(':')[1], char_fRank_field) if len(char_fRank_field) else ['0']
					char_fRank = ';'.join(char_fRank)
					if is_shown:
						print ("rank: %s" % char_fRank)

					char_friend_field = re.findall('(?<=\"friend\":\(\[)[^\]\)]*\]\)', data_field)
					char_friend_token = re.sub('\]\)', '', char_friend_field[0]) if len(char_friend_field) else ""
					if char_friend_token == "":
						char_friends = ""
					else:
						char_friend_token_list = filter(lambda x: x != "", char_friend_token.split(','))
						char_friends = ';'.join(map(lambda x: x.split(':')[0], char_friend_token_list))
					if is_shown:
						print (char_friends)

				else:
					continue

		# format the output
		try:
			idata_oput = [char_file, char_account, char_id, char_gender, char_race, char_level, char_family]
			dbase_oput = [char_fRank, char_friends]
			format_output = map(lambda x: re.sub('\"', '', x), [input_file]+idata_oput+dbase_oput)
		except NameError:
			with open(log_file, "a") as W:
				W.write("%s some_parsed_items_are_lost\n" % input_file)
				W.write("%s some_parsed_items_are_lost" % input_file)

		# writing the parsed result
		if is_written:
			output_dir = os.path.dirname(output_file) if os.path.dirname(output_file) != "" else "."
			if not os.path.exists(output_dir):
				os.makedirs(output_dir)
			with codecs.open(output_file, 'a', 'utf-8') as F:
				F.write("%s\n" % ",".join(format_output))
		else:
			print("%s" % ",".join(format_output))

	except UnicodeError:
		with open(log_file, "a") as W:
			W.write("%s unicodeerror_incorrect\n" % input_file)
Example no. 46
def install(app, args, env):
    if len(sys.argv) < 3:
        help_file = os.path.join(env["basedir"], 'documentation/commands/cmd-install.txt')
        print open(help_file, 'r').read()
        sys.exit(0)

    name = cmd = sys.argv[2]
    groups = re.match(r'^([a-zA-Z0-9]+)([-](.*))?$', name)
    module = groups.group(1)
    version = groups.group(3)
    
    modules_list = load_module_list()
    fetch = None

    for mod in modules_list:
        if mod['name'] == module:
            for v in mod['versions']:
                if version == None and v['isDefault']:
                    print '~ Will install %s-%s' % (module, v['version'])
                    print '~ This module is compatible with: %s' % v['matches']
                    ok = raw_input('~ Do you want to install this version (y/n)? ')
                    if not ok == 'y':
                        print '~'
                        sys.exit(-1)
                    print '~ Installing module %s-%s...' % (module, v['version'])
                    fetch = '%s/modules/%s-%s.zip' % (mod['server'], module, v['version'])
                    break
                if version  == v['version']:
                    print '~ Will install %s-%s' % (module, v['version'])
                    print '~ This module is compatible with: %s' % v['matches']
                    ok = raw_input('~ Do you want to install this version (y/n)? ')
                    if not ok == 'y':
                        print '~'
                        sys.exit(-1)

                    print '~ Installing module %s-%s...' % (module, v['version'])
                    fetch = '%s/modules/%s-%s.zip' % (mod['server'], module, v['version'])
                    break

    if fetch == None:
        print '~ No module found \'%s\'' % name
        print '~ Try play list-modules to get the modules list'
        print '~'
        sys.exit(-1)
    
    archive = os.path.join(env["basedir"], 'modules/%s-%s.zip' % (module, v['version']))
    if os.path.exists(archive):
        os.remove(archive)

    print '~'
    print '~ Fetching %s' % fetch
    Downloader().retrieve(fetch, archive)

    if not os.path.exists(archive):
        print '~ Oops, file does not exist'
        print '~'
        sys.exit(-1)

    print '~ Unzipping...'

    if os.path.exists(os.path.join(env["basedir"], 'modules/%s-%s' % (module, v['version']))):
        shutil.rmtree(os.path.join(env["basedir"], 'modules/%s-%s' % (module, v['version'])))
    os.mkdir(os.path.join(env["basedir"], 'modules/%s-%s' % (module, v['version'])))

    Unzip().extract(archive, os.path.join(env["basedir"], 'modules/%s-%s' % (module, v['version'])))
    os.remove(archive)
    print '~'
    print '~ Module %s-%s is installed!' % (module, v['version'])
    print '~ You can now use it by add adding this line to application.conf file:'
    print '~'
    print '~ module.%s=${play.path}/modules/%s-%s' % (module, module, v['version'])
    print '~'
    sys.exit(0)
Example no. 47
            time.sleep(2)
            # print(i, '/',par,con.get_rect_text(24,2,24,60))
        # Get file ID
        con.wait_for_text('Forwarding',4,20)
        fileID = con.get_rect_text(24,2,24,60)
        fileID = re.findall('INVSTAT123\s+in\s+(\w+)\s+was',fileID)
        if len(fileID) > 0:
            downloadId = fileID[0]
        else:
            # msgbox('Query Failed.\n Please run again')
            pass

        print(i+1,"/",par," -->  ",downloadId)
        if downloadId == None:
            # msgbox('Daybookf report not Generated Please run it again')
            sys.exit()

        # Download file from IBM iSeries
        path1 = cwd+'\\Full Invstat Archive\\'
        ip='10.235.108.20'
        uid= userOFS
        pwd= passOFS
        path=path1+"Archive\\"+"Part Range Invstat "+nameDate+'.xls'
        fname= downloadId +'/'+'INVSTAT123('+uid.upper()+')'
        std = datetime.datetime.now()
        iseries.iSeries_download(ip, uid, pwd, fname, path)
        etd = datetime.datetime.now()
        ttstd = (etd - std).total_seconds()
        print(i+1,"/",par,' Full Invstat Report Downloaded in : ',str(ttstd) ,'\n',path)
        # time.sleep(5)
    time.sleep(60)
Example no. 48
import sys

if len(sys.argv) != 4:
    sys.exit ("Solo acepto 3 paramentros")
    
#_, operador, operando1, operando2 = sys.argv

operador = sys.argv[1]
operando1 = sys.argv[2]
operando2 = sys.argv[3]

operadores = ["suma", "resta", "multi", "div"]

if operador not in operadores:
    sys.exit ("Solo acepto s r m d")

try:
    operando1 = int(operando1)
    operando2 = float(operando2)
except ValueError:
    sys.exit("Dame un numero")

if operador == "suma":
    print operando1 + operando2

if operador == "resta":
    print operando1 - operando2

if operador == "multi":
    print operando1 * operando2
Example no. 49
  def run (self) :

    global _debug, run_mode, queuename

    os.chdir(self.casedir)

    if run_mode < 3 :
      '''-------------------------------------------------------------------
      Prepare batch scripts
      --------------------------------------------------------------------'''

      numSeq = len(self.bgnSeq)
      if ((numSeq-1)/3 > 8) :
        print >> outfile, 'The unpack job cannot fit in three nodes. Script needs more work ...'
        sys.exit(-1)

      unpackbsub = 'unpack_%s.bsub'%self.name
      jobname    = 'U%s%s%s%s%s'%(self.date[6:8],self.case[0],self.time,self.big,self.name[3:6])

      print >> outfile, 'Generating bsub file %s ...' % unpackbsub

      fileBatch = open(unpackbsub,'w')

      fileBatch.write('''#!/bin/sh
#BSUB -q %s
#BSUB -n %d
#BSUB -x
#BSUB -R span[ptile=%d]
#BSUB -W 04:00
#BSUB -o unpack_%s_%%J.out
#BSUB -e unpack_%s_%%J.err
#BSUB -J "%s"
#

#

# Change to the directory that you want to run in.
#
wrkdir=%s/%s/%s/%s
cd $wrkdir
#
# Make sure you're in the correct directory.
#
pwd

#
# Run the job, redirecting input from the given file.  The date commands
# and the time command help track runtime and performance.
#
date\n''' %(queuename,numSeq-1,(numSeq-1)/3,self.name,self.name,
            jobname,
            self.workdir,self.date,self.case,self.name)
      )

      if not os.path.exists(self.name) :
        os.mkdir(self.name)

      for i in range(1,numSeq) :
        fileBatch.write('''ln -s ../%s%02d.gmeta .\n'''%(self.field,i))

      fileBatch.write('''
i=0
for node in $LSB_HOSTS
do
  let "i = $i+1"
  seq=`printf "%%02d" $i`
  ssh $node "cd $wrkdir;/home/ywang/bin/gmeta2png -r %dx%d %s$seq.gmeta" &\n
done
'''%(self.width,self.height,self.field) )

      fileBatch.write('wait\n')

      for i in range(1,numSeq) :
        fileBatch.write('rm %s%02d.gmeta\n' %(self.field,i))

      fileBatch.write('date\n\n')
      fileBatch.close()

      '''-------------------------------------------------------------------
      Submit the jobs
      --------------------------------------------------------------------'''

      starttime = time.time()
      print >> outfile, 'Submitting jobs %s ...' % unpackbsub
      cmdline = "bsub -K < %s/%s" % (self.casedir,unpackbsub)
      if run_mode > -1 :
        subprocess.Popen(cmdline,shell=True,stderr=outfile,stdout=outfile).wait()
      ##outfile.flush()
      print >> outfile, 'Unpack %s used (includes waiting time) %f seconds.\n' % (self.name,time.time()-starttime)
      ##outfile.flush()

    '''-------------------------------------------------------------------
    Waiting converting to be finished and then transfer it to downdraft
    --------------------------------------------------------------------'''
    print >> outfile, '--- 4-%s --- ' % self.name
    print >> outfile, 'Rename the converted image files for %s ...' % self.name
    if _debug : print 'Renaming %s ...'% self.name
    if run_mode > -1 : self.image_rename()

    print >> outfile, '--- 5-%s --- ' % self.name
    print >> outfile, 'Transfering %s to CAPS webserver with script %s ...' % (self.name, self.getscript)
    cmdline = "ssh downdraft.caps.ou.edu /import/animdata_2d/%s/%s %s %s %s" % (
              self.downdir,self.getscript,self.date,self.case,self.field)
    if _debug : print 'Executing %s ...'%cmdline
    if run_mode > -1 :
      subprocess.Popen(cmdline,shell=True,stderr=outfile,stdout=outfile).wait()
Example no. 50
    def copy_packages(self):
        lib_dir = os.path.join(self.pylucid_env_dir, "lib")
        lib_sub_dirs = os.listdir(lib_dir)
        if len(lib_sub_dirs) != 1:
            print "Error: Wrong sub dirs in %r" % lib_dir
            sys.exit(3)

        python_lib_dir = lib_sub_dirs[0]
        if not python_lib_dir.startswith("python"):
            print "Error: %r doesn't start with python!" % python_lib_dir
            sys.exit(3)

        site_packages_dir = os.path.join(lib_dir, python_lib_dir, "site-packages")

        dirs_to_copy = [
            ("PyLucid", os.path.join(self.pylucid_dir, "pylucid_project")),
            ("Django", os.path.join(self.pylucid_env_dir, "src", "django", "django")),
            ("dbpreferences", os.path.join(self.pylucid_env_dir, "src", "dbpreferences", "dbpreferences")),
            ("django-tools", os.path.join(self.pylucid_env_dir, "src", "django-tools", "django_tools")),
            ("python-creole", os.path.join(self.pylucid_env_dir, "src", "python-creole", "creole")),

            ("django-dbtemplates", os.path.join(site_packages_dir, "dbtemplates")),
            ("django-reversion", os.path.join(site_packages_dir, "reversion")),
            ("django-tagging", os.path.join(site_packages_dir, "tagging")),

            ("Pygments", os.path.join(site_packages_dir, "pygments")),
        ]
        files_to_copy = [
            ("feedparser", os.path.join(site_packages_dir, "feedparser.py")),
        ]

        for dir_info in dirs_to_copy:
            if not os.path.isdir(dir_info[1]):
                print "Error: %r doesn't exist!" % dir_info[1]
                sys.exit(3)

        for file_info in files_to_copy:
            if not os.path.isfile(file_info[1]):
                print "Error: file %r not found!" % file_info[1]
                sys.exit(3)

        #----------------------------------------------------------------------
        print

        # Don't copy existing external_plugins and not local test plugins ;)
        ignore_path = []
        external_plugins = os.path.join(self.pylucid_dir, "pylucid_project", "external_plugins")
        for dir_item in os.listdir(external_plugins):
            if dir_item == "__init__.py":
                continue
            full_path = os.path.join(external_plugins, dir_item)
            ignore_path.append(full_path)

        # Copy only PyLucid media files and not local test files ;)
        media_path = os.path.join(self.pylucid_dir, "pylucid_project", "media")
        for dir_item in os.listdir(media_path):
            if dir_item == "PyLucid":
                continue
            full_path = os.path.join(media_path, dir_item)
            ignore_path.append(full_path)

        for package_name, path in dirs_to_copy:
            print "_" * 79
            print "copy %s" % package_name
            package_dest = os.path.join(self.dest_package_dir, os.path.split(path)[1])
            print "%s -> %s" % (path, package_dest)
            try:
                files_copied = copytree2(
                    path, package_dest,
                    shutil.ignore_patterns(*COPYTREE_IGNORE), ignore_path
                )
            except OSError, why:
                print "copytree2 error: %s" % why
            else:
                print "OK"
Example n. 51
0
def uninstall_cyclus(args):
    makefile = os.path.join(args.build_dir, 'Makefile')
    if not os.path.exists(args.build_dir) or not os.path.exists(makefile):
        sys.exist("May not uninstall Cyclus since it has not yet been built.")
    rtn = subprocess.check_call(['make', 'uninstall'], cwd=args.build_dir,
                                shell=(os.name == 'nt'))
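A side note on the guard above: passing a string to sys.exit() prints it to stderr and terminates the process with exit status 1, so no separate print call is needed. A minimal sketch of that pattern (the helper name is illustrative):

import os
import sys

def require_dir(path):
    # sys.exit(message) writes the message to stderr and exits with status 1
    if not os.path.isdir(path):
        sys.exit("Error: %s does not exist" % path)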
Example n. 52
0
    def __init__(self, proj_paths, DB_user, DB_pass, DB_name, controller_logging, logs_folder, output_folder, N_PROCESSES, PROJECTS_BATCH, PROJECTS_CONFIGURATION):
        self.controller_logging = controller_logging

        self.project_id = 1
        self.proj_paths = []
        self.filecount = 0
        self.logs_folder = ''
        self.output_folder = '' # This output will be the input of CC
        self.PATH_config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),'config.ini')

        self.N_PROCESSES = N_PROCESSES
        self.PROJECTS_BATCH = PROJECTS_BATCH

        self.DB_user = DB_user
        self.DB_pass = DB_pass
        self.DB_name = DB_name

        if PROJECTS_CONFIGURATION not in ['Leidos','GithubZIP']:
            controller_logging.error('Unknown project configuration format: %s' % (PROJECTS_CONFIGURATION))
            sys.exit(1)
        else:
            self.PROJECTS_CONFIGURATION = PROJECTS_CONFIGURATION

        try:
            db = DB(DB_user,DB_name,DB_pass,logging)
            self.project_id = db.get_max_project_id()
            if self.project_id is None:
                self.project_id = 0
    
            self.project_id += 1

            db.close()

        except Exception as e:
            controller_logging.error('Error on Tokenizer.__init__')
            self.controller_logging.error(e)
            sys.exit(1)

        self.proj_paths = list(proj_paths)

        # Creating folder for the processes logs
        self.logs_folder = logs_folder
        if not os.path.exists( self.logs_folder ):
            controller_logging.error('ERROR - Folder [%s] does not exist!' % self.logs_folder )
            sys.exit(1)

        # Create folder for processes output
        self.output_folder = output_folder
        if not os.path.exists( self.output_folder ):
            controller_logging.error('ERROR - Folder [%s] does not exist!' % self.output_folder )
            sys.exit(1)

        # Reading config file
        config = ConfigParser.ConfigParser()

        try:
            config.read(self.PATH_config_file)
        except Exception as e:
            controller_logging.error('ERROR on Tokenizer.__init__')
            controller_logging.error(e)
            sys.exit(1)

        comment_inline         = re.escape(config.get('Language', 'comment_inline'))
        comment_open_tag       = re.escape(config.get('Language', 'comment_open_tag'))
        comment_close_tag      = re.escape(config.get('Language', 'comment_close_tag'))
        self.separators                 = config.get('Language', 'separators').strip('"').split(' ')
        self.comment_inline_pattern     = comment_inline + '.*?$'
        self.comment_open_close_pattern = comment_open_tag + '.*?' + comment_close_tag
        self.file_extensions            = config.get('Language', 'File_extensions').split(' ')

        controller_logging.info('Tokenizer successfully initialized. Project index starting at %s. Processing %s projects. Looking for file extensions: %s' % (self.project_id, len(self.proj_paths), self.file_extensions) )
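The constructor above only builds the comment regexes; how they are applied is not shown in this excerpt. A minimal sketch of stripping comments from a source string with those patterns, assuming re.DOTALL for the block pattern and re.MULTILINE for the inline pattern (the function name is illustrative):

import re

def strip_comments(source, comment_inline_pattern, comment_open_close_pattern):
    # Remove block comments first; DOTALL lets '.' span newlines
    source = re.sub(comment_open_close_pattern, '', source, flags=re.DOTALL)
    # Then remove inline comments up to the end of each line
    source = re.sub(comment_inline_pattern, '', source, flags=re.MULTILINE)
    return source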
Example n. 53
0
img_files = []
if args.infile:
    for f in args.infile:
        if not os.path.isfile(f):
            f = img_src_dir + "/" + f
        if not os.path.isfile(f):
            warnings.warn("Input file: " + f + " doesn't exist.")
        else:
            img_files.append(f)
else:
    for f in os.listdir(img_src_dir):
        if f.endswith(".gv"):
            img_files.append(img_src_dir + "/" + f)

if not img_files:
    sys.exist("ERROR: no found image files.")

oformat = args.outformat

if args.outdir:
    odir = args.outdir
    if not os.path.isdir(odir):
        sys.exit("--outdir " + odir + "doesn't exist")
else:
    odir = os.path.dirname(img_src_dir) + "/img"

for f in img_files:
    print("Generating " + oformat + " for " + f + " ...")
    gen_graph_from_gv(f, odir, oformat)
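gen_graph_from_gv is not defined in this excerpt. A minimal sketch of what it might do, assuming it shells out to Graphviz's dot command (the real helper may differ):

import os
import subprocess

def gen_graph_from_gv(gv_file, out_dir, out_format):
    # dot -T<format> renders a .gv file; -o selects the output path
    base = os.path.splitext(os.path.basename(gv_file))[0]
    out_path = os.path.join(out_dir, base + "." + out_format)
    subprocess.check_call(["dot", "-T" + out_format, gv_file, "-o", out_path])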
Example n. 54
0
# ===== set PyQt5 dir =====

if platform == 'win32':
    PYQT5_DIR = os.path.join(get_python_lib(), "PyQt5")
elif platform == 'darwin':
    # TODO
    PYQT5_DIR = os.path.join(get_python_lib(), "PyQt5")
elif platform == 'linux':
    # TODO
    PYQT5_DIR = os.path.join(get_python_lib(), "PyQt5")

print('check PyQt5 dir: {0}'.format(PYQT5_DIR), '...')

if not os.path.exists(PYQT5_DIR):
    print("Can't find PyQt5's dir automatically, Please set it in freeze.py")
    sys.exist(0)

print('Done.')

# ===== set qml dir =====

if platform == 'win32':
    QML_DIR = os.path.join(PYQT5_DIR, 'qml')
elif platform == 'darwin':
    # TODO
    QML_DIR = ''
elif platform == 'linux':
    # TODO
    QML_DIR = ''

print('check QML dir: {0}'.format(QML_DIR), '...')
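An alternative to hard-coding the per-platform paths above is to ask the installed package for its own location; a minimal sketch of that option (not what this freeze script actually does):

import os

try:
    import PyQt5
    PYQT5_DIR = os.path.dirname(PyQt5.__file__)
except ImportError:
    PYQT5_DIR = ''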
Example n. 55
0
                downloadId = fileID[0]
            else:
                # msgbox('Query Failed.\n Please run again')
                # sys.exit('fileID is Blank: Query Failed.')
                sys.exit()
    except:
        # msgbox('Error Occurred, please confirm\n1.Please close IBM emulator if open\n2.Create IBM Emulator Profile as "OFS-live.WS"\n3.Someone else might be using Credentials to run the query')
        # sys.exit("Error Occurred, please confirm\n1.Create IBM Emulator Profile as 'OFS-live.WS'")
        sys.exit()

    if flag1 == 0:
        # OFS Close
        print(downloadId)
        if downloadId is None:
            # msgbox('Daybookf report not generated, please run it again')
            sys.exit('fileID is Blank: Query Failed.')

    # Download file from IBM iSeries
    path1 = cwd+'\\Daybookf Archive\\'
    ip='10.235.108.20'
    uid= userOFS
    pwd= passOFS
    if flag1 == 0:
        path=path1+"Daybookf "+nameDate+'.xls'
        fname= downloadId +'/'+'DAYBOOKF('+uid.upper()+')'
        iseries.iSeries_download(ip, uid, pwd, fname, path)
        listFiles = glob.glob(path1+"*.temp")
        while len(listFiles) > 0:
            time.sleep(5)
            listFiles = glob.glob(path1+"*.temp")
        et1 = datetime.datetime.now()
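The wait loop above polls for leftover *.temp files with no upper bound. A small sketch of the same idea with a timeout, so a stalled download cannot block forever (the helper name and defaults are illustrative):

import glob
import time

def wait_for_temp_files(folder, timeout_s=600, poll_s=5):
    # Return True once no *.temp files remain, False if the timeout expires
    deadline = time.time() + timeout_s
    while glob.glob(folder + "*.temp"):
        if time.time() > deadline:
            return False
        time.sleep(poll_s)
    return True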
Example n. 56
0
            t = DaemonThreadRunner(cloud_sync, settings['PID_FILE'])
            t.start()
            del t
        else:
            try:
                cloud_sync.setDaemon(True)
                cloud_sync.start()
                while cloud_sync.isAlive():
                    cloud_sync.join(1)
            except KeyboardInterrupt:
                print '\n! Received keyboard interrupt, quitting cloud sync threads.\n'
                sys.exit()

if __name__ == '__main__':
    if len(sys.argv) < 2:
        sys.exit(2)
    if not os.path.exists(sys.argv[1]):
        print 'Cloud Sync configuration file [%s] does not exist.' % sys.argv[1]
        sys.exit(2)

    if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'cloud_sync_app.django_storage_module'

    conf = open(sys.argv[1])
    settings = yaml.load(conf)
    conf.close()

    from django.conf import settings as django_settings
    setattr(django_settings, 'OSS_ACCESS_URL', settings['OSS_ACCESS_URL'])

    if not settings['RESTART_AFTER_UNHANDLED_EXCEPTION']:
Example n. 57
0
def install(app, args, env):
    if len(sys.argv) < 3:
        help_file = os.path.join(env["basedir"], "documentation/commands/cmd-install.txt")
        print open(help_file, "r").read()
        sys.exit(0)

    name = cmd = sys.argv[2]
    groups = re.match(r"^([a-zA-Z0-9]+)([-](.*))?$", name)
    module = groups.group(1)
    version = groups.group(3)

    modules_list = load_module_list()
    fetch = None

    for mod in modules_list:
        if mod["name"] == module:
            for v in mod["versions"]:
                if version is None and v["isDefault"]:
                    print "~ Will install %s-%s" % (module, v["version"])
                    print "~ This module is compatible with: %s" % v["matches"]
                    ok = raw_input("~ Do you want to install this version (y/n)? ")
                    if not ok == "y":
                        print "~"
                        sys.exit(-1)
                    print "~ Installing module %s-%s..." % (module, v["version"])
                    fetch = "%s/modules/%s-%s.zip" % (mod["server"], module, v["version"])
                    break
                if version == v["version"]:
                    print "~ Will install %s-%s" % (module, v["version"])
                    print "~ This module is compatible with: %s" % v["matches"]
                    ok = raw_input("~ Do you want to install this version (y/n)? ")
                    if not ok == "y":
                        print "~"
                        sys.exit(-1)

                    print "~ Installing module %s-%s..." % (module, v["version"])
                    fetch = "%s/modules/%s-%s.zip" % (mod["server"], module, v["version"])
                    break

    if fetch is None:
        print "~ No module found '%s'" % name
        print "~ Try play list-modules to get the modules list"
        print "~"
        sys.exit(-1)

    archive = os.path.join(env["basedir"], "modules/%s-%s.zip" % (module, v["version"]))
    if os.path.exists(archive):
        os.remove(archive)

    print "~"
    print "~ Fetching %s" % fetch
    Downloader().retrieve(fetch, archive)

    if not os.path.exists(archive):
        print "~ Oops, file does not exist"
        print "~"
        sys.exit(-1)

    print "~ Unzipping..."

    if os.path.exists(os.path.join(env["basedir"], "modules/%s-%s" % (module, v["version"]))):
        shutil.rmtree(os.path.join(env["basedir"], "modules/%s-%s" % (module, v["version"])))
    os.mkdir(os.path.join(env["basedir"], "modules/%s-%s" % (module, v["version"])))

    Unzip().extract(archive, os.path.join(env["basedir"], "modules/%s-%s" % (module, v["version"])))
    os.remove(archive)
    print "~"
    print "~ Module %s-%s is installed!" % (module, v["version"])
    print "~ You can now use it by adding it to the dependencies.yml file:"
    print "~"
    print "~ require:"
    print "~     play -> %s %s" % (module, v["version"])
    print "~"
    sys.exit(0)
Example n. 58
0
    hwNodes = findNodes(hwTree, "project")
    curNodes = findNodes(curTree, "project")

    lwNodeList = getNodes(lwNodes)
    hwNodeList = getNodes(hwNodes)
    curNodeList = getNodes(curNodes)

    print "new inc stores in manifest.xml:"
    diffNodeList = getDiffNodeList(lwNodeList, hwNodeList, curNodeList) 
    print diffNodeList
        
    print "\nbelow is in lw but not in lastest manifest:"
    diffNodeList = getDiffNodes(curNodeList, lwNodeList)
    print diffNodeList
    
    print "\nbelow is in hw but not in lastest manifest:"
    diffNodeList = getDiffNodes(curNodeList, hwNodeList)
    print diffNodeList
    return 0

if __name__ == "__main__":
    lwXmlPath = "BalongV9R1SFT_lw_manifest.xml"
    hwXmlPath = "BalongV9R1SFT_nlw_manifest.xml"
    curXmlPath = "./../../../../.repo/manifest.xml"
    
    if not os.path.exists(lwXmlPath) or not os.path.exists(hwXmlPath) or not os.path.exists(curXmlPath):
        print "manifest not exists"
        sys.exist(-1)
    sys.exit(main(lwXmlPath, hwXmlPath, curXmlPath))
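getDiffNodes and getDiffNodeList are not defined in this excerpt. A minimal sketch consistent with how their results are printed above, assuming getDiffNodes(base, other) returns entries of other that are missing from base, and getDiffNodeList returns current-manifest entries missing from both the lw and hw lists (both signatures are assumptions):

def getDiffNodes(base_list, other_list):
    # Entries present in other_list but missing from base_list
    return [node for node in other_list if node not in base_list]

def getDiffNodeList(lw_list, hw_list, cur_list):
    # Current-manifest entries that appear in neither lw nor hw
    return [node for node in cur_list if node not in lw_list and node not in hw_list]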