Example #1
def test_disable_pyparsing_arity_trimming_works():
    """Tests that arity trimming has been disabled and parse actions with
    the wrong number of arguments will raise TypeErrors"""
    for func in [lambda a: None, lambda a, b: None, lambda a, b, c, d: None]:
        element = Literal('test').setParseAction(func)
        with raises(TypeError):
            element.parseString('test')
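For context, pyparsing ordinarily hands a parse action up to three arguments (the source string, the match location, and the matched tokens) and, unless arity trimming is disabled as the test above expects, silently adapts to callables that accept fewer. A minimal sketch of the conventional three-argument form (the action name and sample input are made up for illustration):

from pyparsing import Literal

def upper_action(s, loc, toks):
    # Conventional signature: source string, match location, matched tokens.
    return [t.upper() for t in toks]

element = Literal('test').setParseAction(upper_action)
print(element.parseString('test'))  # -> ['TEST']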
Example #3
def check_unnecessary_include(self, code):
    grammar = Literal('#') + Literal('include') + Literal('<') + Word(alphanums)
    try:
        grammar.parseString(code)
        begin = code.find("<")
        end = code.find(">")
        included_library = code[begin + 1:end]
        if included_library not in self.includes:
            self.add_error(label="UNNECESSARY_INCLUDE")
    except ParseException:
        return
Example #4
def check_local_include(self, code):
    grammar = Literal('#') + Literal('include') + Literal('"') + Word(alphanums)
    try:
        grammar.parseString(code)
        begin = code.find('"')
        included_file = code[begin + 1:]
        end = included_file.find('"')
        included_file = included_file[:end]
        if included_file not in self.includes:
            self.local_includes[self.current_file].append(included_file)
    except ParseException:
        return
Example #5
def check_local_include(self, code):
    grammar = Literal('#') + Literal('include') + Literal('"') + Word(
        alphanums)
    try:
        grammar.parseString(code)
        begin = code.find('"')
        included_file = code[begin + 1:]
        end = included_file.find('"')
        included_file = included_file[:end]
        if included_file not in self.includes:
            self.local_includes[self.current_file].append(included_file)
    except ParseException:
        return
Example #6
def check_unnecessary_include(self, code):
    grammar = Literal('#') + Literal('include') + Literal('<') + Word(
        alphanums + '.' + '_') + Literal('>')
    try:
        grammar.parseString(code)
        begin = code.find("<")
        end = code.find(">")
        included_library = code[begin + 1:end]
        if included_library not in self.includes:
            self.add_error(label="UNNECESSARY_INCLUDE",
                           data={"library": included_library})
    except ParseException:
        return
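As a rough standalone illustration of the include grammar used above (outside the checker class, so self.includes and add_error are not involved; the sample lines are made up):

from pyparsing import Literal, Word, alphanums, ParseException

include_grammar = Literal('#') + Literal('include') + Literal('<') + Word(
    alphanums + '.' + '_') + Literal('>')

print(include_grammar.parseString('#include <iostream>'))
# A line that is not a bracketed include simply raises ParseException.
try:
    include_grammar.parseString('int main() {')
except ParseException:
    print('not a bracketed include line')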
Example #7
def readFile():
    tempo = []
    ''' READING FILE'''
    fd = open('times.pl', 'r')
    buffer = fd.read().split('\n')
    '''Parsing lines'''
    '''Different kinds of parsers'''
    firstLine = (Literal('timetable') + Suppress('(') + Word(alphas) +
                 Suppress(',') + Word(alphas) + Suppress(','))
    carreiraLine = (Optional(Suppress('[')) +
                    Word(nums + ':') + Suppress('/') +
                    Word(nums + ':') + Suppress('/') +
                    Word(alphanums) + Suppress('/') +
                    (Literal('alldays') ^
                     (Suppress('[') + OneOrMore(Word(alphas + ',') ^ Word(alphas)) +
                      Suppress(']'))) +
                    (Suppress(']).') ^ Suppress(',')))
    '''Start of parsing'''
    i = 0
    index_tempo = 0
    while not (buffer == [''] or buffer == []):
        aux = firstLine.parseString(buffer[0])
        tempo.append(Percursos(aux[1], aux[2]))
        del buffer[0]
        while not buffer[0] == '':
            buffer[0] = buffer[0].replace(' ', '')
            aux = carreiraLine.parseString(buffer[0])
            tempo[index_tempo].viagens.append(Carreiras(*aux))
            del buffer[0]
        del buffer[0]
        index_tempo += 1

    return tempo
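The times.pl input itself is not shown, so the line below is only a guess at the Prolog-style fact format implied by firstLine; a minimal sketch of that sub-grammar on its own:

from pyparsing import Literal, Suppress, Word, alphas

firstLine = (Literal('timetable') + Suppress('(') + Word(alphas) +
             Suppress(',') + Word(alphas) + Suppress(','))

print(firstLine.parseString('timetable(porto, lisboa,'))
# -> ['timetable', 'porto', 'lisboa']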
Example #8
def parse_common_block(s: str) -> List[str]:
    """Parse a common block."""
    myword = Word(alphanums + "_")
    inside = OneOrMore(myword + ZeroOrMore("*") + ZeroOrMore(","))
    parenthesis = ZeroOrMore(Char("(") + inside + Char(")"))
    parser = Literal("common") + Char('/') + myword + Char('/') + \
        OneOrMore(Group(myword + parenthesis + ZeroOrMore(Char(","))))

    return parser.parseString(s).asList()
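A quick usage sketch for parse_common_block (assuming the function and its pyparsing imports above are in scope; the Fortran-style COMMON line is invented for illustration):

# Parses the block name and each variable group, including dimension parentheses.
print(parse_common_block("common /coords/ x, y, z(3)"))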
Example #9
class Bionames (Operator):

    def __init__(self):
        """ Initialize the operator. """
        super(Bionames, self).__init__("bionames")
        self.url = "https://bionames.renci.org/lookup/{input}/{type}/"
        self.select_grammar = \
                              Literal("select") + White() + ( QuotedString('"') | Word( alphas+"_"+"$", alphanums+"_" ) ) + \
                              White() + Literal("from") + White() + Word(alphas) + White() + Literal("as") + White() + Word(alphas+"_")
        self.select_grammar = Literal("select") + White() + (QuotedString('"')|Word(alphas+"$"+"_")) + White() + Literal("from") + White() + Word(alphas+"_") + White() + Literal("as") + White() + Word(alphas+"_")
    def execute_query (self, query, context):

        #Literal("select") + White() + ( QuotedString('"') | Word( alphas+"_"+"$", alphanums+"_" ) ) + White() + Literal("from") + White() + Word(alphas)
        print (f"parsing query {query}")
        vals = self.select_grammar.parseString (query)
        print (vals)
        select_key, w0, input_string, w1, from_key, w2, category, w3, as_key, w4, name = self.select_grammar.parseString (query) #'select "this is a string" from bob')

#        logger.debug (f"
        context.set_result (name,
                            self.get_ids (
                                name = context.resolve_arg(input_string),
                                type_name = category))
        
    def get_ids (self, name, type_name):
        url = self.url.format (**{
            "input" : name,
            "type"  : type_name
        })
        logger.debug (f"url: {url}")
        result = None
        response = requests.get(
            url = url,
            headers = {
                'accept': 'application/json'
            })
        if response.status_code == 200 or response.status_code == 202:
            result = response.json ()
        else:
            raise ValueError (response.text)
        logger.debug (f"bionames result {result}")
        return result
    
    def invoke (self, event):
        result = {}
        if event.query:
            if isinstance(event.query, str):
                self.execute_query (event.query, event.context)
            elif isinstance(event.query, list):
                for q in event.query:
                    self.execute_query (q, event.context)
        else:
            result = self.get_ids (
                name=event.input,
                type_name=event.type)
        
        return result
Example #10
def typeSwitch(line):
    global typedversion

    typeflag = Literal("#") + "option" + Literal("=") + oneOf("untyped", "typed")
    res = typeflag.parseString(line)
    if res[3] == "untyped":
        typedversion = False
    elif res[3] == "typed":
        typedversion = True
    else:
        print "Cannot determine whether typed or untyped."
        raise ParseException

    str = "Detected "
    if not typedversion:
        str += "un"
    str += "typed version."
    print str
Example #11
def typeSwitch(line):
    global typedversion

    typeflag = Literal("#") + "option" + Literal("=") + oneOf(
        "untyped", "typed")
    res = typeflag.parseString(line)
    if res[3] == "untyped":
        typedversion = False
    elif res[3] == "typed":
        typedversion = True
    else:
        print "Cannot determine whether typed or untyped."
        raise ParseException

    str = "Detected "
    if not typedversion:
        str += "un"
    str += "typed version."
    print str
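For reference, a standalone sketch of the typeflag expression above (the directive line is made up; pyparsing skips the whitespace between tokens):

from pyparsing import Literal, oneOf

typeflag = Literal("#") + "option" + Literal("=") + oneOf("untyped typed")
print(typeflag.parseString("# option = typed"))
# -> ['#', 'option', '=', 'typed']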
Example #12
    def FromString(cls, desc):
        """Parse this stop condition from a string representation.

        The string needs to match:
        run_time number [seconds|minutes|hours|days|months|years]

        Args:
            desc (str): The description

        Returns:
            TimeBasedStopCondition
        """

        parse_exp = Literal(u'run_time').suppress() + time_interval(u'interval')

        try:
            data = parse_exp.parseString(desc)
            return TimeBasedStopCondition(data[u'interval'][0])
        except ParseException:
            raise ArgumentError(u"Could not parse time based stop condition")
Example #13
# Copyright L.P.Klyne 2013 
# Licenced under 3 clause BSD licence 

from pyparsing import Literal, Empty, replaceWith

def insertResult(v):
    """
    Parser helper function that simply inserts a result in 
    the list of values returned.
    """
    return Empty().setParseAction( replaceWith(v) )

p1 = Literal("1")
p2 = Literal("2")+insertResult("B")     # 'AttributeError: 'NoneType' object has no attribute 'streamline''
p3 = insertResult("B")+Literal("3")   # Blows python stack

r1 = p1.parseString("1")
r2 = p2.parseString("2")
r3 = p3.parseString("3")

print r2

print r3
Example #14
try:
    date=datetime.datetime.strptime(site_contents[begin_date:end_date].strip().replace(',',''), '%b %d %Y')
except ValueError:
    date=datetime.datetime.strptime(site_contents[begin_date:end_date].strip().replace(',',''), '%B %d %Y')
date=date-datetime.timedelta(hours=24)
start_index = site_contents.find('Sows Purchased (Live and Carcass Basis)')
labels = [ '300-399', '400-449', '450-499', '500-549', '550/up' ]
x = 0
parsed = []
# Loops through each label in labels and parses its line of data on the website.
# Then it creates a table with the parsed data elements and moves to the next label.
while x < len(labels):
    label_index = site_contents.find(labels[x], start_index) # index of labels[x] on the website
    #grammar for each line of data    
    line_grammar = Literal(labels[x]) + Word(nums+',') + Word(nums) + Word(nums+'.'+'-') + Word(nums+'.')
    line_end = site_contents.find('\r\n', label_index) # index of the end of the line to be parsed
    parsed = line_grammar.parseString(site_contents[label_index:line_end]).asList() # parses line and converts to list
    parsed.append(parsed[4]) # add the weighted average to end of the list because split on next line will overwrite parsed[4]
    [ parsed[3], parsed[4] ] = parsed[3].split('-') # split the price range into low price and high price
    headings = [ 'Date', 'Head Count', 'Avg Wgt', 'Low Price', 'High Price', 'Wtd Avg Price' ]
    data = { 'Date': [date.strftime('%Y-%m-%d')], 'Head Count': [parsed[1]], 'Avg Wgt': [parsed[2]], 'Low Price': [parsed[3]], \
           'High Price': [parsed[4]], 'Wtd Avg Price': [parsed[5]] }
    data_df = pd.DataFrame(data, columns = headings)
    data_df.index = data_df['Date']
    data_df = data_df.drop('Date', 1)
    quandl_code = 'USDA_LM_HG230_' + parsed[0].replace('-', '_').replace('/', '_') + '\r'# build unique quandl code
    reference_text = '  Historical figures from USDA can be verified using the LMR datamart located ' \
    '\n  at http://mpr.datamart.ams.usda.gov.\n' 
    print 'code: ' + quandl_code + '\n'
    print 'name: National Daily Sows Purchased- ' + parsed[0] + ' pounds\n'
    print 'description: National daily direct sow and boar report. This dataset contains '\
    ' head count, average weight, price range, and weighted average for sows in the weight range ' + parsed[0] +\
Example #15
class Manager(object):
    '''
    A manager to orchestrate the creation and
    deletion of container clusters
    '''
    def __init__(self, logger):
        self.salt_client = salt.client.LocalClient()
        self.etcd = Etcd(logger)
        self.logger = logger
        # Parse out the username and formation name
        # from the ETCD directory string
        self.formation_parser = Literal('/formations/') + \
          Word(srange("[0-9a-zA-Z_-]")).setResultsName('username') + Literal('/') + \
          Word(srange("[0-9a-zA-Z_-]")).setResultsName('formation_name')

    def fqdn_to_shortname(self, fqdn):
        if '.' in fqdn:
            return fqdn.split('.')[0]
        else:
            return fqdn

    def check_salt_key_used(self, hostname):
        self.logger.info(
            "Checking if the key for {host} is already used".format(
                host=hostname))
        s = subprocess.Popen('salt-key', shell=True, stdout=PIPE)
        salt_list = s.communicate()[0]

        if hostname in salt_list:
            return True
        else:
            return False

    def check_port_used(self, host, port):
        self.logger.info(
            "Checking if {port} on {host} is open with salt-client".format(
                host=host, port=port))
        results = self.salt_client.cmd(
            host,
            'cmd.run',
            ['netstat -an | grep %s | grep tcp | grep -i listen' % port],
            expr_form='list')
        self.logger.debug("Salt return: {lsof}".format(lsof=results[host]))

        if results[host] != '':
            return True
        else:
            return False

    # TODO
    def check_for_existing_formation(self, formation_name):
        # If the user passed in an existing formation name lets append to it
        pass

    def get_docker_cluster(self):
        # Return a list of docker hosts
        cluster = self.etcd.get_key('docker_cluster')
        if cluster is not None:
            return cluster.split(',')
        else:
            return None

    def get_load_balancer_cluster(self):
        # Return a list of nginx hosts
        cluster = self.etcd.get_key('nginx_cluster')
        if cluster is not None:
            return cluster.split(',')
        else:
            return None

    def order_cluster_by_load(self, cluster_list):
        # Sample salt output
        # {'dlceph01.drwg.local': '0.27 0.16 0.15 1/1200 26234'}

        # define grammar
        point = Literal('.')
        number = Word(nums)
        floatnumber = Combine(number + point + number)
        float_list = OneOrMore(floatnumber)

        results = self.salt_client.cmd(','.join(cluster_list),
                                       'cmd.run', ['cat /proc/loadavg'],
                                       expr_form='list')
        load_list = []
        self.logger.debug("Salt load return: {load}".format(load=results))

        for host in results:
            host_load = results[host]
            match = float_list.parseString(host_load)
            if match:
                one_min = match[0]
                five_min = match[1]
                fifteen_min = match[2]
                self.logger.debug(
                    "Adding Load({host}, {one_min}, {five_min}, {fifteen_min}".
                    format(host=host,
                           one_min=one_min,
                           five_min=five_min,
                           fifteen_min=fifteen_min))
                load_list.append(Load(host, one_min, five_min, fifteen_min))
            else:
                self.logger.error("Could not parse host load output")

        # Sort the list by fifteen min load
        load_list = sorted(load_list, key=lambda x: x.fifteen_min_load)
        for load in load_list:
            self.logger.debug("Sorted load list: " + str(load))

        return load_list

    # Return a list of formations the user owns
    def list_formations(self, username):
        formation_list = []
        formations = self.etcd.list_directory('formations/' + username)
        for formation in formations:
            parse_results = self.formation_parser.parseString(formation)
            if parse_results:
                formation_name = parse_results['formation_name']
                formation_list.append(formation_name)
            else:
                self.logger.error("Could not parse the ETCD string")
        self.logger.info('Formation list {formations} for user {user}'.format(
            formations=formation_list, user=username))
        return formation_list

    # Load the formation and return a Formation object
    def load_formation_from_etcd(self, username, formation_name):
        f = Formation(username, formation_name)
        app_list = json.loads(
            json.loads(
                self.etcd.get_key(
                    '/formations/{username}/{formation_name}'.format(
                        username=username, formation_name=formation_name))))
        for app in app_list:
            # If our host doesn't support swapping we're going to get some garbage
            # message in here
            if "WARNING" in app['container_id']:
                app['container_id'] = app['container_id'].replace("WARNING: Your "\
                  "kernel does not support memory swap capabilities. Limitation discarded.\n","")
                #Message changed in docker 0.8.0
                app['container_id'] = app['container_id'].replace("WARNING: WARNING:"\
                  "Your kernel does not support swap limit capabilities. Limitation "\
                  "discarded.\n","")
            app['container_id'] = app['container_id'].strip('\n')

            # Set volumes if needed
            volumes = None
            if app['volumes']:
                self.logger.info("Setting volumes to: " +
                                 ''.join(app['volumes']))
                volumes = app['volumes']

            f.add_app(app['container_id'], app['hostname'], app['cpu_shares'],
                      app['ram'], app['port_list'], app['ssh_port'], 22,
                      app['host_server'], volumes)

        # Return fully parsed and populated formation object
        return f

    def save_formation_to_etcd(self, formation):
        name = formation.name
        username = formation.username

        self.etcd.set_key(
            'formations/{username}/{formation_name}'.format(
                username=username, formation_name=name), formation)

    # TODO write code to add new apps to load balancer
    def add_app_to_nginx(self, app):
        pass

    # TODO write code to add new apps to the load balancer
    def add_app_to_apache(self, app):
        pass

    def start_application(self, app):
        # Run a salt cmd to startup the formation
        docker_command = "docker run -c={cpu_shares} -d -i -t -h=\"{hostname}\" -m={ram}m "\
          "--name={hostname} {port_list} {volume_list} {image} /sbin/my_init -- bash"

        self.logger.info("Port list %s" % app.port_list)
        port_list = ' '.join(map(lambda x: '-p ' + x, app.port_list))

        # Only create this list if needed
        volume_list = ''
        if app.volume_list:
            volume_list = ' '.join(map(lambda x: '-v ' + x, app.volume_list))

        d = docker_command.format(cpu_shares=app.cpu_shares,
                                  hostname=app.hostname,
                                  ram=app.ram,
                                  image=app.docker_image,
                                  port_list=port_list,
                                  volume_list=volume_list)

        self.logger.info(
            "Starting up docker container on {host_server} with cmd: {docker_cmd}"
            .format(host_server=app.host_server, docker_cmd=d))

        salt_process = self.salt_client.cmd(app.host_server,
                                            'cmd.run', [d],
                                            expr_form='list')
        container_id = salt_process[app.host_server]
        if container_id:
            if "WARNING" in container_id:
                container_id = container_id.replace("WARNING: "\
                  "Your kernel does not support swap limit capabilities. Limitation "\
                  "discarded.\n","")
                container_id = container_id.strip("\n")
            #Docker only uses the first 12 chars to identify a container
            app.change_container_id(container_id[0:12])

    def bootstrap_application(self, app):
        # Log into the host with paramiko and run the salt bootstrap script
        host_server = self.fqdn_to_shortname(app.host_server)

        self.logger.info(
            "Bootstrapping {hostname} on server: {host_server} port: {port}".
            format(hostname=app.hostname,
                   host_server=host_server,
                   port=app.ssh_port))

        try:
            ssh = paramiko.SSHClient()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.connect(hostname=host_server,
                        port=app.ssh_port,
                        username='******',
                        password='******')

            transport = paramiko.Transport((host_server, app.ssh_port))
            transport.connect(username='******', password='******')
            sftp = paramiko.SFTPClient.from_transport(transport)
            sftp.put('bootstrap.sh', '/root/bootstrap.sh')
            sftp.put('start.sh', '/root/start.sh')

            ssh.exec_command("chmod +x /root/bootstrap.sh")
            ssh.exec_command("chmod +x /root/start.sh")
            stdin, stdout, stderr = ssh.exec_command("bash /root/start.sh")
            self.logger.debug(''.join(stdout.readlines()))
            ssh.close()
        except SSHException:
            self.logger.error(
                "Failed to log into server.  Shutting it down and cleaning up the mess."
            )
            self.delete_container(app.host_server, app.container_id)

    # Stops and deletes a container
    def delete_container(self, host_server, container_id):
        results = self.salt_client.cmd(
            host_server,
            'cmd.run',
            ['docker stop {container_id}'.format(container_id=container_id)],
            expr_form='list')
        self.logger.debug(
            "Salt return: {stop_cmd}".format(stop_cmd=results[host_server]))

        results = self.salt_client.cmd(
            host_server,
            'cmd.run',
            ['docker rm {container_id}'.format(container_id=container_id)],
            expr_form='list')
        self.logger.debug(
            "Salt return: {rm_cmd}".format(rm_cmd=results[host_server]))

    # Stops and deletes a formation. Use with caution
    def delete_formation(self, user, formation_name):
        formation_list = self.list_formations(user)
        if formation_name in formation_list:
            pass
        else:
            self.logger.error("Formation name not found!")

    def list_containers(self, user, formation_name):
        pass

    def create_containers(self,
                          user,
                          number,
                          formation_name,
                          cpu_shares,
                          ram,
                          port_list,
                          hostname_scheme,
                          volume_list,
                          docker_image,
                          force_host_server=None):

        f = Formation(user, formation_name)
        # Convert ram to bytes from MB
        ram = ram * 1024 * 1024

        # Get the cluster machines on each creation
        cluster_list = self.get_docker_cluster()
        circular_cluster_list = CircularList(
            self.order_cluster_by_load(cluster_list))

        # Loop for the requested amount of containers to be created
        for i in range(1, number + 1):
            # [{"host_port":ssh_host_port, "container_port":ssh_container_port}]
            ssh_host_port = 9022 + i
            ssh_container_port = 22
            host_server = circular_cluster_list[i].hostname
            hostname = '{hostname}{number}'.format(hostname=hostname_scheme,
                                                   number=str(i).zfill(3))

            # First check if we can add this host to salt.  If not exit with -1
            if self.check_salt_key_used(hostname):
                self.logger.error(
                    'Salt key is already taken for {hostname}'.format(
                        hostname=hostname))
                sys.exit(-1)

            # We are being asked to overwrite this
            if force_host_server:
                host_server = force_host_server
            validated_ports = []

            while self.check_port_used(host_server, ssh_host_port):
                ssh_host_port = ssh_host_port + 1

            for port in port_list:
                self.logger.info(
                    "Checking if port {port} on {host} is in use".format(
                        port=port, host=host_server))
                if ':' in port:
                    ports = port.split(':')

                    # Only check if the host port is free.  The container port should be free
                    while self.check_port_used(host_server, ports[0]):
                        ports[0] = int(ports[0]) + 1

                    # Add this to the validated port list
                    validated_ports.append(
                        '{host_port}:{container_port}'.format(
                            host_port=str(ports[0]),
                            container_port=str(ports[1])))
                else:
                    while self.check_port_used(host_server, port):
                        port = int(port) + 1
                    validated_ports.append(str(port))

            self.logger.info(
                'Adding app to formation {formation_name}: {hostname} cpu_shares={cpu} '
                'ram={ram} ports={ports} host_server={host_server} docker_image={docker_image}'
                .format(formation_name=formation_name,
                        hostname=hostname,
                        cpu=cpu_shares,
                        ram=ram,
                        ports=validated_ports,
                        host_server=host_server,
                        docker_image=docker_image))

            f.add_app(None, '{hostname}'.format(hostname=hostname), cpu_shares,
                      ram, validated_ports, ssh_host_port, ssh_container_port,
                      host_server, docker_image, volume_list)

        # Lets get this party started
        for app in f.application_list:
            self.start_application(app)
            #self.logger.info("Sleeping 2 seconds while the container starts")
            #time.sleep(2)
            #self.bootstrap_application(app)

        self.logger.info("Saving the formation to ETCD")
        self.save_formation_to_etcd(f)
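A small standalone sketch of the formation_parser expression built in __init__ above (the sample ETCD path is made up; setResultsName makes the pieces addressable by name):

from pyparsing import Literal, Word, srange

formation_parser = Literal('/formations/') + \
    Word(srange("[0-9a-zA-Z_-]")).setResultsName('username') + Literal('/') + \
    Word(srange("[0-9a-zA-Z_-]")).setResultsName('formation_name')

result = formation_parser.parseString('/formations/alice/web_cluster')
print(result['username'], result['formation_name'])  # alice web_cluster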
Example #16
        'Nebraska', 'Oklahoma', 'South Dakota', 'Texas', 'Washington', 'Other States' ]
 # List of the sections of data that need to be found
 name_labels = [ 'Cattle on Feed', 'Cattle Placed on Feed', 'Marketed', 'Other Disappearance' ]
 end = site_contents.find('Number of Cattle on Feed on 1,000+ Capacity Feedlots by Month') # set to point at beginning of report (will be changed each iteration of following loop)
 new_date = date - relativedelta(months = 1) # subtract one month because data is for previous month that report is published
 n = 0
 while n < len(name_labels):
     end = site_contents.find(name_labels[n], end) # store where name_label occurs (always after previous name_label)
     x = 0
     while x < len(state_labels):
         start = site_contents.find(state_labels[x], end) # find where the state name occurs 
         end = site_contents.find('\r\n', start) # end is changed to end of line
         # This is the grammar for each line of data. It starts with the name of the state and is followed by a varying number
         # of periods. Then five numbers of data follow the colon.
         line_grammar = Literal(state_labels[x]) + Suppress(ZeroOrMore(Literal('.'))) + Suppress(Literal(':')) + Word(nums+',') * 5
         parsed = line_grammar.parseString(site_contents[start:end])[3] # parse the line and only keep the fourth element because it contains most recent data
         headings = ['Date', 'Thousand Head'] 
         # The 'Cattle on Feed' data corresponds to the current month so 1 month is added to the date and
         # the year, month, and day are converted to strings  
         if n == 0:
             month = str((new_date + relativedelta(months = 1)).month)
             day = (new_date + relativedelta(months = 1)).day
             year = (new_date + relativedelta(months = 1)).year
         else:
             year = str(new_date.year)
             month = str(new_date.month)
             day = str(new_date.day)
         if len(month) == 1:
             month = '0' + month # prepend 0 to month if it is one digit
         data = {'Date': [str(year) + str(month) + str(day)], 'Thousand Head': [parsed]}
         data_df = pd.DataFrame(data, columns = headings)
Example #17
from pyparsing import Literal, Empty, replaceWith


def insertResult(v):
    """
    Parser helper function that simply inserts a result in 
    the list of values returned.
    """
    return Empty().setParseAction(replaceWith(v))


p1 = Literal("1")
p2 = Literal("2") + insertResult(
    "B")  # 'AttributeError: 'NoneType' object has no attribute 'streamline''
p3 = insertResult("B") + Literal("3")  # Blows python stack

r1 = p1.parseString("1")
r2 = p2.parseString("2")
r3 = p3.parseString("3")

print r2

print r3
Example #18
  def start_verifying(self):
    # Parse out the username and formation name 
    # from the ETCD directory string
    formation_parser = Literal('/formations/') + \
      Word(srange("[0-9a-zA-Z_-]")).setResultsName('username') + Literal('/') + \
      Word(srange("[0-9a-zA-Z_-]")).setResultsName('formation_name')

    # call out to ETCD and load all the formations
    formation_list = []

    user_list = self.etcd.list_directory('formations')
    if user_list:
      for user in user_list:
        formations = self.etcd.list_directory(user)
        for formation in formations:
          parse_results = formation_parser.parseString(formation)
          if parse_results:
            formation_name = parse_results['formation_name']
            username = parse_results['username']

            self.logger.info('Attempting to load formation: {formation_name} '
              'with username: {username}'.format(formation_name=formation_name,
                username=username))
            f = self.manager.load_formation_from_etcd(username, formation_name)
            formation_list.append(f)
          else:
            self.logger.error("Could not parse the ETCD string")

      if formation_list:
        # TODO Use background salt jobs
        # Start verifying things
        # Ask salt to do these things for me and give me back an job_id
        # results = self.salt_client.cmd_async(host, 'cmd.run', 
        #   ['netstat -an | grep %s | grep tcp | grep -i listen' % port], 
        #   expr_form='list')
        # 
        # salt-run jobs.lookup_jid <job id number>
        for f in formation_list:
          for app in f.application_list:
            # Check to make sure it's up and running
            self.logger.info("Running verification on app: "
              "{app_name}".format(app_name=app.hostname))
            self.logger.info('{server} docker ps | grep {container_id}'.format(
              server=app.host_server, 
              container_id=app.container_id))
            results = self.salt_client.cmd(app.host_server, 'cmd.run', 
              ['docker ps | grep {container_id}'.format(container_id=app.container_id)], 
              expr_form='list')
            if results:
              self.logger.debug("Salt return: {docker_results}".format(
                docker_results=results[app.host_server]))
              if results[app.host_server] == "":
                self.logger.error("App {app} is not running!".format(
                  app=app.hostname))
                # Start the app back up and run start.sh on there
                self.start_application(app)
              else:
                self.logger.info("App {app} is running.  Checking if "
                  "cron is running also".format(app=app.hostname))
                # Check if cron is running on the container and bring it back 
                # up if needed
                # Log in with ssh and check if cron is up and running
                self.logger.info("Sleeping 2 seconds while the container starts")
                time.sleep(2)
                self.check_running_application(app)
            else:
              self.logger.error("Call out to server {server} failed. Moving it".format(
                server=app.host_server))
              # move the container
              self.move_application(app)
Example #19
    def start_verifying(self):
        # Parse out the username and formation name
        # from the ETCD directory string
        formation_parser = Literal('/formations/') + \
          Word(srange("[0-9a-zA-Z_-]")).setResultsName('username') + Literal('/') + \
          Word(srange("[0-9a-zA-Z_-]")).setResultsName('formation_name')

        # call out to ETCD and load all the formations
        formation_list = []

        user_list = self.etcd.list_directory('formations')
        if user_list:
            for user in user_list:
                formations = self.etcd.list_directory(user)
                for formation in formations:
                    parse_results = formation_parser.parseString(formation)
                    if parse_results:
                        formation_name = parse_results['formation_name']
                        username = parse_results['username']

                        self.logger.info(
                            'Attempting to load formation: {formation_name} '
                            'with username: {username}'.format(
                                formation_name=formation_name,
                                username=username))
                        f = self.manager.load_formation_from_etcd(
                            username, formation_name)
                        formation_list.append(f)
                    else:
                        self.logger.error("Could not parse the ETCD string")

            if formation_list:
                # TODO Use background salt jobs
                # Start verifying things
                # Ask salt to do these things for me and give me back an job_id
                # results = self.salt_client.cmd_async(host, 'cmd.run',
                #   ['netstat -an | grep %s | grep tcp | grep -i listen' % port],
                #   expr_form='list')
                #
                # salt-run jobs.lookup_jid <job id number>
                for f in formation_list:
                    for app in f.application_list:
                        # Check to make sure it's up and running
                        self.logger.info(
                            "Running verification on app: "
                            "{app_name}".format(app_name=app.hostname))
                        self.logger.info(
                            '{server} docker ps | grep {container_id}'.format(
                                server=app.host_server,
                                container_id=app.container_id))
                        results = self.salt_client.cmd(
                            app.host_server,
                            'cmd.run', [
                                'docker ps | grep {container_id}'.format(
                                    container_id=app.container_id)
                            ],
                            expr_form='list')
                        if results:
                            self.logger.debug(
                                "Salt return: {docker_results}".format(
                                    docker_results=results[app.host_server]))
                            if results[app.host_server] == "":
                                self.logger.error(
                                    "App {app} is not running!".format(
                                        app=app.hostname))
                                # Start the app back up and run start.sh on there
                                self.start_application(app)
                            else:
                                self.logger.info(
                                    "App {app} is running.  Checking if "
                                    "cron is running also".format(
                                        app=app.hostname))
                                # Check if cron is running on the container and bring it back
                                # up if needed
                                # Log in with ssh and check if cron is up and running
                                self.logger.info(
                                    "Sleeping 2 seconds while the container starts"
                                )
                                time.sleep(2)
                                self.check_running_application(app)
                        else:
                            self.logger.error(
                                "Call out to server {server} failed. Moving it"
                                .format(server=app.host_server))
                            # move the container
                            self.move_application(app)
Example #20
try:
    date=datetime.datetime.strptime(site_contents[begin_date:end_date].strip().replace(',',''), '%b %d %Y')
except ValueError:
    date=datetime.datetime.strptime(site_contents[begin_date:end_date].strip().replace(',',''), '%B %d %Y')
date = date - datetime.timedelta(days = 5)
# list of each region in the report
labels = [ 'North East', 'South Atlantic', 'North Central', 'South Central', 'West', 'U.S. total' ]
# Loops through each region and uses pyparsing to find the head and average 
# live weight for the turkeys slaughtered. 
x = 0
while x < len(labels):
    suppress = Suppress(Word(printables))
    line = Literal(labels[x]) + suppress * 4 + Word(nums+',') + Word(nums+'.') # grammar for each line of data following a region
    first = site_contents.find(labels[x]) # index of label
    end = site_contents.find('\r\n', first) # index of end of the line
    line = line.parseString(site_contents[first:end]) # parse line and store in list "line"
    line = [float(y.replace(',','')) for y in line[1:]] # remove commas and convert to floats
    headings = [ 'Date','Actual Turkey Slaughter', 'Turkey Average Weight' ]
    data={ 'Date':[date.strftime('%Y-%m-%d')], 'Actual Turkey Slaughter': [line[0]], 'Turkey Average Weight': [line[1]] }
    data_df = pd.DataFrame(data, columns = headings)
    data_df.index = data_df['Date']
    data_df = data_df.drop('Date', 1)
    name = labels[x].replace(' ','_').replace('.','')
    quandl_code = 'USDA_NW_PY021_' + name.upper() + '\r'
    print 'code: ' + quandl_code
    print 'name: Weekly National Turkey Slaughter- ' + labels[x].title() + '\r'
    reference_text = '  Historical figures from USDA can be verified using the LMR datamart located ' \
    '\n  at http://mpr.datamart.ams.usda.gov.\n' 
    print 'description:  Weekly national turkey slaughter data' \
    '\n  from the USDA NW_PY021 report published by the USDA Agricultural Marketing Service ' \
    '\n  (AMS). This dataset covers the ' + labels[x] + '.\n'\
Example #21
class Manager(object):
  '''
    A manager to orchestrate the creation and 
    deletion of container clusters
  '''
  def __init__(self, logger):
    self.salt_client = salt.client.LocalClient()
    self.etcd = Etcd(logger)
    self.logger = logger
    # Parse out the username and formation name 
    # from the ETCD directory string
    self.formation_parser = Literal('/formations/') + \
      Word(srange("[0-9a-zA-Z_-]")).setResultsName('username') + Literal('/') + \
      Word(srange("[0-9a-zA-Z_-]")).setResultsName('formation_name')

  def fqdn_to_shortname(self, fqdn):
    if '.' in fqdn:
      return fqdn.split('.')[0]
    else:
      return fqdn

  def check_salt_key_used(self, hostname):
    self.logger.info("Checking if the key for {host} is already used".format(
      host=hostname))
    s = subprocess.Popen('salt-key', shell=True, stdout=PIPE)
    salt_list = s.communicate()[0]

    if hostname in salt_list:
      return True
    else:
      return False

  def check_port_used(self, host, port):
    self.logger.info("Checking if {port} on {host} is open with salt-client".format(
      host=host, port=port))
    results = self.salt_client.cmd(host, 'cmd.run', 
      ['netstat -an | grep %s | grep tcp | grep -i listen' % port], 
      expr_form='list')
    self.logger.debug("Salt return: {lsof}".format(lsof=results[host]))

    if results[host] != '':
      return True
    else:
      return False

  # TODO
  def check_for_existing_formation(self, formation_name):
    # If the user passed in an existing formation name lets append to it
    pass

  def get_docker_cluster(self):
    # Return a list of docker hosts
    cluster = self.etcd.get_key('docker_cluster')
    if cluster is not None:
      return cluster.split(',')
    else:
      return None

  def get_load_balancer_cluster(self):
    # Return a list of nginx hosts
    cluster = self.etcd.get_key('nginx_cluster')
    if cluster is not None:
      return cluster.split(',')
    else:
      return None

  def order_cluster_by_load(self, cluster_list):
    # Sample salt output
    # {'dlceph01.drwg.local': '0.27 0.16 0.15 1/1200 26234'}

    # define grammar
    point = Literal('.')
    number = Word(nums) 
    floatnumber = Combine( number + point + number)
    float_list = OneOrMore(floatnumber)

    results = self.salt_client.cmd(','.join(cluster_list), 'cmd.run', ['cat /proc/loadavg'], expr_form='list')
    load_list = []
    self.logger.debug("Salt load return: {load}".format(load=results))

    for host in results:
      host_load = results[host]
      match = float_list.parseString(host_load)
      if match:
        one_min = match[0]
        five_min = match[1]
        fifteen_min = match[2]
        self.logger.debug("Adding Load({host}, {one_min}, {five_min}, {fifteen_min}".format(
          host=host, one_min=one_min, five_min=five_min, fifteen_min=fifteen_min))
        load_list.append(Load(host, one_min, five_min, fifteen_min))
      else:
        self.logger.error("Could not parse host load output")

    # Sort the list by fifteen min load
    load_list = sorted(load_list, key=lambda x: x.fifteen_min_load)
    for load in load_list:
      self.logger.debug("Sorted load list: " + str(load))

    return load_list

  # Return a list of formations the user owns
  def list_formations(self, username):
    formation_list = []
    formations = self.etcd.list_directory('formations/'+username)
    for formation in formations:
      parse_results = self.formation_parser.parseString(formation)
      if parse_results:
        formation_name = parse_results['formation_name']
        formation_list.append(formation_name)
      else:
        self.logger.error("Could not parse the ETCD string")
    self.logger.info('Formation list {formations} for user {user}'.format(
      formations=formation_list, user=username))
    return formation_list

  # Load the formation and return a Formation object
  def load_formation_from_etcd(self, username, formation_name):
    f = Formation(username,formation_name) 
    app_list = json.loads(json.loads(
      self.etcd.get_key('/formations/{username}/{formation_name}'.format(
        username=username, formation_name=formation_name))))
    for app in app_list:
      # If our host doesn't support swapping we're going to get some garbage 
      # message in here
      if "WARNING" in app['container_id']:
        app['container_id'] = app['container_id'].replace("WARNING: Your "\
          "kernel does not support memory swap capabilities. Limitation discarded.\n","")
        #Message changed in docker 0.8.0
        app['container_id'] = app['container_id'].replace("WARNING: WARNING:"\
          "Your kernel does not support swap limit capabilities. Limitation "\
          "discarded.\n","")
      app['container_id'] = app['container_id'].strip('\n')

      # Set volumes if needed
      volumes = None
      if app['volumes']:
        self.logger.info("Setting volumes to: " + ''.join(app['volumes']))
        volumes = app['volumes']

      f.add_app(app['container_id'], app['hostname'], app['cpu_shares'],
        app['ram'], app['port_list'], app['ssh_port'], 22, app['host_server'], volumes)

    # Return fully parsed and populated formation object
    return f

  def save_formation_to_etcd(self, formation):
    name = formation.name
    username = formation.username

    self.etcd.set_key('formations/{username}/{formation_name}'.format(
      username=username, formation_name=name), formation)

  # TODO write code to add new apps to load balancer
  def add_app_to_nginx(self, app):
    pass

  # TODO write code to add new apps to the load balancer
  def add_app_to_apache(self, app):
    pass

  def start_application(self, app):
    # Run a salt cmd to startup the formation
    docker_command = "docker run -c={cpu_shares} -d -i -t -h=\"{hostname}\" -m={ram}m "\
      "--name={hostname} {port_list} {volume_list} {image} /sbin/my_init -- bash"

    self.logger.info("Port list %s" % app.port_list)
    port_list = ' '.join(map(lambda x: '-p ' + x, app.port_list))

    # Only create this list if needed
    volume_list = ''
    if app.volume_list:
      volume_list = ' '.join(map(lambda x: '-v ' + x, app.volume_list))

    d = docker_command.format(cpu_shares=app.cpu_shares, 
      hostname=app.hostname, ram=app.ram, image=app.docker_image, 
      port_list=port_list, volume_list=volume_list) 

    self.logger.info("Starting up docker container on {host_server} with cmd: {docker_cmd}".format(
      host_server=app.host_server, docker_cmd=d))

    salt_process = self.salt_client.cmd(app.host_server,'cmd.run', [d], expr_form='list')
    container_id = salt_process[app.host_server]
    if container_id:
      if "WARNING" in container_id:
        container_id = container_id.replace("WARNING: "\
          "Your kernel does not support swap limit capabilities. Limitation "\
          "discarded.\n","")
        container_id = container_id.strip("\n")
      #Docker only uses the first 12 chars to identify a container
      app.change_container_id(container_id[0:12])

  def bootstrap_application(self, app):
    # Log into the host with paramiko and run the salt bootstrap script 
    host_server = self.fqdn_to_shortname(app.host_server)

    self.logger.info("Bootstrapping {hostname} on server: {host_server} port: {port}".format(
      hostname=app.hostname, 
      host_server=host_server,
      port=app.ssh_port))

    try:
      ssh = paramiko.SSHClient()
      ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
      ssh.connect(hostname=host_server, port=app.ssh_port, 
        username='******', password='******')

      transport = paramiko.Transport((host_server, app.ssh_port))
      transport.connect(username = '******', password = '******')
      sftp = paramiko.SFTPClient.from_transport(transport)
      sftp.put('bootstrap.sh', '/root/bootstrap.sh')
      sftp.put('start.sh', '/root/start.sh')

      ssh.exec_command("chmod +x /root/bootstrap.sh")
      ssh.exec_command("chmod +x /root/start.sh")
      stdin, stdout, stderr = ssh.exec_command("bash /root/start.sh")
      self.logger.debug(''.join(stdout.readlines()))
      ssh.close()
    except SSHException:
      self.logger.error("Failed to log into server.  Shutting it down and cleaning up the mess.")
      self.delete_container(app.host_server, app.container_id)

  # Stops and deletes a container
  def delete_container(self, host_server, container_id):
    results = self.salt_client.cmd(host_server, 'cmd.run', 
      ['docker stop {container_id}'.format(container_id=container_id)], 
      expr_form='list')
    self.logger.debug("Salt return: {stop_cmd}".format(stop_cmd=results[host_server]))

    results = self.salt_client.cmd(host_server, 'cmd.run', 
      ['docker rm {container_id}'.format(container_id=container_id)], 
      expr_form='list')
    self.logger.debug("Salt return: {rm_cmd}".format(rm_cmd=results[host_server]))

  # Stops and deletes a formation. Use with caution
  def delete_formation(self, user, formation_name):
    formation_list = self.list_formations(user)
    if formation_name in formation_list:
      pass
    else:
      self.logger.error("Formation name not found!")

  def list_containers(self, user, formation_name):
    pass

  def create_containers(self, user, number, formation_name,
    cpu_shares, ram, port_list, hostname_scheme, volume_list, 
    docker_image, force_host_server=None):

    f = Formation(user, formation_name)
    # Convert ram to bytes from MB
    ram = ram * 1024 * 1024

    # Get the cluster machines on each creation
    cluster_list = self.get_docker_cluster()
    circular_cluster_list = CircularList(self.order_cluster_by_load(cluster_list))

    # Loop for the requested amount of containers to be created
    for i in range(1, number+1):
      # [{"host_port":ssh_host_port, "container_port":ssh_container_port}]
      ssh_host_port = 9022 + i
      ssh_container_port = 22
      host_server = circular_cluster_list[i].hostname
      hostname = '{hostname}{number}'.format(
        hostname=hostname_scheme,
        number=str(i).zfill(3))

      # First check if we can add this host to salt.  If not exit with -1
      if self.check_salt_key_used(hostname):
        self.logger.error('Salt key is already taken for {hostname}'.format(
          hostname=hostname))
        sys.exit(-1)

      # We are being asked to overwrite this
      if force_host_server:
        host_server = force_host_server
      validated_ports = []

      while self.check_port_used(host_server, ssh_host_port):
        ssh_host_port = ssh_host_port +1

      for port in port_list:
        self.logger.info("Checking if port {port} on {host} is in use".format(
          port=port, host=host_server))
        if ':' in port:
          ports = port.split(':')

          # Only check if the host port is free.  The container port should be free
          while self.check_port_used(host_server, ports[0]):
            ports[0] = int(ports[0]) + 1

          # Add this to the validated port list
          validated_ports.append('{host_port}:{container_port}'.format(
            host_port = str(ports[0]),
            container_port = str(ports[1])))
        else:
          while self.check_port_used(host_server, port):
            port = int(port) + 1
          validated_ports.append(str(port))

      self.logger.info('Adding app to formation {formation_name}: {hostname} cpu_shares={cpu} '
        'ram={ram} ports={ports} host_server={host_server} docker_image={docker_image}'.format(
          formation_name=formation_name, hostname=hostname, 
          cpu=cpu_shares, ram=ram, ports=validated_ports, host_server=host_server,
          docker_image=docker_image))

      f.add_app(None, '{hostname}'.format(hostname=hostname), 
        cpu_shares, ram, validated_ports, ssh_host_port, 
        ssh_container_port, host_server, docker_image, volume_list)

    # Lets get this party started
    for app in f.application_list:
      self.start_application(app)
      #self.logger.info("Sleeping 2 seconds while the container starts")
      #time.sleep(2)
      #self.bootstrap_application(app)

    self.logger.info("Saving the formation to ETCD")
    self.save_formation_to_etcd(f)
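The comment in order_cluster_by_load shows the expected salt return format; here is a minimal standalone sketch of the same float-list grammar run against that sample value:

from pyparsing import Combine, Literal, OneOrMore, Word, nums

point = Literal('.')
number = Word(nums)
floatnumber = Combine(number + point + number)
float_list = OneOrMore(floatnumber)

# Sample /proc/loadavg-style output taken from the comment above.
match = float_list.parseString('0.27 0.16 0.15 1/1200 26234')
print(match[0], match[1], match[2])  # 0.27 0.16 0.15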
Example #22
# names of each cut in the report
labels = ['BREAST - B/S', 'TENDERLOINS', 'BREAST - WITH RIBS', 'BREAST - LINE RUN', 'LEGS', 'LEG QUARTERS (BULK)',\
        'DRUMSTICKS', 'THIGHS', 'B/S THIGHS', 'WINGS (WHOLE)', 'BACKS AND NECKS (STRIPPED)', 'LIVERS (5 POUND TUBS)',\
        'GIZZARDS (HEARTS)']

ending_index = 0 # initializes ending_index to 0 to be used in following loop       
# Loops through each cut in labels and uses pyparsing to find the weighted average
# and volume for that cut. The data and data are formatted into a table and the 
# relevant quandl data is printed.
x = 0
while x < len(labels):
    line = Literal(labels[x]) + Word(nums+'-') + Word(nums+'.') + Word(nums+',') # grammar to find each label's data
    starting_index = site_contents.find(labels[x], ending_index) # stores the index of the beginning of each label's data
    ending_index = site_contents.find('\r\n', starting_index) # stores the index of the end of the label's data
    text = site_contents[starting_index:ending_index] # the line to be parsed is from starting_index to ending_index
    parsed = line.parseString(text) # parses the line and stores it in "parsed"
    headings = ['Date', 'Weighted Average (Price)', 'Volume (Lbs)']
    data = {'Date': [date.strftime('%Y-%m-%d')], 'Weighted Average (Price)': [parsed[2]], 'Volume (Lbs)': [parsed[3].replace(',','')]}
    data_df = pd.DataFrame(data, columns = headings)
    data_df.index = data_df['Date']
    data_df = data_df.drop('Date', 1)
    replace = re.compile('[ /]') # list of characters to be replaced 
    remove = re.compile('[,%#-&()!$+<>?/\'"{}.*@]') # list of characters to be removed
    name1 = replace.sub('_', labels[x]) # replace certain characters with '_'
    name2 = remove.sub('', name1).upper() # remove certain characters and convert to upper case
    name2 = name2.translate(None, '-') # ensure '-' character is removed
    quandl_code = 'USDA_AJ_PY047_' + name2 + '\r'
    print 'code: ' + quandl_code
    print 'name: Daily Northeast Broiler/Fryer Parts- ' + labels[x].title() + '\r'
    reference_text = '  Historical figures from USDA can be verified using the LMR datamart located ' \
    '\n  at http://mpr.datamart.ams.usda.gov.\n'