Example #1
        def timedtext_to_srt(timedtext):
            def _format_srt_time(millisec):
                sec, milli = divmod(millisec, 1000)
                m, s = divmod(int(sec), 60)
                h, m = divmod(m, 60)
                return "{:02}:{:02}:{:02},{}".format(h, m, s, milli)

            i = 1
            srt = ""
            dom = parse_xml(timedtext)
            body = dom.get_elements_by_tag_name("body")[0]
            paras = body.get_elements_by_tag_name("p")
            for para in paras:
                srt += str(i) + "\n"
                srt += (
                    _format_srt_time(int(para.attributes["t"].value))
                    + " --> "
                    + _format_srt_time(
                        int(para.attributes["t"].value)
                        + int(para.attributes["d"].value)
                    )
                    + "\n"
                )
                for child in para.child_nodes:
                    if child.node_name == "br":
                        srt += "\n"
                    elif child.node_name == "#text":
                        srt += str(child.data)
                    srt += "\n\n"
                i += 1

            return srt
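
A note that applies across this listing: parse_xml is usually an alias for xml.dom.minidom's parse/parseString (Examples #25 and #27 import it with "from xml.dom.minidom import parse as parse_xml"), although Example #1 above calls snake_case accessors such as get_elements_by_tag_name, which points at a thin wrapper around the DOM rather than minidom itself. A minimal, self-contained sketch of the same attribute access with minidom's parseString follows; the timedtext fragment is made up for illustration, not real YouTube output.

# Minimal sketch, assuming parse_xml is xml.dom.minidom.parseString
# (Examples #25 and #27 import it under a similar alias); the XML below
# is a hypothetical sample, not output from any project in this listing.
from xml.dom.minidom import parseString as parse_xml

dom = parse_xml('<body><p t="0" d="1500">Hello<br/>world</p></body>')
para = dom.getElementsByTagName("p")[0]
start = int(para.attributes["t"].value)       # 0
duration = int(para.attributes["d"].value)    # 1500
text = "".join(n.data for n in para.childNodes if n.nodeName == "#text")
print(start, duration, text)                  # 0 1500 Helloworld
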
Example #2
def get_latest_version() -> Version:
    raw_rss = requests.get(RELEASES_RSS_URL).text
    rss = parse_xml(raw_rss)
    version = (rss.childNodes[0].getElementsByTagName("channel")
               [0].getElementsByTagName("item")[0].getElementsByTagName(
                   "title")[0].childNodes[0].nodeValue)
    return Version(version)
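
Example #2 walks rss, channel, item and title in a single chained expression. The same lookup reads more easily step by step; the sketch below assumes parse_xml is minidom's parseString and uses a tiny in-memory feed in place of the response fetched from RELEASES_RSS_URL (requests and Version belong to the surrounding project and are not needed here).

# Sketch only: parse_xml is assumed to be xml.dom.minidom.parseString and
# raw_rss is a stand-in for the body downloaded from RELEASES_RSS_URL.
from xml.dom.minidom import parseString as parse_xml

raw_rss = "<rss><channel><item><title>1.2.3</title></item></channel></rss>"
rss = parse_xml(raw_rss)
channel = rss.documentElement.getElementsByTagName("channel")[0]
item = channel.getElementsByTagName("item")[0]
title = item.getElementsByTagName("title")[0].childNodes[0].nodeValue
print(title)  # 1.2.3
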
Example #3
File: server.py Project: BloatIt/bloatit
 def load_entity_collection(self,manager):
     url = self._build_entity_collection_url(manager);
     
     response = urllib.urlopen(url).read()
     try:
         dom = parse_xml(response)
     except xml.parsers.expat.ExpatError:
         print "Error parsing response: "
         print response
         return None
         
     xml_rest = dom.getElementsByTagName('rest').item(0)
     
     xml_collection = xml_rest.getElementsByTagName(manager.collection_code).item(0)
     
     xml_entities = xml_collection.getElementsByTagName(manager.code)
     
     entity_list = list()
     
     exec 'from '+manager.base_module+' import '+manager.base_class
     
     entity_class = locals()[manager.base_class]
         
     for entity in xml_entities:
         id = int(entity.childNodes[0].data)
         obj = server.create_entity(entity_class, id)
         entity_list.append(obj)
     
     return entity_list
Example #4
    def upload_file(self, name, fd):
        assert _FILEUPLOAD, "poster needs to be installed, hg+https://bitbucket.org/chrisatlee/poster"

        if not self._authenticated:
            self._authenticate()

        url = self._url + "/file/upload"

        params = (MultipartParam(name='Filedata',
                                 fileobj=fd,
                                 filename=name,
                                 filetype='application/octet-stream'), )

        datagen, headers = multipart_encode(params)
        request = Request(url, datagen, headers)

        opener = register_openers()
        opener.addheaders.append(('Cookie', 'token=%s' % self._token))

        reply = opener.open(request)

        dom = parse_xml(reply)

        reply_dom = dom.getElementsByTagName('reply')[0]

        status = get_text_by_tag(reply_dom, 'status')
        if status != 'OK':
            raise Exception("Upload Failed")

        return reply
Example #5
    def load_entity_collection(self, manager):
        url = self._build_entity_collection_url(manager)

        response = urllib.urlopen(url).read()
        try:
            dom = parse_xml(response)
        except xml.parsers.expat.ExpatError:
            print "Error parsing response: "
            print response
            return None

        xml_rest = dom.getElementsByTagName('rest').item(0)

        xml_collection = xml_rest.getElementsByTagName(
            manager.collection_code).item(0)

        xml_entities = xml_collection.getElementsByTagName(manager.code)

        entity_list = list()

        exec 'from ' + manager.base_module + ' import ' + manager.base_class

        entity_class = locals()[manager.base_class]

        for entity in xml_entities:
            id = int(entity.childNodes[0].data)
            obj = server.create_entity(entity_class, id)
            entity_list.append(obj)

        return entity_list
Example #6
    def upload_file(self, name, fd):
        assert _FILEUPLOAD, "poster needs to be installed, hg+https://bitbucket.org/chrisatlee/poster"
        
        if not self._authenticated:
            self._authenticate()

        url = self._url + "/file/upload"


        params = (MultipartParam(name='Filedata',
                                 fileobj=fd,
                                 filename=name,
                                 filetype='application/octet-stream'),)

        datagen, headers = multipart_encode(params)
        request = Request(url, datagen, headers)

        opener = register_openers()
        opener.addheaders.append(('Cookie', 'token=%s' % self._token))
        
        reply = opener.open(request)
        
        dom = parse_xml(reply)

        reply_dom = dom.getElementsByTagName('reply')[0]

        status = get_text_by_tag(reply_dom, 'status')
        if status != 'OK':
            raise Exception("Upload Failed")

        return reply
Example #7
def normalize_cov_xml(xmlfile, dest=None):
    """Read XMLFILE, remove build-environment nondeterminism from the
       XML structure, and write it back out to DEST (or to XMLFILE if
       DEST is not specified)."""
    with parse_xml(xmlfile) as doc:
        root = doc.documentElement

        epoch = os.environ.get('SOURCE_DATE_EPOCH')
        cwd = os.getcwd()

        if epoch is not None:
            root.setAttribute('timestamp', epoch)
        else:
            try:
                root.removeAttribute('timestamp')
            except DOMNotFoundError:
                pass

        for source in doc.getElementsByTagName('source'):
            source.normalize()
            if (source.firstChild is not None
                    and source.firstChild.nodeType == DOMNode.TEXT_NODE
                    and source.firstChild.data.strip() == cwd):
                source.replaceChild(doc.createTextNode("."), source.firstChild)

        newxml = doc.documentElement.toprettyxml(indent=" ")

    if dest is None:
        dest = xmlfile
    if isinstance(dest, str):
        with open(dest, "wt", encoding="utf-8") as w:
            w.write(newxml)
    else:
        dest.write(newxml)
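
A hedged usage sketch for normalize_cov_xml above: it feeds the function a tiny, made-up Cobertura-style fragment via StringIO instead of a real coverage.xml and writes the normalized result to a second StringIO; SOURCE_DATE_EPOCH and the XML content are assumptions made only for this illustration.

# Usage sketch for normalize_cov_xml; the input XML is fabricated and
# SOURCE_DATE_EPOCH is set so the timestamp is rewritten deterministically.
import io
import os

os.environ["SOURCE_DATE_EPOCH"] = "1700000000"
src = io.StringIO('<coverage timestamp="1234"><sources><source>'
                  + os.getcwd() +
                  '</source></sources></coverage>')
out = io.StringIO()
normalize_cov_xml(src, dest=out)
print(out.getvalue())  # timestamp replaced, cwd in <source> collapsed to "."
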
Example #8
        def timedtext_to_srt(timedtext):
            def _format_srt_time(millisec):
                sec, milli = divmod(millisec, 1000)
                m, s = divmod(int(sec), 60)
                h, m = divmod(m, 60)
                return "{:02}:{:02}:{:02},{}".format(h, m, s, milli)

            i = 1
            srt = ""
            dom = parse_xml(timedtext)
            body = dom.get_elements_by_tag_name("body")[0]
            paras = body.get_elements_by_tag_name("p")
            for para in paras:
                srt += str(i) + "\n"
                srt += (_format_srt_time(int(para.attributes["t"].value)) +
                        " --> " + _format_srt_time(
                            int(para.attributes["t"].value) +
                            int(para.attributes["d"].value)) + "\n")
                for child in para.child_nodes:
                    if child.node_name == "br":
                        srt += "\n"
                    elif child.node_name == "#text":
                        srt += str(child.data)
                    srt += "\n\n"
                i += 1

            return srt
Example #9
def find_enclosed_urls(feed_url):
    feed=urlopen(feed_url)
    feed_doc=parse_xml(feed)
    for item in feed_doc.getElementsByTagName('item'):
        title=get_text(item,'title')
        enclosure=get_node(item,'enclosure')
        if enclosure is not None:
            yield enclosure.getAttribute('url'), title
Example #10
 def _get_directory_entries(self, xml_string):
     document = parse_xml(xml_string)
     results = []
     for directory_entry in document.getElementsByTagName('DirectoryEntry'):
         name = self._get_text(directory_entry.getElementsByTagName('Name')[0])
         number = self._get_text(directory_entry.getElementsByTagName('Telephone')[0])
         results.append((name, number))
     return results
Example #11
 def _parse_config(self, config):
     self._config = parse_xml(config)
     self.texture_path = self._parse_data('texture', 'name')
     self.texture = Image(self.texture_path).texture
     # self.emitter_x = float(self._parse_data('sourcePosition', 'x'))
     # self.emitter_y = float(self._parse_data('sourcePosition', 'y'))
     self.emitter_x_variance = float(
         self._parse_data('sourcePositionVariance', 'x'))
     self.emitter_y_variance = float(
         self._parse_data('sourcePositionVariance', 'y'))
     self.gravity_x = float(self._parse_data('gravity', 'x'))
     self.gravity_y = float(self._parse_data('gravity', 'y'))
     self.emitter_type = int(self._parse_data('emitterType'))
     self.max_num_particles = int(self._parse_data('maxParticles'))
     self.life_span = max(0.01, float(self._parse_data('particleLifeSpan')))
     self.life_span_variance = float(
         self._parse_data('particleLifespanVariance'))
     self.start_size = float(self._parse_data('startParticleSize'))
     self.start_size_variance = float(
         self._parse_data('startParticleSizeVariance'))
     self.end_size = float(self._parse_data('finishParticleSize'))
     self.end_size_variance = float(
         self._parse_data('FinishParticleSizeVariance'))
     self.emit_angle = math.radians(float(self._parse_data('angle')))
     self.emit_angle_variance = math.radians(
         float(self._parse_data('angleVariance')))
     self.start_rotation = math.radians(
         float(self._parse_data('rotationStart')))
     self.start_rotation_variance = math.radians(
         float(self._parse_data('rotationStartVariance')))
     self.end_rotation = math.radians(float(
         self._parse_data('rotationEnd')))
     self.end_rotation_variance = math.radians(
         float(self._parse_data('rotationEndVariance')))
     self.speed = float(self._parse_data('speed'))
     self.speed_variance = float(self._parse_data('speedVariance'))
     self.radial_acceleration = float(
         self._parse_data('radialAcceleration'))
     self.radial_acceleration_variance = float(
         self._parse_data('radialAccelVariance'))
     self.tangential_acceleration = float(
         self._parse_data('tangentialAcceleration'))
     self.tangential_acceleration_variance = float(
         self._parse_data('tangentialAccelVariance'))
     self.max_radius = float(self._parse_data('maxRadius'))
     self.max_radius_variance = float(self._parse_data('maxRadiusVariance'))
     self.min_radius = float(self._parse_data('minRadius'))
     self.rotate_per_second = math.radians(
         float(self._parse_data('rotatePerSecond')))
     self.rotate_per_second_variance = math.radians(
         float(self._parse_data('rotatePerSecondVariance')))
     self.start_color = self._parse_color('startColor')
     self.start_color_variance = self._parse_color('startColorVariance')
     self.end_color = self._parse_color('finishColor')
     self.end_color_variance = self._parse_color('finishColorVariance')
     self.blend_factor_source = self._parse_blend('blendFuncSource')
     self.blend_factor_dest = self._parse_blend('blendFuncDestination')
Example #12
def parse_xml_file(fn, verbose = False):
	if verbose:
		print fn
	doc = parse_xml(fn)
	ret = XMLNode(doc.childNodes[0])
	if verbose:
		dump2(ret)
		print
	return ret
Example #13
def svg_to_path(file_obj, file_type=None):
    def complex_to_float(values):
        return np.array([[i.real, i.imag] for i in values])

    def load_line(svg_line):
        points = complex_to_float([svg_line.point(0.0), svg_line.point(1.0)])
        if not starting:
            points[0] = vertices[-1]
        entities.append(Line(np.arange(2) + len(vertices)))
        vertices.extend(points)

    def load_arc(svg_arc):
        points = complex_to_float(
            [svg_arc.start, svg_arc.point(.5), svg_arc.end])
        if not starting:
            points[0] = vertices[-1]
        entities.append(Arc(np.arange(3) + len(vertices)))
        vertices.extend(points)

    def load_quadratic(svg_quadratic):
        points = complex_to_float(
            [svg_quadratic.start, svg_quadratic.control, svg_quadratic.end])
        if not starting:
            points[0] = vertices[-1]
        entities.append(Bezier(np.arange(3) + len(vertices)))
        vertices.extend(points)

    def load_cubic(svg_cubic):
        points = complex_to_float([
            svg_cubic.start, svg_cubic.control1, svg_cubic.control2,
            svg_cubic.end
        ])
        if not starting:
            points[0] = vertices[-1]
        entities.append(Bezier(np.arange(4) + len(vertices)))
        vertices.extend(points)

    # first, we grab all of the path strings from the xml file
    xml = parse_xml(file_obj.read())
    paths = [p.attributes['d'].value for p in xml.getElementsByTagName('path')]

    entities = deque()
    vertices = deque()
    loaders = {
        'Arc': load_arc,
        'Line': load_line,
        'CubicBezier': load_cubic,
        'QuadraticBezier': load_quadratic
    }

    for svg_string in paths:
        starting = True
        for svg_entity in parse_path(svg_string):
            loaders[svg_entity.__class__.__name__](svg_entity)

    return {'entities': np.array(entities), 'vertices': np.array(vertices)}
Example #14
def read_feeds_from_opml(opml_file):
    opml_doc=parse_xml(opml_file)
    outlines=opml_doc.getElementsByTagName('outline')
    for outline in outlines:
        if outline.hasAttribute('type') and outline.hasAttribute('xmlUrl'):
            if outline.getAttribute('type') == 'rss':
                url=outline.getAttribute('xmlUrl')
                text=None
                if outline.hasAttribute('text'):
                    text=outline.getAttribute('text')
                yield (url, text)
Example #15
    def _get_reply(self, url, params={}):
        params['token'] = self._token

        f = urlopen(url, urlencode(params))
        dom = parse_xml(f)
        reply = dom.getElementsByTagName('reply')[0]
        status = get_text_by_tag(reply, 'status')
        if status != 'OK':
            raise Exception("Authentication failure")

        return reply
Example #16
File: common.py Project: eddiezab/Kvasir
    def _get_reply(self, url, params={}):
        params['token'] = self._token

        f = urlopen(url, urlencode(params))
        dom = parse_xml(f)
        reply = dom.getElementsByTagName('reply')[0]
        status = get_text_by_tag(reply, 'status')
        if status != 'OK':
            raise Exception("Authentication failure")

        return reply
Example #17
def parse_pspec(location):
    spec = parse_xml(location)
    source_name = get_value(gall(spec, "Source")[0], "Name")
    updates = gall(gall(spec, "History")[0], "Update")
    highest_release = 0
    version = None
    for update in updates:
        release = int(get_attr(update, "release"))
        if release > highest_release:
            highest_release = release
            version = get_value(update, "Version")
    return (source_name, version)
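
Example #17 (repeated verbatim as Example #19 below) leans on three helpers that are not shown in this listing: gall, get_value and get_attr. The sketch below is a purely illustrative guess at minidom-based equivalents; the real helpers live in the surrounding project and may behave differently.

# Hypothetical helpers, sketched for illustration only; not project code.
def gall(node, tag):
    # all descendant elements with the given tag name
    return node.getElementsByTagName(tag)

def get_attr(node, name):
    # value of an attribute on an element
    return node.getAttribute(name)

def get_value(node, tag):
    # text content of the first <tag> descendant, if any
    children = node.getElementsByTagName(tag)
    return children[0].firstChild.nodeValue if children else None
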
Example #18
def svg_to_path(file_obj, file_type=None):
    def complex_to_float(values):
        return np.array([[i.real, i.imag] for i in values])

    def load_line(svg_line):
        points = complex_to_float([svg_line.point(0.0),
                                   svg_line.point(1.0)])
        if not starting: points[0] = vertices[-1]
        entities.append(Line(np.arange(2)+len(vertices)))
        vertices.extend(points)

    def load_arc(svg_arc):
        points = complex_to_float([svg_arc.start, 
                                   svg_arc.point(.5), 
                                   svg_arc.end])
        if not starting: points[0] = vertices[-1]
        entities.append(Arc(np.arange(3)+len(vertices)))
        vertices.extend(points)
    def load_quadratic(svg_quadratic):
        points = complex_to_float([svg_quadratic.start, 
                                   svg_quadratic.control, 
                                   svg_quadratic.end])
        if not starting: points[0] = vertices[-1]
        entities.append(Bezier(np.arange(3)+len(vertices)))
        vertices.extend(points)
    def load_cubic(svg_cubic):
        points = complex_to_float([svg_cubic.start, 
                                   svg_cubic.control1, 
                                   svg_cubic.control2, 
                                   svg_cubic.end])
        if not starting: points[0] = vertices[-1]
        entities.append(Bezier(np.arange(4)+len(vertices)))
        vertices.extend(points)
    
    # first, we grab all of the path strings from the xml file
    xml   = parse_xml(file_obj.read())
    paths = [p.attributes['d'].value for p in xml.getElementsByTagName('path')]

    entities = deque()
    vertices = deque()  
    loaders  = {'Arc'             : load_arc,
                'Line'            : load_line,
                'CubicBezier'     : load_cubic,
                'QuadraticBezier' : load_quadratic}

    for svg_string in paths:
        starting = True
        for svg_entity in parse_path(svg_string):
            loaders[svg_entity.__class__.__name__](svg_entity)
            #starting = False

    return {'entities' : np.array(entities),
            'vertices' : np.array(vertices)}
Example #19
def parse_pspec(location):
    spec = parse_xml(location)
    source_name = get_value(gall(spec, "Source")[0], "Name")
    updates = gall(gall(spec, "History")[0], "Update")
    highest_release = 0
    version = None
    for update in updates:
        release = int(get_attr(update, "release"))
        if release > highest_release:
            highest_release = release
            version = get_value(update, "Version")
    return (source_name, version)
Example #20
def DoMain(argv):
    """Main function called from the command line.

  Args:
    argv: Command line parameters, without the program name.
  Returns:
    If --fonts_dir is specified, a newline-separated list of font files.
  """
    parser = argparse.ArgumentParser()
    parser.add_argument('-i',
                        '--input_xml',
                        required=True,
                        help='path to fonts.xml to be filtered')
    parser.add_argument('-o',
                        '--output_xml',
                        help='path to write a filtered XML file')
    parser.add_argument('-f',
                        '--fonts_dir',
                        help='prints a list of font files prefixed by the '
                        'specified FONTS_DIR (for GYP inputs)')
    parser.add_argument('package_categories', nargs=argparse.REMAINDER)
    options = parser.parse_args(argv)

    if not (options.output_xml or options.fonts_dir):
        parser.error('No output, specify output_xml, fonts_dir, or both')

    # Make a dictionary mapping package name to category.
    # E.g. ['sans-serif=1', 'serif=2'] becomes {'sans-serif':'1', 'serif':'2'}
    package_categories = dict(
        pkg.split('=') for pkg in options.package_categories)

    fonts_doc = parse_xml(options.input_xml)

    kept_families = []
    kept_fonts = []
    filter_function = partial(SelectFont,
                              package_categories=package_categories)
    FilterFonts(filter_function, fonts_doc, kept_families, kept_fonts)

    if options.output_xml:
        with open(options.output_xml, 'w') as f:
            f.write(fonts_doc.toprettyxml(indent='  '))

    if options.fonts_dir:
        # Join with '/' rather than os.path.join() because this is for GYP, which
        # even on Windows wants slashes rather than backslashes.
        # Make a set for unique fonts since .ttc files may be listed more than once.
        result = [
            '/'.join((options.fonts_dir, font))
            for font in sorted(set(kept_fonts))
        ]
        return '\n'.join(result)
Example #21
File: particles.py Project: 2xR/legacy
    def _parse_config(self, config):
        self._config = parse_xml(config)

        texture_path = self._parse_data('texture', 'name')
        config_dir_path = os.path.dirname(os.path.abspath(config))
        path = os.path.join(config_dir_path, texture_path)
        if os.path.exists(path):
            self.texture_path = path
        else:
            self.texture_path = texture_path

        parse_data = self._parse_data
        self.texture = Image(self.texture_path).texture
        self.emitter_x = float(parse_data('sourcePosition', 'x'))
        self.emitter_y = float(parse_data('sourcePosition', 'y'))
        self.emitter_x_variance = float(parse_data('sourcePositionVariance', 'x'))
        self.emitter_y_variance = float(parse_data('sourcePositionVariance', 'y'))
        self.gravity_x = float(parse_data('gravity', 'x'))
        self.gravity_y = float(parse_data('gravity', 'y'))
        self.emitter_type = int(parse_data('emitterType'))
        self.max_num_particles = int(parse_data('maxParticles'))
        self.life_span = max(0.01, float(parse_data('particleLifeSpan')))
        self.life_span_variance = float(parse_data('particleLifespanVariance'))
        self.start_size = float(parse_data('startParticleSize'))
        self.start_size_variance = float(parse_data('startParticleSizeVariance'))
        self.end_size = float(parse_data('finishParticleSize'))
        self.end_size_variance = float(parse_data('FinishParticleSizeVariance'))
        self.emit_angle = radians(float(parse_data('angle')))
        self.emit_angle_variance = radians(float(parse_data('angleVariance')))
        self.start_rotation = radians(float(parse_data('rotationStart')))
        self.start_rotation_variance = radians(float(parse_data('rotationStartVariance')))
        self.end_rotation = radians(float(parse_data('rotationEnd')))
        self.end_rotation_variance = radians(float(parse_data('rotationEndVariance')))
        self.speed = float(parse_data('speed'))
        self.speed_variance = float(parse_data('speedVariance'))
        self.radial_acceleration = float(parse_data('radialAcceleration'))
        self.radial_acceleration_variance = float(parse_data('radialAccelVariance'))
        self.tangential_acceleration = float(parse_data('tangentialAcceleration'))
        self.tangential_acceleration_variance = float(parse_data('tangentialAccelVariance'))
        self.max_radius = float(parse_data('maxRadius'))
        self.max_radius_variance = float(parse_data('maxRadiusVariance'))
        self.min_radius = float(parse_data('minRadius'))
        self.rotate_per_second = radians(float(parse_data('rotatePerSecond')))
        self.rotate_per_second_variance = radians(float(parse_data('rotatePerSecondVariance')))
        self.start_color = self._parse_color('startColor')
        self.start_color_variance = self._parse_color('startColorVariance')
        self.end_color = self._parse_color('finishColor')
        self.end_color_variance = self._parse_color('finishColorVariance')
        self.blend_factor_source = self._parse_blend('blendFuncSource')
        self.blend_factor_dest = self._parse_blend('blendFuncDestination')
Example #22
def parseResponse(xml):
    doc = parse_xml(xml)
    #doc = Sax2.FromXml(xml)
    rspChildren = doc.getElementsByTagName("rsp")
    if len(rspChildren):
        rsp = rspChildren[0]
        if rsp.attributes.getNamedItem("stat").value == "fail":
            return Error().fromElement(rsp.getElementsByTagName("error")[0])
        out_list = []
        for x in rsp.childNodes:
            if x.nodeType == ELEMENT_NODE: 
                out_list.append(fromElement(x))
        return out_list
    raise Exception("Nothing found?")
Example #23
    def _get_reply(self, url, params={}):
        params["token"] = self._token

        f = urlopen(url, urlencode(params))
        dom = parse_xml(f)
        reply = dom.getElementsByTagName("reply")[0]
        status = None
        for node in reply.childNodes:
            if node.nodeName == "status":
                status = node.firstChild.nodeValue
        if status != "OK":
            raise Exception("Authentication failure")

        return reply
Example #24
    def _get_reply(self, url, params={}):
        params['token'] = self._token

        f = urlopen(url, urlencode(params))
        dom = parse_xml(f)
        reply = dom.getElementsByTagName('reply')[0]
        status = None
        for node in reply.childNodes:
            if node.nodeName == 'status':
                status = node.firstChild.nodeValue
        if status != 'OK':
            raise Exception("Authentication failure")

        return reply
Example #25
def set_classpath_from_eclipse():
    from xml.dom.minidom import parse as parse_xml
    eclipse_classpath = path_resolv.Path(".classpath")
    if not eclipse_classpath.exists():
        print("WARNING - please run with \".classpath\" in $(pwd), attempting to resolve...")
        eclipse_classpath = path_resolv.resolve(".classpath")
        assert eclipse_classpath.exists()
    doc = parse_xml(eclipse_classpath)
    entries = doc.getElementsByTagName("classpathentry")
    path_elts = [os.environ["CLASSPATH"]]
    for entry in entries:
        if entry.getAttribute("kind") == "lib":
            path_elts.append(path_resolv.resolve(entry.getAttribute("path")))
    os.environ["CLASSPATH"] = path_resolv.Path.pathjoin(*path_elts)
Example #26
    def _parse_config(self, config):
        self._config = parse_xml(config)

        texture_path = self._parse_data("texture", "name")
        config_dir_path = os.path.dirname(os.path.abspath(config))
        path = os.path.join(config_dir_path, texture_path)
        if os.path.exists(path):
            self.texture_path = path
        else:
            self.texture_path = texture_path

        self.texture = Image(self.texture_path).texture
        # self.emitter_x = float(self._parse_data('sourcePosition', 'x'))
        # self.emitter_y = float(self._parse_data('sourcePosition', 'y'))
        self.emitter_x_variance = float(self._parse_data("sourcePositionVariance", "x"))
        self.emitter_y_variance = float(self._parse_data("sourcePositionVariance", "y"))
        self.gravity_x = float(self._parse_data("gravity", "x"))
        self.gravity_y = float(self._parse_data("gravity", "y"))
        self.emitter_type = int(self._parse_data("emitterType"))
        self.max_num_particles = int(self._parse_data("maxParticles"))
        self.life_span = max(0.01, float(self._parse_data("particleLifeSpan")))
        self.life_span_variance = float(self._parse_data("particleLifespanVariance"))
        self.start_size = float(self._parse_data("startParticleSize"))
        self.start_size_variance = float(self._parse_data("startParticleSizeVariance"))
        self.end_size = float(self._parse_data("finishParticleSize"))
        self.end_size_variance = float(self._parse_data("FinishParticleSizeVariance"))
        self.emit_angle = math.radians(float(self._parse_data("angle")))
        self.emit_angle_variance = math.radians(float(self._parse_data("angleVariance")))
        self.start_rotation = math.radians(float(self._parse_data("rotationStart")))
        self.start_rotation_variance = math.radians(float(self._parse_data("rotationStartVariance")))
        self.end_rotation = math.radians(float(self._parse_data("rotationEnd")))
        self.end_rotation_variance = math.radians(float(self._parse_data("rotationEndVariance")))
        self.speed = float(self._parse_data("speed"))
        self.speed_variance = float(self._parse_data("speedVariance"))
        self.radial_acceleration = float(self._parse_data("radialAcceleration"))
        self.radial_acceleration_variance = float(self._parse_data("radialAccelVariance"))
        self.tangential_acceleration = float(self._parse_data("tangentialAcceleration"))
        self.tangential_acceleration_variance = float(self._parse_data("tangentialAccelVariance"))
        self.max_radius = float(self._parse_data("maxRadius"))
        self.max_radius_variance = float(self._parse_data("maxRadiusVariance"))
        self.min_radius = float(self._parse_data("minRadius"))
        self.rotate_per_second = math.radians(float(self._parse_data("rotatePerSecond")))
        self.rotate_per_second_variance = math.radians(float(self._parse_data("rotatePerSecondVariance")))
        self.start_color = self._parse_color("startColor")
        self.start_color_variance = self._parse_color("startColorVariance")
        self.end_color = self._parse_color("finishColor")
        self.end_color_variance = self._parse_color("finishColorVariance")
        self.blend_factor_source = self._parse_blend("blendFuncSource")
        self.blend_factor_dest = self._parse_blend("blendFuncDestination")
Example #27
 def _parse_schema_file(self, schema_file):
     """
     Parse the given .schema or .schema.in file. Return True if successful
     or false if the file can't be accessed.
     
     In case the file can't be parsed correctly, raise BadSchemaFileError.
     """
     
     from xml.dom.minidom import parse as parse_xml
     from xml.parsers.expat import ExpatError
     
     try:
         content = parse_xml(schema_file)
     except IOError:
         return False
     except ExpatError, e:
         raise BadSchemaFileError, e.message
Example #28
    def load_entity(self, entity):
        manager = type(entity).objects
        url = self._build_entity_url(manager, entity.id)
        dom = parse_xml(urllib.urlopen(url).read())

        xml_rest_node = dom.getElementsByTagName('rest').item(0)

        xml_entity_node = xml_rest_node.getElementsByTagName(
            manager.code).item(0)

        for attribute in type(entity).__dict__:
            if attribute.startswith('__'):
                continue
            subclass = type(entity).__dict__[attribute]
            if issubclass(subclass.__class__, field.Field):
                type(entity).__dict__[attribute].add_attribute(
                    entity, xml_entity_node, attribute)
Example #29
    def __download_xsd(self, url):
        is_root = self.xsd_url == url

        if url not in self.xsd_map:
            xsd_local_name = self.__get_local_name(url)
            xsd_local_path = self.__get_local_path(xsd_local_name, is_root)

            dom = parse_xml(self.__download_url(url))

            self.xsd_map[url] = xsd_local_name
            self.__resolve_imports(dom, is_root)
            self.__resolve_includes(dom, is_root)

            with open(xsd_local_path, 'wb') as f:
                f.write(dom.toxml('UTF-8'))

        return self.xsd_map[url]
Example #30
File: server.py Project: BloatIt/bloatit
 def load_entity(self,entity):
     manager = type(entity).objects
     url = self._build_entity_url(manager, entity.id);
     dom = parse_xml(urllib.urlopen(url).read())
     
     xml_rest_node = dom.getElementsByTagName('rest').item(0)
     
     
     xml_entity_node = xml_rest_node.getElementsByTagName(manager.code).item(0)
     
     
     
     for attribute in type(entity).__dict__:
         if attribute.startswith('__'):
             continue
         subclass = type(entity).__dict__[attribute]
         if issubclass(subclass.__class__ , field.Field):
             type(entity).__dict__[attribute].add_attribute(entity, xml_entity_node, attribute)
Example #31
    def get_weather(self, location):

        handle = urllib.urlopen('http://api.wunderground.com/auto/wui/geo/WXCurrentObXML/index.xml?query={0}'.format(location))
        data = handle.read()
        handle.close()
        
        dom = parse_xml(data)

        return {
            'weather': dom.getElementsByTagName('weather')[0].childNodes[0].data,
            'temperature': dom.getElementsByTagName('temp_c')[0].childNodes[0].data,
            'humidity': dom.getElementsByTagName('relative_humidity')[0].childNodes[0].data.replace("%", ""),
            'wind_direction': dom.getElementsByTagName('wind_dir')[0].childNodes[0].data,
            'wind_speed': str(round(float(dom.getElementsByTagName('wind_mph')[0].childNodes[0].data) * 1.609, 1)),
            'pressure': dom.getElementsByTagName('pressure_mb')[0].childNodes[0].data,
            'visibility': dom.getElementsByTagName('visibility_km')[0].childNodes[0].data,
            'icon': dom.getElementsByTagName('icon')[0].childNodes[0].data
            }
Example #32
def svg_to_path(file_obj, file_type=None):
    """
    Load an SVG file into a Path2D object.

    Parameters
    -----------
    file_obj: open file object
    file_type: unused

    Returns
    -----------
    loaded: dict with kwargs for Path2D constructor
    """
    # first, we grab all of the path strings from the xml file
    xml = parse_xml(file_obj.read())
    paths = [p.attributes['d'].value for p in xml.getElementsByTagName('path')]

    return _svg_path_convert(paths)
Example #33
    def _do_parse(self, input_string, table):
        """Parse an XML object and return a table data.

        Args:
          input_string (string): An XML string.
          table (Table): An empty instance of class `Table`.

        Returns:
          None
        """

        dom = parse_xml(input_string)

        # Handle the <struct> element in the XML tree.
        node = dom.getElementsByTagName('struct')[0]
        cpu_addr, record_size, record_num = XmlParser.get_struct_info(node)
        members = [XmlParser.get_member_info(i)
                   for i in node.getElementsByTagName('member')]

        title = dom.getElementsByTagName('rom')[0].getAttribute('name')

        # Read ROM image and list the records.
        with RomImage(title) as mem:
            mapper = make_mapper(header=get_snes_header(mem))

            # Locate the address of the array on ROM image.
            mem.seek(mapper.from_cpu(cpu_addr))

            # Obtain each byte sequence.
            rows = []
            for _ in range(record_num):
                byte_string = mem.read(record_size)
                rows.append(
                    [i.process(byte_string) for i in members])

        table.cpu_addr = cpu_addr
        table.record_size = record_size
        table.columns = members
        table.rows = rows

        assert len(rows) == record_num
Example #34
def strip_junitxml_time_attrs(xmlfile, dest=None):
    """Read XMLFILE, remove all time= attributes from <testsuite> and
       <testcase> elements, and write it back out to DEST (or to XMLFILE
       if DEST is not specified)."""
    with parse_xml(xmlfile) as doc:
        for el in itertools.chain(doc.getElementsByTagName("testsuite"),
                                  doc.getElementsByTagName("testcase")):
            for attr in ["time", "timestamp"]:
                try:
                    el.removeAttribute(attr)
                except DOMNotFoundError:
                    pass

        newxml = doc.documentElement.toprettyxml(indent=" ")

    if dest is None:
        dest = xmlfile
    if isinstance(dest, str):
        with open(dest, "rt", encoding="utf-8") as w:
            w.write(newxml)
    else:
        dest.write(newxml)
Example #35
 def load_db(self):
     ''' Load the database '''
     xml = parse_xml(self.db_file)
     entries = xml.getElementsByTagName("entry")
     for entry in entries:
         vuln = Vulnerability()
         vuln.cve_id = get_attr(entry, "id")
         # Find the vulnerable software for this cve_id
         affected_xml = gall(entry, "vuln:vulnerable-software-list")
         if len(affected_xml) == 0:
             continue
         affected = affected_xml[0]
         vuln.description = get_value(entry, "vuln:summary")
         for link in gall(entry, "vuln:references"):
             type_ref = get_attr(link, "reference_type")
             href = get_attr(gall(link, "vuln:reference")[0], "href")
             source = get_value(link, "vuln:source")
             vuln.web_links.append("%s [%s]  - %s" %
                                   (source, type_ref, href))
         for product in gall(affected, "vuln:product"):
             value = product.firstChild.nodeValue
             # Parse the field
             fld = value.split("cpe:/")
             #print value
             fields = fld[1].split(":")
             vendor = fields[1]
             product = fields[2]
             version = None
             try:
                 version = fields[3]
                 vuln.affected_versions.append(version)
             except:
                 pass
             if vuln.vendor is None:
                 vuln.vendor = vendor
             if vuln.product is None:
                 vuln.product = product
             #print vendor, product, version
         self.vulnerabilities.append(vuln)
Example #36
 def load_db(self):
     ''' Load the database '''
     xml = parse_xml(self.db_file)
     entries = xml.getElementsByTagName("entry")
     for entry in entries:
         vuln = Vulnerability()
         vuln.cve_id = get_attr(entry, "id")
         # Find the vulnerable software for this cve_id
         affected_xml = gall(entry, "vuln:vulnerable-software-list")
         if len(affected_xml) == 0:
             continue
         affected = affected_xml[0]
         vuln.description = get_value(entry, "vuln:summary")
         for link in gall(entry, "vuln:references"):
             type_ref = get_attr(link, "reference_type")
             href = get_attr(gall(link, "vuln:reference")[0], "href")
             source = get_value(link, "vuln:source")
             vuln.web_links.append("%s [%s]  - %s" %
                                   (source, type_ref, href))
         for product in gall(affected, "vuln:product"):
             value = product.firstChild.nodeValue
             # Parse the field
             fld = value.split("cpe:/")
             #print value
             fields = fld[1].split(":")
             vendor = fields[1]
             product = fields[2]
             version = None
             try:
                 version = fields[3]
                 vuln.affected_versions.append(version)
             except:
                 pass
             if vuln.vendor is None:
                 vuln.vendor = vendor
             if vuln.product is None:
                 vuln.product = product
             #print vendor, product, version
         self.vulnerabilities.append(vuln)
Example #37
        def timedtext_to_srt(timedtext):
            def _format_srt_time(millisec):
                sec, milli = divmod(millisec, 1000)
                m, s = divmod(int(sec), 60)
                h, m = divmod(m, 60)
                return "%02d:%02d:%02d,%s" % (h, m, s, milli)

            srt = ""
            dom = parse_xml(timedtext)
            body = dom.getElementsByTagName("body")[0]
            paras = body.getElementsByTagName("p")
            for i, para in enumerate(paras):
                srt += smart_text(i+1) + "\n"
                srt += _format_srt_time(int(para.attributes['t'].value)) + ' --> ' + \
                       _format_srt_time(int(para.attributes['t'].value) + int(para.attributes['d'].value)) + "\n"
                for child in para.childNodes:
                    if child.nodeName == 'br':
                        srt += "\n"
                    elif child.nodeName == '#text':
                        srt += smart_text(child.data)
                    srt += "\n\n"

            return srt
Example #38
 def __init__(self, schema, ns, name_query, prefixes):
     self.schema_dom = parse_xml(schema)
     self.ns = ns
     self.uri = name_query.params.li[0][1]
     self.prefixes = list(prefixes.items())
Example #39
VCardURL = namedtuple('VCardURL', 'name url')

username = os.environ.get('GUSER', None)
password = os.environ.get('GPASS', None)

base_url = "https://google.com"
top_url  = "%s/m8/carddav/principals/__uids__/%s/lists/default/" % (base_url, username)

response = requests.request('PROPFIND', top_url, auth=(username, password))

if response.status_code != 207:
    print "ERROR: Could not fetch list of vcards."
    print "HEADERS: ",
    PP(response.headers)

dom = parse_xml(response.text)
response_elements = dom.getElementsByTagName('d:response')

def is_card_entry(el):
    ct = el.getElementsByTagName('d:getcontenttype')
    return len(ct) > 0 and ct[0].hasChildNodes() and ct[0].childNodes[0].data == 'text/vcard'

vcards = [
    VCardURL(name, "%s%s" % (top_url, name))
        for name in [
            el.getElementsByTagName('d:displayname')[0].childNodes[0].data
            for el in filter(is_card_entry, response_elements)
        ]
]

for vcard in vcards:
Example #40
username = os.environ.get('GUSER', None)
password = os.environ.get('GPASS', None)

base_url = "https://google.com"
top_url = "%s/m8/carddav/principals/__uids__/%s/lists/default/" % (base_url,
                                                                   username)

response = requests.request('PROPFIND', top_url, auth=(username, password))

if response.status_code != 207:
    print "ERROR: Could not fetch list of vcards."
    print "HEADERS: ",
    PP(response.headers)

dom = parse_xml(response.text)
response_elements = dom.getElementsByTagName('d:response')


def is_card_entry(el):
    ct = el.getElementsByTagName('d:getcontenttype')
    return len(ct) > 0 and ct[0].hasChildNodes(
    ) and ct[0].childNodes[0].data == 'text/vcard'


vcards = [
    VCardURL(name, "%s%s" % (top_url, name)) for name in [
        el.getElementsByTagName('d:displayname')[0].childNodes[0].data
        for el in filter(is_card_entry, response_elements)
    ]
]
Example #41
Thumbnail: /assets/pictures/thumbs/thumb-bitcoinupdate.png
*/

%(title)s
===
%(description)s

[Link zur aktuellen Folge](%(href)s)
"""

rss_url = "http://bitcoinupdate.com/podcast.php"
rss_data = req.get(rss_url)
if rss_data.status_code != 200:
    raise Exception("status code not 200: %s" % rss_data.status_code)

rss = parse_xml(rss_data.text.encode("utf8"))

def get_val(el, what):
    n = el.getElementsByTagName(what)[0]
    return n.childNodes[0].data

most_recent_date = None
title, description, href = None, None, None

items = rss.getElementsByTagName("item")
for item in items:
    pubDate = get_val(item, "pubDate")
    t = get_val(item, "title")
    d = get_val(item, "description")
    h = get_val(item, "link")
Example #42
def svg_to_path(file_obj, file_type=None):
    '''
    Load an SVG file into a Path2D object.

    Parameters
    -----------
    file_obj: open file object
    file_type: unused

    Returns
    -----------
    loaded: dict with kwargs for Path2D constructor
    '''
    def complex_to_float(values):
        return np.array([[i.real, i.imag] for i in values])

    def load_line(svg_line):
        points = complex_to_float([svg_line.point(0.0), svg_line.point(1.0)])
        if not starting:
            points[0] = vertices[-1]
        entities.append(entities_mod.Line(np.arange(2) + len(vertices)))
        vertices.extend(points)

    def load_arc(svg_arc):
        points = complex_to_float(
            [svg_arc.start, svg_arc.point(.5), svg_arc.end])
        if not starting:
            points[0] = vertices[-1]
        entities.append(entities_mod.Arc(np.arange(3) + len(vertices)))
        vertices.extend(points)

    def load_quadratic(svg_quadratic):
        points = complex_to_float(
            [svg_quadratic.start, svg_quadratic.control, svg_quadratic.end])
        if not starting:
            points[0] = vertices[-1]
        entities.append(entities_mod.Bezier(np.arange(3) + len(vertices)))
        vertices.extend(points)

    def load_cubic(svg_cubic):
        points = complex_to_float([
            svg_cubic.start, svg_cubic.control1, svg_cubic.control2,
            svg_cubic.end
        ])
        if not starting:
            points[0] = vertices[-1]
        entities.append(entities_mod.Bezier(np.arange(4) + len(vertices)))
        vertices.extend(points)

    # first, we grab all of the path strings from the xml file
    xml = parse_xml(file_obj.read())
    paths = [p.attributes['d'].value for p in xml.getElementsByTagName('path')]

    entities = deque()
    vertices = deque()
    loaders = {
        'Arc': load_arc,
        'Line': load_line,
        'CubicBezier': load_cubic,
        'QuadraticBezier': load_quadratic
    }

    for svg_string in paths:
        starting = True
        for svg_entity in parse_path(svg_string):
            loaders[svg_entity.__class__.__name__](svg_entity)

    loaded = {'entities': np.array(entities), 'vertices': np.array(vertices)}
    return loaded
Example #43
password = os.environ.get('GPASS', None)

url = "https://google.com/m8/carddav/principals/__uids__/%s/lists/default/" % username
data = """
<?xml version="1.0" encoding="utf-8" ?>
<card:addressbook-query xmlns:D="DAV:"
                  xmlns:card="urn:ietf:params:xml:ns:carddav">
  <D:prop>
    <D:getetag/>
    <card:address-data>
      <card:prop name="UID"/>
    </card:address-data>
  </D:prop>
  <card:filter test="anyof">
    <card:prop-filter name="EMAIL">
      <card:text-match collation="i;unicode-casemap" match-type="contains">april</card:text-match>
    </card:prop-filter>
  </card:filter>
</card:addressbook-query>
"""
response = requests.request('PROPFIND', url, auth=(username, password))

print "Status Code: %s" % (response.status_code)

print "\n== HEADERS =="
for item in response.headers.items():
    print "%s: %s" % item

print "\n== BODY =="
print parse_xml(response.content).toprettyxml()
Example #44
    def _parse_config(self, config):
        self._config = parse_xml(config)

        texture_path = self._parse_data('texture', 'name')
        config_dir_path = os.path.dirname(os.path.abspath(config))
        path = os.path.join(config_dir_path, texture_path)
        if os.path.exists(path):
            self.texture_path = path
        else:
            self.texture_path = texture_path

        self.texture = Image(self.texture_path).texture
        #self.emitter_x = 0.0
        #self.emitter_y = 0.0
        try_x=None
        try_y=None
        if self._has_value('sourcePosition','x'):
            try_x=self._parse_data('sourcePosition', 'x')
            try_y=self._parse_data('sourcePosition', 'y')
        #else ignore -- everything is ok (sourcePosition not present in later versions of pex)
        #if try_x is not None:
            #self.emitter_x = float(try_x)
        #if try_y is not None:
            #self.emitter_y = float(try_y)
        if try_x is not None and try_y is not None:
            self.pos = float(try_x), float(try_y)
        self.emitter_x_variance = float(self._parse_data('sourcePositionVariance', 'x'))
        self.emitter_y_variance = float(self._parse_data('sourcePositionVariance', 'y'))
        self.gravity_x = float(self._parse_data('gravity', 'x'))
        self.gravity_y = float(self._parse_data('gravity', 'y'))
        self.emitter_type = int(self._parse_data('emitterType'))
        self.max_num_particles = int(self._parse_data('maxParticles'))
        self.life_span = max(0.01, float(self._parse_data('particleLifeSpan')))
        self.life_span_variance = float(self._parse_data('particleLifespanVariance'))
        self.start_size = float(self._parse_data('startParticleSize'))
        self.start_size_variance = float(self._parse_data('startParticleSizeVariance'))
        self.end_size = float(self._parse_data('finishParticleSize'))
        self.end_size_variance = float(self._parse_data('FinishParticleSizeVariance'))
        self.emit_angle = math.radians(float(self._parse_data('angle')))
        self.emit_angle_variance = math.radians(float(self._parse_data('angleVariance')))
        self.start_rotation = math.radians(float(self._parse_data('rotationStart')))
        self.start_rotation_variance = math.radians(float(self._parse_data('rotationStartVariance')))
        self.end_rotation = math.radians(float(self._parse_data('rotationEnd')))
        self.end_rotation_variance = math.radians(float(self._parse_data('rotationEndVariance')))
        self.speed = float(self._parse_data('speed'))
        self.speed_variance = float(self._parse_data('speedVariance'))
        self.radial_acceleration = float(self._parse_data('radialAcceleration'))
        self.radial_acceleration_variance = float(self._parse_data('radialAccelVariance'))
        self.tangential_acceleration = float(self._parse_data('tangentialAcceleration'))
        self.tangential_acceleration_variance = float(self._parse_data('tangentialAccelVariance'))
        self.max_radius = float(self._parse_data('maxRadius'))
        self.max_radius_variance = float(self._parse_data('maxRadiusVariance'))
        self.min_radius = float(self._parse_data('minRadius'))
        self.rotate_per_second = math.radians(float(self._parse_data('rotatePerSecond')))
        self.rotate_per_second_variance = math.radians(float(self._parse_data('rotatePerSecondVariance')))
        self.start_color = self._parse_color('startColor')
        self.start_color_variance = self._parse_color('startColorVariance')
        self.end_color = self._parse_color('finishColor')
        self.end_color_variance = self._parse_color('finishColorVariance')
        self.blend_factor_source = self._parse_blend('blendFuncSource')
        self.blend_factor_dest = self._parse_blend('blendFuncDestination')
Example #45
def parse(xmlf1_file):
    document = parse_xml(xmlf1_file).documentElement
    return MMOS(document.getElementsByTagName("MMO"))
Example #46
 def __init__(self, schema, ns, name_query, prefixes):
     self.schema_dom = parse_xml(schema)
     self.ns = ns
     self.uri = name_query.params.li[0][1]
     self.prefixes = list(prefixes.items())
Example #47
def process_rss_feed(run_id,url):

    """
    Get the RSS feed from the web, and return a dict of its contents.
    """

    successful = True

    # download the RSS feed and parse to a list of dom objects
    xml_string = urllib.urlopen(url).read()
    xml_dom = parse_xml(xml_string)
    items = xml_dom.getElementsByTagName('item')

    print "Parsing {0} items ...".format(len(items))

    # process each item
    new_dispatch_guids = []
    new_dispatch_count = 0
    for item in items:
        
        # pre-process xml 
        xml = item.toxml()
        xml = xml.replace('<item>','').replace('</item>','')
        xml = xml.replace('\t','').replace('\n\n','\n').replace('\r','')
        xml = "{0}\n{1}\n{2}".format(_geoheader,xml,_geofooter)

        # convert to dom object for querying
        item_dom = parse_xml(xml)

        # generate fields that will be used to result dict
        title = text_from_tag('title',item_dom)
        link = text_from_tag('link',item_dom)
        pub_date_time = text_from_tag('pubDate',item_dom) # .replace('-','-0') 
        description = text_from_tag('description',item_dom)
        source_lat = text_from_tag('geo:lat',item_dom)
        source_lng = text_from_tag('geo:long',item_dom)
      
        # parse fields
        dispatch_text = title.split(' at ')[0].strip()
        short_address =  title.split(' at ')[1].strip()
        dispatch_datetime = datetime.datetime.strptime(pub_date_time[:-6],'%a, %d %b %Y %X')
        status_text = description.split(',')[0].split('Status:')[1].strip()
        guid = description.split(',')[1].split('ID:')[1].strip()

        #print "Working on: [{0}] : {2} : '{1}'".format(guid, title, status_text)

        # add guid to list of new guids seen
        new_dispatch_guids.append(guid)

        # see if the dispatch already exists within the database, and if it doesn't add it
        exists = Dispatches.check_exists(DBSession, guid, status_text)
        if not exists:

            print "GUID: {0}, Status: {1} - does not exist within database, adding.".format(guid, status_text)

            geocode_lat,geocode_lng,full_address,geocode_successful = geocode_address(short_address)
            #geocode_lat = 0; geocode_lng = 0; full_address = ''; geocode_successful = False 

            # need to check to make sure that we geo-coded correctly.  This is a sanity check
            # to make sure we are within monroe county.
            if geocode_successful == True \
                    and (geocode_lat > _geofence['latmax'] \
                    or geocode_lat < _geofence['latmin'] \
                    or geocode_lng < _geofence['lngmin'] \
                    or geocode_lng > _geofence['lngmax']):
                #geocode_lat = 0; geocode_lng = 0; full_address = '';
                geocode_successful = False;

            # create the dispatch in the database
            dispatch = Dispatches.add_dispatch(
                session = DBSession,
                run_id = run_id,
                status_text = status_text,
                short_address = short_address,
                guid = guid,
                dispatch_text = dispatch_text,
                dispatch_datetime = dispatch_datetime,
                source_lat = source_lat,
                source_lng = source_lng,
                geocode_lat = geocode_lat,
                geocode_lng = geocode_lng,
                full_address = full_address,
                geocode_successful = geocode_successful,
            )

            # inc our count of new dispatches
            new_dispatch_count += 1

    # get list of current dispatches (not closed)
    current_dispatch_guids = CurrentDispatches.get_current_dispatch_guids(DBSession)

    #print "Current Dispatches:"
    #for g in current_dispatch_guids:
    #    print "\t%s" % g

    # keep track of the guids we have just removed so we don't re-add them
    removed_guids = []

    # keep track of the number of dispatches that we close
    closed_dispatch_count = 0

    # check to see if there are any guids that are in the current dispatches list
    # but are not within the RSS feed, and close them
    for current_dispatch_guid in current_dispatch_guids:
        if not current_dispatch_guid in new_dispatch_guids:
            CurrentDispatches.remove_current_dispatch(
                session = DBSession,
                guid = current_dispatch_guid,
            )
            Dispatches.close_dispatch(
                session = DBSession,
                run_id = run_id,
                guid = current_dispatch_guid,
            )
            removed_guids.append(current_dispatch_guid)
            closed_dispatch_count += 1
            print "Removed '{0}' from the current dispatch list".format(current_dispatch_guid)

    # see if there are any new dispatches that need to be added to the current 
    # dispatches list, and add them.
    for new_dispatch_guid in new_dispatch_guids:
        if not new_dispatch_guid in current_dispatch_guids and \
                not new_dispatch_guid in removed_guids:
            CurrentDispatches.add_current_dispatch(
                session = DBSession,
                guid = new_dispatch_guid,
            )
            print "Added '{0}' to the current dispatch list".format(new_dispatch_guid)

    return new_dispatch_count, closed_dispatch_count, successful