def find_part_xml(self, part): """Pull the XML for an annual edition, first checking locally""" logger.info("Find Part xml for %s CFR %s", self.title, part) url = CFR_PART_URL.format(year=self.year, title=self.title, volume=self.vol_num, part=part) filename = url.split('/')[-1] for xml_path in settings.LOCAL_XML_PATHS + [xml_sync.GIT_DIR]: xml_path = os.path.join(xml_path, 'annual', filename) logger.debug("Checking locally for file %s", xml_path) if os.path.isfile(xml_path): with open(xml_path) as f: return XMLWrapper(f.read(), xml_path) logger.debug("GET %s", url) response = requests.get(url) if response.status_code == 200: return XMLWrapper(response.content, url)
def find_part_xml(self, part): """Pull the XML for an annual edition, first checking locally""" logger.info("Find Part xml for %s CFR %s", self.title, part) url = CFR_PART_URL.format(year=self.year, title=self.title, volume=self.vol_num, part=part) filename = url.split('/')[-1] for xml_path in settings.LOCAL_XML_PATHS: xml_path = os.path.join(xml_path, 'annual', filename) logger.debug("Checking locally for file %s", xml_path) if os.path.isfile(xml_path): with open(xml_path, 'rb') as f: return XMLWrapper(f.read(), xml_path) client = http_client() first_try_url = settings.XML_REPO_PREFIX + 'annual/' + filename logging.info('trying to fetch annual edition from %s', first_try_url) response = client.get(first_try_url) if response.status_code != requests.codes.ok: logger.info('failed. fetching from %s', url) response = client.get(url) if response.status_code == requests.codes.ok: return XMLWrapper(response.content, url)
def deserialize(self, content):
    return XMLWrapper(content, str(self))