def sync():
    """Import any new .zip archives from the remote HTMBUZZ listing.

    Fetches the directory index at ``url`` (newest first via ``?order=d``),
    walks every anchor, and downloads each zip whose
    ``'HTMBUZZ:<filename>'`` version key has not been imported yet.
    A failure on one archive is logged and does not stop the rest.
    """
    f = urllib2.urlopen(url + '?order=d')
    try:
        soup = BeautifulSoup(f.read())
    finally:
        f.close()  # close the HTTP response even if read/parse fails
    for anchor in soup.find_all('a'):
        link = anchor.get('href')
        if not link:
            continue  # anchor without an href attribute
        filename = urllib2.unquote(link)
        if '.zip' not in link.lower():
            continue
        if version_imported('HTMBUZZ:' + filename):
            continue  # already imported; skip
        try:
            logger.info('Importing :' + filename)
            download(url + link, filename)
        except Exception:
            # best-effort: log the traceback and continue with the next file
            logger.exception(filename)
def sync():
    """Import any new .zip archives from the remote QBUZZ listing.

    Fetches the directory index at ``url`` (newest first via ``?order=d``),
    walks every anchor, and downloads each zip whose
    ``'QBUZZ:<filename>'`` version key has not been imported yet.
    A failure on one archive is logged and does not stop the rest.
    """
    f = urllib2.urlopen(url + '?order=d')
    try:
        soup = BeautifulSoup(f.read())
    finally:
        f.close()  # close the HTTP response even if read/parse fails
    for anchor in soup.find_all('a'):
        link = anchor.get('href')
        if not link:
            continue  # anchor without an href attribute
        filename = urllib2.unquote(link)
        if '.zip' not in link.lower():
            continue
        if version_imported('QBUZZ:' + filename):
            continue  # already imported; skip
        try:
            logger.info('Importing :' + filename)
            download(url + link, filename)
        except Exception:
            # best-effort: log the traceback and continue with the next file
            logger.exception(filename)
def sync(): f = urllib2.urlopen(url + '/' + path) soup = BeautifulSoup(f.read()) files = [] for link in soup.find_all('a'): link = link.get('href') filename = urllib2.unquote(link).split('/')[-1] if '.zip' in link.lower(): if not version_imported('TEC:' + filename): files.append((link, filename)) for link, filename in sorted(files): try: print 'FILE ' + filename logger.info('Importing :' + filename) download(url + link, filename) except Exception as e: logger.error(filename, exc_info=True) pass
def sync(): f = urllib2.urlopen(url+'/'+path) soup = BeautifulSoup(f.read()) files = [] for link in soup.find_all('a'): link = link.get('href') filename = urllib2.unquote(link).split('/')[-1] if '.zip' in link.lower(): if not version_imported('TEC:'+filename): files.append((link,filename)) for link,filename in sorted(files): try: print 'FILE '+filename logger.info('Importing :'+filename) download(url+link,filename) except Exception as e: logger.error(filename,exc_info=True) pass