def __init__(self, country, polygon_id, mapping, layer, **args):
    """Set up a Mapillary feature source for one country/polygon.

    country: country code folded into the cache file name.
    polygon_id: identifier of the extraction polygon, kept for later fetches.
    mapping: path to the mapping file; its version is part of the pseudo-URL
        so a mapping change invalidates the cached CSV.
    layer: Mapillary layer to query.
    """
    self.polygon_id = polygon_id
    self.mapping = mapping
    self.layer = layer
    Source.__init__(self, **args)
    # Version the pseudo-URL with the mapping file so the cache refreshes
    # whenever the mapping content changes.
    mapping_version = SourceVersion.version(self.mapping)
    self.fileUrl = u'mapillary-feature-{0}-{1}.csv'.format(country, mapping_version)
    self.fileUrlCache = 120
def fetch_cached(self):
    """Return the path of a locally cached fetch result, refreshing it when stale.

    The cache key is a pseudo-URL versioned on the mapping file, hashed to a
    file name under config.dir_cache. An entry younger than the delay (in
    days) is reused as-is; otherwise the data is re-fetched, the pseudo-URL
    is recorded in a ".url" sidecar file, and the cache timestamp is reset.
    """
    cc = self.config.options['country']
    poly = self.config.polygon_id
    max_age_days = 120
    # NOTE(review): the mapping path is hard-coded here while sibling
    # constructors take it as a parameter — confirm this is intentional.
    url = 'mapillary-feature-{0}-{1}.csv'.format(
        cc, SourceVersion.version("merge_data/mapillary-traffic-signs.mapping.json"))
    digest = hashlib.sha1(url.encode('utf-8')).hexdigest()
    cache_path = os.path.join(config.dir_cache, digest)
    now = time.time()
    if os.path.exists(cache_path):
        freshness_limit = now - max_age_days * 24 * 60 * 60
        if os.stat(cache_path).st_mtime > freshness_limit:
            # force cache by local delay
            return cache_path
    downloaded = self.fetch(poly)
    with codecs.open(cache_path + ".url", "w", "utf-8") as sidecar:
        sidecar.write(url)
    shutil.move(downloaded, cache_path)
    # set timestamp
    os.utime(cache_path, (now, now))
    return cache_path
def __init__(self, country, polygon_id, mapping, layer, logger, **args):
    """Set up a Mapillary feature source for one country/polygon.

    country: country code folded into the cache file name.
    polygon_id: identifier of the extraction polygon, kept for later fetches.
    mapping: path to the mapping file; its version is part of the pseudo-URL
        so a mapping change invalidates the cached CSV.
    layer: Mapillary layer to query.
    logger: logger instance kept on the source for later use.
    """
    self.polygon_id = polygon_id
    self.mapping = mapping
    self.layer = layer
    self.logger = logger
    Source.__init__(self, **args)
    # Version the pseudo-URL with the mapping file so the cache refreshes
    # whenever the mapping content changes.
    mapping_version = SourceVersion.version(self.mapping)
    self.fileUrl = u'mapillary-feature-{0}-{1}.csv'.format(country, mapping_version)
    self.fileUrlCache = 120
def analyser_version(self):
    """Version stamp combining the parsed source's timestamp with this class."""
    source_timestamp = self.parser.source.time()
    return SourceVersion.version(source_timestamp, self.__class__)
def analyser_version(self):
    """Version stamp derived from this analyser's class and all its plugin classes."""
    # Analyser class first, then each plugin class in plugin order.
    classes = [self.__class__]
    classes.extend(plugin.__class__ for plugin in self.plugins)
    return SourceVersion.version(*classes)
def analyser_version(self):
    """Version stamp derived from this analyser's class alone."""
    analyser_class = self.__class__
    return SourceVersion.version(analyser_class)
def __init__(self, country, polygon_id, logger, **args):
    """Set up a Mapillary traffic-sign source for one country/polygon.

    country: country code folded into the cache file name.
    polygon_id: identifier of the extraction polygon, kept for later fetches.
    logger: logger instance kept on the source for later use.
    """
    self.polygon_id = polygon_id
    self.logger = logger
    Source.__init__(self, **args)
    # Version the pseudo-URL with the bundled mapping file so the cache
    # refreshes whenever that mapping content changes.
    mapping_version = SourceVersion.version("merge_data/mapillary-traffic-signs.mapping.json")
    self.fileUrl = 'mapillary-feature-{0}-{1}.csv'.format(country, mapping_version)
    self.fileUrlCache = 120