def downloadData(sensor_ids, step, download_startdate, download_enddate,
                 outfn=None, pool_len=48):
    """Download sensor data from the teracode API in parallel.

    Splits the [download_startdate, download_enddate) range into
    `step`-sized windows, builds one URL per (sensor, window) pair and
    fetches them all with a multiprocessing pool.

    Example:
        sensor_ids = [...]  # taken from waypoints.py
        download_startdate = "2015-07-01T00:00:00-00:00"
        download_enddate = "2015-07-12T00:00:01-00:00"
        step = datetime.timedelta(days=2)
        newdata = downloadData(sensor_ids, step, download_startdate,
                               download_enddate, outfn="raw_api_01_11.json")

    Args:
        sensor_ids: iterable of sensor identifiers to query.
        step: datetime.timedelta window size per request.
        download_startdate, download_enddate: range bounds. Despite the
            string examples above, the arithmetic below requires
            datetime objects -- TODO confirm callers pass datetimes.
        outfn: optional output filename (consumed past this excerpt).
        pool_len: number of worker processes in the download pool.
    """
    # vsensids = virtsens["id_sensor"].unique()
    token = config.api['token']
    host = config.api['host']
    # {0} is filled with the host now; the %s placeholders are filled
    # later, once per sensor/window.
    urltpl = "https://{0}/api/data/%s?token=%s&fecha_desde=%s&fecha_hasta=%s".format(
        host)
    # end = dateutil.parser.parse(download_enddate)
    start = download_startdate
    end = download_enddate
    urls = []
    seen = set()  # O(1) dedup instead of scanning the urls list each time
    if step > (download_enddate - download_startdate):
        # Requested range is smaller than one step: clamp the step.
        step = download_enddate - download_startdate
    else:
        # NOTE(review): this logs on the *normal* path (step fits inside
        # the range); condition or message looks inverted -- confirm.
        logger.info(
            "condicion step sin cumplir step:{0} enddate:{1} startdate:{2}".
            format(step, download_enddate, download_startdate))
    while start < end:
        startdate, enddate = start, start + step
        for sensor_id in sensor_ids:
            url = urltpl % (sensor_id, token,
                            startdate.strftime("%Y-%m-%dT%H:%M:%S-03:00"),
                            enddate.strftime("%Y-%m-%dT%H:%M:%S-03:00"))
            if url not in seen:
                seen.add(url)
                urls.append(url)
        start += step
    # TODO: to fake the API, swap getData for api_sensores_fake below.
    #alldata = map(api_sensores_fake, urls)
    try:
        pool = multiprocessing.Pool(pool_len)
        alldata = pool.map(getData, urls)
    except Exception as e:  # was py2-only `except Exception, e`
        logger.error("pool multiprocessing, error:", traceback=True)
        send_email_error("pool multiprocessing, error: {0}".format(e))
        alldata = []
def createDBEngine(self):
    """Create the SQLAlchemy engine and reflect the database schema.

    On success stores the engine on self.__engine, prepares self.Base
    against it (reflect=True) and marks the connection via self.__c.
    On failure it does not raise: it logs the error and sends an alert
    e-mail instead.

    Reference:
    http://docs.sqlalchemy.org/en/latest/core/engines.html?highlight=create_engine#sqlalchemy.create_engine
        engine = sqlalchemy.create_engine("postgres://postgres@/postgres")
        engine = sqlalchemy.create_engine("sqlite:///analysis.db")
    """
    try:
        self.__engine = create_engine(
            self.cfg.db_url,
            echo=self.debug,
            pool_timeout=60,
            echo_pool=self.debug_pool)
        # Reflect the existing tables onto the declarative base.
        self.Base.prepare(self.__engine, reflect=True)
        self.__c = True  # connection established flag
    except Exception as e:  # was py2-only `except Exception, e`
        msg_error = "OperationalError: not connect {0} , traceback:{1}".format(
            self.cfg.db_url, e)
        logger.error("OperationalError: not connect {0} ".format(
            self.cfg.db_url), traceback=True)
        send_email_error(msg_error)
def downloadData(sensor_ids, step, download_startdate, download_enddate,
                 outfn=None, pool_len=48):
    """Download sensor data from the teracode API in parallel.

    Splits the [download_startdate, download_enddate) range into
    `step`-sized windows, builds one URL per (sensor, window) pair and
    fetches them all with a multiprocessing pool.

    Example:
        sensor_ids = [...]  # taken from waypoints.py
        download_startdate = "2015-07-01T00:00:00-00:00"
        download_enddate = "2015-07-12T00:00:01-00:00"
        step = datetime.timedelta(days=2)
        newdata = downloadData(sensor_ids, step, download_startdate,
                               download_enddate, outfn="raw_api_01_11.json")

    Args:
        sensor_ids: iterable of sensor identifiers to query.
        step: datetime.timedelta window size per request.
        download_startdate, download_enddate: range bounds. Despite the
            string examples above, the arithmetic below requires
            datetime objects -- TODO confirm callers pass datetimes.
        outfn: optional output filename (consumed past this excerpt).
        pool_len: number of worker processes in the download pool.
    """
    # vsensids = virtsens["id_sensor"].unique()
    token = config.api['token']
    host = config.api['host']
    # {0} is filled with the host now; the %s placeholders are filled
    # later, once per sensor/window.
    urltpl = "{0}/api/data/%s?token=%s&fecha_desde=%s&fecha_hasta=%s".format(
        host)
    # end = dateutil.parser.parse(download_enddate)
    start = download_startdate
    end = download_enddate
    urls = []
    seen = set()  # O(1) dedup instead of scanning the urls list each time
    if step > (download_enddate - download_startdate):
        # Requested range is smaller than one step: clamp the step.
        step = download_enddate - download_startdate
    else:
        # NOTE(review): this logs on the *normal* path (step fits inside
        # the range); condition or message looks inverted -- confirm.
        logger.info("condicion step sin cumplir step:{0} enddate:{1} startdate:{2}".format(
            step, download_enddate, download_startdate))
    while start < end:
        startdate, enddate = start, start + step
        for sensor_id in sensor_ids:
            url = urltpl % (sensor_id, token,
                            startdate.strftime("%Y-%m-%dT%H:%M:%S-03:00"),
                            enddate.strftime("%Y-%m-%dT%H:%M:%S-03:00"))
            if url not in seen:
                seen.add(url)
                urls.append(url)
        start += step
    # TODO: to fake the API, swap getData for api_sensores_fake below.
    #alldata = map(api_sensores_fake, urls)
    try:
        pool = multiprocessing.Pool(pool_len)
        alldata = pool.map(getData, urls)
    except Exception as e:  # was py2-only `except Exception, e`
        logger.error("pool multiprocessing, error:", traceback=True)
        send_email_error("pool multiprocessing, error: {0}".format(e))
        alldata = []
def createDBEngine(self):
    """Create the SQLAlchemy engine and reflect the database schema.

    On success stores the engine on self.__engine, prepares self.Base
    against it (reflect=True) and marks the connection via self.__c.
    On failure it does not raise: it logs the error and sends an alert
    e-mail instead.

    Reference:
    http://docs.sqlalchemy.org/en/latest/core/engines.html?highlight=create_engine#sqlalchemy.create_engine
        engine = sqlalchemy.create_engine("postgres://postgres@/postgres")
        engine = sqlalchemy.create_engine("sqlite:///analysis.db")
    """
    try:
        self.__engine = create_engine(self.cfg.db_url,
                                      echo=self.debug,
                                      pool_timeout=60,
                                      echo_pool=self.debug_pool)
        # Reflect the existing tables onto the declarative base.
        self.Base.prepare(self.__engine, reflect=True)
        self.__c = True  # connection established flag
    except Exception as e:  # was py2-only `except Exception, e`
        msg_error = "OperationalError: not connect {0} , traceback:{1}".format(
            self.cfg.db_url, e)
        logger.error("OperationalError: not connect {0} ".format(
            self.cfg.db_url), traceback=True)
        send_email_error(msg_error)
# NOTE(review): fragment -- the enclosing `def` (which provides `url`)
# and the retry loop binding `i` start before this excerpt; indentation
# below reconstructs the apparent structure. TODO confirm against the
# full file.
try:
    response = requests.get(url)
    if (response.status_code == 200):
        # Success: hand the decoded JSON payload back to the caller.
        return response.json()
    else:
        # Non-200 status: record the error; presumably retried by the
        # enclosing loop -- confirm.
        error_msg = "hubo timeout del count:{0} request en {1} codigo:{2}".format(
            i, url, response.status_code)
        logger.error(error_msg)
except requests.exceptions.Timeout, e:
    # py2-only `except X, e` syntax; request timed out on this attempt.
    error_msg = "hubo timeout del count:{0} request en {1}".format(
        i, url)
    logger.error(error_msg, traceback=True)
except:
    # Bare except: any other failure returns None silently --
    # presumably deliberate best-effort; verify.
    return None
if i == 3:
    # Looks like the last attempt of the retry loop -- alert by e-mail.
    send_email_error(error_msg)
time.sleep(3)  # back off between attempts
return None


def downloadData(sensor_ids, step, download_startdate, download_enddate,
                 outfn=None, pool_len=48):
    # NOTE(review): the docstring below is truncated by this excerpt
    # (unterminated here; it closes past this view).
    """ Funcion que arme para bajar datos de la api de teracode Ej: sensor_ids = [...] # Sacar de waypoints.py download_startdate = "2015-07-01T00:00:00-00:00"
# NOTE(review): fragment -- the enclosing `def` (which provides `url`)
# and the retry loop binding `i` start before this excerpt; indentation
# below reconstructs the apparent structure. TODO confirm against the
# full file.
try:
    response = requests.get(url)
    if (response.status_code == 200):
        # Success: hand the decoded JSON payload back to the caller.
        return response.json()
    else:
        # Non-200 status: record the error; presumably retried by the
        # enclosing loop -- confirm.
        error_msg = "hubo error del count:{0} request en {1} codigo:{2}".format(
            i, url, response.status_code)
        logger.error(error_msg)
except requests.exceptions.Timeout, e:
    # py2-only `except X, e` syntax; request timed out on this attempt.
    error_msg = "hubo timeout del count:{0} request en {1}".format(
        i, url)
    logger.error(error_msg, traceback=True)
except:
    # Bare except: any other failure returns None silently --
    # presumably deliberate best-effort; verify.
    return None
if i == 3:
    # Looks like the last attempt of the retry loop -- alert by e-mail.
    send_email_error(error_msg)
time.sleep(3)  # back off between attempts
return None


def downloadData(sensor_ids, step, download_startdate, download_enddate,
                 outfn=None, pool_len=48):
    """Download data from the teracode API (body continues past this excerpt).

    Example:
        sensor_ids = [...]  # taken from waypoints.py
        download_startdate = "2015-07-01T00:00:00-00:00"
        download_enddate = "2015-07-12T00:00:01-00:00"
        step = datetime.timedelta(days=2)
        newdata = downloadData(sensor_ids, step, download_startdate,
                               download_enddate, outfn="raw_api_01_11.json")
    """