def __init__(self, datafile):
    DataFile.__init__(self, datafile)
    self.fieldnames = [
        'Route', 'IdSection', 'Date', 'Period', 'DayType', 'TotalDistance',
        'TotalTime', 'Speed', 'Observations', 'InvalidObservations',
        'Operator', 'RouteUser', 'IsEndSection'
    ]
def test_field_names(self):
    old_file_name_list = [
        '2020-11-30-with-evasion-2.profile',
        '2020-11-30-with-evasion-2.profile.gz',
        '2020-11-30-with-evasion-2.profile.zip',
        '2020-03-20.profile',
        '2020-03-20.profile.gz',
        '2020-03-20.profile.zip'
    ]
    current_file_name_list = [
        '2021-06-30.profile',
        '2021-06-30.profile.gz',
        '2021-06-30.profile.zip'
    ]
    for file_name in current_file_name_list + old_file_name_list:
        file_path = os.path.join(os.path.dirname(__file__), 'files',
                                 file_name)
        profile_uploader = ProfileFile(file_path)
        data_file = DataFile(file_path)
        with data_file.get_file_object() as csvfile:
            reader = csv.reader(csvfile, delimiter="|")
            row = next(reader)
            if file_name in current_file_name_list:
                self.assertEqual(profile_uploader.fieldnames, row)
            else:
                # old files do not use the current header, so only check that
                # every column they declare is still a known field name
                self.assertTrue(
                    all(col in profile_uploader.fieldnames for col in row))
def __init__(self, datafile):
    DataFile.__init__(self, datafile)
    self.fieldnames = [
        'Fecha', 'TipoDia', 'version', 'nExpediciones', 'minTiempoExpediciones',
        'maxTiempoExpediciones', 'mediaTiempoExpediciones', 'nPatentes', 'nGPS',
        'mediaTiempoEntreGPS', 'nGPSConServicio', 'nGPSSinServicio',
        'nTrxTotales', 'nTrxTotalesBus(%)', 'nTrxTotalesMetro(%)',
        'nTrxTotalesMetroTren(%)', 'nTrxTotalesZonasPagas(%)', 'nTarjetas',
        'nTrxsConServicio', 'nTrxsSinServicio', 'nEtapasConBajadaBus(%)',
        'nEtapasConBajadaMetro(%)', 'nEtapasConBajadaMetroTren(%)',
        'nEtapasConBajadaZonasPagas(%)', 'nViajes', 'nViajes1E(%)',
        'nViajes2E(%)', 'nViajes3E(%)', 'nViajes4E(%)', 'nViajes5E(%)',
        'nViajesSoloMetro(%)', 'nViajesConAlgunaEtapaEnMetro(%)',
        'nViajesSinBajadaFinal(%)', 'tViajeTotal', 'dViajeTotal', 'vViajeTotal',
        'nViajeMediaPM', 'tViajeMediaPM', 'dViajeMediaPM', 'vViajeMediaPM',
        'nViajeMediaPT', 'tViajeMediaPT', 'dViajeMediaPT', 'vViajeMediaPT',
        'nTrxPM(%)', 'nTrxPT(%)', 'nBajadas', 'nBajadasPM', 'nBajadasPT',
        'nParadasE', 'nParadasT', 'nParadasL', 'nParadasI', 'nTrxE(%)',
        'nTrxT(%)', 'nTrxL(%)', 'nTrxI(%)',
        'par1', 'par2', 'par3', 'par4', 'par5', 'par6', 'par7', 'par8', 'par9',
        'par10',
        'trx1', 'trx2', 'trx3', 'trx4', 'trx5', 'trx6', 'trx7', 'trx8', 'trx9',
        'trx10',
        'parBus1', 'parBus2', 'parBus3', 'parBus4', 'parBus5', 'parBus6',
        'parBus7', 'parBus8', 'parBus9', 'parBus10',
        'trx1_', 'trx2_', 'trx3_', 'trx4_', 'trx5_', 'trx6_', 'trx7_', 'trx8_',
        'trx9_', 'trx10_'
    ]
def __init__(self, datafile):
    DataFile.__init__(self, datafile)
    self.fieldnames = [
        'route', 'licensePlate', 'a', 'b', 'expeditionStartTime',
        'expeditionEndTime', 'fulfillment', 'c', 'd', 'e', 'f', 'g', 'h',
        'periodId'
    ]
def __init__(self, datafile): DataFile.__init__(self, datafile) self.fieldnames = [ "Dia", "TipoDia", "Servicio", "Operador", "ServicioUsuario", "PeriodoTS", "inicio", "fin", "CodigoParInicio", "CodigoParFin", "UsuarioParInicio", "UsuarioParFin", "NombreParInicio", "NombreParFin", "zonaS", "zonaB", "ConBajada", "SinBajada", "Expandida" ]
def __init__(self, datafile):
    DataFile.__init__(self, datafile)
    self.fieldnames = [
        'operador', 'id_etapa', 'correlativo_viajes', 'correlativo_etapas',
        'tipo_dia', 'tipo_transporte', 'fExpansionServicioPeriodoTS',
        'tiene_bajada', 'tiempo2', 'tiempo_subida', 'tiempo_bajada',
        'tiempo_etapa', 'media_hora_subida', 'media_hora_bajada', 'x_subida',
        'y_subida', 'x_bajada', 'y_bajada', 'dist_ruta_paraderos',
        'dist_eucl_paraderos', 'servicio_subida', 'servicio_bajada',
        'parada_subida', 'parada_bajada', 'comuna_subida', 'comuna_bajada',
        'zona_subida', 'zona_bajada', 'sitio_subida',
        'fExpansionZonaPeriodoTS', 'tEsperaMediaIntervalo', 'periodoSubida',
        'periodoBajada', 'tiempoIniExpedicion'
    ]
def test_field_names(self):
    file_name_list = [
        '2021-06-30.speed', '2021-06-30.speed.gz', '2021-06-30.speed.zip'
    ]
    for file_name in file_name_list:
        file_path = os.path.join(os.path.dirname(__file__), 'files',
                                 file_name)
        speed_uploader = SpeedFile(file_path)
        data_file = DataFile(file_path)
        with data_file.get_file_object() as csvfile:
            reader = csv.reader(csvfile, delimiter="|")
            row = next(reader)
            self.assertEqual(speed_uploader.fieldnames, row)
def test_field_names(self): file_name_list = [ "2021-05-19.shape", "2021-05-19.shape.gz", "2021-05-19.shape.zip" ] for file_name in file_name_list: file_path = os.path.join(os.path.dirname(__file__), 'files', file_name) shape_uploader = ShapeFile(file_path) data_file = DataFile(file_path) with data_file.get_file_object() as csvfile: reader = csv.reader(csvfile, delimiter="|") row = next(reader) self.assertEqual(shape_uploader.fieldnames, row)
def test_stop_by_route_field_names(self):
    file_name_list = [
        '2021-05-19.stop', '2021-05-19.stop.gz', '2021-05-19.stop.zip'
    ]
    for file_name in file_name_list:
        file_path = os.path.join(os.path.dirname(__file__), 'files',
                                 file_name)
        stop_by_route_uploader = StopByRouteFile(file_path)
        data_file = DataFile(file_path)
        with data_file.get_file_object() as csvfile:
            reader = csv.reader(csvfile, delimiter="|")
            row = next(reader)
            self.assertEqual(stop_by_route_uploader.fieldnames, row)
def test_field_names(self):
    file_name_list = [
        '2019-08-10.paymentfactor', '2019-08-10.paymentfactor.gz',
        '2019-08-10.paymentfactor.zip'
    ]
    for file_name in file_name_list:
        file_path = os.path.join(os.path.dirname(__file__), 'files',
                                 file_name)
        paymentfactor_uploader = PaymentFactorFile(file_path)
        data_file = DataFile(file_path)
        with data_file.get_file_object() as csvfile:
            reader = csv.reader(csvfile, delimiter="|")
            row = next(reader)
            self.assertEqual(paymentfactor_uploader.fieldnames, row)
def test_field_names(self):
    file_name_list = [
        '2019-03-06.opdata', '2019-03-06.opdata.gz', '2019-03-06.opdata.zip'
    ]
    for file_name in file_name_list:
        file_path = os.path.join(os.path.dirname(__file__), 'files',
                                 file_name)
        opdata_uploader = OPDataFile(file_path)
        data_file = DataFile(file_path)
        with data_file.get_file_object() as csvfile:
            reader = csv.reader(csvfile, delimiter="|")
            row = next(reader)
            self.assertEqual(opdata_uploader.fieldnames, row)
def test_field_names(self):
    file_name_list = [
        '2016-05-23.expedition', '2016-05-23.expedition.zip',
        '2016-05-23.expedition.gz'
    ]
    for file_name in file_name_list:
        file_path = os.path.join(os.path.dirname(__file__), 'files',
                                 file_name)
        expedition_uploader = ExpeditionFile(file_path)
        data_file = DataFile(file_path)
        with data_file.get_file_object() as csvfile:
            reader = csv.reader(csvfile, delimiter="|")
            row = next(reader)
            self.assertEqual(expedition_uploader.fieldnames, row)
def test_field_names(self):
    file_name_list = [
        '2017-05-08.odbyroute', '2017-05-08.odbyroute.gz',
        '2017-05-08.odbyroute.zip'
    ]
    for file_name in file_name_list:
        file_path = os.path.join(os.path.dirname(__file__), 'files',
                                 file_name)
        odbyroute_uploader = OdByRouteFile(file_path)
        data_file = DataFile(file_path)
        with data_file.get_file_object() as csvfile:
            reader = csv.reader(csvfile, delimiter="|")
            row = next(reader)
            self.assertEqual(odbyroute_uploader.fieldnames, row)
def test_field_names(self):
    file_name_list = [
        '2018-10-01.general', '2018-10-01.general.zip', '2018-10-01.general.gz'
    ]
    for file_name in file_name_list:
        file_path = os.path.join(os.path.dirname(__file__), 'files',
                                 file_name)
        general_uploader = GeneralFile(file_path)
        data_file = DataFile(file_path)
        with data_file.get_file_object() as csvfile:
            reader = csv.reader(csvfile, delimiter="|")
            row = next(reader)
            self.assertEqual(general_uploader.fieldnames, row)
def test_field_names(self):
    file_name_list = [
        '2021-06-30.trip', '2021-06-30.trip.gz', '2021-06-30.trip.zip'
    ]
    for file_name in file_name_list:
        file_path = os.path.join(os.path.dirname(__file__), 'files',
                                 file_name)
        trip_uploader = TripFile(file_path)
        data_file = DataFile(file_path)
        with data_file.get_file_object() as csvfile:
            reader = csv.reader(csvfile, delimiter="|")
            row = next(reader)
            # drop the last column because it is an empty trailing column
            row = row[:len(row) - 1]
            self.assertEqual(trip_uploader.fieldnames, row)
def __init__(self, datafile):
    DataFile.__init__(self, datafile)
    self.fieldnames = [
        'Servicio', 'ServicioUsuario', 'Operador', 'Correlativo', 'Codigo',
        'CodigoUsuario', 'Nombre', 'Latitud', 'Longitud', 'esZP'
    ]
    # build an index from each authority stop code ('Codigo') to the sorted
    # list of user route codes ('ServicioUsuario') that serve it
    self.routes_by_stop = defaultdict(lambda: set())
    with self.get_file_object() as f:
        next(f)  # skip header
        delimiter = '|'
        reader = csv.DictReader(f, delimiter=delimiter,
                                fieldnames=self.fieldnames)
        for row in reader:
            self.routes_by_stop[row['Codigo']].add(row['ServicioUsuario'])
    for authStopCode in self.routes_by_stop.keys():
        route_list = list(self.routes_by_stop[authStopCode])
        route_list.sort()
        self.routes_by_stop[authStopCode] = route_list
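# Minimal usage sketch (an assumption, not code from this repository): once the
# constructor above runs, routes_by_stop can be read directly. The file path
# below is an illustrative placeholder.
#
# stop_file = StopByRouteFile('files/2021-05-19.stop')
# for stop_code, routes in stop_file.routes_by_stop.items():
#     print(stop_code, '->', ', '.join(routes))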
def __init__(self, datafile):
    DataFile.__init__(self, datafile)
    self.fieldnames = [
        'Operador', 'ServicioSentido', 'ServicioUsuario', 'Patente',
        'Paradero', 'NombreParada', 'Hini', 'Hfin', 'Cumplimiento',
        'Correlativo', 'idExpedicion', 'DistEnRuta', '#Subidas',
        '#SubidasLejanas', 'Subidastotal', 'SubidasExpandidas', '#Bajadas',
        '#BajadasLejanas', 'Bajadastotal', 'BajadasExpandidas', 'Carga',
        'Capacidad', 'TiempoGPSInterpolado', 'TiempoPrimeraTrx',
        'TiempoGPSMasCercano', 'Tiempo', 'nSubidasTmp', 'ParaderoUsuario',
        'PeriodoTSExpedicion', 'PeriodoTSParada', 'TipoDia', 'ZP',
        'DeltaTrxs', 'MHSalida', 'MHPasada', 'ExpedicionConProblema',
        'subidas_evadidas', 'bajadas_evadidas', 'subidas_corregidas',
        'bajadas_corregidas', 'carga_corregida', 'subidas_conbajada',
        '%evasion', 'tipo_evasion', 'uniforme', 'pax-km_tramo',
        'pax-km_corregido_tramo', 'plazas-km_tramo'
    ]
def __init__(self, datafile): DataFile.__init__(self, datafile) self.fieldnames = [ "tipodia", "factor_expansion", "n_etapas", "tviaje", "distancia_eucl", "distancia_ruta", "tiempo_subida", "tiempo_bajada", "mediahora_subida", "mediahora_bajada", "periodo_subida", "periodo_bajada", "tipo_transporte_1", "tipo_transporte_2", "tipo_transporte_3", "tipo_transporte_4", "srv_1", "srv_2", "srv_3", "srv_4", "paradero_subida", "paradero_bajada", "comuna_subida", "comuna_bajada", "zona_subida", "zona_bajada", "modos", "tiempo_subida1", "tiempo_subida2", "tiempo_subida3", "tiempo_subida4", "tiempo_bajada1", "tiempo_bajada2", "tiempo_bajada3", "tiempo_bajada4", "zona_subida1", "zona_subida2", "zona_subida3", "zona_subida4", "zona_bajada1", "zona_bajada2", "zona_bajada3", "zona_bajada4", "paraderosubida_1era", "paraderosubida_2da", "paraderosubida_3era", "paraderosubida_4ta", "paraderobajada_1era", "paraderobajada_2da", "paraderobajada_3era", "paraderobajada_4ta", "mediahora_bajada_1era", "mediahora_bajada_2da", "mediahora_bajada_3era", "mediahora_bajada_4ta", "periodo_bajada_1era", "periodo_bajada_2da", "periodo_bajada_3era", "periodo_bajada_4ta" ] self.null_date = "1970-01-01 00:00:00"
def __init__(self, datafile):
    DataFile.__init__(self, datafile)
    self.fieldnames = [
        'Route', 'IsSectionInit', 'Latitude', 'Longitude', 'Operator',
        'RouteUser'
    ]
def __init__(self, datafile): DataFile.__init__(self, datafile) self.fieldnames = ["FECHA", "TIPODIA", "ASIGNACION", "ZP", "NOMBRE", "UN", "TOTAL", "SUMAN", "RESTAN", "NEUTRAS", "FACTOR", "SERVS", "TRXS", "VALIDATORID"]
def __init__(self, datafile): DataFile.__init__(self, datafile) self.fieldnames = [ "id", "tiempo", "sitio", "op", "servicio_sonda", "servicio_usuario", "periodo", "tipo_dia" ]
def __init__(self, datafile):
    DataFile.__init__(self, datafile)
    self.fieldnames = [
        'ServicioSentido', 'UN', 'Servicio', 'Sentido', 'ServicioTS',
        'TipoDia', 'PeriodoTS', 'HoraIni', 'HoraFin', 'Frecuencia',
        'Capacidad', 'Distancia', 'Velocidad'
    ]
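# Generic parsing sketch (an assumption about usage, not code taken from this
# repository): every uploader above declares fieldnames for a "|"-delimited
# file, so any of them can feed its header to csv.DictReader in the same way
# the stop uploader does. ProfileFile and the file path are illustrative
# placeholders only.
#
# import csv
# uploader = ProfileFile('files/2021-06-30.profile')
# with uploader.get_file_object() as f:
#     next(f)  # skip the header line present in the raw file
#     reader = csv.DictReader(f, delimiter='|', fieldnames=uploader.fieldnames)
#     for row in reader:
#         ...  # row is a dict keyed by the declared field names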