Example #1
def campana():
    reload(sys)
    sys.setdefaultencoding('utf8')
    SERVER="192.168.20.63\MV"
    USER="******"
    PASSWORD="******"
    DATABASE="Mirror_UN1002XZCVBN"
    TABLE_DB = "dbo.Tb_Campanas"
    HOY = datetime.datetime.today().strftime('%Y-%m-%d')

    # Connect to the DB and fetch the records
    conn = _mssql.connect(server=SERVER, user=USER, password=PASSWORD, database=DATABASE)
    conn.execute_query('SELECT Id_Campana,Nombre_Campana,Codigo_Campana,Id_UEN,Fecha_Creacion,Estado FROM ' + TABLE_DB )
    # conn.execute_query('SELECT Id_Gestion,Id_Causal,Fecha_Seguimiento,Id_Usuario,Valor_Obligacion,Id_Docdeu, Nota FROM ' + TABLE_DB + ' where CAST(Fecha_Seguimiento AS date) >= CAST(' + "'2019-02-01' as DATE) ")

    cloud_storage_rows = ""

    # Records in this table may contain embedded line breaks and semicolons
    for row in conn:
        text_row =  ""
        text_row += str(row['Id_Campana']).encode('utf-8') + "|" 
        text_row += str(row['Nombre_Campana']).encode('utf-8') + "|"
        text_row += str(row['Codigo_Campana']).encode('utf-8') + "|"
        text_row += str(row['Id_UEN']).encode('utf-8') + "|"
        text_row += str(row['Fecha_Creacion']).encode('utf-8') + "|"
        text_row += str(row['Estado']).encode('utf-8') + "|"
        # text_row += str(row['Logo']).encode('utf-8') + "|"
        text_row += "\n"
        cloud_storage_rows += text_row
        
    conn.close()
    
    filename = "campanas/Unificadas_campanas" + ".csv"
    # Once the local load finishes, write the data to the bucket
    gcscontroller.create_file(filename, cloud_storage_rows, "ct-unificadas")

    try:
        deleteQuery = "DELETE FROM `contento-bi.unificadas.Campanas` WHERE 1=1"
        client = bigquery.Client()
        query_job = client.query(deleteQuery)
        query_job.result()
    except Exception:
        print("no se pudo eliminar")

    # First delete all the records for that date
    
    # time.sleep(60)
    
    flowAnswer = unificadas_campanas_beam.run()
  
    # time.sleep(180)
    # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-unificadas')
    blob = bucket.blob("campanas/Unificadas_campanas" + ".csv")
    # Delete the file referenced by the variable
    blob.delete()
    
    # return jsonify(flowAnswer), 200
    return "Campana cargada" + "flowAnswer" 
Example #2
def Ejecutar():
    print('#################################ENTRO AL CDR')
    schema = [
        'id_call', 'type_call', 'talk_time', 'id_agent', 'agent_name',
        'agent_identification', 'skill', 'date', 'hour', 'day_of_week',
        'typing_code', 'descri_typing_code', 'typing_code2',
        'descri_typing_code2', 'hit', 'telephone_destination',
        'telephone_costs', 'telephone_number', 'who_hangs_up',
        'customer_identification', 'month', 'screen_recording', 'operation',
        'ring', 'abandon'
    ]

    print(
        '##################################################LLAMAMOS AL SERVICIO'
    )
    extraccion = extraccion_service_general(KEY_REPORT, CODE_REPORT, sin_datos,
                                            schema, table_id, key_delete)
    filename = extraccion[0]
    cloud_storage_rows = extraccion[1]
    output = extraccion[2]
    cont_excepciones = extraccion[3]
    cont_no_contenido = extraccion[4]
    cont_registros = extraccion[5]
    cont_token = extraccion[6]
    lista_instancias_excepcion = extraccion[7]
    lista_instancias_sin_contenido = extraccion[8]
    lista_instancias_token = extraccion[9]
    sub_path = extraccion[10]
    dateini = extraccion[11]
    dateend = extraccion[12]

    gcscontroller.create_file(filename, cloud_storage_rows, "ct-telefonia")
    ejecutar = cdr_beam2.run(output, KEY_REPORT)
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-telefonia')
    blob = bucket.blob(sub_path + fecha + '.csv')
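    # Note: this blob handle is created but never used or deleted in this
    # function; the uploaded file stays in the bucket.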

    return ("Se acaba de ejecutar el proceso de " + KEY_REPORT +
            " Para actualizar desde: " + dateini + " hasta " + dateend +
            ' con ' + str(cont_registros) + ' registros' + '\n' +
            "INFORMACION: <b>instancias con error --> </b>" +
            str(cont_excepciones) + '\n' + 'DETALLE:' +
            str(lista_instancias_excepcion) + '\n' +
            '---------------------------------------' + '\n' +
            '<b>instancias sin contenido: </b>' + str(cont_no_contenido) +
            '\n' + 'DETALLE:' + str(lista_instancias_sin_contenido) +
            '---------------------------------------' + '\n' +
            '<b>instancias con problemas de TOKEN: </b> ' + str(cont_token) +
            '\n' + 'DETALLE: ' + str(lista_instancias_token) + '')
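
The thirteen indexed assignments from extraccion can be collapsed into a single tuple unpacking, a sketch assuming extraccion_service_general always returns exactly thirteen elements in this order:

(filename, cloud_storage_rows, output, cont_excepciones, cont_no_contenido,
 cont_registros, cont_token, lista_instancias_excepcion,
 lista_instancias_sin_contenido, lista_instancias_token, sub_path,
 dateini, dateend) = extraccion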
Example #3
def Ejecutar():
    print('#################################ENTRO AL LLAMADAS_REPORT')
    schema = [
        'date', 'id_agent', 'name', 'id_queue', 'type_call', 'tel_number',
        'cod_act', 'id_customer', 'comment', 'duration', 'hung_up', 'cost',
        'id_campaing', 'id_call'
    ]

    print(
        '##################################################LLAMAMOS AL SERVICIO'
    )
    extraccion = extraccion_service_general(KEY_REPORT, CODE_REPORT, sin_datos,
                                            schema, table_id, key_delete)
    filename = extraccion[0]
    cloud_storage_rows = extraccion[1]
    output = extraccion[2]
    cont_excepciones = extraccion[3]
    cont_no_contenido = extraccion[4]
    cont_registros = extraccion[5]
    cont_token = extraccion[6]
    lista_instancias_excepcion = extraccion[7]
    lista_instancias_sin_contenido = extraccion[8]
    lista_instancias_token = extraccion[9]
    sub_path = extraccion[10]
    dateini = extraccion[11]
    dateend = extraccion[12]

    gcscontroller.create_file(filename, cloud_storage_rows, "ct-telefonia")
    ejecutar = llamadas_report_beam2.run(output, KEY_REPORT)
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-telefonia')
    blob = bucket.blob(sub_path + fecha + '.csv')

    return ("Se acaba de ejecutar el proceso de " + KEY_REPORT +
            " Para actualizar desde: " + dateini + " hasta " + dateend +
            ' con ' + str(cont_registros) + ' registros' + '\n' +
            "INFORMACION: <b>instancias con error --> </b>" +
            str(cont_excepciones) + '\n' + 'DETALLE:' +
            str(lista_instancias_excepcion) + '\n' +
            '---------------------------------------' + '\n' +
            '<b>instancias sin contenido: </b>' + str(cont_no_contenido) +
            '\n' + 'DETALLE:' + str(lista_instancias_sin_contenido) +
            '---------------------------------------' + '\n' +
            '<b>instancias con problemas de TOKEN: </b> ' + str(cont_token) +
            '\n' + 'DETALLE: ' + str(lista_instancias_token) + '')
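
Example #3 is identical to Example #2 except for the beam module it runs (llamadas_report_beam2 instead of cdr_beam2) and the schema list. A hypothetical refactor, not in the original module, trimmed to the extraction, upload, and beam run:

def ejecutar_reporte(schema, beam_module):
    # Shared body of the two Ejecutar() variants; KEY_REPORT, CODE_REPORT,
    # sin_datos, table_id and key_delete come from module scope as above.
    extraccion = extraccion_service_general(KEY_REPORT, CODE_REPORT,
                                            sin_datos, schema, table_id,
                                            key_delete)
    gcscontroller.create_file(extraccion[0], extraccion[1], "ct-telefonia")
    return beam_module.run(extraccion[2], KEY_REPORT)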
Example #4
def workforce():
    
    dateini = request.args.get('dateini')
    dateend = request.args.get('dateend')

    if dateini is None:
        dateini = ""

    if dateend is None:
        dateend = ""

    client = bigquery.Client()
    QUERY = ('select Tabla,servidor,usuario,contrasena,data_base,tabla_bd from `contento-bi.Contento.Usuario_conexion_bases` where Tabla = "workforce"')
    query_job = client.query(QUERY)
    rows = query_job.result()
    data = ""

    for row in rows:
        servidor = row.servidor
        usuario = row.usuario
        contrasena = row.contrasena
        data_base = row.data_base 
        tabla_bd = row.tabla_bd
    

    reload(sys)
    sys.setdefaultencoding('utf8')
    HOY = datetime.datetime.today().strftime('%Y-%m-%d')

    # Connect to the DB and fetch the records
  


    if dateini == "":    
        conn = _mssql.connect(server=servidor, user=usuario, password=contrasena, database=data_base)
        conn.execute_query('SELECT documento_neg, segmento,Iter,Fecha_Malla,Hora_Inicio,Fecha_Final,Hora_Final,logueo,Deslogueo,Dif_Inicio,Dif_Final,Ausentismo,tiempo_malla,tiempo_conexion,tiempo_conexion_tiempo,Tiempo_EstAux,Tiempo_EstAux_tiempo,tiempo_estaux_out,tiempo_estaux_out_tiempo,adherencia_malla,adherencia_tiempo,Centro_Costo,rel_unico,rel_orden FROM ' + tabla_bd + ' WHERE CONVERT(DATE, FECHA_MALLA) = CONVERT(DATE,GETDATE())') 
        cloud_storage_rows = ""

        # conn = _mssql.connect(server=servidor, user=usuario, password=contrasena, database=data_base)
        # conn.execute_query('SELECT documento_neg, segmento,Iter,Fecha_Malla,Hora_Inicio,Fecha_Final,Hora_Final,logueo,Deslogueo,Dif_Inicio,Dif_Final,Ausentismo,tiempo_malla,tiempo_conexion,tiempo_conexion_tiempo,Tiempo_EstAux,Tiempo_EstAux_tiempo,tiempo_estaux_out,tiempo_estaux_out_tiempo,adherencia_malla,adherencia_tiempo,Centro_Costo,rel_unico,rel_orden FROM ' + tabla_bd ) 
        # cloud_storage_rows = ""
    else:
        conn = _mssql.connect(server=servidor, user=usuario, password=contrasena, database=data_base)
        conn.execute_query('SELECT documento_neg, segmento,Iter,Fecha_Malla,Hora_Inicio,Fecha_Final,Hora_Final,logueo,Deslogueo,Dif_Inicio,Dif_Final,Ausentismo,tiempo_malla,tiempo_conexion,tiempo_conexion_tiempo,Tiempo_EstAux,Tiempo_EstAux_tiempo,tiempo_estaux_out,tiempo_estaux_out_tiempo,adherencia_malla,adherencia_tiempo,Centro_Costo,rel_unico,rel_orden FROM ' + tabla_bd + ' WHERE CONVERT(DATE, FECHA_MALLA)'  ' between ' + "'" + dateini + "'" +" and " + "'" + dateend + "'"  )   
        cloud_storage_rows = ""



    # Records in this table may contain embedded line breaks and semicolons
    for row in conn:
        text_row =  ""
        text_row += str(row['documento_neg']).encode('utf-8') + "|"
        text_row += str(row['segmento']).encode('utf-8') + "|"
        text_row += str(row['Iter']).encode('utf-8') + "|"
        text_row += str(row['Fecha_Malla']).encode('utf-8') + "|"
        text_row += str(row['Hora_Inicio']).encode('utf-8') + "|"
        text_row += str(row['Fecha_Final']).encode('utf-8') + "|"
        text_row += str(row['Hora_Final']).encode('utf-8') + "|"
        text_row += str(row['logueo']).encode('utf-8') + "|"
        text_row += str(row['Deslogueo']).encode('utf-8') + "|"
        text_row += str(row['Dif_Inicio']).encode('utf-8') + "|"
        text_row += str(row['Dif_Final']).encode('utf-8') + "|"
        text_row += str(row['Ausentismo']).encode('utf-8') + "|"
        text_row += str(row['tiempo_malla']).encode('utf-8') + "|"
        text_row += str(row['tiempo_conexion']).encode('utf-8') + "|"
        text_row += str(row['tiempo_conexion_tiempo']).encode('utf-8') + "|"
        text_row += str(row['Tiempo_EstAux']).encode('utf-8') + "|"
        text_row += str(row['Tiempo_EstAux_tiempo']).encode('utf-8') + "|"
        text_row += str(row['tiempo_estaux_out']).encode('utf-8') + "|"
        text_row += str(row['tiempo_estaux_out_tiempo']).encode('utf-8') + "|"
        text_row += str(row['adherencia_malla']).encode('utf-8') + "|"
        text_row += str(row['adherencia_tiempo']).encode('utf-8') + "|"
        text_row += str(row['Centro_Costo']).encode('utf-8') + "|"
        text_row += str(row['rel_unico']).encode('utf-8') + "|"
        text_row += str(row['rel_orden']).encode('utf-8')
        text_row += "\n"
        cloud_storage_rows += text_row
    conn.close()

    filename = "adherencia/workforce" + ".csv"
    # Once the local load finishes, write the data to the bucket
    gcscontroller.create_file(filename, cloud_storage_rows, "ct-workforce")

    # try:
    #     deleteQuery = "DELETE FROM `contento-bi.Workforce.Adherencia` WHERE FECHA = '" + mifecha + "'"
    try:
        if dateini == "":
            deleteQuery = 'DELETE FROM `contento-bi.Workforce.Adherencia` WHERE CAST(Fecha_Malla AS DATE) = CURRENT_DATE()'
            # deleteQuery = 'DELETE FROM `contento-bi.Workforce.Adherencia` WHERE 1=1

            client = bigquery.Client()
            query_job = client.query(deleteQuery)
            query_job.result()
        else:
            deleteQuery2 = 'DELETE FROM `contento-bi.Workforce.Adherencia` WHERE CAST(Fecha_Malla AS DATE) between ' + "'" + dateini + "'" +" and " + "'" + dateend + "'"
            client = bigquery.Client()
            query_job = client.query(deleteQuery2)
            query_job.result()            
    except Exception:
        print("no se pudo eliminar")

    # First delete all the records for that date
    
    # time.sleep(60)

    flowAnswer = workforce_beam.run()

    # time.sleep(60)
    # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-workforce')
    blob = bucket.blob("adherencia/workforce" + ".csv")
    # Delete the file referenced by the variable
    # blob.delete()
    
    # return jsonify(flowAnswer), 200
    return "data cargada" + "flowAnswer" 
Example #5
def Iti():

    dateini = request.args.get('dateini')
    dateend = request.args.get('dateend')

    if dateini is None:
        dateini = ""

    if dateend is None:
        dateend = ""

    client = bigquery.Client()
    QUERY = ('select Tabla,servidor,usuario,contrasena,data_base,tabla_bd from `contento-bi.Contento.Usuario_conexion_bases` where Tabla = "iti"')
    query_job = client.query(QUERY)
    rows = query_job.result()
    data = ""

    for row in rows:
        servidor = row.servidor
        usuario = row.usuario
        contrasena = row.contrasena
        data_base = row.data_base 
        tabla_bd = row.tabla_bd

    reload(sys)
    sys.setdefaultencoding('utf8')
    HOY = datetime.datetime.today().strftime('%Y-%m-%d')


    # Connect to the DB and fetch the records
    if dateini == "":      
        conn = _mssql.connect(server=servidor, user=usuario, password=contrasena, database=data_base)
        conn.execute_query('SELECT Id_Iti,Fecha,Hora,Centro_Costo,Peso,fecha_ejecucion,Estado FROM ' + tabla_bd  + " WHERE CONVERT(DATE, Fecha) = CONVERT(DATE,GETDATE())")
        # conn = _mssql.connect(server=servidor, user=usuario, password=contrasena, database=data_base)
        # conn.execute_query('SELECT Id_Iti,Fecha,Hora,Centro_Costo,Peso,fecha_ejecucion,Estado FROM ' + tabla_bd)
    else:
        conn = _mssql.connect(server=servidor, user=usuario, password=contrasena, database=data_base)
        conn.execute_query('SELECT Id_Iti,Fecha,Hora,Centro_Costo,Peso,fecha_ejecucion,Estado FROM ' + tabla_bd  + ' WHERE CONVERT(DATE, Fecha)'  ' between ' + "'" + dateini + "'" +" and " + "'" + dateend + "'"  )
    
    # conn = _mssql.connect(server=servidor, user=usuario, password=contrasena, database=data_base)
    # conn.execute_query('SELECT Id_Iti,Fecha,Hora,Centro_Costo,Peso,fecha_ejecucion,Estado FROM ' + tabla_bd)
    
    cloud_storage_rows = ""

    # Records in this table may contain embedded line breaks and semicolons
    for row in conn:
        text_row =  ""
        text_row += str(row['Id_Iti']).encode('utf-8') + "|"
        text_row += str(row['Fecha']).encode('utf-8') + "|"
        text_row += str(row['Hora']).encode('utf-8') + "|"
        text_row += str(row['Centro_Costo']).encode('utf-8') + "|"
        text_row += str(row['Peso']).encode('utf-8') + "|"
        text_row += str(row['fecha_ejecucion']).encode('utf-8') + "|"
        text_row += str(row['Estado']).encode('utf-8') 
        text_row += "\n"
        cloud_storage_rows += text_row
    conn.close()

    filename = "workforce/iti" + ".csv"
    # Once the local load finishes, write the data to the bucket
    gcscontroller.create_file(filename, cloud_storage_rows, "ct-workforce")
   


    try:
        if dateini == "":
            deleteQuery = 'DELETE FROM `contento-bi.Workforce.Iti` WHERE CAST(Fecha AS DATE) = CURRENT_DATE()'
            # deleteQuery = "DELETE FROM `contento-bi.Workforce.Iti` WHERE id_iti is not null"
            client = bigquery.Client()
            query_job = client.query(deleteQuery)
            query_job.result()
        else:
            deleteQuery2 = "DELETE FROM `contento-bi.Workforce.Iti` WHERE CAST(Fecha AS DATE) between " + "'" + dateini + "'" +" and " + "'" + dateend + "'"  
            # deleteQuery = "DELETE FROM `contento-bi.Workforce.Iti` WHERE id_iti is not null"
            client = bigquery.Client()
            query_job = client.query(deleteQuery2)
            query_job.result()
    except Exception:
        print("no se pudo eliminar")

    # First delete all the records for that date
    
    flowAnswer = Iti_beam.run()

    # time.sleep(60)
    # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-workforce')
    blob = bucket.blob("workforce/iti" + ".csv")
    # Delete the file referenced by the variable
    # blob.delete()
    
    # return jsonify(flowAnswer), 200
    return "data cargada" + "flowAnswer" 



################################### ITI V2 #################################################

# @workforce_api.route("/Iti_detalle")
# def Iti_detalle():
#     reload(sys)
#     sys.setdefaultencoding('utf8')
#     SERVER="BDA01\DOKIMI"
#     USER="******"
#     PASSWORD="******"
#     DATABASE="Workforce"
#     TABLE_DB ="tb_iti_detalle"
#     HOY = datetime.datetime.today().strftime('%Y-%m-%d')

#     # Connect to the DB and fetch the records
    
#     conn = _mssql.connect(server=SERVER, user=USER, password=PASSWORD, database=DATABASE)
#     conn.execute_query('SELECT  Centro_Costos,fecha_malla,"07:30:00","07:45:00","08:00:00","08:15:00","08:30:00","08:45:00","09:00:00","09:15:00","09:30:00","09:45:00","10:00:00","10:15:00","10:30:00","10:45:00","11:00:00","11:15:00","11:30:00","11:45:00","12:00:00","12:15:00","12:30:00","12:45:00","13:00:00","13:15:00","13:30:00","13:45:00","14:00:00","14:15:00","14:30:00","14:45:00","15:00:00","15:15:00","15:30:00","15:45:00","16:00:00","16:15:00","16:30:00","16:45:00","17:00:00","17:15:00","17:30:00","17:45:00","18:00:00","18:15:00","18:30:00","18:45:00","19:00:00","19:15:00","19:30:00","19:45:00","20:00:00","20:15:00","20:30:00","20:45:00","21:00:00"  FROM ' + TABLE_DB)
#     #  + " WHERE CONVERT(DATE, Fecha_malla) = CONVERT(DATE,GETDATE())")
    
    
#     cloud_storage_rows = ""

#     # Records in this table may contain embedded line breaks and semicolons
#     for row in conn:
#         text_row =  ""
#         text_row += str(row['Centro_Costos']).encode('utf-8') + "|"
#         text_row += str(row['fecha_malla']).encode('utf-8') + "|"
#         text_row += str(row['07:30:00']).encode('utf-8') + "|"
#         text_row += str(row['07:45:00']).encode('utf-8') + "|"
#         text_row += str(row['08:00:00']).encode('utf-8') + "|"
#         text_row += str(row['08:15:00']).encode('utf-8') + "|"
#         text_row += str(row['08:30:00']).encode('utf-8') + "|"
#         text_row += str(row['08:45:00']).encode('utf-8') + "|"
#         text_row += str(row['09:00:00']).encode('utf-8') + "|"
#         text_row += str(row['09:15:00']).encode('utf-8') + "|"
#         text_row += str(row['09:30:00']).encode('utf-8') + "|"
#         text_row += str(row['09:45:00']).encode('utf-8') + "|"
#         text_row += str(row['10:00:00']).encode('utf-8') + "|"
#         text_row += str(row['10:15:00']).encode('utf-8') + "|"
#         text_row += str(row['10:30:00']).encode('utf-8') + "|"
#         text_row += str(row['10:45:00']).encode('utf-8') + "|"
#         text_row += str(row['11:00:00']).encode('utf-8') + "|"
#         text_row += str(row['11:15:00']).encode('utf-8') + "|"
#         text_row += str(row['11:30:00']).encode('utf-8') + "|"
#         text_row += str(row['11:45:00']).encode('utf-8') + "|"
#         text_row += str(row['12:00:00']).encode('utf-8') + "|"
#         text_row += str(row['12:15:00']).encode('utf-8') + "|"
#         text_row += str(row['12:30:00']).encode('utf-8') + "|"
#         text_row += str(row['12:45:00']).encode('utf-8') + "|"
#         text_row += str(row['13:00:00']).encode('utf-8') + "|"
#         text_row += str(row['13:15:00']).encode('utf-8') + "|"
#         text_row += str(row['13:30:00']).encode('utf-8') + "|"
#         text_row += str(row['13:45:00']).encode('utf-8') + "|"
#         text_row += str(row['14:00:00']).encode('utf-8') + "|"
#         text_row += str(row['14:15:00']).encode('utf-8') + "|"
#         text_row += str(row['14:30:00']).encode('utf-8') + "|"
#         text_row += str(row['14:45:00']).encode('utf-8') + "|"
#         text_row += str(row['15:00:00']).encode('utf-8') + "|"
#         text_row += str(row['15:15:00']).encode('utf-8') + "|"
#         text_row += str(row['15:30:00']).encode('utf-8') + "|"
#         text_row += str(row['15:45:00']).encode('utf-8') + "|"
#         text_row += str(row['16:00:00']).encode('utf-8') + "|"
#         text_row += str(row['16:15:00']).encode('utf-8') + "|"
#         text_row += str(row['16:30:00']).encode('utf-8') + "|"
#         text_row += str(row['16:45:00']).encode('utf-8') + "|"
#         text_row += str(row['17:00:00']).encode('utf-8') + "|"
#         text_row += str(row['17:15:00']).encode('utf-8') + "|"
#         text_row += str(row['17:30:00']).encode('utf-8') + "|"
#         text_row += str(row['17:45:00']).encode('utf-8') + "|"
#         text_row += str(row['18:00:00']).encode('utf-8') + "|"
#         text_row += str(row['18:15:00']).encode('utf-8') + "|"
#         text_row += str(row['18:30:00']).encode('utf-8') + "|"
#         text_row += str(row['18:45:00']).encode('utf-8') + "|"
#         text_row += str(row['19:00:00']).encode('utf-8') + "|"
#         text_row += str(row['19:15:00']).encode('utf-8') + "|"
#         text_row += str(row['19:30:00']).encode('utf-8') + "|"
#         text_row += str(row['19:45:00']).encode('utf-8') + "|"
#         text_row += str(row['20:00:00']).encode('utf-8') + "|"
#         text_row += str(row['20:15:00']).encode('utf-8') + "|"
#         text_row += str(row['20:30:00']).encode('utf-8') + "|"
#         text_row += str(row['20:45:00']).encode('utf-8') + "|"
#         text_row += str(row['21:00:00']).encode('utf-8') + "|"
#         text_row += "\n"
#         cloud_storage_rows += text_row
#     conn.close()


#     filename = "workforce/iti_detalle" + ".csv"
#     # Once the local load finishes, write the data to the bucket
#     gcscontroller.create_file(filename, cloud_storage_rows, "ct-workforce")


#     try:
#         # deleteQuery = 'DELETE FROM `contento-bi.Workforce.Iti.detalle` WHERE CAST(Fecha_malla AS DATE) = CURRENT_DATE()'

#         deleteQuery = "DELETE FROM `contento-bi.Workforce.Iti_detalle` WHERE Centro_Costos is not null"
#         client = bigquery.Client()
#         query_job = client.query(deleteQuery)
#         query_job.result()
#     except:
#         print("no se pudo eliminar")

#     # First delete all the records for that date
    
#     flowAnswer = Iti_detalle_beam.run()

#     # time.sleep(60)
#     # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
#     storage_client = storage.Client()
#     bucket = storage_client.get_bucket('ct-workforce')
#     blob = bucket.blob("workforce/iti_detalle" + ".csv")
#     # Delete the file referenced by the variable
#     blob.delete()
    
#     # return jsonify(flowAnswer), 200
#     return "data cargada" + "flowAnswer" 
Example #6
def bd():

    client = bigquery.Client()
    QUERY = (
        'select string_field_0, string_field_1, string_field_2, string_field_3, string_field_4 from `contento-bi.sensus.db_conn`'
    )
    query_job = client.query(QUERY)
    rows = query_job.result()
    data = ""

    for row in rows:
        servidor = row.string_field_0
        usuario = row.string_field_1
        contrasena = row.string_field_2
        data_base = row.string_field_3
        tabla_bd = row.string_field_4

    reload(sys)
    sys.setdefaultencoding('utf8')
    SERVER = servidor
    USER = usuario
    PASSWORD = contrasena
    DATABASE = data_base
    TABLE_DB = tabla_bd

    HOY = datetime.datetime.today().strftime('%Y-%m-%d')

    # Connect to the DB and fetch the records
    conn = _mssql.connect(server=SERVER,
                          user=USER,
                          password=PASSWORD,
                          database=DATABASE)
    conn.execute_query(
        'SELECT Nombre_Guia, Id_resultado, fecha_registro, Doc_Asesor, nombres, doc_team, Nombre_team_leader, estado_aseg, estado_alarma, reagendamiento,Descripcion,Asignacion FROM '
        + TABLE_DB)
    # conn.execute_query('SELECT Id_Gestion,Id_Causal,Fecha_Seguimiento,Id_Usuario,Valor_Obligacion,Id_Docdeu, Nota FROM ' + TABLE_DB + ' where CAST(Fecha_Seguimiento AS date) >= CAST(' + "'2019-02-01' as DATE) ")

    cloud_storage_rows = ""

    # Records in this table may contain embedded line breaks and semicolons
    for row in conn:
        text_row = ""
        text_row += str(row['Nombre_Guia']).encode('utf-8') + "|"
        text_row += str(row['Id_resultado']).encode('utf-8') + "|"
        text_row += str(row['fecha_registro']).encode('utf-8') + "|"
        text_row += str(row['Doc_Asesor']).encode('utf-8') + "|"
        text_row += str(row['nombres']).encode('utf-8') + "|"
        text_row += str(row['doc_team']).encode('utf-8') + "|"
        text_row += str(row['Nombre_team_leader']).encode('utf-8') + "|"
        text_row += str(row['estado_aseg']).encode('utf-8') + "|"
        text_row += str(row['estado_alarma']).encode('utf-8') + "|"
        text_row += str(row['reagendamiento']).encode('utf-8') + "|"
        text_row += str(row['Descripcion']).encode('utf-8') + "|"
        text_row += str(row['Asignacion']).encode('utf-8') + "|"
        text_row += "\n"
        cloud_storage_rows += text_row
    conn.close()

    print("Estos son los datos: ")

    filename = "Segmento/alarmas" + ".csv"
    # Once the local load finishes, write the data to the bucket
    gcscontroller.create_file(filename, cloud_storage_rows, "ct-sensus")

    try:
        deleteQuery = "DELETE FROM `contento-bi.sensus.bd_alarmas` WHERE CAST(SUBSTR(Fecha_registro,0,10) AS DATE) = DATE_ADD(CURRENT_DATE(),INTERVAL -1 DAY)"
        client = bigquery.Client()
        query_job = client.query(deleteQuery)
        query_job.result()
    except Exception:
        print("no se pudo eliminar")

    # First delete all the records for that date

    time.sleep(60)

    flowAnswer = alarmas_beam.run()

    time.sleep(600)
    # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-sensus')
    blob = bucket.blob("Segmento/alarmas" + ".csv")
    # Delete the file referenced by the variable
    blob.delete()

    # return jsonify(flowAnswer), 200
    return "X" + "flowAnswer"
Example #7
def tof_fanalca():

    import sys
    reload(sys)

    SERVER = "192.168.20.63\DOKIMI"
    USER = "******"
    PASSWORD = "******"
    DATABASE = "Fanalca_Agendamientos"
    FECHA_CARGUE = datetime.date.today()
    AHORA = FECHA_CARGUE.strftime("%Y-%m-%d")

    filename = DATABASE + str(FECHA_CARGUE) + ".csv"
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-tech-tof')
    blob = bucket.blob(filename)
    client = bigquery.Client()

    try:
        blob.delete()  # Delete the file from storage
    except Exception:
        print("Eliminado de storage")

    try:
        QUERY = (
            "delete FROM `contento-bi.Contento_Tech.Consolidado_TOF` where Fecha_Cargue = '"
            + AHORA + "'")
        query_job = client.query(QUERY)
        rows2 = query_job.result()
    except Exception:
        print("Eliminado de bigquery")

    # Connect to the DB and fetch the records
    conn = _mssql.connect(server=SERVER,
                          user=USER,
                          password=PASSWORD,
                          database=DATABASE)
    conn.execute_query(
        "SELECT * FROM Fanalca_Agendamientos.dbo.Fanalca_Agendamientos where CAST(Fecha_Gestion AS DATE) = CAST(GETDATE() AS DATE)"
    )

    cloud_storage_rows = ""
    for row in conn:
        text_row = ""
        text_row += '' + "|" if str(row[0]).encode('utf-8') is None else str(
            row[0]).encode('utf-8') + "|"
        text_row += '' + "|" if row[1].encode(
            'utf-8') is None else row[1].encode('utf-8') + "|"
        text_row += '' + "|" if row[2].encode(
            'ascii', 'ignore').decode('ascii') is None else row[2].encode(
                'ascii', 'ignore').decode('ascii') + "|"
        text_row += '' + "|" if str(row[3]).encode('utf-8') is None else str(
            row[3]).encode('utf-8') + "|"
        text_row += '' + "|" if str(row[4]).encode('utf-8') is None else str(
            row[4]).encode('utf-8') + "|"
        text_row += '' + "|" if str(row[5]).encode('utf-8') is None else str(
            row[5]).encode('utf-8') + "|"
        text_row += '' + "|" if str(row[6]).encode('utf-8') is None else str(
            row[6]).encode('utf-8') + "|"
        text_row += '' + "|" if str(row[7]).encode('utf-8') is None else str(
            row[7]).encode('utf-8') + "|"
        text_row += '' + "|" if str(row[8]).encode('utf-8') is None else str(
            row[8]).encode('utf-8') + "|"
        text_row += '' + "|" if str(row[9]).encode('utf-8') is None else str(
            row[9]).encode('utf-8') + "|"
        text_row += '' + "|" if str(row[10]).encode('utf-8') is None else str(
            row[10]).encode('utf-8') + "|"
        text_row += '' + "|" if str(row[11]).encode('utf-8') is None else str(
            row[11]).encode('utf-8') + "|"
        text_row += '' + "|" if row[12].encode(
            'ascii', 'ignore').decode('ascii') is None else row[12].encode(
                'ascii', 'ignore').decode('ascii') + "|"
        text_row += '' + "|" if row[14].encode(
            'ascii', 'ignore').decode('ascii') is None else row[14].encode(
                'ascii', 'ignore').decode('ascii') + "|"
        text_row += '' + "|" if str(row[15]).encode('utf-8') is None else str(
            row[15]).encode('utf-8') + "|"
        text_row += '' + "|" if str(row[16]).encode('utf-8') is None else str(
            row[16]).encode('utf-8') + "|"
        text_row += '' + "|" if str(row[17]).encode('utf-8') is None else str(
            row[17]).encode('utf-8') + "\n"

        cloud_storage_rows += text_row

    gcscontroller.create_file(
        filename, cloud_storage_rows,
        "ct-tech-tof")  # Revisar problema con las subcarpetas
    flowAnswer = fanalca_agendamientos_beam.run(filename)

    conn.close()
    return flowAnswer
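
The inline conditionals in the loop above never take their empty branch: str(row[i]).encode('utf-8') cannot return None, so NULL columns either serialize as the string "None" or, for the bare .encode(...) calls, raise AttributeError. A sketch of the presumably intended null handling (field is a hypothetical helper):

def field(value):
    # Return an empty string for NULL columns, otherwise UTF-8 bytes.
    if value is None:
        return ''
    return str(value).encode('utf-8')

Each fragment would then read text_row += field(row[0]) + "|", and the None checks above become unnecessary.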
Example #8
def categoria():

    client = bigquery.Client()
    QUERY = (
        'select Tabla,servidor,usuario,contrasena,data_base,tabla_bd from `contento-bi.Contento.Usuario_conexion_bases` where Tabla = "Clima_categoria"'
    )
    query_job = client.query(QUERY)
    rows = query_job.result()
    data = ""

    for row in rows:
        servidor = row.servidor
        usuario = row.usuario
        contrasena = row.contrasena
        data_base = row.data_base
        tabla_bd = row.tabla_bd

    reload(sys)
    sys.setdefaultencoding('utf8')
    HOY = datetime.datetime.today().strftime('%Y-%m-%d')

    # Connect to the DB and fetch the records
    conn = _mssql.connect(server=servidor,
                          user=usuario,
                          password=contrasena,
                          database=data_base)
    conn.execute_query('SELECT id_categoria,desc_categoria FROM ' + tabla_bd)

    cloud_storage_rows = ""

    # Records in this table may contain embedded line breaks and semicolons
    for row in conn:
        text_row = ""
        text_row += str(row['id_categoria']).encode('utf-8') + "|"
        text_row += str(row['desc_categoria']).encode('utf-8')
        text_row += "\n"
        cloud_storage_rows += text_row
    conn.close()

    filename = "Clima/categoria" + ".csv"
    # Once the local load finishes, write the data to the bucket
    gcscontroller.create_file(filename, cloud_storage_rows,
                              "ct-felicidad_y_cultura")

    try:
        deleteQuery = "DELETE FROM `contento-bi.Felicidad_y_Cultura.Categoria` WHERE 1=1"
        client = bigquery.Client()
        query_job = client.query(deleteQuery)
        query_job.result()
    except Exception:
        print("no se pudo eliminar")

    # First delete all the records for that date

    # time.sleep(60)

    flowAnswer = tabla_categoria_beam.run()

    # time.sleep(60)
    # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-felicidad_y_cultura')
    blob = bucket.blob("Clima/categoria" + ".csv")
    # Delete the file referenced by the variable
    blob.delete()

    return " Cargue exitoso Tabla de categoria" + "flowAnswer"
Example #9
def bancosac():

    import sys
    reload(sys)

    SERVER = "192.168.20.63\DELTA"
    USER = "******"
    PASSWORD = "******"
    DATABASE = "CORR3WHPSXV"
    FECHA_CARGUE = datetime.date.today()
    AHORA = FECHA_CARGUE.strftime("%Y-%m-%d")

    filename = DATABASE + str(FECHA_CARGUE) + ".csv"
    sub_path = 'bancolombiaSAC/'
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-tech-tof')
    blob = bucket.blob(sub_path + filename)
    client = bigquery.Client()

    try:
        blob.delete()  # Delete the file from storage
    except Exception:
        print("Eliminado de storage")

    try:
        QUERY = (
            "delete FROM `contento-bi.Contento_Tech.Gestiones_BancoSAC` where SUBSTR(Fecha_Gestion,0,10) = '"
            + AHORA + "'")
        query_job = client.query(QUERY)
        rows2 = query_job.result()
    except Exception:
        print("Eliminado de bigquery")

    # Connect to the DB and fetch the records
    conn = _mssql.connect(server=SERVER,
                          user=USER,
                          password=PASSWORD,
                          database=DATABASE)
    conn.execute_query(
        "SELECT * FROM CORR3WHPSXV.dbo.CRM_TO_GOOGLE where CAST(Fecha_Gestion AS DATE) < '"
        + AHORA + "'")

    cloud_storage_rows = ""
    for row in conn:
        text_row = ""
        text_row += '' + "|" if str(row['Id_Gestion']).encode(
            'utf-8') is None else str(row['Id_Gestion']).encode('utf-8') + "|"
        text_row += '' + "|" if str(
            row['Id_Cod_Gestion']).encode('utf-8') is None else str(
                row['Id_Cod_Gestion']).encode('utf-8') + "|"
        text_row += '' + "|" if row['Nombre_Codigo'].encode(
            'utf-8') is None else row['Nombre_Codigo'].encode('utf-8') + "|"
        # text_row += '' + "|" if row['Observacion'].encode('utf-8') is None else row['Observacion'].encode('utf-8') + "|"
        text_row += '' + "|" if str(
            row['Fecha_Gestion']).encode('utf-8') is None else str(
                row['Fecha_Gestion']).encode('utf-8') + "|"
        text_row += '' + "|" if row['Usuario_gestion'].encode(
            'utf-8') is None else row['Usuario_gestion'].encode('utf-8') + "|"
        text_row += '' + "|" if row['Documento'].encode(
            'utf-8') is None else row['Documento'].encode('utf-8') + "|"
        text_row += '' + "|" if row['Num_Obligacion'].encode(
            'utf-8') is None else row['Num_Obligacion'].encode('utf-8') + "|"
        text_row += '' + "|" if str(row['Id_Campana']).encode(
            'utf-8') is None else str(row['Id_Campana']).encode('utf-8') + "|"
        text_row += '' + "|" if row['Nombre_Campana'].encode(
            'utf-8') is None else row['Nombre_Campana'].encode('utf-8') + "\n"

        cloud_storage_rows += text_row

    gcscontroller.create_file(
        sub_path + filename, cloud_storage_rows,
        "ct-tech-tof")  # Revisar problema con las subcarpetas
    flowAnswer = bancoSAC_beam.run(filename)

    conn.close()
    return flowAnswer
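
The same always-false None checks appear here with keyed access; the field helper sketched after Example #7 covers this case as well, e.g. text_row += field(row['Id_Gestion']) + "|". Also worth noting: the file is uploaded as sub_path + filename, while bancoSAC_beam.run(filename) receives the bare filename.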
Example #10
def bd():
    reload(sys)
    sys.setdefaultencoding('utf8')
    SERVER = "192.168.20.63\delta"
    USER = "******"
    PASSWORD = "******"
    DATABASE = "sensus_copc"
    TABLE_DB = "dbo.dwh_historico_lado_lado"
    HOY = datetime.datetime.today().strftime('%Y-%m-%d')

    # Connect to the DB and fetch the records
    conn = _mssql.connect(server=SERVER,
                          user=USER,
                          password=PASSWORD,
                          database=DATABASE)
    conn.execute_query(
        'SELECT id_lal, centro_c, negociador, nombres_neg, producto, doc_team, nombres_team, doc_ejec, nombre_ejecutivo, doc_ger,Nombre_gerente,id_call,evualuador,nombres,fecha_registro,hora_registro,cumple,cierre_detalle,compromiso FROM '
        + TABLE_DB +
        ' WHERE CONVERT(DATE,fecha_registro) = CONVERT(DATE,GETDATE()-1)')
    # conn.execute_query('SELECT Id_Gestion,Id_Causal,Fecha_Seguimiento,Id_Usuario,Valor_Obligacion,Id_Docdeu, Nota FROM ' + TABLE_DB + ' where CAST(Fecha_Seguimiento AS date) >= CAST(' + "'2019-02-01' as DATE) ")

    cloud_storage_rows = ""

    # Records in this table may contain embedded line breaks and semicolons
    for row in conn:
        text_row = ""
        text_row += str(row['id_lal']).encode('utf-8') + "|"
        text_row += str(row['centro_c']).encode('utf-8') + "|"
        text_row += str(row['negociador']).encode('utf-8') + "|"
        text_row += str(row['nombres_neg']).encode('utf-8') + "|"
        text_row += str(row['producto']).encode('utf-8') + "|"
        text_row += str(row['doc_team']).encode('utf-8') + "|"
        text_row += str(row['nombres_team']).encode('utf-8') + "|"
        text_row += str(row['doc_ejec']).encode('utf-8') + "|"
        text_row += str(row['nombre_ejecutivo']).encode('utf-8') + "|"
        text_row += str(row['doc_ger']).encode('utf-8') + "|"
        text_row += str(row['Nombre_gerente']).encode('utf-8') + "|"
        text_row += str(row['id_call']).encode('utf-8') + "|"
        text_row += str(row['evualuador']).encode('utf-8') + "|"
        text_row += str(row['nombres']).encode('utf-8') + "|"
        text_row += str(row['fecha_registro']).encode('utf-8') + "|"
        text_row += str(row['hora_registro']).encode('utf-8') + "|"
        text_row += str(row['cumple']).encode('utf-8') + "|"
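        # Note: cierre_detalle and compromiso are selected in the query
        # above but never serialized, so the CSV row ends at cumple.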
        text_row += "\n"
        cloud_storage_rows += text_row
    conn.close()

    print("Estos son los datos: ")

    filename = "Segmento/bd_lal" + ".csv"
    # Once the local load finishes, write the data to the bucket
    gcscontroller.create_file(filename, cloud_storage_rows, "ct-sensus")

    try:
        deleteQuery = "DELETE FROM `contento-bi.sensus.bd_lal` WHERE CAST(SUBSTR(Fecha_registro,0,10) AS DATE) = DATE_ADD(CURRENT_DATE(),INTERVAL -1 DAY)"
        client = bigquery.Client()
        query_job = client.query(deleteQuery)
        query_job.result()
    except Exception:
        print("no se pudo eliminar")

    # First delete all the records for that date

    time.sleep(60)

    flowAnswer = lal_beam.run()

    time.sleep(600)
    # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-sensus')
    blob = bucket.blob("Segmento/bd_lal" + ".csv")
    # Delete the file referenced by the variable
    blob.delete()

    # return jsonify(flowAnswer), 200
    return "X" + "flowAnswer"
Example #11
def data_uni():
    reload(sys)
    sys.setdefaultencoding('utf8')
    SERVER="192.168.20.63\MV"
    USER="******"
    PASSWORD="******"
    DATABASE="Mirror_UN1002XZCVBN"
    TABLE_DB = "dbo.Tb_Data"
    HOY = datetime.datetime.today().strftime('%Y-%m-%d')

    # Connect to the DB and fetch the records
    conn = _mssql.connect(server=SERVER, user=USER, password=PASSWORD, database=DATABASE)
    conn.execute_query('SELECT Id_Data,Id_Campana,Zona,Documento,Cod_Interno,Tipo_Comprador,Customer_Class,Cupo,Num_Obligacion,Vlr_Factura,Fecha_Factura,Fecha_Vencimiento,Vlr_Saldo_Cartera,Dias_vencimiento,Campana_Orig,Ult_Campana,Codigo,Abogado,Division,Pais,Fecha_Prox_Conferencia,Cod_Gestion,Fecha_Gestion,Fecha_Promesa_Pago,Actividad_Economica,Saldo_Capital,Num_Cuotas,Num_Cuotas_Pagadas,Num_Cuotas_Faltantes,Num_Cuotas_Mora,Cant_Veces_Mora,Fecha_Ult_Pago,Saldo_Total_Vencido,Cod_Consecionario,Concesionario,Cod_Gestion_Ant,Grabador,Estado,Fecha_Cargue,Usuario_Cargue,Interes_Mora,Vlr_Cuotas_Vencidas FROM ' + TABLE_DB )
    # conn.execute_query('SELECT Id_Gestion,Id_Causal,Fecha_Seguimiento,Id_Usuario,Valor_Obligacion,Id_Docdeu, Nota FROM ' + TABLE_DB + ' where CAST(Fecha_Seguimiento AS date) >= CAST(' + "'2019-02-01' as DATE) ")
 
    cloud_storage_rows = ""

    # Records in this table may contain embedded line breaks and semicolons
    for row in conn:
        text_row =  ""
        text_row += str(row['Id_Data']).encode('utf-8') + "|"
        text_row += str(row['Id_Campana']).encode('utf-8') + "|" 
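        # Note: Zona is selected in the query above but never serialized,
        # leaving this row one field short of the SELECT list.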
        text_row += str(row['Documento']).encode('utf-8') + "|" 
        text_row += str(row['Cod_Interno']).encode('utf-8') + "|" 
        text_row += str(row['Tipo_Comprador']).encode('utf-8') + "|" 
        text_row += str(row['Customer_Class']).encode('utf-8') + "|" 
        text_row += str(row['Cupo']).encode('utf-8') + "|" 
        text_row += str(row['Num_Obligacion']).encode('utf-8') + "|" 
        text_row += str(row['Vlr_Factura']).encode('utf-8') + "|" 
        text_row += str(row['Fecha_Factura']).encode('utf-8') + "|" 
        text_row += str(row['Fecha_Vencimiento']).encode('utf-8') + "|" 
        text_row += str(row['Vlr_Saldo_Cartera']).encode('utf-8') + "|" 
        text_row += str(row['Dias_vencimiento']).encode('utf-8') + "|" 
        text_row += str(row['Campana_Orig']).encode('utf-8') + "|" 
        text_row += str(row['Ult_Campana']).encode('utf-8') + "|" 
        text_row += str(row['Codigo']).encode('utf-8') + "|" 
        text_row += str(row['Abogado']).encode('utf-8') + "|" 
        text_row += str(row['Division']).encode('utf-8') + "|" 
        text_row += str(row['Pais']).encode('utf-8') + "|" 
        text_row += str(row['Fecha_Prox_Conferencia']).encode('utf-8') + "|" 
        text_row += str(row['Cod_Gestion']).encode('utf-8') + "|" 
        text_row += str(row['Fecha_Gestion']).encode('utf-8') + "|" 
        text_row += str(row['Fecha_Promesa_Pago']).encode('utf-8') + "|" 
        text_row += str(row['Actividad_Economica']).encode('utf-8') + "|" 
        text_row += str(row['Saldo_Capital']).encode('utf-8') + "|" 
        text_row += str(row['Num_Cuotas']).encode('utf-8') + "|" 
        text_row += str(row['Num_Cuotas_Pagadas']).encode('utf-8') + "|" 
        text_row += str(row['Num_Cuotas_Faltantes']).encode('utf-8') + "|" 
        text_row += str(row['Num_Cuotas_Mora']).encode('utf-8') + "|" 
        text_row += str(row['Cant_Veces_Mora']).encode('utf-8') + "|" 
        text_row += str(row['Fecha_Ult_Pago']).encode('utf-8') + "|" 
        text_row += str(row['Saldo_Total_Vencido']).encode('utf-8') + "|" 
        text_row += str(row['Cod_Consecionario']).encode('utf-8') + "|" 
        text_row += str(row['Concesionario']).encode('utf-8') + "|" 
        text_row += str(row['Cod_Gestion_Ant']).encode('utf-8') + "|" 
        text_row += str(row['Grabador']).encode('utf-8') + "|" 
        text_row += str(row['Estado']).encode('utf-8') + "|" 
        text_row += str(row['Fecha_Cargue']).encode('utf-8') + "|" 
        text_row += str(row['Usuario_Cargue']).encode('utf-8') + "|" 
        text_row += str(row['Interes_Mora']).encode('utf-8') + "|" 
        text_row += str(row['Vlr_Cuotas_Vencidas']).encode('utf-8') + "|" 
        text_row += "\n"
        cloud_storage_rows += text_row
        
    conn.close()
    
    filename = "data_unificadas/Unificadas_data" + ".csv"
    # Once the local load finishes, write the data to the bucket
    gcscontroller.create_file(filename, cloud_storage_rows, "ct-unificadas")

    try:
        deleteQuery = "DELETE FROM `contento-bi.unificadas.Data` WHERE CAST(SUBSTR(Fecha_Creacion,0,10) AS DATE) = CURRENT_DATE()"
        client = bigquery.Client()
        query_job = client.query(deleteQuery)
        query_job.result()
    except Exception:
        print("no se pudo eliminar")

    # First delete all the records for that date
    
    # time.sleep(60)
    
    flowAnswer = unificadas_data_beam.run()
  
    # time.sleep(180)
    # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-unificadas')
    blob = bucket.blob("data_unificadas/Unificadas_data" + ".csv")
    # Delete the file referenced by the variable
    blob.delete()
    
    # return jsonify(flowAnswer), 200
    return "data cargada " + "flowAnswer" 
Example #12
def gestiones():
    reload(sys)
    sys.setdefaultencoding('utf8')
    SERVER="192.168.20.63\MV"
    USER="******"
    PASSWORD="******"
    DATABASE="Mirror_UN1002XZCVBN"
    TABLE_DB = "dbo.Tb_Gestion"
    HOY = datetime.datetime.today().strftime('%Y-%m-%d')

    # Connect to the DB and fetch the records
    conn = _mssql.connect(server=SERVER, user=USER, password=PASSWORD, database=DATABASE)
    # conn.execute_query('SELECT Id_Gestion,Documento,Num_Obligacion,Id_Data,Id_Cod_Gestion,Id_Cod_Causal,Id_Cod_SubCausal,Id_Bot,Vlr_Promesa,Fecha_Promesa,Usuario_Gestor,Fecha_Gestion FROM ' + TABLE_DB + ' where CAST(Fecha_Gestion AS date) = CAST(GETDATE() as DATE) ')

    conn.execute_query("SELECT Id_Gestion,Documento,Num_Obligacion,Id_Campana,Id_Segmento,Id_Cod_Gestion,Id_Cod_Causal,Id_Cod_SubCausal,Vlr_Promesa,Fecha_Promesa,Num_Cuotas,Telefono,Fecha_Gestion,Usuario_Gestor,Opt_1,Opt_2,Opt_3,Opt_4,Opt_5,Cuadrante,Modalidad_Pago FROM " + TABLE_DB + " WHERE CONVERT(DATE, Fecha_Gestion) = CONVERT(DATE,GETDATE())")
    
    # conn.execute_query("SELECT Id_Gestion,Documento,Num_Obligacion,Id_Campana,Id_Segmento,Id_Cod_Gestion,Id_Cod_Causal,Id_Cod_SubCausal,Vlr_Promesa,Fecha_Promesa,Num_Cuotas,Telefono,Fecha_Gestion,Usuario_Gestor,Opt_1,Opt_2,Opt_3,Opt_4,Opt_5,Cuadrante,Modalidad_Pago FROM " + TABLE_DB + " WHERE CONVERT(DATE, Fecha_Gestion) = CAST('2020-07-04' AS DATE)")

    # conn.execute_query('SELECT Id_Gestion,Id_Causal,Fecha_Seguimiento,Id_Usuario,Valor_Obligacion,Id_Docdeu, Nota FROM ' + TABLE_DB + ' where CAST(Fecha_Seguimiento AS date) >= CAST(' + "'2019-02-01' as DATE) ")

    cloud_storage_rows = ""

    # Records in this table may contain embedded line breaks and semicolons
    for row in conn:
        text_row =  ""
        text_row += str(row['Id_Gestion']).encode('utf-8') + "|"
        text_row += str(row['Documento']).encode('utf-8') + "|"
        text_row += str(row['Num_Obligacion']).encode('utf-8') + "|"
        text_row += str(row['Id_Campana']).encode('utf-8') + "|"
        text_row += str(row['Id_Segmento']).encode('utf-8') + "|"
        text_row += str(row['Id_Cod_Gestion']).encode('utf-8') + "|"
        text_row += str(row['Id_Cod_Causal']).encode('utf-8') + "|"
        text_row += str(row['Id_Cod_SubCausal']).encode('utf-8') + "|"
        # text_row += str(row['Observacion']).encode('utf-8') + "|"
        text_row += str(row['Vlr_Promesa']).encode('utf-8') + "|"
        text_row += str(row['Fecha_Promesa']).encode('utf-8') + "|"
        text_row += str(row['Num_Cuotas']).encode('utf-8') + "|"
        text_row += str(row['Telefono']).encode('utf-8') + "|"
        text_row += str(row['Fecha_Gestion']).encode('utf-8') + "|"
        text_row += str(row['Usuario_Gestor']).encode('utf-8') + "|"
        text_row += str(row['Opt_1']).encode('utf-8') + "|"
        text_row += str(row['Opt_2']).encode('utf-8') + "|"
        text_row += str(row['Opt_3']).encode('utf-8') + "|"
        text_row += str(row['Opt_4']).encode('utf-8') + "|"
        text_row += str(row['Opt_5']).encode('utf-8') + "|"
        text_row += str(row['Cuadrante']).encode('utf-8') + "|"
        text_row += str(row['Modalidad_Pago']).encode('utf-8') + "|"
        text_row += "\n"
        cloud_storage_rows += text_row
    conn.close()

    filename = "gestiones/Unificadas_gestiones" + ".csv"
    # Once the local load finishes, write the data to the bucket
    gcscontroller.create_file(filename, cloud_storage_rows, "ct-unificadas")

    try:
        deleteQuery = "DELETE FROM `contento-bi.unificadas.Gestiones` WHERE CAST(SUBSTR(Fecha_Gestion,0,10) AS DATE) = CURRENT_DATE()"
        # deleteQuery = "DELETE FROM `contento-bi.unificadas.Gestiones` WHERE CAST(SUBSTR(Fecha_Gestion,0,10) AS DATE) = '2020-07-04'"
        client = bigquery.Client()
        query_job = client.query(deleteQuery)
        query_job.result()
    except Exception:
        print("no se pudo eliminar")

    # First delete all the records for that date
    
    # time.sleep(60)

    flowAnswer = unificadas_gestiones_beam.run()

    # time.sleep(60)
    # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-unificadas')
    blob = bucket.blob("gestiones/Unificadas_gestiones" + ".csv")
    # Delete the file referenced by the variable
    blob.delete()
    
    # return jsonify(flowAnswer), 200
    return "gestiones cargadas" + "flowAnswer" 
Example #13
def seguimiento_aut():

    reload(sys)
    sys.setdefaultencoding('utf8')
    SERVER = "192.168.20.63\DELTA"
    USER = "******"
    PASSWORD = "******"
    DATABASE = "Refinancia"
    TABLE_DB = "dbo.Tb_Seguimiento"
    HOY = datetime.datetime.today().strftime('%Y-%m-%d')

    # Connect to the DB and fetch the records
    conn = _mssql.connect(server=SERVER,
                          user=USER,
                          password=PASSWORD,
                          database=DATABASE)
    # conn.execute_query('SELECT Id_seguimiento,Id_docdeu,Id_gestion,Id_causal,fecha_seguimiento,Nota,Id_usuario,Valor_Saldo_Total,numero_contac FROM ' + TABLE_DB  + ' where CAST(fecha_seguimiento AS date) = CAST(GETDATE() as DATE) ')
    conn.execute_query(
        'SELECT Id_seguimiento,Id_docdeu,Id_gestion,Id_causal,fecha_seguimiento,Nota,Id_usuario,Valor_Saldo_Total,numero_contac FROM '
        + TABLE_DB + ' where CAST(fecha_seguimiento AS date) > CAST(' +
        "'2020-09-16' as DATE) ")
    # conn.execute_query('SELECT Id_Gestion,Id_Causal,Fecha_Seguimiento,Id_Usuario,Valor_Obligacion,Id_Docdeu, Nota FROM ' + TABLE_DB + ' where CAST(Fecha_Seguimiento AS date) >= CAST(' + "'2019-02-01' as DATE) ")

    cloud_storage_rows = ""

    # Records in this table may contain embedded line breaks and semicolons
    for row in conn:
        Nota = str(row['Nota']).replace('\r', '').replace('\n', '')
        text_row = ""
        text_row += str(row['Id_seguimiento']) + "|"
        text_row += str(row['Id_docdeu']).encode('utf-8') + "|"
        text_row += str(row['Id_gestion']).encode('utf-8') + "|"
        text_row += str(row['Id_causal']).encode('utf-8') + "|"
        text_row += str(row['fecha_seguimiento']).encode('utf-8') + "|"
        text_row += str(row['Id_usuario']).encode('utf-8') + "|"
        text_row += str(row['Valor_Saldo_Total']).encode('utf-8') + "|"
        text_row += str(row['numero_contac']).encode('utf-8') + "|"

        if Nota is None:
            text_row += "" + "|"
        elif Nota.find("|") >= 0:
            text_row += Nota.replace("|", "*") + "|"
        else:
            text_row += Nota + "|"

        text_row += "\n"
        cloud_storage_rows += text_row
    conn.close()

    filename = "seguimiento_aut/Refinancia_Seguimiento_aut" + ".csv"
    # Once the local load finishes, write the data to the bucket
    gcscontroller.create_file(filename, cloud_storage_rows, "ct-refinancia")

    try:
        deleteQuery = "DELETE FROM `contento-bi.refinancia.seguimiento_aut` WHERE CAST(SUBSTR(fecha_seguimiento,0,10) AS DATE) = CURRENT_DATE()"
        client = bigquery.Client()
        query_job = client.query(deleteQuery)
        query_job.result()
    except Exception:
        print("no se pudo eliminar")

    # First delete all the records for that date

    time.sleep(30)

    flowAnswer = refinancia_seguimiento_aut_beam.run()

    time.sleep(40)
    # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-refinancia')
    blob = bucket.blob("seguimiento_aut/Refinancia_Seguimiento_aut" + ".csv")
    # Delete the file referenced by the variable
    blob.delete()

    # return jsonify(flowAnswer), 200
    return "X" + "flowAnswer"
Example #14
def prejuridico():
    SERVER = "192.168.20.63\DELTA"
    USER = "******"
    PASSWORD = "******"
    DATABASE = "Avon"
    TABLE_DB = "dbo.Tb_Docdeu"
    FECHA_CARGUE = str(datetime.date.today())
    Fecha = datetime.datetime.today().strftime('%Y-%m-%d')
    # Fecha = "2018-12-20"
    filename = "prejuridico/Avon_inf_prej_" + FECHA_CARGUE + ".csv"
    Ruta = ("/192.168.20.87", "media")[socket.gethostname() == "contentobi"]
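    # Boolean tuple indexing: picks "media" when the host is "contentobi",
    # otherwise the network share path. An equivalent conditional form:
    #   Ruta = "media" if socket.gethostname() == "contentobi" else "/192.168.20.87"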
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('ct-avon')
    blob = bucket.blob(filename)

    # Connect to the DB and fetch the records
    conn = _mssql.connect(server=SERVER,
                          user=USER,
                          password=PASSWORD,
                          database=DATABASE)
    # Insert the data from the new query for the same day as the earlier delete
    try:
        # conn.execute_query("SELECT * FROM " + TABLE_DB + " WHERE Fecha = " + "CAST('"+ Fecha + "'AS DATE)")

        # conn.execute_query("SELECT Id_Docdeu,A.Nit,Factura,Fecha_Factura,Campana,Ano,Zona,Unidad,Seccion,[Past Due],Ultim_Num_InVoice,Valor_Factura,Saldo,N_Vencidas,Num_Campanas,estado,Valor_PD1,CT,A.Fecha,A.Usuario,asignacion,Ciclo,Vlr_redimir,dia,Dia_Estrategia,Origen,marca,Fecha_Visita,Nombres,Apellidos,Territorio,[Est.Disp] FROM " + TABLE_DB +" A left join avon.dbo.Tb_Nit B on A.Nit = B.Nit WHERE A.Fecha >= CAST('2019-05-04' AS DATE)")
        conn.execute_query(
            "SELECT Id_Docdeu,A.Nit,Factura,Fecha_Factura,Campana,Ano,Zona,Unidad,Seccion,[Past Due],Ultim_Num_InVoice,Valor_Factura,Saldo,N_Vencidas,Num_Campanas,estado,Valor_PD1,CT,A.Fecha,A.Usuario,asignacion,Ciclo,Vlr_redimir,dia,Dia_Estrategia,Origen,marca,Fecha_Visita,Nombres,Apellidos,Territorio,[Est.Disp] FROM "
            + TABLE_DB +
            " A left join avon.dbo.Tb_Nit B on A.Nit = B.Nit WHERE A.Fecha = "
            + "CAST('" + Fecha + "'AS DATE)")

        cloud_storage_rows = ""
        # Records in this table may contain embedded line breaks and semicolons
        for row in conn:
            text_row = ""
            text_row += row['Id_Docdeu'].encode('utf-8') + "|"
            text_row += row['Nit'].encode('utf-8') + "|"
            text_row += row['Factura'].encode('utf-8') + "|"
            text_row += str(row['Fecha_Factura']).encode('utf-8') + "|"
            text_row += row['Campana'].encode('utf-8') + "|"
            text_row += row['Ano'].encode('utf-8') + "|"
            text_row += row['Zona'].encode('utf-8') + "|"
            text_row += str(row['Unidad']).encode('utf-8') + "|"
            text_row += str(row['Seccion']).encode('utf-8') + "|"
            text_row += str(row['Past Due']).encode('utf-8') + "|"
            text_row += str(row['Ultim_Num_InVoice']).encode('utf-8') + "|"
            text_row += str(row['Valor_Factura']).encode('utf-8') + "|"
            text_row += str(row['Saldo']).encode('utf-8') + "|"
            text_row += str(row['N_Vencidas']).encode('utf-8') + "|"
            text_row += str(row['Num_Campanas']).encode('utf-8') + "|"
            text_row += str(row['estado']).encode('utf-8') + "|"
            text_row += str(row['Valor_PD1']).encode('utf-8') + "|"
            text_row += str(row['CT']).encode('utf-8') + "|"
            text_row += row['Fecha'].encode('utf-8') + "|"
            text_row += row['Usuario'].encode('utf-8') + "|"
            text_row += str(row['asignacion']).encode('utf-8') + "|"
            text_row += row['Ciclo'].encode('utf-8') + "|"
            text_row += str(row['Vlr_redimir']).encode('utf-8') + "|"
            text_row += str(row['dia']).encode('utf-8') + "|"
            text_row += str(row['Dia_Estrategia']).encode('utf-8') + "|"
            text_row += str(row['Origen']).encode('utf-8') + "|"
            text_row += row['marca'].encode('utf-8') + "|"
            text_row += row['Nombres'].encode('utf-8') + "|"
            text_row += unicode(row['Apellidos']).encode('utf-8') + "|"
            text_row += unicode(row['Territorio']).encode('utf-8') + "|"
            text_row += str(row['Est.Disp']).encode('utf-8') + "|"
            text_row += "\n"
            cloud_storage_rows += text_row
        conn.close()

        # file = open("/"+ Ruta +"/BI_Archivos/GOOGLE/Avon/"+filename,"a")
        # file.close()
        # blob.upload_from_filename("/"+ Ruta +"/BI_Archivos/GOOGLE/Avon/"+filename)
        gcscontroller.create_file(filename, cloud_storage_rows, "ct-avon")

        try:
            deleteQuery = "DELETE FROM `contento-bi.avon.prejuridico` where CAST(Fecha AS STRING) = " + "CAST('" + Fecha + "'AS STRING)"
            client = bigquery.Client()
            query_job = client.query(deleteQuery)
            query_job.result()
        except Exception:
            print("no se pudo eliminar porque no existe una tabla llamada asi")

        # time.sleep(20)

        flowAnswer = avon_prejuridico_beam.run()

        # time.sleep(600)
        # Keep the Cloud Storage path in a variable so the file can be deleted afterwards
        storage_client = storage.Client()
        bucket = storage_client.get_bucket('ct-avon')
        blob = bucket.blob(filename)
        # Delete the file referenced by the variable
        blob.delete()
        # return "R, " + 'flowAnswer'
    except IOError:
        dIO = "No se han cargado archivos el dia de hoy"
        return dIO
    return "R, " + str(flowAnswer)