Example #1
0
def vk_refresh():
    """Pull VK Ads campaign statistics and return them as a DataFrame.

    Fetches the campaign list and per-day stats for a fixed ad account via
    the VK Ads API, maps campaign ids to names and UTM data, and returns a
    pandas DataFrame with one row per (campaign, day).
    """
    from doc_token import get_tokens

    vk_token = get_tokens()['vk']

    # Campaign list for the fixed ad account.
    campaigns = get_vk(token=vk_token,
                       method='ads.getCampaigns',
                       params='account_id=1604608801')
    response_rows = campaigns['response']
    ids_csv = ",".join(str(row['id']) for row in response_rows)
    date_to = str(datetime.date.today())

    # Per-day statistics for every campaign up to today.
    # NOTE(review): unlike the call above, no `method` argument is passed —
    # presumably get_vk has a suitable default; confirm.
    stats = get_vk(
        token=vk_token,
        params=
        f'ids_type=campaign&date_from=2020-04-25&date_to={date_to}&period=day&account_id=1604608801&ids={ids_csv}'
    )
    id_to_name = {str(row['id']): row['name'] for row in response_rows}

    time.sleep(1)  # crude throttling between VK API calls
    rows = campaign_to_table(stats, id_to_name)
    time.sleep(1)

    # Column names (including the misspelled 'capmaign' and, below,
    # 'umt_campaing') are kept as-is: downstream consumers key on them.
    vk_df = pandas.DataFrame(rows, columns=[
        'cmp_id', 'capmaign', 'date', 'cost', 'impressions', 'clicks', 'reach'
    ])
    time.sleep(1)

    utms = dict_vk(get_vk_ads(token=vk_token))

    vk_df['umt_campaing'] = vk_df['cmp_id'].apply(lambda cid: utms[int(cid)][0])
    vk_df['project'] = vk_df['cmp_id'].apply(lambda cid: utms[int(cid)][1])
    vk_df['date'] = vk_df['date'].apply(
        lambda d: pandas.Timestamp(datetime.datetime.strptime(d, '%Y-%m-%d')))
    return vk_df
def y_direct_refresh():
    """Refresh Yandex.Direct ad statistics and return them as a DataFrame.

    Determines the newest date already loaded into BigQuery (falling back to
    2020-04-10 when the lookup fails), deletes that possibly-partial day from
    the target table, re-downloads stats from that date onward, and enriches
    the result with UTM matching data.

    Returns:
        The enriched DataFrame, or [] when Yandex returns no rows.
    """
    from doc_token import get_tokens
    token = get_tokens()

    dimestions = "ym:ad:<attribution>DirectOrder,ym:ad:<attribution>DirectPhraseOrCond"
    metrics = "ym:ad:<currency>AdCost,ym:ad:clicks"

    # Resume from the newest date already in BigQuery; fall back to the
    # project start date when the table is missing or empty.
    try:
        bq_yandex = gbq_pd('YandexAds', 'marketing_bi')
        q_last_date = "SELECT date(max(date)) as last_date  FROM `kalmuktech.marketing_bi.YandexAds`"
        last_date = bq_yandex.df_query(q_last_date).iloc[0, 0].date()
        date = str(last_date)[:10]
    except Exception:  # narrowed from a bare except; still deliberately best-effort
        date = '2020-04-10'

    # Drop the boundary day so it is reloaded in full (it may be partial).
    bq_yandex = gbq_pd('YandexAds', 'marketing_bi')
    clean_last = f"Delete  FROM `kalmuktech.marketing_bi.YandexAds` where date = '{date}'"
    bq_yandex.df_query(clean_last)  # result intentionally unused (DELETE)

    # Build BigQuery-safe column names from the Metrika header string.
    header_str = dimestions + "," + metrics
    header_str = header_str.replace(":", "_")
    header_str = header_str.replace("<attribution>", "")
    header_str = header_str.replace("<currency>", "")
    column_headers = header_str.split(",") + ['date']

    caps = token['yandex']['caps']
    caps_dirc = direct_acc(**caps)
    probk_list = caps_dirc.get_lists(date, dimestions, metrics)

    # Nothing new to load — preserve the original "empty list" contract.
    if probk_list == []:
        return []

    direct_table_full = pandas.DataFrame(probk_list, columns=column_headers)
    direct_table_full['date'] = direct_table_full['date'].apply(
        pandas.Timestamp)

    cross_data = get_match_table(ym_class=caps_dirc)

    utm_data = match_dict(cross_data)

    final_table_yandex = add_utm_to_df_yandex(direct_table_full, utm_data)

    return final_table_yandex
Example #3
0
def refresh_personal_reports():
    """Rebuild the personal-report tables in BigQuery from the MySQL backend.

    Refreshes three tables — report_regs (user registrations), report_virs
    (showcase performance) and Ann_report (lead attribution) — by querying
    the source MySQL database and BigQuery, then replacing each table with
    the fresh result.  Returns a human-readable log of row counts.

    NOTE(review): SQL text below is copied verbatim; any edit to it changes
    the queries actually executed.
    """
    log = ""

    token = get_tokens()

    # --- report_regs: one row per registered user, with notification mail,
    # registration source and showcase ownership. ---
    wf_regs_table = gbq_pd('report_regs', datasetId='wf_bi')
    query_compaines = """
        SELECT 
        users.user_id,
        if(notif, users.email, False) as notif_mail,
        users.phone as phone, 
        from_unixtime(users.create_date) as reg_date,
        if(users.last_date> 0, from_unixtime(last_date),from_unixtime(create_date)) as last_dt, 
        if(last_date >0, '1+', 0) as visit_count,
        showcase.vitrins,
        showcase.domain,
        ifnull(reg_sourse.reg_donner, 'Прямая') as reg_donner,
        reg_sourse.reg_donner_id,
        concat("https://workface.ru/ru/company/",company_id) as company_url
    FROM `users`as users

    left join (
        SELECT 
            user_id as usr, 
            if(sum(email_messages+email_deals+email_relationships)>0, True,False) as notif
        FROM `users_notifications` 
        group by user_id
    ) as nots on nots.usr = users.user_id

    left join (
        SELECT
            user_id,
            deals.showcase_id as reg_donner_id,
            domain as reg_donner
        FROM `deals`  as deals
        inner join users on users.create_date = deals.dt_create
        join showcase_info as vitrins on deals.showcase_id = vitrins.showcase_id
    ) as reg_sourse on reg_sourse.user_id = users.user_id

    left join (
    SELECT 
    user_shw.showcase_id as showcase_id,
    user_id,
    domain,
    1 as vitrins,
        company_id
    FROM (
    SELECT  
        showcase_id,
        user_id,
        cmp_id as company_id
    FROM (
        SELECT 
            good_offer_id as showcase_id, 
            company_id as cmp_id
        FROM `good_offers`
        where is_showcase = 2) as jnt_cmp
    join `companies` as cmp on cmp.company_id = jnt_cmp.cmp_id) as user_shw
    JOIN showcase_info on showcase_info.showcase_id = user_shw.showcase_id
    ) as showcase on showcase.user_id = users.user_id
        """
    regs_query = query_df(query_compaines, token['wf_base'])
    wf_regs_table.replace(regs_query)

    # Second pass over the just-loaded table: replace the '1+'/0 placeholder
    # visit_count with real GA session counts joined by user id.
    q_add_visits = """
    SELECT 
    user_id,
    notif_mail,
    phone,
    reg_date,
    last_dt,
    visits as visit_count,
    vitrins,
    domain,
    reg_donner,
    reg_donner_id,
    company_url
    FROM `kalmuktech.wf_bi.report_regs` as regs
    left join (SELECT user_id as users_v, ifnull(sum(ga_sessions),0) as visits FROM `kalmuktech.wf_bi.users_id`  as u 
    left join `kalmuktech.wf_bi.user_sess`  as uses on u.u_id = uses.ga_dimension1
    group by user_id) as user_visits on user_visits.users_v = regs.user_id
    """
    res_plus_visits = wf_regs_table.df_query(q_add_visits)
    wf_regs_table.replace(res_plus_visits)

    log += f"По таблице report_regs обновилось {len(res_plus_visits)} строк \n"

    # --- report_virs: one row per showcase with deal sums/counts,
    # registrations attributed to it, and goods counts. ---
    wf_vitr_table = gbq_pd('report_virs', datasetId='wf_bi')
    query_compaines = """
    SELECT 
        FROM_UNIXTIME(create_date) as date,
        user_id,
        phone,
        domain,
        ifnull(deals.sums, 0 ) as deal_sums,
        ifnull(deals.deals_cnt, 0) as deal_cnt,
        ifnull(regs.reg_cnt, 0)  as reg_cnt,
        ifnull(goods.goods_cnt, 0) as goods
    FROM (
        SELECT 
            good_offer_id as showcase_id, 
            company_id  
        FROM `good_offers`
        where is_showcase = 2
    ) as showcase

    LEFT JOIN (
        SELECT 
            showcase_id as shw_id,
            sum(total_price) as sums,
            count(distinct deal_id) as deals_cnt
        FROM `deals` 
        GROUP BY shw_id
    ) AS deals on deals.shw_id = showcase.showcase_id

    LEFT JOIN (
        SELECT
            count(distinct user_id) as reg_cnt,
            reg_srs.showcase_id as reg_donner_id
        FROM `deals`  as reg_srs
        inner join users on users.create_date = reg_srs.dt_create
        GROUP BY reg_donner_id
    ) AS regs on regs.reg_donner_id = showcase.showcase_id

    LEFT JOIN (
        SELECT 
            count(distinct good_offer_id) as goods_cnt,
            showcase_id  as v_goods_ids
        FROM `good_offers` 
        GROUP BY showcase_id
    ) as goods on goods.v_goods_ids = showcase.showcase_id

    LEFT JOIN (
        SELECT users.user_id as user_id,
            users.phone as phone,
            create_date,
            company_id as cmp 
        FROM `users`
        left join companies on users.user_id = companies.user_id
        where company_id is not null 
        ) as users on users.cmp = showcase.company_id

    LEFT JOIN (
        SELECT 
            showcase_id as sh_id, 
            domain 
        FROM `showcase_info`
        ) as showcase_info on showcase_info.sh_id = showcase.showcase_id
        """
    regs_query = query_df(query_compaines, token['wf_base'])
    wf_vitr_table.replace(regs_query)

    log += f"По таблице report_virs обновилось {len(regs_query)} строк \n"

    # Second pass: overwrite reg_cnt with GA-based registration counts
    # joined on the showcase domain.
    q_add_Ga_regs = """SELECT 
      date, 
      user_id,
      phone,
      domain,
      deal_sums,
      deal_cnt,
      ifnull(regs, 0) as reg_cnt,
      
      goods
    FROM `kalmuktech.wf_bi.report_virs` as v
    left join (SELECT ga_hostname, count(distinct user_id ) as regs FROM `kalmuktech.wf_bi.wf_hosts` as w
    inner join kalmuktech.wf_bi.users_id as u on u.u_id = w.ga_dimension1
    group by 1) as g on g.ga_hostname = v.domain"""
    regs_query = wf_vitr_table.df_query(q_add_Ga_regs)
    wf_vitr_table.replace(regs_query)

    log += f"По таблице report_virs обновилось {len(regs_query)} строк \n"

    # --- Ann_report: AmoCRM leads attributed to UTM sources via Callibri
    # phone/email matches, unioned with Tilda-form leads. ---
    query_ann = """
    SELECT
    utm_source,
    utm_campaign,
    date(date) as date,
    CONCAT('https://officeicapru.amocrm.ru/leads/detail/',conts_id) AS all_leads,
    status AS failed,
    sale  as sold,
    sum
    from  (select conts_id as conts_id,
        phone,
        status,
        utm_source,
        utm_campaign,
        utm_term,
        date,
        sale,
        sum
    from 
    (SELECT
        DISTINCT (lead_id) as conts_id,
        phone,
        status,
        utm_source,
        utm_campaign,
        utm_term,
        date,
        sale
    FROM (
        SELECT
        lead_id,
        status,
        sale,
        phone AS ml
        FROM
        kalmuktech.marketing_bi.base_amo_leads AS AMO_leads
        JOIN
        kalmuktech.marketing_bi.base_amo_contacts AS cncts
        ON
        cncts.conts_id = AMO_leads.contact_id
        WHERE
        phone != 'None') AS lead_st
    JOIN (
        SELECT
    min(ga_date) as date,
    phone,
    ga_source as utm_source,
    ga_campaign as utm_campaign,
    ga_keyword as utm_term,
    FROM (
    SELECT
        phone,
        metrika_client_id
    FROM
        `kalmuktech.marketing_bi.callibri_data` AS data
    WHERE
        metrika_client_id IS NOT NULL
        AND phone IS NOT NULL ) AS data
    JOIN
    `kalmuktech.marketing_bi.base_ga_cookie` AS ga
    ON
    data.metrika_client_id = ga.ga_dimension1
    group by 
    2,3,4,5
    ) AS colibr_mail
    ON
        colibr_mail.phone = lead_st.ml) as tbl
    LEft join (SELECT sum(otruzka_sum)/100 as sum, phone as phone_ms FROM `kalmuktech.marketing_bi.ms_sold` group by phone) as ms_sold on ms_sold.phone_ms = tbl.phone
    )

    union ALL

    SELECT
    utm_source,
    utm_campaign,
    date(date) as date,
    CONCAT('https://officeicapru.amocrm.ru/leads/detail/',conts_id) AS all_leads,
    status AS failed,
    sale  as sold,
    0 as sum
    FROM (
    SELECT
        DISTINCT (lead_id) as conts_id,
        email,
        status,
        utm_source,
        utm_campaign,
        utm_term,
        date,
        sale
    FROM (
        SELECT
        lead_id,
        status,
        sale,
        email AS ml
        FROM
        kalmuktech.marketing_bi.base_amo_leads AS AMO_leads
        JOIN
        kalmuktech.marketing_bi.base_amo_contacts AS cncts
        ON
        cncts.conts_id = AMO_leads.contact_id
        WHERE
        email != 'None') AS lead_st
    JOIN (
        SELECT
    min(ga_date) as date,
    email,
    ga_source as utm_source,
    ga_campaign as utm_campaign,
    ga_keyword as utm_term,
    FROM (
    SELECT
        email,
        metrika_client_id
    FROM
        `kalmuktech.marketing_bi.callibri_data` AS data
    WHERE
        metrika_client_id IS NOT NULL
        and email IS NOT NULL
        AND phone IS NULL ) AS data
    JOIN
    `kalmuktech.marketing_bi.base_ga_cookie` AS ga
    ON
    data.metrika_client_id = ga.ga_dimension1
    group by 
    2,3,4,5) 
        AS colibr_mail
    ON
        colibr_mail.email = lead_st.ml)
        
    union ALL

    SELECT
    ga_source,
    ga_campaign,
    date(ga_date),
    CONCAT('https://officeicapru.amocrm.ru/leads/detail/',id) AS all_leads,
    status  AS status,
    sale  as sold,
    0 as sum
    FROM
    kalmuktech.marketing_bi.base_tilda_forms as tilda
    JOIN
    `kalmuktech.marketing_bi.base_ga_cookie` AS ga
    ON
    tilda.cookie = ga.ga_dimension1
    """

    Ann_report = gbq_pd('Ann_report', 'marketing_bi')
    dates_str = Ann_report.df_query(query_ann)
    # NOTE(review): local import shadows any module-level pandas import;
    # kept as-is to avoid changing import-time behavior.
    import pandas

    def transform_sourse(data):
        """Normalize traffic-source labels in the first two result columns.

        Positionally, column 0 is the source and column 1 is the campaign.
        Organic Google/Yandex and direct traffic get human-readable Russian
        labels; every other row passes through unchanged.
        """
        dict_of_dims_date = []
        for i in data.iterrows():

            # i is (index, row-Series); i[1][0] = source, i[1][1] = campaign.
            if i[1][1] == '(not set)' and i[1][0] == 'google':
                i[1][0] = 'Google СЕО Поиск'
            elif i[1][1] == '(not set)' and 'yandex' in i[1][0]:
                i[1][0] = 'Яндекс СЕО Поиск'
            elif i[1][0] == '(direct)':
                i[1][0] = 'Прямые'
            dict_of_dims_date.append(i[1])

        return pandas.DataFrame(dict_of_dims_date)

    dates_str = transform_sourse(dates_str)
    Ann_report.replace(dates_str)

    log += f"По таблице Ann_report обновилось {len(dates_str)} строк \n"
    return log
Example #4
0
def shop_icap_tables():
    """Refresh the shop-icap marketing tables in BigQuery.

    Pulls two Google Analytics reports (client-id/dimension mappings and
    cookie-to-traffic-source data) plus the list of buyers from the MySQL
    backend, and replaces the corresponding BigQuery tables.

    Returns:
        A human-readable log of how many rows each table received.
    """
    log = ""
    token = get_tokens()['wf_base']
    shop_i_cap_GA = ga_connect('195060854')
    start = '2020-04-01'
    end = str(datetime.date.today())
    # Report 1: custom dimensions 1 & 2 + date, keyed by sessions.
    # (the 'dimetions' key spelling is part of ga_connect.report_df's API)
    params = {
        'dimetions': [
            {
                'name': 'ga:dimension1'
            },
            {
                'name': 'ga:dimension2'
            },
            {
                'name': 'ga:date'
            },
        ],
        'metrics': [{
            'expression': 'ga:sessions'
        }],
        'dates': {
            'startDate': f'{start}',
            'endDate': f'{end}'
        }
    }
    i_cap_DF_GA = shop_i_cap_GA.report_df(**params)

    # BigQuery forbids ':' in column names.
    columns = [i.replace(':', '_') for i in i_cap_DF_GA.columns]
    i_cap_DF_GA.columns = columns
    i_cap_DF_GA = df_proc(i_cap_DF_GA)
    df_cooks = i_cap_DF_GA.drop(columns=['ga_sessions'])

    # Report 2: cookie (dimension2) to traffic-source attributes.
    params = {
        'dimetions': [{
            'name': 'ga:dimension2'
        }, {
            'name': 'ga:date'
        }, {
            'name': 'ga:fullReferrer'
        }, {
            'name': 'ga:source'
        }, {
            'name': 'ga:medium'
        }, {
            'name': 'ga:campaign'
        }, {
            'name': 'ga:keyword'
        }],
        'metrics': [{
            'expression': 'ga:sessions'
        }],
        'dates': {
            'startDate': f'{start}',
            'endDate': f'{end}'
        }
    }
    cooks_2_sourese = shop_i_cap_GA.report_df(**params)
    columns = [i.replace(':', '_') for i in cooks_2_sourese.columns]
    cooks_2_sourese.columns = columns
    cooks_2_sourese = df_proc(cooks_2_sourese)
    cooks_2_sourese = cooks_2_sourese.drop(columns=['ga_sessions'])

    cooks_2_sourese_table = gbq_pd('shop_cooks_sourse', 'marketing_bi')
    cooks_2_sourese_table.replace(cooks_2_sourese)
    log += f"По таблице shop_cooks_sourse обновилось {len(cooks_2_sourese)} строк \n"

    df_cooks_table = gbq_pd('shp_icap_wf_cooks', 'marketing_bi')
    df_cooks_table.replace(df_cooks)
    log += f"По таблице shp_icap_wf_cooks обновилось {len(df_cooks)} строк \n"

    def query_df(qry, token):
        """Run *qry* against the MySQL backend and return a DataFrame.

        Unlike the previous version, the cursor and connection are closed
        even when the query raises.
        """
        cnx = mysql.connect(**token)
        try:
            cursor = cnx.cursor()
            try:
                cursor.execute(qry)
                rows = cursor.fetchall()
                field_names = [col[0] for col in cursor.description]
            finally:
                cursor.close()
        finally:
            cnx.close()
        # NOTE(review): `pd` is not imported in this view — presumably
        # provided by a star import elsewhere; confirm.
        return pd.DataFrame(rows, columns=field_names)

    # Users who bought from supplier company 46 (the icap shop).
    get_buyers = """SELECT
      u.user_id AS user_id,
      u.phone
    FROM
      users AS u

    JOIN
      `companies` AS c
    ON
      c.user_id = u.user_id

    INNER JOIN (
      SELECT
        DISTINCT consumer_profile_id AS cmp_id
      FROM
        `deals`
      WHERE
        supplier_company_id = 46) AS cmp
    ON
      cmp.cmp_id = c.company_id"""

    companies = query_df(get_buyers, token)
    wf_buyers = gbq_pd('wf_buyers_icap', 'marketing_bi')
    wf_buyers.replace(companies)
    log += f"По таблице wf_buyers_icap обновилось {len(companies)} строк \n"
    return log
Example #5
0
import pandas
import os
import requests
import json
import datetime

# Load shared credentials once at import time; the Facebook token is used
# by the fetch helpers defined in this module.
from doc_token import get_tokens
passwords = get_tokens()
token = passwords['facebook']


def fb_add_x(cell, fld):
    """Extract the integer value of action type *fld* from a Facebook
    insights "actions" cell.

    Args:
        cell: a list of {'action_type': ..., 'value': ...} dicts, or any
            non-list placeholder (e.g. NaN) for rows without actions.
        fld: the action_type to look up.

    Returns:
        The value of the last matching entry as an int, or 0 when the cell
        is not a list or contains no match.
    """
    result = 0
    # isinstance instead of `type(cell) == list` (idiomatic, subclass-safe).
    if isinstance(cell, list):
        for action in cell:
            if action['action_type'] == fld:
                result = int(action['value'])  # last match wins, as before
    return result

def utm_to_colums_full(column):
    sourse_list = []
    medium_list = []
    project_list = []
    for row in column:
        
        if row and 'utm_source' in row:
            utm = row.split('?')
            domain = row.split("/")[2]
            tag_list = [y.split('=') for y in utm[-1].split('&')]
            tags= {i[0]:i[1] for i in tag_list}
            sourse_list.append(tags['utm_source'])
Example #6
0
import os
import pandas
from apiclient.discovery import build
from google.cloud import bigquery
from google.oauth2 import service_account
import datetime
from pd_gbq import *
import mysql.connector as mysql
# Credentials for GA / BigQuery / MySQL access, loaded once at import time.
from doc_token import get_tokens
token = get_tokens()


class ga_connect:

    log = ""

    #   Задаём ключ из файла
    credentials = service_account.Credentials.from_service_account_file(
        'kalmuktech-5b35a5c2c8ec.json', )
    analytics = build('analyticsreporting', 'v4', credentials=credentials)

    def __init__(self, viewId):
        # Оптределяем аккаунт откуда бер1м данные
        self.viewId = viewId

    def request(self, dates, metrics, dimetions, filters=[]):
        # Забираем сырые данные
        return ga_connect.analytics.reports().batchGet(
            body={
                'reportRequests': [{
                    "samplingLevel": "LARGE",
Example #7
0
def call_hole():
    """Report Callibri calls that never became AmoCRM contacts, via Telegram.

    Pulls the last 5 days of Callibri call statistics, compares caller phone
    numbers against AmoCRM contact phone numbers, and sends the list of
    unmatched numbers (with call timestamps) to a fixed set of Telegram chats.
    """
    passwords = get_tokens()
    callibri_connect = callibri(token=passwords['callibri'])
    date2 = datetime.datetime.today().date() - datetime.timedelta(days=1)
    date1 = date2 - datetime.timedelta(days=5)
    callibri_data = callibri_connect.get_stats(date1, date2)
    # phone -> "YYYY-MM-DD HH:MM:SS" timestamp of the call
    cal_ph = {
        i['phone']: (i['date'][:10] + ' ' + i['date'][11:19])
        for i in callibri_data['channels_statistics'][0]['calls']
    }

    SCOPES = [
        'https://www.googleapis.com/auth/drive',
        'https://www.googleapis.com/auth/documents'
    ]

    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        'kalmuktech-5b35a5c2c8ec.json', SCOPES)
    service = build('docs', 'v1', credentials=credentials)

    current_token = get_new_token(
        "1V1gX11RDYJf4ZVFCEOqp-kY5j6weApl_oEFkv2oZzW4")
    amo_connect = get_AMO(current_token['access_token'])
    fresj_cnts = amo_connect.get_big_amo('contacts')

    import string

    def get_custom_phone(cstms, fld='Телефон'):
        """Return the digits of the first custom field named *fld*, else None."""
        for i in cstms:
            if 'name' in i and i['name'] == fld:
                phn = ''
                for j in i['values'][0]['value']:
                    if j in string.digits:
                        phn += j
                return phn

    # normalized phone -> AmoCRM contact id
    cnt_map = {
        get_custom_phone(i['custom_fields']): i['id']
        for i in fresj_cnts
    }

    # Callibri callers with no matching AmoCRM contact.
    matches = [phone for phone in cal_ph if phone not in cnt_map]

    losts = {phone: cal_ph[phone] for phone in matches}
    if len(matches) > 0:
        message = 'Контакты не попавшие в амо:\n'
        for i, e in losts.items():
            message += f'{i} созданый {e}\n'
    else:
        message = 'Нет пропавших контактов'

    chats = [247391252, 482876050]
    # SECURITY(review): bot token is hard-coded in source; it should live in
    # the get_tokens() credential store like the other secrets.
    token = "1461276547:AAECMSMOMW1Zah3IEXeAyGAsBVJD0ktM86E"
    method = "sendMessage"

    url = "https://api.telegram.org/bot{token}/{method}".format(token=token,
                                                                method=method)
    for i in chats:
        data = {"chat_id": i, "text": message}
        requests.post(url, data=data)