def create_languages_for_all_campaigns_dataset(date_range):
    """Build the languages-for-all-campaigns dataset for one date range.

    Reads the complete_languages dataset, extracts each language's
    "for_all_campaigns" summary into a list, writes the result to a json
    file, and returns it as a json string.  On any failure it emails an
    alert and exits the process.
    """
    try:
        with open(
                f'{os.environ.get("ULANMEDIAAPP")}/data/complete_languages/{date_range}_complete_languages_dataset.json',
                'r') as file:
            json_file = json.load(file)

        languages_for_all_campaigns = {
            "metadata": json_file["metadata"],
            # one summary record per language
            "data": [language["for_all_campaigns"]
                     for language in json_file["data"].values()],
        }

        with open(
                f"{os.environ.get('ULANMEDIAAPP')}/data/languages_for_all_campaigns/{date_range}_languages_for_all_campaigns_dataset.json",
                "w") as file:
            json.dump(languages_for_all_campaigns, file)

        return json.dumps(languages_for_all_campaigns)
    except Exception:
        # was a bare except; Exception still covers IO/Key/json errors
        # without swallowing SystemExit/KeyboardInterrupt
        print("Failed - email sent")
        send_email("*****@*****.**",
                   "Failed - create_languages_for_all_campaigns_dataset()",
                   "Failed - create_languages_for_all_campaigns_dataset()")
        sys.exit()
def create_campaigns_for_one_ad_dataset(ad_image, date_range):
    """Build the campaigns-for-one-ad dataset for one ad image.

    Reads the complete_ads dataset, pulls the per-campaign records for
    *ad_image*, saves them to a json file, and returns them as a json
    string.  On any failure it emails an alert and exits the process.
    """
    try:
        with open(
                f'{os.environ.get("ULANMEDIAAPP")}/data/complete_ads/{date_range}_complete_ads_dataset.json',
                'r') as file:
            json_file = json.load(file)

        campaigns_for_one_ad = {
            "metadata": json_file["metadata"],
            "data": json_file["data"][ad_image]["for_each_campaign"],
        }

        with open(
                f"{os.environ.get('ULANMEDIAAPP')}/data/campaigns_for_one_ad/{ad_image}_{date_range}_campaigns_for_one_ad_dataset.json",
                "w") as file:
            json.dump(campaigns_for_one_ad, file)

        return json.dumps(campaigns_for_one_ad)
    except Exception:
        # was a bare except
        print("Failed - email sent")
        send_email("*****@*****.**",
                   "Failed - create_campaigns_for_one_ad_dataset()",
                   "Failed - create_campaigns_for_one_ad_dataset()")
        sys.exit()
def create_p_widgets_for_one_domain_for_all_campaigns_dataset(date_range, domain):
    """Build the p-widgets dataset for every widget matching a domain.

    *domain* may be a comma separated list; a widget is included when
    any of its own comma separated domains matches any requested domain.
    Saves the dataset to a json file (the domain portion of the file
    name is truncated to 20 chars) and returns it as a json string.
    On any failure it emails an alert and exits the process.
    """
    try:
        domains_to_check = domain.split(",")

        p_widgets_for_one_domain_for_all_campaigns = {
            "metadata": {"vol_start_date": "none", "vol_end_date": "none"},
            "data": {},
        }

        with open(f'{os.environ.get("ULANMEDIAAPP")}/data/complete_p_widgets/{date_range}_complete_p_widgets_dataset.json', 'r') as file:
            complete_p_widgets = json.load(file)

        for p_widget, widget_info in complete_p_widgets.items():
            domains_to_match = widget_info["for_all_campaigns"]["domain"].split(",")
            if any(d in domains_to_match for d in domains_to_check):
                record = widget_info["for_all_campaigns"]
                # fold the campaign counts into the per-widget record
                record["good_campaigns_count"] = widget_info["good_campaigns_count"]
                record["bad_campaigns_count"] = widget_info["bad_campaigns_count"]
                record["wait_campaigns_count"] = widget_info["wait_campaigns_count"]
                p_widgets_for_one_domain_for_all_campaigns["data"][p_widget] = record

        # keep the output file name a reasonable length
        if len(domain) > 20:
            domain = domain[:20]

        with open(f"{os.environ.get('ULANMEDIAAPP')}/data/p_widgets_for_one_domain_for_all_campaigns/{date_range}_{domain}_p_widgets_for_one_domain_for_all_campaigns_dataset.json", "w") as file:
            json.dump(p_widgets_for_one_domain_for_all_campaigns, file)

        return json.dumps(p_widgets_for_one_domain_for_all_campaigns)
    except Exception:
        # was a bare except
        print("Failed - email sent")
        send_email("*****@*****.**", "Failed - create_p_widgets_for_one_domain_for_all_campaigns_dataset()", "Failed - create_p_widgets_for_one_domain_for_all_campaigns_dataset()")
        sys.exit()
def create_campaigns_for_one_country_dataset(date_range, country_name):
    """Build the campaigns-for-one-country dataset for one country.

    Reads the complete_countries dataset, takes the per-campaign records
    for *country_name*, annotates each record with its human-readable
    campaign name (looked up from the campaign sets), writes the result
    to a json file and returns it as a json string.  On any failure it
    emails an alert and exits the process.
    """
    try:
        with open(f'{os.environ.get("ULANMEDIAAPP")}/data/complete_countries/{date_range}_complete_countries_dataset.json', 'r') as file:
            json_file = json.load(file)

        campaigns_for_one_country = {
            "metadata": json_file["metadata"],
            "data": json_file["data"][country_name]["for_each_campaign"],
        }

        # vol_id -> campaign name
        campaigns_lookup = {c["vol_id"]: c["name"] for c in get_campaign_sets()}

        for campaign_id, campaign in campaigns_for_one_country["data"].items():
            campaign["campaign_name"] = campaigns_lookup[campaign_id]

        with open(f"{os.environ.get('ULANMEDIAAPP')}/data/campaigns_for_one_country/{date_range}_{country_name}_campaigns_for_one_country_dataset.json", "w") as file:
            json.dump(campaigns_for_one_country, file)

        return json.dumps(campaigns_for_one_country)
    except Exception:
        # was a bare except
        print("Failed - email sent")
        send_email("*****@*****.**", "Failed - create_campaigns_for_one_country_dataset()", "Failed - create_campaigns_for_one_country_dataset()")
        sys.exit()
def update_blacklist_from_mikes_server():
    """Pull the widget blacklist from Mike's server into the local DB.

    Fetches the blacklist and inserts one 'black' colorlist row per
    widget id.  On a request failure it emails an alert and exits the
    process.
    """
    try:
        res = requests.get(
            "https://ulanmedia.brianhaller.net/api/readblacklist")
        res.raise_for_status()
        widgets = res.json()

        mydb = mysql.connector.connect(host="localhost",
                                       user=mysql_user,
                                       passwd=mysql_password,
                                       database="ulanmedia")

        mycursor = mydb.cursor()

        # SECURITY FIX: widget ids come from an external service, so use
        # a parameterized query instead of interpolating them into SQL
        sql = "INSERT INTO colorlist(widget_id, color) values(%s, 'black');"
        for widget in widgets:
            mycursor.execute(sql, (widget,))
            mydb.commit()

    except requests.exceptions.RequestException as e:
        print("Failed to update blacklist")
        print(e)
        # subject fixed: it previously said "update_backlist()"
        send_email(
            "*****@*****.**", "Failed - update_blacklist_from_mikes_server() at " +
            str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
def get_mgid_access_token(login, password):
    """Return an mgid API auth token for *login*/*password*.

    Emails an alert and exits the process when the request fails or the
    response does not contain a token.
    """
    try:
        res = requests.post("https://api.mgid.com/v1/auth/token",
                            headers={
                                "Content-type":
                                "application/x-www-form-urlencoded",
                                "Cache-Control": "no-cache"
                            },
                            data={
                                "email": login,
                                "password": password
                            })
        try:
            return res.json()["token"]
        except (ValueError, KeyError):
            # was a bare except: .json() raises ValueError on a non-json
            # body and ["token"] raises KeyError when the key is absent
            send_email(
                "*****@*****.**", "Failed - mgid token access at " +
                str(datetime.now().strftime("%Y-%m-%d %H:%M")),
                "Http status code error - mgid token access failed.")
            sys.exit()
    except requests.exceptions.RequestException as e:
        print("Failed - get_mgid_access_token()")
        send_email(
            "*****@*****.**", "Failed - get_mgid_access_token() at " +
            str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
def create_countries_for_one_campaign_dataset(date_range, vol_id):
    """Build the countries-for-one-campaign dataset for one campaign.

    Scans every country in the complete_countries dataset and collects
    the per-campaign records whose campaign_id equals *vol_id*.  Saves
    the result to a json file and returns it as a json string.  On any
    failure it emails an alert and exits the process.
    """
    try:
        with open(
                f'{os.environ.get("ULANMEDIAAPP")}/data/complete_countries/{date_range}_complete_countries_dataset.json',
                'r') as file:
            json_file = json.load(file)

        countries_for_one_campaign = {
            "metadata": json_file["metadata"],
            "data": [
                record
                for country in json_file["data"].values()
                for record in country["for_each_campaign"].values()
                if record["campaign_id"] == vol_id
            ],
        }

        with open(
                f"{os.environ.get('ULANMEDIAAPP')}/data/countries_for_one_campaign/{date_range}_{vol_id}_countries_for_one_campaign_dataset.json",
                "w") as file:
            json.dump(countries_for_one_campaign, file)

        return json.dumps(countries_for_one_campaign)
    except Exception:
        # was a bare except
        print("Failed - email sent")
        send_email("*****@*****.**",
                   "Failed - create_countries_for_one_campaign_dataset()",
                   "Failed - create_countries_for_one_campaign_dataset()")
        sys.exit()
# Example #8
def create_ads_for_one_campaign_dataset(vol_id, date_range):
    """Build the ads-for-one-campaign dataset for one campaign.

    Collects every per-campaign ad record belonging to *vol_id*, then
    copies the parent ad's global rank onto each record.  Saves the
    result to a json file and returns it as a json string.  On any
    failure it emails an alert and exits the process.
    """
    try:
        with open(f'{os.environ.get("ULANMEDIAAPP")}/data/complete_ads/{date_range}_complete_ads_dataset.json', 'r') as file:
            json_file = json.load(file)

        data = json_file["data"]

        ads_for_one_campaign = {
            "metadata": json_file["metadata"],
            "data": [
                ad
                for ad_image in data.values()
                for ad in ad_image["for_each_campaign"]
                if ad["vol_id"] == vol_id
            ],
        }

        # add the parent ad's global rank to each per-campaign record
        for ad in ads_for_one_campaign["data"]:
            for_all = data[ad["image"]]["for_all_campaigns"]
            ad["global_rank"] = for_all["global_rank"]
            ad["global_rank_order"] = for_all["global_rank_order"]

        with open(f"{os.environ.get('ULANMEDIAAPP')}/data/ads_for_one_campaign/{vol_id}_{date_range}_ads_for_one_campaign_dataset.json", "w") as file:
            json.dump(ads_for_one_campaign, file)

        return json.dumps(ads_for_one_campaign)
    except Exception:
        # was a bare except
        print("Failed - email sent")
        send_email("*****@*****.**", "Failed - create_ads_for_one_campaign_dataset()", "Failed - create_ads_for_one_campaign_dataset()")
        sys.exit()
def create_p_widgets_for_all_campaigns_dataset(date_range):
    """Build the p-widgets-for-all-campaigns dataset for one date range.

    Combines the complete_p_widgets dataset with the mgid/vol date-range
    metadata taken from the first campaign's
    p_and_c_widgets_for_one_campaign file (those files all share the
    same ranges).  Each widget is flattened to its "for_all_campaigns"
    record with the good/bad/wait campaign counts folded in.  Saves the
    result to a json file and returns it as a json string.  On any
    failure it emails an alert and exits the process.
    """
    try:
        # 1. prerequisite data

        campaigns = get_campaign_sets()

        with open(f'{os.environ.get("ULANMEDIAAPP")}/data/complete_p_widgets/{date_range}_complete_p_widgets_dataset.json', 'r') as file:
            complete_p_widgets = json.load(file)

        # 2. the basic structure to fill in

        p_widgets_for_all_campaigns = {"metadata": {}, "data": {}}

        # 3. metadata: the mgid and vol request date ranges.  All
        # p_and_c_widgets_for_one_campaign files have the same ranges,
        # so the first campaign's file is used.

        with open(f'{os.environ.get("ULANMEDIAAPP")}/data/p_and_c_widgets_for_one_campaign/{campaigns[0]["vol_id"]}_{date_range}_p_and_c_widgets_for_one_campaign_dataset.json', 'r') as file:
            json_file = json.load(file)
        for key in ("mgid_start_date", "mgid_end_date",
                    "vol_start_date", "vol_end_date"):
            p_widgets_for_all_campaigns["metadata"][key] = json_file["metadata"][key]

        # 4. the data itself

        p_widgets_for_all_campaigns["data"] = complete_p_widgets

        # 5. flatten each widget: keep only "for_all_campaigns", with
        # the good/bad/wait campaign counts copied onto it

        for p_widget, widget_info in p_widgets_for_all_campaigns["data"].items():
            for_all = widget_info["for_all_campaigns"]
            for_all["good_campaigns_count"] = widget_info["good_campaigns_count"]
            for_all["bad_campaigns_count"] = widget_info["bad_campaigns_count"]
            for_all["wait_campaigns_count"] = widget_info["wait_campaigns_count"]
            p_widgets_for_all_campaigns["data"][p_widget] = for_all

        # 6. save to disk and return as a json string

        with open(f"{os.environ.get('ULANMEDIAAPP')}/data/p_widgets_for_all_campaigns/{date_range}_p_widgets_for_all_campaigns_dataset.json", "w") as file:
            json.dump(p_widgets_for_all_campaigns, file)

        return json.dumps(p_widgets_for_all_campaigns)
    except Exception:
        # was a bare except; also dropped the original's unused
        # vol_id_for_adding_metadata local
        print("Failed - email sent")
        send_email("*****@*****.**", "Failed - create_p_widgets_for_all_campaigns_dataset()", "Failed - create_p_widgets_for_all_campaigns_dataset()")
        sys.exit()
def create_gprs_for_each_p_offer_dataset(date_range):
    """Build a per-p-offer gpr summary from the offers-per-flow-rule data.

    Deduplicates offers by p_offer_name across all flow rules (first
    occurrence wins), sorts ascending by p_offer_profit, and returns the
    selected columns as a json list of records.  Nothing is written to
    disk.  On any failure it emails an alert and exits the process.
    """
    try:
        with open(
                f'{os.environ.get("ULANMEDIAAPP")}/data/offers_for_each_flow_rule/{date_range}_offers_for_each_flow_rule_dataset.json',
                'r') as file:
            offers_for_each_flow_rule = json.load(file)["data"]

        # one record per distinct p offer name (first occurrence wins)
        gprs_for_each_p_offer = {}
        for flow_rule in offers_for_each_flow_rule.values():
            for offer in flow_rule.values():
                p_offer_name = offer["p_offer_name"]
                if p_offer_name not in gprs_for_each_p_offer:
                    gprs_for_each_p_offer[p_offer_name] = {
                        "p_offer_name": p_offer_name,
                        "gpr": offer["gpr"],
                        "p_offer_profit": round(offer["p_offer_profit"], 2),
                        "p_offer_profit_rank": offer["p_offer_profit_rank"],
                        "gpr_formula": offer["gpr_formula"],
                        "roi_formula": offer["roi_formula"],
                        "cvr_formula": offer["cvr_formula"],
                    }

        frame = pd.DataFrame(list(gprs_for_each_p_offer.values()))
        frame = frame.sort_values("p_offer_profit", ascending=True)

        return json.dumps(frame[[
            "p_offer_profit_rank", "p_offer_profit", "p_offer_name", "gpr",
            "gpr_formula", "roi_formula", "cvr_formula"
        ]].to_dict("records"))
    except Exception:
        # was a bare except
        print("Failed - email sent")
        send_email("*****@*****.**",
                   "Failed - create_gprs_for_each_p_offer_dataset()",
                   "Failed - create_gprs_for_each_p_offer_dataset()")
        sys.exit()
def combine_mgid_vol_ads_data(mgid_token, vol_token, date_range,
                              vol_start_date, vol_end_date, mgid_data,
                              vol_data):
    """Combine the mgid and vol ads data, keyed by ad id, and save to disk.

    Both *mgid_data* and *vol_data* map ad ids to per-ad dictionaries.
    Ads whose mgid campaign id is not in the campaign sets are skipped;
    ads missing from *vol_data* get zeroed conversion fields.

    NOTE(review): mgid_token and vol_token are unused here but are kept
    for interface compatibility with callers.

    On any failure it emails an alert and exits the process.
    """
    try:
        # look up (vol_id, campaign name) from an mgid campaign id
        campaigns_lookup = {
            campaign["mgid_id"]: (campaign["vol_id"], campaign["name"])
            for campaign in get_campaign_sets()
        }

        combined_ads = {
            "metadata": {
                "vol_start_date": vol_start_date,
                "vol_end_date": vol_end_date
            },
            "data": {}
        }

        for ad in mgid_data.values():
            ad_id = ad["ad_id"]
            mgid_id = ad["mgid_id"]
            if mgid_id not in campaigns_lookup:
                # not one of our campaigns
                continue
            ad["vol_id"], ad["name"] = campaigns_lookup[mgid_id]
            vol_ad_data = vol_data.get(ad_id)
            if vol_ad_data is not None:
                ad["conversions"] = vol_ad_data["conversions"]
                ad["sales"] = vol_ad_data["sales"]
                ad["leads"] = vol_ad_data["leads"]
                ad["revenue"] = vol_ad_data["revenue"]
            else:
                # no vol record for this ad: zero the conversion fields
                ad["conversions"] = 0
                ad["sales"] = 0
                ad["leads"] = 0
                ad["revenue"] = 0
            combined_ads["data"][ad_id] = ad

        with open(
                f"{os.environ.get('ULANMEDIAAPP')}/data/ads/{date_range}_ads_dataset.json",
                "w") as file:
            json.dump(combined_ads, file)
    except Exception:
        # was a bare except
        print("Failed - email sent")
        send_email("*****@*****.**",
                   "Failed - combine_mgid_vol_ads_data()",
                   "Failed - combine_mgid_vol_ads_data()")
        sys.exit()
def create_months_for_one_ad_for_one_campaign_dataset(token, start_date, end_date, ad_image, campaign_id):
    """Build month-by-month stats for one ad inside one campaign.

    Queries the voluum report API for clicks/cost/revenue/profit grouped
    by month, then adds lead ("account") and sale ("deposit") counts
    from the locally cached conversions dataset.  Saves the result to a
    json file and returns it as a json string.  On any failure it emails
    an alert and exits the process.
    """
    try:
        # validate the date strings early; a bad format raises ValueError
        # and is reported through the except branch below
        datetime.strptime(start_date, "%Y-%m-%d")
        datetime.strptime(end_date, "%Y-%m-%d")

        months = {"metadata": {"vol_start_date": start_date,
                               "vol_end_date": end_date},
                  "data": {}}
        url = f"https://api.voluum.com/report?from={start_date}T00:00:00Z&to={end_date}T00:00:00Z&tz=America%2FLos_Angeles&filter={ad_image}&conversionTimeMode=VISIT&currency=USD&sort=month&direction=desc&columns=month&columns=customVariable5&columns=visits&columns=conversions&columns=revenue&columns=cost&columns=profit&columns=cv&columns=roi&columns=epv&groupBy=month&groupBy=custom-variable-5&offset=0&limit=1000&include=ACTIVE&filter1=campaign&filter1Value={campaign_id}"
        res = requests.get(url, headers={"cwauth-token": token}).json()
        for row in res["rows"]:
            month = row["month"]
            if month in months["data"]:
                entry = months["data"][month]
                entry["clicks"] += row["visits"]
                entry["cost"] += row["cost"]
                entry["revenue"] += row["revenue"]
                entry["profit"] += row["profit"]
            else:
                months["data"][month] = {
                    "clicks": row["visits"],
                    "cost": row["cost"],
                    "revenue": row["revenue"],
                    "profit": row["profit"],
                    "leads": 0,
                    "sales": 0,
                    "month": month,
                    "month_index": find_month_index_number(month),
                }

        with open(f'{os.environ.get("ULANMEDIAAPP")}/data/conversions_for_each_campaign/oneeighty_conversions_for_each_campaign_dataset.json', 'r') as file:
            conversions_for_each_campaign = json.load(file)["data"]

        for conversion in conversions_for_each_campaign[campaign_id]:
            if conversion["customVariable5"] != ad_image:
                continue
            # month number parsed from the "YYYY-MM-..." visit timestamp.
            # NOTE(review): this key is an int while the API rows above
            # are keyed by row["month"] - confirm the key types match
            month = int(conversion["visitTimestamp"].split('-')[1])
            if month in months["data"]:
                conversion_type = conversion["transactionId"]
                if conversion_type == "account":
                    months["data"][month]["leads"] += 1
                elif conversion_type == "deposit":
                    months["data"][month]["sales"] += 1

        with open(f"{os.environ.get('ULANMEDIAAPP')}/data/months_for_one_ad_for_one_campaign/{ad_image}_{campaign_id}_months_for_one_ad_for_one_campaign_dataset.json", "w") as file:
            json.dump(months, file)

        return json.dumps(months)
    except Exception:
        # was a bare except
        print("Failed - email sent")
        send_email("*****@*****.**", "Failed - create_months_for_one_ad_for_one_campaign_dataset()", "Failed - create_months_for_one_ad_for_one_campaign_dataset()")
        sys.exit()
# Example #13
def get_mgid_widget_clicks_and_costs_by_campaign(token, campaign_id,
                                                 start_date, end_date):
    """Return accurate per-widget clicks/cost/coeff from mgid for one campaign.

    mgid provides accurate clicks and cost (voluum provides the rest:
    conversions, revenue, etc.).  A widget may have "sources" (child
    widgets): source id "0" is really the parent widget itself and keeps
    the parent id, while a real child is stored under "<parent>s<child>"
    to match how voluum stores it.  On a 401 the mgid token is refreshed
    and the call is retried with the new token.  On other request
    failures it emails an alert and exits the process.
    """
    try:
        url = f"https://api.mgid.com/v1/goodhits/campaigns/{campaign_id}/quality-analysis?token={token}&campaignId={campaign_id}&dateInterval=interval&startDate={start_date}&endDate={end_date}"
        response = requests.get(url)
        if response.status_code == 401:
            # expired token: fetch a fresh one and retry
            mgid_token = get_and_return_new_mgid_token()
            return get_mgid_widget_clicks_and_costs_by_campaign(
                mgid_token, campaign_id, start_date, end_date)

        response.raise_for_status()
        response = response.json()

        widgets_data = {}
        interval = response[campaign_id][start_date + "_" + end_date]
        if interval == []:
            return widgets_data
        for id, data in interval.items():
            widget_id = id
            if data["sources"]:
                for source_id, source_data in data["sources"].items():
                    # BUG FIX: original compared with `is not "0"`
                    # (object identity), which is not a reliable string
                    # comparison; use inequality instead
                    if source_id != "0":
                        widget_id = f"{id}s{source_id}"
                    widgets_data[widget_id] = {
                        "widget_id": widget_id,
                        "clicks": source_data["clicks"],
                        "cost": source_data["spent"],
                        "coeff": source_data["qualityFactor"]
                    }
            else:
                widgets_data[widget_id] = {
                    "widget_id": widget_id,
                    "clicks": data["clicks"],
                    "cost": data["spent"],
                    "coeff": data["qualityFactor"]
                }

        return widgets_data
    except requests.exceptions.RequestException as e:
        print("Failed - get_mgid_widget_clicks_and_costs_by_campaign")
        send_email(
            "*****@*****.**",
            "Failed - get_mgid_widget_clicks_and_costs_by_campaign() at " +
            str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
def get_blacklist():
    """Return the widget blacklist from Mike's server as parsed json.

    Emails an alert and exits the process if the request fails.
    """
    try:
        res = requests.get("https://ulanmedia.brianhaller.net/api/readblacklist")
        res.raise_for_status()
        return res.json()
    except requests.exceptions.RequestException as e:
        # BUG FIX: the failure messages previously referred to
        # update_campaign_sets(), copied from another function
        print("Failed - get_blacklist()")
        print(e)
        send_email("*****@*****.**", "Failed - get_blacklist() at " +
                   str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
# Example #15
def get_vol_access_token(id, key):
    """Create a voluum session and return its auth token.

    Emails an alert and exits the process if the request fails.
    """
    try:
        headers = {"Content-type": "application/json",
                   "Accept": "application/json"}
        payload = json.dumps({"accessId": id, "accessKey": key})
        response = requests.post(
            "https://api.voluum.com/auth/access/session",
            headers=headers,
            data=payload)
        response.raise_for_status()
        token = response.json()["token"]
        return token
    except requests.exceptions.RequestException as e:
        print("Failed - get_vol_access_token()")
        send_email("*****@*****.**", "Failed - get_vol_access_token() at " +
                str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
# Example #16
def get_all_campaign_revenues_by_traffic_source(token, traffic_source_id,
                                                start_date, end_date):
    """Fetch per-campaign visit/revenue totals from voluum for one traffic source.

    Returns the parsed json report.  Emails an alert and exits the
    process if the request fails.
    """
    url = f"https://api.voluum.com/report?from={start_date}T00%3A00%3A00Z&to={end_date}T00%3A00%3A00Z&tz={mgid_timezone}&sort=visits&direction=desc&columns=campaignName&columns=campaignId&columns=visits&columns=revenue&groupBy=campaign&offset=0&limit=100000&include=ACTIVE&conversionTimeMode=VISIT&filter1=traffic-source&filter1Value={traffic_source_id}"
    try:
        report = requests.get(url, headers={"cwauth-token": token})
        report.raise_for_status()
        return report.json()
    except requests.exceptions.RequestException as e:
        print("Failed - get_all_campaign_revenues_by_traffic_source()")
        send_email(
            "*****@*****.**",
            "Failed - get_all_campaign_revenues_by_traffic_source at " +
            str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
def get_all_campaign_conversions_by_traffic_source(token,
        traffic_source_id, start_date, end_date):
    """Aggregate voluum conversions per campaign for one traffic source.

    Returns {vol_id: {"name", "revenue", "leads", "sales"}}.  An
    "account" transaction counts as a lead and a "deposit" as a sale.
    Conversions whose original visit happened before *start_date* are
    skipped, because voluum filters by sale date rather than click date.
    Raises when voluum returns more rows than its 100,000 limit allows;
    on request failure it emails an alert and exits the process.
    """
    url=f"https://api.voluum.com/report/conversions?from={start_date}T00%3A00%3A00Z&to={end_date}T00:00:00Z&tz={mgid_timezone}&filter={traffic_source_id}&sort=campaignName&direction=asc&columns=transactionId&columns=revenue&columns=campaignName&columns=trafficSourceId&groupBy=conversion&offset=0&limit=100000&include=ACTIVE&conversionTimeMode=VISIT"
    try:
        campaigns = requests.get(url, headers={"cwauth-token": token}).json()

        if campaigns["totalRows"] != len(campaigns["rows"]):
            # The get request has a limit of 100,000 so if there are more
            # than 100,000 conversions returned, this exception is raised.
            raise Exception("voluum didn't return all the conversion data.")

        # key = vol_id, value = dict with name, revenue, leads, sales
        campaigns_data = {}
        for campaign in campaigns["rows"]:
            vol_id = campaign["campaignId"]
            if vol_id not in campaigns_data:
                # strip the leading "<prefix> - " off the campaign name
                campaign_name = re.sub(r"^.* - ", "", campaign["campaignName"], count=1)
                campaigns_data[vol_id] = {"name": campaign_name, "revenue": 0,
                                          "leads": 0, "sales": 0}

        # loop-invariant hoisted: parse the range start once, not per row
        start_date_in_date_format = datetime.strptime(start_date, "%Y-%m-%d")
        for campaign in campaigns["rows"]:
            # Only count conversions whose ORIGINAL click falls inside
            # the requested range (voluum filters by sale date instead).
            click_date_in_date_format = datetime.strptime(
                campaign["visitTimestamp"], "%Y-%m-%d %I:%M:%S %p")
            if start_date_in_date_format > click_date_in_date_format:
                continue

            vol_id = campaign["campaignId"]
            if campaign["transactionId"] == "account":
                campaigns_data[vol_id]["leads"] += 1
            elif campaign["transactionId"] == "deposit":
                campaigns_data[vol_id]["sales"] += 1
            campaigns_data[vol_id]["revenue"] += campaign["revenue"]
        return campaigns_data
    except requests.exceptions.RequestException as e:
        print("Failed - get_all_campaign_conversions_by_traffic_source()")
        send_email("*****@*****.**", "Failed - get_all_campaign_conversions_by_traffic_source at " +
                str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
def exclude_all_campaigns_for_blacklisted_p_widgets(date_range):
    """Audit blacklisted p widgets for campaigns still marked "included".

    Sends two alert emails per offending campaign; if none are found,
    sends a pair of all-clear emails instead.
    """
    with open(
            f'{os.environ.get("ULANMEDIAAPP")}/data/complete_p_widgets/{date_range}_complete_p_widgets_dataset.json',
            'r') as file:
        complete_p_widgets = json.load(file)

    emails_sent = 0
    for p_widget in complete_p_widgets.values():
        if p_widget["for_all_campaigns"]["global_status"] != "p_blacklist":
            continue
        for campaign in p_widget["for_each_campaign"]:
            if campaign["status"] != "included":
                continue
            subject = 'ALERT - found an "included" campaign in a blacklisted widget'
            message = f'found an included campaign on a blacklisted p widget:\np widget id {p_widget["for_all_campaigns"]["widget_id"]}\ncampaign {campaign["mgid_id"]}\nhttps://ulanmedia.brianhaller.net/campaignsforonepwidget/{p_widget["for_all_campaigns"]["widget_id"]}'
            print(message)
            emails_sent += 1
            send_email("*****@*****.**", subject, message)
            send_email("*****@*****.**", subject, message)

    if emails_sent == 0:
        # all clear: every campaign on a blacklisted widget is excluded
        message = 'ok - campaigns for blacklisted widgets are all excluded'
        print(message)
        send_email("*****@*****.**", message, message)
        send_email("*****@*****.**", message, message)
# Example #19
def create_offers_for_one_flow_rule_dataset(date_range, flow_rule_argument):
    """Build and persist the offers dataset for a single flow rule.

    Reads the "offers for each flow rule" dataset for ``date_range``,
    extracts the offers belonging to ``flow_rule_argument``, annotates each
    offer with a classification and a vol/rec weight-mismatch flag, writes
    the result under data/offers_for_one_flow_rule, and returns it as a
    JSON string. On any failure an alert email is sent and the process
    exits.
    """
    try:
        with open(
                f'{os.environ.get("ULANMEDIAAPP")}/data/offers_for_each_flow_rule/{date_range}_offers_for_each_flow_rule_dataset.json',
                'r') as file:
            json_file = json.load(file)

        metadata = json_file["metadata"]
        offers_for_each_flow_rule = json_file["data"]

        offers_for_one_flow_rule = {
            "metadata": metadata,
            "data": offers_for_each_flow_rule[flow_rule_argument]
        }
        offers = offers_for_one_flow_rule["data"]

        # Add classification to each offer.
        for offer in offers:
            offers[offer]["classification"] = classify_offer_for_all_campaigns(
                offers[offer])

        # Flag offers whose volume weight disagrees with the recommended
        # weight; offers with no volume weight ("NA") are never mismatched.
        for offer in offers:
            vol_weight = offers[offer]["vol_weight"]
            rec_weight = offers[offer]["rec_weight"]
            # Use short-circuiting boolean "and" (the original used the
            # bitwise "&" operator, which does not short-circuit).
            offers[offer]["has_mismatch_vol_weight_and_rec_weight"] = (
                vol_weight != rec_weight and vol_weight != "NA")

        # Save file and return.
        with open(
                f"{os.environ.get('ULANMEDIAAPP')}/data/offers_for_one_flow_rule/{flow_rule_argument}_{date_range}_offers_for_one_flow_rule_dataset.json",
                "w") as file:
            json.dump(offers_for_one_flow_rule, file)

        return json.dumps(offers_for_one_flow_rule)
    except Exception:
        # Catch Exception rather than a bare except so SystemExit and
        # KeyboardInterrupt still propagate; alert by email and stop.
        print("Failed - email sent")
        send_email("*****@*****.**",
                   "Failed - create_offers_for_one_flow_rule_dataset()",
                   "Failed - create_offers_for_one_flow_rule_dataset()")
        sys.exit()
def get_vol_ads_data(date_range):
    """Aggregate Voluum conversion stats per ad id (customVariable3).

    Reads the conversions-for-each-campaign dataset for ``date_range`` and,
    for every campaign returned by get_campaign_sets(), sums revenue,
    conversion count, leads ("account" transactions) and sales ("deposit"
    transactions) per ad id. Returns a dict keyed by ad id. On any failure
    an alert email is sent and the process exits.
    """
    try:
        with open(f'{os.environ.get("ULANMEDIAAPP")}/data/conversions_for_each_campaign/{date_range}_conversions_for_each_campaign_dataset.json', 'r') as file:
            json_file = json.load(file)
        conversions_for_each_campaign = json_file["data"]

        # campaigns is a list of dicts like {"vol_id": "123", "mgid_id": 123, ...}
        campaigns = get_campaign_sets()

        # ads_data maps each ad id (customVariable3) to its stats dict.
        ads_data = {}
        for campaign in campaigns:
            vol_id = campaign["vol_id"]
            name = campaign["name"]
            if vol_id not in conversions_for_each_campaign:
                continue
            for conversion in conversions_for_each_campaign[vol_id]:
                ad_id = conversion["customVariable3"]
                if ad_id not in ads_data:
                    # BUG FIX: initialize every counter to zero up front.
                    # The original only set "leads"/"sales" when the first
                    # conversion was "account" or "deposit"; any other
                    # transactionId left those keys missing and a later
                    # `+= 1` raised KeyError.
                    ads_data[ad_id] = {
                        "ad_id": ad_id,
                        "vol_id": vol_id,
                        "name": name,
                        "revenue": 0,
                        "conversions": 0,
                        "leads": 0,
                        "sales": 0,
                    }
                ads_data[ad_id]["revenue"] += conversion["revenue"]
                ads_data[ad_id]["conversions"] += 1
                if conversion["transactionId"] == "account":
                    ads_data[ad_id]["leads"] += 1
                elif conversion["transactionId"] == "deposit":
                    ads_data[ad_id]["sales"] += 1

        return ads_data
    except Exception:
        # Exception (not a bare except) so sys.exit()/KeyboardInterrupt
        # are not swallowed.
        print("Failed - email sent")
        send_email("*****@*****.**", "Failed - get_vol_ads_data()", "Failed - get_vol_ads_data()")
        sys.exit()
Exemple #21
0
def check_all_mgid_ads(token):
    """Page through every MGID ad, tally statuses, and email a summary.

    Fetches teasers 700 at a time until a short (or empty) page comes back.
    Status codes are bucketed into active/paused/pending; anything else
    counts as rejected and is detailed in the email body (and flips the
    subject to ALERT). A 401 refreshes the token and restarts the scan.
    """
    page = 0
    ads_data = {}
    # Keep fetching while every page so far has come back completely full.
    while len(ads_data) == page * 700:
        url = f"https://api.mgid.com/v1/goodhits/clients/{mgid_client_id}/teasers?token={token}&limit=700&start={page * 700}"
        page += 1
        res = requests.get(url)
        if res.status_code == 401:
            # Token expired: refresh it and start the scan over.
            return check_all_mgid_ads(get_and_return_new_mgid_token())
        res.raise_for_status()
        payload = res.json()
        if not payload:
            break
        ads_data.update(payload)

    # Map MGID status codes onto our reporting buckets; anything not
    # listed is treated as rejected.
    status_to_bucket = {
        'goodPerformance': 'active',
        'new': 'active',
        'campaignBlocked': 'paused',
        'blocked': 'paused',
        'onModeration': 'pending',
    }
    ad_status_counts = {"active": 0, "paused": 0, "pending": 0, "rejected": 0}
    rejected_ads = {}
    for ad_id, ad in ads_data.items():
        bucket = status_to_bucket.get(ad["status"]["code"], "rejected")
        ad_status_counts[bucket] += 1
        if bucket == "rejected":
            rejected_ads[ad_id] = {
                "ad_id": ad_id,
                "campaign_id": ad["campaignId"]
            }

    subject = f"ok - {len(ads_data)} ads: {ad_status_counts['active']} active, {ad_status_counts['paused']} paused, {ad_status_counts['pending']} pending, {ad_status_counts['rejected']} rejected"
    body = f"{ad_status_counts['active']} of {len(ads_data)} ads in all campaigns are active.\n{ad_status_counts['paused']} of {len(ads_data)} ads in all campaigns are paused.\n{ad_status_counts['pending']} of {len(ads_data)} ads in all campaigns are pending.\n{ad_status_counts['rejected']} of {len(ads_data)} ads in all campaigns are rejected."
    if ad_status_counts["rejected"] > 0:
        subject = f"ALERT - {len(ads_data)} ads: {ad_status_counts['active']} active, {ad_status_counts['paused']} paused, {ad_status_counts['pending']} pending, {ad_status_counts['rejected']} rejected"
        for rejected in rejected_ads.values():
            body += f'\n\nad {rejected["ad_id"]} rejected in campaign {rejected["campaign_id"]}\nhttps://dashboard.mgid.com/advertisers/teasers-goods/campaign_id/{rejected["campaign_id"]}'
    print(subject)
    print(body)
    send_email("*****@*****.**", subject, body)
    send_email("*****@*****.**", subject, body)
def get_mgid_campaign_costs(token, client_id, start, end):
    """Return MGID campaign cost stats for ``client_id`` between start and end.

    On a 401 the token is refreshed and the call retried with the new
    token; any other HTTP error sends an alert email and terminates the
    process.
    """
    try:
        res = requests.get(
            f"https://api.mgid.com/v1/goodhits/clients/{client_id}/campaigns-stat?token={token}&dateInterval=interval&startDate={start}&endDate={end}"
        )
        if res.status_code == 401:
            mgid_token = get_and_return_new_mgid_token()
            return get_mgid_campaign_costs(mgid_token, client_id, start, end)

        res.raise_for_status()
        return res.json()
    except requests.exceptions.RequestException as e:
        print("Failed - get_mgid_campaign_costs")
        # Fixed the alert subject: the original contained an accidental run
        # of spaces ("Failed -                get_mgid_campaign_costs()"),
        # inconsistent with every other failure email in this module.
        send_email(
            "*****@*****.**",
            "Failed - get_mgid_campaign_costs() at " +
            str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
def get_mgid_daily_stats_data(token, start_date, end_date):
    """Return MGID per-campaign daily stats between start_date and end_date.

    On a 401 the token is refreshed and the call retried; any other HTTP
    error sends an alert email and terminates the process.
    """
    mgid_url = (
        f"https://api.mgid.com/v1/goodhits/clients/{mgid_client_id}"
        f"/campaigns-stat?token={token}&dateInterval=interval"
        f"&startDate={start_date}&endDate={end_date}"
    )
    try:
        response = requests.get(mgid_url)
        if response.status_code == 401:
            fresh_token = get_and_return_new_mgid_token()
            return get_mgid_daily_stats_data(fresh_token, start_date, end_date)

        response.raise_for_status()
        return response.json()["campaigns-stat"]

    except requests.exceptions.RequestException as e:
        send_email(
            "*****@*****.**",
            "Failed - get_mgid_daily_stats_data at " +
            str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
def update_widget_domains_file():
    """Refresh the local widget-domains lookup from ulanmedia.com.

    Downloads widgetdomains.txt (comma-separated lines of traffic source,
    widget id, domain, and domain source), groups the domains by widget id,
    and writes the lookup as JSON to curated_lists/widget_domains. On any
    HTTP failure an alert email is sent and the process exits.
    """
    try:
        res = requests.get("http://ulanmedia.com/mgid/widgetdomains.txt")
        res.raise_for_status()
        # Strip Windows carriage returns before parsing fields.
        lines = [line.replace("\r", "") for line in res.text.split("\n")]

        widget_domain_lookup = {}
        for line in lines:
            if not line:
                continue
            fields = line.split(",")
            # Skip malformed rows that don't have exactly four fields.
            if len(fields) != 4:
                continue
            traffic_source, widget_id, domain, widget_domain_source = fields
            entry = widget_domain_lookup.get(widget_id)
            if entry is not None:
                entry['domains'].append(domain)
            else:
                widget_domain_lookup[widget_id] = {
                    'widget_id': widget_id,
                    'domains': [domain],
                    'traffic_source': traffic_source,
                    'widget_domain_source': widget_domain_source
                }

        with open(
                f"{os.environ.get('ULANMEDIAAPP')}/curated_lists/widget_domains/widget_domains.json",
                "w") as file:
            json.dump(widget_domain_lookup, file)

    except requests.exceptions.RequestException as e:
        print("Failed to update widget domains file")
        print(e)
        send_email(
            "*****@*****.**",
            "Failed - update_widget_domains_file() at " +
            str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
Exemple #25
0
def create_active_flow_rules_list(token):
    """Fetch the Voluum flow and save the names of its active rules.

    Collects rule names from both the default paths and the conditional
    path groups of a hard-coded flow, then writes the list to the relative
    path data/active_flow_rules/active_flow_rules.json. HTTP failures alert
    by email and exit the process.
    """
    try:
        flow_id = "da8f9291-462d-43e7-98a4-24d62f608297"
        response = requests.get(
            f"https://api.voluum.com/flow/{flow_id}",
            headers={"cwauth-token": token})
        response.raise_for_status()
        flow = response.json()

        active_flow_rules = [path["name"] for path in flow["defaultPaths"]]
        active_flow_rules.extend(
            group["name"] for group in flow["conditionalPathsGroups"])

        with open("../../data/active_flow_rules/active_flow_rules.json",
                  "w") as file:
            json.dump(active_flow_rules, file)

    except requests.exceptions.RequestException as e:
        print("Failed - create_active_flow_rules_list")
        send_email("*****@*****.**", "Failed - create_active_flow_rules_list() at " + str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
Exemple #26
0
def get_vol_daily_stats_data(token, start_date, end_date, timezone):
    """Return Voluum daily per-campaign report rows for the date range.

    If Voluum reports more total rows than it returned, an alert email is
    sent and an exception raised (the report would be incomplete). HTTP
    failures alert by email and exit the process.
    """
    vol_url = f"https://api.voluum.com/report?from={start_date}T00%3A00%3A00Z&to={end_date}T00:00:00Z&tz={timezone}&sort=campaignName&direction=desc&columns=campaignName&columns=day&columns=campaignId&columns=visits&columns=conversions&columns=revenue&columns=cost&columns=cpv&groupBy=campaign&groupBy=day&offset=0&limit=1000000&include=ACTIVE&conversionTimeMode=VISIT&filter1=traffic-source&filter1Value={mgidVolTrafficSourceId}"
    try:
        response = requests.get(vol_url, headers={"cwauth-token": token})
        response.raise_for_status()
        report = response.json()
        rows = report["rows"]
        # If totalRows disagrees with what we actually received, treat the
        # report as incomplete and fail loudly rather than return bad data.
        if report["totalRows"] != len(rows):
            send_email(
                "*****@*****.**",
                "Failed - get_vol_daily_stats_data at " +
                str(datetime.now().strftime("%Y-%m-%d %H:%M")),
                "all data not returned from voluum")
            raise Exception("voluum didn't return all the conversion data")
        return rows

    except requests.exceptions.RequestException as e:
        print("exception handled")
        send_email(
            "*****@*****.**", "Failed - get_vol_daily_stats_data at " +
            str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
def get_mgid_included_widgets_by_campaign(token, campaign_id, start_date,
                                          end_date):
    """Return the widget ids active on a campaign that are not excluded.

    Queries MGID's quality-analysis endpoint for the campaign over the
    date interval, expands widget/source pairs into "{widget}s{source}"
    ids, subtracts the campaign's excluded widgets, and returns the rest.
    A 401 refreshes the token and retries. Any other failure alerts by
    email and exits the process.
    """
    try:
        url = f"https://api.mgid.com/v1/goodhits/campaigns/{campaign_id}/quality-analysis?token={token}&campaignId={campaign_id}&dateInterval=interval&startDate={start_date}&endDate={end_date}"
        response = requests.get(url)
        if response.status_code == 401:
            mgid_token = get_and_return_new_mgid_token()
            return get_mgid_included_widgets_by_campaign(
                mgid_token, campaign_id, start_date, end_date)

        response.raise_for_status()
        response = response.json()

        widgets = []
        interval_key = start_date + "_" + end_date
        if response[campaign_id][interval_key] == []:
            return widgets
        for id, data in response[campaign_id][interval_key].items():
            widgets.append(id)
            if data["sources"]:
                for source_id in data["sources"]:
                    # BUG FIX: the original compared with `is not "0"`
                    # (identity, not equality), which is unreliable for
                    # strings; use != to compare values.
                    if source_id != "0":
                        widgets.append(f"{id}s{source_id}")

        excluded_widgets = get_mgid_excluded_widgets_by_campaign(
            token, mgid_client_id, campaign_id)
        # Keep only widgets not on the campaign's exclusion list.
        return [widget for widget in widgets
                if widget not in excluded_widgets]
    except Exception:
        # Exception (not a bare except) so SystemExit/KeyboardInterrupt
        # still propagate.
        print("Failed - email sent")
        send_email("*****@*****.**",
                   "Failed - get_mgid_included_widgets_by_campaign()",
                   "Failed - get_mgid_included_widgets_by_campaign()")
        sys.exit()
def get_mgid_excluded_widgets_by_campaign(mgid_token, mgid_client_id,
                                          mgid_campaign_id):
    """Return the list of widget ids excluded on one MGID campaign.

    The API returns widgetsFilterUid.widgets as a dict keyed by widget id.
    A value of "[]" or None means the whole widget is excluded; otherwise
    the value is a string such as "[1, 2]" (the brackets are literally part
    of the string) whose numbers are source ids, and each source becomes an
    excluded "{widget}s{source}" id. A 401 refreshes the token and retries;
    other HTTP errors alert by email and exit the process.
    """
    url = f"https://api.mgid.com/v1/goodhits/clients/{mgid_client_id}/campaigns/{mgid_campaign_id}?token={mgid_token}"
    try:
        response = requests.get(url)
        if response.status_code == 401:
            new_mgid_token = get_and_return_new_mgid_token()
            return get_mgid_excluded_widgets_by_campaign(
                new_mgid_token, mgid_client_id, mgid_campaign_id)

        response.raise_for_status()
        response = response.json()
        excluded_widgets = []
        if response["widgetsFilterUid"]["widgets"] == []:
            return excluded_widgets
        for key, value in response["widgetsFilterUid"]["widgets"].items():
            # BUG FIX: use short-circuiting "or" with "is None" instead of
            # the original bitwise "|" combined with "== None".
            if value == "[]" or value is None:
                excluded_widgets.append(key)
            else:
                # Strip the literal brackets and commas, then split the
                # remaining space-separated source ids.
                source_ids = (value.replace("[", "")
                                   .replace("]", "")
                                   .replace(",", "")
                                   .split(" "))
                for source_id in source_ids:
                    excluded_widgets.append(f"{key}s{source_id}")
        # excluded_widgets is a list of excluded widget ids.
        return excluded_widgets
    except requests.exceptions.RequestException as e:
        print("Failed - get_mgid_excluded_widgets_by_campaign")
        send_email(
            "*****@*****.**",
            "Failed - get_mgid_excluded_widgets_by_campaign() at " +
            str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
Exemple #29
0
def update_good_widgets_file():
    """Refresh the curated good-widgets list from ulanmedia.com.

    Downloads goodpwidgets.txt, drops blank lines and carriage returns,
    and writes the remaining widget ids as a JSON list to
    curated_lists/good_widgets. HTTP failures alert by email and exit.
    """
    try:
        res = requests.get("http://ulanmedia.com/mgid/goodpwidgets.txt")
        res.raise_for_status()
        good_widgets = [
            line.replace("\r", "")
            for line in res.text.split("\n")
            if line != ""
        ]

        with open(
                f"{os.environ.get('ULANMEDIAAPP')}/curated_lists/good_widgets/good_widgets.json",
                "w") as file:
            json.dump(good_widgets, file)
    except requests.exceptions.RequestException as e:
        print("Failed to update good widgets file")
        print(e)
        send_email(
            "*****@*****.**",
            "Failed - update_good widgets_file() at " +
            str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()
Exemple #30
0
def create_conversions_for_each_campaign_dataset(token, start_date, end_date, date_range):
    """Download Voluum conversions and group them by campaign id.

    Writes {"metadata": {vol_start_date, vol_end_date}, "data":
    {campaign_id: [conversions...]}} to data/conversions_for_each_campaign
    for ``date_range``. HTTP failures alert by email and terminate the
    process.
    """
    try:
        url = f"https://api.voluum.com/report/conversions?from={start_date}T00:00:00Z&to={end_date}T00:00:00Z&tz=America%2FLos_Angeles&conversionTimeMode=VISIT&sort=visitTimestamp&direction=desc&columns=visitTimestamp&columns=transactionId&columns=campaignId&columns=offerId&columns=countryName&columns=trafficSourceId&columns=deviceName&columns=os&columns=browser&columns=isp&columns=customVariable1&columns=customVariable3&groupBy=conversion&offset=0&limit=1000000&include=ACTIVE&filter=37bbd390-ed90-4978-9066-09affa682bcc"
        response = requests.get(url, headers={"cwauth-token": token})
        response.raise_for_status()
        report = response.json()

        # Group each conversion row under its campaign id.
        grouped = {}
        for conversion in report["rows"]:
            grouped.setdefault(conversion["campaignId"], []).append(conversion)

        conversions_for_each_campaign = {
            "metadata": {
                "vol_start_date": start_date,
                "vol_end_date": end_date,
            },
            "data": grouped,
        }
        with open(f"{os.environ.get('ULANMEDIAAPP')}/data/conversions_for_each_campaign/{date_range}_conversions_for_each_campaign_dataset.json", "w") as file:
            json.dump(conversions_for_each_campaign, file)

    except requests.exceptions.RequestException as e:
        print("Failed - create_conversions_for_each_campaign_dataset()")
        print(e)
        send_email("*****@*****.**", "Failed - create_conversions_for_each_campaign_dataset() at " + str(datetime.now().strftime("%Y-%m-%d %H:%M")), e)
        sys.exit()