import json

from lib.auth import Auth
from lib.httpHandler import HttpHandler

# Authenticate against the API and attach the token to the HTTP handler.
http = HttpHandler()
a = Auth("auth", "here")
token = a.readResponse(a.authorizationRequest(http))
http.setToken(token)

# Total number of line items, then page through them 100 at a time,
# starting at offset 4300.
count = http.getRequestPage(0, "line-item").json()['response']['count']
allLineItems = list()
for start_element in range(4300, count, 100):
    lineItemsNew = http.getRequestPage(start_element, "line-item").json()['response']['line-items']
    allLineItems.append(lineItemsNew)

# Keep line items whose lifetime budget exceeds twice their lifetime
# revenue and whose recent impression counts are low.
allLowLineItems = list()
for lineItemArr in allLineItems:
    for lineItem in lineItemArr:
        if lineItem['lifetime_budget'] is not None and lineItem['all_stats'] is not None:
            if float(lineItem['lifetime_budget']) > float(lineItem['all_stats']['lifetime']['revenue']) * 2.0:
                if '7day' in lineItem['all_stats'] and int(lineItem['all_stats']['7day']['imps']) < 7000:
                    allLowLineItems.append(lineItem)
                elif 'yesterday' in lineItem['all_stats'] and int(lineItem['all_stats']['yesterday']['imps']) < 1000:
                    allLowLineItems.append(lineItem)

# The original loop body is truncated here; printing the id is a
# placeholder (assumption) for whatever reporting followed.
for lineItem in allLowLineItems:
    print(lineItem['id'])
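# The nested filter above could be factored into a predicate for
# readability; a minimal sketch of the same rule (is_low_running is a
# hypothetical name, the all_stats layout is taken from the code above):
def is_low_running(lineItem):
    budget = lineItem['lifetime_budget']
    stats = lineItem['all_stats']
    if budget is None or stats is None:
        return False
    # Only line items whose budget is more than twice lifetime revenue qualify.
    if float(budget) <= float(stats['lifetime']['revenue']) * 2.0:
        return False
    # Low 7-day impressions, or (failing that check) low impressions yesterday.
    if '7day' in stats and int(stats['7day']['imps']) < 7000:
        return True
    return 'yesterday' in stats and int(stats['yesterday']['imps']) < 1000

# Equivalent to the loops above:
# allLowLineItems = [li for page in allLineItems for li in page if is_low_running(li)]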
import json

from lib.auth import Auth
from lib.httpHandler import HttpHandler
from worker.siphon.feedGenerator import FeedGenerator
from worker.siphon.SiphonDownloader import SiphonDownloader

# Authenticate and attach the token to the HTTP handler.
http = HttpHandler()
a = Auth("auth", "here")
token = a.readResponse(a.authorizationRequest(http))
http.setToken(token)

# Resolve the download location for every available siphon feed and
# fetch each file into the local "temp" directory.
allDataFeeds = FeedGenerator(http)
downloader = SiphonDownloader("temp", http)
download_links = allDataFeeds.generateLocationRequests(allDataFeeds.getFeeds())
for params in download_links:
    downloader.download_file(params)
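# SiphonDownloader.download_file belongs to this repo and its internals
# are not shown here; purely as an illustration of the underlying idea,
# a streaming download with the requests library looks roughly like this
# (the download_to name and its url/dest parameters are hypothetical):
import os
import requests

def download_to(url, dest_dir, dest_name):
    os.makedirs(dest_dir, exist_ok=True)
    response = requests.get(url, stream=True)
    response.raise_for_status()
    with open(os.path.join(dest_dir, dest_name), 'wb') as fh:
        # Write the body in chunks so large feed files never sit in memory.
        for chunk in response.iter_content(chunk_size=8192):
            fh.write(chunk)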
from lib.auth import Auth
from lib.httpHandler import HttpHandler
# Module path assumed from the repo's naming convention.
from worker.AbstractGenericWorker import AbstractGenericWorker


def aquireAuthToken(a, http):
    # Same auth flow as the other scripts; defined here because the
    # original does not show where this helper lives.
    token = a.readResponse(a.authorizationRequest(http))
    http.setToken(token)


def getPlacement(placement_id):
    # Look up a single placement by id; returns None if the API response
    # does not contain a placement.
    params = {'id': str(placement_id)}
    response_placement = http.getRequest('placement', params).json()['response']
    if 'placement' in response_placement:
        return response_placement['placement']
    else:
        return None


# Note: proxies is defined here but never passed to HttpHandler, unlike
# in the low-running script below.
proxies = {
    "http": "http://proxy.t-online.net:3128",
    "https": "http://proxy.t-online.net:3128",
}

http = HttpHandler("http://api.appnexus.com")
a = Auth()
aquireAuthToken(a, http)

filename = "all_site_ALL_categories.csv"
worker = AbstractGenericWorker(http)
#sites = worker.getAllEntitiesByType('site')
sites = worker.getAllEntitiesByRange('site', 300, 400)
writer_content = list()
count = len(sites)
i = 1
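# The proxies mapping above uses the scheme-to-URL format the requests
# library expects; with plain requests it would be passed per call like
# this (the placement id is hypothetical):
import requests

response = requests.get(
    "http://api.appnexus.com/placement",
    params={'id': '123'},
    proxies=proxies,
)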
import json

from lib.auth import Auth
from lib.httpHandler import HttpHandler
from lib.fileWriter import FileWriter
from worker.lowRunning.LowRunning import LowRunning

proxies = {
    "http": "http://proxy.t-online.net:3128",
    "https": "http://proxy.t-online.net:3128",
}
filename = "low_running_lineitems.csv"

# Authenticate through the proxy and attach the token.
http = HttpHandler(proxies)
a = Auth("Auth", "here")
token = a.readResponse(a.authorizationRequest(http))
http.setToken(token)

# Page through all line items, 100 at a time, starting at offset 4500.
count = http.getRequestPage(0, "line-item").json()["response"]["count"]
allLineItems = list()
for start_element in range(4500, count, 100):
    lineItemsNew = http.getRequestPage(start_element, "line-item").json()["response"]["line-items"]
    allLineItems.append(lineItemsNew)

# Page through the line items that have never run.
params = {"never_run": "true"}
count = http.getRequestPage(0, "line-item", params).json()["response"]["count"]
allNeverLineItems = list()
for start_element in range(0, count, 100):
    # The original is cut off here; this body follows the paging pattern
    # used above (an assumption).
    lineItemsNew = http.getRequestPage(start_element, "line-item", params).json()["response"]["line-items"]
    allNeverLineItems.append(lineItemsNew)
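# The count-then-page loop appears three times across these scripts; a
# minimal sketch of a reusable generator, assuming getRequestPage keeps
# the (start_element, service, params) signature used above and accepts
# None when no filter is needed (fetch_pages is a hypothetical name):
def fetch_pages(http, service, params=None, start=0, page_size=100):
    count = http.getRequestPage(0, service, params).json()["response"]["count"]
    for start_element in range(start, count, page_size):
        page = http.getRequestPage(start_element, service, params).json()["response"]
        # The API pluralizes the service name in the response body,
        # e.g. "line-item" -> "line-items".
        yield page[service + "s"]

# Usage matching the loops above:
# allNeverLineItems = list(fetch_pages(http, "line-item", {"never_run": "true"}))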