def add_data(self):
    data = Data.get_data()
    data['id'] = None  # so that the id is generated on the server
    Logger.debug('Add data: {}'.format(data))
    data = json.dumps(data)
    RestClient.post(data)
    RestClient.get()
class Mapper:
    def __init__(self):
        self.client = RestClient(LOGIN, PASSWORD)

    def save_get_response(self, path):
        # build a file name from the request path, replacing '/' with '_'
        directory = path.split('/')
        filepath = ''
        for name in directory:
            filepath += f"{name}_"
        filepath = filepath[:-1] + '.json'
        # get the data
        response = json.dumps(self.client.get(path), indent=4)
        with open(filepath, 'w') as outfile:
            outfile.write(response)
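# A minimal usage sketch of the Mapper above (hypothetical, not part of the
# original snippet): LOGIN and PASSWORD are assumed to be defined elsewhere,
# and the path is assumed to be a valid GET endpoint of the same RestClient.
# The response is dumped to a .json file named after the path, with '/'
# replaced by '_'.
mapper = Mapper()
mapper.save_get_response("/v3/serp/google/organic/tasks_ready")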
# Output name
timestr = time.strftime("%Y%m%d-%H%M%S")
tag = args.output + "-" + timestr
filename = tag + ".csv"
with open(filename, 'w', newline='') as file:
    writer = csv.DictWriter(file, fieldnames=fields, delimiter=";")
    writer.writeheader()

client = RestClient(user, password)

# While there are results, request the next batch
next_batch = True
while next_batch:
    response = client.get("/v3/serp/google/organic/tasks_ready")
    if response['status_code'] == 20000:
        tasks_available = response["tasks"][0]["result_count"]
        print("{} tasks available".format(tasks_available))
        if tasks_available < 1:
            next_batch = False
        # collect the completed tasks and fetch their advanced results
        results = []
        for task in response['tasks']:
            if task['result'] and (len(task['result']) > 0):
                for resultTaskInfo in task['result']:
                    if resultTaskInfo['endpoint_advanced']:
                        results.append(client.get(resultTaskInfo['endpoint_advanced']))
        for result in results:
            for task in result["tasks"]:
                task_id = task['id']
    else:
        # stop polling if the API returns an error
        print("error. Code: %d Message: %s" % (response["status_code"], response["status_message"]))
        next_batch = False
def get_all(self):
    RestClient.get()
# Output name
timestr = time.strftime("%Y%m%d-%H%M%S")
tag = args.output + "-" + timestr
filename = tag + ".csv"
with open(filename, 'w', newline='') as file:
    writer = csv.DictWriter(file, fieldnames=fields, delimiter=";")
    writer.writeheader()

client = RestClient(user, password)

# While there are results, request the next batch
next_batch = True
while next_batch:
    response = client.get(
        "/v3/keywords_data/google/search_volume/tasks_ready")
    if response['status_code'] == 20000:
        tasks_available = response["tasks"][0]["result_count"]
        print("{} tasks available".format(tasks_available))
        if tasks_available < 1:
            next_batch = False
        for task in response["tasks"]:
            if task['result'] and (len(task['result']) > 0):
                for result_task_info in task['result']:
                    if result_task_info['endpoint']:
                        res = client.get(result_task_info['endpoint'])
                        for t in res["tasks"]:
                            if t['result'] and (len(t['result']) > 0):
                                for k in t['result']:
                                    pass  # snippet truncated here; each keyword result would be processed at this point
    else:
        # stop polling if the API returns an error
        print("error. Code: %d Message: %s" % (response["status_code"], response["status_message"]))
        next_batch = False
##
## Shows number of tasks ready for download.
####
import configparser
import argparse
from client import RestClient

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', default="config.ini", type=str,
                        help='Global config file (default: "config.ini")')
    args = parser.parse_args()

    conf = configparser.ConfigParser()
    conf.read(args.config)
    user = conf['general']['user']
    password = conf['general']['password']

    client = RestClient(user, password)
    response = client.get("/v3/keywords_data/google/search_volume/tasks_ready")
    if response["status_code"] == 20000:
        tasks_available = response["tasks"][0]["result_count"]
        print("{} tasks available".format(tasks_available))
    else:
        print("error. Code: %d Message: %s" %
              (response["status_code"], response["status_message"]))
####
## DATAFORSEO SERPS API
##
## Shows number of tasks ready for download
####
import configparser
import argparse
from client import RestClient

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', default="config.ini", type=str,
                        help='Global config file (default: "config.ini")')
    args = parser.parse_args()

    conf = configparser.ConfigParser()
    conf.read(args.config)
    user = conf['general']['user']
    password = conf['general']['password']

    client = RestClient(user, password)
    response = client.get("/v3/serp/google/organic/tasks_ready")
    if response["status_code"] == 20000:
        tasks_available = response["tasks"][0]["result_count"]
        print("{} tasks available".format(tasks_available))
    else:
        print("error. Code: %d Message: %s" %
              (response["status_code"], response["status_message"]))
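# The tasks_ready scripts above read their credentials from an INI file via
# configparser (section [general], keys user and password). A hedged example of
# what config.ini is expected to look like, with placeholder values:
#
#   [general]
#   user = your_login
#   password = your_password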