Example #1
def save_data(data, code):
    # Collect the first three CSV columns plus the response code into one row.
    res = [data[0], data[1], data[2], code]
    print(res)
    # Append the row to "<input filename>_result"; `file` is the module-level input filename.
    IO.csv_append([res], file.split(".")[0] + "_result")
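The examples rely on a project-local IO module whose source is not part of this listing. The snippet below is only a rough stand-in (saved as IO.py), assuming csvfile_read returns the CSV rows as a list of lists and csv_append appends rows to "<name>.csv"; the real module may differ.

# Hypothetical stand-in for the project-local IO module; not the original source.
import csv

def csvfile_read(name):
    # Read every row of the given CSV file into a list of lists.
    with open(name, newline="") as f:
        return [row for row in csv.reader(f)]

def csv_append(rows, name):
    # Append the given rows to "<name>.csv", creating the file if needed.
    with open(name + ".csv", "a", newline="") as f:
        csv.writer(f).writerows(rows)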
Example #2
import sys
import json

# IO, Crawler and get_param are project-local helpers imported elsewhere in the
# original script; hedged stand-in sketches appear before and after this example.


def get_body(data):
    # Round-trip the string through json to validate and normalise the request body.
    return json.dumps(json.loads(data))


def save_data(data, code):
    # Collect the first three CSV columns plus the response code into one row.
    res = [data[0], data[1], data[2], code]
    print(res)
    # Append the row to "<input filename>_result".
    IO.csv_append([res], file.split(".")[0] + "_result")


if __name__ == "__main__":
    if len(sys.argv) > 1:
        file = sys.argv[1]
    else:
        sys.exit("usage: %s <input.csv>" % sys.argv[0])

    data = IO.csvfile_read(file)
    domain = data.pop(0)[1]   # first row holds the target domain in its second column
    data.pop(0)               # second row is a header row; drop it

    for each in data:
        crawler = Crawler()
        url = domain + each[1]
        params = {}
        body = {}
        res = None

        # Rows whose first column contains "#" are treated as commented out.
        if each[0].find("#") == -1 and each[2] == 'GET':
            params = get_param(each[4])
            crawler.get(url, params=params)
            res = crawler.status()
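Crawler and get_param are likewise project-local and not shown in this listing. The sketch below is only an assumption based on the calls above: get_param is taken to turn a "key=value&key2=value2" string into a dict, and Crawler to be a thin wrapper over requests that remembers the last response.

# Hypothetical stand-ins, assuming the semantics implied by the calls above.
import requests

def get_param(raw):
    # Parse "key=value&key2=value2" into a dict; empty input yields {}.
    if not raw:
        return {}
    return dict(pair.split("=", 1) for pair in raw.split("&"))

class Crawler:
    def __init__(self):
        self.last = None

    def get(self, url, params=None):
        # Issue the GET request and keep the response for status().
        self.last = requests.get(url, params=params)
        return self.last

    def status(self):
        # HTTP status code of the most recent request, or None if nothing was sent.
        return self.last.status_code if self.last is not None else None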