def fbownedpublicapp():
    """Export all publicly available data for a fixed list of owned Facebook pages.

    Loads the app access token from ``access_data.json`` and database
    connection settings from ``connection_data.json``, then for each page
    builds the API request, fetches the page's JSON payload, and hands it
    to ``fb_ownedpubliccomplete`` for processing.
    """
    # App access token (access_data.json).
    accessdata = loadjsonfile(APP_NAME, "access_data.json")
    # Database connection info (connection_data.json).
    connectiondata = loadjsonfile(APP_NAME, "connection_data.json")

    # Supermarket pages to scrape.
    pages_to_scrape = ["jumbosupermarkten", "PLUSsupermarkt", "EMTESUPERMARKTEN",
                       "Dirksupermarkten", "CoopSupermarkten", "lidlnederland",
                       "JanLindersSupermarkten", "DEENSupermarkten", "albertheijn"]

    # Iterate the pages directly instead of keeping a manual index counter.
    for page in pages_to_scrape:
        # Construct the request for this page's public data.
        # NOTE(review): 16/1500/6000 are presumably limits/window sizes —
        # confirm against fb_httpbuilderpublic's definition.
        constructedrequest = fb_httpbuilderpublic(APP_NAME, page,
                                                  16, 1500, 6000,
                                                  accessdata["app_access_token"])

        # Retrieve page data JSON from the API.
        requestjson = httpjson(APP_NAME, constructedrequest, 30)

        # Process the complete public Facebook data tables.
        fb_ownedpubliccomplete(APP_NAME, requestjson, connectiondata, 10)
# ---- Example 2 ----
def fbownedpublicapp():
    """Export all publicly available data for a fixed list of owned Facebook pages.

    Loads the app access token and database connection settings from JSON
    files under ``ABSOLUTE_PATH``, then for each page builds the API
    request, fetches the page's JSON payload (echoing both for debugging),
    and hands it to ``fb_ownedpubliccomplete`` for processing.
    """
    # App access token (access_data.json).
    accessdata = loadjsonfile(APP_NAME, ABSOLUTE_PATH + "access_data.json")
    # Database connection info (connection_data.json).
    connectiondata = loadjsonfile(APP_NAME,
                                  ABSOLUTE_PATH + "connection_data.json")

    # Eredivisie football-club pages to scrape.
    pages_to_scrape = [
        "afcajax", "PSV", "feyenoord", "AZAlkmaar", "FCUtrecht1970",
        "ADODenHaag", "peczwolle", "VitesseArnhem", "scheerenveen",
        "HeraclesAlmelo", "VVV.Venlo", "FCGroningen", "excelsiorrdam",
        "WillemIITilburg", "FCTwente", "NACnl", "SpartaRotterdam",
        "RodaJCKerkrade", "NECNijmegen"
    ]

    # Use the loop variable itself; the original bound `page` but indexed
    # with a separate manually-incremented counter.
    for page in pages_to_scrape:
        # Construct the request for this page's public data.
        # NOTE(review): 25/1500/6000 are presumably limits/window sizes —
        # confirm against fb_httpbuilderpublic's definition.
        constructedrequest = fb_httpbuilderpublic(
            APP_NAME, page, 25, 1500, 6000,
            accessdata["app_access_token"])
        print(constructedrequest)

        # Retrieve page data JSON from the API.
        requestjson = httpjson(APP_NAME, constructedrequest, 40)
        print(requestjson)

        # Process the complete public Facebook data tables.
        fb_ownedpubliccomplete(APP_NAME, requestjson, connectiondata, 10)
# ---- Example 3 ----
def fbownedcommentsentiment():
    """Get the sentiment data of yesterday's comments for each owned page.

    Loads database connection settings and sentiment-API credentials, looks
    up the page ids stored in MySQL, and runs the sentiment extraction once
    per page.
    """
    # Database connection info.
    connectiondata = loadjsonfile(APP_NAME, "connection_data.json")
    # Sentiment API credentials/config.
    sentapidata = loadjsonfile(APP_NAME, "sent_api_data.json")

    # List of page ids to loop through.
    fb_pages = fb_getpageidsmysql(APP_NAME, connectiondata, 10)

    # Process each page individually. The original passed the whole
    # `fb_pages` list on every iteration, making N identical calls instead
    # of one call per page — presumably a bug; confirm against
    # fb_getcommentsentimentmsql's expected argument.
    for page in fb_pages:
        fb_getcommentsentimentmsql(APP_NAME, page, connectiondata, sentapidata)
# ---- Example 4 ----
def seoscienceapp():
    """Retrieve Search Console datasets for every configured search type.

    Initializes the Search Console API client, exports full data for each
    entry in the module-level ``searchtypes`` — paging through results in
    5000-row batches — and finally exports landing-page data.
    """
    # Database connection info.
    connectiondata = loadjsonfile(APP_NAME,
                                  ABSOLUTE_PATH + "connection_data.json")

    # OAuth client secrets for the Search Console API.
    clientsecrets = ABSOLUTE_PATH + "client_secrets.json"

    # Search Console data lags behind real time, so query 5 days back.
    currentdate = datetime.now()
    delaydate = (currentdate - timedelta(days=5)).strftime("%Y-%m-%d")

    # Initialize the Search Console API client.
    webmasters_service = sc_initsearchconsole(APP_NAME, SCOPES, clientsecrets)

    # Export data per search type. The original bumped the offset after a
    # full 5000-row batch but never re-queried, so only the first 5000 rows
    # were ever fetched; this loop follows through on the pagination.
    for typing in searchtypes:
        loop_start = 0
        while True:
            full_data = sc_full_export(APP_NAME, SC_URL, webmasters_service,
                                       delaydate, typing, loop_start)
            # A full 5000-row batch implies more rows may remain.
            if 'rows' in full_data and len(full_data["rows"]) > 4999:
                loop_start += 5000
            else:
                break

    # NOTE(review): this relies on `typing` keeping its last loop value —
    # confirm the landing-page export is really meant to run only for the
    # final search type.
    sc_lp_export(APP_NAME, SC_URL, webmasters_service, delaydate, typing, 0)