def connect_google(self):
    """Authenticate with Google Search Console and bind the target web property.

    If a serialized credentials file already exists at ``self.credentials``,
    reuse it; otherwise run the OAuth flow and serialize the new credentials
    to that same path for future runs.

    Raises:
        Exception: if the account has no web property matching
            ``self.webproperty``.
    """
    # Reuse stored credentials when present; otherwise authenticate
    # interactively and persist the result to the same location.
    if os.path.exists(self.credentials):
        auth_kwargs = {"credentials": self.credentials}
    else:
        auth_kwargs = {"serialize": self.credentials}
    account = searchconsole.authenticate(
        client_config=self.client_secrets, **auth_kwargs)
    self.webproperty = account[self.webproperty]
    if self.webproperty is None:
        raise Exception("webproperty is None!")
def setUp(self):
    """Build the shared test fixtures: account, web property and base query."""
    account = searchconsole.authenticate(
        client_config='auth/client_secrets.json',
        credentials='auth/credentials.dat',
    )
    webproperty = account[webproperty_uri]
    self.account = account
    self.webproperty = webproperty
    self.query = webproperty.query
def load_tests(loader, tests, ignore):
    """Collect the package's doctests through doctest's unittest API.

    Many docstrings contain doctests; rather than a separate doctest
    runner, register them as unittest suites with a shared set of globals.
    """
    account = searchconsole.authenticate(
        client_config='auth/client_secrets.json',
        credentials='auth/credentials.dat',
    )
    # Globals injected into every doctest's namespace.
    shared_globs = {
        'account': account,
        'webproperty': account[webproperty_uri],
        'www_webproperty_com': webproperty_uri,
        'query': account[webproperty_uri].query,
    }
    for module in (searchconsole.auth, searchconsole.account, searchconsole.query):
        tests.addTests(doctest.DocTestSuite(
            module, globs=shared_globs, optionflags=doctest.ELLIPSIS))
    return tests
def _get_report(self):
    """Return the last 7 days of Search Console data for tw.pycon.org,
    broken down by search query."""
    account = searchconsole.authenticate(
        client_config="/usr/local/airflow/dags/client_secret_google_search_console.json",
        credentials="/usr/local/airflow/dags/client_secret_google_search_console_serialized.json",
    )
    site = account["https://tw.pycon.org/"]
    # days=-7 means the 7 days leading up to today.
    query = site.query.range("today", days=-7).dimension("query")
    return query.get()
def test_files(self):
    """Verify that a webmasters service is created from a Google-format
    client secrets file plus a credentials file."""
    result = searchconsole.authenticate(
        client_config='auth/client_secrets.json',
        credentials='auth/credentials.dat',
    )
    self.assertIsInstance(result, searchconsole.account.Account)
def test_mappings(self):
    """Verify that a webmasters service is created from in-memory
    Google-format client_config and credentials mappings."""
    from auth.creds import client_secrets, credentials
    result = searchconsole.authenticate(
        client_config=client_secrets,
        credentials=credentials,
    )
    self.assertIsInstance(result, searchconsole.account.Account)
def test_serialize_credentials(self):
    """Verify credentials serialize to disk and can authenticate when
    loaded back."""
    serialized_file = 'auth/webmasters.dat'
    # Authenticating with serialize=... should write the credentials file.
    account = searchconsole.authenticate(
        client_config='auth/client_secrets.json',
        credentials='auth/credentials.dat',
        serialize=serialized_file,
    )
    self.assertTrue(os.path.isfile(serialized_file))
    # Round-trip: the serialized file must work as a credentials source.
    reloaded = searchconsole.authenticate(
        client_config='auth/client_secrets.json',
        credentials=serialized_file,
    )
    self.assertIsInstance(reloaded, searchconsole.account.Account)
    os.remove(serialized_file)
from scipy import stats
from fuzzywuzzy import fuzz
from time import perf_counter
import searchconsole
from datetime import datetime
from datetime import timedelta

# -------------- DATA RETRIEVING ---------------
# Authentication options (pick one):
# no credentials saved, do not save credentials
#account = searchconsole.authenticate(client_config='client_secrets.json')
# no credentials saved, want to save credentials
#account = searchconsole.authenticate(client_config='client_secrets.json', serialize = 'credentials.json')
# credentials saved as credentials.json
account = searchconsole.authenticate(client_config='client_secrets.json', credentials='credentials.json')

# webproperty must match what's shown on Google Search Console
webproperty = account['******']  # website url (placeholder — fill in)
start = datetime.strptime("******", "%Y-%m-%d")  # modify start date (placeholder)
end = datetime.strptime("******", "%Y-%m-%d")  # modify end date (placeholder)

# NOTE(review): `pd` is not imported in this chunk — presumably
# `import pandas as pd` lives elsewhere in the file; verify.
df = pd.DataFrame()

# Walk the date range one day at a time, fetching a report per day.
while start != end:
    start_datetime = datetime.strftime(start, "%Y-%m-%d")
    # interval = 1 day
    shifted_datetime = datetime.strftime(start + timedelta(days=1), "%Y-%m-%d")
    # NOTE(review): this chunk is truncated — the call below continues
    # beyond the visible source.
    report = webproperty.query.range(
# Streamlit flow: if a serialized credentials file already exists, use it to
# pull a 7-day query report; otherwise run the console OAuth flow and
# serialize the credentials for next time.
st.write(JsonFromString)
site = 'https://www.tatielou.co.uk/'  # Property to extract
num_days = 5  # Number of Days, Months to Extract
#creds = 'GSCTatieLouCredentialsNonLegacy.json'
output = 'gsc_data.csv'
st.header("Proceed to Oauth")
my_file = Path("credentials.json")
#if st.button('Proceed to Oauth'):
if my_file.is_file():
    # Credentials already serialized — authenticate non-interactively.
    #account = searchconsole.authenticate(client_config=JsonFromString,credentials='credentials.json')
    account = searchconsole.authenticate(client_config=JsonFromString, credentials='credentials.json')
    webproperty = account['https://www.tatielou.co.uk/']
    # Last 7 days, broken down by search query.
    report = webproperty.query.range('today', days=-7).dimension('query').get()
    df = pd.DataFrame(report.rows)
    st.write(df)
else:
    # First run: perform the console OAuth flow, echoing its stdout into a
    # Streamlit placeholder, and serialize the resulting credentials.
    #account = searchconsole.authenticate(client_config=JsonFromString,serialize='credentials.json')
    #Synode's code
    ###############
    outputStdout = st.empty()
    with st_stdout(outputStdout.info):
        account = searchconsole.authenticate(client_config=JsonFromString, serialize='credentials.json', flow="console")
        #searchconsole.authenticate(client_config="GSCTatieLouCredentials.json", serialize='credentials.json', flow="console")
        #st.write('No, credentials.json doesnt exist')
send_to_mails = '' # In[49]: #!pip install git+https://github.com/joshcarty/google-searchconsole import searchconsole import urllib3 import pandas as pd from pandas import DataFrame import warnings # In[48]: http = urllib3.PoolManager() account = searchconsole.authenticate( client_config='client_secrets.json', credentials='/Users/Michael.vandenreym/Desktop/python/credentials.json') webproperty = account[consoledomain] report = webproperty.query.range('today', days=-numberdays).dimension('page').get() # In[4]: warnings.filterwarnings("ignore") error4 = [] badcodes = [ '400', '401', '402', '403', '404', '500', '501', '502', '503', '504' ] for i in range(0, len(report.rows) - 1): try: resp = http.request('GET', report.rows[i][0])