def test_noproxy_trailing_comma():
    """A trailing comma in no_proxy must not break host bypass matching."""
    os.environ['http_proxy'] = 'http://myproxy.example.com:80'
    os.environ['no_proxy'] = 'localhost,other.host,'
    proxy_info = httplib2.proxy_info_from_environment()
    # Listed hosts bypass the proxy; everything else still goes through it.
    for bypassed in ('localhost', 'other.host'):
        assert not proxy_info.applies_to(bypassed)
    assert proxy_info.applies_to('example.domain')
def test_noproxy_trailing_comma():
    """The empty entry produced by a trailing comma in no_proxy is ignored."""
    os.environ["http_proxy"] = "http://myproxy.example.com:80"
    os.environ["no_proxy"] = "localhost,other.host,"
    info = httplib2.proxy_info_from_environment()
    assert not info.applies_to("localhost")
    assert not info.applies_to("other.host")
    # A host not named in no_proxy must still be proxied.
    assert info.applies_to("example.domain")
def test_noproxy_trailing_comma(monkeypatch):
    """A trailing comma in no_proxy must not disable proxying for all hosts."""
    monkeypatch.setenv("http_proxy", "http://myproxy.example.com:80")
    monkeypatch.setenv("no_proxy", "localhost,other.host,")
    info = httplib2.proxy_info_from_environment()
    # The two listed hosts bypass the proxy.
    for host in ("localhost", "other.host"):
        assert not info.applies_to(host)
    # An unlisted host is still proxied.
    assert info.applies_to("example.domain")
def test_from_env_https(monkeypatch):
    """For the https scheme, https_proxy takes precedence over http_proxy."""
    # Precondition: the environment must not already carry a proxy setting.
    assert os.environ.get("http_proxy") is None
    monkeypatch.setenv("http_proxy", "http://myproxy.example.com:80")
    monkeypatch.setenv("https_proxy", "http://myproxy.example.com:81")
    info = httplib2.proxy_info_from_environment("https")
    assert info.proxy_host == "myproxy.example.com"
    # Port 81 proves the https_proxy entry (not http_proxy) was chosen.
    assert info.proxy_port == 81
def get_credentials():
    """Gets valid user credentials from storage.

    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.

    Returns:
        Credentials, the obtained credential.
    """
    credential_dir = os.path.join(os.path.expanduser('~'), '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir,
                                   'calendar-python-quickstart.json')

    store = Storage(credential_path)
    credentials = store.get()
    if credentials and not credentials.invalid:
        # Cached credentials are usable; no OAuth flow needed.
        return credentials

    # No usable cached credentials: run the OAuth2 flow.
    flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
    flow.user_agent = APPLICATION_NAME
    # NOTE(review): SSL certificate validation is disabled here — confirm
    # this is genuinely required (e.g. an intercepting corporate proxy)
    # before keeping it; it weakens transport security.
    http = httplib2.Http(proxy_info=httplib2.proxy_info_from_environment(),
                         disable_ssl_certificate_validation=True)
    if flags:
        credentials = tools.run_flow(flow, store, flags, http)
    else:
        # Needed only for compatibility with Python 2.6
        credentials = tools.run(flow, store)
    print('Storing credentials to ' + credential_path)
    return credentials
def connect_launchpad(base_url, timeout=None, proxy_info=None,
                      version=Launchpad.DEFAULT_VERSION):
    """Log in to the Launchpad API.

    :return: The root `Launchpad` object from launchpadlib.
    """
    if proxy_info is None:
        import httplib2
        proxy_info = httplib2.proxy_info_from_environment('https')
    # A cache directory is optional; fall back to no cache on failure.
    try:
        cache_dir = get_cache_directory()
    except EnvironmentError:
        cache_dir = None
    return Launchpad.login_with(
        'breezy', base_url, cache_dir,
        timeout=timeout,
        credential_store=get_credential_store(),
        authorization_engine=get_auth_engine(base_url),
        proxy_info=proxy_info,
        version=version)
def test_applies_to():
    """no_proxy entries bypass the proxy, including domain-suffix matches."""
    os.environ['http_proxy'] = 'http://myproxy.example.com:80'
    os.environ['https_proxy'] = 'http://myproxy.example.com:81'
    os.environ['no_proxy'] = 'localhost,otherhost.domain.local,example.com'
    info = httplib2.proxy_info_from_environment()
    assert not info.applies_to('localhost')
    # Unlisted host goes through the proxy.
    assert info.applies_to('www.google.com')
    # Subdomain of a no_proxy entry is also bypassed in this variant.
    assert not info.applies_to('www.example.com')
def test_applies_to():
    """Exact entries match only themselves; leading-dot entries match subdomains."""
    os.environ['http_proxy'] = 'http://myproxy.example.com:80'
    os.environ['https_proxy'] = 'http://myproxy.example.com:81'
    os.environ['no_proxy'] = 'localhost,example.com,.wildcard'
    info = httplib2.proxy_info_from_environment()
    # Only an exact 'localhost' is bypassed.
    assert not info.applies_to('localhost')
    # These are proxied: unlisted host, superstring, and subdomains of an
    # entry without a leading dot.
    for proxied in ('www.google.com', 'prefixlocalhost',
                    'www.example.com', 'sub.example.com'):
        assert info.applies_to(proxied)
    # A leading-dot entry bypasses any depth of subdomain.
    for bypassed in ('sub.wildcard', 'pub.sub.wildcard'):
        assert not info.applies_to(bypassed)
def test_applies_to():
    """Check no_proxy matching: exact hosts vs. leading-dot wildcard entries."""
    os.environ["http_proxy"] = "http://myproxy.example.com:80"
    os.environ["https_proxy"] = "http://myproxy.example.com:81"
    os.environ["no_proxy"] = "localhost,example.com,.wildcard"
    proxy_info = httplib2.proxy_info_from_environment()
    assert not proxy_info.applies_to("localhost")
    assert proxy_info.applies_to("www.google.com")
    # Substring of an entry does not count as a match.
    assert proxy_info.applies_to("prefixlocalhost")
    # Plain entries do not cover subdomains...
    assert proxy_info.applies_to("www.example.com")
    assert proxy_info.applies_to("sub.example.com")
    # ...but a leading-dot entry does, at any depth.
    assert not proxy_info.applies_to("sub.wildcard")
    assert not proxy_info.applies_to("pub.sub.wildcard")
def test_applies_to(monkeypatch):
    """no_proxy semantics: exact-host entries vs. '.domain' wildcard entries."""
    monkeypatch.setenv("http_proxy", "http://myproxy.example.com:80")
    monkeypatch.setenv("https_proxy", "http://myproxy.example.com:81")
    monkeypatch.setenv("no_proxy", "localhost,example.com,.wildcard")
    info = httplib2.proxy_info_from_environment()
    # Bypassed: the exact host and anything under a leading-dot entry.
    for host in ("localhost", "sub.wildcard", "pub.sub.wildcard"):
        assert not info.applies_to(host)
    # Proxied: unlisted hosts, superstrings, and subdomains of plain entries.
    for host in ("www.google.com", "prefixlocalhost",
                 "www.example.com", "sub.example.com"):
        assert info.applies_to(host)
def get_dataset_fashion_mnist():
    """Load the Fashion-MNIST dataset, routing downloads through any
    proxy configured in the environment."""
    import httplib2

    # Detect the presence of a proxy via environment variables, if any.
    env_proxy = httplib2.proxy_info_from_environment()
    if env_proxy:
        import socks
        socks.setdefaultproxy(env_proxy.proxy_type,
                              env_proxy.proxy_host,
                              env_proxy.proxy_port)
        # After wrapping, all calls through httplib2 use the proxy settings.
        socks.wrapmodule(httplib2)
        httplib2.Http()
    return tf.keras.datasets.fashion_mnist.load_data()
def get_proxy_info(self):
    """Return proxy settings for HTTP requests.

    When no explicit proxy host is configured, fall back to the process
    environment; otherwise build a ProxyInfo from the configured host,
    port and type.

    Returns:
        An httplib2.ProxyInfo, or None when the environment defines no
        proxy and the socks NO_TUNNEL capability is absent.
    """
    if self.proxy_host is None:
        pi = httplib2.proxy_info_from_environment()
        # Without PROXY_TYPE_HTTP_NO_TUNNEL support there is nothing to
        # adjust; hand back the environment-derived info as-is.
        if not (hasattr(httplib2, 'socks') and
                hasattr(httplib2.socks, 'PROXY_TYPE_HTTP_NO_TUNNEL')):
            return pi
    else:
        pi = httplib2.ProxyInfo(proxy_type=self.proxy_type,
                                proxy_host=self.proxy_host,
                                proxy_port=self.proxy_port)
    # Bug fix: proxy_info_from_environment() returns None when no proxy
    # variables are set; the original code then crashed here with
    # AttributeError. Only override the type on a real ProxyInfo.
    if pi is not None:
        pi.proxy_type = self.proxy_type
    return pi
def GetHttp():
    """Configure an httplib2 object using the user's proxy, if necessary.

    This should be used everywhere in gcutil where an Http object is needed.

    Returns:
        An httplib2.Http object.
    """
    env_proxy = httplib2.proxy_info_from_environment()
    if env_proxy:
        # Let the proxy resolve hostnames instead of resolving locally.
        env_proxy.proxy_rdns = True
    return httplib2.Http(proxy_info=env_proxy)
def get_all_items(self):
    """Request every saved item from the Pocket v3 /get endpoint.

    Returns:
        None on request failure. NOTE(review): the visible code also falls
        through to an implicit None on success — confirm against the
        original source whether a parsed result should be returned here.
    """
    http_conn = httplib2.Http(proxy_info=httplib2.proxy_info_from_environment())
    uri = 'https://getpocket.com/v3/get'
    method = 'POST'
    headers = {'Content-Type': 'application/json'}
    # SECURITY: API credentials are hard-coded in source; they should be
    # loaded from configuration or the environment instead.
    body = '''{"consumer_key": "46666-60a7a1a006160f5641381067", "access_token": "40cb85d8-a3b9-fa64-3f74-b88315", "detailType": "simple"}'''
    try:
        resp, content = http_conn.request(uri=uri, method=method,
                                          headers=headers, body=body)
    # Bug fix: "except Exception, e" is Python-2-only syntax (a SyntaxError
    # on Python 3); the "as" form works on Python 2.6+ and 3.x alike.
    except Exception as e:
        # print e
        return None
def login(service, timeout=None, proxy_info=None,
          version=Launchpad.DEFAULT_VERSION):
    """Log in to the Launchpad API.

    :return: The root `Launchpad` object from launchpadlib.
    """
    if proxy_info is None:
        proxy_info = httplib2.proxy_info_from_environment('https')
    cache_directory = get_cache_directory()
    launchpad = Launchpad.login_with(
        'bzr', _get_api_url(service), cache_directory, timeout=timeout,
        proxy_info=proxy_info, version=version)
    # XXX: Work-around a minor security bug in launchpadlib < 1.6.3, which
    # would create this directory with default umask.
    # Bug fix: the old literal 0700 is a SyntaxError on Python 3; 0o700 is
    # the same value and valid on Python 2.6+ as well.
    osutils.chmod_if_possible(cache_directory, 0o700)
    return launchpad
def get_item_list(self):
    """Fetch a short list of saved items (count=2) from the Pocket v3 API.

    Returns the parsed JSON result, or None when the API responds with a
    non-200 status.
    """
    # SECURITY: credentials are hard-coded in source; consider moving them
    # to configuration.
    body = '''{"consumer_key": "46666-60a7a1a006160f5641381067", "access_token": "40cb85d8-a3b9-fa64-3f74-b88315", "count": "2", "detailType": "simple"}'''
    connection = httplib2.Http(
        proxy_info=httplib2.proxy_info_from_environment())
    resp, content = connection.request(
        uri='https://getpocket.com/v3/get',
        method='POST',
        headers={'Content-Type': 'application/json'},
        body=body)
    if resp.status != 200:
        return None
    return self.parse_json(content)
def initialize_analyticsreporting(client_secrets_path):
    """Initializes the analyticsreporting service object.

    Returns:
        analytics an authorized analyticsreporting service object.
    """
    scopes = ['https://www.googleapis.com/auth/analytics.readonly']
    discovery_uri = 'https://analyticsreporting.googleapis.com/$discovery/rest'

    # Parse command-line arguments (an empty list here, so only defaults
    # from tools.argparser apply; pass '--noauth_local_webserver' instead
    # to run without a local browser).
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        parents=[tools.argparser])
    flags = parser.parse_args([])

    # Set up a Flow object to be used if we need to authenticate.
    flow = client.flow_from_clientsecrets(
        client_secrets_path, scope=scopes,
        message=tools.message_if_missing(client_secrets_path))

    # Prepare credentials; if none exist or they are invalid, run the
    # native client flow. The Storage object writes good credentials back.
    storage = file.Storage('analyticsreporting.dat')
    credentials = storage.get()
    proxy = httplib2.proxy_info_from_environment('http')
    if credentials is None or credentials.invalid:
        credentials = tools.run_flow(
            flow, storage, flags, http=httplib2.Http(proxy_info=proxy))
    http = credentials.authorize(http=httplib2.Http(proxy_info=proxy))

    # Build the service object.
    return build('analytics', 'v4', http=http,
                 discoveryServiceUrl=discovery_uri)
def connect_launchpad(base_url, timeout=None, proxy_info=None,
                      version=Launchpad.DEFAULT_VERSION):
    """Log in to the Launchpad API.

    :return: The root `Launchpad` object from launchpadlib.
    """
    if proxy_info is None:
        # Derive proxy settings from the environment for https traffic.
        import httplib2
        proxy_info = httplib2.proxy_info_from_environment('https')
    cache_dir = get_cache_directory()
    return Launchpad.login_with('breezy', base_url, cache_dir,
                                timeout=timeout, proxy_info=proxy_info,
                                version=version)
def main():
    # Logs time spent on calendar events (between the two date arguments in
    # args) against same-named Trello cards via addTaskTime().
    # NOTE(review): placeholder API keys below must be replaced before use;
    # relies on module-level names args, calendarID, get_credentials,
    # getTrelloCardByName and addTaskTime — confirm against the full file.
    client = TrelloClient(api_key='YOUR_SECRET', api_secret='YOUR_API_SECRET', token='YOUR_TOKEN', token_secret='YOUR_TOKEN_SECRET')
    credentials = get_credentials()
    # NOTE(review): SSL certificate validation is disabled — confirm this
    # is really needed (e.g. intercepting proxy) before keeping it.
    http = httplib2.Http(proxy_info=httplib2.proxy_info_from_environment(), disable_ssl_certificate_validation=True)
    auth = credentials.authorize(http)
    # http = credentials.authorize(httplib2.Http(), disable_ssl_certificate_validation=True)
    # http_auth = credentials.authorize(Http(proxy_info = httplib2.ProxyInfo(httplib2.socks.PROXY_TYPE_HTTP_NO_TUNNEL, 'proxy url wihout http://', 8080, proxy_user = '', proxy_pass = '') ))
    service = discovery.build('calendar', 'v3', http=http)
    # calendar_list = service.calendarList().list(pageToken=None).execute()
    # timeMin = datetime.datetime(2017, 10, 23, 2, 0).isoformat() + 'Z'  # 'Z' indicates UTC time
    # timeMax = datetime.datetime(2017, 10, 25, 22, 0).isoformat() + 'Z'  # 'Z' indicates UTC time
    # Interval bounds come from the first two CLI arguments (day-first dates).
    tMin = dateutil.parser.parse(args[0], dayfirst=True)
    tMax = dateutil.parser.parse(args[1], dayfirst=True)
    # to include end of interval
    tMax += datetime.timedelta(days=1)
    print(tMin)
    print(tMax)
    timeMin = tMin.isoformat() + 'Z'  # 'Z' indicates UTC time
    timeMax = tMax.isoformat() + 'Z'  # 'Z' indicates UTC time
    # singleEvents=True expands recurring events into individual instances.
    eventsResult = service.events().list(calendarId=calendarID, timeMin=timeMin, timeMax=timeMax, singleEvents=True, orderBy='startTime').execute()
    events = eventsResult.get('items', [])
    if not events:
        print('No events found.')
    for event in events:
        # NOTE(review): all-day events have no 'dateTime' key, so .get()
        # returns None and dateutil would raise — presumably only timed
        # events are expected here; confirm.
        start = dateutil.parser.parse(event['start'].get('dateTime'))
        end = dateutil.parser.parse(event['end'].get('dateTime'))
        # Match the calendar event summary to a Trello card by name.
        card = getTrelloCardByName(client, event['summary'])
        print(card)
        # print(float((end-start).seconds)/3600.0)
        addTaskTime(card, start, end)
def initialize_analyticsreporting(creds_path, scopes):
    """Initialize an analyticsreporting service object.

    Returns:
        analytics an authorized analyticsreporting service object.
    """
    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        creds_path, scopes=scopes)

    env_proxy = httplib2.proxy_info_from_environment('https')
    if env_proxy:
        logging.info('Using proxy: %s:%s',
                     env_proxy.proxy_host, env_proxy.proxy_port)
        # Force remote DNS resolution through the proxy (default behaviour
        # in newer releases of httplib2).
        env_proxy.proxy_rdns = True

    authorized_http = credentials.authorize(httplib2.Http(proxy_info=env_proxy))

    # Build and return the service object.
    return build('analytics', 'v4', http=authorized_http,
                 discoveryServiceUrl=V2_DISCOVERY_URI)
## hack for credentials directory credential_dir = os.path.join( os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))), 'credentials') import httplib2 from apiclient import discovery, errors from oauth2client import client from oauth2client import tools from oauth2client.file import Storage import base64, email, email.header, datetime from email.mime.text import MIMEText # detect presense of proxy and use env varibles if they exist pi = httplib2.proxy_info_from_environment() if pi: import socks socks.setdefaultproxy(pi.proxy_type, pi.proxy_host, pi.proxy_port) socks.wrapmodule(httplib2) # If modifying these scopes, delete your previously saved credentials # at ~/.credentials/gmail-python-quickstart.json SCOPES = 'https://www.googleapis.com/auth/gmail.modify' CLIENT_SECRET_FILE = os.path.join(credential_dir, 'google-api.json') APPLICATION_NAME = 'Gmail API - Python' class Mailbox(): def __init__(self, cred=credential_dir, flags=None): self.cred = credential_dir
def get_gcs_proxy_info(self):
    """Proxy settings for GCS backup traffic.

    An explicitly configured backup_gcs_proxy_url wins; otherwise fall
    back to whatever the process environment defines.
    """
    configured_url = CONF.backup_gcs_proxy_url
    if configured_url:
        return httplib2.proxy_info_from_url(configured_url)
    return httplib2.proxy_info_from_environment()
def test_noproxy_star():
    """NO_PROXY='*' disables the proxy for every host."""
    os.environ["http_proxy"] = "http://myproxy.example.com:80"
    os.environ["NO_PROXY"] = "*"
    info = httplib2.proxy_info_from_environment()
    # Hostname, raw IP, and FQDN are all bypassed by the wildcard.
    assert not info.applies_to("localhost")
    assert not info.applies_to("169.254.38.192")
    assert not info.applies_to("www.google.com")
def test_noproxy_star(monkeypatch):
    """A wildcard NO_PROXY entry must bypass the proxy for all hosts."""
    monkeypatch.setenv("http_proxy", "http://myproxy.example.com:80")
    monkeypatch.setenv("NO_PROXY", "*")
    info = httplib2.proxy_info_from_environment()
    hosts = ("localhost", "169.254.38.192", "www.google.com")
    assert not any(info.applies_to(h) for h in hosts)
def test_from_env_https():
    """Requesting https settings must select https_proxy, not http_proxy."""
    os.environ['http_proxy'] = 'http://myproxy.example.com:80'
    os.environ['https_proxy'] = 'http://myproxy.example.com:81'
    info = httplib2.proxy_info_from_environment('https')
    # Port 81 confirms the https_proxy entry was used.
    assert info.proxy_host == 'myproxy.example.com'
    assert info.proxy_port == 81
def test_from_env_none():
    """With no proxy variables at all, no ProxyInfo is produced."""
    # Wipes the whole process environment — destructive, but that is this
    # test's intent: guarantee no *_proxy variable survives.
    os.environ.clear()
    assert httplib2.proxy_info_from_environment() is None
def test_from_env_https():
    """The https scheme picks https_proxy in preference to http_proxy."""
    os.environ["http_proxy"] = "http://myproxy.example.com:80"
    os.environ["https_proxy"] = "http://myproxy.example.com:81"
    proxy_info = httplib2.proxy_info_from_environment("https")
    # The distinct port proves https_proxy (81) was chosen over http_proxy (80).
    assert proxy_info.proxy_host == "myproxy.example.com"
    assert proxy_info.proxy_port == 81
def test_noproxy_star():
    """NO_PROXY='*' must bypass the proxy for hostname, IP, and FQDN alike."""
    os.environ['http_proxy'] = 'http://myproxy.example.com:80'
    os.environ['NO_PROXY'] = '*'
    info = httplib2.proxy_info_from_environment()
    hosts = ('localhost', '169.254.38.192', 'www.google.com')
    assert not any(info.applies_to(h) for h in hosts)
# OAuth2 / Drive API constants for the local-redirect auth flow.
CLIENT_SECRET_FILE = "client_secrets.json"
# NOTE(review): these look like rclone's published client credentials —
# confirm whether shipping them in source is intended.
RCLONE_CLIENT_ID = '202264815644.apps.googleusercontent.com'
RCLONE_CLIENT_SECRET = 'X4Z3ca8xfWDb1Voo-F9a7ZxJ'
SCOPES = "https://www.googleapis.com/auth/drive"
# Out-of-band redirect: the auth code is shown to the user to paste back.
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
HTTP_NUM_RETRIES = 5
log = logging.getLogger(__name__)

# If HTTPS_PROXY is set, route all httplib2 traffic through that proxy,
# including remote DNS resolution and proxy credentials from the URL.
https_proxy = os.environ.get("HTTPS_PROXY", "")
if https_proxy:
    proxyInfo = httplib2.proxy_info_from_environment("https")
    httplib2.socks.setdefaultproxy(proxy_type=proxyInfo.proxy_type,
                                   addr=proxyInfo.proxy_host,
                                   port=proxyInfo.proxy_port,
                                   rdns=proxyInfo.proxy_rdns,
                                   username=proxyInfo.proxy_user,
                                   password=proxyInfo.proxy_pass
                                   )
    httplib2.socks.wrapmodule(httplib2)


# Minimal HTTP server that receives the OAuth2 redirect; query_params is
# filled in by the handler below.
class ClientRedirectServer(BaseHTTPServer.HTTPServer):
    query_params = {}


# NOTE(review): do_GET's body is cut off at this chunk boundary — the
# remainder of the handler lies outside the visible source.
class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_GET(self):