def _get_task_from_queue(self, task_id, refresh_lease):
    """
    Gets and leases the task associated with task_id from the pull task queue.

    :param task_id: identifier of the task to fetch
    :param refresh_lease: if True, refreshes the lease on the task
    :return: the leased task, or None
    """
    try:
        http = httplib2.Http()
        c = credentials.get_credentials()
        if c:
            task_api = build('taskqueue', 'v1beta2', http=c.authorize(http))
            get_req = task_api.tasks().get(project='s~ocr-backend',
                                           taskqueue=self.queue,
                                           task=task_id)
            result = get_req.execute()
            if 'id' in result:
                if refresh_lease:
                    self.refresh_lease_time()
                return result
            else:
                return None
        else:
            config.logging.error('Error getting credentials')
    except httplib2.ServerNotFoundError as e:
        config.logging.error('HTTP Error {0}'.format(e.message))
        return None
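# The task-queue snippets in this collection all rely on a credentials.get_credentials()
# helper that is not shown. Below is a minimal sketch of what such a helper might look
# like with the (now legacy) oauth2client library; the cache file name, the
# client-secrets path, and the scope are assumptions, not values taken from the source.
from oauth2client import client, tools
from oauth2client.file import Storage


def get_credentials():
    """Load cached OAuth2 credentials, running the installed-app flow if needed."""
    store = Storage('taskqueue_credentials.json')  # assumed cache location
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets(
            'client_secret.json',  # assumed client-secrets file
            scope='https://www.googleapis.com/auth/taskqueue')
        creds = tools.run_flow(flow, store)
    return creds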
def logout(args):
    if get_credentials():
        delete_credentials()
        client.auth = None
        print 'You have been logged out.'
    else:
        print 'You are not logged in.'
def check_credentials(host):
    global passwd
    while True:
        if host in passwd:
            break
        credentials.add_credentials()
        passwd = credentials.get_credentials()
def createNetwork(network_name, subnet_name, cidr):
    credentials = get_credentials()
    neutron = client.Client(**credentials)
    try:
        body_sample = {
            'network': {
                'name': network_name,
                'admin_state_up': True
            }
        }
        netw = neutron.create_network(body=body_sample)
        net_dict = netw['network']
        network_id = net_dict['id']
        print('Network %s created' % network_id)
        body_create_subnet = {
            'subnets': [{
                'cidr': cidr,
                'ip_version': 4,
                'name': subnet_name,
                'network_id': network_id
            }]
        }
        subnet = neutron.create_subnet(body=body_create_subnet)
        print('Created subnet %s' % subnet)
    finally:
        print("Execution completed")
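# Several OpenStack snippets here import get_credentials() and get_nova_credentials()
# from a local credentials module that is not shown. A common shape for those helpers,
# reading the usual OS_* environment variables, is sketched below as an assumption;
# the exact keys a given deployment needs may differ.
import os


def get_credentials():
    """Credentials dict for neutronclient.v2_0.client.Client(**credentials)."""
    return {
        'username': os.environ['OS_USERNAME'],
        'password': os.environ['OS_PASSWORD'],
        'auth_url': os.environ['OS_AUTH_URL'],
        'tenant_name': os.environ['OS_TENANT_NAME'],
    }


def get_nova_credentials():
    """Credentials dict for novaclient.v1_1.client.Client(**credentials)."""
    return {
        'username': os.environ['OS_USERNAME'],
        'api_key': os.environ['OS_PASSWORD'],
        'auth_url': os.environ['OS_AUTH_URL'],
        'project_id': os.environ['OS_TENANT_NAME'],
    }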
def main():
    updatedToSheets = False
    data = database.getFromDB(updatedToSheets)
    richardSheetID = '1PY-eVqQs-2LdJu3gZ2tdaBRBNFh0gpI1GdSWpL_yXd8'
    sheetID = '1XATRUyPfMIqiLKrpYixMT2E6hT1uwkknSBgW8v3x1ME'
    if data is not None:
        ROWS = list()
        for row in data:
            ROWS.append(list(row))
        spreadsheet = {
            'ID': sheetID,
            'rangeName': 'A1:F1',
            'valueInputOption': 'RAW',
            'ROWS': ROWS
        }
        credentials = c.get_credentials()
        http_auth = credentials.authorize(Http())
        service = build('sheets', 'v4', http=http_auth)
        values = spreadsheet['ROWS']
        body = {'values': values}
        print 'Uploading to Google Sheets...'
        result = service.spreadsheets().values().append(
            spreadsheetId=spreadsheet['ID'],
            range=spreadsheet['rangeName'],
            valueInputOption=spreadsheet['valueInputOption'],
            body=body).execute()
        database.setUpdatedToSheets()
    else:
        print 'Everything is up to date...'
def move_to_queue(self, queue, payload, lease_time):
    """
    Modifies the payload of the task and moves it to a new queue
    (i.e. training or improve-cropping).
    """
    import base64
    import time
    try:
        http = httplib2.Http()
        c = credentials.get_credentials()
        if c:
            task_api = build('taskqueue', 'v1beta2', http=c.authorize(http))
            body = {"kind": "taskqueues#task",
                    "id": self.id,
                    "queueName": queue,
                    # base64-encode the payload; the original
                    # str.encode(..., encoding='base64') call is not valid Python
                    "payloadBase64": base64.b64encode(str(payload)),
                    "enqueueTimestamp": long(time.time()),
                    "leaseTimestamp": lease_time,
                    "retry_count": 0}
            create_req = task_api.tasks().insert(project='s~ocr-backend',
                                                 taskqueue=queue,
                                                 body=body)
            result = create_req.execute()
            config.logging.info('task: insert - result: {0}'.format(result))
            if result:
                self.delete_task_from_queue()
                self.queue = queue
                self.payload = payload
        else:
            config.logging.error('Error getting credentials')
    except httplib2.ServerNotFoundError as e:
        config.logging.error('HTTP Error {0}'.format(e.message))
        return None
def test_get_credentials_missing_section(self):
    """
    Test reading the credentials from a configuration file where the
    credentials section is missing (uninitialized).
    """
    mock_file = self.mox.CreateMockAnything()
    mock_file.closed = False
    mock_file.name = "foobar"
    username, password = ("*****@*****.**", "password")
    self.mox.StubOutWithMock(SafeConfigParser, "get")
    self.mox.StubOutWithMock(SafeConfigParser, "add_section")
    self.mox.StubOutWithMock(SafeConfigParser, "set")
    self.mox.StubOutWithMock(__builtin__, "raw_input")
    config = ConfigurationParser()
    config.parse(mock_file)
    error = NoSectionError("credentials")
    config.get("credentials", "username").AndRaise(error)
    config.add_section("credentials")
    raw_input(mox.IgnoreArg()).AndReturn(username)
    raw_input(mox.IgnoreArg()).AndReturn(password)
    config.set("credentials", "username", username)
    config.set("credentials", "password", password)
    self.mox.ReplayAll()
    self.assertEquals((username, password), credentials.get_credentials())
def main(args):
    client_secret_file_path = os.environ['GCAL_CLIENT_SECRET_PATH']
    credentials = get_credentials(client_secret_file_path)
    service = get_service(credentials)
    parser = argparse.ArgumentParser()
    sub = parser.add_subparsers(title='sub commands')
    list_parser = sub.add_parser('list')
    list_parser.set_defaults(func=list_cals)
    event_parser = sub.add_parser('events')
    event_parser.set_defaults(func=list_events)
    event_parser.add_argument('cal_ids', nargs='?')
    event_parser.add_argument('days', type=int, nargs='?')
    event_parser.add_argument('--no-times', action='store_true')
    event_parser.add_argument('--markdown-list', action='store_true')
    hour_parser = sub.add_parser('hour')
    hour_parser.set_defaults(func=calc_hours)
    hour_parser.add_argument('cal_ids', nargs='?')
    hour_parser.add_argument('days', type=int, nargs='?')
    hour_parser.add_argument('--no-times', action='store_true')
    args = parser.parse_args()
    args.func(service, args)
def createRouter():
    credentials = get_nova_credentials()
    nova_client = nvclient.Client(**credentials)
    net = nova_client.networks.find(label="ext-net")
    try:
        credentials = get_credentials()
        neutron = client.Client(**credentials)
        neutron.format = 'json'
        request = {
            'router': {
                'name': 'External Router',
                'admin_state_up': True,
                'external_gateway_info': {
                    'network_id': net.id
                }
            }
        }
        router = neutron.create_router(request)
        router_id = router['router']['id']
        router = neutron.show_router(router_id)
        print(router)
        addInterface(neutron, router_id, 'FirstSubnet')
        addInterface(neutron, router_id, 'SecondSubnet')
        addInterface(neutron, router_id, 'ThirdSubnet')
        addInterface(neutron, router_id, 'FourthSubnet')
    finally:
        # Note: this runs whether or not router creation actually succeeded.
        print("created router")
def test_get_creds(self):
    """
    Test the basic presence of creds. This will be a good flag if tests
    start failing all over the place.
    """
    creds = get_credentials()
    self.assertIsNotNone(creds)
def _get_next_task_from_queue(self):
    """
    Gets and leases one available task from the pull task queue.

    :return: the leased task, or None
    """
    try:
        http = httplib2.Http()
        c = credentials.get_credentials()
        if c:
            task_api = build('taskqueue', 'v1beta2', http=c.authorize(http))
            lease_req = task_api.tasks().lease(project='ocr-backend',
                                               taskqueue=self.queue,
                                               leaseSecs=config.READING_LEASE_TIME,
                                               numTasks=1)
            result = lease_req.execute()
            if 'items' in result:
                if result['items'] is not None:
                    task = result['items'][0]
                    return task
                else:
                    return None
            else:
                return None
        else:
            config.logging.error('Error getting credentials')
    except httplib2.ServerNotFoundError as e:
        config.logging.error('HTTP Error {0}'.format(e.message))
        return None
def __init__(self, driver, name=None): self.driver = driver self.credentials = credentials.get_credentials(name) # Public Views self.homeView = homeView.HomeView(driver) self.forgotPwView = forgotPwView.ForgotPwView(driver) self.createAcctView = createAcctView.CreateAcctView(driver) # Authenticated self.aboutMeView = aboutMeView.AboutMeView(driver) self.myelomaDiagnosisView = myelDiagView.MyelDiagView(driver) self.currentHealthView = currentHealthView.CurrentHealthView(driver) self.fitLvlView = fitLvlView.FitLvlView(driver) self.fullHealthView = fullHealthView.FullHealthView(driver) self.myelomaGeneticsView = myelomaGeneticsView.MyelomaGeneticsView( driver) self.treatmentsOutcomesView = treatmentsOutcomesView.TreatmentsOutcomesView( driver) self.treatmentOptionsView = treatmentOptionsView.TreatmentOptionsView( driver) # Myeloma Labs self.myelomaLabsView = myelomaLabsView.MyelomaLabsView(driver) self.myLabsFacilitiesView = myLabsFacilitiesView.MyLabsFacilitiesView( driver) self.myLabsAddFacilityView = myLabsAddFacilityView.MyLabsAddFacilityView( driver) self.consentFormView = consentFormView.ConsentFormView(driver) self.settingsView = settingsView.SettingsView(driver) self.surveysView = surveysView.SurveysView(driver)
def __init__(self, config):
    # NOTE: part of this snippet was redacted in the source; the credential
    # lookup and token request below are reconstructed from the matching
    # authenticate() example, and SCOPE is assumed to be defined elsewhere.
    user = input('Input spotifys username: ')
    credentials = get_credentials()
    token = util.prompt_for_user_token(user, SCOPE,
                                       credentials['CLI_ID'],
                                       credentials['CLI_KEY'],
                                       REDIRECT_URI)
    self.spotify = spotipy.Spotify(auth=token)
def main():
    global targets
    username, password = get_credentials('laozi')
    remote_host = Machine(username, password)
    for target in targets:
        remote_host.connect(target)
        # stdin, stdout = remote_host.create_channel(target, input_file)
        # slb.send_cmd(stdin, stdout, input_file)
        remote_host.list_dir('/home/laozi/')
def test_reading_credentials_from_file(self, mock_os_path):
    # Setting up the mocks
    mock_os_path.exists.return_value = True
    # Doing the real call
    self.assertEqual(get_credentials(), ['http://abc', 'test', 'password'])
    # Asserting functions have been called
    mock_os_path.exists.assert_called_once_with('.env')
    open.assert_called_once_with('.env')
def __init__(self):
    # myo
    super(Listener, self).__init__()
    self.orientation = None
    self.pose = libmyo.Pose.rest
    self.locked = False
    # hue lights
    creds = credentials.get_credentials()
    self.lc = LightController.LightController(creds["ip_addr"], creds["username"])
    self.lc.reset_lights_to_white()
def authenticate():
    credentials = get_credentials()
    SCOPE = 'playlist-modify-public'
    REDIRECT_URI = 'http://localhost:8888/callback'
    token = util.prompt_for_user_token(user_id, SCOPE,
                                       credentials['CLI_ID'],
                                       credentials['CLI_KEY'],
                                       REDIRECT_URI)
    return spotipy.Spotify(auth=token)
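# A hypothetical usage of authenticate() above: create a public playlist for the
# authenticated user. The playlist name is an illustration only, and the module-level
# user_id is assumed to be defined as in the snippet; neither comes from the source.
sp = authenticate()
playlist = sp.user_playlist_create(user_id, 'My generated playlist')
print(playlist['id'])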
def connect_to_googlesheets():
    print('Connecting to GoogleSheets...')
    api_credentials = get_credentials()
    http = api_credentials.authorize(Http())
    api_url = ('https://sheets.googleapis.com/$discovery/rest?'
               'version=v4')
    googlesheets = api.build('sheets', 'v4', http=http,
                             discoveryServiceUrl=api_url)
    print('- Successfully established connection to GoogleSheets.')
    return googlesheets
def authorize_gcalender(self):
    """
    Creates a Google Calendar API service object.

    :return: a Google Calendar API service object
    """
    credentials = get_credentials(
        self.credentials,
        self.client_secret,
        'https://www.googleapis.com/auth/calendar.readonly',
        'G Suite Utilities')
    http = credentials.authorize(httplib2.Http())
    return discovery.build('calendar', 'v3', http=http)
def read_access_token():
    global access_token
    # FIXME: embiggen the access token type we use
    cred = credentials.get_credentials("ig")
    if cred is None:
        print "Could not find credentials in file %s. See credentials.py" % credentials.CCAUTH
        sys.exit(1)
    if "auth-token" not in cred:
        print "Could not find auth-token in ig credentials. See credentials.py"
        sys.exit(1)
    access_token = cred["auth-token"]
def main():
    global targets
    username, password = get_credentials('laozi')
    remote_host = Machine(username, password)
    for target in targets:
        remote_host.connect(target)
        stdin, stdout = remote_host.create_channel(target, input_file)
        slb.send_cmd(stdin, stdout, input_file)
        remote_dir = input('Which directory should I list?')
        remote_host.list_content(remote_dir)
        remote_file = input('Which file should I retrieve?')
        # retrieve the requested file (the original looped over the characters
        # of the filename, issuing the same retrieve call once per character)
        remote_host.retrieve(remote_dir, remote_file)
def get_api():
    # pull the credentials
    cred = credentials.get_credentials("twitter")
    if cred is None:
        print "%s not found / missing twitter credentials" % credentials.CCAUTH
    # connect to twitter
    auth = tweepy.OAuthHandler(cred["cons-key"], cred["cons-secret"])
    auth.set_access_token(cred["access-token"], cred["access-secret"])
    api = tweepy.API(auth)
    return api
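# A hypothetical usage of get_api() above: post a status update through tweepy's
# classic v1.1 interface. The status text is an illustration, not taken from the source.
api = get_api()
api.update_status("Hello from the credentials example")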
def LSAinit():
    # establish data connection to MySQL DATABASE
    db = MySQLdb.connect("127.0.0.1", "aero", get_credentials(), "aerotest")
    cursor = db.cursor()
    sqlA = "select distinct ev_id from aircraft_accident order by ev_id"
    cursor.execute(sqlA)
    effectiveEventId = []
    for item in cursor.fetchall():
        effectiveEventId.append(item[0])
    db.close()
    db = MySQLdb.connect("127.0.0.1", "aero", get_credentials(), "aerotest")
    cursor = db.cursor()
    sqlB = ("select distinct ev_id, narr_cause from narratives "
            "where NULLIF(narr_cause, '') IS NOT NULL order by ev_id")
    cursor.execute(sqlB)
    narratives = []
    for item in cursor.fetchall():
        narratives.append({"event_id": item[0], "cause": item[1]})
    db.close()
    for eventId in effectiveEventId:
        for x in narratives:
            if x['event_id'] == eventId:
                index = narratives.index(x)
                temp = narratives[index]['cause'].split()
                temp = [decodeword(word) for word in temp
                        if word.lower() not in stop_words]
                temp = [stemmer.stem(decodeword(word)) for word in temp]
                dataset.append({
                    "event_id": eventId,
                    "cause": ' '.join(temp),
                    "original-text": narratives[index]['cause']
                })
    return dataset
def main():
    """ GMail API retrieval """
    credentials = get_credentials(flags)
    service = build_service(credentials)
    messages = list_messages(service, 'me', '*****@*****.**')
    if not messages:
        print('No messages')
    else:
        print('Messages:')
        for msg in messages:
            print(msg)
def create_port():
    credentials = get_credentials()
    neutron = client.Client(**credentials)
    body_value = {
        "port": {
            "admin_state_up": True,
            "name": "VIP",
            "network_id": vip_network_id
        }
    }
    response = neutron.create_port(body=body_value)
    # print json.dumps(response, sort_keys=True, indent=4)  # debug example
    return json.dumps(response["port"]["fixed_ips"][0]["ip_address"])
def login(driver):
    # Login function
    #
    # Input: driver, on the main page of Rocking Soccer
    time.sleep(1)
    username_elem = driver.find_element_by_name('username')
    password_elem = driver.find_element_by_name('password')
    # login_elem = driver.find_element_by_name('login')
    username, password = get_credentials()
    username_elem.send_keys(username)
    time.sleep(1)
    password_elem.send_keys(password)
    time.sleep(1)
    password_elem.send_keys(Keys.RETURN)
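# A minimal, hypothetical harness for login() above. The use of Firefox and the
# placeholder URL are assumptions for illustration; neither comes from the source.
from selenium import webdriver

driver = webdriver.Firefox()
driver.get('https://example.com/login')  # placeholder login page
login(driver)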
def test_reading_credentials_from_command_line(self, mock_file, mock_input,
                                               mock_os_path):
    # Setting up the mocks
    mock_os_path.exists.return_value = False
    # Doing the real call
    self.assertEqual(get_credentials(), ['abcs', 'abcs', 'abcs'])
    # Asserting functions have been called
    mock_os_path.exists.assert_called_once_with('.env')
    mock_file.assert_called_once_with('.env', 'w')
    mock_file().write.assert_has_calls(
        [call('abcs\n'), call('abcs\n'), call('abcs')])
    self.assertEqual(mock_file().write.call_count, 3)
    mock_file().close.assert_called_once_with()
def main():
    """
    Main entry point for execution as a program (instead of as a module).
    """
    args = parse_args()
    logging.info('coursera_dl version %s', __version__)
    completed_classes = []

    if not args.cookies_file:
        try:
            args.username, args.password = get_credentials(
                username=args.username, password=args.password,
                netrc=args.netrc, use_keyring=args.use_keyring)
        except CredentialsError as e:
            logging.error(e)
            sys.exit(1)

    mkdir_p(PATH_CACHE, 0o700)
    if args.clear_cache:
        shutil.rmtree(PATH_CACHE)
    if args.on_demand:
        logging.warning('--on-demand option is deprecated and is not required'
                        ' anymore. Do not use this option. It will be removed'
                        ' in the future.')

    for class_name in args.class_names:
        try:
            logging.info('Downloading class: %s', class_name)
            if download_class(args, class_name):
                completed_classes.append(class_name)
        except requests.exceptions.HTTPError as e:
            logging.error('HTTPError %s', e)
        except requests.exceptions.SSLError as e:
            logging.error('SSLError %s', e)
            print_ssl_error_message(e)
            if is_debug_run():
                raise
        except ClassNotFound as cnf:
            logging.error('Could not find class: %s', cnf)
        except AuthenticationFailed as af:
            logging.error('Could not authenticate: %s', af)

    if completed_classes:
        logging.info(
            "Classes which appear completed: " + " ".join(completed_classes))
def __init__(self, image_id):
    """
    Image object constructor. Gets the image from the Cloud Store and
    creates the object used to interact with the storage API.

    :param image_id: the URL-safe Cloud Store identifier of the image
        provided by the user
    :return: a picture object of the region-of-interest image from the reading
    """
    http = httplib2.Http()
    c = credentials.get_credentials()
    if c:
        self.storage_api = api_discovery.build('storage', 'v1',
                                               http=c.authorize(http))
        request = self.storage_api.objects().get_media(bucket=BUCKET,
                                                       object=image_id)
        resp = request.execute()
        self.pic = Image.open(StringIO(resp))
        self.image_id = image_id
        self._extract_region_of_interest(save=True)
    else:
        config.logging.error('Error getting credentials')
def connectToInstance(instance_name, network_name, network2_name):
    credentials = get_credentials()
    neutron = client.Client(**credentials)
    credentials = get_nova_credentials()
    nova_client = nvclient.Client(**credentials)
    instance = nova_client.servers.find(name=instance_name)
    server = nova_client.servers.get(instance)
    if len(server.networks.get(network_name)) == 1:
        public_ip = server.networks.get(network2_name)[1]
    else:
        public_ip = server.networks.get(network_name)[1]
    print(public_ip)
    # Authenticate the SSH connection with the private key.
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    key = paramiko.RSAKey.from_private_key_file("./id_rsa")
    ssh.connect(public_ip, username='******', pkey=key)
    return ssh
def google_login(c, use_credentials_file=False):
    if not use_credentials_file:
        email = input('Email: ')
        pwd = getpass('Password: ')
    else:
        try:
            from credentials import get_credentials
            email, pwd = get_credentials()
        except Exception:
            print("Failure when retrieving credentials")
    # Type the username and click next
    c.send_keys(USERNAME_XP, email)
    c.click(USER_NEXT)
    sleep(3)
    # Type the password and click next
    c.send_keys(PWD_XP, pwd)
    c.click(PWD_NEXT)
def test_get_credentials(self):
    """
    Ensure proper behavior when the configuration file contains the
    username-password pair.
    """
    mock_file = self.mox.CreateMockAnything()
    mock_file.closed = False
    mock_file.name = "foobar"
    username, password = ("*****@*****.**", "password")
    self.mox.StubOutWithMock(SafeConfigParser, "get")
    config = ConfigurationParser()
    config.parse(mock_file)
    config.get("credentials", "username").AndReturn(username)
    config.get("credentials", "password").AndReturn(password)
    self.mox.ReplayAll()
    self.assertEquals((username, password), credentials.get_credentials())
def delete_task_from_queue(self):
    """
    Deletes the task from the queue.
    """
    try:
        http = httplib2.Http()
        c = credentials.get_credentials()
        if c:
            task_api = build('taskqueue', 'v1beta2', http=c.authorize(http))
            delete_req = task_api.tasks().delete(project='s~ocr-backend',
                                                 taskqueue=self.queue,
                                                 task=self.id)
            result = delete_req.execute()
            config.logging.info('task: Delete task - result: {0}'.format(result))
        else:
            config.logging.error('Error getting credentials')
    except httplib2.ServerNotFoundError as e:
        config.logging.error('HTTP Error {0}'.format(e.message))
        return None
def main(): """ GMail API retrieval """ credentials = get_credentials(flags) service = build_service(credentials) messages = list_messages(service, 'me', 'from:[email protected]') if not messages: print("No messages, exiting") return for message in messages: message_id = message['id'] mime_message = download_mime_message(service, 'me', message_id) reply_header = mime_message.get('Reply-To') # if this is a message that was not well formed we might as well skip it. # from formspree the well formed messages have a Reply-To header. if not reply_header: continue html = get_message_html(mime_message) content = get_message_content(html) # now if we had someone respond we should find a message # that corresponds to that and save that flag sent_messages = list_messages(service, 'me', 'to:' + reply_header) responded = True if sent_messages else False content['responded'] = responded # last step which is figuring out what the person has told us. # we only need this if we sent them an e-mail if responded: responses = list_messages(service, 'me', 'from:' + reply_header) if responses: response = download_message(service, 'me', responses[0]['id']) snippet = response['snippet'] content['response'] = snippet pprint.pprint(content)
def get_authenticated_api():
    if socket.gethostname().startswith("Benjamins"):
        print("should have already got credentials")
        credentials = get_credentials()
        consumer_key = credentials['consumer_key']
        consumer_secret = credentials['consumer_secret']
        access_token = credentials['access_token']
        access_token_secret = credentials['access_token_secret']
    else:
        consumer_key = environ['consumer_key']
        consumer_secret = environ['consumer_secret']
        access_token = environ['access_token']
        access_token_secret = environ['access_token_secret']
    # Access and authorize our Twitter credentials from credentials.py
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = tweepy.API(auth)
    return api
def __init__(topo, info):
    topo.info = info
    topo.subnet_route_route = len(info['link']['route_route'])
    topo.subnet_route_host = len(info['link']['route_host'])
    topo.subnet_num = topo.subnet_route_route + topo.subnet_route_host
    topo.router_num = info['routes']
    topo.network_id = u''
    topo.subnet_id = {}
    topo.subnet_cidr = {}
    topo.router_cidr = {}
    topo.ports_id = {}
    topo.router_id = {}
    topo.route_ip = {}
    topo.neutron_credentials = get_credentials()
    topo.nova_credentials = get_nova_credentials_v2()
    topo.neutron = neutron_client(**topo.neutron_credentials)
    topo.nova = nova_client(**topo.nova_credentials)
    for tmp in range(topo.subnet_num):
        topo.ports_id[tmp] = {}
    for tmp in range(topo.subnet_num):
        topo.route_ip[tmp] = {}
def insert_event(refresh_token):
    credentials = Credentials.get_credentials(refresh_token)
    http = credentials.authorize(httplib2.Http())
    service = build(serviceName='calendar', version='v3', http=http)
    event = {
        'summary': 'Google I/O 2015',
        'location': '800 Howard St., San Francisco',
        'description': 'A chance to hear more about Google',
        'start': {
            'dateTime': '2017-04-06T06:00:00-18:00',
            'timeZone': 'UTC',
        },
        'end': {
            'dateTime': '2017-04-06T14:00:00-18:00',
            'timeZone': 'UTC',
        },
    }
    event = service.events().insert(calendarId='primary', body=event).execute()
def create_a_port():
    server_id = 'f9dec530-f163-4965-8aab-bfc43c955566'
    network_id = '17087163-a2a2-48d6-b26a-f1db86c0612a'
    server_detail = nova_client.servers.get(server_id)
    print server_detail.id
    print "Enter the name"
    name1 = raw_input()
    if server_detail is not None:
        credentials = get_credentials()
        neutron = client.Client(**credentials)
        body_value = {
            "port": {
                "admin_state_up": True,
                "device_id": server_id,
                "name": name1,
                "network_id": network_id
            }
        }
        response = neutron.create_port(body=body_value)
        print response
    return
def get_event(refresh_token):
    credentials = Credentials.get_credentials(refresh_token)
    http = credentials.authorize(httplib2.Http())
    service = build(serviceName='calendar', version='v3', http=http)
    now = datetime.datetime.utcnow().isoformat() + 'Z'
    print("Getting the upcoming 10 events")
    eventsResult = service.events().list(calendarId='primary', timeMin=now,
                                         maxResults=10, singleEvents=True,
                                         orderBy='startTime').execute()
    # default to an empty list so the loop below is safe when nothing is returned
    events = eventsResult.get('items', [])
    if not events:
        print("No events")
    for event in events:
        start = event['start'].get('dateTime', event['start'].get('date'))
        print(start + " " + event['summary'])
    return events
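# Hypothetical driver for insert_event() and get_event() above. The refresh-token
# placeholder is an assumption; in practice it would come from your own token store.
if __name__ == '__main__':
    refresh_token = 'stored-refresh-token'  # placeholder, not a real token
    insert_event(refresh_token)
    for upcoming in get_event(refresh_token):
        print(upcoming.get('summary', '(no title)'))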
from fabLib import *
from fabric.api import settings
import credentials

passwd = credentials.get_credentials()


def check_credentials(host):
    global passwd
    while True:
        if host in passwd:
            break
        credentials.add_credentials()
        passwd = credentials.get_credentials()


def test_run():
    host = raw_input('Host: ').strip()
    check_credentials(host)


if __name__ == '__main__':
    test_run()
#!/usr/bin/env python
"""
Description: This script allocates and associates floating IPs.
Developer: [email protected]
"""
import pdb

from neutronclient.v2_0 import client
import novaclient.v1_1.client as nvclient
from credentials import get_credentials, get_nova_credentials, \
    get_tenant_nova_credentials
from config import FLOATING_IP_POOL

neutron_credentials = get_credentials()
credentials = get_nova_credentials()
neutron = client.Client(**neutron_credentials)
nova = nvclient.Client(**credentials)

# if not nova.keypairs.findall(name="admin"):
#     with open(os.path.expanduser('~/.ssh/id_rsa.pub')) as fpubkey:
#         nova.keypairs.create(name="admin", public_key=fpubkey.read())


def add_floating_ip_for_vm(tenant_name, instance):
    """
    This method is used to allocate and associate a floating IP to the given VM
    based on availability from the defined pool.
    """
    tenant_credentials = get_tenant_nova_credentials(tenant_name)
    pdb.set_trace()
    nova = nvclient.Client(**tenant_credentials)
def parseArgs(): """ Parse the arguments/options passed to the program on the command line. """ parser = argparse.ArgumentParser( description='Download Coursera.org lecture material and resources.') # positional parser.add_argument('class_names', action='store', nargs='+', help='name(s) of the class(es) (e.g. "nlp")') parser.add_argument('-c', '--cookies_file', dest='cookies_file', action='store', default=None, help='full path to the cookies.txt file') parser.add_argument('-u', '--username', dest='username', action='store', default=None, help='coursera username') parser.add_argument('-n', '--netrc', dest='netrc', nargs='?', action='store', const=True, default=False, help='use netrc for reading passwords, uses default' ' location if no path specified') parser.add_argument('-p', '--password', dest='password', action='store', default=None, help='coursera password') # optional parser.add_argument('--about', dest='about', action='store_true', default=False, help='download "about" metadata. (Default: False)') parser.add_argument('-b', '--preview', dest='preview', action='store_true', default=False, help='get preview videos. (Default: False)') parser.add_argument('-f', '--formats', dest='file_formats', action='store', default='all', help='file format extensions to be downloaded in' ' quotes space separated, e.g. "mp4 pdf" ' '(default: special value "all")') parser.add_argument('-sf', '--section_filter', dest='section_filter', action='store', default=None, help='only download sections which contain this' ' regex (default: disabled)') parser.add_argument('-lf', '--lecture_filter', dest='lecture_filter', action='store', default=None, help='only download lectures which contain this regex' ' (default: disabled)') parser.add_argument('--wget', dest='wget', action='store', nargs='?', const='wget', default=None, help='use wget for downloading,' 'optionally specify wget bin') parser.add_argument('--curl', dest='curl', action='store', nargs='?', const='curl', default=None, help='use curl for downloading,' ' optionally specify curl bin') parser.add_argument('--aria2', dest='aria2', action='store', nargs='?', const='aria2c', default=None, help='use aria2 for downloading,' ' optionally specify aria2 bin') parser.add_argument('--axel', dest='axel', action='store', nargs='?', const='axel', default=None, help='use axel for downloading,' ' optionally specify axel bin') # We keep the wget_bin, ... options for backwards compatibility. 
parser.add_argument('-w', '--wget_bin', dest='wget_bin', action='store', default=None, help='DEPRECATED, use --wget') parser.add_argument('--curl_bin', dest='curl_bin', action='store', default=None, help='DEPRECATED, use --curl') parser.add_argument('--aria2_bin', dest='aria2_bin', action='store', default=None, help='DEPRECATED, use --aria2') parser.add_argument('--axel_bin', dest='axel_bin', action='store', default=None, help='DEPRECATED, use --axel') parser.add_argument('-o', '--overwrite', dest='overwrite', action='store_true', default=False, help='whether existing files should be overwritten' ' (default: False)') parser.add_argument('-l', '--process_local_page', dest='local_page', help='uses or creates local cached version of syllabus' ' page') parser.add_argument('--skip-download', dest='skip_download', action='store_true', default=False, help='for debugging: skip actual downloading of files') parser.add_argument('--path', dest='path', action='store', default='', help='path to save the file') parser.add_argument('--verbose-dirs', dest='verbose_dirs', action='store_true', default=False, help='include class name in section directory name') parser.add_argument('--debug', dest='debug', action='store_true', default=False, help='print lots of debug information') parser.add_argument('--quiet', dest='quiet', action='store_true', default=False, help='omit as many messages as possible' ' (only printing errors)') parser.add_argument('--add-class', dest='add_class', action='append', default=[], help='additional classes to get') parser.add_argument('-r', '--reverse', dest='reverse', action='store_true', default=False, help='download sections in reverse order') parser.add_argument('--combined-section-lectures-nums', dest='combined_section_lectures_nums', action='store_true', default=False, help='include lecture and section name in final files') parser.add_argument('--hook', dest='hooks', action='append', default=[], help='hooks to run when finished') args = parser.parse_args() # Initialize the logging system first so that other functions # can use it right away if args.debug: logging.basicConfig(level=logging.DEBUG, format='%(name)s[%(funcName)s] %(message)s') elif args.quiet: logging.basicConfig(level=logging.ERROR, format='%(name)s: %(message)s') else: logging.basicConfig(level=logging.INFO, format='%(message)s') # turn list of strings into list args.file_formats = args.file_formats.split() for bin in ['wget_bin', 'curl_bin', 'aria2_bin', 'axel_bin']: if getattr(args, bin): logging.error('The --%s option is deprecated, please use --%s', bin, bin[:-4]) sys.exit(1) # check arguments if args.cookies_file and not os.path.exists(args.cookies_file): logging.error('Cookies file not found: %s', args.cookies_file) sys.exit(1) if not args.cookies_file: try: args.username, args.password = get_credentials( username=args.username, password=args.password, netrc=args.netrc) except CredentialsError as e: logging.error(e) sys.exit(1) return args
def __init__(self, driver, name=None): self.driver = driver self.credentials = credentials.get_credentials(name) # if name is not None: # self.businesses = dict([(x, credentials.get_credentials(x)) for x in # self.credentials['businesses']]) # Pages # syntax: self.whatever = filename(same as in __init__.py).Classname(driver) self.signin_page = signin.SigninPage(driver) self.signin_code_page = signin_code.SigninCodePage(driver) self.reset_password_page = reset_password.ResetPasswordPage(driver) self.reset_password_code_page = ( reset_password.ResetPasswordCodePage(driver)) self.reset_password_new_page = ( reset_password.ResetPasswordNewPage(driver)) # enroll process (business or responding to invite) self.invite_pre_screen_page = invite_pre_screen.InvitePreScreenPage( driver) self.dob_page = invite.DOBPage(driver) self.invite_page = invite.InvitePage(driver) self.enroll_code_page = enroll_code.EnrollCodePage(driver) self.enroll_factor2_page = enroll_factor2.EnrollFactor2Page(driver) self.enroll_name_page = enroll_name.EnrollNamePage(driver) self.enroll_password_page = enroll_password.EnrollPasswordPage(driver) self.enroll_accept_page = enroll_accept.EnrollAcceptPage(driver) self.enroll_signin_page = enroll_signin.EnrollSigninPage(driver) self.why_email_page = why.WhyEmailPage(driver) self.why_phone_page = why.WhyPhonePage(driver) self.password_tips_page = why.PasswordTipsPage(driver) self.for_employers = for_employers.ForEmployersPage( driver) # Home page self.for_employees = for_employees.ForEmployeesPage(driver) self.contact_map_page = contact_flow.ContactMapPage(driver) self.contact_form_page = contact_flow.ContactFormPage(driver) self.about_public_page = about.AboutPublicPage(driver) self.about_private_page = about.AboutPrivatePage(driver) self.pub_terms_page = pub_terms.PubTermsPage(driver) self.pub_privacy_page = pub_privacy.PubPrivacyPage(driver) self.add_business_page = add_business.AddBusinessPage(driver) self.business_details_page = ( business_details.BusinessDetailsPage(driver)) self.business_prefilled_page = ( business_prefilled.BusinessPrefilledPage(driver)) self.business_settings_page = ( business_settings.BusinessSettingsPage(driver)) self.admin_page = admin.AdminPage(driver) self.add_admin_page = admin.AddAdminPage(driver) self.employee_welcome = employee_welcome.EmployeeWelcomePage(driver) self.account_page = account.AccountPage(driver) self.eHome_page = eHome.EHomePage(driver) self.account_details_page = account.AccountDetailsPage(driver) self.send_to_bank_page = send_to_bank.SendToBankPage(driver) self.send_to_atm_page = send_to_atm.SendToATMPage(driver) self.send_to_cashout = send_to_cashout.SendToCashoutPage(driver) self.recipient_page = recipient.RecipientPage(driver) self.recipient_name_page = recipient_name.RecipientNamePage(driver) self.recipient_view_page = recipient_view.RecipientViewPage(driver) self.recipient_address_page = ( recipient_address.RecipientAddressPage(driver)) self.recipient_info_page = (recipient_info.RecipientInfoPage(driver)) self.bank_account_page = bank_account.BankAccountPage(driver) self.bank_account_select_page = ( bank_account_select.BankAccountSelectPage(driver)) self.send_page = send.SendPage(driver) self.td_page = td.TransferDetailsPage(driver) self.clabe_page = what_is_clabe.ClabePage(driver) self.lobby_page = lobby.LobbyPage(driver) self.invitations_page = invitations.InvitationsPage(driver) self.invitation_card_page = invitation_card.InvitationCardPage(driver) self.pending_elections_page = ( 
pending_elections.PendingElectionsPage(driver)) self.employee_page = employees.EmployeePage(driver) self.employee_add_page = employee_add.AddEmployeePage(driver) self.employee_add_csv1_page = ( employee_add_csv.AddEmployeesCSV1Page(driver)) self.employee_add_csv2_page = ( employee_add_csv.AddEmployeesCSV2Page(driver)) self.employee_view_page = employee_view.EmployeeViewPage(driver) self.ps_page = personal_settings.SettingsPage(driver) self.participate_page = participate.ParticipatePage(driver) self.ps_edit_email_page = ps_edit_email.EditEmailPage(driver) self.ps_add_email_page = ps_add_email.AddEmailPage(driver) self.ps_edit_phone_page = ps_edit_phone.EditPhonePage(driver) self.ps_add_phone_page = ps_add_phone.AddPhonePage(driver) self.ps_confirmation_page = ( ps_confirmation.SettingsConfirmationPage(driver)) self.ps_change_pw_page = ps_change_pw.ChangePasswordPage(driver) self.employers_page = ps_employers.EmployerPage(driver) self.pay_election_page = pay_election.PayElectionPage(driver) self.election_history_page = ( pay_election_history.ElectionHistoryPage(driver)) self.contact_us_page = contact_us.ContactPublicPage(driver) self.feedback_page = contact_us.ContactPrivatePage(driver)
# A wrapper around MySQLdb.
# To start querying you need to open a connection:
#     cnx = db.connect()
# There are two types of queries: query (for queries that return results) and
# mutate (for queries that modify tables and databases).
# As soon as you finish querying, call
#     db.close(cnx)   # (IMPORTANT)
import MySQLdb
import MySQLdb.cursors

from credentials import get_credentials

config = {
    'user': '******',
    'passwd': get_credentials(),
    'host': '127.0.0.1',
    'port': 3306,
    'db': 'aerotest'
}


class MySQLCursorDict(MySQLdb.cursors.Cursor):
    def fetchone(self):
        row = self._fetch_row()
        if row:
            return dict(zip(self.column_names, self._row_to_python(row)))
        return None


class Connection():
    def __init__(self):
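# Hypothetical usage of the wrapper module above, following its own comments. Only
# connect(), the two query kinds, and close(cnx) are described in the source (and the
# module is truncated), so the query() call and its signature here are assumptions.
import db  # assumed module name for the wrapper above

cnx = db.connect()
rows = cnx.query("SELECT ev_id FROM aircraft_accident LIMIT 5")  # assumed API
for row in rows:
    print(row)
db.close(cnx)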
def whoami(args):
    creds = get_credentials()
    if creds:
        print creds[0]
    else:
        print 'You are not logged in.'
def main(): """Shows basic usage of the Gmail API. Creates a Gmail API service object """ credentials = get_credentials() http = credentials.authorize(httplib2.Http()) service = discovery.build('gmail', 'v1', http=http) try: query = sys.argv[1] except IndexError: query = 'is:unread' try: path = sys.argv[2] except IndexError: path = 'INBOX' try: char_count = int(sys.argv[3]) except IndexError: char_count = 100 try: labels = True if str(sys.argv[4]) == 'true' else False except IndexError: labels = False if labels: results = service.users().labels().list(userId='me').execute() labels = results.get('labels', []) if len(labels) > 0: num = 1 for label in labels: label_info = service.users().labels().get( userId='me', id=label['id']).execute() print("{}. {} ({})".format( num, label['name'], label_info['messagesUnread'])) num += 1 else: if path == 'ALL': results = service.users().labels().list(userId='me').execute() labels = results.get('labels', []) for label in labels: try: results = ListAllMessages( service, 'me', query, [label['name']] ) if results: msg_count = 0 threads = [] for message in results: if message['threadId'] not in threads: msg_count += 1 threads.append(message['threadId']) unique_messages = len(threads) if unique_messages > 0: print("{0} unread message(s) in {1}" .format(unique_messages, label['name'])) except Exception as error: print(label['name'] + ': ' + str(error)) else: try: results = ListAllMessages(service, 'me', query, [path]) messageDict = {} messages = [] threads = [] currThread = '' if results: for message in results: messageDict = GetMessage(service, 'me', message['id']) msgData = '' msgData = messageDict['data'] headers = messageDict['payload']['headers'] currThread = message['threadId'] if currThread not in threads: info = [] info = GetFromAndTime(headers) info['data'] = msgData[:char_count] messages.append(info) threads.append(currThread) msg_len = len(messages) display_init_msg = "\nFetching " +\ str(msg_len) +\ " email(s) from " + path + "." print(display_init_msg) print(len(display_init_msg) * "-") if msg_len > 0: seq = 1 for message in messages: from_str = message['from'] date_str = message['date'].__str__() subject_str = message['subject']\ if message['subject'] else "(No Subject)" msg_str = message['data']\ if message['data'] else "(No Message)" email_str = "\nEmail: " + str(seq) print(email_str) print(len(email_str) * "-") print("\n+ From: " + from_str + ", at " + date_str + "\n" + " Sub: " + subject_str + "\n" + " Msg: " + msg_str + "\n" + "--------------------------\n") seq += 1 except Exception as error: print(path + ': ' + str(error))
# here = os.path.dirname(os.path.realpath(__file__))
from random import choice
import threading
import logging

logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.INFO)

from threading import Thread
import sys

from telegram.ext import Updater
import onlyfunctions as f
from telegram.ext import CommandHandler, MessageHandler, Filters
import credentials as CR

token, chatID = CR.get_credentials()
updater = Updater(token=token)
dispatcher = updater.dispatcher
j = updater.job_queue


def start(bot, update):
    txt = "I'm a bot, please talk to me!"
    bot.send_message(chat_id=update.message.chat_id, text=txt)


def shutdown():
    updater.stop()
    updater.is_idle = False
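# A hypothetical continuation of the bot module above: register the start handler
# and begin polling. This uses the pre-v13 python-telegram-bot API implied by the
# (bot, update) handler signature; the main guard itself is an assumption.
if __name__ == '__main__':
    dispatcher.add_handler(CommandHandler('start', start))
    updater.start_polling()
    updater.idle()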
def parseArgs(args=None): """ Parse the arguments/options passed to the program on the command line. """ parser = argparse.ArgumentParser( description='Download Coursera.org lecture material and resources.') # positional parser.add_argument('class_names', action='store', nargs='+', help='name(s) of the class(es) (e.g. "nlp")') parser.add_argument('-c', '--cookies_file', dest='cookies_file', action='store', default=None, help='full path to the cookies.txt file') parser.add_argument('-u', '--username', dest='username', action='store', default=None, help='coursera username') parser.add_argument('-n', '--netrc', dest='netrc', nargs='?', action='store', const=True, default=False, help='use netrc for reading passwords, uses default' ' location if no path specified') parser.add_argument('-p', '--password', dest='password', action='store', default=None, help='coursera password') # optional parser.add_argument('--about', dest='about', action='store_true', default=False, help='download "about" metadata. (Default: False)') parser.add_argument('-b', '--preview', dest='preview', action='store_true', default=False, help='get preview videos. (Default: False)') parser.add_argument('-f', '--formats', dest='file_formats', action='store', default='all', help='file format extensions to be downloaded in' ' quotes space separated, e.g. "mp4 pdf" ' '(default: special value "all")') parser.add_argument('-sf', '--section_filter', dest='section_filter', action='store', default=None, help='only download sections which contain this' ' regex (default: disabled)') parser.add_argument('-lf', '--lecture_filter', dest='lecture_filter', action='store', default=None, help='only download lectures which contain this regex' ' (default: disabled)') parser.add_argument('-rf', '--resource_filter', dest='resource_filter', action='store', default=None, help='only download resources which match this regex' ' (default: disabled)') parser.add_argument('--wget', dest='wget', action='store', nargs='?', const='wget', default=None, help='use wget for downloading,' 'optionally specify wget bin') parser.add_argument('--curl', dest='curl', action='store', nargs='?', const='curl', default=None, help='use curl for downloading,' ' optionally specify curl bin') parser.add_argument('--aria2', dest='aria2', action='store', nargs='?', const='aria2c', default=None, help='use aria2 for downloading,' ' optionally specify aria2 bin') parser.add_argument('--axel', dest='axel', action='store', nargs='?', const='axel', default=None, help='use axel for downloading,' ' optionally specify axel bin') # We keep the wget_bin, ... options for backwards compatibility. 
parser.add_argument('-w', '--wget_bin', dest='wget_bin', action='store', default=None, help='DEPRECATED, use --wget') parser.add_argument('--curl_bin', dest='curl_bin', action='store', default=None, help='DEPRECATED, use --curl') parser.add_argument('--aria2_bin', dest='aria2_bin', action='store', default=None, help='DEPRECATED, use --aria2') parser.add_argument('--axel_bin', dest='axel_bin', action='store', default=None, help='DEPRECATED, use --axel') parser.add_argument('-o', '--overwrite', dest='overwrite', action='store_true', default=False, help='whether existing files should be overwritten' ' (default: False)') parser.add_argument('-l', '--process_local_page', dest='local_page', help='uses or creates local cached version of syllabus' ' page') parser.add_argument('--skip-download', dest='skip_download', action='store_true', default=False, help='for debugging: skip actual downloading of files') parser.add_argument('--path', dest='path', action='store', default='', help='path to save the file') parser.add_argument('--verbose-dirs', dest='verbose_dirs', action='store_true', default=False, help='include class name in section directory name') parser.add_argument('--debug', dest='debug', action='store_true', default=False, help='print lots of debug information') parser.add_argument('--quiet', dest='quiet', action='store_true', default=False, help='omit as many messages as possible' ' (only printing errors)') parser.add_argument('--add-class', dest='add_class', action='append', default=[], help='additional classes to get') parser.add_argument('-r', '--reverse', dest='reverse', action='store_true', default=False, help='download sections in reverse order') parser.add_argument('--combined-section-lectures-nums', dest='combined_section_lectures_nums', action='store_true', default=False, help='include lecture and section name in final files') parser.add_argument('--hook', dest='hooks', action='append', default=[], help='hooks to run when finished') parser.add_argument('-pl', '--playlist', dest='playlist', action='store_true', default=False, help='generate M3U playlists for course weeks') parser.add_argument('--clear-cache', dest='clear_cache', action='store_true', default=False, help='clear cached cookies') parser.add_argument('--unrestricted-filenames', dest='intact_fnames', action='store_true', default=False, help='Do not limit filenames to be ASCII-only') args = parser.parse_args(args) # Initialize the logging system first so that other functions # can use it right away if args.debug: logging.basicConfig(level=logging.DEBUG, format='%(name)s[%(funcName)s] %(message)s') elif args.quiet: logging.basicConfig(level=logging.ERROR, format='%(name)s: %(message)s') else: logging.basicConfig(level=logging.INFO, format='%(message)s') # turn list of strings into list args.file_formats = args.file_formats.split() # decode path so we can work properly with cyrillic symbols on different # versions on Python args.path = decode_input(args.path) for bin in ['wget_bin', 'curl_bin', 'aria2_bin', 'axel_bin']: if getattr(args, bin): logging.error('The --%s option is deprecated, please use --%s', bin, bin[:-4]) sys.exit(1) # check arguments if args.cookies_file and not os.path.exists(args.cookies_file): logging.error('Cookies file not found: %s', args.cookies_file) sys.exit(1) if not args.cookies_file: try: args.username, args.password = get_credentials( username=args.username, password=args.password, netrc=args.netrc) except CredentialsError as e: logging.error(e) sys.exit(1) return args
def parse_args(args=None): """ Parse the arguments/options passed to the program on the command line. """ parser = argparse.ArgumentParser(description="Download Coursera.org lecture material and resources.") # Basic options group_basic = parser.add_argument_group("Basic options") group_basic.add_argument("class_names", action="store", nargs="+", help='name(s) of the class(es) (e.g. "ml-005")') group_basic.add_argument( "-u", "--username", dest="username", action="store", default=None, help="coursera username" ) group_basic.add_argument( "-p", "--password", dest="password", action="store", default=None, help="coursera password" ) group_basic.add_argument( "--on-demand", dest="on_demand", action="store_true", default=False, help="[DEPRECATED] get on-demand videos. Do not use" " this option, it is deprecated. The script will" " try to detect course type automatically.", ) group_basic.add_argument( "-b", # FIXME: kill this one-letter option "--preview", dest="preview", action="store_true", default=False, help="get videos from preview pages. (Default: False)", ) group_basic.add_argument( "--path", dest="path", action="store", default="", help="path to where to save the file. (Default: current directory)", ) group_basic.add_argument( "-sl", # FIXME: deprecate this option "--subtitle-language", dest="subtitle_language", action="store", default="en", help="Choose language to download subtitles. (Default: en)", ) # Selection of material to download group_material = parser.add_argument_group("Selection of material to download") group_material.add_argument( "--about", # FIXME: should be --about-course dest="about", action="store_true", default=False, help='download "about" metadata. (Default: False)', ) group_material.add_argument( "-f", "--formats", dest="file_formats", action="store", default="all", help="file format extensions to be downloaded in" ' quotes space separated, e.g. 
"mp4 pdf" ' '(default: special value "all")', ) group_material.add_argument( "--ignore-formats", dest="ignore_formats", action="store", default=None, help="file format extensions of resources to ignore" " (default: None)", ) group_material.add_argument( "-sf", # FIXME: deprecate this option "--section_filter", dest="section_filter", action="store", default=None, help="only download sections which contain this" " regex (default: disabled)", ) group_material.add_argument( "-lf", # FIXME: deprecate this option "--lecture_filter", dest="lecture_filter", action="store", default=None, help="only download lectures which contain this regex" " (default: disabled)", ) group_material.add_argument( "-rf", # FIXME: deprecate this option "--resource_filter", dest="resource_filter", action="store", default=None, help="only download resources which match this regex" " (default: disabled)", ) group_material.add_argument( "--video-resolution", dest="video_resolution", action="store", default="540p", help="video resolution to download (default: 540p); " "only valid for on-demand courses; " "only values allowed: 360p, 540p, 720p", ) # Selection of material to download group_external_dl = parser.add_argument_group("External downloaders") group_external_dl.add_argument( "--wget", dest="wget", action="store", nargs="?", const="wget", default=None, help="use wget for downloading," "optionally specify wget bin", ) group_external_dl.add_argument( "--curl", dest="curl", action="store", nargs="?", const="curl", default=None, help="use curl for downloading," " optionally specify curl bin", ) group_external_dl.add_argument( "--aria2", dest="aria2", action="store", nargs="?", const="aria2c", default=None, help="use aria2 for downloading," " optionally specify aria2 bin", ) group_external_dl.add_argument( "--axel", dest="axel", action="store", nargs="?", const="axel", default=None, help="use axel for downloading," " optionally specify axel bin", ) parser.add_argument( "--resume", dest="resume", action="store_true", default=False, help="resume incomplete downloads (default: False)", ) parser.add_argument( "-o", "--overwrite", dest="overwrite", action="store_true", default=False, help="whether existing files should be overwritten" " (default: False)", ) parser.add_argument( "--verbose-dirs", dest="verbose_dirs", action="store_true", default=False, help="include class name in section directory name", ) parser.add_argument( "--quiet", dest="quiet", action="store_true", default=False, help="omit as many messages as possible" " (only printing errors)", ) parser.add_argument( "-r", "--reverse", dest="reverse", action="store_true", default=False, help="download sections in reverse order" ) parser.add_argument( "--combined-section-lectures-nums", dest="combined_section_lectures_nums", action="store_true", default=False, help="include lecture and section name in final files", ) parser.add_argument( "--unrestricted-filenames", dest="intact_fnames", action="store_true", default=False, help="Do not limit filenames to be ASCII-only", ) parser.add_argument( "-v", "--version", help="Display the version of udemy-dl and exit", action="version", version="%(prog)s {version}".format(version=__version__), ) # Advanced authentication group_adv_auth = parser.add_argument_group("Advanced authentication options") group_adv_auth.add_argument( "-c", "--cookies_file", dest="cookies_file", action="store", default=None, help="full path to the cookies.txt file", ) group_adv_auth.add_argument( "-n", "--netrc", dest="netrc", nargs="?", action="store", 
const=True, default=False, help="use netrc for reading passwords, uses default" " location if no path specified", ) group_adv_auth.add_argument( "-k", "--keyring", dest="use_keyring", action="store_true", default=False, help="use keyring provided by operating system to " "save and load credentials", ) group_adv_auth.add_argument( "--clear-cache", dest="clear_cache", action="store_true", default=False, help="clear cached cookies" ) # Advanced miscellaneous options group_adv_misc = parser.add_argument_group("Advanced miscellaneous options") group_adv_misc.add_argument("--hook", dest="hooks", action="append", default=[], help="hooks to run when finished") group_adv_misc.add_argument( "-pl", "--playlist", dest="playlist", action="store_true", default=False, help="generate M3U playlists for course weeks", ) # Debug options group_debug = parser.add_argument_group("Debugging options") group_debug.add_argument( "--skip-download", dest="skip_download", action="store_true", default=False, help="for debugging: skip actual downloading of files", ) group_debug.add_argument( "--debug", dest="debug", action="store_true", default=False, help="print lots of debug information" ) group_debug.add_argument( "-l", # FIXME: remove short option from rarely used ones "--process_local_page", dest="local_page", help="uses or creates local cached version of syllabus" " page", ) # Final parsing of the options args = parser.parse_args(args) # Initialize the logging system first so that other functions # can use it right away if args.debug: logging.basicConfig(level=logging.DEBUG, format="%(name)s[%(funcName)s] %(message)s") elif args.quiet: logging.basicConfig(level=logging.ERROR, format="%(name)s: %(message)s") else: logging.basicConfig(level=logging.INFO, format="%(message)s") # turn list of strings into list args.file_formats = args.file_formats.split() # decode path so we can work properly with cyrillic symbols on different # versions on Python args.path = decode_input(args.path) # check arguments if args.use_keyring and args.password: logging.warning("--keyring and --password cannot be specified together") args.use_keyring = False if args.use_keyring and not keyring: logging.warning("The python module `keyring` not found.") args.use_keyring = False if args.cookies_file and not os.path.exists(args.cookies_file): logging.error("Cookies file not found: %s", args.cookies_file) sys.exit(1) if not args.cookies_file: try: args.username, args.password = get_credentials( username=args.username, password=args.password, netrc=args.netrc, use_keyring=args.use_keyring ) except CredentialsError as e: logging.error(e) sys.exit(1) return args
def main(): """Shows basic usage of the Google Calendar API. Creates a Google Calendar API service object """ credentials = get_credentials() http = credentials.authorize(httplib2.Http()) service = discovery.build('calendar', 'v3', http=http) try: curDate = parser.parse(sys.argv[1]) except: curDate = datetime.datetime.now() dayStart = datetime.datetime( curDate.year, curDate.month, curDate.day, 0, 0, 0).isoformat() + 'Z' dayEnd = datetime.datetime( curDate.year, curDate.month, curDate.day, 23, 59, 59).isoformat() + 'Z' selectedDate = dayStart[0:10] page_token = None while True: calendar_list = service.calendarList().list( pageToken=page_token).execute() for calendar_list_entry in calendar_list['items']: print ("\n+ " + calendar_list_entry['summary'] + " on " + selectedDate) eventsResult = service.events().list( calendarId=calendar_list_entry['id'], timeMin=dayStart, timeMax=dayEnd, maxResults=10, singleEvents=True, orderBy='startTime').execute() events = eventsResult.get('items', []) if events: for event in events: start = event['start'].get( 'dateTime', event['start'].get('date')) end = event['end'].get( 'dateTime', event['end'].get('date')) d1 = parser.parse(start) d2 = parser.parse(end) diff = relativedelta(d2, d1) summary = event.get('summary', '') if diff.hours > 0 or diff.minutes > 0: print (" --- " + summary + " (" + start[11:16] + " - " + end[11:16] + "): %d hr(s) %d min(s)" % (diff.hours, diff.minutes)) else: print (" --- " + summary + " (" + start[0:10] + " - " + end[0:10] + ")") page_token = calendar_list.get('nextPageToken') if not page_token: break
from console import parse_args, parse_filters
from credentials import get_credentials
from clients.utils import iterate_response, BadRequest, curry_with_filters
from clients.clients import ProjectClient, IssueClient, UserClient, TimeEntryClient
from formatter import PipeFormatter, LinkFormatter

[url, username, password] = get_credentials()
args = parse_args()
args = parse_filters(args)
formatter = PipeFormatter()

if args.subject == 'projects':
    rm = ProjectClient(username, password, url)
    if args.id is not None:
        item = rm.get_project_details(args.id)
        formatter.format_project_details(item['project'])
    else:
        for p in iterate_response(
                curry_with_filters(rm.get_projects, args.filters), 'projects'):
            summary = {
                "type": "projects",
                "identifier": p['identifier'],
                "id": p['id'],
                "name": p['name'],
                "description":
def get_token():
    credentials = get_credentials()
    keystone = client.Client(**credentials)
    token = keystone.get_raw_token_from_identity_service(**credentials)
    return token["token"]["id"]
from stopwords import get_english_stop_words
from nltk.stem import *
from sklearn.cluster import KMeans
from sklearn.metrics import adjusted_rand_score
from decode import decodeword
from sklearn import metrics
from credentials import get_credentials
import MySQLdb
import sys
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA

##################### FETCHING DATABASE FOR RESOURCES REQUIRED ###########
db = MySQLdb.connect("127.0.0.1", "aero", get_credentials(), "aerotest")
cursor = db.cursor()
sql = "select narr_cause from narratives where NULLIF(narr_cause, '') IS NOT NULL;"
# sql = "select ev_id, narr_accp, narr_accf, narr_cause from narratives where NULLIF(narr_cause, '') IS NOT NULL;"
cursor.execute(sql)
narratives = []
for item in cursor.fetchall():
    narratives.append(item[0])
db.close()
################################# FETCHING ENDS HERE #####################

dataset = []
stop_words = get_english_stop_words()
stemmer = PorterStemmer()
for narrative in narratives:
""" Description: This script is to create network, subnets, router with \ external gateway mapping. Developer: [email protected] """ import pdb import os from neutronclient.v2_0 import client from credentials import get_credentials from config import NETWORK_COUNT, VM_COUNT, FLOATING_IP_CREATION from vm_instances import launch_vm_on_network, terminate_vm_on_network from floating_ips import release_all_floating_ips from datetime import datetime, timedelta from pytz import timezone credentials = get_credentials() neutron = client.Client(**credentials) fmt = "%Y-%m-%d %H:%M:%S %Z%z" def create_network(tenant, router, network_index, network_cidr): """ This method is used to create network, subnets, interfaces on router with \ external gateway mapping for the given network name and CIDR. """ try: prefix = tenant['tenant_name'] network_name = prefix + '-net-' + network_index print "\n" print "=" * 50
import requests
import os
import sys
import json
import random
import time

import credentials

# randomly flash colors
creds = credentials.get_credentials()
url = ("http://" + creds["ip_addr"] + "/api/" + creds["username"] +
       "/lights/2/state")

# for i in xrange(1, 41):
#     n = int(random.random() * 60000)
#     r = requests.put(url, data=json.dumps({"on": True, "sat": 254, "bri": 254,
#                                            "hue": n, "transitiontime": 0}))
#     print r.text

# randomly flash colors faster by turning on and off
# last = False
# for i in xrange(1, 41):
#     n = int(random.random() * 60000)
#     r = requests.put(url, data=json.dumps({"on": last, "sat": 254, "bri": 254,
#                                            "hue": n, "transitiontime": 0}))
#     last = not last
#     print r.text

# r = requests.get(url)
# d = json.loads(r.text)
# print [int(n) for n in d["lights"]]
# print r.text
# print json.loads(requests.get(url).text)["lights"]