Example 1
 def get_credentials(self):
     # Retrieve the credentials
     self.cred_tested = credentials.Credentials(CONF.tested_rc,
                                                pwd=CONF.tested_passwd,
                                                no_env=CONF.no_env)
     if CONF.testing_rc:
         self.cred_testing = credentials.Credentials(
             CONF.testing_rc, pwd=CONF.testing_passwd, no_env=CONF.no_env)
     else:
         self.cred_testing = None
Example 2
class TopChans:
  max_retries = 10
  wait_retry_sec = 5
  url = "https://api.twitch.tv/kraken/streams"
  creds = creds.Credentials().creds
  # Return current top channels in JSON format, number of channels is num
  def top_channels(self, num):
    num_retries = 0
    while num_retries < self.max_retries:
      try:
        print self.creds
        curr_url = self.url + "?limit=" + str(num) + "&stream_type=live"
        request = urllib2.Request(curr_url, headers={"Client-ID": self.creds['T_CLIENTID']})
        req = urllib2.urlopen(request)
        data = req.read()
        res = self.data_to_json(data)
        return res
      except Exception, e:
        print e
        num_retries = num_retries + 1
        if num_retries < self.max_retries:
          print "Will retry in %d seconds", self.wait_retry_sec
          sleep(self.wait_retry_sec)
        else:
          print "Final retry, won't retry again"
Example 3
 def __init__(self, username, password):
     '''
     Initialization method for a new client.
     ''' 
     self.username = username
     self.password = password
     self.credential = credentials.Credentials()
Example 4
def main():
    if not os.environ.get('OS_CEILOMETER_URL'):
        print("ERROR: $OS_CEILOMETER_URL variable not set!")
        print(
            "Use this as an example: OS_CEILOMETER_URL=https://compute.datacentred.io:8777"
        )
        exit(1)

    # get a token
    creds = credentials.Credentials()
    token = creds.get_token()

    ceilomar = ceilomarius.Ceilomarius(
        api_version=2,
        endpoint=os.environ['OS_CEILOMETER_URL'],
        token=token,
        verbose=False)

    query = [{
        'field': 'metadata.event_type',
        'op': 'eq',
        'value': 'compute.instance.exists'
    }]
    for l in xrange(1, 11):
        results = []
        objects = []
        for iter in xrange(1, 6):
            resp = ceilomar.get_metric(meter_name="instance", q=query, limit=l)
            objects = resp.json()
            # timedelta.microseconds holds only the sub-second component; use total_seconds() for the full elapsed time
            elapsed = resp.elapsed.total_seconds() * 1e6
            print("Objects retrieved: {}, us elapsed: {}, try no: {}".format(
                len(objects), elapsed, iter))
            results.append(elapsed)
        print("Items: {}, avg response time: {}".format(
            len(objects), numpy.mean(results)))
Example 5
    def __init__(self, ip, port):
        self.credentials = credentials.Credentials()
        self.ip = ip
        self.port = port

        while True:
            self.start()
            time.sleep(0.3)
Example 6
    def __init__(self, appName, appVersion, deviceName, deviceId, capabilities=None, devicePixelRatio=None):
        
        log.info("Begin ConnectionManager constructor")

        self.credentialProvider = cred.Credentials()
        self.appName = appName
        self.appVersion = appVersion
        self.deviceName = deviceName
        self.deviceId = deviceId
        self.capabilities = capabilities
        self.devicePixelRatio = devicePixelRatio
Example 7
def _create_target(target_config):
    now = datetime.datetime.utcnow()
    delta = int(target_config['delta'])
    creds = credentials.Credentials(target_config['credentials'])
    return ce_job.CostExplorerJob(
        name=target_config['name'],
        labels=target_config['metric_labels'],
        credentials=creds,
        metrics=target_config['metric'],
        granularity=target_config['granularity'].upper(),
        start=(now - datetime.timedelta(days=delta)).strftime('%Y-%m-%d'),
        end=now.strftime('%Y-%m-%d'),
        refresh=target_config['refresh'])
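
_create_target reads only a handful of keys from target_config; a hypothetical dict with the shape implied by the code (all values made up for illustration) would be:

target_config = {
    'name': 'aws-cost',                      # job name
    'metric_labels': {'team': 'platform'},   # labels attached to the metric
    'credentials': '/path/to/creds.ini',     # handed to credentials.Credentials()
    'metric': 'UnblendedCost',
    'granularity': 'daily',                  # upper-cased to DAILY by the code
    'delta': '30',                           # look back 30 days from utcnow()
    'refresh': 300,
}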
Example 8
    def private_query(self, method, data=None):
        # Avoid a shared mutable default argument; the dict is modified below.
        if data is None:
            data = {}
        cred = credentials.Credentials()
        data['nonce'] = int(1000 * time.time())

        urlpath = "/{}/{}/{}".format(self.api_version, self.private, method)
        complete_url = self.uri + urlpath
        headers = cred.get_header(data, urlpath)
        response = requests.post(complete_url, data=data, headers=headers)
        if response.status_code == 200:
            js = json.loads(response.content.decode('utf-8'))
            return js
        else:
            return None
Example 9
    def __init__(self):
        if not os.environ.get('OS_HEAT_URL'):
            print("ERROR: $OS_HEAT_URL variable not set!")
            print(
                "Use this as an example: OS_HEAT_URL=https://compute.datacentred.io:8004/v1/`echo $OS_PROJECT_ID`"
            )
            exit(1)

        # get a token
        creds = credentials.Credentials()
        token = creds.get_token()

        self.heat = heatclient.client.Client(
            1, endpoint=os.environ['OS_HEAT_URL'], token=token)
Example 10
 def __init__(self,
              config,
              openstack_spec,
              config_plugin,
              factory,
              notifier=None):
     # the base config never changes for a given NFVbench instance
     self.base_config = config
     # this is the running config, updated at every run()
     self.config = None
     self.config_plugin = config_plugin
     self.factory = factory
     self.notifier = notifier
     self.cred = credentials.Credentials(config.openrc_file, None, False) \
         if config.openrc_file else None
     self.chain_runner = None
     self.specs = Specs()
     self.specs.set_openstack_spec(openstack_spec)
     self.vni_ports = []
     sys.stdout.flush()
Example 11
def plugin_init(opts=None):
    # Parse the credentials of the OpenStack cloud
    if not opts:
        opts = OptionsHolder()
    cred = credentials.Credentials(opts)
    creds_nova = cred.get_nova_credentials_v2()
    # Create the nova and neutron instances
    nova_client = Client(**creds_nova)

    opts = {'all_tenants': 1}
    servers = nova_client.servers.list(detailed=True, search_opts=opts)
    count = 0
    for server in servers:
        chain_id, nvf = decode_instance_name(server.name)
        if chain_id:
            full_name = '%s.%02d' % (nvf, chain_id)
            by_uuid[server.id] = full_name
            count += 1
    print 'Plugin loaded with %d service chain names from Nova' % (count)
    return True
Example 12
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("instance_id", help="Instance ID to compare samples for.")
    parser.add_argument("--verbose", help="increase output verbosity", action="store_true")
    args = parser.parse_args()

    instance_id = args.instance_id

    verbose = False
    if args.verbose:
        print("INFO: Verbosity turned on.")
        verbose = True

    print("INFO: This will compare 2 weeks worth of samples for specified instance.")

    # get a token
    creds = credentials.Credentials()
    token = creds.get_token()

    # get config
    conf = config.Config()

    pag = paginator.Paginator(endpoint=conf.ceilometer_endpoint, token=token)
    mon = mongor.Mongor()

    time_end = datetime.utcnow()
    time_begin = time_end - timedelta(days=14)

    samples_api = pag.get_samples_for_instance(instance_id, time_begin, time_end)
    samples_db = mon.get_samples_for_instance(instance_id, time_begin, time_end)

    compare_samples(samples_api, samples_db, verbose)
    event_samples = extract_events_from_samples(samples_db)

    if verbose:
        print("INFO: All the event-based samples:")
        for event in event_samples:
            pprint.pprint(event)
            print("----------------")
    print( "INFO: There are {} event-based samples.".format(len(event_samples)) )
Example 13
 def __init__(self, config):
     cred = credentials.Credentials(config.openrc_file, None, False)
     session = cred.get_session()
     self.neutron_client = nclient.Client('2.0', session=session)
     self.nova_client = Client(2, session=session)
     network_names = [
         inet['name'] for inet in config.internal_networks.values()
     ]
     network_names.extend(
         [inet['name'] for inet in config.edge_networks.values()])
     router_names = [
         rtr['router_name'] for rtr in config.edge_networks.values()
     ]
     # add idle networks as well
     if config.idle_networks.name:
         network_names.append(config.idle_networks.name)
     self.cleaners = [
         ComputeCleaner(self.nova_client, config.loop_vm_name),
         FlavorCleaner(self.nova_client, config.flavor_type),
         NetworkCleaner(self.neutron_client, network_names),
         RouterCleaner(self.neutron_client, router_names)
     ]
Example 14
anchors_ids = []
global News_channel_ids
News_channel_ids = []

ld_hndlers = loading_handlers.Loading_Handlers()

id_coll = ids_collector_module.ids_collector()

tech = ld_hndlers.loading_handlers_in_array('techHandlers')
politics = ld_hndlers.loading_handlers_in_array('politicsHANDLERS')
sports = ld_hndlers.loading_handlers_in_array('sportsHANDLERS')
entertainment = ld_hndlers.loading_handlers_in_array('entertainmentHANDLERS')
anchor = ld_hndlers.loading_handlers_in_array('AnchorsHANDLERS')
News_channel = ld_hndlers.loading_handlers_in_array('News_channelHANDLERS')

api = credentials.Credentials().API()
mongo = creatingConnection()
Connection = mongo.run()
tweet = Streaming_tweets.Tweets()
save_Tweet = saveTweet.saveTweet()


def ids_collect(api):
    # Declare the module-level lists as global so the assignments below
    # update them instead of creating unused locals.
    global anchors_ids, News_channel_ids
    tech_ids = id_coll.collector(tech, api)
    politics_ids = id_coll.collector(politics, api)
    sports_ids = id_coll.collector(sports, api)
    entertainment_ids = id_coll.collector(entertainment, api)
    anchors_ids = id_coll.collector(anchor, api)
    News_channel_ids = id_coll.collector(News_channel, api)

Example 15
import requests
import json
import credentials

params = (('expire_hours', '2'), )
creds = credentials.Credentials()

data = '{"user":"******", "password":"******"}'
print(data)
resp_token = requests.post('https://pfa.foreca.com/authorize/token',
                           params=params,
                           data=data)
print(resp_token)
token_data = resp_token.json()
resp_token_json = json.loads(json.dumps(token_data))

#NB. Original query string below. It seems impossible to parse and
#reproduce query strings 100% accurately so the one below is given
#in case the reproduced version is not "correct".
# response = requests.post('https://pfa.foreca.com/authorize/token?expire_hours=2', data=data)

access_token = resp_token_json['access_token']

headers = {
    'Authorization': 'Bearer ' + access_token,
}

params = (('lang', 'es'), )

resp_tempe = requests.get('https://pfa.foreca.com/api/v1/current/100633679',
                          headers=headers,
Example 16
                        default=False,
                        action='store_true',
                        help='do not read env variables')
    parser.add_argument(
        '-d',
        '--debug',
        dest='debug',
        default=False,
        action='store_true',
        help='debug level',
    )

    (opts, args) = parser.parse_known_args()
    config_api = configure.Configuration.from_file(
        default_cfg_file).configure()
    cred = credentials.Credentials(opts.rc, opts.pwd, opts.no_env)

    cpulse_log = logging.getLogger()
    # create formatter
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    ch = logging.StreamHandler()
    cpulse_log.addHandler(ch)
    ch.setFormatter(formatter)

    if opts.debug:
        cpulse_log.setLevel(logging.DEBUG)
        ch.setLevel(logging.DEBUG)
    health_check_start()
Example 17
 def setUp(self):
     self.settings = thread_settings.ThreadSettings(
         credentials.Credentials().access_token)
Example 18
def main():
    parser = argparse.ArgumentParser(description='KloudBuster Force Cleanup')

    parser.add_argument('-r',
                        '--rc',
                        dest='rc',
                        action='store',
                        required=False,
                        help='openrc file',
                        metavar='<file>')
    parser.add_argument('-f',
                        '--file',
                        dest='file',
                        action='store',
                        required=False,
                        help='get resources to delete from cleanup log file '
                        '(default:discover from OpenStack)',
                        metavar='<file>')
    parser.add_argument('-d',
                        '--dryrun',
                        dest='dryrun',
                        action='store_true',
                        default=False,
                        help='check resources only - do not delete anything')
    parser.add_argument(
        '--filter',
        dest='filter',
        action='store',
        required=False,
        help='resource name regular expression filter (default:"KB")'
        ' - OpenStack discovery only',
        metavar='<any-python-regex>')
    opts = parser.parse_args()

    cred = credentials.Credentials(opts.rc)

    if opts.file:
        resources = get_resources_from_cleanup_log(opts.file)
    else:
        # None means try to find the resources from openstack directly by name
        resources = None
    global resource_name_re
    if opts.filter:
        try:
            resource_name_re = re.compile(opts.filter)
        except Exception as exc:
            print 'Provided filter is not a valid python regular expression: ' + opts.filter
            print str(exc)
            sys.exit(1)
    else:
        resource_name_re = re.compile('KB')

    cleaners = KbCleaners(cred, resources, opts.dryrun)

    if opts.dryrun:
        print
        print(
            '!!! DRY RUN - RESOURCES WILL BE CHECKED BUT WILL NOT BE DELETED !!!'
        )
        print

    # Display resources to be deleted
    count = cleaners.show_resources()
    if not count:
        sys.exit(0)

    if not opts.file and not opts.dryrun:
        prompt_to_run()

    cleaners.clean()
Example 19
import time
import atexit
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
import credentials
import callybot_database
from datetime import datetime
from MySQLdb import OperationalError
import restart_VPN
import logg
import sys

sys.stdout = logg.Logger(sys.stdout, open('log.txt',
                                          'a'))  # Writes to file and console
app = Flask(__name__)
credential = credentials.Credentials()
db_credentials = credential.db_info
db = callybot_database.CallybotDB(db_credentials[0], db_credentials[1],
                                  db_credentials[2], db_credentials[3])
replier = reply.Reply(credential.access_token, db)
received_message = []


def init():
    reminder_interrupt()
    restart_vpn_interrupt()
    clear_old_reminders()
    thread_handler = thread_settings.ThreadSettings(credential.access_token)
    thread_handler.whitelist("https://folk.ntnu.no/halvorkmTDT4140/")
    thread_handler.set_greeting(
        "Hi there {{user_first_name}}!\nWelcome to CallyBot. Press 'Get Started' to get started!"
Example 20
def main():
    if not os.environ.get('OS_CEILOMETER_URL'):
        print("ERROR: $OS_CEILOMETER_URL variable not set!")
        print(
            "Use this as an example: OS_CEILOMETER_URL=https://compute.datacentred.io:8777"
        )
        exit(1)

    parser = argparse.ArgumentParser()
    parser.add_argument("stack_id",
                        help="The ID or name of the stack with resources.")
    parser.add_argument("--verbose",
                        help="increase output verbosity",
                        action="store_true")
    args = parser.parse_args()

    verbose = False
    if args.verbose:
        print("INFO: Verbosity turned on.")
        verbose = True

    # get a token
    creds = credentials.Credentials()
    token = creds.get_token()

    # use the token with ceilometer's endpoint
    # NOTE: Sadly, the official client doesn't seem to work with the `limit` parameter.
    # NOTE: Use the new custom class.
    #
    #cclient = ceilometerclient.client.get_client(2,
    #                           os_endpoint=os.environ['OS_CEILOMETER_URL'],
    #                           os_token=token)
    #query_samples = [dict(field='meter', op='eq', value='instance')]
    #print( cclient.samples.list( q=query_samples, limit=1 ) )
    ceilomar = ceilomarius.Ceilomarius(
        api_version=2,
        endpoint=os.environ['OS_CEILOMETER_URL'],
        token=token,
        verbose=verbose)

    stack_id = args.stack_id
    stack = stacker.Stacker()
    resources = stack.get_resources_in_stack(stack_id)
    # stores every meter associated with resources from this stack
    meters_all = dict()
    for res in resources:
        if verbose:
            print("INFO: Retrieved resource: %s" % res)
        meters_all.update(ceilomar.get_meters_for_resource(res, limit=1))
    if verbose:
        print("INFO: All meters retrieved from the stack:")
        for key in meters_all.keys():
            print("INFO: The main key: %s" % key)
            print("INFO: Values for this key:")
            for item in meters_all.get(key):
                print("INFO: counter_name: {}, resource_id: {}, timestamp: {}".
                      format(item.get('counter_name'), item.get('resource_id'),
                             item.get('timestamp')))

    # variables for stats on the number of found meters
    n_required = 0
    n_found = 0
    # read required meters from a file
    with open('required_meters.txt', 'r') as f:
        for line in f:
            meter = line.strip()
            if meter in meters_all:
                for item in meters_all[meter]:
                    print("meter: {meter} -> timestamp: {timestamp}".format(
                        meter=item.get('counter_name'),
                        timestamp=item.get('timestamp')))
                    if verbose:
                        print("INFO: Resource metadata:")
                        print("INFO: {}".format(item.get('resource_metadata')))
                n_found += 1
            n_required += 1

    print("Found {} out of all {} meters!".format(n_found, n_required))
Example 21
class ChanConn(threading.Thread):

    # Variable definitions
    creds = credentials.Credentials().creds
    NICK = creds['T_NICK']
    PASS = creds['T_PASS']
    HOST = "irc.twitch.tv"
    PORT = 6667
    output_path = "data/"
    nickname = "brohunt"

    # Initialize the connection.
    def __init__(self, chan):
        reload(sys)
        sys.setdefaultencoding(
            'utf8')  # this is needed for the write to work with utf8
        threading.Thread.__init__(self)  # call superclass
        # Set default values
        self.logger = logging.getLogger('main_log')
        self.connected = False
        self.socket = None
        self.file_raw = None
        self.file_filtered = None
        self.active = True
        self.chan = chan
        self.database = Database()

        # Setup output files
        self.file_path = self.output_path + str(chan)
        self.file_raw = codecs.open(self.file_path+"_raw", 'w', \
          encoding='utf8')
        self.file_filtered = codecs.open(self.file_path, 'w', \
          encoding='utf8')
        self.socket = socket.socket()
        self.connect()
        self.join_chan(chan)

    # Connect to irc server
    def connect(self):
        try:
            self.socket.connect((self.HOST, self.PORT))
            self.send("PASS %s" % self.PASS)
            self.send("NICK %s" % self.NICK)
        except socket.timeout:
            self.active = False
            self.logger.error("Socket timeout for server connect.\
        Aborting this thread.")

    # Connect to a given channel
    def join_chan(self, channel):
        channel = "#" + channel
        self.send("JOIN %s" % channel)

    # Run thread and listen to the joined channel
    def run(self):
        while self.active:
            # Read message from connected channels
            try:
                buf = self.socket.recv(4096)
            except socket.timeout:
                self.logger.error(
                    "Got timeout for channel %s when\
          waiting for a message. Shutting down.", self.chan)
                self.stop()
                continue  # Skip the rest of the loop so we shut down cleanly.
            except socket.error:
                self.logger.error(
                    "Got an error for channel %s when\
          waiting for a message. Shutting down.", self.chan)
                self.stop()
                continue  # Skip the rest of the loop so we shut down cleanly.

            lines = buf.split("\n")
            timestamp = datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')
            for data in lines:
                data = str(data).strip()
                if data == '':
                    continue

                parsed = None
                try:
                    msg = timestamp + data + "\n"
                    self.file_raw.write(msg)
                    parsed = self.parse_json(data, timestamp)
                    print parsed
                    self.database.insert(parsed)
                    print parsed
                except UnicodeDecodeError:
                    logging.error(
                        "Could not write raw message: [ %s ], to file.", msg)
                if parsed != None:
                    try:
                        self.file_filtered.write(json.dumps(parsed) + "\n")
                    except UnicodeDecodeError:
                        logging.error("Could not write parsed [ %s ] to file.",
                                      parsed)
                        self.stop()
                        break  # Skip the rest of the message handling

                # server ping/pong?
                if data.find('PING') != -1:
                    n = data.split(':')[1]
                    self.send('PONG :' + n)
                    if self.connected == False:
                        self.perform()
                        self.connected = True

                args = data.split(None, 3)
                if len(args) != 4:
                    continue
                ctx = {}
                ctx['sender'] = args[0][1:]
                ctx['type'] = args[1]
                ctx['target'] = args[2]
                ctx['msg'] = args[3][1:]

                # whom to reply?
                target = ctx['target']
                if ctx['target'] == self.nickname:
                    target = ctx['sender'].split("!")[0]
        self.handle_exit()

    # This will make the next while-loop to break and the thread to terminate
    def stop(self):
        self.active = False

    # Send a message through the socket
    def send(self, msg):
        self.socket.send(msg + "\r\n")

    def perform(self):
        self.send("PRIVMSG R : Login <>")
        self.send("MODE %s +x" % self.nickname)

    # Convert the read data into a json object
    def parse_json(self, data, time):

        if data.find('PRIVMSG') == -1:
            return None
        if data.find('jtv') != -1:
            return None

        exclam_index = data.find('!')

        # nickname starts at 1 since every message starts with ':'
        # which is not part of nickname
        nickname = data[1:exclam_index]
        hashtag_index = data.find('#')
        chan_end_index = hashtag_index + data[hashtag_index:].find(':')
        chan = data[hashtag_index:chan_end_index]

        msg_start_index = chan_end_index + data[chan_end_index:].find(':')
        msg = data[msg_start_index + 1:]

        if exclam_index == -1 \
        or hashtag_index == -1 \
        or msg_start_index == -1:
            return None

        json_dict = {
            'nickname': nickname,
            'channel': chan,
            'message': msg,
            'time': time
        }
        return json_dict

    def handle_exit(self):
        # This is where we are supposed to make calls to S3_handle to
        # Save away our chat logs
        self.socket.close()
        self.file_raw.close()
        self.file_filtered.close()
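
parse_json above slices a raw IRC line by the positions of '!', '#' and ':'. A self-contained Python 3 restatement of that logic, with a made-up Twitch IRC line to show the resulting dict, is sketched below.

def parse_privmsg(data, time):
    """Standalone restatement of ChanConn.parse_json for quick experimentation."""
    if 'PRIVMSG' not in data or 'jtv' in data:
        return None
    exclam_index = data.find('!')
    hashtag_index = data.find('#')
    chan_end_index = hashtag_index + data[hashtag_index:].find(':')
    msg_start_index = chan_end_index + data[chan_end_index:].find(':')
    if exclam_index == -1 or hashtag_index == -1 or msg_start_index == -1:
        return None
    return {
        'nickname': data[1:exclam_index],            # text between the leading ':' and '!'
        'channel': data[hashtag_index:chan_end_index],
        'message': data[msg_start_index + 1:],
        'time': time,
    }

# Illustrative input, not taken from the source:
print(parse_privmsg(":bob!bob@bob.tmi.twitch.tv PRIVMSG #somechan :hello there",
                    "24-01-01 12:00:00"))
# {'nickname': 'bob', 'channel': '#somechan ', 'message': 'hello there', 'time': '24-01-01 12:00:00'}

Note that, like the original, the extracted channel keeps the trailing space that precedes the ':' introducing the message text.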
Example 22
                      help='Specify branches name')
env_data.add_argument('-rg',
                      required=True,
                      type=str,
                      help='Specify AWS Region name')
# endregion

# region VARS
args = vars(env_data.parse_args())
ENV = args['v']
REPO = args['r']
EMAIL = args['e']
BRANCH = args['b']
REG = args['rg']
repo_name = '{}/{}'.format(ENV, REPO)
git_api = credentials.Credentials(cred_type='gitapi',
                                  region=REG).get_secret().get('api')
username_cc = credentials.Credentials(cred_type='codecommit',
                                      region=REG).get_secret().get('username')
password_cc = credentials.Credentials(cred_type='codecommit',
                                      region=REG).get_secret().get('password')
check = check_list.Checklist(repo_name=repo_name, api_key=git_api)
parsed_branch = []
bran = branches.Branch(repo_name=repo_name, api_key=git_api)
# endregion

for item in BRANCH.split(','):
    parsed_branch.append(item)
    if parsed_branch[0] == 'all':
        parsed_branch = bran.get_branches_list()
        parsed_branch = list(dict.fromkeys(parsed_branch))
Example 23
from yowsup.layers.logger import YowLoggerLayer
from yowsup.stacks import YowStack
from yowsup.common import YowConstants
from yowsup.layers import YowLayerEvent
from yowsup.stacks import YowStack, YOWSUP_CORE_LAYERS
from yowsup.layers.axolotl import YowAxolotlLayer
from yowsup import env

try:
    import credentials as crdntials
except Exception:
    print '''Your phone nr credentials could not be loaded. Read the instructions
             in the credentials_todo.py file. Don't forget to rename the file
             to credentials.py'''

crdntls = crdntials.Credentials()
CREDENTIALS = (crdntls.getlogin(), crdntls.getpassword())

if __name__ == "__main__":
    layers = (
        EchoLayer,
        #     YowParallelLayer([YowAuthenticationProtocolLayer, YowMessagesProtocolLayer, YowReceiptProtocolLayer, YowAckProtocolLayer, YowMediaProtocolLayer, YowIqProtocolLayer, YowCallsProtocolLayer]),YowAxolotlLayer
        # ) + YOWSUP_CORE_LAYERS
        YowParallelLayer([
            YowAuthenticationProtocolLayer, YowMessagesProtocolLayer,
            YowReceiptProtocolLayer, YowAckProtocolLayer,
            YowMediaProtocolLayer, YowIqProtocolLayer, YowCallsProtocolLayer
        ]),
        YowAxolotlLayer,
        YowLoggerLayer,
        YowCoderLayer,