def get_total_events(table_id, label, action):
    """Query Google Analytics for the 'ga:totalEvents' metric of one Marketplace event.

    Args:
        table_id: GA table ids string (e.g. "ga:12345").
        label: value matched against ga:eventLabel.
        action: value matched against ga:eventAction.

    Returns:
        The executed GA API response dict, or None when the query could not
        be constructed (a message is printed in that case).
    """
    service = get_service(_APP_NAME, 'v3')
    try:
        # Filter clauses are AND-ed with ';'. Note the argument order:
        # `action` fills eventAction, `label` fills eventLabel.
        filter_ = \
            'ga:eventCategory==Marketplace;' \
            'ga:eventAction=={};' \
            'ga:eventLabel=={}' \
            .format(
                action,
                label,
            )
        results = service.data().ga().get(
            ids=table_id,
            start_date=_START_DATE,
            end_date=_END_DATE,
            metrics='ga:totalEvents',
            filters=filter_,
            start_index=_START_INDEX,
            max_results=_MAX_RESULTS,
        )
        # BUG FIX: the executed response was previously assigned and then
        # discarded — the function never returned anything.
        return results.execute()
    # FIX: 'except TypeError, error' is Python-2-only syntax; 'as' works in 2.6+ and 3.
    except TypeError as error:
        print ('There was an error in constructing your query : %s' % error)
def airflow_init_web_passwd(c, host, name):
    """Run the airflow web-password init script inside container *name* on *host* via ssh."""
    ck, cv = utils.get_service(env.services, name)
    script = os.path.expandvars(
        "$SKP_SHOME/volume/var/airflow/airflow_init_web_passwd.py")
    docker_cmd = "docker exec -it {} python {}".format(name, script)
    remote_cmd = "ssh -o StrictHostKeyChecking=no {}@{} -t {}".format(
        env.SKP_USER, env.hosts[host]["ipv4"], docker_cmd)
    res = c.run(remote_cmd, pty=True)
def get(self):
    """Fetch the catalog service's listing and render each entry as an inventory link."""
    catalog_serv = utils.get_service('catalog')
    resp = urllib.request.urlopen('http://%s:%s/catalog' % catalog_serv)
    body = resp.read().decode()
    resp.close()
    for entry in json.loads(body):
        self.write('<a href="/inventory?name=%s">%s</a><br />'
                   % (entry['name'], entry['name']))
def main():
    from splunklib.searchcommands import csv as splunkcsv
    del splunkcsv
    import splunk.Intersplunk
    from utils import get_service, read_fieldnames_from_command_input
    try:
        # Splunk first invokes the command in 'getinfo' mode to ask how it
        # should be executed; answer and keep going.
        isgetinfo, sys.argv = splunk.Intersplunk.isGetInfo(sys.argv)
        if isgetinfo:
            splunk.Intersplunk.outputInfo(
                streaming=False,  # because it only runs on a search head
                generating=False,
                retevs=False,
                reqsop=False,
                preop=None,
                timeorder=True,
                clear_req_fields=False,
                req_fields=None)
        if len(sys.argv) < 2:
            raise Exception("Missing actual R script parameter")
        command_argument = sys.argv[1]
        # Buffer stdin so the field names and the events can be read in two passes.
        buffered = StringIO()
        shutil.copyfileobj(sys.stdin, buffered)
        buffered.seek(0)
        fieldnames = read_fieldnames_from_command_input(input_buf=buffered)
        buffered.seek(0)
        settings = {}
        events = splunk.Intersplunk.readResults(buffered, settings)
        # Connect to Splunk through the SDK and run the R script over the events.
        service = get_service(settings['infoPath'])
        header, rows = r(service, events, command_argument,
                         fieldnames=fieldnames)
        splunk.Intersplunk.outputResults(rows, fields=header)
    except errors.Error as e:
        splunk.Intersplunk.outputResults(
            splunk.Intersplunk.generateErrorResults(str(e)))
    except Exception as e:
        splunk.Intersplunk.outputResults(
            splunk.Intersplunk.generateErrorResults(
                str(e) + ": " + traceback.format_exc()))
def get(self):
    """Proxy a buy request for the item in the 'name' query argument and echo the result."""
    item = self.get_argument('name', '')
    catalog_serv = utils.get_service('buy')
    resp = urllib.request.urlopen(
        'http://%s:%s/buy?name=%s' % (catalog_serv + (item,)))
    payload = resp.read().decode()
    resp.close()
    self.write(str(json.loads(payload)))
    self.write('<a href="/">back</a><br />')
def delete_by_email_sender(email_sender):
    """Delete every message from *email_sender* in the current user's ("me") mailbox."""
    user_id = "me"
    service = utils.get_service()
    for msg in list_messages_by_email_sender(service, email_sender, user_id):
        delete_message_by_id(service, user_id, msg['id'], email_sender)
def main():
    from splunklib.searchcommands import csv as splunkcsv
    del splunkcsv
    import splunk.Intersplunk
    from utils import get_service, read_fieldnames_from_command_input
    try:
        # 'getinfo' handshake: report execution characteristics back to Splunk.
        (isgetinfo, sys.argv) = splunk.Intersplunk.isGetInfo(sys.argv)
        if isgetinfo:
            splunk.Intersplunk.outputInfo(
                streaming=False,  # search-head only
                generating=False,
                retevs=False,
                reqsop=False,
                preop=None,
                timeorder=True,
                clear_req_fields=False,
                req_fields=None)
        if len(sys.argv) < 2:
            raise Exception("Missing actual R script parameter")
        script_arg = sys.argv[1]
        # Copy stdin into a seekable buffer: one pass for field names, one for events.
        stream = StringIO()
        shutil.copyfileobj(sys.stdin, stream)
        stream.seek(0)
        fieldnames = read_fieldnames_from_command_input(input_buf=stream)
        stream.seek(0)
        settings = {}
        incoming = splunk.Intersplunk.readResults(stream, settings)
        # Connect to Splunk via the SDK, run R, and emit the resulting table.
        service = get_service(settings['infoPath'])
        header, rows = r(service, incoming, script_arg, fieldnames=fieldnames)
        splunk.Intersplunk.outputResults(rows, fields=header)
    except errors.Error as e:
        splunk.Intersplunk.outputResults(
            splunk.Intersplunk.generateErrorResults(str(e)))
    except Exception as e:
        splunk.Intersplunk.outputResults(
            splunk.Intersplunk.generateErrorResults(
                str(e) + ": " + traceback.format_exc()))
def add_inventory(name, count):
    """Persist an inventory record {name, count}; no-op when the mongo service is absent."""
    endpoint = utils.get_service('inventory_mongo')
    if not endpoint:
        return
    mongo_url = 'mongodb://%s:%s/' % endpoint
    logger.debug(mongo_url)
    conn = pymongo.MongoClient(mongo_url)
    # NOTE(review): Collection.save is removed in pymongo 4 — confirm driver version.
    conn.inventoryDB.inventory.save({'name': name, 'count': count})
def get(self, client_id, service_id):
    """Return the encoded service record for *service_id* after token verification."""
    token = request.headers.get('authorization')
    status = auth.verify(client_id, token)
    if status != 200:
        return status
    result = utils.get_service(client_id, service_id)
    if result.count() == 0:
        return en_us.SERVICE_NOT_FOUND
    return utils.encoder(result[0])
def save_catalog(name):
    """Store a catalog entry {name}; no-op when the catalog mongo service is absent."""
    endpoint = utils.get_service('catalog_mongo')
    if not endpoint:
        return
    mongo_url = 'mongodb://%s:%s/' % endpoint
    logger.debug(mongo_url)
    conn = pymongo.MongoClient(mongo_url)
    # NOTE(review): Collection.save is removed in pymongo 4 — confirm driver version.
    conn.catalogDB.catalog.save({'name': name})
def mysql_client(c, host, name):
    """Open an interactive mysql root shell inside container *name* on *host*."""
    ck, cv = utils.get_service(env.services, name)
    # The root password is read from the service's "KEY=VALUE" environment list.
    passwd = ""
    for entry in cv["environments"]:
        if entry.find("MYSQL_ROOT_PASSWORD") > -1:
            passwd = entry.split("=")[1]
    docker_cmd = 'docker exec -it {} mysql -uroot -p"{}"'.format(name, passwd)
    remote_cmd = "ssh -o StrictHostKeyChecking=no {}@{} -t {}".format(
        env.SKP_USER, env.hosts[host]["ipv4"], docker_cmd)
    res = c.run(remote_cmd, pty=True)
def pgsql_client(c, host, name):
    """Open an interactive psql shell (as postgres) inside container *name* on *host*."""
    ck, cv = utils.get_service(env.services, name)
    # The root password is read from the service's "KEY=VALUE" environment list.
    passwd = ""
    for entry in cv["environments"]:
        if entry.find("PGSQL_ROOT_PASSWORD") > -1:
            passwd = entry.split("=")[1]
    docker_cmd = "docker exec -it {} \"/bin/bash -c 'PGPASSWORD={} psql -U postgres'\"".format(
        name, passwd)
    remote_cmd = "ssh -o StrictHostKeyChecking=no {}@{} -t {}".format(
        env.SKP_USER, env.hosts[host]["ipv4"], docker_cmd)
    res = c.run(remote_cmd, pty=True)
def rm(c, name):
    """Force-remove the docker container(s) backing service *name* on all of its hosts."""
    sk, sv = utils.get_service(env.services, name)
    hosts = utils.get_hosts(env.hosts, sv["label"])
    if sv["label"] != "zookeeper":
        _docker_rm(c, hosts, sk)
        return
    # Zookeeper containers carry a per-host numeric suffix: <name>-1, <name>-2, ...
    zid = 0
    for machine, _info in hosts.iteritems():
        zid += 1
        rm_cmd = "docker rm -f {}-{}".format(sk, zid)
        utils.run_without_exit(
            c, "docker-machine ssh {} {}".format(machine, rm_cmd))
def mysql_create_airflow_db(c, host, name):
    """(Re)create the airflow database and user on a remote mysql container.

    Args:
        c: runner with a .run(cmd, pty=...) method.
        host: key into env.hosts giving the target machine.
        name: "<mysql-service>:<airflow-service>" pair; the airflow service's
            environment supplies AIRFLOW_DB / AIRFLOW_DB_USER /
            AIRFLOW_DB_PASSWORD, the mysql service's supplies
            MYSQL_ROOT_PASSWORD.
    """
    mysql_name = name.split(":")[0]
    airflow_name = name.split(":")[1]
    ck, cv = utils.get_service(env.services, airflow_name)
    db = ""
    db_user = ""
    db_pw = ""
    for e in cv["environments"]:
        # Entries look like "KEY=VALUE"; spaces are stripped before matching.
        if e.replace(" ", "").find("AIRFLOW_DB=") > -1:
            db = e.split("=")[1]
        # FIX: the next two branches were corrupted (credential-scrubbed to
        # '"******"') in the source; reconstructed to mirror the intact
        # AIRFLOW_DB branch above.
        if e.replace(" ", "").find("AIRFLOW_DB_USER=") > -1:
            db_user = e.split("=")[1]
        if e.replace(" ", "").find("AIRFLOW_DB_PASSWORD=") > -1:
            db_pw = e.split("=")[1]
    ck, cv = utils.get_service(env.services, mysql_name)
    mysql_pw = ""
    for e in cv["environments"]:
        if e.find("MYSQL_ROOT_PASSWORD") > -1:
            mysql_pw = e.split("=")[1]
    # Drop-and-recreate keeps the operation idempotent.
    queries = [
        "DROP DATABASE IF EXISTS {};".format(db),
        "DROP USER IF EXISTS {};".format(db_user),
        "CREATE DATABASE {};".format(db),
        "CREATE USER IF NOT EXISTS '{}'@'%' IDENTIFIED BY '{}';".format(
            db_user, db_pw),
        "GRANT ALL PRIVILEGES ON {}.* TO '{}'@'%';".format(db, db_user),
        "FLUSH PRIVILEGES;",
    ]
    for query in queries:
        # Escaped quotes survive the ssh -> docker exec -> mysql quoting layers.
        sql_cmd = 'mysql -uroot -p\\"{}\\" -e\\"{}\\"'.format(mysql_pw, query)
        cmd = 'docker exec -it {} "{}"'.format(mysql_name, sql_cmd)
        rcmd = "ssh -o StrictHostKeyChecking=no {}@{} -t {}".format(
            env.SKP_USER, env.hosts[host]["ipv4"], cmd)
        res = c.run(rcmd, pty=True)
def get(self):
    """Read the current inventory count for the requested item and post it back decremented by one."""
    item = self.get_argument('name', '')
    catalog_serv = utils.get_service('inventory')
    resp = urllib.request.urlopen(
        'http://%s:%s/inventory?name=%s' % (catalog_serv + (item, )))
    record = json.loads(resp.read().decode())
    resp.close()
    body = json.dumps({}).encode('utf8')
    update = urllib.request.Request(
        'http://%s:%s/inventory?name=%s&count=%s'
        % (catalog_serv + (item, record['count'] - 1)),
        data=body,
        headers={'content-type': 'application/json'})
    urllib.request.urlopen(update).close()
    self.write('{"status":"success"}')
def pgsql_create_airflow_db(c, host, name):
    """(Re)create the airflow database and user on a remote postgres container.

    Args:
        c: runner with a .run(cmd, pty=...) method.
        host: key into env.hosts giving the target machine.
        name: "<pgsql-service>:<airflow-service>" pair; the airflow service's
            environment supplies AIRFLOW_DB / AIRFLOW_DB_USER /
            AIRFLOW_DB_PASSWORD, the pgsql service's supplies
            PGSQL_ROOT_PASSWORD.
    """
    pgsql_name = name.split(":")[0]
    airflow_name = name.split(":")[1]
    ck, cv = utils.get_service(env.services, airflow_name)
    db = ""
    db_user = ""
    db_pw = ""
    for e in cv["environments"]:
        # Entries look like "KEY=VALUE"; spaces are stripped before matching.
        if e.replace(" ", "").find("AIRFLOW_DB=") > -1:
            db = e.split("=")[1]
        # FIX: the next two branches were corrupted (credential-scrubbed to
        # '"******"') in the source; reconstructed to mirror the intact
        # AIRFLOW_DB branch above.
        if e.replace(" ", "").find("AIRFLOW_DB_USER=") > -1:
            db_user = e.split("=")[1]
        if e.replace(" ", "").find("AIRFLOW_DB_PASSWORD=") > -1:
            db_pw = e.split("=")[1]
    ck, cv = utils.get_service(env.services, pgsql_name)
    pgsql_pw = ""
    for e in cv["environments"]:
        if e.find("PGSQL_ROOT_PASSWORD") > -1:
            pgsql_pw = e.split("=")[1]
    # Drop-and-recreate keeps the operation idempotent.
    queries = [
        "DROP DATABASE IF EXISTS {};".format(db),
        "DROP USER IF EXISTS {};".format(db_user),
        "CREATE DATABASE {};".format(db),
        "CREATE USER {} PASSWORD '{}';".format(db_user, db_pw),
        # FIX: the template has a single placeholder but was given (db, db_user),
        # so the grant went to the database name instead of the new user.
        "GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO {};".format(
            db_user),
    ]
    for query in queries:
        # Escaped quotes survive the ssh -> docker exec -> psql quoting layers.
        sql_cmd = 'psql -U postgres -c\\"{}\\"'.format(query)
        cmd = 'docker exec -it {} "{}"'.format(pgsql_name, sql_cmd)
        rcmd = "ssh -o StrictHostKeyChecking=no {}@{} -t {}".format(
            env.SKP_USER, env.hosts[host]["ipv4"], cmd)
        res = c.run(rcmd, pty=True)
def get_inventory(name):
    """Return the inventory document for *name* without its mongo _id, or None when absent."""
    endpoint = utils.get_service('inventory_mongo')
    if not endpoint:
        return
    mongo_url = 'mongodb://%s:%s/' % endpoint
    logger.debug(mongo_url)
    conn = pymongo.MongoClient(mongo_url)
    doc = conn.inventoryDB.inventory.find_one({'name': name})
    if not doc:
        return doc
    doc.pop('_id')
    return doc
def get_catalog():
    """Return all catalog documents (stripped of mongo _id), or None when the service is absent."""
    endpoint = utils.get_service('catalog_mongo')
    if not endpoint:
        return
    mongo_url = 'mongodb://%s:%s/' % endpoint
    logger.debug(mongo_url)
    conn = pymongo.MongoClient(mongo_url)
    catalogs = []
    for doc in conn.catalogDB.catalog.find():
        doc.pop('_id')
        catalogs.append(doc)
    return catalogs
def main():
    from splunklib.searchcommands import csv as splunkcsv
    del splunkcsv
    import splunk.Intersplunk
    from utils import get_service
    try:
        # 'getinfo' handshake: report execution characteristics back to Splunk.
        isgetinfo, sys.argv = splunk.Intersplunk.isGetInfo(sys.argv)
        if isgetinfo:
            splunk.Intersplunk.outputInfo(
                streaming=False,  # because it only runs on a search head
                generating=False,
                retevs=False,
                reqsop=False,
                preop=None,
                timeorder=True,
                clear_req_fields=False,
                req_fields=None)
        if len(sys.argv) != 1:
            raise Exception("No parameter allowed")
        # Events are consumed only for their settings; the command takes no input fields.
        settings = {}
        splunk.Intersplunk.readResults(sys.stdin, settings)
        service = get_service(settings['infoPath'])
        r_stats(service)
        splunk.Intersplunk.outputResults([])
    except errors.Error as e:
        splunk.Intersplunk.outputResults(
            splunk.Intersplunk.generateErrorResults(str(e)))
    except Exception as e:
        splunk.Intersplunk.outputResults(
            splunk.Intersplunk.generateErrorResults(
                str(e) + ": " + traceback.format_exc()))
def main():
    from splunklib.searchcommands import csv as splunkcsv
    del splunkcsv
    import splunk.Intersplunk
    from utils import get_service
    try:
        # Answer Splunk's 'getinfo' probe about how this command executes.
        (isgetinfo, sys.argv) = splunk.Intersplunk.isGetInfo(sys.argv)
        if isgetinfo:
            splunk.Intersplunk.outputInfo(
                streaming=False,  # search-head only
                generating=False,
                retevs=False,
                reqsop=False,
                preop=None,
                timeorder=True,
                clear_req_fields=False,
                req_fields=None)
        if len(sys.argv) != 1:
            raise Exception("No parameter allowed")
        # Drain stdin purely to populate `settings`; no event fields are used.
        settings = {}
        splunk.Intersplunk.readResults(sys.stdin, settings)
        service = get_service(settings['infoPath'])
        r_stats(service)
        splunk.Intersplunk.outputResults([])
    except errors.Error as e:
        splunk.Intersplunk.outputResults(
            splunk.Intersplunk.generateErrorResults(str(e)))
    except Exception as e:
        splunk.Intersplunk.outputResults(
            splunk.Intersplunk.generateErrorResults(
                str(e) + ": " + traceback.format_exc()))
def post(self, client_id, service_id):
    """Publish a restart command for *service_id* after verifying the request token."""
    token = request.headers.get('authorization')
    status = auth.verify(client_id, token)
    if status != 200:
        return status
    svc = utils.get_service(client_id, service_id)
    if svc.count() == 0:
        return en_us.NOT_FOUND
    payload = json.dumps({
        "serviceid": service_id,
        "content": utils.encoder(svc)[0]['restart_command'],
    })
    pub(payload.encode('utf-8'))
    return "", 204
def airflow_init_db(c, host, name):
    """Recreate the airflow container on *host* and launch its DB init script."""
    ck, cv = utils.get_service(env.services, name)
    # Remove any previous container of the same name (best-effort).
    utils.run_without_exit(
        c,
        "docker-machine ssh {} {}".format(host, "docker rm -f {} ".format(name)))
    cmd = "{} run -i -t -d --privileged --restart=always --name {} ".format(
        cv["docker"], ck)
    cmd += "--network {} ".format(" --network ".join(cv["networks"]))
    cmd += "-p {} ".format(" -p ".join(cv["ports"]))
    cmd += "-v {} ".format(os.path.expandvars(" -v ".join(cv["volumes"])))
    cmd += "-e {} ".format(os.path.expandvars(" -e ".join(cv["environments"])))
    cmd += "{} {}".format(
        cv["image"],
        os.path.expandvars("$SKP_SHOME/volume/bin/init_airflow.sh"))
    rcmd = "ssh -o StrictHostKeyChecking=no {}@{} -t {}".format(
        env.SKP_USER, env.hosts[host]["ipv4"], cmd)
    res = c.run(rcmd, pty=True)
def run(c, name):
    """Start the docker container(s) for service *name*, dispatching on its label.

    Most labels follow the same recipe: remove any old container, build a
    `docker run` command from the service definition, and execute it over
    docker-machine ssh on every host carrying the label. Zookeeper and kafka
    need per-host ids and generated config, so they get dedicated branches.
    """
    sk, sv = utils.get_service(env.services, name)
    hosts = utils.get_hosts(env.hosts, sv["label"])
    cmd = ""
    if sv["label"] == "registry":
        _docker_rm(c, hosts, sk)
        cmd = "docker run -i -t -d --privileged --restart=always --name {} ".format(
            sk)
        cmd += _get_opts(sv)
        cmd += "{}".format(sv["image"])
        _docker_ssh(c, hosts, cmd)
    elif sv["label"] == "jupyter":
        _docker_rm(c, hosts, sk)
        # These services name their own docker binary (sv["docker"]) and a start cmd.
        cmd = "{} run -i -t -d --privileged --restart=always --name {} ".format(
            sv["docker"], sk)
        cmd += _get_opts(sv)
        cmd += "{} {}".format(sv["image"], os.path.expandvars(sv["cmd"]))
        _docker_ssh(c, hosts, cmd)
    elif sv["label"] == "spark-master":
        _docker_rm(c, hosts, sk)
        cmd = "{} run -i -t -d --privileged --restart=always --name {} ".format(
            sv["docker"], sk)
        cmd += _get_opts(sv)
        cmd += "{} {}".format(sv["image"], os.path.expandvars(sv["cmd"]))
        _docker_ssh(c, hosts, cmd)
    elif sv["label"] == "spark-worker":
        _docker_rm(c, hosts, sk)
        cmd = "{} run -i -t -d --privileged --restart=always --name {} ".format(
            sv["docker"], sk)
        cmd += _get_opts(sv)
        cmd += "{} {}".format(sv["image"], os.path.expandvars(sv["cmd"]))
        _docker_ssh(c, hosts, cmd)
    elif sv["label"] == "mysql":
        _docker_rm(c, hosts, sk)
        cmd = "docker run -i -t -d --privileged --restart=always --name {} ".format(
            sk)
        cmd += _get_opts(sv)
        cmd += "{} --default-authentication-plugin=mysql_native_password ".format(
            sv["image"])
        _docker_ssh(c, hosts, cmd)
    elif sv["label"] == "pgsql":
        _docker_rm(c, hosts, sk)
        cmd = "docker run -i -t -d --privileged --restart=always --name {} ".format(
            sk)
        cmd += _get_opts(sv)
        cmd += "{} ".format(sv["image"])
        _docker_ssh(c, hosts, cmd)
    elif sv["label"] == "airflow":
        _docker_rm(c, hosts, sk)
        cmd = "{} run -i -t -d --privileged --restart=always --name {} ".format(
            sv["docker"], sk)
        cmd += _get_opts(sv)
        cmd += "{} {}".format(sv["image"], os.path.expandvars(sv["cmd"]))
        _docker_ssh(c, hosts, cmd)
    elif sv["label"] == "portainer":
        _docker_rm(c, hosts, sk)
        cmd = "docker run -i -t -d --privileged --restart=always --name {} ".format(
            sk)
        cmd += _get_opts(sv)
        cmd += "{} {}".format(sv["image"], os.path.expandvars(sv["cmd"]))
        _docker_ssh(c, hosts, cmd)
    elif sv["label"] == "portainer-agent":
        _docker_rm(c, hosts, sk)
        cmd = "docker run -i -t -d --privileged --restart=always --name {} ".format(
            sk)
        cmd += _get_opts(sv)
        cmd += "{}".format(sv["image"])
        _docker_ssh(c, hosts, cmd)
    elif sv["label"] == "zookeeper":
        # Zookeeper containers are numbered per host: <name>-1, <name>-2, ...
        zid = 0
        for k, v in hosts.iteritems():
            zid += 1
            cmd = "docker rm -f {}-{}".format(sk, zid)
            rcmd = "docker-machine ssh {} {}".format(k, cmd)
            utils.run_without_exit(c, rcmd)
        # Generate a shared zoo.cfg listing every ensemble member.
        with open('/tmp/zoo.cfg', 'w') as f:
            lines = [
                'tickTime=2000', 'dataDir=/opt/zookeeper/data',
                'clientPort=2181', 'initLimit=5', 'syncLimit=2'
            ]
            zid = 0
            for k, v in hosts.iteritems():
                zid += 1
                lines.append("server.{}={}-{}:2888:3888".format(zid, sk, zid))
            for line in lines:
                f.write("{}\n".format(line))
        # Ship config + per-host myid to each machine, then start its container.
        zid = 0
        for k, v in hosts.iteritems():
            zid += 1
            with open('/tmp/myid', 'w') as f:
                f.write("{}\n".format(zid))
            path_z = os.path.expandvars(sv["path"])
            path_conf = "{}/conf".format(path_z)
            cmd = "sudo mkdir -p {} ".format(path_conf)
            rcmd = "docker-machine ssh {} {}".format(k, cmd)
            utils.run_without_exit(c, rcmd)
            path_data = "{}/data".format(path_z)
            cmd = "sudo mkdir -p {} ".format(path_data)
            rcmd = "docker-machine ssh {} {}".format(k, cmd)
            utils.run_without_exit(c, rcmd)
            cmd = "docker-machine scp /tmp/zoo.cfg {}:/tmp/zoo.cfg ".format(k)
            utils.run_with_exit(c, cmd)
            cmd = "docker-machine ssh {} sudo cp /tmp/zoo.cfg {} ".format(
                k, path_conf)
            utils.run_with_exit(c, cmd)
            cmd = "docker-machine scp /tmp/myid {}:/tmp/myid ".format(k)
            utils.run_with_exit(c, cmd)
            cmd = "docker-machine ssh {} sudo cp /tmp/myid {} ".format(
                k, path_data)
            utils.run_with_exit(c, cmd)
            cmd = "docker run -i -t -d --privileged --restart=always --name {}-{} ".format(
                sk, zid)
            cmd += _get_opts(sv)
            cmd += "{}".format(sv["image"])
            rcmd = "docker-machine ssh {} {}".format(k, cmd)
            utils.run_without_exit(c, rcmd)
    elif sv["label"] == "kafka":
        _docker_rm(c, hosts, sk)
        # Build the zookeeper connect string from the ensemble's numbered names.
        zsk, zsv = utils.get_service(env.services, sv["zookeeper"])
        zhosts = utils.get_hosts(env.hosts, "zookeeper")
        zid = 0
        zconns = []
        for zk, zv in zhosts.iteritems():
            zid += 1
            zconns.append("{}-{}:2181".format(zsk, zid))
        zconn = ",".join(zconns)
        print("Zookeeper Conn: {}".format(zconn))
        bid = 0
        for k, v in hosts.iteritems():
            bid += 1
            cmd = "docker-machine ip {} ".format(k)
            res = c.run(cmd)
            ip = res.stdout.strip()
            cmd = "docker run -i -t -d --privileged --restart=always --name {} ".format(
                sk)
            cmd += _get_opts(sv)
            cmd += '-e KAFKA_BROKER_ID="{}" '.format(bid)
            cmd += '-e KAFKA_ADVERTISED_HOST_NAME="{}" '.format(k)
            cmd += '-e KAFKA_ZOOKEEPER_CONNECT="{}" '.format(zconn)
            cmd += '-e KAFKA_LISTENER_SECURITY_PROTOCOL_MAP="INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT" '
            cmd += '-e KAFKA_ADVERTISED_LISTENERS="INSIDE://:9092,OUTSIDE://{}:9094" '.format(
                k)
            cmd += '-e KAFKA_LISTENERS="INSIDE://:9092,OUTSIDE://:9094" '
            cmd += '-e KAFKA_INTER_BROKER_LISTENER_NAME="INSIDE" '
            cmd += '-e KAFKA_ADVERTISED_PORT="9092" '
            cmd += "{}".format(sv["image"])
            rcmd = "docker-machine ssh {} {}".format(k, cmd)
            utils.run_with_exit(c, rcmd)
    else:
        # BUG FIX: this branch referenced cv["label"], but no `cv` exists in
        # this function (the locals are sk/sv) — it raised NameError instead
        # of reporting the label. Also corrected the "Unkown" typo.
        print("Unknown Label: {}".format(sv["label"]))
        sys.exit(-1)