def run(self, hostname, kube_type, do_deploy, wait, timeout, testing,
        docker_options, ebs_volume, ls_device, verbose):
    if kube_type is None:
        kube_type_id = Kube.get_default_kube_type()
    else:
        # Keep the original name so the error message is meaningful even
        # after the lookup returns None.
        kube_type_name = kube_type
        kube_type = Kube.get_by_name(kube_type_name)
        if kube_type is None:
            raise InvalidCommand('Kube type with name `{0}` not '
                                 'found.'.format(kube_type_name))
        kube_type_id = kube_type.id

    options = None
    testing = testing or WITH_TESTING
    if docker_options is not None:
        options = {'DOCKER': docker_options}

    if get_maintenance():
        raise InvalidCommand(
            'Kuberdock is in maintenance mode. Operation canceled'
        )
    try:
        check_node_data({'hostname': hostname, 'kube_type': kube_type_id})
        if not isinstance(ls_device, (tuple, list)):
            ls_device = (ls_device,)
        res = create_node(None, hostname, kube_type_id, do_deploy, testing,
                          options=options, ls_devices=ls_device,
                          ebs_volume=ebs_volume)
        print(res.to_dict())
        if wait:
            wait_for_nodes([hostname, ], timeout, verbose)
    except Exception as e:
        raise InvalidCommand("Node management error: {0}".format(e))

def game(filename):
    """Import a YAML file of game data."""
    with open(filename, 'r') as game_file:
        contents = yaml.safe_load(game_file)

    world_name = contents['name']
    try:
        world = World.query.filter_by(name=world_name).one()
    except NoResultFound:
        world = World(name=world_name)
        db.session.add(world)

    for city_name, city_produces in contents['cities'].items():
        try:
            city = City.query.filter_by(name=city_name, world=world).one()
        except NoResultFound:
            city = City(name=city_name, world=world)
            db.session.add(city)
        for good_name in city_produces:
            try:
                good = Good.query.filter_by(name=good_name).one()
            except NoResultFound:
                good = Good(name=good_name)
                db.session.add(good)
            city.produces.append(good)

    for card_num, card_data in enumerate(contents['cards']):
        if 'number' in card_data:
            card_num = card_data['number']
        try:
            card = Card.query.filter_by(number=card_num, world=world).one()
        except NoResultFound:
            card = Card(number=card_num, world=world)
            db.session.add(card)
        card.event = card_data.get('event')
        for contract_data in card_data.get('contracts', []):
            try:
                good = Good.query.filter_by(name=contract_data[0]).one()
            except NoResultFound:
                raise InvalidCommand('Good {} does not exist'.format(
                    contract_data[0]))
            try:
                city = City.query.filter_by(name=contract_data[1],
                                            world=world).one()
            except NoResultFound:
                raise InvalidCommand('City {} does not exist'.format(
                    contract_data[1]))
            try:
                contract = Contract.query.filter_by(good=good, city=city).one()
            except NoResultFound:
                contract = Contract(good=good, city=city)
            contract.amount = contract_data[2]
            card.contracts.append(contract)

    db.session.commit()

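# The importer above expects a YAML document with a top-level `name`, a
# `cities` mapping of city name to the goods it produces, and a `cards` list
# whose entries may carry `number`, `event`, and `contracts` (each contract
# being a [good, city, amount] triple). A minimal sketch of such a file,
# with purely illustrative names and values:
import yaml

SAMPLE_GAME_YAML = """
name: Example World
cities:
  Venice: [glass, silk]
  Bruges: [cloth]
cards:
  - number: 1
    event: storm
    contracts:
      - [glass, Bruges, 3]   # good, city, amount
  - contracts:
      - [cloth, Venice, 2]
"""

contents = yaml.safe_load(SAMPLE_GAME_YAML)
assert set(contents) == {'name', 'cities', 'cards'}
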
def run(self, username, password):
    if User.query.filter_by(username=username).first() is not None:
        raise InvalidCommand("User with this username already exists")
    user = User(username, username.title(), password, True)
    user.add(user)
    print("Superuser `%s` created successfully" % username)

def run(self, username, password, rolename):
    try:
        role = Role.filter_by(rolename=rolename).one()
    except NoResultFound:
        raise InvalidCommand('Role with name `%s` not found' % rolename)
    if User.filter_by(username=username).first():
        raise InvalidCommand('User `%s` already exists' % username)
    if not password:
        password = generate_new_pass()
        print "New password: {}".format(password)
    u = User.create(username=username, password=password, role=role,
                    active=True, package_id=0)
    db.session.add(u)
    db.session.commit()

def run(self, hostname, ebs_volume, size, devices):
    if get_maintenance():
        raise InvalidCommand(
            'Kuberdock is in maintenance mode. Operation canceled'
        )
    if size is not None and size <= 0:
        raise InvalidCommand(
            'Invalid size value (must be > 0): {}'.format(size)
        )
    if devices:
        devices = devices.split(',')
    ok, message = extend_ls_volume(
        hostname, devices=devices, ebs_volume=ebs_volume, size=size
    )
    if not ok:
        raise InvalidCommand(u'Failed to extend LS: {}'.format(message))
    print 'Operation performed successfully'

def run(self, hostname):
    node = db.session.query(Node).filter(Node.hostname == hostname).first()
    if node is None:
        raise InvalidCommand(u'Node "{0}" not found'.format(hostname))
    PersistentDisk.get_by_node_id(node.id).delete(
        synchronize_session=False)
    delete_node(node=node, force=True)

def refresh_linkedin_info(email):
    """Refresh and display LinkedIn information for a user."""
    user = User.query_in_deployment().filter_by(email=email).one()
    if linkedin.retrieve_access_token(user) is None:
        raise InvalidCommand(
            'The user must first (re)connect to LinkedIn on the website.')
    linkedin.update_user_info(user)
    pprint.pprint(user.linkedin.user_info)

def __setup_globals(users_file):
    if users is not None:
        return
    if not os.path.exists(DATA_DIR):
        os.mkdir(DATA_DIR)
    if not os.path.exists(users_file):
        raise InvalidCommand("Please export NoI 1.0 users to %s." % users_file)
    set_users_from_json(json.load(open(users_file, 'r')))

def run(self, nodename, flagname, value, delete):
    node = Node.get_by_name(nodename)
    if not node:
        raise InvalidCommand(u'Node "{0}" not found'.format(nodename))
    if delete:
        NodeFlag.delete_by_name(node.id, flagname)
        print u'Node flag "{0}" was deleted'.format(flagname)
        return
    NodeFlag.save_flag(node.id, flagname, value)
    if flagname == NodeFlagNames.CEPH_INSTALLED:
        tasks.add_k8s_node_labels(
            node.hostname,
            {NODE_CEPH_AWARE_KUBERDOCK_LABEL: "True"}
        )
        check_namespace_exists(node.ip)
    print u'Node "{0}": flag "{1}" was set to "{2}"'.format(
        nodename, flagname, value)

def deploy_scheduled(console_out, deploy_flag, leniency, verbose, quiet,
                     *args, **kwargs):
    """Find scheduled distributions and deploy"""
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    elif quiet:
        logger = setup_command_logger(logging.ERROR)
    else:
        logger = setup_command_logger(logging.INFO)

    from splice.queries import get_scheduled_distributions, unschedule_distribution
    import requests

    dt = datetime.utcnow()
    distributions = get_scheduled_distributions(leniency, dt)
    logger.info("{0} - found {1} distributions".format(dt, len(distributions)))

    dist_data = []
    for dist in distributions:
        logger.info("fetching {0}".format(dist.url))
        r = requests.get(dist.url)
        if r.status_code == 200:
            dist_data.append((r.json(), dist.channel_id, dist.id))
        else:
            logger.error("FETCH_ERROR status_code:{0} url:{1}".format(
                r.status_code, dist.url))

    from splice.ingest import ingest_links, distribute, IngestError

    if deploy_flag:
        for rawdata, channel_id, dist_id in dist_data:
            try:
                new_data = ingest_links(rawdata, channel_id)
                if console_out:
                    print json.dumps(new_data, sort_keys=True, indent=2)
                distribute(new_data, channel_id, deploy_flag)
                unschedule_distribution(dist_id)
            except IngestError, e:
                raise InvalidCommand(e.message)
            except:
                # The body of this bare-except handler is truncated in the
                # source; re-raising is an assumption so that unexpected
                # errors are not silently swallowed.
                raise

def run(self, name, template, origin, no_validation):
    from kubedock.kapi.apps import PredefinedApp
    try:
        with open(template, 'r') as tf:
            template_data = tf.read()
    except IOError as err:
        raise InvalidCommand("Can not load template: %s" % err)
    if not no_validation:
        PredefinedApp.validate(template_data)
    result = PredefinedApp.create(
        name=name,
        template=template_data,
        origin=origin or 'kuberdock',
    )
    print(result)

def run(self, subnet, exclude, include, node=None):
    if exclude and include:
        raise InvalidCommand('Can\'t specify both -e and -i')
    if include:
        to_include = ippool.IpAddrPool().parse_autoblock(include)
        net = IPv4Network(unicode(subnet))
        hosts = {str(i) for i in net.hosts()}
        # Do not include network and broadcast IP address
        # TODO: Fix according to AC-4044
        hosts.add(str(net.network_address))
        hosts.add(str(net.broadcast_address))
        exclude = ','.join(hosts - to_include)
    ippool.IpAddrPool().create({
        'network': subnet.decode(),
        'autoblock': exclude,
        'node': node
    })

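# Worked example of the exclusion set built above, using only IPv4Network
# (stdlib `ipaddress` on Python 3, the `ipaddress` backport package on
# Python 2); the subnet and the "included" address are illustrative.
from ipaddress import IPv4Network

net = IPv4Network(u'192.168.0.0/30')
hosts = {str(i) for i in net.hosts()}   # {'192.168.0.1', '192.168.0.2'}
hosts.add(str(net.network_address))     # '192.168.0.0' is never assignable
hosts.add(str(net.broadcast_address))   # neither is '192.168.0.3'

to_include = {'192.168.0.1'}            # addresses the caller wants usable
print(','.join(sorted(hosts - to_include)))
# -> 192.168.0.0,192.168.0.2,192.168.0.3  (the resulting autoblock string)
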
def load_links(in_file, country_code, channel_id, out_path, console_out,
               verbose, old_format, *args, **kwargs):
    """Load a set of links in the data warehouse"""
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    else:
        logger = setup_command_logger(logging.INFO)

    rawdata = None
    with open(in_file, 'r') as f:
        rawdata = json.load(f)

    from splice.ingest import ingest_links, IngestError

    try:
        locale = rawdata.keys()[0]
        country_locale_str = "/".join([country_code, locale])
        new_data = ingest_links({country_locale_str: rawdata[locale]},
                                channel_id)

        if old_format:
            new_data = new_data[new_data.keys()[0]]

        if console_out:
            print json.dumps(new_data, sort_keys=True, indent=2)

        if out_path:
            directory, _ = os.path.split(out_path)
            if not os.path.exists(directory):
                os.makedirs(directory)
            with open(out_path, "w") as f:
                json.dump(new_data, f, sort_keys=True, indent=2)
                logger.info("wrote {0}".format(out_path))
    except IngestError, e:
        raise InvalidCommand(e.message)

def ingest_tiles(in_file, channel_id, out_path, console_out, deploy_flag,
                 verbose, *args, **kwargs):
    """
    Load a set of links for all country/locale combinations into data
    warehouse and optionally deploy
    """
    if verbose:
        logger = setup_command_logger(logging.DEBUG)
    else:
        logger = setup_command_logger(logging.INFO)

    rawdata = None
    with open(in_file, 'r') as f:
        rawdata = json.load(f)

    from splice.ingest import ingest_links, distribute, IngestError

    try:
        new_data = ingest_links(rawdata, channel_id)

        if console_out:
            print json.dumps(new_data, sort_keys=True, indent=2)

        if out_path:
            directory, _ = os.path.split(out_path)
            if not os.path.exists(directory):
                os.makedirs(directory)
            with open(out_path, "w") as f:
                json.dump(new_data, f, sort_keys=True, indent=2)
                logger.info("wrote {0}".format(out_path))

        if deploy_flag:
            logger.info("Distributing AND Deploying data")
        else:
            logger.info("Distributing data (NO deploy)")

        distribute(new_data, channel_id, deploy_flag)
    except IngestError, e:
        raise InvalidCommand(e.message)

def run(self, nodename):
    node = Node.get_by_name(nodename)
    if not node:
        raise InvalidCommand(u'Node "{0}" not found'.format(nodename))
    print json.dumps(node.to_dict())

def handle_import(self, options):
    """
    Gets the posts from either the provided URL or the path if it is
    local.
    """
    url = options.get("url")
    flush = options.get('flush')
    if flush:
        from backend.blog.models import BlogCategory, BlogComment, BlogPost
        BlogComment.query.delete()
        BlogPost.query.delete()
        BlogCategory.query.delete()
    if url is None:
        raise InvalidCommand("Usage is import_wordpress ")
    try:
        import feedparser
    except ImportError:
        raise InvalidCommand("Could not import the feedparser library.")
    feed = feedparser.parse(url)

    # We use the minidom parser as well because feedparser won't
    # interpret WXR comments correctly and ends up munging them.
    # xml.dom.minidom is used simply to pull the comments when we
    # get to them.
    xml = parse(url)
    xmlitems = xml.getElementsByTagName("item")

    for (i, entry) in enumerate(feed["entries"]):
        # Get a pointer to the right position in the minidom as well.
        xmlitem = xmlitems[i]
        excerpt = getattr(entry, 'excerpt_encoded')
        content = linebreaks(self.wp_caption(entry.content[0]["value"]))

        # Get the time struct of the published date if possible and
        # the updated date if we can't.
        pub_date = getattr(entry, "published_parsed", entry.updated_parsed)
        if pub_date:
            pub_date = datetime.fromtimestamp(mktime(pub_date))
            pub_date -= timedelta(seconds=timezone)

        # Tags and categories are all under "tags" marked with a scheme.
        terms = defaultdict(set)
        for item in getattr(entry, "tags", []):
            terms[item.scheme].add(item.term)

        if entry.wp_post_type == "post":
            post = self.add_post(title=entry.title, content=content,
                                 pub_date=pub_date, tags=terms["post_tag"],
                                 categories=terms["category"],
                                 excerpt=excerpt, old_url=entry.id)

            # Get the comments from the xml doc.
            for c in xmlitem.getElementsByTagName("wp:comment"):
                name = self.get_text(c, "author", c.CDATA_SECTION_NODE)
                email = self.get_text(c, "author_email", c.TEXT_NODE)
                url = self.get_text(c, "author_url", c.TEXT_NODE)
                body = self.get_text(c, "content", c.CDATA_SECTION_NODE)
                pub_date = self.get_text(c, "date_gmt", c.TEXT_NODE)
                fmt = "%Y-%m-%d %H:%M:%S"
                pub_date = datetime.strptime(pub_date, fmt)
                pub_date -= timedelta(seconds=timezone)
                self.add_comment(post=post, name=name, email=email,
                                 body=body, website=url, pub_date=pub_date)

def run(self, pv_id, new_size):
    try:
        result = change_pv_size(pv_id, new_size)
    except APIError as err:
        raise InvalidCommand(str(err))
    print json.dumps(result)

def run(self, hostname):
    try:
        result = get_block_device_list(hostname)
    except APIError as err:
        raise InvalidCommand(str(err))
    print json.dumps(result, indent=2)

def run(self, hostname):
    try:
        result = get_ls_info(hostname)
    except APIError as err:
        raise InvalidCommand(str(err))
    print json.dumps(result)

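
# The run() methods and InvalidCommand above match the Flask-Script command
# pattern, where a command aborts by raising InvalidCommand and the manager
# reports the message. A minimal sketch, assuming Flask-Script, of how such
# a command is registered; the command name and option are hypothetical.
from flask import Flask
from flask_script import Command, Manager, Option
from flask_script.commands import InvalidCommand


class NodeInfo(Command):
    """Print a node name, or fail the way the commands above do."""

    option_list = (
        Option('--nodename', dest='nodename', required=True),
    )

    def run(self, nodename):
        if not nodename.strip():
            # Manager.run() catches InvalidCommand, prints the message and
            # exits with a non-zero status.
            raise InvalidCommand(u'Node "{0}" not found'.format(nodename))
        print(nodename)


app = Flask(__name__)
manager = Manager(app)
manager.add_command('node-info', NodeInfo())

if __name__ == '__main__':
    manager.run()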