def home(context, request):
    """Home view context: websockets flag, update-error interval, the
    debug-mode timeout (in seconds) and the printer type constants."""
    registry_settings = get_current_registry().settings
    timeout_hours = get_setting('debug_mode_timeout', registry_settings,
                                request.db)
    if timeout_hours is None:
        logger.warning(
            'Please define a debug_mode_timeout in the gecoscc.ini file!')
        timeout_hours = 24  # 24 hours
    # The setting is expressed in hours; templates expect seconds
    timeout_seconds = int(timeout_hours) * 60 * 60
    return {
        'websockets_enabled': json.dumps(is_websockets_enabled()),
        'update_error_interval': get_setting('update_error_interval',
                                             registry_settings, request.db),
        'debug_mode_timeout': timeout_seconds,
        'printer_type': PRINTER_TYPE,
        'printer_conn_type': PRINTER_CONN_TYPE,
        'printer_oppolicy_type': PRINTER_OPPOLICY_TYPE
    }
def get(self):
    """Return the firstboot configuration for the current GCC user.

    Builds the Chef client block, the GCC link block and the
    authentication block (LDAP or Active Directory) that the
    workstation first-boot process consumes.
    """
    user = self.request.user
    variables = self.parse_item(user.get('variables', {}))
    settings = get_current_registry().settings
    # Chef server connection data for the client
    chef = {}
    chef['chef_server_uri'] = settings.get('chef.url')
    chef['chef_link'] = True
    chef['chef_validation'] = get_pem_for_username(settings,
                                                   user['username'],
                                                   'chef_client.pem')
    # GCC (this server) connection data
    gcc = {}
    gcc['gcc_link'] = True
    gcc['uri_gcc'] = self.request.host_url
    gcc['gcc_username'] = self.request.user['username']
    # Default to LDAP when the admin set no explicit auth type
    auth_type = variables.get('auth_type', 'LDAP')
    if auth_type == 'LDAP':
        auth_properties = variables['auth_ldap']
    else:
        # NOTE(review): 'specific_conf' is compared against the string
        # 'false' — presumably stored as text by the admin UI; confirm.
        if variables['specific_conf'] == 'false':
            auth_properties = {
                'specific_conf': False,
                'ad_properties': variables['auth_ad']
            }
        else:
            # Specific AD configuration: ship every uploaded config file
            # base64-encoded (Python 2 str codec) to the client
            schema = self.schema_detail()
            conf_files = schema.get_config_files('r', user['username'])
            auth_properties = {'specific_conf': True}
            ad_properties = {}
            for conf_file in conf_files:
                # Key derived from the file name, dots replaced so it is
                # a safe identifier-like key
                variable_name = conf_file.name.split(os.sep)[-1].replace(
                    '.', '_')
                ad_properties[variable_name] = conf_file.read().encode(
                    'base64')
            auth_properties['ad_properties'] = ad_properties
    auth = {'auth_properties': auth_properties, 'auth_type': auth_type}
    return {
        'version': get_setting('firstboot_api.version', settings,
                               self.request.db),
        'organization': get_setting('firstboot_api.organization_name',
                                    settings, self.request.db),
        'notes': get_setting('firstboot_api.comments', settings,
                             self.request.db),
        'gem_repo': get_setting('firstboot_api.gem_repo', settings,
                                self.request.db),
        'gem_repos_by_admin': variables.get('gem_repos', []),
        'uri_ntp': variables.get('uri_ntp', ''),
        'auth': auth,
        'chef': chef,
        'gcc': gcc
    }
def command(self): urls = json.loads(get_setting('printers.urls', self.settings, self.db)) collection = self.db.printer_models printer_model = PrinterModel() models = [] num_imported = 0 print '\n\nDownloading printers lists...' for url in urls: try: res = requests.get(url) except requests.exceptions.RequestException: print 'Error downloading file:', url continue temp = tempfile.NamedTemporaryFile(suffix='.tar.gz') temp.write(StringIO(res.content).read()) temp.flush() tar = tarfile.open(temp.name) members = tar.getmembers() for member in members: path = member.name.split('/') model = '' try: file_name = path[-1] ext = file_name.split('.')[-1] except IndexError: continue if ext == 'xml' and (path[-2] == PRINTER or path[-2] == DRIVER): xml_file = tar.extractfile(member) manufacturer, model = self.parse_model_xml(xml_file.read()) if model: models.append(model) new_printer = printer_model.serialize({'manufacturer': manufacturer, 'model': model}) db_printer = collection.find_one({'model': model}) if not db_printer: collection.insert(new_printer) num_imported += 1 print "Imported printer: %s" % model temp.close() print '\n\nImported %d printers' % num_imported # Adding 'Other' model for every manufacturer models.append('Other') # Later, don't remove for m in collection.distinct('manufacturer'): other = printer_model.serialize({'manufacturer': m, 'model': 'Other'}) collection.update({'manufacturer': m},{'$set': other}) removed = collection.remove({'model': {'$nin': models}}) print 'Removed %d printers.\n\n' % removed['n']
def home(context, request):
    """Minimal home view context: websockets flag and the update-error
    polling interval."""
    registry_settings = get_current_registry().settings
    ws_flag = json.dumps(is_websockets_enabled())
    interval = get_setting('update_error_interval', registry_settings,
                           request.db)
    return {
        'websockets_enabled': ws_flag,
        'update_error_interval': interval
    }
def home(context, request):
    """Home view context: websockets flag, update-error interval and the
    printer type constants used by the printers policy UI."""
    registry_settings = get_current_registry().settings
    context_data = {
        'websockets_enabled': json.dumps(is_websockets_enabled()),
        'update_error_interval': get_setting('update_error_interval',
                                             registry_settings, request.db)
    }
    # Constant option lists for the printer policy form
    context_data['printer_type'] = PRINTER_TYPE
    context_data['printer_conn_type'] = PRINTER_CONN_TYPE
    context_data['printer_oppolicy_type'] = PRINTER_OPPOLICY_TYPE
    return context_data
def command(self): urls = json.loads(get_setting('printers.urls', self.settings, self.db)) collection = self.db.printer_models printer_model = PrinterModel() models = [] num_imported = 0 print '\n\nDownloading printers lists...' for url in urls: try: res = requests.get(url) except requests.exceptions.RequestException: print 'Error downloading file:', url continue temp = tempfile.NamedTemporaryFile(suffix='.tar.gz') temp.write(StringIO(res.content).read()) temp.flush() tar = tarfile.open(temp.name) members = tar.getmembers() for member in members: path = member.name.split('/') model = '' try: file_name = path[-1] ext = file_name.split('.')[-1] except IndexError: continue if ext == 'xml' and path[-2] == PRINTER: xml_file = tar.extractfile(member) manufacturer, model = self.parse_model_xml(xml_file.read()) if model: models.append(model) new_printer = printer_model.serialize({'manufacturer': manufacturer, 'model': model}) db_printer = collection.find_one({'model': model}) if not db_printer: collection.insert(new_printer) num_imported += 1 print "Imported printer: %s" % model temp.close() print '\n\nImported %d printers' % num_imported removed = collection.remove({'model': {'$nin': models}}) print 'Removed %d printers.\n\n' % removed['n']
def get(self):
    """Return the firstboot configuration for the current GCC user.

    Builds the Chef client block, the GCC link block and the
    authentication block (LDAP or Active Directory) that the
    workstation first-boot process consumes.
    """
    user = self.request.user
    variables = self.parse_item(user.get("variables", {}))
    settings = get_current_registry().settings
    # Chef server connection data for the client
    chef = {}
    chef["chef_server_uri"] = settings.get("chef.url")
    chef["chef_link"] = True
    chef["chef_validation"] = get_pem_for_username(settings,
                                                   user["username"],
                                                   "chef_client.pem")
    # GCC (this server) connection data
    gcc = {}
    gcc["gcc_link"] = True
    gcc["uri_gcc"] = self.request.host_url
    gcc["gcc_username"] = self.request.user["username"]
    # Default to LDAP when the admin set no explicit auth type
    auth_type = variables.get("auth_type", "LDAP")
    if auth_type == "LDAP":
        auth_properties = variables["auth_ldap"]
    else:
        # NOTE(review): "specific_conf" is compared against the string
        # "false" — presumably stored as text by the admin UI; confirm.
        if variables["specific_conf"] == "false":
            auth_properties = {"specific_conf": False,
                               "ad_properties": variables["auth_ad"]}
        else:
            # Specific AD configuration: ship every uploaded config file
            # base64-encoded (Python 2 str codec) to the client
            schema = self.schema_detail()
            conf_files = schema.get_config_files("r", user["username"])
            auth_properties = {"specific_conf": True}
            ad_properties = {}
            for conf_file in conf_files:
                # Key derived from the file name, dots replaced so it is
                # a safe identifier-like key
                variable_name = conf_file.name.split(os.sep)[-1].replace(
                    ".", "_")
                ad_properties[variable_name] = conf_file.read().encode(
                    "base64")
            auth_properties["ad_properties"] = ad_properties
    auth = {"auth_properties": auth_properties, "auth_type": auth_type}
    return {
        "version": get_setting("firstboot_api.version", settings,
                               self.request.db),
        "organization": get_setting("firstboot_api.organization_name",
                                    settings, self.request.db),
        "notes": get_setting("firstboot_api.comments", settings,
                             self.request.db),
        "gem_repo": get_setting("firstboot_api.gem_repo", settings,
                                self.request.db),
        "uri_ntp": variables.get("uri_ntp", ""),
        "auth": auth,
        "chef": chef,
        "gcc": gcc,
    }
def command(self): packages = [] packages_urls = [] repositories = json.loads( get_setting('repositories', self.settings, self.db)) num_packages = 0 for repo in repositories: print '\n\n\nFetching: ', repo dists_url = repo + 'dists/' repo_packages = self.get_packages_urls(dists_url) packages_urls.extend(repo_packages) print '\n\n\nLooking for new packages...' for url in packages_urls: try: r = requests.get(url) except requests.exceptions.RequestException: print "Error downloading file: ", url continue packages_list = gzip.GzipFile(fileobj=StringIO(r.content), mode='rb') package_model = Package() package = {} try: for line in packages_list: try: key_value = self.parse_line(line) except IndexError: continue if key_value['key'] == PACKAGE_NAME_TOKEN: package['name'] = key_value['value'] packages.append(package['name']) new_package = package_model.serialize(package) db_package = self.db.packages.find_one( {'name': package['name']}) if not db_package: self.db.packages.insert(new_package) num_packages += 1 print "Imported package:", package['name'] except IOError: print "Error decompressing file:", url continue print '\n\nImported %d packages' % num_packages removed = self.db.packages.remove({'name': {'$nin': packages}}) print 'Removed %d packages.\n\n\n' % removed['n']
def command(self):
    """Sync the packages collection from the gzip-compressed Packages
    indexes of the configured Debian repositories."""
    packages = []
    packages_urls = []
    repositories = json.loads(get_setting("repositories", self.settings,
                                          self.db))
    num_packages = 0
    # Collect every Packages.gz URL across all repositories
    for repo in repositories:
        print "\n\n\nFetching: ", repo
        dists_url = repo + "dists/"
        repo_packages = self.get_packages_urls(dists_url)
        packages_urls.extend(repo_packages)
    print "\n\n\nLooking for new packages..."
    for url in packages_urls:
        try:
            r = requests.get(url)
        except requests.exceptions.RequestException:
            print "Error downloading file: ", url
            continue
        # The index is a gzip-compressed stream of RFC822-style stanzas
        packages_list = gzip.GzipFile(fileobj=StringIO(r.content),
                                      mode="rb")
        package_model = Package()
        package = {}
        try:
            for line in packages_list:
                try:
                    key_value = self.parse_line(line)
                except IndexError:
                    # Lines that are not key/value pairs are skipped
                    continue
                # Only the package name field is recorded by this importer
                if key_value["key"] == PACKAGE_NAME_TOKEN:
                    package["name"] = key_value["value"]
                    packages.append(package["name"])
                    new_package = package_model.serialize(package)
                    db_package = self.db.packages.find_one(
                        {"name": package["name"]})
                    if not db_package:
                        self.db.packages.insert(new_package)
                        num_packages += 1
                        print "Imported package:", package["name"]
        except IOError:
            print "Error decompressing file:", url
            continue
    print "\n\nImported %d packages" % num_packages
    # Drop packages no longer listed by any repository
    removed = self.db.packages.remove({"name": {"$nin": packages}})
    print "Removed %d packages.\n\n\n" % removed["n"]
def command(self): profiles = json.loads(get_setting('software_profiles', self.settings, self.db)) collection = self.db.software_profiles profile_model = SoftwareProfile() for new_profile in profiles: name = new_profile['name'] db_profile = collection.find_one({'name': name}) if not db_profile: collection.insert(new_profile) print "Imported profile: %s" % name elif new_profile['packages'] != db_profile['packages']: collection.update({'name': name}, new_profile) print "Updated profile: %s" % name
def create_setting(key):
    """Build a serialized Setting appstruct for *key*, taking its default
    value from the current registry settings.

    The setting type depends on the key: 'number' for the update error
    interval, 'URLs' for repository/printer URL lists, 'Profiles' for
    software profiles and 'string' for everything else.
    """
    default_settings = get_current_registry().settings
    # Note: the original built a Setting() instance and assigned its key
    # without ever using it; that dead code has been removed.
    if key == "update_error_interval":
        appstruct = {'key': key, 'type': 'number',
                     'value': default_settings.get('update_error_interval')}
    elif key in ("repositories", "printers.urls"):
        # Both keys hold URL lists and share identical handling
        appstruct = {'key': key, 'type': 'URLs',
                     'value': default_settings.get(key)}
    elif key == "software_profiles":
        # get_setting resolves the value (no DB handle available here)
        value = get_setting('software_profiles', default_settings, None)
        appstruct = {'key': key, 'type': 'Profiles', 'value': value}
    else:
        appstruct = {'key': key, 'type': 'string',
                     'value': default_settings.get(key)}
    return Setting().serialize(appstruct)
def command(self):
    """Sync the printer_models collection from the tarball URLs in the
    'printers.urls' setting (Python 3 / PyMongo 3 version).

    Each archive is scanned for printer/driver XML files; new models are
    inserted, and models no longer present in any list are removed.  An
    'Other' pseudo-model is kept per manufacturer so users can pick an
    unlisted printer.
    """
    urls = json.loads(get_setting('printers.urls', self.settings, self.db))
    collection = self.db.printer_models
    printer_model = PrinterModel()
    models = []
    num_imported = 0
    print('\n\nDownloading printers lists...')
    for url in urls:
        try:
            res = requests.get(url)
        except requests.exceptions.RequestException:
            print('Error downloading file:', url)
            continue
        # Keep the archive suffix so tarfile picks the right decompressor
        suffix = '.tar.xz' if url.endswith('.tar.xz') else '.tar.gz'
        # Context manager guarantees the temp file is closed/removed
        with tempfile.NamedTemporaryFile(suffix=suffix) as temp:
            # Write the payload directly; the BytesIO round-trip added
            # nothing
            temp.write(res.content)
            temp.flush()
            tar = tarfile.open(temp.name)
            for member in tar.getmembers():
                path = member.name.split('/')
                model = ''
                try:
                    file_name = path[-1]
                    ext = file_name.split('.')[-1]
                except IndexError:
                    continue
                if ext == 'xml' and (path[-2] == PRINTER
                                     or path[-2] == DRIVER):
                    xml_file = tar.extractfile(member)
                    xml_data = xml_file.read().decode('UTF-8')
                    manufacturer, model = self.parse_model_xml(xml_data)
                    if model:
                        models.append(model)
                        new_printer = printer_model.serialize(
                            {'manufacturer': manufacturer, 'model': model})
                        if not collection.find_one({'model': model}):
                            collection.insert_one(new_printer)
                            num_imported += 1
                            print("Imported printer: %s" % model)
    print('\n\nImported %d printers' % num_imported)
    # Adding 'Other' model for every manufacturer
    models.append('Other')  # Later, don't remove
    for m in collection.distinct('manufacturer'):
        other = printer_model.serialize(
            {'manufacturer': m, 'model': 'Other'})
        # BUGFIX: insert_one here created a duplicate 'Other' document
        # for every manufacturer on each run; upsert keeps exactly one.
        collection.replace_one({'manufacturer': m, 'model': 'Other'},
                               other, upsert=True)
    removed = collection.delete_many({'model': {'$nin': models}})
    print('Removed %d printers.\n\n' % removed.deleted_count)
def command(self):
    """Import Debian package metadata from all configured repositories.

    Unlike the simpler importer, this version records each package per
    repository, per architecture and per version inside one document.
    With --clean the packages collection is dropped before importing.
    """
    # Clean the database if necessary
    if self.options.clean:
        print('Cleaning MongoDB data before importing...')
        self.db.packages.drop()
    else:
        print('Adding package information to existing data...')
    packages = []
    packages_urls = {}
    repositories = json.loads(
        get_setting('repositories', self.settings, self.db))
    num_packages = 0
    # Fetch repositories packages files
    for repo in repositories:
        print '\n\n\nFetching: ', repo
        dists_url = repo + 'dists/'
        repo_packages = self.get_packages_urls(dists_url)
        packages_urls[repo] = repo_packages
    print '\n\n\nLooking for new packages...'
    for repo in packages_urls:
        for url in packages_urls[repo]:
            try:
                r = requests.get(url)
            except requests.exceptions.RequestException:
                print "Error downloading file: ", url
                continue
            # The index is a gzip-compressed stream of RFC822-style
            # stanzas separated by blank lines
            packages_list = gzip.GzipFile(fileobj=StringIO(r.content),
                                          mode='rb')
            package_model = Package()
            package = {}
            package['repository'] = repo
            try:
                for line in packages_list:
                    if line.strip() == '':
                        # Blank line ends a stanza: flush the collected
                        # fields into the database
                        if 'name' in package:
                            packages.append(package['name'])
                            db_package = self.db.packages.find_one(
                                {'name': package['name']})
                            # Build the nested version record from the
                            # optional stanza fields
                            newVersion = {'version': package['version']}
                            if 'description' in package:
                                newVersion['description'] = package[
                                    'description']
                            if 'depends' in package:
                                newVersion['depends'] = package['depends']
                            if 'provides' in package:
                                newVersion['provides'] = package[
                                    'provides']
                            if 'conflicts' in package:
                                newVersion['conflicts'] = package[
                                    'conflicts']
                            if 'replaces' in package:
                                newVersion['replaces'] = package[
                                    'replaces']
                            newArchitecture = {
                                'architecture': package['architecture'],
                                'versions': [newVersion]
                            }
                            newRepository = {
                                'repository': package['repository'],
                                'architectures': [newArchitecture]
                            }
                            if not db_package:
                                # Create new package record
                                newPackage = {
                                    'name': package['name'],
                                    'repositories': [newRepository]
                                }
                                # Check with collander
                                package_model.serialize(newPackage)
                                self.db.packages.insert(newPackage)
                                num_packages += 1
                                print "Imported package:", package[
                                    'name'], " ", package[
                                    'version'], " ", package[
                                    'architecture']
                            else:
                                # Update existing package record
                                # Check package repository
                                current_repo = None
                                for repodata in db_package['repositories']:
                                    if repodata['repository'] == package[
                                            'repository']:
                                        current_repo = repodata
                                        break
                                if current_repo is None:
                                    # Add new repository
                                    db_package['repositories'].append(
                                        newRepository)
                                else:
                                    # Check package architecture
                                    current_arch = None
                                    for archdata in current_repo[
                                            'architectures']:
                                        if archdata[
                                                'architecture'] == package[
                                                'architecture']:
                                            current_arch = archdata
                                            break
                                    if current_arch is None:
                                        # Add new architecture
                                        current_repo[
                                            'architectures'].append(
                                                newArchitecture)
                                    else:
                                        # Check version
                                        current_ver = None
                                        for verdata in current_arch[
                                                'versions']:
                                            if verdata[
                                                    'version'] == package[
                                                    'version']:
                                                current_ver = verdata
                                                break
                                        if current_ver is None:
                                            # Add new version
                                            current_arch[
                                                'versions'].append(
                                                    newVersion)
                                # Update
                                self.db.packages.update(
                                    {'name': package['name']},
                                    {'$set': db_package})
                                print "Updated package:", package[
                                    'name'], " ", package[
                                    'version'], " ", package[
                                    'architecture']
                        # Start collecting the next stanza
                        package = {}
                        package['repository'] = repo
                    else:
                        try:
                            key_value = self.parse_line(line)
                        except IndexError:
                            # Lines without a key/value shape are skipped
                            continue
                        # Map recognized index fields into the package dict
                        if key_value['key'] == PACKAGE_NAME_TOKEN:
                            package['name'] = key_value['value']
                        if key_value['key'] == VERSION_TOKEN:
                            package['version'] = key_value['value']
                        if key_value['key'] == ARCHITECTURE_TOKEN:
                            package['architecture'] = key_value['value']
                        if key_value['key'] == DESCRIPTION_TOKEN:
                            package['description'] = key_value['value']
                        if key_value['key'] == DEPENDS_TOKEN:
                            package['depends'] = key_value['value']
                        if key_value['key'] == PROVIDES_TOKEN:
                            package['provides'] = key_value['value']
                        if key_value['key'] == CONFLICTS_TOKEN:
                            package['conflicts'] = key_value['value']
                        if key_value['key'] == REPLACES_TOKEN:
                            package['replaces'] = key_value['value']
            except IOError:
                print "Error decompressing file:", url
                continue
    print '\n\nImported %d packages' % num_packages
    # Drop packages that no longer appear in any repository index
    removed = self.db.packages.remove({'name': {'$nin': packages}})
    print 'Removed %d packages.\n\n\n' % removed['n']