def post(self):
    data = request.get_json()["share"]
    id = random_string(16)
    share = shared_files.SharedFile(id, data["path"], data.get("expires", 0))
    share.add()
    return jsonify(share=share.serialized)
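# Hedged note on the handler above: the expected request body can be read
# straight from the dict lookups -- a top-level "share" object containing a
# required "path" and an optional "expires" (defaulting to 0). The endpoint
# URL is not shown in this snippet. Example payload:
#
#   {"share": {"path": "/home/user/file.txt", "expires": 0}}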
def format(self, record):
    if type(record.msg) in [str, bytes]:
        id = random_string(16)
        data = {"id": id, "message_id": id, "title": None,
                "message": record.msg, "comp": "Unknown", "cls": "runtime",
                "complete": True}
    else:
        data = record.msg.copy()
    levelname = "CRITICAL"
    logtime = datetime.datetime.fromtimestamp(record.created)
    logtime = logtime.strftime("%Y-%m-%d %H:%M:%S")
    logtime = "%s,%03d" % (logtime, record.msecs)
    # Pad level names so log columns align with the 7-character "SUCCESS"
    if record.levelname == "DEBUG":
        levelname = "DEBUG  "
    if record.levelname == "INFO":
        levelname = "INFO   "
    if record.levelname == "SUCCESS":
        levelname = "SUCCESS"
    if record.levelname == "WARNING":
        levelname = "WARN   "
    if record.levelname == "ERROR":
        levelname = "ERROR  "
    data.update({"cls": data["cls"].upper()[0], "levelname": levelname,
                 "asctime": logtime})
    result = self._fmt.format(**data)
    return result
def post_install(self, extra_vars, dbpasswd=""): secret_key = random_string() dbengine = 'mysql' \ if self.app.selected_dbengine == 'db-mariadb' \ else 'sqlite' # Write a standard Wallabag config file config_file = os.path.join(self.path, 'app/config/parameters.yml') with open(config_file + ".dist", 'r') as f: ic = f.readlines() with open(config_file, 'w') as f: for l in ic: if "database_driver: " in l: pdo = "pdo_mysql" if dbengine == "mysql" else "pdo_sqlite" l = " database_driver: {0}\n".format(pdo) elif "database_path: " in l and dbengine == 'sqlite': l = " database_path: {0}\n".format(self.db.path) elif "database_name: " in l and dbengine == 'mysql': l = " database_name: {0}\n".format(self.db.id) elif "database_user: "******" database_user: {0}\n".format(self.db.id) elif "database_password: "******"{0}"\n'.format(dbpasswd) elif "secret: " in l: l = " secret: {0}\n".format(secret_key) f.write(l) # Make sure that the correct PHP settings are enabled php.enable_mod('sqlite3', 'bcmath', 'pdo_mysql' if dbengine == 'mysql' else 'pdo_sqlite', 'zip', 'tidy') php.open_basedir('add', '/usr/bin/php') uid, gid = users.get_system("http").uid, groups.get_system("http").gid # Set up the database then delete the install folder if dbengine == 'sqlite3': php.open_basedir('add', '/var/lib/sqlite3') cwd = os.getcwd() os.chdir(self.path) s = shell("php bin/console wallabag:install --env=prod -n") if s["code"] != 0: logger.error("Websites", s["stderr"].decode()) raise errors.OperationFailedError( "Failed to populate database. See logs for more info") os.chdir(cwd) if dbengine == 'sqlite3': os.chown("/var/lib/sqlite3/{0}.db".format(self.db.id), -1, gid) os.chmod("/var/lib/sqlite3/{0}.db".format(self.db.id), 0o660) # Finally, make sure that permissions are set so that Wallabag # can make adjustments and save plugins when need be. for r, d, f in os.walk(self.path): for x in d: os.chown(os.path.join(r, x), uid, gid) for x in f: os.chown(os.path.join(r, x), uid, gid)
def firstrun():
    data = request.get_json()
    resize_boards = [
        "Raspberry Pi", "Raspberry Pi 2", "Raspberry Pi 3", "Cubieboard2",
        "Cubietruck", "BeagleBone Black", "ODROID-U"
    ]
    if data.get("resize_sd_card", None)\
            and config.get("enviro", "board") in resize_boards:
        part = 1 if config.get("enviro", "board").startswith("Cubie") else 2
        p1str = 'd\nn\np\n1\n\n\nw\n'
        p2str = 'd\n2\nn\np\n2\n\n\nw\n'
        shell('fdisk /dev/mmcblk0', stdin=(p1str if part == 1 else p2str))
        if not os.path.exists('/etc/cron.d'):
            os.mkdir('/etc/cron.d')
        with open('/etc/cron.d/resize', 'w') as f:
            f.write('@reboot root e2fsck -fy /dev/mmcblk0p{0}\n'.format(part))
            f.write('@reboot root resize2fs /dev/mmcblk0p{0}\n'.format(part))
            f.write('@reboot root rm /etc/cron.d/resize\n')
            f.close()
    if data.get("use_gpu_mem", None) \
            and config.get("enviro", "board").startswith("Raspberry"):
        f = filesystems.get("mmcblk0p1")
        if not f.is_mounted:
            f.mountpoint = "/boot"
            f.mount()
        cfgdata = []
        if os.path.exists('/boot/config.txt'):
            with open("/boot/config.txt", "r") as f:
                for x in f.readlines():
                    if x.startswith("gpu_mem"):
                        x = "gpu_mem=16\n"
                    cfgdata.append(x)
            if "gpu_mem=16\n" not in cfgdata:
                cfgdata.append("gpu_mem=16\n")
            with open("/boot/config.txt", "w") as f:
                f.writelines(cfgdata)
        else:
            with open("/boot/config.txt", "w") as f:
                f.write("gpu_mem=16\n")
    if data.get("cubie_mac", None) \
            and config.get("enviro", "board").startswith("Cubie"):
        if config.get("enviro", "board") == "Cubieboard2":
            with open('/boot/uEnv.txt', 'w') as f:
                opt_str = 'extraargs=mac_addr={0}\n'
                f.write(opt_str.format(data.get("cubie_mac")))
        elif config.get("enviro", "board") == "Cubietruck":
            with open('/etc/modprobe.d/gmac.conf', 'w') as f:
                opt_str = 'options sunxi_gmac mac_str="{0}"\n'
                f.write(opt_str.format(data.get("cubie_mac")))
    if data.get("install"):
        as_job(install, data["install"])
    rootpwd = ""
    if data.get("protectRoot"):
        rootpwd = random_string(16)
        shell("passwd root", stdin="{0}\n{0}\n".format(rootpwd))
    security.initialize_firewall()
    return jsonify(rootpwd=rootpwd)
def post_install(self, extra_vars, dbpasswd=""): secret_key = random_string() # Use the WordPress key generators as first option # If connection fails, use the secret_key as fallback try: keysection = requests.get( 'https://api.wordpress.org/secret-key/1.1/salt/').text except: keysection = '' if 'define(\'AUTH_KEY' not in keysection: keysection = ( 'define(\'AUTH_KEY\', \'{0}\');\n' 'define(\'SECURE_AUTH_KEY\', \'{0}\');\n' 'define(\'LOGGED_IN_KEY\', \'{0}\');\n' 'define(\'NONCE_KEY\', \'{0}\');\n'.format(secret_key) ) # Write a standard WordPress config file with open(os.path.join(self.path, 'wp-config.php'), 'w') as f: f.write('<?php\n' 'define(\'DB_NAME\', \'{0}\');\n' 'define(\'DB_USER\', \'{0}\');\n' 'define(\'DB_PASSWORD\', \'{1}\');\n' 'define(\'DB_HOST\', \'localhost\');\n' 'define(\'DB_CHARSET\', \'utf8\');\n' 'define(\'SECRET_KEY\', \'{2}\');\n' '\n' 'define(\'WP_CACHE\', true);\n' 'define(\'FORCE_SSL_ADMIN\', false);\n' '\n' '{3}' '\n' '$table_prefix = \'wp_\';\n' '\n' '/** Absolute path to the WordPress directory. */\n' 'if ( !defined(\'ABSPATH\') )\n' ' define(\'ABSPATH\', dirname(__FILE__) . \'/\');\n' '\n' '/** Sets up WordPress vars and included files. */\n' 'require_once(ABSPATH . \'wp-settings.php\');\n' .format(self.db.id, dbpasswd, secret_key, keysection) ) # Make sure that the correct PHP settings are enabled php.enable_mod('mysqli', 'opcache') php.enable_mod('apcu', config_file="/etc/php/conf.d/apcu.ini") # Finally, make sure that permissions are set so that Wordpress # can make adjustments and save plugins when need be. uid, gid = users.get_system("http").uid, groups.get_system("http").gid for r, d, f in os.walk(self.path): for x in d: os.chown(os.path.join(r, x), uid, gid) for x in f: os.chown(os.path.join(r, x), uid, gid)
def create_share(path, expires):
    """Create a fileshare link."""
    try:
        share = shared_files.Share(random_string(), path, expires)
        share.add()
        logger.success('ctl:links:create', 'Created link')
        smsg = "Link is your external server address, plus: /shared/{0}"
        logger.info('ctl:links:create', smsg.format(share.id))
    except Exception as e:
        raise CLIException(str(e))
def post_install(self, vars, dbpasswd=""): secret_key = random_string() # Use the WordPress key generators as first option # If connection fails, use the secret_key as fallback try: keysection = urllib.urlopen('https://api.wordpress.org/secret-key/1.1/salt/').read() except: keysection = '' if not 'define(\'AUTH_KEY' in keysection: keysection = ( 'define(\'AUTH_KEY\', \''+secret_key+'\');\n' 'define(\'SECURE_AUTH_KEY\', \''+secret_key+'\');\n' 'define(\'LOGGED_IN_KEY\', \''+secret_key+'\');\n' 'define(\'NONCE_KEY\', \''+secret_key+'\');\n' ) # Write a standard WordPress config file with open(os.path.join(self.path, 'wp-config.php'), 'w') as f: f.write('<?php\n' 'define(\'DB_NAME\', \''+self.db.id+'\');\n' 'define(\'DB_USER\', \''+self.db.id+'\');\n' 'define(\'DB_PASSWORD\', \''+dbpasswd+'\');\n' 'define(\'DB_HOST\', \'localhost\');\n' 'define(\'DB_CHARSET\', \'utf8\');\n' 'define(\'SECRET_KEY\', \''+secret_key+'\');\n' '\n' 'define(\'WP_CACHE\', true);\n' 'define(\'FORCE_SSL_ADMIN\', false);\n' '\n' +keysection+ '\n' '$table_prefix = \'wp_\';\n' '\n' '/** Absolute path to the WordPress directory. */\n' 'if ( !defined(\'ABSPATH\') )\n' ' define(\'ABSPATH\', dirname(__FILE__) . \'/\');\n' '\n' '/** Sets up WordPress vars and included files. */\n' 'require_once(ABSPATH . \'wp-settings.php\');\n' ) # Make sure that the correct PHP settings are enabled php.enable_mod('mysqli', 'opcache') php.enable_mod('apcu', config_file="/etc/php/conf.d/apcu.ini") # Finally, make sure that permissions are set so that Wordpress # can make adjustments and save plugins when need be. uid, gid = users.get_system("http").uid, groups.get_system("http").gid for r, d, f in os.walk(self.path): for x in d: os.chown(os.path.join(r, x), uid, gid) for x in f: os.chown(os.path.join(r, x), uid, gid)
def change_admin_passwd(self):
    try:
        s = services.get("mysqld")
        if s.state != "running":
            s.start()
    except:
        return ""
    new_passwd = random_string()[:16]
    secrets.set("mysql", new_passwd)
    secrets.save()
    c = MySQLdb.connect('localhost', 'root', '', 'mysql')
    c.query('UPDATE user SET password=PASSWORD("'+new_passwd+'") WHERE User=\'root\'')
    c.query('FLUSH PRIVILEGES')
    c.commit()
    return new_passwd
def emit(self, record):
    data = record.msg
    if type(data) in [str, bytes]:
        id = random_string(16)
        data = {"id": id, "message": record.msg, "message_id": id,
                "title": None, "comp": "Unknown", "cls": "runtime",
                "complete": True}
    logtime = datetime.datetime.fromtimestamp(record.created)
    logtime = logtime.isoformat()
    data.update({"cls": data["cls"], "level": record.levelname.lower(),
                 "time": logtime})
    pipe = storage.pipeline()
    storage.publish("notifications", data, pipe)
    storage.prepend("n:{0}".format(data["id"]), data, pipe)
    storage.expire("n:{0}".format(data["id"]), 604800, pipe)
    pipe.execute()
def change_admin_passwd(self):
    try:
        s = services.get("mysqld")
        if s.state != "running":
            s.start()
    except:
        return ""
    new_passwd = random_string()[:16]
    secrets.set("mysql", new_passwd)
    secrets.save()
    c = MySQLdb.connect('localhost', 'root', '', 'mysql')
    c.query("UPDATE user SET password=PASSWORD(\"{0}\") "
            "WHERE User='root'".format(new_passwd))
    c.query('FLUSH PRIVILEGES')
    c.commit()
    return new_passwd
def restore(self, data):
    signals.emit("backups", "pre_restore", self)
    # Trigger pre-restore hook for the app/site
    self.pre_restore()

    # Extract all files in archive
    sitename = ""
    with tarfile.open(data["path"], "r:gz") as t:
        for x in t.getnames():
            if x.startswith("etc/nginx/sites-available"):
                sitename = os.path.basename(x)
        t.extractall("/")

    # If it's a website that had a database, restore DB via SQL file too
    dbpasswd = ""
    if self.ctype == "site" and sitename:
        self.site = websites.get(sitename)
        if not self.site:
            websites.scan()
            self.site = websites.get(sitename)
        meta = ConfigParser.SafeConfigParser()
        meta.read(os.path.join(self.site.path, ".arkos"))
        if meta.get("website", "dbengine", None) \
                and os.path.exists("/%s.sql" % sitename):
            dbmgr = databases.get_managers(meta.get("website", "dbengine"))
            if databases.get(sitename):
                databases.get(sitename).remove()
            db = dbmgr.add_db(sitename)
            with open("/%s.sql" % sitename, "r") as f:
                db.execute(f.read())
            os.unlink("/%s.sql" % sitename)
            if dbmgr.meta.database_multiuser:
                dbpasswd = random_string()[0:16]
                if databases.get_user(sitename):
                    databases.get_user(sitename).remove()
                db_user = dbmgr.add_user(sitename, dbpasswd)
                db_user.chperm("grant", db)

    # Trigger post-restore hook for the app/site
    if self.ctype == "site":
        self.post_restore(self.site, dbpasswd)
        self.site.nginx_enable()
    else:
        self.post_restore()
    signals.emit("backups", "post_restore", self)
    data["is_ready"] = True
    return data
def __init__(self, level, comp, message, cls="notify", id=None, title=None):
    level = level.upper()
    if level not in self.LEVELS:
        raise errors.InvalidConfigError("Unrecognized log level specified")
    id = id or random_string(16)
    self.level = self.LEVELS[level]
    self.comp = comp
    self.message = message
    self.cls = cls
    self.id = id
    self.title = title
    self.message_id = id
    self.complete = True
def redis(): """Initialize distribution Redis integration.""" paths = ["arkos-redis.service", "arkos-redis.conf"] for x in paths: if not os.path.exists(os.path.join("/usr/share/arkos/redis", x)): raise CLIException( "Template files could not be found. Your installation may " "be corrupted. Please reinstall the `arkos-configs` package.") logger.debug('ctl:init:redis', 'Stopping daemon if exists: arkos-redis') shell("systemctl stop arkos-redis") logger.info('ctl:init:redis', 'Copying files') ruid, rgid = pwd.getpwnam("redis").pw_uid, grp.getgrnam("redis").gr_gid shutil.copy("/usr/share/arkos/redis/arkos-redis.conf", "/etc/arkos-redis.conf") os.chown("/etc/arkos-redis.conf", ruid, rgid) os.chmod("/etc/arkos-redis.conf", 0o660) shutil.copy("/usr/share/arkos/redis/arkos-redis.service", "/usr/lib/systemd/system/arkos-redis.service") os.chmod("/usr/lib/systemd/system/arkos-redis.service", 0o644) if not os.path.exists("/var/lib/arkos-redis"): os.makedirs("/var/lib/arkos-redis") os.chmod("/var/lib/arkos-redis", 0o700) os.chown("/var/lib/arkos-redis", ruid, rgid) logger.info('ctl:init:redis', 'Setting admin password') redis_passwd = random_string(16) with open("/etc/arkos-redis.conf", "r") as f: data = f.read() data = data.replace("%REDISPASS%", redis_passwd) with open("/etc/arkos-redis.conf", "w") as f: f.write(data) secrets.set("redis", redis_passwd) secrets.save() logger.debug('ctl:init:redis', 'Starting daemon: arkos-redis') shell("systemctl daemon-reload") shell("systemctl enable arkos-redis") shell("systemctl start arkos-redis") logger.success('ctl:init:redis', 'Complete')
def post_install(self, vars, dbpasswd=""): secret_key = random_string() php.open_basedir('add', '/dev') # If there is a custom path for the data directory, add to open_basedir uid, gid = users.get_system("http").uid, groups.get_system("http").gid os.makedirs(os.path.join(self.path, "data")) os.chown(os.path.join(self.path, "data"), uid, gid) if self.data_path == self.path: self.data_path = os.path.join(self.path, "data") else: try: os.makedirs(os.path.join(self.data_path)) except OSError, e: if e[0] == 17: pass else: raise os.chown(os.path.join(self.data_path), uid, gid) php.open_basedir('add', self.data_path)
def as_job(func, *args, **kwargs):
    id = random_string()[0:16]
    j = Job(id, func, *args, **kwargs)
    j.start()
    return id
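# Hedged usage sketch for as_job() above; `long_running_task` and its argument
# are hypothetical, not from the arkOS sources. as_job() starts the wrapped
# callable on a background Job thread and returns the job id immediately, the
# same way firstrun() calls as_job(install, data["install"]).
def long_running_task(target):
    pass  # stand-in for real work


job_id = as_job(long_running_task, "/srv/example")  # 16-character job id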
def post(self):
    data = request.get_json()["share"]
    id = random_string()
    share = shared_files.Share(id, data["path"], data.get("expires", 0))
    share.add()
    return jsonify(share=share.as_dict())
def post_install(self, vars, dbpasswd=""): secret_key = random_string() dbengine = 'mysql' if self.meta.selected_dbengine == 'db-mariadb' else 'sqlite' username = vars.get("wb-username") passwd = vars.get("wb-passwd") + username + secret_key passwd = hashlib.sha1(passwd).hexdigest() # Write a standard Wallabag config file shutil.copy(os.path.join(self.path, 'inc/poche/config.inc.default.php'), os.path.join(self.path, 'inc/poche/config.inc.php')) with open(os.path.join(self.path, 'inc/poche/config.inc.php'), 'r') as f: ic = f.readlines() oc = [] for l in ic: if 'define (\'SALT\'' in l: l = '@define (\'SALT\', \''+secret_key+'\');\n' oc.append(l) elif 'define (\'STORAGE\'' in l: l = '@define (\'STORAGE\', \''+dbengine+'\');\n' oc.append(l) elif 'define (\'STORAGE_SQLITE\'' in l and dbengine == 'sqlite': l = '@define (\'STORAGE_SQLITE\', \'/var/lib/sqlite3/'+self.db.id+'.db\');\n' oc.append(l) elif 'define (\'STORAGE_DB\'' in l and dbengine == 'mysql': l = '@define (\'STORAGE_DB\', \''+self.db.id+'\');\n' oc.append(l) elif 'define (\'STORAGE_USER\'' in l and dbengine == 'mysql': l = '@define (\'STORAGE_USER\', \''+self.db.id+'\');\n' oc.append(l) elif 'define (\'STORAGE_PASSWORD\'' in l and dbengine == 'mysql': l = '@define (\'STORAGE_PASSWORD\', \''+dbpasswd+'\');\n' oc.append(l) else: oc.append(l) with open(os.path.join(self.path, 'inc/poche/config.inc.php'), 'w') as f: f.writelines(oc) # Make sure that the correct PHP settings are enabled php.enable_mod('mysql' if dbengine == 'mysql' else 'sqlite3', 'pdo_mysql' if dbengine == 'mysql' else 'pdo_sqlite', 'zip', 'tidy', 'xcache', 'openssl') # Set up Composer and install the proper modules php.composer_install(self.path) uid, gid = users.get_system("http").uid, groups.get_system("http").gid # Set up the database then delete the install folder if dbengine == 'mysql': with open(os.path.join(self.path, 'install/mysql.sql')) as f: self.db.execute(f.read()) self.db.execute( "INSERT INTO users (username, password, name, email) VALUES ('%s', '%s', '%s', '');" % (username, passwd, username), commit=True) lid = int(self.db.manager.connection.insert_id()) self.db.execute( "INSERT INTO users_config (user_id, name, value) VALUES (%s, 'pager', '10');" % lid, commit=True) self.db.execute( "INSERT INTO users_config (user_id, name, value) VALUES (%s, 'language', 'en_EN.UTF8');" % lid, commit=True) else: shutil.copy(os.path.join(self.path, 'install/poche.sqlite'), '/var/lib/sqlite3/%s.db' % self.db.id) php.open_basedir('add', '/var/lib/sqlite3') os.chown("/var/lib/sqlite3/%s.db" % self.db.id, -1, gid) os.chmod("/var/lib/sqlite3/%s.db", 0664) self.db.execute( "INSERT INTO users (username, password, name, email) VALUES ('%s', '%s', '%s', '');" % (username, passwd, username)) self.db.execute( "INSERT INTO users_config (user_id, name, value) VALUES (1, 'pager', '10');") self.db.execute( "INSERT INTO users_config (user_id, name, value) VALUES (1, 'language', 'en_EN.UTF8');") shutil.rmtree(os.path.join(self.path, 'install')) # Finally, make sure that permissions are set so that Wallabag # can make adjustments and save plugins when need be. for r, d, f in os.walk(self.path): for x in d: if d in ["assets", "cache", "db"]: os.chmod(os.path.join(r, d), 0755) os.chown(os.path.join(r, x), uid, gid) for x in f: os.chown(os.path.join(r, x), uid, gid)
def run_daemon(environment, config_file, secrets_file, policies_file, debug):
    """Run the Kraken server daemon."""
    app.debug = debug or environment in ["dev", "vagrant"]
    app.config["SECRET_KEY"] = random_string()

    # Open and load configuration
    config = arkos.init(config_file, secrets_file, policies_file,
                        app.debug, environment in ["dev", "vagrant"],
                        app.logger)
    storage.connect()
    if environment not in ["dev", "vagrant"]:
        filehdlr = RotatingFileHandler(
            '/var/log/kraken.log', maxBytes=2097152, backupCount=5
        )
        st = "{asctime} [{cls}] [{levelname}] {comp}: {message}"
        filehdlr.setLevel(logging.DEBUG if app.debug else logging.INFO)
        filehdlr.setFormatter(FileFormatter(st))
        logger.logger.addHandler(filehdlr)
    apihdlr = APIHandler()
    apihdlr.setLevel(logging.DEBUG if app.debug else logging.INFO)
    apihdlr.addFilter(NotificationFilter())
    logger.logger.addHandler(apihdlr)

    logger.info("Init", "arkOS Kraken {0}".format(arkos.version))
    if environment in ["dev", "vagrant"]:
        logger.debug("Init", "*** TEST MODE ***")
    logger.info("Init", "Using config file at {0}".format(config.filename))
    app.conf = config

    arch = config.get("enviro", "arch", "Unknown")
    board = config.get("enviro", "board", "Unknown")
    platform = detect_platform()
    hwstr = "Detected architecture/hardware: {0}, {1}"
    logger.info("Init", hwstr.format(arch, board))
    logger.info("Init", "Detected platform: {0}".format(platform))
    logger.info("Init", "Environment: {0}".format(environment))
    config.set("enviro", "run", environment)

    for code in list(default_exceptions.keys()):
        app.register_error_handler(code, make_json_error)
    app.register_blueprint(auth.backend)

    logger.info("Init", "Loading applications and scanning system...")
    arkos.initial_scans()

    # Load framework blueprints
    logger.info("Init", "Loading frameworks...")
    register_frameworks(app)

    logger.info("Init", "Initializing Genesis (if present)...")
    app.register_blueprint(genesis.backend)
    hasgen = genesis.verify_genesis()
    if not hasgen:
        errmsg = ("A compiled distribution of Genesis was not found. "
                  "Kraken will finish loading but you may not be able to "
                  "access the Web interface.")
        logger.warning("Init", errmsg)

    app.after_request(add_cors_to_response)
    logger.info("Init", "Server is up and ready")
    try:
        import eventlet
        pubsub = storage.redis.pubsub(ignore_subscribe_messages=True)
        pubsub.subscribe(["arkos:notifications", "arkos:records:push",
                          "arkos:records:purge"])
        eventlet.spawn(handle_pubsub, pubsub, socketio)
        eventlet_socket = eventlet.listen(
            (config.get("genesis", "host"), config.get("genesis", "port"))
        )
        if config.get("genesis", "ssl", False):
            eventlet_socket = eventlet.wrap_ssl(
                eventlet_socket,
                certfile=config.get("genesis", "cert_file"),
                keyfile=config.get("genesis", "cert_key"),
                ssl_version=ssl.PROTOCOL_TLSv1_2,
                server_side=True)
        eventlet.wsgi.server(
            eventlet_socket, app, log=WSGILogWrapper(),
            log_format=('%(client_ip)s - "%(request_line)s" %(status_code)s '
                        '%(body_length)s %(wall_seconds).6f'))
    except KeyboardInterrupt:
        logger.info("Init", "Received interrupt")
        raise
def _install(self, extra_vars, enable, nthread):
    nthread.title = "Installing website"
    msg = Notification("info", "Webs", "Preparing to install...")
    nthread.update(msg)

    # Make sure the chosen port is indeed open
    if not tracked_services.is_open_port(self.port, self.domain):
        cname = "({0})".format(self.app.id)
        raise errors.InvalidConfigError(cname, nthread)\
            from tracked_services.PortConflictError(self.port, self.domain)

    # Set some metadata values
    specialmsg, dbpasswd = "", ""
    site_dir = config.get("websites", "site_dir")
    path = (self.path or os.path.join(site_dir, self.id))
    self.path = path
    self.php = extra_vars.get("php") or self.php \
        or self.app.uses_php or False
    self.version = self.app.version.rsplit("-", 1)[0] \
        if self.app.website_updates else None

    # Classify the source package type
    if not self.app.download_url:
        ending = ""
    elif self.app.download_url.endswith(".tar.gz"):
        ending = ".tar.gz"
    elif self.app.download_url.endswith(".tgz"):
        ending = ".tgz"
    elif self.app.download_url.endswith(".tar.bz2"):
        ending = ".tar.bz2"
    elif self.app.download_url.endswith(".zip"):
        ending = ".zip"
    elif self.app.download_url.endswith(".git"):
        ending = ".git"
    else:
        raise errors.InvalidConfigError(
            "Invalid source archive format in {0}".format(self.app.id))

    msg = "Running pre-installation..."
    uid, gid = users.get_system("http").uid, groups.get_system("http").gid
    nthread.update(Notification("info", "Webs", msg))

    # Call website type's pre-install hook
    self.pre_install(extra_vars)

    # If needs DB and user didn't select an engine, choose one for them
    if len(self.app.database_engines) > 1 \
            and extra_vars.get("dbengine", None):
        self.app.selected_dbengine = extra_vars.get("dbengine")
    if not getattr(self.app, "selected_dbengine", None)\
            and self.app.database_engines:
        self.app.selected_dbengine = self.app.database_engines[0]

    # Create DB and/or DB user as necessary
    if getattr(self.app, "selected_dbengine", None):
        msg = "Creating database..."
        nthread.update(Notification("info", "Webs", msg))
        mgr = databases.get_managers(self.app.selected_dbengine)
        if not mgr:
            estr = "No manager found for {0}"
            raise errors.InvalidConfigError(
                estr.format(self.app.selected_dbengine))
        # Make sure DB daemon is running if it has one
        if not mgr.state:
            svc = services.get(mgr.meta.database_service)
            svc.restart()
        self.db = mgr.add_db(self.id)
        if hasattr(self.db, "path"):
            os.chmod(self.db.path, 0o660)
            os.chown(self.db.path, -1, gid)
        # If multiuser DB type, create user
        if mgr.meta.database_multiuser:
            dbpasswd = random_string(16)
            db_user = mgr.add_user(self.id, dbpasswd)
            db_user.chperm("grant", self.db)

    # Make sure the target directory exists, but is empty
    pkg_path = os.path.join("/tmp", self.id + ending)
    if os.path.isdir(self.path):
        shutil.rmtree(self.path)
    os.makedirs(self.path)

    # Download and extract the source repo / package
    msg = "Downloading website source..."
    nthread.update(Notification("info", "Webs", msg))
    if self.app.download_url and ending == ".git":
        g = git.Repo.clone_from(self.app.download_url, self.path)
        if hasattr(self.app, "download_at_tag"):
            g = git.Git(self.path)
            g.checkout(self.app.download_git_tag)
    elif self.app.download_url:
        download(self.app.download_url, file=pkg_path, crit=True)

        # Format extraction command according to type
        msg = "Extracting source..."
        nthread.update(Notification("info", "Webs", msg))
        if ending in [".tar.gz", ".tgz", ".tar.bz2"]:
            arch = tarfile.open(pkg_path, "r:gz")
            r = (x for x in arch.getnames() if re.match("^[^/]*$", x))
            toplvl = next(r, None)
            if not toplvl:
                raise errors.OperationFailedError(
                    "Malformed source archive")
            arch.extractall(site_dir)
            os.rename(os.path.join(site_dir, toplvl), self.path)
        else:
            arch = zipfile.ZipFile(pkg_path)
            r = (x for x in arch.namelist() if re.match("^[^/]*/$", x))
            toplvl = next(r, None)
            if not toplvl:
                raise errors.OperationFailedError(
                    "Malformed source archive")
            arch.extractall(site_dir)
            os.rename(os.path.join(site_dir, toplvl.rstrip("/")), self.path)
        os.remove(pkg_path)

    # Set proper starting permissions on source directory
    os.chmod(self.path, 0o755)
    os.chown(self.path, uid, gid)
    for r, d, f in os.walk(self.path):
        for x in d:
            os.chmod(os.path.join(r, x), 0o755)
            os.chown(os.path.join(r, x), uid, gid)
        for x in f:
            os.chmod(os.path.join(r, x), 0o644)
            os.chown(os.path.join(r, x), uid, gid)

    # If there is a custom path for the data directory, set it up
    if getattr(self.app, "website_datapaths", None) \
            and extra_vars.get("datadir"):
        self.data_path = extra_vars["datadir"]
        if not os.path.exists(self.data_path):
            os.makedirs(self.data_path)
        os.chmod(self.data_path, 0o755)
        os.chown(self.data_path, uid, gid)
    elif hasattr(self, "website_default_data_subdir"):
        self.data_path = os.path.join(self.path,
                                      self.website_default_data_subdir)
    else:
        self.data_path = self.path

    # Create the nginx serverblock
    addtoblock = self.addtoblock or []
    if extra_vars.get("addtoblock"):
        addtoblock += nginx.loads(extra_vars.get("addtoblock"), False)
    default_index = "index." + ("php" if self.php else "html")
    if hasattr(self.app, "website_root"):
        webroot = os.path.join(self.path, self.app.website_root)
    else:
        webroot = self.path
    block = nginx.Conf()
    server = nginx.Server(
        nginx.Key("listen", str(self.port)),
        nginx.Key("listen", "[::]:" + str(self.port)),
        nginx.Key("server_name", self.domain),
        nginx.Key("root", webroot),
        nginx.Key(
            "index",
            getattr(self.app, "website_index", None) or default_index),
        nginx.Location("/.well-known/acme-challenge/",
                       nginx.Key("root", self.path)))
    if addtoblock:
        server.add(*[x for x in addtoblock])
    block.add(server)
    nginx.dumpf(block, os.path.join("/etc/nginx/sites-available", self.id))
    challenge_dir = os.path.join(self.path, ".well-known/acme-challenge/")
    if not os.path.exists(challenge_dir):
        os.makedirs(challenge_dir)

    # Create arkOS metadata file
    meta = configparser.SafeConfigParser()
    meta.add_section("website")
    meta.set("website", "id", self.id)
    meta.set("website", "app", self.app.id)
    meta.set("website", "ssl",
             self.cert.id if getattr(self, "cert", None) else "None")
    meta.set("website", "version", self.version or "None")
    if getattr(self.app, "website_datapaths", None) \
            and self.data_path:
        meta.set("website", "data_path", self.data_path)
    meta.set("website", "dbengine", "")
    meta.set("website", "dbengine",
             getattr(self.app, "selected_dbengine", ""))
    with open(os.path.join(self.path, ".arkos"), "w") as f:
        meta.write(f)

    # Call site type's post-installation hook
    msg = "Running post-installation. This may take a few minutes..."
    nthread.update(Notification("info", "Webs", msg))
    specialmsg = self.post_install(extra_vars, dbpasswd)

    # Cleanup and reload daemons
    msg = "Finishing..."
    nthread.update(Notification("info", "Webs", msg))
    self.installed = True
    storage.websites[self.id] = self
    if self.port == 80:
        cleanup_acme_dummy(self.domain)
    signals.emit("websites", "site_installed", self)
    if enable:
        self.nginx_enable()
    if enable and self.php:
        php.open_basedir("add", "/srv/http/")
        php_reload()

    msg = "{0} site installed successfully".format(self.app.name)
    nthread.complete(Notification("success", "Webs", msg))
    if specialmsg:
        return specialmsg
def post_install(self, vars, dbpasswd=""): secret_key = random_string() # If there is a custom path for the data directory, add to open_basedir uid, gid = users.get_system("http").uid, groups.get_system("http").gid if not self.data_path.startswith(self.path): os.makedirs(os.path.join(self.path, "data")) os.chown(os.path.join(self.path, "data"), uid, gid) php.open_basedir('add', self.data_path) # Create ownCloud automatic configuration file with open(os.path.join(self.path, 'config', 'autoconfig.php'), 'w') as f: f.write('<?php\n' ' $AUTOCONFIG = array(\n' ' "adminlogin" => "admin",\n' ' "adminpass" => "' + dbpasswd + '",\n' ' "dbtype" => "mysql",\n' ' "dbname" => "' + self.db.id + '",\n' ' "dbuser" => "' + self.db.id + '",\n' ' "dbpass" => "' + dbpasswd + '",\n' ' "dbhost" => "localhost",\n' ' "dbtableprefix" => "",\n' ' "directory" => "' + self.data_path + '",\n' ' );\n' '?>\n') os.chown(os.path.join(self.path, 'config', 'autoconfig.php'), uid, gid) # Make sure that the correct PHP settings are enabled php.enable_mod('mysql', 'pdo_mysql', 'zip', 'gd', 'ldap', 'iconv', 'openssl', 'xcache', 'posix') # Make sure xcache has the correct settings, otherwise ownCloud breaks with open('/etc/php/conf.d/xcache.ini', 'w') as f: f.writelines([ 'extension=xcache.so\n', 'xcache.size=64M\n', 'xcache.var_size=64M\n', 'xcache.admin.enable_auth = Off\n', 'xcache.admin.user = "******"\n', 'xcache.admin.pass = "******"\n' ]) php.change_setting("always_populate_raw_post_data", "-1") mydir = os.getcwd() os.chdir(self.path) s = shell("sudo -u http php index.php") if s["code"] != 0: raise Exception("ownCloud database population failed") s = shell("sudo -u http php occ app:enable user_ldap") if s["code"] != 0: raise Exception("ownCloud LDAP configuration failed") os.chdir(mydir) ldap_sql = ( "REPLACE INTO appconfig (appid, configkey, configvalue) VALUES" "('user_ldap', 'ldap_uuid_attribute', 'auto')," "('user_ldap', 'ldap_host', 'localhost')," "('user_ldap', 'ldap_port', '389')," "('user_ldap', 'ldap_base', 'dc=arkos-servers,dc=org')," "('user_ldap', 'ldap_base_users', 'dc=arkos-servers,dc=org')," "('user_ldap', 'ldap_base_groups', 'dc=arkos-servers,dc=org')," "('user_ldap', 'ldap_tls', '0')," "('user_ldap', 'ldap_display_name', 'cn')," "('user_ldap', 'ldap_userlist_filter', 'objectClass=mailAccount')," "('user_ldap', 'ldap_group_filter', 'objectClass=posixGroup')," "('user_ldap', 'ldap_group_display_name', 'cn')," "('user_ldap', 'ldap_group_member_assoc_attribute', 'uniqueMember')," "('user_ldap', 'ldap_login_filter', '(&(|(objectclass=posixAccount))(|(uid=%uid)))')," "('user_ldap', 'ldap_quota_attr', 'mailQuota')," "('user_ldap', 'ldap_quota_def', '')," "('user_ldap', 'ldap_email_attr', 'mail')," "('user_ldap', 'ldap_cache_ttl', '600')," "('user_ldap', 'ldap_configuration_active', '1')," "('user_ldap', 'home_folder_naming_rule', '')," "('user_ldap', 'ldap_backup_host', '')," "('user_ldap', 'ldap_dn', '')," "('user_ldap', 'ldap_agent_password', '')," "('user_ldap', 'ldap_backup_port', '')," "('user_ldap', 'ldap_nocase', '')," "('user_ldap', 'ldap_turn_off_cert_check', '')," "('user_ldap', 'ldap_override_main_server', '')," "('user_ldap', 'ldap_attributes_for_user_search', '')," "('user_ldap', 'ldap_attributes_for_group_search', '')," "('user_ldap', 'ldap_expert_username_attr', 'uid')," "('user_ldap', 'ldap_expert_uuid_attr', '');") self.db.execute(ldap_sql, commit=True) # TODO set authed user name self.db.execute("INSERT INTO group_user VALUES ('admin','testuser');", commit=True)
def new(self, level, comp, message, cls="notify", id=None, title=None):
    return Notification(level, comp, message, cls, id or random_string(16),
                        title)
def __init__(self, id=None, title=None, message=None):
    self.id = id or random_string(16)
    self.title = title
    if message:
        self._send(message, complete=False)
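# Hedged usage sketch, assuming the __init__ above belongs to the
# NotificationThread class that _install() and restore() in this file drive
# via update() and complete():
nthread = NotificationThread(title="Restoring backup")
nthread.update(Notification("info", "Backup", "Extracting files..."))
nthread.complete(Notification("success", "Backup", "Restore finished."))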
def post_install(self, vars, dbpasswd=""): secret_key = random_string() dbengine = 'mysql' if self.meta.selected_dbengine == 'db-mariadb' else 'sqlite' username = vars.get("wb-username") passwd = vars.get("wb-passwd") + username + secret_key passwd = hashlib.sha1(passwd).hexdigest() # Write a standard Wallabag config file shutil.copy( os.path.join(self.path, 'inc/poche/config.inc.default.php'), os.path.join(self.path, 'inc/poche/config.inc.php')) with open(os.path.join(self.path, 'inc/poche/config.inc.php'), 'r') as f: ic = f.readlines() oc = [] for l in ic: if 'define (\'SALT\'' in l: l = '@define (\'SALT\', \'' + secret_key + '\');\n' oc.append(l) elif 'define (\'STORAGE\'' in l: l = '@define (\'STORAGE\', \'' + dbengine + '\');\n' oc.append(l) elif 'define (\'STORAGE_SQLITE\'' in l and dbengine == 'sqlite': l = '@define (\'STORAGE_SQLITE\', \'/var/lib/sqlite3/' + self.db.id + '.db\');\n' oc.append(l) elif 'define (\'STORAGE_DB\'' in l and dbengine == 'mysql': l = '@define (\'STORAGE_DB\', \'' + self.db.id + '\');\n' oc.append(l) elif 'define (\'STORAGE_USER\'' in l and dbengine == 'mysql': l = '@define (\'STORAGE_USER\', \'' + self.db.id + '\');\n' oc.append(l) elif 'define (\'STORAGE_PASSWORD\'' in l and dbengine == 'mysql': l = '@define (\'STORAGE_PASSWORD\', \'' + dbpasswd + '\');\n' oc.append(l) else: oc.append(l) with open(os.path.join(self.path, 'inc/poche/config.inc.php'), 'w') as f: f.writelines(oc) # Make sure that the correct PHP settings are enabled php.enable_mod('mysql' if dbengine == 'mysql' else 'sqlite3', 'pdo_mysql' if dbengine == 'mysql' else 'pdo_sqlite', 'zip', 'tidy', 'xcache', 'openssl') # Set up Composer and install the proper modules php.composer_install(self.path) uid, gid = users.get_system("http").uid, groups.get_system("http").gid # Set up the database then delete the install folder if dbengine == 'mysql': with open(os.path.join(self.path, 'install/mysql.sql')) as f: self.db.execute(f.read()) self.db.execute( "INSERT INTO users (username, password, name, email) VALUES ('%s', '%s', '%s', '');" % (username, passwd, username), commit=True) lid = int(self.db.manager.connection.insert_id()) self.db.execute( "INSERT INTO users_config (user_id, name, value) VALUES (%s, 'pager', '10');" % lid, commit=True) self.db.execute( "INSERT INTO users_config (user_id, name, value) VALUES (%s, 'language', 'en_EN.UTF8');" % lid, commit=True) else: shutil.copy(os.path.join(self.path, 'install/poche.sqlite'), '/var/lib/sqlite3/%s.db' % self.db.id) php.open_basedir('add', '/var/lib/sqlite3') os.chown("/var/lib/sqlite3/%s.db" % self.db.id, -1, gid) os.chmod("/var/lib/sqlite3/%s.db", 0664) self.db.execute( "INSERT INTO users (username, password, name, email) VALUES ('%s', '%s', '%s', '');" % (username, passwd, username)) self.db.execute( "INSERT INTO users_config (user_id, name, value) VALUES (1, 'pager', '10');" ) self.db.execute( "INSERT INTO users_config (user_id, name, value) VALUES (1, 'language', 'en_EN.UTF8');" ) shutil.rmtree(os.path.join(self.path, 'install')) # Finally, make sure that permissions are set so that Wallabag # can make adjustments and save plugins when need be. for r, d, f in os.walk(self.path): for x in d: if d in ["assets", "cache", "db"]: os.chmod(os.path.join(r, d), 0755) os.chown(os.path.join(r, x), uid, gid) for x in f: os.chown(os.path.join(r, x), uid, gid)
def post_install(self, vars, dbpasswd=""): secret_key = random_string() # If there is a custom path for the data directory, add to open_basedir uid, gid = users.get_system("http").uid, groups.get_system("http").gid if not self.data_path.startswith(self.path): os.makedirs(os.path.join(self.path, "data")) os.chown(os.path.join(self.path, "data"), uid, gid) php.open_basedir('add', self.data_path) # Create ownCloud automatic configuration file with open(os.path.join(self.path, 'config', 'autoconfig.php'), 'w') as f: f.write( '<?php\n' ' $AUTOCONFIG = array(\n' ' "adminlogin" => "admin",\n' ' "adminpass" => "'+dbpasswd+'",\n' ' "dbtype" => "mysql",\n' ' "dbname" => "'+self.db.id+'",\n' ' "dbuser" => "'+self.db.id+'",\n' ' "dbpass" => "'+dbpasswd+'",\n' ' "dbhost" => "localhost",\n' ' "dbtableprefix" => "",\n' ' "directory" => "'+self.data_path+'",\n' ' );\n' '?>\n' ) os.chown(os.path.join(self.path, 'config', 'autoconfig.php'), uid, gid) # Make sure that the correct PHP settings are enabled php.enable_mod('mysql', 'pdo_mysql', 'zip', 'gd', 'ldap', 'iconv', 'openssl', 'xcache', 'posix') # Make sure xcache has the correct settings, otherwise ownCloud breaks with open('/etc/php/conf.d/xcache.ini', 'w') as f: f.writelines(['extension=xcache.so\n', 'xcache.size=64M\n', 'xcache.var_size=64M\n', 'xcache.admin.enable_auth = Off\n', 'xcache.admin.user = "******"\n', 'xcache.admin.pass = "******"\n']) php.change_setting("always_populate_raw_post_data", "-1") mydir = os.getcwd() os.chdir(self.path) s = shell("sudo -u http php index.php") if s["code"] != 0: raise Exception("ownCloud database population failed") s = shell("sudo -u http php occ app:enable user_ldap") if s["code"] != 0: raise Exception("ownCloud LDAP configuration failed") os.chdir(mydir) ldap_sql = ("REPLACE INTO appconfig (appid, configkey, configvalue) VALUES" "('user_ldap', 'ldap_uuid_attribute', 'auto')," "('user_ldap', 'ldap_host', 'localhost')," "('user_ldap', 'ldap_port', '389')," "('user_ldap', 'ldap_base', 'dc=arkos-servers,dc=org')," "('user_ldap', 'ldap_base_users', 'dc=arkos-servers,dc=org')," "('user_ldap', 'ldap_base_groups', 'dc=arkos-servers,dc=org')," "('user_ldap', 'ldap_tls', '0')," "('user_ldap', 'ldap_display_name', 'cn')," "('user_ldap', 'ldap_userlist_filter', 'objectClass=mailAccount')," "('user_ldap', 'ldap_group_filter', 'objectClass=posixGroup')," "('user_ldap', 'ldap_group_display_name', 'cn')," "('user_ldap', 'ldap_group_member_assoc_attribute', 'uniqueMember')," "('user_ldap', 'ldap_login_filter', '(&(|(objectclass=posixAccount))(|(uid=%uid)))')," "('user_ldap', 'ldap_quota_attr', 'mailQuota')," "('user_ldap', 'ldap_quota_def', '')," "('user_ldap', 'ldap_email_attr', 'mail')," "('user_ldap', 'ldap_cache_ttl', '600')," "('user_ldap', 'ldap_configuration_active', '1')," "('user_ldap', 'home_folder_naming_rule', '')," "('user_ldap', 'ldap_backup_host', '')," "('user_ldap', 'ldap_dn', '')," "('user_ldap', 'ldap_agent_password', '')," "('user_ldap', 'ldap_backup_port', '')," "('user_ldap', 'ldap_nocase', '')," "('user_ldap', 'ldap_turn_off_cert_check', '')," "('user_ldap', 'ldap_override_main_server', '')," "('user_ldap', 'ldap_attributes_for_user_search', '')," "('user_ldap', 'ldap_attributes_for_group_search', '')," "('user_ldap', 'ldap_expert_username_attr', 'uid')," "('user_ldap', 'ldap_expert_uuid_attr', '');" ) self.db.execute(ldap_sql, commit=True) # TODO set authed user name self.db.execute("INSERT INTO group_user VALUES ('admin','testuser');", commit=True)
def restore(self, backup, data=True, nthread=NotificationThread()):
    """
    Restore an associated arkOS app backup.

    :param Backup backup: backup to restore
    :param bool data: Restore backed up data files too?
    :param NotificationThread nthread: notification thread to use
    :returns: ``Backup``
    :rtype: dict
    """
    nthread.title = "Restoring backup"

    # Trigger pre-restore hook for the app/site
    signals.emit("backups", "pre_restore", self)
    msg = "Running pre-restore for {0}...".format(backup["pid"])
    nthread.update(Notification("info", "Backup", msg))
    self.pre_restore()

    # Extract all files in archive
    sitename = ""
    nthread.update(Notification("info", "Backup", "Extracting files..."))
    with tarfile.open(backup["path"], "r:gz") as t:
        for x in t.getnames():
            if x.startswith("etc/nginx/sites-available"):
                sitename = os.path.basename(x)
        t.extractall("/")

    # If it's a website that had a database, restore DB via SQL file too
    dbpasswd = ""
    if self.ctype == "site" and sitename:
        self.site = websites.get(sitename)
        if not self.site:
            websites.scan()
            self.site = websites.get(sitename)
        meta = configparser.SafeConfigParser()
        meta.read(os.path.join(self.site.path, ".arkos"))
        sql_path = "/{0}.sql".format(sitename)
        if meta.get("website", "dbengine", fallback=None) \
                and os.path.exists(sql_path):
            nthread.update(
                Notification("info", "Backup", "Restoring database..."))
            dbmgr = databases.get_managers(meta.get("website", "dbengine"))
            if databases.get(sitename):
                databases.get(sitename).remove()
            db = dbmgr.add_db(sitename)
            with open(sql_path, "r") as f:
                db.execute(f.read())
            os.unlink(sql_path)
            if dbmgr.meta.database_multiuser:
                dbpasswd = random_string(16)
                dbuser = databases.get_users(sitename)
                if dbuser:
                    dbuser.remove()
                db_user = dbmgr.add_user(sitename, dbpasswd)
                db_user.chperm("grant", db)

    # Trigger post-restore hook for the app/site
    msg = "Running post-restore for {0}...".format(backup["pid"])
    nthread.update(Notification("info", "Backup", msg))
    if self.ctype == "site":
        self.post_restore(self.site, dbpasswd)
        self.site.nginx_enable()
    else:
        self.post_restore()
    signals.emit("backups", "post_restore", self)
    backup["is_ready"] = True
    msg = "{0} restored successfully.".format(backup["pid"])
    nthread.complete(Notification("info", "Backup", msg))
    return backup
def ldap(): """Initialize distribution copy of OpenLDAP.""" paths = ["slapd.conf", "ldap.conf", "base.ldif"] for x in paths: if not os.path.exists(os.path.join("/usr/share/arkos/openldap", x)): raise CLIException( "Template files could not be found. Your installation may " "be corrupted. Please reinstall the `arkos-configs` package.") logger.debug('ctl:init:ldap', 'Stopping daemon: slapd') s = shell("systemctl stop slapd") if s["code"] != 0: raise click.ClickException(s["stderr"].decode()) logger.info('ctl:init:ldap', 'Cleaning up old LDAP database') if os.path.exists("/etc/openldap/slapd.ldif"): os.unlink("/etc/openldap/slapd.ldif") slapdir = "/etc/openldap/slapd.d" for x in os.listdir(slapdir): fpath = os.path.join(slapdir, x) if os.path.isdir(fpath): shutil.rmtree(fpath) else: os.unlink(fpath) logger.info('ctl:init:ldap', 'Installing initial configuration') shutil.copy("/usr/share/arkos/openldap/slapd.conf", "/etc/openldap/slapd.conf") shutil.copy("/usr/share/arkos/openldap/ldap.conf", "/etc/openldap/ldap.conf") if os.path.exists("/usr/share/doc/sudo/schema.OpenLDAP"): shutil.copy("/usr/share/doc/sudo/schema.OpenLDAP", "/etc/openldap/schema/sudo.schema") shutil.copy("/usr/share/arkos/openldap/mailserver.schema", "/etc/openldap/schema/mailserver.schema") shutil.copy("/usr/share/arkos/openldap/samba.schema", "/etc/openldap/schema/samba.schema") logger.info('ctl:init:ldap', 'Setting admin password') ldap_passwd = random_string(16) ldap_pwhash = ldap_sha512_crypt.encrypt(ldap_passwd) with open("/etc/openldap/slapd.conf", "r") as f: data = f.read() data = data.replace("%ROOTPW%", ldap_pwhash) with open("/etc/openldap/slapd.conf", "w") as f: f.write(data) secrets.set("ldap", ldap_passwd) secrets.save() logger.info('ctl:init:ldap', 'Generating new LDAP database') logger.debug('ctl:init:ldap', 'slapadd slapd.conf') shell("slapadd -f /etc/openldap/slapd.conf -F /etc/openldap/slapd.d/", stdin="") logger.debug('ctl:init:ldap', 'slaptest') shell("slaptest -f /etc/openldap/slapd.conf -F /etc/openldap/slapd.d/") luid, lgid = pwd.getpwnam("ldap").pw_uid, grp.getgrnam("ldap").gr_gid for r, d, f in os.walk("/etc/openldap/slapd.d"): for x in d: os.chown(os.path.join(r, x), luid, lgid) for x in f: os.chown(os.path.join(r, x), luid, lgid) logger.debug('ctl:init:ldap', 'slapindex') shell("slapindex") logger.debug('ctl:init:ldap', 'slapadd base.ldif') shell("slapadd -l /usr/share/arkos/openldap/base.ldif") for r, d, f in os.walk("/var/lib/openldap/openldap-data"): for x in d: os.chown(os.path.join(r, x), luid, lgid) for x in f: os.chown(os.path.join(r, x), luid, lgid) logger.debug('ctl:init:ldap', 'Restarting daemon: slapd') shell("systemctl enable slapd") shell("systemctl restart slapd") logger.success('ctl:init:ldap', 'Complete')