def datetime():
    if request.method == "PUT":
        systemtime.set_datetime()
    return jsonify(datetime={
        "datetime": systemtime.get_iso_time(),
        "offset": systemtime.verify_time(False, False)
    })
@property
def as_dict(self):
    """Return shared file metadata as dict."""
    return {
        "id": self.id,
        "path": self.path,
        "expires": self.expires != 0,
        "expires_at": systemtime.get_iso_time(self.expires, "unix")
        if self.expires != 0 else "",
        "fetch_count": self.fetch_count
    }
def backup(self, data=True, backup_location=""):
    if not backup_location:
        backup_location = config.get("backups", "location",
                                     "/var/lib/arkos/backups")
    if self.ctype == "site":
        self.version = self.site.meta.version
    signals.emit("backups", "pre_backup", self)
    # Trigger the pre-backup hook for the app/site
    if self.ctype == "site":
        self.pre_backup(self.site)
    else:
        self.pre_backup()
    # Create backup directory in storage
    backup_dir = os.path.join(backup_location, self.id)
    try:
        os.makedirs(backup_dir)
    except OSError:
        # Directory already exists
        pass
    # Gather config and data file paths to archive
    myconfig = self._get_config()
    data = self._get_data() if data else []
    timestamp = systemtime.get_serial_time()
    isotime = systemtime.get_iso_time(timestamp)
    path = os.path.join(backup_dir, "%s-%s.tar.gz" % (self.id, timestamp))
    # Zip up the gathered file paths
    with tarfile.open(path, "w:gz") as t:
        for f in myconfig + data:
            for x in glob.glob(f):
                t.add(x)
        if self.ctype == "site" and self.site.db:
            # Add a dump of the site database; the tar header size
            # must match the dump's full length
            dbsql = StringIO.StringIO(self.site.db.dump())
            dinfo = tarfile.TarInfo(name="/%s.sql" % self.site.id)
            dinfo.size = len(dbsql.getvalue())
            t.addfile(tarinfo=dinfo, fileobj=dbsql)
    # Create a metadata file to track information
    info = {"pid": self.id, "type": self.ctype, "icon": self.icon,
            "version": self.version, "time": isotime}
    if self.site:
        info["site_type"] = self.site.meta.id
    meta_name = "%s-%s.meta" % (self.id, timestamp)
    with open(os.path.join(backup_dir, meta_name), "w") as f:
        f.write(json.dumps(info))
    # Trigger post-backup hook for the app/site
    if self.ctype == "site":
        self.post_backup(self.site)
    else:
        self.post_backup()
    signals.emit("backups", "post_backup", self)
    return {"id": self.id + "/" + timestamp, "pid": self.id, "path": path,
            "icon": self.icon, "type": self.ctype, "time": isotime,
            "version": self.version, "size": os.path.getsize(path),
            "site_type": self.site.meta.id if self.site else None,
            "is_ready": True}
def get(backup_location=""):
    """
    Return a list of backup dicts from the backup directory.

    ``Backup`` dicts are in the following format (example):

        {
            "icon": "globe",
            "id": "testghost/20150317124530",
            "is_ready": true,
            "path": "/var/lib/arkos/backups/testghost/testghost-xxx.tar.gz",
            "pid": "testghost",
            "site_type": "ghost",
            "size": 14612219,
            "time": "2015-03-17T12:45:30-04:00",
            "type": "site",
            "version": "0.5.10-1"
        }

    :param str backup_location: Location to scan (instead of arkOS default)
    :returns: backups found
    :rtype: list
    """
    backups = []
    if not backup_location:
        backup_location = config.get("backups", "location")
    if not os.path.exists(backup_location):
        os.makedirs(backup_location)
    for x in glob.glob(os.path.join(backup_location, "*/*.tar.gz")):
        path = x
        name = os.path.basename(x).split("-")[0]
        meta = x.split(".tar.gz")[0] + ".meta"
        stime = x.split("-")[1].split(".tar.gz")[0]
        if not os.path.exists(meta):
            # No metadata file; report what can be read from the path
            data = {"id": name + "/" + stime, "pid": name, "path": path,
                    "icon": None, "type": "Unknown",
                    "time": systemtime.get_iso_time(stime),
                    "version": "Unknown", "size": os.path.getsize(path),
                    "site_type": None, "is_ready": True}
            backups.append(data)
            continue
        with open(meta, "r") as f:
            data = json.loads(f.read())
        data = {"id": "{0}/{1}".format(name, stime), "pid": name,
                "path": path, "icon": data["icon"], "type": data["type"],
                "time": data["time"], "version": data["version"],
                "size": os.path.getsize(path), "is_ready": True,
                "site_type": data.get("site_type", None)}
        backups.append(data)
    return backups
def get(backup_location=""):
    backups = []
    if not backup_location:
        backup_location = config.get("backups", "location",
                                     "/var/lib/arkos/backups")
    if not os.path.exists(backup_location):
        os.makedirs(backup_location)
    for x in glob.glob(os.path.join(backup_location, "*/*.tar.gz")):
        path = x
        name = os.path.basename(x).split("-")[0]
        meta = x.split(".tar.gz")[0] + ".meta"
        stime = x.split("-")[1].split(".tar.gz")[0]
        if not os.path.exists(meta):
            backups.append({"id": name + "/" + stime, "pid": name,
                            "path": path, "icon": None, "type": "Unknown",
                            "time": systemtime.get_iso_time(stime),
                            "version": "Unknown",
                            "size": os.path.getsize(path),
                            "site_type": None, "is_ready": True})
            continue
        with open(meta, "r") as f:
            data = json.loads(f.read())
        backups.append({"id": name + "/" + stime, "pid": name, "path": path,
                        "icon": data["icon"], "type": data["type"],
                        "time": data["time"], "version": data["version"],
                        "size": os.path.getsize(path),
                        "site_type": data.get("site_type", None),
                        "is_ready": True})
    return backups
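# Hedged usage sketch, not from the source: summarize the archives that
# get() discovers. Only keys present in the dicts built above are used;
# the helper name and output format are assumptions.
def print_backups(backup_location=""):
    for b in get(backup_location):
        print("{0}  {1}  {2} bytes".format(b["id"], b["time"], b["size"]))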
def backup(self, data=True, backup_location="", nthread=NotificationThread()):
    """
    Initiate a backup of the associated arkOS app.

    :param bool data: Include specified data files in the backup?
    :param str backup_location: Save output archive to custom path
    :param NotificationThread nthread: notification thread to use
    :returns: ``Backup``
    :rtype: dict
    """
    nthread.title = "Creating a backup"
    if not backup_location:
        backup_location = config.get("backups", "location")
    if self.ctype == "site":
        self.version = self.site.app.version
    signals.emit("backups", "pre_backup", self)
    msg = "Running pre-backup for {0}...".format(self.id)
    nthread.update(Notification("info", "Backup", msg))
    # Trigger the pre-backup hook for the app/site
    if self.ctype == "site":
        self.pre_backup(self.site)
    else:
        self.pre_backup()
    # Create backup directory in storage
    backup_dir = os.path.join(backup_location, self.id)
    try:
        os.makedirs(backup_dir)
    except OSError:
        # Directory already exists
        pass
    # Gather config and data file paths to archive
    myconfig = self._get_config()
    data = self._get_data() if data else []
    timestamp = systemtime.get_serial_time()
    isotime = systemtime.get_iso_time(timestamp)
    archive_name = "{0}-{1}.tar.gz".format(self.id, timestamp)
    path = os.path.join(backup_dir, archive_name)
    # Zip up the gathered file paths
    nthread.update(Notification("info", "Backup", "Creating archive..."))
    with tarfile.open(path, "w:gz") as t:
        for f in myconfig + data:
            for x in glob.glob(f):
                t.add(x)
        if self.ctype == "site" and self.site.db:
            # tarfile needs a bytes stream and an accurate size, so
            # encode the SQL dump before adding it to the archive
            dump = self.site.db.dump().encode("utf-8")
            dbsql = io.BytesIO(dump)
            dinfo = tarfile.TarInfo(name="/{0}.sql".format(self.site.id))
            dinfo.size = len(dump)
            t.addfile(tarinfo=dinfo, fileobj=dbsql)
    # Create a metadata file to track information
    info = {"pid": self.id, "type": self.ctype, "icon": self.icon,
            "version": self.version, "time": isotime}
    if self.site:
        info["site_type"] = self.site.app.id
    filename = "{0}-{1}.meta".format(self.id, timestamp)
    with open(os.path.join(backup_dir, filename), "w") as f:
        f.write(json.dumps(info))
    # Trigger post-backup hook for the app/site
    msg = "Running post-backup for {0}...".format(self.id)
    nthread.update(Notification("info", "Backup", msg))
    if self.ctype == "site":
        self.post_backup(self.site)
    else:
        self.post_backup()
    signals.emit("backups", "post_backup", self)
    msg = "{0} backed up successfully.".format(self.id)
    nthread.complete(Notification("info", "Backup", msg))
    return {"id": "{0}/{1}".format(self.id, timestamp), "pid": self.id,
            "path": path, "icon": self.icon, "type": self.ctype,
            "time": isotime, "version": self.version,
            "size": os.path.getsize(path), "is_ready": True,
            "site_type": self.site.app.id if self.site else None}
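# Hedged usage sketch, not from the source: driving backup() for a
# controller instance. "ctl" and the helper name are hypothetical; the
# keyword arguments and returned keys match backup() above.
def run_backup(ctl):
    nthread = NotificationThread()
    record = ctl.backup(data=True, nthread=nthread)
    print(record["id"], record["path"], record["size"])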
def serialized(self):
    data = self.as_dict
    data["expires_at"] = systemtime.get_iso_time(self.expires, "unix") \
        if self.expires != 0 else ""
    return data
def datetime():
    if request.method == "PUT":
        systemtime.set_datetime()
    return jsonify(datetime={"datetime": systemtime.get_iso_time(),
                             "offset": systemtime.get_offset()})
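# Hedged client-side sketch, not from the source: exercising the
# datetime() handler above with the "requests" library. The host, port,
# and "/api/datetime" route are assumptions; the PUT method and the
# response shape come from the handler itself.
import requests

resp = requests.put("http://localhost:8000/api/datetime")
body = resp.json()["datetime"]
print(body["datetime"], body["offset"])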
def serialized(self):
    """Return serializable shared file metadata as dict."""
    data = self.as_dict
    data["expires_at"] = systemtime.get_iso_time(self.expires, "unix") \
        if self.expires != 0 else ""
    return data
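# Hedged sketch, not from the source: exposing serialized() through a
# Flask route in the style of the datetime() handler above. "app", the
# route path, and the "shares" list are hypothetical stand-ins.
@app.route("/api/shares", methods=["GET"])
def list_shares():
    return jsonify(shares=[s.serialized() for s in shares])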