def main():
    """Back up the sites listed in the colon-separated SITES env var.

    Falls back to every installed site when SITES is unset. Set WITH_FILES
    to any non-empty value to include public/private file archives.
    """
    installed_sites = ":".join(get_sites())
    sites = os.environ.get("SITES", installed_sites).split(":")
    # bool() replaces the redundant ``True if ... else False`` ternary
    with_files = bool(os.environ.get("WITH_FILES"))
    backup(sites, with_files)
    exit(0)
def main():
    """Upload the latest backup artifacts of every site to S3.

    For each site this pushes the database dump, site_config.json and (when
    present) the public/private file archives, then prunes old backups down
    to BACKUP_LIMIT (default '3'). Removed the dead ``details = dict()``
    pre-initialization (immediately overwritten inside the loop).
    """
    sites = get_sites()
    conn, bucket = get_s3_config()
    # Hoisted: BUCKET_DIR is invariant across all sites.
    bucket_dir = os.environ.get('BUCKET_DIR')

    for site in sites:
        details = get_backup_details(site)

        db_file = details.get('database', {}).get('file_path')
        folder = bucket_dir + '/' + site + '/'
        if db_file:
            # First 15 chars of the dump's basename act as a per-backup
            # (timestamp-prefixed) folder name.
            folder = bucket_dir + '/' + site + '/' + os.path.basename(db_file)[:15] + '/'
            upload_file_to_s3(db_file, folder, conn, bucket)

        # Archive site_config.json (falls back to the live copy when the
        # backup did not include one). NOTE: intentionally reuses whichever
        # folder was last set above, matching the original behavior.
        site_config_file = details.get('site_config', {}).get('file_path')
        if not site_config_file:
            site_config_file = os.path.join(os.getcwd(), site, 'site_config.json')
        upload_file_to_s3(site_config_file, folder, conn, bucket)

        public_files = details.get('public_files', {}).get('file_path')
        if public_files:
            folder = bucket_dir + '/' + site + '/' + os.path.basename(public_files)[:15] + '/'
            upload_file_to_s3(public_files, folder, conn, bucket)

        private_files = details.get('private_files', {}).get('file_path')
        if private_files:
            folder = bucket_dir + '/' + site + '/' + os.path.basename(private_files)[:15] + '/'
            upload_file_to_s3(private_files, folder, conn, bucket)

        delete_old_backups(os.environ.get('BACKUP_LIMIT', '3'), bucket, site)

    print('push-backup complete')
    exit(0)
def get_required_queues(app, prefix=''):
    """Return the Celery queue names this worker should consume.

    One queue per installed site (optionally prefixed); workers without a
    prefix additionally listen on the app's default queue.
    """
    queues = ['{}{}'.format(prefix, site) for site in get_sites()]
    if not prefix:
        # default queue only for shortjob workers
        queues.append(app.conf['CELERY_DEFAULT_QUEUE'])
    return queues
def schedule_all_tasks():
    """Seed scheduled tasks for every installed site, then idle forever."""
    with frappe.init_site():
        for current_site in get_sites():
            get_all_tasks_site(current_site)
    # Keep the process alive; scheduling itself happened above.
    while True:
        time.sleep(10)
def main():
    """Back up the SITES-selected sites, then drop the redis connection."""
    all_sites = ":".join(get_sites())
    target_sites = os.environ.get("SITES", all_sites).split(":")
    include_files = cint(os.environ.get("WITH_FILES"))
    backup(target_sites, include_files)
    # Release pooled redis connections before the process exits.
    if frappe.redis_server:
        frappe.redis_server.connection_pool.disconnect()
    exit(0)
def enqueue_events_for_all_sites():
    '''Loop through sites and enqueue events that are not already queued'''
    with frappe.init_site():
        jobs_per_site = get_jobs()
        sites = get_sites()

    for site in sites:
        try:
            enqueue_events_for_site(site=site, queued_jobs=jobs_per_site[site])
        except Exception:
            # narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
            # still propagate; it should try to enqueue other sites
            print(frappe.get_traceback())
def enqueue_events_for_all_sites():
    '''Loop through sites and enqueue events that are not already queued'''
    with frappe.init_site():
        jobs_per_site = get_jobs()
        sites = get_sites()

    for site in sites:
        try:
            enqueue_events_for_site(site=site, queued_jobs=jobs_per_site[site])
        except Exception:
            # fixed: Python-2 ``print frappe.get_traceback()`` statement, and
            # narrowed the bare ``except:``; it should try to enqueue other sites
            print(frappe.get_traceback())
def enqueue_scheduler_events():
    """Queue the per-site event-enqueue task, guarded by a file lock."""
    for site in get_sites():
        lock_path = os.path.join(site, 'locks', 'enqueue.lock')
        try:
            # Presumably a fresh lock (< 30 min) means another worker already
            # queued this site this cycle -- skip it.
            if check_lock(lock_path, timeout=1800):
                continue
            touch_file(lock_path)
            enqueue_events_for_site.delay(site=site)
        except LockTimeoutError:
            # Stale lock: clear it so the next pass can proceed.
            os.remove(lock_path)
def console(site):
    "Start ipython console for a site"
    # Bail out early when the requested site is not on this bench.
    if site not in get_sites():
        print("Site {0} does not exist on the current bench".format(site))
        return
    frappe.init(site=site)
    frappe.connect()
    frappe.local.lang = frappe.db.get_default("lang")
    all_apps = frappe.get_installed_apps()
    for app in all_apps:
        # NOTE(review): writing into locals() inside a function is not
        # guaranteed to create real local variables in CPython; this relies on
        # IPython.embed() reading the calling frame's f_locals -- confirm the
        # app modules actually appear in the shell namespace.
        locals()[app] = __import__(app)
    print("Apps in this namespace:\n{}".format(", ".join(all_apps)))
    # Drop into an interactive shell with the banner suppressed.
    IPython.embed(display_banner="", header="")
def test_schedule():
    """Debug helper: print the site whose database is ``db_commeta``."""
    jobs_per_site = {}
    if os.path.exists(os.path.join('.', '.restarting')):
        # Don't add task to queue if webserver is in restart mode
        return
    with frappe.init_site():
        jobs_per_site = get_jobs()
        sites = get_sites()
    #print(jobs_per_site)
    for site in sites:
        conf = frappe.get_site_config(
            "/home/frappe/frappe-bench/sites",
            "/home/frappe/frappe-bench/sites/" + site)
        if conf.db_name != "db_commeta":
            continue
        print(site)
        print(conf.db_name)
def enqueue_events_for_site(site):
    """Initialize and connect to *site*, then enqueue its scheduler events.

    Skips sites not on this bench and sites in maintenance mode. Logs and
    re-raises any failure; always tears the frappe context down.

    Removed debug leftovers: a Python-2 ``print dir(...)`` statement and a
    hard-coded site override that made the ``site`` argument dead.
    """
    try:
        if site in get_sites():
            frappe.init(site=site)
            if frappe.local.conf.maintenance_mode:
                return
            frappe.connect(site=site)
            enqueue_events(site)
    except Exception:
        task_logger.error('Exception in Enqueue Events for Site {0}'.format(site))
        raise
    finally:
        frappe.destroy()
def enqueue_events_for_all_sites():
    '''Loop through sites and enqueue events that are not already queued'''
    if os.path.exists(os.path.join('.', '.restarting')):
        # Don't add task to queue if webserver is in restart mode
        return
    with frappe.init_site():
        sites = get_sites()

    for site in sites:
        try:
            enqueue_events_for_site(site=site)
        except Exception:
            # narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
            # still propagate; it should try to enqueue other sites
            print(frappe.get_traceback())
def enqueue_events_for_site(site):
    """Initialize and connect to *site*, then enqueue its scheduler events.

    Skips sites not on this bench and sites in maintenance mode. Logs and
    re-raises any failure; always tears the frappe context down.

    Removed debug leftovers: a Python-2 ``print dir(...)`` statement and a
    hard-coded site override that made the ``site`` argument dead.
    """
    try:
        if site in get_sites():
            frappe.init(site=site)
            if frappe.local.conf.maintenance_mode:
                return
            frappe.connect(site=site)
            enqueue_events(site)
    except Exception:
        task_logger.error('Exception in Enqueue Events for Site {0}'.format(site))
        raise
    finally:
        frappe.destroy()
def enqueue_events_for_all_sites():
    '''Loop through sites and enqueue events that are not already queued'''
    # Webserver restart in progress: skip this scheduler tick entirely.
    if os.path.exists(os.path.join('.', '.restarting')):
        return
    with frappe.init_site():
        sites = get_sites()

    for site in sites:
        try:
            enqueue_events_for_site(site=site)
        except Exception as err:
            # One failing site must not block the rest.
            print(err.__class__, 'Failed to enqueue events for site: {}'.format(site))
def main():
    """Restore the latest backup for every site found in the backup dir.

    Pulls backups from S3 when none exist locally. Sites already on the
    bench are overwritten in place; unknown sites get a freshly generated
    config first. Fix: the three restore calls were duplicated verbatim in
    both branches -- now hoisted after the branch (print order preserved).
    """
    backup_dir = get_backup_dir()

    # Nothing local yet: hydrate the backup directory from S3 first.
    if len(list_directories(backup_dir)) == 0:
        pull_backup_from_s3()

    for site in list_directories(backup_dir):
        site_slug = site.replace('.', '_')
        backups = [
            datetime.datetime.strptime(backup, DATE_FORMAT)
            for backup in list_directories(os.path.join(backup_dir, site))
        ]
        # Pick the most recent timestamped backup folder.
        latest_backup = max(backups).strftime(DATE_FORMAT)
        files_base = os.path.join(backup_dir, site, latest_backup, '')
        files_base += latest_backup + '-' + site_slug
        site_config_path = files_base + '-site_config_backup.json'
        if not os.path.exists(site_config_path):
            site_config_path = os.path.join(backup_dir, site, 'site_config.json')

        if site in get_sites():
            print('Overwrite site {}'.format(site))
        else:
            # New site: generate credentials and a site config, then create
            # the directory structure before restoring into it.
            site_config = get_conf_params(
                db_name='_' + hashlib.sha1(site.encode()).hexdigest()[:16],
                db_password=random_string(16))
            frappe.local.site = site
            frappe.local.sites_path = os.getcwd()
            frappe.local.site_path = os.getcwd() + '/' + site
            make_conf(
                db_name=site_config.get('db_name'),
                db_password=site_config.get('db_password'),
            )
            make_site_dirs()
            print('Create site {}'.format(site))

        restore_database(files_base, site_config_path, site)
        restore_private_files(files_base)
        restore_files(files_base)

    if frappe.redis_server:
        frappe.redis_server.connection_pool.disconnect()
    exit(0)
def main():
    """Restore the latest backup for every site found in the backup dir.

    Pulls backups from S3 when none exist locally. Existing sites are
    restored in place; unknown sites are created first (requires
    MYSQL_ROOT_PASSWORD). Fix: the three restore calls were duplicated
    verbatim in both branches -- the guard is inverted and the restores
    hoisted after it.
    """
    backup_dir = get_backup_dir()

    # Nothing local yet: hydrate the backup directory from S3 first.
    if len(list_directories(backup_dir)) == 0:
        pull_backup_from_s3()

    for site in list_directories(backup_dir):
        site_slug = site.replace('.', '_')
        backups = [
            datetime.datetime.strptime(backup, DATE_FORMAT)
            for backup in list_directories(os.path.join(backup_dir, site))
        ]
        # Pick the most recent timestamped backup folder.
        latest_backup = max(backups).strftime(DATE_FORMAT)
        files_base = os.path.join(backup_dir, site, latest_backup, '')
        files_base += latest_backup + '-' + site_slug
        site_config_path = files_base + '-site_config_backup.json'
        if not os.path.exists(site_config_path):
            site_config_path = os.path.join(backup_dir, site, 'site_config.json')

        if site not in get_sites():
            # New site: abort unless DB root credentials are available
            # (presumably consumed downstream when creating the database --
            # the value itself is only checked here).
            mariadb_root_password = get_password('MYSQL_ROOT_PASSWORD')
            if not mariadb_root_password:
                print('Variable MYSQL_ROOT_PASSWORD not set')
                exit(1)
            site_config = get_conf_params(
                db_name='_' + hashlib.sha1(site.encode()).hexdigest()[:16],
                db_password=random_string(16))
            frappe.local.site = site
            frappe.local.sites_path = os.getcwd()
            frappe.local.site_path = os.getcwd() + '/' + site
            make_conf(
                db_name=site_config.get('db_name'),
                db_password=site_config.get('db_password'),
            )
            make_site_dirs()

        restore_database(files_base, site_config_path, site)
        restore_private_files(files_base)
        restore_files(files_base)

    exit(0)
def enqueue_events_for_all_sites():
    '''Loop through sites and enqueue events that are not already queued'''
    if os.path.exists(os.path.join('.', '.restarting')):
        # Don't add task to queue if webserver is in restart mode
        return
    with frappe.init_site():
        jobs_per_site = get_jobs()
        sites = get_sites()

    for site in sites:
        try:
            enqueue_events_for_site(site=site, queued_jobs=jobs_per_site[site])
        except Exception:
            # narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
            # still propagate; it should try to enqueue other sites
            print(frappe.get_traceback())
def migrate_sites(maintenance_mode=False):
    """Run migrations for the sites in SITES (default: all installed).

    Maintenance mode can be forced via the argument or the MAINTENANCE_MODE
    env var. Fix: a failing ``migrate()`` previously propagated before
    ``set_maintenance_mode(False)`` ran, leaving every site stuck in
    maintenance mode -- the disable call is now in a ``finally``.
    """
    installed_sites = ":".join(get_sites())
    sites = os.environ.get("SITES", installed_sites).split(":")
    if not maintenance_mode:
        maintenance_mode = cint(os.environ.get("MAINTENANCE_MODE"))
    if maintenance_mode:
        set_maintenance_mode(True)
    try:
        for site in sites:
            print('Migrating', site)
            frappe.init(site=site)
            frappe.connect()
            try:
                migrate()
            finally:
                frappe.destroy()
    finally:
        # Disable maintenance mode after migration (even when it fails)
        set_maintenance_mode(False)
def auto_deploy(context, app, migrate=False, restart=False, remote='upstream'):
    '''Pull and migrate sites that have new version'''
    from frappe.utils.gitutils import get_app_branch
    from frappe.utils import get_sites

    branch = get_app_branch(app)
    app_path = frappe.get_app_path(app)

    # fetch
    subprocess.check_output(['git', 'fetch', remote, branch], cwd=app_path)

    # get diff; empty output means nothing new on the remote branch
    diff = subprocess.check_output(
        ['git', 'diff', '{0}..{1}/{0}'.format(branch, remote)], cwd=app_path)
    if not diff:
        print('No Updates')
        return

    print('Updates found for {0}'.format(app))
    if app == 'frappe':
        # run bench update
        import shlex
        subprocess.check_output(shlex.split('bench update --no-backup'), cwd='..')
        return

    updated = False
    subprocess.check_output(['git', 'pull', '--rebase', remote, branch], cwd=app_path)
    # find all sites with that app
    for site in get_sites():
        frappe.init(site)
        if app in frappe.get_installed_apps():
            print('Updating {0}'.format(site))
            updated = True
            subprocess.check_output(['bench', '--site', site, 'clear-cache'], cwd='..')
            if migrate:
                subprocess.check_output(['bench', '--site', site, 'migrate'], cwd='..')
        frappe.destroy()

    if updated or restart:
        subprocess.check_output(['bench', 'restart'], cwd='..')
def auto_deploy(context, app, migrate=False, restart=False, remote='upstream'):
    '''Pull and migrate sites that have new version'''
    from frappe.utils.gitutils import get_app_branch
    from frappe.utils import get_sites

    branch = get_app_branch(app)
    app_path = frappe.get_app_path(app)

    # fetch
    subprocess.check_output(['git', 'fetch', remote, branch], cwd=app_path)

    # get diff -- fix: honour the ``remote`` argument instead of the
    # hard-coded 'upstream' (the parameter was otherwise dead here and below)
    if subprocess.check_output(
            ['git', 'diff', '{0}..{1}/{0}'.format(branch, remote)], cwd=app_path):
        print('Updates found for {0}'.format(app))
        if app == 'frappe':
            # run bench update
            subprocess.check_output(['bench', 'update', '--no-backup'], cwd='..')
        else:
            updated = False
            subprocess.check_output(
                ['git', 'pull', '--rebase', remote, branch], cwd=app_path)
            # find all sites with that app
            for site in get_sites():
                frappe.init(site)
                if app in frappe.get_installed_apps():
                    print('Updating {0}'.format(site))
                    updated = True
                    subprocess.check_output(
                        ['bench', '--site', site, 'clear-cache'], cwd='..')
                    if migrate:
                        subprocess.check_output(
                            ['bench', '--site', site, 'migrate'], cwd='..')
                frappe.destroy()
            # fix: ``updated and restart`` never restarted after a successful
            # update when restart=False (the default); ``or`` matches the
            # newer variant of this command in this file
            if updated or restart:
                subprocess.check_output(['bench', 'restart'], cwd='..')
    else:
        print('No Updates')
def get_required_queues(app, prefix=''):
    """Return Celery queue names: one per site plus the app's default queue."""
    queues = ['{}{}'.format(prefix, site) for site in get_sites()]
    queues.append(app.conf['CELERY_DEFAULT_QUEUE'])
    return queues
def enqueue_scheduler_events():
    """Fire the async event-enqueue task once for every installed site."""
    for current_site in get_sites():
        enqueue_events_for_site.delay(site=current_site)
import os, frappe, compileall, re
from frappe.utils.backups import scheduled_backup
from frappe.utils import now
from frappe.utils import get_sites


def backup(sites, with_files=False):
    """Take a forced scheduled backup of each site in *sites*.

    with_files: also archive the public and private file folders.
    Fix: ``frappe.destroy()`` is now in a ``finally`` so the connection is
    torn down even when one site's backup raises.
    """
    for site in sites:
        frappe.init(site)
        frappe.connect()
        try:
            odb = scheduled_backup(
                ignore_files=not with_files,
                backup_path_db=None,
                backup_path_files=None,
                backup_path_private_files=None,
                force=True,
            )
            print("database backup taken -", odb.backup_path_db, "- on", now())
            if with_files:
                print("files backup taken -", odb.backup_path_files, "- on", now())
                print("private files backup taken -", odb.backup_path_private_files, "- on", now())
        finally:
            frappe.destroy()


installed_sites = ":".join(get_sites())
sites = os.environ.get("SITES", installed_sites).split(":")
# bool() replaces the redundant ``True if ... else False`` ternary
with_files = bool(os.environ.get("WITH_FILES"))
backup(sites, with_files)
exit(0)
Args: module (str, optional): Name of your logger and consequently your log file. Defaults to None. with_more_info (bool, optional): Will log the form dict using the SiteContextFilter. Defaults to False. allow_site ((str, bool), optional): Pass site name to explicitly log under it's logs. If True and unspecified, guesses which site the logs would be saved under. Defaults to True. filter (function, optional): Add a filter function for your logger. Defaults to None. max_size (int, optional): Max file size of each log file in bytes. Defaults to 100_000. file_count (int, optional): Max count of log files to be retained via Log Rotation. Defaults to 20. Returns: <class 'logging.Logger'>: Returns a Python logger object with Site and Bench level logging capabilities. """ if allow_site is True: site = getattr(frappe.local, "site", None) elif allow_site in get_sites(): site = allow_site else: site = False logger_name = "{0}-{1}".format(module, site or "all") try: return frappe.loggers[logger_name] except KeyError: pass if not module: module = "frappe" with_more_info = True