def add(self, environment):
    # Register a new environment record and persist the collection.
    item = {
        'ref': GeneralHelper.prepare_string(environment.ref_name),
        'name': GeneralHelper.prepare_string(environment.name),
        'path': environment.path,
        'db_user': GeneralHelper.prepare_string(environment.db_user),
        'db_password': GeneralHelper.prepare_string(environment.db_pw)
    }
    self.items.append(item)
    environment.init_env_project_file()
    self.init_directus_env(environment)
    self.write()
def load_projects(self):
    # Read the tab-separated project index for this environment.
    self.projects = []
    with open("data/envs/{ref_name}.txt".format(
            ref_name=self.ref_name)) as handle:
        for line in handle.readlines():
            if line.strip():
                try:
                    data = line.split('\t')
                    item = {
                        'ref': data[0],
                        'database': GeneralHelper.prepare_string(data[1]),
                        'name': GeneralHelper.prepare_string(data[2]),
                    }
                    self.projects.append(item)
                except IndexError:
                    # Skip malformed lines that lack all three columns.
                    pass
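# The layout of data/envs/<ref_name>.txt is not shown in this module; the
# parse above implies one tab-separated line per project in the order
# ref, database, name. A minimal, self-contained sketch of that assumption
# (the helper name and sample values are hypothetical):
def parse_project_line(line):
    """Parse one assumed 'ref<TAB>database<TAB>name' line into a dict."""
    ref, database, name = line.rstrip('\n').split('\t')
    return {'ref': ref, 'database': database, 'name': name}

# Example: parse_project_line("blog\tblog_db\tCompany Blog")
# -> {'ref': 'blog', 'database': 'blog_db', 'name': 'Company Blog'}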
def clear_env(self, ref_name):
    # Remove the environment record and its project index file.
    index = GeneralHelper.get_index_on_dict_value(
        collection=self.items,
        key='ref',
        value=ref_name
    )
    self.items.pop(index)
    self.write()
    os.system('rm data/envs/{ref}.txt'.format(ref=ref_name))
def check_directus_dir():
    # Let the shell helper pick a directory, then read back the chosen path
    # and keep asking until it is not already in use.
    while True:
        os.system("bash shell/directoryFinder.sh")
        with open("data/tmp/directusEnv.txt") as handle:
            path = GeneralHelper.prepare_path(handle.read())
        os.remove("data/tmp/directusEnv.txt")
        if EnvValidations.validate_unique_path(path):
            break
        print("\nX Path not unique, try again...")
    return path
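# EnvValidations.validate_unique_path() is referenced but not defined here.
# Based on the retry loop above it presumably returns True when the chosen
# path is not already registered by another environment. A hypothetical,
# self-contained sketch of that contract (function name and the
# 'known_paths' argument are assumptions):
def validate_unique_path_sketch(path, known_paths):
    """Return True if 'path' is not already used by a registered environment."""
    return path not in known_paths

# Example: validate_unique_path_sketch('/var/www/new', ['/var/www/old']) -> True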
def load(self):
    # Resolve this environment's record from the stored collection.
    self.envs.load()
    index = GeneralHelper.get_index_on_dict_value(
        collection=self.envs.items, key='ref', value=self.ref_name)
    if index >= 0:
        self.name = self.envs.items[index]['name']
        self.path = self.envs.items[index]['path']
        self.db_user = self.envs.items[index]['db_user']
        # Key must match the one written by add().
        self.db_pw = self.envs.items[index]['db_password']
        self.load_projects()
    else:
        raise SystemError(
            'Unknown environment reference: {ref}'.format(ref=self.ref_name))
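# GeneralHelper.get_index_on_dict_value() is not shown in this file. The
# 'index >= 0' check above suggests it returns the position of the first
# matching dict, or -1 when nothing matches. A hypothetical, self-contained
# sketch of that behaviour:
def get_index_on_dict_value_sketch(collection, key, value):
    """Return the index of the first dict whose 'key' equals 'value', else -1."""
    for index, item in enumerate(collection):
        if item.get(key) == value:
            return index
    return -1

# Example: get_index_on_dict_value_sketch([{'ref': 'a'}, {'ref': 'b'}], 'ref', 'b') -> 1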
def delete_project(self, project, keep_db):
    # Drop the project from the index, remove its Directus config,
    # and optionally drop its database.
    index = GeneralHelper.get_index_on_dict_value(
        collection=self.projects, key='ref', value=project.ref_name)
    self.projects.pop(index)
    DirectusController.delete_config(path=self.path, pj_name=project.ref_name)
    if not keep_db:
        env_db = db(self.db_user, self.db_pw)
        env_db.drop_db(project.ref_name)
    self.write()
def create_db_migration(self):
    # Dump all non-blacklisted tables of the project database into a
    # timestamped SQL file under data/migrations/.
    raw_tables = self.get_db_tables()
    tables = [row[0] for row in raw_tables]
    clean_tables = self.remove_blacklist(tables)
    file_name = "{pj_ref}-{db}-{timestamp}".format(
        pj_ref=self.name,
        db=self.database,
        timestamp=GeneralHelper.prepare_string(str(datetime.now())))
    os.system(
        'mysqldump -u {username} --password="{password}" {database} {tables} '
        '> data/migrations/{filename}.sql'.format(
            username=self.username,
            password=self.password,
            database=self.database,
            tables=" ".join(clean_tables),
            filename=file_name))
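# The dump above is assembled as a single shell string, so table names and
# credentials go through shell quoting. As a design note, the same command
# can be expressed as an argument list for subprocess.run, which avoids
# quoting the individual arguments; this is only a sketch of that
# alternative, not the project's implementation (all parameters below are
# placeholders):
import subprocess

def run_mysqldump_sketch(username, password, database, tables, out_path):
    """Run mysqldump with an explicit argument list and write stdout to a file."""
    command = ['mysqldump', '-u', username,
               '--password={0}'.format(password), database] + list(tables)
    with open(out_path, 'w') as out_file:
        subprocess.run(command, stdout=out_file, check=True)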
def __init__(self, ref, name=None, database=None):
    self.ref_name = GeneralHelper.prepare_string(ref)
    if name:
        # Normalize the display name before storing it.
        name = GeneralHelper.prepare_name(name)
    self.name = name
    self.database = database
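# GeneralHelper.prepare_string() and prepare_name() are used throughout but
# not defined here. A hypothetical sketch of the kind of normalization they
# might perform (trim and lower-case the reference, trim the display name);
# the real helpers may differ:
def prepare_string_sketch(value):
    """Normalize a reference string: strip whitespace and lower-case it."""
    return value.strip().lower()

def prepare_name_sketch(value):
    """Normalize a display name: strip surrounding whitespace."""
    return value.strip()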
def get_project(self, ref_name):
    # Look up a project record by its reference name.
    index = GeneralHelper.get_index_on_dict_value(
        collection=self.projects, key='ref', value=ref_name)
    return self.projects[index]
def download_migrations(self):
    # Ask for an output directory, then fetch migrations into it.
    path = GeneralHelper.prepare_path(self.check_output_dir())
    Migration.download_migrations(out_dir=path)