def db_cr(app):
    """Yield a database handle backed by a freshly migrated database.

    Generator fixture: creates the schema via migrate(), yields the
    project-level ``db`` object, then drops the database on teardown.
    """
    dsn = getConfig().SQLALCHEMY_DATABASE_URI
    # Pre-condition: the target database must not exist yet;
    # migrate() is expected to create it from scratch.
    assert not database_exists(dsn)
    migrate(app)
    yield db
    # Teardown: drop the database and verify it is really gone.
    drop_database(dsn)
    assert not database_exists(dsn)
def main(source_site, dest_site, repos, dest_user=None, dest_token=None,
         source_user=None, source_token=None, temp_dir=None, remote=None,
         timeout=None):
    """Migrate each repository in *repos* from *source_site* to *dest_site*.

    A repo entry may be "name" or "user/name"; in the latter case the
    embedded user overrides *source_user* for that entry. Authentication
    tuples are built only from non-None credentials.
    """
    if not source_site.startswith(HTTPS_START):
        source_site = HTTPS_START + source_site
    if not dest_site.startswith(HTTPS_START):
        dest_site = HTTPS_START + dest_site
    source_auth = migrate.construct_non_none_tuple(source_user, source_token)
    dest_auth = migrate.construct_non_none_tuple(dest_user, dest_token)
    temp_path, temp_existed_before = migrate.try_create_temp_dir(temp_dir)
    for repo in repos:
        user = source_user
        # (removed no-op `dest_user = dest_user` present in the original)
        repo_name = repo
        if "/" in repo:
            split_str = repo.split("/")
            user = split_str[0]
            repo_name = split_str[1]
        if user is None:
            print("A user on the source site must be specified")
            exit()
        if dest_user is None:
            print("A user on the destination site must be specified")
            exit()
        source_repo = REPO_FORMAT.format(source_site, user, repo_name)
        dest_repo = REPO_FORMAT.format(dest_site, dest_user, repo_name)
        # NOTE(review): the caller-supplied temp_dir is forwarded here rather
        # than temp_path returned by try_create_temp_dir — confirm intentional.
        migrate.migrate(source_repo, dest_repo, source_auth=source_auth,
                        dest_auth=dest_auth, temp_dir=temp_dir, remote=remote,
                        timeout=timeout)
    try:
        # Only remove the temp dir if this run created it.
        if not temp_existed_before:
            shutil.rmtree(temp_path)
    except OSError:
        print("An error occurred in cleanup. Exiting")
        exit()
def deploy(target, site): setup_apache() apache('stop') migrate.migrate(target) collect_files() v = publish.make_version(target) v.publish(source='target') apache_config(target, site) apache('start') print "\nDeploy completed successfully." print "To rollback application: rm -r %s/current; mv %s/previous %s/current" % (target, target, target) print "To rollback database: mysql jukebox < %s/previous/db-backup.sql" % target
def deploy(target, site): setup_apache() apache('stop') migrate.migrate(target) collect_files() v = publish.make_version(target) v.publish(source='target') apache_config(target, site) apache('start') print "\nDeploy completed successfully." print "To rollback application: rm -r %s/current; mv %s/previous %s/current" % ( target, target, target) print "To rollback database: mysql jukebox < %s/previous/db-backup.sql" % target
def test_basic(self):
    """Migrate one object into a temp dir and verify the produced layout."""
    # migrate
    with tempfile.TemporaryDirectory() as tmp_dir:
        migrate.migrate(self.pid, tmp_dir)
        # verify
        self.assertEqual(os.listdir(tmp_dir), [self.pid])
        with open(os.path.join(tmp_dir, self.pid, 'inventory.json'), 'rb') as f:
            inventory = json.loads(f.read().decode('utf8'))
        self.assertEqual(list(inventory['versions'].keys()), ['v1'])
        self.assertEqual(len(inventory['manifest'].keys()), 2)
        content_dir = os.path.join(tmp_dir, self.pid, 'v1', 'content')
        # os.listdir() order is filesystem-dependent; compare sorted lists so
        # the test does not fail spuriously on a different filesystem.
        self.assertEqual(sorted(os.listdir(content_dir)), sorted(['RELS-EXT', 'DC']))
    # cleanup
    REPO.api.purgeObject(self.pid)
def __init__(self, remoteShell, domainAdmin="admin", domain=None):
    """Build the uptool command facade.

    Each sub-command (container, create, delete, ...) is wrapped in its own
    helper object sharing the same remote shell and credentials.
    """
    self.remoteShell = remoteShell
    self.uptoolPath = "/opt/quest/bin/uptool"
    self.domainAdmin = domainAdmin
    self.defaultDomain = domain
    # One helper object per uptool sub-command, all sharing shell/credentials.
    self.container = container.container(self.remoteShell, self.domainAdmin, self.defaultDomain)
    self.create = upCreate.create(self.remoteShell, self.domainAdmin, self.defaultDomain)
    self.delete = upDelete.delete(self.remoteShell, self.domainAdmin, self.defaultDomain)
    self.depopulate = depopulate.depopulate(self.remoteShell, self.domainAdmin, self.defaultDomain)
    self.list = upList.list(self.remoteShell, self.domainAdmin, self.defaultDomain)
    self.membership = membership.membership(self.remoteShell, self.domainAdmin, self.defaultDomain)
    self.migrate = migrate.migrate(self.remoteShell, self.domainAdmin, self.defaultDomain)
    self.modify = modify.modify(self.remoteShell, self.domainAdmin, self.defaultDomain)
    self.populate = populate.populate(self.remoteShell, self.domainAdmin, self.defaultDomain)
    # NOTE(review): these isinstance() calls discard their result — they are
    # no-ops at runtime, presumably retained as IDE type hints. Confirm before
    # removing or converting to asserts.
    isinstance(self.container, container.container)
    isinstance(self.create, upCreate.create)
    isinstance(self.delete, upDelete.delete)
    isinstance(self.depopulate, depopulate.depopulate)
    isinstance(self.list, upList.list)
    isinstance(self.membership, membership.membership)
    isinstance(self.migrate, migrate.migrate)
    isinstance(self.modify, modify.modify)
    isinstance(self.populate, populate.populate)
def setUp(self):
    """Rebuild the MySQL `wallets` schema and both ClickHouse transaction tables."""
    self.maxDiff = None
    # Recreate the MySQL schema from migrations.
    with pool as db:
        db.execute_raw("DROP DATABASE IF EXISTS wallets")
        migrate(quiet=True)
        db.execute_raw("USE wallets")
    # Recreate the ClickHouse tables from their migration scripts.
    self.clickhouse = ClickHouse()
    self.clickhouse.execute(
        '''DROP TABLE IF EXISTS `outgoing_transactions`''')
    self.clickhouse.execute(
        '''DROP TABLE IF EXISTS `incoming_transactions`''')
    migration_files = (
        "clickhouse_migrations/01-incoming-transactions.sql",
        "clickhouse_migrations/02-outgoing-transactions.sql",
    )
    for sql_path in migration_files:
        with open(sql_path) as sql_file:
            self.clickhouse.execute("".join(sql_file.readlines()))
def do(self, populations, *args):
    """Run the inter-population migration operator.

    populations: list of population objects (each exposing ``sizes`` and
    ``FitnV`` and supporting index selection / ``+`` concatenation).
    Returns a new list of populations where each one keeps its selected
    natives and receives immigrants from the operator-chosen source.
    (Original comment, translated: populations must be a list of
    population objects.)
    """
    # isinstance is the idiomatic type check (original used `type(...) != list`).
    if not isinstance(populations, list):
        raise RuntimeError('error in Migrate: The populations must be a list. (输入参数populations必须是list类型。)')
    pop_sizes = list(pop.sizes for pop in populations)
    fitness_values = list(pop.FitnV for pop in populations)
    # Call the migration operator: for each population i it returns the
    # indices of individuals that stay (Aborigines), the indices of the
    # incoming individuals (Foreigners) and their source population
    # (FromPlaces).
    [Aborigines, Foreigners, FromPlaces] = migrate(pop_sizes, self.MIGR, self.Structure, self.Select, self.Replacement, fitness_values)
    NewPopulations = []
    for i in range(len(populations)):
        # Rebuild population i from its remaining natives plus immigrants.
        NewPopulations.append((populations[i])[Aborigines[i]] + (populations[FromPlaces[i]])[Foreigners[i]])
    return NewPopulations
def reset(): """ Reset the htmengine database; upon successful completion, the necessary schema are created, but the tables are not populated """ # Make sure we have the latest version of configuration config = Config("application.conf", os.environ.get("APPLICATION_CONFIG_PATH")) dbName = config.get("repository", "db") resetDatabaseSQL = ( "DROP DATABASE IF EXISTS %(database)s; " "CREATE DATABASE %(database)s;" % {"database": dbName}) statements = resetDatabaseSQL.split(";") engine = getUnaffiliatedEngine(config) with engine.connect() as connection: for s in statements: if s.strip(): connection.execute(s) migrate()
def run(self):
    """Migrate ``self.repo`` and record the outcome under the shared lock.

    On success the shared success counter is bumped; on failure the repo
    name is appended to the shared failed-repos record.
    """
    from migrate import migrate
    if migrate(repo=self.repo, source=self.source, dest=self.dest):
        # `with lock:` is equivalent to acquire()/try/finally release(),
        # but cannot leak the lock and reads more clearly.
        with lock:
            set_success_count()
    else:
        with lock:
            set_failed_repos(self.repo['name'])
def create_qs(procId, sourceURL: str):
    """Phase 3: link the processed dataset for *procId* and generate QS output.

    Loads the processed dataframe, enriches the tag context with NER tags
    extracted from the job title, links entities, saves the linking result
    and the final QS CSV, then calls migrate() for the job.
    """
    print("[PROC-{}--[Phase 3]]-- Process Started Creating QS".format(procId))
    namaFile = procId
    df = load_data(namaFile, 'processed')
    metadata = var_settings.job_metadata_dict[procId]
    context = []
    title = ""
    if "tags" in metadata:
        context = metadata["tags"]
    if "title" in metadata:
        title = metadata["title"]
    # enrich context with title
    if len(title) > 1:
        # NOTE(review): looks like a leftover debug print — confirm and remove.
        print("ABCDEFG - " + title)
        tags_from_title = get_ner_context(title)
        context = context + tags_from_title
    # removing duplicate tags (set() does not preserve tag order)
    context = list(set(context))
    metadata["tags"] = context
    var_settings.job_metadata_dict[procId] = metadata
    print(context)
    # Columns that are not entity headers are treated as literal values.
    literal_columns_label = [
        x for x in df.columns
        if x not in var_settings.entityheader_dict[procId]
    ]
    df_mapping = link_data(df, var_settings.protagonist_dict[procId],
                           var_settings.entityheader_dict[procId],
                           var_settings.mapping_dict[procId], context)
    save_linking_result(pd.DataFrame(df_mapping), procId)
    df_final = generate_qs(df_mapping, df, var_settings.protagonist_dict[procId],
                           literal_columns_label, procId, sourceURL)
    res_address = 'data/results/{}'.format(namaFile)
    print("[PROC-{}--[Phase 3]]-- Saving to {}".format(procId, res_address))
    df_final.to_csv(res_address, index=False)
    migrate(procId)
def __init__(self, *args, **kwargs): """ Construct a SkelCL server. """ # Fail if we can't find the path if not fs.isdir(self.LLVM_PATH): io.fatal("Could not find llvm path '{0}'".format(self.LLVM_PATH)) super(Server, self).__init__(*args, **kwargs) io.info("Registered server %s/SkelCLServer ..." % SESSION_NAME) # Setup persistent database. self.db = migrate(Database()) self.db.status_report() # Create an in-memory sample strategy cache. self.strategies = cache.TransientCache()
type=str, dest='migrate', help='migrate old posts(hexo)') parser.add_argument('--fix-summary', dest='fix_summary', help='render new summary from posts.') parser.add_argument('--setup-comment', dest='setup_comment', help='setup comment for blog') args = parser.parse_args() print('Called with arguments: {}'.format(args)) ROOTDIR = par_dir(BASEDIR) POSTSDIR = os.path.join(ROOTDIR, 'posts') if args.migrate: migrate(args.migrate, POSTSDIR) if args.new: title = args.new created = curr_time() pardir = created[:7] # 取年月 2018-02 为父文件夹 metadata = {'created': created, 'title': title} if args.setup_comment: label_ = COMMENT_PREFIX + '_' + created gh_client = GithubClient(REPO) gh_client.create_label(label_) source_md = '[{title}](../blob/{branch}/posts/{pardir}/{created}.md)'.format( title=title, branch=BRANCH, pardir=pardir, created=created) body = source_md + ' ' + COMMENT_BODY labels = [COMMENT_PREFIX, label_] issue_title = '[{}] {}'.format(COMMENT_PREFIX, title)
def test_obj_already_exists(self):
    """migrate() must raise MigrationError when the target dir already exists."""
    with tempfile.TemporaryDirectory() as scratch:
        # Pre-create the destination directory for this pid to force the clash.
        os.mkdir(os.path.join(scratch, self.pid))
        with self.assertRaises(migrate.MigrationError):
            migrate.migrate(self.pid, scratch)
import os
from pathlib import Path  # bug fix: Path was used below but never imported

import cfg
from migrate import migrate
import utils

# Seed the database from the bundled template on first run.
if not Path(cfg.DB_PATH).exists():
    with open(cfg.DB_PATH, 'wb') as fp:
        with open(cfg.DB_TEMPLATE_PATH, 'rb') as source:
            utils.copy_file(source, fp)

if not Path(cfg.DB_PATH).exists():
    print("Database file '{}' does not exist!".format(cfg.DB_PATH))
    exit(1)

# Apply pending schema migrations.
migrate(cfg.DB_PATH)

# Ensure runtime directories exist; best-effort (already-present is fine).
for dir in [cfg.SESSIONS_DATA_DIR, cfg.SESSIONS_LOCK_DIR, cfg.RECORDINGS_PATH]:
    try:
        os.mkdir(dir)
    except OSError:
        # Narrowed from a bare `except:` — mkdir failures are OSError.
        pass

def run_website():
    # Importing the module starts the website (import-time side effect).
    import website

def run_discord_bot():
    # Importing the module starts the bot, gated by config.
    if cfg.ENABLE_DISCORD_BOT:
        import discord_bot
def setUp(self):
    """Recreate the `users` database from migrations before each test."""
    self.maxDiff = None
    # Drop, re-migrate and select the schema within one pooled connection.
    with pool as db:
        db.execute_raw("DROP DATABASE IF EXISTS users")
        migrate(quiet=True)
        db.execute_raw("USE users")
help='create new post with given title.') parser.add_argument('--update', nargs='*', dest='update', help='update post with given title in post and summary.') parser.add_argument('--migrate', type=str, dest='migrate', help='migrate old posts(hexo)') parser.add_argument('--fix-summary', dest='fix_summary', help='render new summary from posts.') parser.add_argument('--setup-comment', dest='setup_comment', help='setup comment for blog') args = parser.parse_args() print('Called with arguments: {}'.format(args)) ROOTDIR = par_dir(BASEDIR) POSTSDIR = os.path.join(ROOTDIR, 'posts') if args.migrate: migrate(args.migrate, POSTSDIR) if args.new: title = args.new created = curr_time() pardir = created[:7] # 取年月 2018-02 为父文件夹 metadata = {'created': created, 'title': title} if args.setup_comment: label_ = COMMENT_PREFIX + '_' + created gh_client = GithubClient(REPO) gh_client.create_label(label_) source_md = '[{title}](../blob/{branch}/posts/{pardir}/{created}.md)'.format( title=title, branch=BRANCH, pardir=pardir, created=created ) body = source_md + ' ' + COMMENT_BODY labels = [COMMENT_PREFIX, label_]
import os

import bot
import migrate

if __name__ == '__main__':
    # Run pending migrations before the bot starts serving.
    migrate.migrate()
    bot.bot.run(os.getenv("TOKEN"))
@cross_origin(app)
def upload_acts_api(request):
    ''' Upload acts. '''
    # Storage id is required; when it is missing the handler returns None
    # (falls through to the framework's default response).
    storage_id = request.args.get('storage')
    if storage_id is not None:
        return json(upload_acts(request.json, storage_id))

@app.route('/api/v01/storekeepers', methods=['GET', 'POST', 'DELETE', 'OPTIONS'])
@cross_origin(app)
def get_or_post_storekeepers_api(request):
    # Dispatch on HTTP verb; OPTIONS (CORS preflight) falls through to None.
    if request.method == 'GET':
        return json(get_storekeepers())
    elif request.method == 'POST':
        return json(append_storekeeper(request.json))
    elif request.method == 'DELETE':
        storekeeper_name = request.args.get('storekeeper_name')
        if storekeeper_name is not None:
            return json(delete_storekeeper(storekeeper_name))

if __name__ == '__main__':
    # change_storekeeper()
    # Run DB maintenance before serving requests.
    migrate()
    update_downloads_view()
    upgrade_act_dates()
    print('Start server')
    app.run(host=settings.HOST['address'], port=settings.HOST['port'])
def main():
    """Entry point: run the database migration."""
    migrate.migrate()
def __init__(self, pathSettings, timeoutSaveSettingsTime):
    """Load (or create) the settings XML file and publish it on D-Bus.

    pathSettings: directory containing the settings file; must exist.
    timeoutSaveSettingsTime: delay used when scheduling deferred saves.
    An invalid settings file is removed and recreated empty.
    """
    # set the settings path
    self.fileSettings = pathSettings + self.fileSettings
    self.newFileSettings = self.fileSettings + self.newFileExtension
    self.timeoutSaveSettingsTime = timeoutSaveSettingsTime
    self.timeoutSaveSettingsEventId = None
    self.rootGroup = None
    self.settingsGroup = None
    # Print the logscript version
    logging.info('Localsettings version is: 0x%04x' % version)
    if not path.isdir(pathSettings):
        print('Error path %s does not exist!' % pathSettings)
        sys.exit(errno.ENOENT)
    if path.isfile(self.fileSettings):
        # Try to validate the settings file.
        try:
            tree = etree.parse(self.fileSettings)
            root = tree.getroot()
            # NOTE: there used to be a 1.0 version once upon a time an no version at all
            # in really old version. Since it is easier to compare integers only use the
            # major part.
            loadedVersionTxt = tree.xpath("string(/Settings/@version)") or "1"
            loadedVersion = [int(i) for i in loadedVersionTxt.split('.')][0]
            # Upgrade the in-memory tree from the loaded version if needed.
            migrate.migrate(self, tree, loadedVersion)
            logging.info('Settings file %s validated' % self.fileSettings)
            if loadedVersionTxt != settingsVersion:
                print("Updating version to " + settingsVersion)
                root.set(settingsTag, settingsVersion)
                self.save(tree)
        except Exception as e:
            # Any parse/migration failure: discard the corrupt file so an
            # empty one is recreated below.
            print(e)
            logging.error('Settings file %s invalid' % self.fileSettings)
            remove(self.fileSettings)
            logging.error('%s removed' % self.fileSettings)
    # check if settings file is present, if not exit create a "empty" settings file.
    if not path.isfile(self.fileSettings):
        logging.warning('Settings file %s not found' % self.fileSettings)
        root = etree.Element(settingsRootName)
        root.set(settingsTag, settingsVersion)
        tree = etree.ElementTree(root)
        self.save(tree)
        logging.warning('Created settings file %s' % self.fileSettings)
    # connect to the SessionBus if there is one. System otherwise
    bus = dbus.SessionBus() if 'DBUS_SESSION_BUS_ADDRESS' in environ else dbus.SystemBus()
    busName = dbus.service.BusName(self.dbusName, bus)
    self.rootGroup = GroupObject(busName, "/", None, removable = False)
    self.settingsGroup = self.rootGroup.createGroups("/Settings")
    self.settingsGroup._removable = False
    devices = DevicesGroup(busName, "/Settings/Devices", self.settingsGroup, removable = False)
    self.settingsGroup.addGroup("Devices", devices)
    # Populate the D-Bus object tree from the settings file.
    parseXmlFile(self.fileSettings, self.rootGroup)