def diff_command(args):
    # Get local table store and its meta state
    domain_info = get_domains(user_dir=args.user_dir).get(args.domain)
    if domain_info is None:
        click.secho("Configuration not found: {}".format(args.domain), fg='red')
        sys.exit(1)
    local_ts = domain_info['table_store']
    local_m1, local_m2 = local_ts.refresh_metadata()

    # Get origin table store meta info
    origin = local_ts.get_table('domain')['origin']
    origin_backend = create_backend(origin)
    origin_ts = origin_backend.load_table_store()
    origin_meta = origin_ts.meta.get()

    local_diff = ("Local store and scratch", local_m1, local_m2, False)
    origin_diff = ("Local and origin", origin_meta, local_m2, args.details)

    for title, m1, m2, details in local_diff, origin_diff:
        diff = diff_meta(m1, m2)

        if diff['identical']:
            print title, "is clean."
        else:
            print title, "are different:"
            print "\tFirst checksum: ", diff['checksum']['first'][:7]
            print "\tSecond checksum:", diff['checksum']['second'][:7]
            if diff['modified_diff']:
                print "\tTime since pull: ", str(diff['modified_diff']).split('.')[0]
            print "\tNew tables:", diff['new_tables']
            print "\tDeleted tables:", diff['deleted_tables']
            print "\tModified tables:", diff['modified_tables']

            if details:
                # Diff origin
                origin_ts = get_store_from_url(origin)
                for table_name in diff['modified_tables']:
                    t1 = local_ts.get_table(table_name)
                    t2 = origin_ts.get_table(table_name)
                    tablediff = diff_tables(t1, t2)
                    print "\nTable diff for", table_name, "\n(first=local, second=origin):"
                    print json.dumps(tablediff, indent=4, sort_keys=True)
def push_command(args):
    domain_info = get_domains(user_dir=args.user_dir).get(args.domain)
    if not domain_info:
        print "Can't push '{}'.".format(args.domain)
        sys.exit(1)

    ts = domain_info['table_store']
    origin = ts.get_table('domain')['origin']
    print "Pushing local config to source", origin
    result = push_to_origin(ts, args.force)
    if not result['pushed']:
        print "Push failed. Reason:", result['reason']
        print "Origin has changed. Use --force to force push."
        if 'time_diff' in result:
            print "Time diff", result['time_diff']
    else:
        print "Config pushed. Reason:", result['reason']
        local_store = create_backend('file://' + domain_info['path'])
        local_store.save_table_store(ts)
def get_default_drift_config_and_source():
    """
    Same as get_default_drift_config but returns a tuple of the table store
    and the source it was loaded from.
    """
    if _sticky_ts:
        return _sticky_ts, 'memory://_dummy'

    url = os.environ.get('DRIFT_CONFIG_URL')
    if url:
        # Enable domain shorthand
        if ':' not in url:
            domains = get_domains()
            domain = domains.get(url)
            if domain:
                return domain['table_store'], 'file://' + domain['path']
            else:
                raise RuntimeError(
                    "No domain named '{}' found on local disk. Available domains: {}."
                    .format(url, ", ".join(domains.keys())))
        b = create_backend(url)
        return b.load_table_store(), url
    else:
        domains = get_domains()
        if len(domains) == 0:
            raise ConfigNotFound(
                "No config found in ~/.drift/config. Use the 'driftconfig init' command to "
                "initialize a local config, or add a reference to the config using the "
                "environment variable 'DRIFT_CONFIG_URL'.")
        elif len(domains) != 1:
            domain_names = ", ".join(domains.keys())
            raise ConfigNotFound(
                "Multiple Drift configurations found in ~/.drift/config.\n"
                "Specify which configuration to use by referencing it in the "
                "'DRIFT_CONFIG_URL' environment variable.\n"
                "Configurations available on local disk: %s." % domain_names)
        domain = domains.values()[0]
        return domain['table_store'], 'file://' + domain['path']
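# Illustrative usage sketch (not part of the module): resolve the active config
# and report where it came from. It assumes DRIFT_CONFIG_URL is set, or exactly
# one domain exists under ~/.drift/config, so the lookup above succeeds. The
# helper name is made up for illustration.
def _example_show_config_source():
    ts, source = get_default_drift_config_and_source()
    domain = ts.get_table('domain')
    print "Using config for domain", domain['domain_name']
    print "Loaded from", source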
def _pull_command(args):
    for domain_name, domain_info in get_domains(user_dir=args.user_dir).items():
        if args.domain and args.domain != domain_name:
            continue

        result = pull_from_origin(
            domain_info['table_store'],
            ignore_if_modified=args.ignore_if_modified,
            force=args.force)

        if not result['pulled']:
            print "Pull failed for {}. Reason: {}".format(domain_name, result['reason'])
            if result['reason'] == 'local_is_modified':
                print "Use --ignore-if-modified to overwrite local changes."
            else:
                print "Use --force to force a pull."
        else:
            if result['reason'] == 'pulled_from_origin':
                local_backend = create_backend('file://' + domain_info['path'])
                local_backend.save_table_store(result['table_store'])
            print "Config for {} pulled. Reason: {}".format(domain_name, result['reason'])
def pull_from_origin(local_ts, ignore_if_modified=False, force=False):
    """
    Pulls the origin table store referenced by 'local_ts'. Returns a dict with
    'pulled' as True or False, a 'reason' string, and, on success, the
    'table_store' to use (either the untouched local one or the one fetched
    from origin).
    """
    origin = local_ts.get_table('domain')['origin']
    origin_backend = create_backend(origin)
    origin_ts = origin_backend.load_table_store()

    old, new = local_ts.refresh_metadata()
    if old != new and not ignore_if_modified:
        return {'pulled': False, 'reason': 'local_is_modified'}

    crc_match = local_ts.meta['checksum'] == origin_ts.meta['checksum']
    if crc_match and not force:
        return {
            'pulled': True,
            'table_store': local_ts,
            'reason': 'pull_skipped_crc_match'
        }

    return {
        'pulled': True,
        'table_store': origin_ts,
        'reason': 'pulled_from_origin'
    }
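# Illustrative sketch of how a caller can use pull_from_origin() and persist the
# result, mirroring _pull_command() above. 'domain_info' is assumed to be an
# entry from get_domains(); the helper name is made up for illustration.
def _example_pull_and_save(domain_info):
    result = pull_from_origin(domain_info['table_store'])
    if not result['pulled']:
        print "Pull failed:", result['reason']
    elif result['reason'] == 'pulled_from_origin':
        # Only write to disk when new data actually came from origin.
        backend = create_backend('file://' + domain_info['path'])
        backend.save_table_store(result['table_store'])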
def post(self):
    tenant_name = g.conf.tenant_name['tenant_name']
    tier_name = g.conf.tier['tier_name']

    # Quick check for tenant state before downloading config
    if g.conf.tenant["state"] != "initializing":
        abort(
            httplib.BAD_REQUEST,
            message="You can only provision tenants which are in state 'initializing'. "
                    "Tenant '%s' is in state '%s'" % (tenant_name, g.conf.tenant["state"]))

    args_per_provisioner = {}
    if request.json:
        for arg in request.json.get("provisioners", {}):
            if "provisioner" not in arg or "arguments" not in arg:
                log.warning("Provisioner argument missing 'provisioner' or 'arguments'")
                continue
            args_per_provisioner[arg["provisioner"]] = arg["arguments"]

    origin = g.conf.domain['origin']
    ts = get_store_from_url(origin)
    conf = get_drift_config(
        ts=ts,
        tenant_name=tenant_name,
        tier_name=tier_name,
        deployable_name=current_app.config['name'])

    if conf.tenant["state"] != "initializing":
        raise RuntimeError("Tenant unexpectedly found in state '%s': %s"
                           % (conf.tenant["state"], conf.tenant))

    resources = current_app.config.get("resources")
    for module_name in resources:
        m = importlib.import_module(module_name)
        if hasattr(m, "provision"):
            provisioner_name = m.__name__.split('.')[-1]
            log.info("Provisioning '%s' for tenant '%s' on tier '%s'",
                     provisioner_name, tenant_name, tier_name)
            args = args_per_provisioner.get(provisioner_name, {})
            m.provision(conf, args)

    # Mark the tenant as ready
    conf.tenant["state"] = "active"

    # Save out config
    log.info("Saving config to %s", origin)
    origin_backend = create_backend(origin)
    origin_backend.save_table_store(ts)

    local_origin = 'file://~/.drift/config/' + g.conf.domain['domain_name']
    log.info("Saving config to %s", local_origin)
    local_store = create_backend(local_origin)
    local_store.save_table_store(ts)

    # Invalidate flask config
    current_app.extensions['driftconfig'].refresh()

    return "OK"
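# Hypothetical sketch of a resource module that the provisioning loop above
# would pick up. The only contract assumed here is the one post() relies on:
# each module named in the app's "resources" config may expose a
# provision(conf, args) callable. The module path, argument names, and log
# messages below are invented for illustration.
#
# contents of e.g. myproject/resources/postgres.py:
import logging

log = logging.getLogger(__name__)


def provision(conf, args):
    # 'conf' is the drift config for the tenant being provisioned and 'args'
    # holds the per-provisioner arguments from the request body.
    tenant_name = conf.tenant['tenant_name']
    db_size = args.get('size', 'small')
    log.info("Provisioning database for '%s' (size=%s)", tenant_name, db_size)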
superkaiju.dg-api.com
directivegames-borkbork.dg-api.com
'''

logging.basicConfig(level='INFO')

config_path = os.path.join(os.path.expanduser("~"), '.drift', 'config')
print "config_path is", config_path

# Set up backends. One on local hard drive, one on S3 and one in Redis.
local_store = FileBackend(config_path)  # local backend was missing in the original snippet; presumably intended here
s3_store = S3Backend('relib-test', 'directive-games', 'eu-west-1')
s3_store = create_backend('s3://relib-test/directive-games')  # equivalent URL form, supersedes the line above
redis_store = RedisBackend()

# Create an empty config
ts = get_drift_table_store()

if 0:
    s3_store.load(ts)
    print "whee got all the config", ts
    redis_store.save(ts)
    print "now i have dumped all the s3 config into redis"
    local_store.save(ts)
    print "it's also on my local disk here"

config_path = os.path.join(os.path.expanduser("~"), '.drift', 'config2')
FileBackend(config_path).save(ts)

import sys
def push_to_origin(local_ts, force=False, _first=False):
    """
    Pushes 'local_ts' to origin.

    Returns a dict with 'pushed' as True or False depending on success.

    If the local store has not been modified since the last pull, and the origin
    has the same version, no upload is performed and the return value contains
    'reason' = 'push_skipped_crc_match'.

    If the local store has been modified since the last pull, but the origin has
    stayed unchanged, the upload is performed and the return value contains
    'reason' = 'pushed_to_origin'.

    If the origin has changed since the last pull, the push is cancelled and the
    return value contains 'reason' = 'checksum_differ' and 'time_diff' as the
    time difference between the changes. To force a push to a modified origin,
    set 'force' = True.

    '_first' is used internally and indicates it's the first time the table
    store is pushed.
    """
    origin = local_ts.get_table('domain')['origin']
    origin_backend = create_backend(origin)

    if _first:
        crc_match = force = True
    else:
        try:
            origin_ts = origin_backend.load_table_store()
        except Exception as e:
            log.warning("Can't load table store from %s: %s", origin_backend, repr(e))
            crc_match = force = True
        else:
            crc_match = local_ts.meta['checksum'] == origin_ts.meta['checksum']

    if not force and not crc_match:
        local_modified = parse_8601(local_ts.meta['last_modified'])
        origin_modified = parse_8601(origin_ts.meta['last_modified'])
        return {
            'pushed': False,
            'reason': 'checksum_differ',
            'time_diff': origin_modified - local_modified
        }

    old, new = local_ts.refresh_metadata()
    if crc_match and old == new and not force:
        return {'pushed': True, 'reason': 'push_skipped_crc_match'}

    # Always turn on all integrity checks when saving to origin
    tmp = driftconfig.relib.CHECK_INTEGRITY
    driftconfig.relib.CHECK_INTEGRITY = ['pk', 'fk', 'unique', 'schema', 'constraints']
    try:
        origin_backend.save_table_store(local_ts)
    finally:
        driftconfig.relib.CHECK_INTEGRITY = tmp

    return {'pushed': True, 'reason': 'pushed_to_origin'}
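# Illustrative sketch of calling push_to_origin() and acting on the documented
# return values, similar to push_command() above. 'ts' is assumed to be a
# locally loaded table store; the helper name is made up for illustration.
def _example_push(ts):
    result = push_to_origin(ts)
    if result['pushed']:
        # Either 'pushed_to_origin' or 'push_skipped_crc_match'.
        print "Push ok:", result['reason']
    else:
        # 'checksum_differ': origin has changed since the last pull.
        print "Push refused:", result['reason']
        if 'time_diff' in result:
            print "Origin is ahead by", result['time_diff']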