def can_upgrade(self, game_ver):
    """
    game_ver: (list) version numbers.
    """
    # The game version must be greater than or equal to from_min_version.
    if compare_version(game_ver, self.from_min_version) == -1:
        return False
    # The game version must be strictly less than from_max_version.
    if compare_version(game_ver, self.from_max_version) != -1:
        return False
    return True
def test_compare_version(self):
    self.assertTrue(compare_version("2", "1") > 0)
    self.assertTrue(compare_version("2", "1.0") > 0)
    self.assertTrue(compare_version("2", "1.0.3") > 0)
    self.assertTrue(compare_version("2.0", "1") > 0)
    self.assertTrue(compare_version("2.0", "1.0") > 0)
    self.assertTrue(compare_version("2.0", "1.0.3") > 0)
    self.assertTrue(compare_version("2.0.0", "1") > 0)
    self.assertTrue(compare_version("2.0.0", "1.0") > 0)
    self.assertTrue(compare_version("2.0.0", "1.0.3") > 0)
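# A minimal sketch of a compare_version() consistent with the tests above,
# assuming dotted numeric version strings where missing components count as
# zero (so "2", "2.0", and "2.0.0" compare equal). This implementation is an
# assumption, not the original; it returns -1, 0, or 1 like cmp().
def compare_version(v1, v2):
    a = [int(part) for part in str(v1).split(".")]
    b = [int(part) for part in str(v2).split(".")]
    # Pad the shorter list with zeros so trailing ".0" components are neutral.
    width = max(len(a), len(b))
    a += [0] * (width - len(a))
    b += [0] * (width - len(b))
    return (a > b) - (a < b)

# Under this convention, can_upgrade() above accepts versions in the
# half-open interval [from_min_version, from_max_version).
assert compare_version("2.0", "1.0.3") > 0
assert compare_version("1.5", "1.5.0") == 0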
def push(self, repository):
    registry, repository = auth.resolve_repository_name(repository)
    if getattr(self, '_cfg', None) is None:
        self._cfg = auth.load_config()
    authcfg = auth.resolve_authconfig(self._cfg, registry)
    u = self._url("/images/{0}/push".format(repository))
    # docker-py's utils.compare_version() has a reversed sign convention:
    # it returns >= 0 when the second argument (the remote API version) is
    # at least the first, so this gates on API version 1.5 and newer, where
    # credentials travel in the X-Registry-Auth header.
    if utils.compare_version('1.5', self._version) >= 0:
        headers = {
            'X-Registry-Auth': auth.encode_header(authcfg)
        }
        return self._result(self._post_json(u, None, headers=headers))
    return self._result(self._post_json(u, authcfg))
def push(self, repository):
    registry, repository = auth.resolve_repository_name(repository)
    u = self._url("/images/{0}/push".format(repository))
    headers = {}
    if getattr(self, '_cfg', None) is None:
        self._cfg = auth.load_config()
    authcfg = auth.resolve_authconfig(self._cfg, registry)
    if utils.compare_version('1.5', self._version) >= 0:
        # Do not fail if no authentication exists for this specific
        # registry, as we can have an anonymous push.
        if authcfg:
            headers['X-Registry-Auth'] = auth.encode_header(authcfg)
        return self._result(self._post_json(u, None, headers=headers))
    return self._result(self._post_json(u, authcfg))
def check_migration():
    log.info('Checking whether we need to migrate something')
    last_migration = settings('last_migrated_PKC_version')
    if last_migration == v.ADDON_VERSION:
        log.info('Already migrated to PKC version %s' % v.ADDON_VERSION)
        return
    if not last_migration:
        log.info('Never migrated, so checking everything')
        last_migration = '1.0.0'
    # compare_version() here acts as "first version >= second", so a falsy
    # result means the install was last migrated before 1.8.2 and this
    # migration step must run. (The original compared v.ADDON_VERSION and
    # left last_migration unused, which looks like a bug.)
    if not compare_version(last_migration, '1.8.2'):
        log.info('Migrating to version 1.8.1')
        # Set the new PKC theMovieDB key
        settings('themoviedbAPIKey',
                 value='19c90103adb9e98f2172c6a6a3d85dc4')
    settings('last_migrated_PKC_version', value=v.ADDON_VERSION)
def pull(self, repository, tag=None):
    registry, repo_name = auth.resolve_repository_name(repository)
    if repo_name.count(":") == 1:
        repository, tag = repository.rsplit(":", 1)

    params = {
        'tag': tag,
        'fromImage': repository
    }
    headers = {}

    if utils.compare_version('1.5', self._version) >= 0:
        if getattr(self, '_cfg', None) is None:
            self._cfg = auth.load_config()
        authcfg = auth.resolve_authconfig(self._cfg, registry)
        headers = {
            'X-Registry-Auth': auth.encode_header(authcfg)
        }

    u = self._url("/images/create")
    return self._result(self.post(u, params=params, headers=headers))
def pull(self, repository, tag=None):
    registry, repo_name = auth.resolve_repository_name(repository)
    if repo_name.count(":") == 1:
        repository, tag = repository.rsplit(":", 1)

    params = {
        'tag': tag,
        'fromImage': repository
    }
    headers = {}

    if utils.compare_version('1.5', self._version) >= 0:
        if getattr(self, '_cfg', None) is None:
            self._cfg = auth.load_config()
        authcfg = auth.resolve_authconfig(self._cfg, registry)
        # Do not fail if no authentication exists for this specific
        # registry, as we can have a read-only pull.
        if authcfg:
            headers['X-Registry-Auth'] = auth.encode_header(authcfg)

    u = self._url("/images/create")
    return self._result(self.post(u, params=params, headers=headers))
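# The push()/pull() variants above repeat the same credential lookup. A
# hypothetical refactoring (the helper name is an assumption, not docker-py
# API) could hoist it into one place:
def _registry_auth_headers(self, registry):
    """Build X-Registry-Auth headers, or {} when no credentials exist."""
    if getattr(self, '_cfg', None) is None:
        self._cfg = auth.load_config()
    authcfg = auth.resolve_authconfig(self._cfg, registry)
    headers = {}
    # Missing credentials are not fatal: anonymous pushes and
    # read-only pulls are both legitimate.
    if authcfg:
        headers['X-Registry-Auth'] = auth.encode_header(authcfg)
    return headers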
def lookup_image(self, client, image_type, product_version):
    plist_p = c_void_p()
    ret = mobile_image_mounter_lookup_image(client,
                                            image_type.encode("utf-8"),
                                            pointer(plist_p))
    if ret != MobileImageMounterError.MOBILE_IMAGE_MOUNTER_E_SUCCESS:
        return False, "Cannot look up image, error code %d" % ret
    data = read_data_from_plist_ptr(plist_p)
    plist_free(plist_p)
    if data is None:
        return False, "Cannot parse plist result"
    if "Error" in data:
        error = data['Error']
        return False, error
    # iOS 10 and later report mounted images via ImageSignature;
    # older versions use the ImagePresent flag.
    if compare_version(product_version, "10.0") >= 0:
        return "ImageSignature" in data, None
    else:
        return data['ImagePresent'] if "ImagePresent" in data else False, None
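# A hypothetical call site for the method above (mounter, client, and the
# version string are assumptions standing in for the surrounding
# libimobiledevice bindings):
mounted, err = mounter.lookup_image(client, "Developer", "10.3.1")
if err is not None:
    print("lookup failed: %s" % err)
elif mounted:
    print("developer image already mounted")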
def app(sourcedb='', targetdb='', version_filename=False,
        output_directory=None, log_directory=None, no_date=False,
        tag=None, charset=None, sync_auto_inc=False, sync_comments=False):
    """Main Application"""
    options = locals()

    if not os.path.isabs(output_directory):
        print "Error: Output directory must be an absolute path. Quitting."
        return 1

    if not os.path.isdir(output_directory):
        print "Error: Output directory does not exist. Quitting."
        return 1

    if not log_directory or not os.path.isdir(log_directory):
        if log_directory:
            print "Log directory does not exist, writing log to %s" % output_directory
        log_directory = output_directory

    logging.basicConfig(filename=os.path.join(log_directory, LOG_FILENAME),
                        level=logging.INFO,
                        format='[%(levelname)s %(asctime)s] %(message)s')

    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    logging.getLogger('').addHandler(console)

    if not sourcedb:
        logging.error("Source database URL not provided. Exiting.")
        return 1

    source_info = schemaobject.connection.parse_database_url(sourcedb)
    if not source_info:
        logging.error("Invalid source database URL format. Exiting.")
        return 1

    if not source_info['protocol'] == 'mysql':
        logging.error("Source database must be MySQL. Exiting.")
        return 1

    if 'db' not in source_info:
        logging.error("Source database name not provided. Exiting.")
        return 1

    if not targetdb:
        logging.error("Target database URL not provided. Exiting.")
        return 1

    target_info = schemaobject.connection.parse_database_url(targetdb)
    if not target_info:
        logging.error("Invalid target database URL format. Exiting.")
        return 1

    if not target_info['protocol'] == 'mysql':
        logging.error("Target database must be MySQL. Exiting.")
        return 1

    if 'db' not in target_info:
        logging.error("Target database name not provided. Exiting.")
        return 1

    source_obj = schemaobject.SchemaObject(sourcedb, charset)
    target_obj = schemaobject.SchemaObject(targetdb, charset)

    if utils.compare_version(source_obj.version, '5.0.0') < 0:
        logging.error("%s requires MySQL version 5.0+ (source is v%s)"
                      % (APPLICATION_NAME, source_obj.version))
        return 1

    if utils.compare_version(target_obj.version, '5.0.0') < 0:
        logging.error("%s requires MySQL version 5.0+ (target is v%s)"
                      % (APPLICATION_NAME, target_obj.version))
        return 1

    # data transformation filters
    filters = (
        lambda d: utils.REGEX_MULTI_SPACE.sub(' ', d),
        lambda d: utils.REGEX_DISTANT_SEMICOLIN.sub(';', d),
        lambda d: utils.REGEX_SEMICOLON_EXPLODE_TO_NEWLINE.sub(";\n", d))

    # Information about this run, used in the patch/revert templates
    ctx = dict(app_version=APPLICATION_VERSION,
               server_version=target_obj.version,
               target_host=target_obj.host,
               target_database=target_obj.selected.name,
               created=datetime.datetime.now().strftime(TPL_DATE_FORMAT))

    p_fname, r_fname = utils.create_pnames(target_obj.selected.name,
                                           tag=tag,
                                           date_format=DATE_FORMAT,
                                           no_date=no_date)

    ctx['type'] = "Patch Script"
    p_buffer = utils.PatchBuffer(name=os.path.join(output_directory, p_fname),
                                 filters=filters, tpl=PATCH_TPL,
                                 ctx=ctx.copy(),
                                 version_filename=version_filename)

    ctx['type'] = "Revert Script"
    r_buffer = utils.PatchBuffer(name=os.path.join(output_directory, r_fname),
                                 filters=filters, tpl=PATCH_TPL,
                                 ctx=ctx.copy(),
                                 version_filename=version_filename)

    db_selected = False
    for patch, revert in syncdb.sync_schema(source_obj.selected,
                                            target_obj.selected, options):
        if patch and revert:
            if not db_selected:
                p_buffer.write(target_obj.selected.select() + '\n')
                r_buffer.write(target_obj.selected.select() + '\n')
                p_buffer.write(target_obj.selected.fk_checks(0) + '\n')
                r_buffer.write(target_obj.selected.fk_checks(0) + '\n')
                db_selected = True

            p_buffer.write(patch + '\n')
            r_buffer.write(revert + '\n')

    if db_selected:
        p_buffer.write(target_obj.selected.fk_checks(1) + '\n')
        r_buffer.write(target_obj.selected.fk_checks(1) + '\n')

    for patch, revert in syncdb.sync_views(source_obj.selected,
                                           target_obj.selected):
        if patch and revert:
            if not db_selected:
                p_buffer.write(target_obj.selected.select() + '\n')
                r_buffer.write(target_obj.selected.select() + '\n')
                db_selected = True

            p_buffer.write(patch + '\n')
            r_buffer.write(revert + '\n')

    for patch, revert in syncdb.sync_triggers(source_obj.selected,
                                              target_obj.selected):
        if patch and revert:
            if not db_selected:
                p_buffer.write(target_obj.selected.select() + '\n')
                r_buffer.write(target_obj.selected.select() + '\n')
                db_selected = True

            p_buffer.write(patch + '\n')
            r_buffer.write(revert + '\n')

    for patch, revert in syncdb.sync_procedures(source_obj.selected,
                                                target_obj.selected):
        if patch and revert:
            if not db_selected:
                p_buffer.write(target_obj.selected.select() + '\n')
                r_buffer.write(target_obj.selected.select() + '\n')
                p_buffer.write(target_obj.selected.fk_checks(0) + '\n')
                r_buffer.write(target_obj.selected.fk_checks(0) + '\n')
                db_selected = True

            p_buffer.write(patch + '\n')
            r_buffer.write(revert + '\n')

    if db_selected:
        p_buffer.write(target_obj.selected.fk_checks(1) + '\n')
        r_buffer.write(target_obj.selected.fk_checks(1) + '\n')

    if not p_buffer.modified:
        logging.info(("No migration scripts written."
                      " mysql://%s/%s and mysql://%s/%s were in sync.")
                     % (source_obj.host, source_obj.selected.name,
                        target_obj.host, target_obj.selected.name))
    else:
        try:
            p_buffer.save()
            r_buffer.save()
            logging.info("Migration scripts created for mysql://%s/%s\n"
                         "Patch Script: %s\nRevert Script: %s"
                         % (target_obj.host, target_obj.selected.name,
                            p_buffer.name, r_buffer.name))
        except OSError, e:
            p_buffer.delete()
            r_buffer.delete()
            logging.error("Failed writing migration scripts. %s" % e)
            return 1
def app(sourcedb='', targetdb='', version_filename=False,
        output_directory=None, log_directory=None, tag=None,
        sync_auto_inc=False, sync_comments=False):
    """Main Application"""
    options = locals()

    if not os.path.isabs(output_directory):
        print "Error: Output directory must be an absolute path. Quitting."
        return 1

    if not os.path.isdir(output_directory):
        print "Error: Output directory does not exist. Quitting."
        return 1

    if not log_directory or not os.path.isdir(log_directory):
        if log_directory:
            print "Log directory does not exist, writing log to %s" % output_directory
        log_directory = output_directory

    logging.basicConfig(filename=os.path.join(log_directory, LOG_FILENAME),
                        level=logging.INFO,
                        format='[%(levelname)s %(asctime)s] %(message)s')

    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    logging.getLogger('').addHandler(console)

    if not sourcedb:
        logging.error("Source database URL not provided. Exiting.")
        return 1

    source_info = schemaobject.connection.parse_database_url(sourcedb)
    if not source_info:
        logging.error("Invalid source database URL format. Exiting.")
        return 1

    if not source_info['protocol'] == 'mysql':
        logging.error("Source database must be MySQL. Exiting.")
        return 1

    if 'db' not in source_info:
        logging.error("Source database name not provided. Exiting.")
        return 1

    if not targetdb:
        logging.error("Target database URL not provided. Exiting.")
        return 1

    target_info = schemaobject.connection.parse_database_url(targetdb)
    if not target_info:
        logging.error("Invalid target database URL format. Exiting.")
        return 1

    if not target_info['protocol'] == 'mysql':
        logging.error("Target database must be MySQL. Exiting.")
        return 1

    if 'db' not in target_info:
        logging.error("Target database name not provided. Exiting.")
        return 1

    source_obj = schemaobject.SchemaObject(sourcedb)
    target_obj = schemaobject.SchemaObject(targetdb)

    if utils.compare_version(source_obj.version, '5.0.0') < 0:
        logging.error("%s requires MySQL version 5.0+ (source is v%s)"
                      % (APPLICATION_NAME, source_obj.version))
        return 1

    if utils.compare_version(target_obj.version, '5.0.0') < 0:
        logging.error("%s requires MySQL version 5.0+ (target is v%s)"
                      % (APPLICATION_NAME, target_obj.version))
        return 1

    # data transformation filters
    filters = (lambda d: utils.REGEX_MULTI_SPACE.sub(' ', d),
               lambda d: utils.REGEX_DISTANT_SEMICOLIN.sub(';', d))

    # Information about this run, used in the patch/revert templates
    ctx = dict(app_version=APPLICATION_VERSION,
               server_version=target_obj.version,
               target_host=target_obj.host,
               target_database=target_obj.selected.name,
               created=datetime.datetime.now().strftime(TPL_DATE_FORMAT))

    p_fname, r_fname = utils.create_pnames(target_obj.selected.name,
                                           tag=tag, date_format=DATE_FORMAT)

    ctx['type'] = "Patch Script"
    pBuffer = utils.PatchBuffer(name=os.path.join(output_directory, p_fname),
                                filters=filters, tpl=PATCH_TPL,
                                ctx=ctx.copy(),
                                version_filename=version_filename)

    ctx['type'] = "Revert Script"
    rBuffer = utils.PatchBuffer(name=os.path.join(output_directory, r_fname),
                                filters=filters, tpl=PATCH_TPL,
                                ctx=ctx.copy(),
                                version_filename=version_filename)

    db_selected = False
    for patch, revert in syncdb.sync_schema(source_obj.selected,
                                            target_obj.selected, options):
        if patch and revert:
            if not db_selected:
                pBuffer.write(target_obj.selected.select() + '\n')
                rBuffer.write(target_obj.selected.select() + '\n')
                db_selected = True

            pBuffer.write(patch + '\n')
            rBuffer.write(revert + '\n')

    if not pBuffer.modified:
        logging.info(("No migration scripts written."
                      " mysql://%s/%s and mysql://%s/%s were in sync.")
                     % (source_obj.host, source_obj.selected.name,
                        target_obj.host, target_obj.selected.name))
    else:
        try:
            pBuffer.save()
            rBuffer.save()
            logging.info("Migration scripts created for mysql://%s/%s\n"
                         "Patch Script: %s\nRevert Script: %s"
                         % (target_obj.host, target_obj.selected.name,
                            pBuffer.name, rBuffer.name))
        except OSError, e:
            pBuffer.delete()
            rBuffer.delete()
            logging.error("Failed writing migration scripts. %s" % e)
            return 1
                   output_directory=output_directory,
                   log_directory=log_directory,
                   no_date=no_date,
                   tag=tag,
                   charset=charset,
                   sync_auto_inc=sync_auto_inc,
                   sync_comments=sync_comments)
    except schemaobject.connection.DatabaseError, e:
        logging.error("MySQL Error %d: %s (Ignore)" % (e.args[0], e.args[1]))
        return 1

    source_obj = schemaobject.SchemaObject(sourcedb, charset)
    target_obj = schemaobject.SchemaObject(targetdb, charset)

    if utils.compare_version(source_obj.version, '5.0.0') < 0:
        logging.error("%s requires MySQL version 5.0+ (source is v%s)"
                      % (APPLICATION_NAME, source_obj.version))
        return 1

    if utils.compare_version(target_obj.version, '5.0.0') < 0:
        logging.error("%s requires MySQL version 5.0+ (target is v%s)"
                      % (APPLICATION_NAME, target_obj.version))
        return 1

    # data transformation filters
    filters = (
        lambda d: utils.REGEX_MULTI_SPACE.sub(' ', d),
        lambda d: utils.REGEX_DISTANT_SEMICOLIN.sub(';', d),
        lambda d: utils.REGEX_SEMICOLON_EXPLODE_TO_NEWLINE.sub(";\n", d))