def create_build(build_type_dir, config=None, targets=None, extra_args=None):
    '''Helper to instantiate a Build object from the dynamic generate code

    Assumes the working directory is alongside src and development

    :param build_type_dir: currently always "development"
    :param targets: the targets this build will concern itself with;
        a value of `None` signifies all targets
    :type targets: iterable
    :param extra_args: command line arguments that haven't been consumed yet
    :type extra_args: sequence
    '''
    generate_dynamic = import_generate_dynamic()

    if config is None:
        app_config = build_config.load_app()
    else:
        app_config = config

    local_config = build_config.load_local()
    extra_args = [] if extra_args is None else extra_args
    ignore_patterns = _get_ignore_patterns_for_src(defaults.SRC_DIR)

    enabled_platforms = _enabled_platforms(build_type_dir)
    if targets is not None:
        enabled_platforms = set(enabled_platforms) & set(targets)

    build_to_run = generate_dynamic.build.Build(
        app_config, defaults.SRC_DIR, build_type_dir,
        enabled_platforms=enabled_platforms,
        ignore_patterns=ignore_patterns,
        local_config=local_config,
        extra_args=extra_args,
        forge_root=defaults.FORGE_ROOT)

    return build_to_run
def push_stream(build, stream_id):
    from forge import build_config
    manifest = dict()
    file_for_hash = dict()

    # TODO: Some kind of partial build (insert all.js references)
    src = os.path.join('development', 'reload', 'src')
    for root, dirs, files in os.walk(src):
        for filename in files:
            filename = os.path.join(root, filename)
            with open(filename, 'rb') as opened_file:
                file_hash = hashlib.sha1(opened_file.read()).hexdigest()
            # Map forward-slash relative paths to content hashes.
            manifest[filename[len(src) + 1:].replace('\\', '/')] = file_hash
            file_for_hash[file_hash] = filename

    # Ask the server which of these hashes it doesn't already have.
    remote_hashes = remote._api_post('reload/snapshots/filter',
            files={'manifest': StringIO(json.dumps(manifest))})
    if remote_hashes['result'] != 'ok':
        raise ReloadError("Remote hash filter failed")

    # Upload only the content the server is missing, keyed by hash.
    hashes_to_upload = set(remote_hashes['manifest'].values())
    with lib.temp_file() as zip_file_path:
        with ZipFile(zip_file_path, 'w') as zip_file:
            for file_hash in hashes_to_upload:
                zip_file.write(file_for_hash[file_hash], file_hash)
        with open(zip_file_path, 'rb') as zip_file:
            created = remote._api_post('reload/snapshots/%s/%s' % (build.config['uuid'], stream_id),
                    files={
                        'config': StringIO(json.dumps(build_config.load_app())),
                        'manifest': StringIO(json.dumps(manifest)),
                        'forge-deploy': zip_file})

    if created['result'] != 'ok':
        raise ReloadError("Remote snapshot creation failed")
    LOG.info("Pushed snapshot to stream '%s'" % stream_id)
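# For illustration only: the manifest POSTed to reload/snapshots/filter above
# is a flat JSON object mapping forward-slash relative paths to SHA-1 hex
# digests of file contents. These paths and hashes are made up.
example_manifest = {
    'index.html': 'da39a3ee5e6b4b0d3255bfef95601890afd80709',  # sha1 of b''
    'js/app.js': '356a192b7913b04c54574d18c28d46e6395428ab',   # sha1 of b'1'
}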
def test_pre_identity_config(self):
    self.identity_file_contents = ''

    @contextmanager
    def open_file_mock(filename, *args, **kw):
        if filename.endswith(defaults.APP_CONFIG_FILE):
            self.opened_file.read.return_value = '{"uuid": "DUMMY_UUID"}'
            yield self.opened_file
        elif filename.endswith(defaults.IDENTITY_FILE):
            def mock_write(contents):
                self.identity_file_contents += contents
            self.opened_file.write = mock_write
            self.opened_file.read.return_value = self.identity_file_contents
            yield self.opened_file
        else:
            raise IOError("No such file: {0}".format(filename))

    self.open_file.side_effect = open_file_mock

    with mock.patch('forge.build_config.open_file', new=self.open_file):
        resp = build_config.load_app()

    eq_(resp['uuid'], 'DUMMY_UUID')
    eq_(self.open_file.call_args_list, [
        ((join(".", defaults.APP_CONFIG_FILE), ), {}),
        ((join(".", defaults.IDENTITY_FILE), 'w'), {}),
        ((join(".", defaults.IDENTITY_FILE), ), {}),
    ])
def push_stream(build, remote, stream_id):
    manifest = dict()
    file_for_hash = dict()

    src = os.path.join('development', 'reload', 'src')
    for root, dirs, files in os.walk(src):
        for filename in files:
            filename = os.path.join(root, filename)
            with open(filename, 'rb') as file_to_be_hashed:
                hash_of_file = hashlib.sha1(file_to_be_hashed.read()).hexdigest()
            manifest[filename[len(src) + 1:].replace('\\', '/')] = hash_of_file
            file_for_hash[hash_of_file] = filename

    content_dict = remote._api_post('reload/snapshot/filter',
            files={'manifest': StringIO(json.dumps(manifest))})

    hashes_to_upload = set(content_dict['data']['manifest'].values())
    with lib.temp_file() as zip_file_path:
        with ZipFile(zip_file_path, 'w') as zip_file:
            for hash_of_file in hashes_to_upload:
                zip_file.write(file_for_hash[hash_of_file], hash_of_file)
        with open(zip_file_path, 'rb') as zip_file:
            remote._api_post('reload/snapshot/%s/%s' % (build.config['uuid'], stream_id),
                    files={
                        'config': StringIO(json.dumps(build_config.load_app())),
                        'manifest': StringIO(json.dumps(manifest)),
                        'forge-deploy': zip_file
                    })

    LOG.info("Pushed snapshot to stream '%s'" % stream_id)
def app_config():
    # forge-generate expects uuid to be stored in config.json
    config = build_config.load_app()
    (fd, config_tmp) = tempfile.mkstemp(suffix=".json")
    os.close(fd)
    with open(config_tmp, 'w') as f:
        json.dump(config, f)
    return (config, config_tmp)
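# Usage sketch for app_config() above: the helper never deletes the temporary
# copy it writes, so (assumption) cleanup falls to the caller once
# forge-generate has consumed the file. The print is a stand-in consumer.
def example_consume_app_config():
    config, config_tmp = app_config()
    try:
        print('%s written to %s' % (config.get('uuid'), config_tmp))
    finally:
        os.remove(config_tmp)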
def test_normal_json(self, path):
    path.isfile.return_value = True
    self.opened_file.read.return_value = '{"a": 1, "b": [null, true]}'
    with mock.patch('forge.build_config.open_file', new=self.open_file):
        resp = build_config.load_app()
    eq_(resp, {"a": 1, "b": [None, True]})
def normalize_config(self, path_to_app="."):
    self._authenticate()

    app_config = build_config.load_app(path_to_app)
    url = 'reload/{uuid}/normalize_config'.format(uuid=app_config['uuid'])
    resp = self._api_post(url, files={
        'config': StringIO(json.dumps(app_config))
    })
    return resp
def pushcdn_stream(build, remote, stream_id, manifest_url):
    external = dict()
    external["manifest_url"] = manifest_url
    remote._api_post('reload/snapshot/%s/%s' % (build.config['uuid'], stream_id),
            files={
                'config': StringIO(json.dumps(build_config.load_app())),
                'external': StringIO(json.dumps(external))
            })
    LOG.info("Pushed snapshot to stream '%s' with manifest url '%s'" % (stream_id, manifest_url))
def server_says_should_rebuild(self, path_to_app="."):
    self._authenticate()

    app_config = build_config.load_app(path_to_app)
    url = 'app/{uuid}/should_rebuild'.format(uuid=app_config['uuid'])
    resp = self._api_get(url, params=dict(
        platform_version=app_config['platform_version'],
        platform_changeset=lib.platform_changeset(path_to_app)
    ))
    return resp
def pushcdn_stream(build, stream_id, manifest_url):
    from forge import build_config
    external = dict()
    external["manifest_url"] = manifest_url
    created = remote._api_post('reload/snapshots/%s/%s' % (build.config['uuid'], stream_id),
            files={
                'config': StringIO(json.dumps(build_config.load_app())),
                'external': StringIO(json.dumps(external))
            })
    if created['result'] != 'ok':
        raise ReloadError("Remote snapshot creation failed")
    LOG.info("Pushed snapshot to stream '%s' with manifest url '%s'" % (stream_id, manifest_url))
def state(cookies, path):
    """Returns the state of an app, which includes:

    {
        'platform_version': platform version in config.json.
        'problem': a problem which should be displayed in the app status box.
        'should_rebuild': whether the app needs to be rebuilt due to config or platform changes.
    }

    If the app was updated in some way to make it compatible with the
    platform version in config.json, a 'migration' event is emitted.

    :param path: Path to the app we want the state of.
    """
    file_problem = _missing_file_problems(path)
    if file_problem:
        return dict(
            problem=file_problem,
            should_rebuild=False
        )

    app_config = build_config.load_app(path)
    app_platform = app_config.get('platform_version')
    if app_platform is None:
        return dict(
            problem={'type': 'platform_version_missing'},
            should_rebuild=False
        )

    try:
        response_dict = forge_tool.singleton.remote.server_says_should_rebuild(path, cookies=cookies)
    except RequestError as e:
        return dict(
            platform_version=app_platform,
            problem=_classify_request_error(e),
            should_rebuild=False
        )

    if not response_dict['should_rebuild']:
        did_migrate = lib.make_dynamic_call(
            [os.path.join(path, '.template'), os.path.join(path, '.template', '.lib')],
            'generate_dynamic.customer_goals.migrate_app_to_current',
            {'path': path},
            fail_if_missing=False
        )
        if did_migrate:
            async.current_call().emit('migration')

    return dict(
        platform_version=app_platform,
        should_rebuild=response_dict['should_rebuild'],
    )
def test_no_identity(self):
    @contextmanager
    def open_file_mock(filename, *args, **kw):
        if filename.endswith(defaults.APP_CONFIG_FILE):
            result = mock.Mock()
            result.read.return_value = "{}"
            yield result
        else:
            raise IOError("No such file: {0}".format(filename))

    self.open_file.side_effect = open_file_mock

    with mock.patch('forge.build_config.open_file', new=self.open_file):
        resp = build_config.load_app()
def import_app(cookies, path, project):
    """Attempts to use the path given as an app directory.

    1) Checks for src/config.json in the given folder
    2) Extracts uuid field
    3) Sees if it correlates with an app that this user has access to
    """
    path = os.path.expanduser(path)
    project = int(project)

    app_paths = [details['path'] for details in _get_local_apps().values()]
    if path in app_paths:
        raise Exception('Already have an app imported from %s' % path)

    if not os.path.isdir(path):
        raise Exception('No such folder "%s"' % path)

    try:
        config_dict = build_config.load_app(path)
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise Exception("%s is missing" % defaults.APP_CONFIG_FILE)
        raise

    uuid = config_dict['uuid']
    platform_version = config_dict['platform_version']
    name = config_dict.get('name')

    if project:
        # check with server that this app does indeed belong to the project
        app = forge_tool.singleton.remote._api_get('app/' + uuid, cookies=cookies)
        if app['data']['project_id'] != project:
            raise Exception('You cannot import an app which does not belong to this project')

    if name is None:
        name = '(Untitled App)'

    set_settings(uuid, {'path': path})

    return {
        "uuid": uuid,
        "path": path,
        "name": name,
        "platform_version": platform_version
    }
def create_build(build_type_dir, config=None, targets=None, extra_args=None, generate_dynamic=None):
    '''Helper to instantiate a Build object from the dynamic generate code

    Assumes the working directory is alongside src and development

    :param build_type_dir: currently always "development"
    :param targets: the targets this build will concern itself with;
        a value of `None` signifies all targets
    :type targets: iterable
    :param extra_args: command line arguments that haven't been consumed yet
    :type extra_args: sequence
    :param generate_dynamic: generate_dynamic module to get Build class from
    '''
    if generate_dynamic is None:
        # prevent cyclic recursion with import_generate_dynamic
        generate_dynamic = import_generate_dynamic()

    if config is None:
        app_config = build_config.load_app()
    else:
        app_config = config

    local_config = build_config.load_local()
    extra_args = [] if extra_args is None else extra_args
    ignore_patterns = _get_ignore_patterns_for_src(defaults.SRC_DIR)

    enabled_platforms = _enabled_platforms(build_type_dir)
    if targets is not None:
        enabled_platforms = set(enabled_platforms) & set(targets)

    build_to_run = generate_dynamic.build.Build(
        app_config,
        defaults.SRC_DIR,  # confusingly this is meant to point to platform source code... this value is not used by the customer tasks.
        build_type_dir,  # where the output of build should go
        enabled_platforms=enabled_platforms,
        ignore_patterns=ignore_patterns,
        local_config=local_config,
        extra_args=extra_args,
        forge_root=defaults.FORGE_ROOT
    )

    return build_to_run
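# Minimal usage sketch for create_build, assuming the working directory is an
# app root with src/ and development/ alongside, as the docstring requires;
# the ['android'] target list is illustrative only. The returned Build
# object's interface is defined by the dynamically fetched generate code, so
# no further calls are shown here.
def example_create_android_build(unconsumed_args):
    return create_build('development', targets=['android'], extra_args=unconsumed_args)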
def fetch_generate_instructions(self, to_dir):
    '''Retrieve the generation instructions for our current environment.

    Rather than hard-coding these instructions - how to inject customer code
    into the apps - they are loaded dynamically from the server to allow
    different platform versions to work with a larger number of build-tools
    versions.

    :param to_dir: where the instructions will be put
    '''
    self._authenticate()

    platform_version = build_config.load_app()['platform_version']

    temp_instructions_file = 'instructions.zip'

    LOG.info("Fetching generation instructions for {platform_version} "
             "into \"{to_dir}\"".format(**locals()))

    try:
        # ensure generate_dynamic dir is there before extracting instructions into it
        if not path.isdir(to_dir):
            os.makedirs(to_dir)

        with lib.cd(to_dir):
            self._get_file(
                urljoin(
                    self.server,
                    'platform/{platform_version}/generate_instructions/'
                    .format(platform_version=platform_version)),
                temp_instructions_file
            )
            lib.unzip_with_permissions(temp_instructions_file)
    finally:
        if path.isfile(path.join(to_dir, temp_instructions_file)):
            os.remove(path.join(to_dir, temp_instructions_file))

    return to_dir
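# Usage sketch (assumptions flagged): fetch_generate_instructions appears to
# live on the same Remote-style object as the other self._api_* helpers in
# this listing, and defaults.INSTRUCTIONS_DIR (used by development_build
# below) looks like the natural destination. Neither is confirmed here.
#
#     remote = Remote(build_config.load())
#     remote.fetch_generate_instructions(defaults.INSTRUCTIONS_DIR)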
def __init__(self, **kw):
    self.app_config = build_config.load_app()
def buildevents(self, path_to_app="."):
    self._authenticate()

    app_config = build_config.load_app(path_to_app)
    url = 'reload/buildevents/{uuid}'.format(uuid=app_config['uuid'])
    resp = self._api_get(url)
    return resp
def development_build(unhandled_args, has_target=True):
    '''Pull down new version of platform code in a customised build, and
    create unpacked development add-on.

    :param has_target: If this is False, just fetch the generation
        instructions, don't build any targets.
    '''
    _check_working_directory_is_safe()

    if not os.path.isdir(defaults.SRC_DIR):
        raise ForgeError(
            'Source folder "{src}" does not exist - have you run {prog} create yet?'.format(
                src=defaults.SRC_DIR,
                prog=ENTRY_POINT_NAME,
            )
        )

    config = build_config.load()
    remote = Remote(config)
    remote.check_version()
    manager = Manager(config)

    instructions_dir = defaults.INSTRUCTIONS_DIR
    if forge.settings.get('full', False):
        # do this first, so that bugs in generate_dynamic can always be nuked with a -f
        LOG.debug("Full rebuild requested: removing previous templates")
        shutil.rmtree(instructions_dir, ignore_errors=True)

    app_config = build_config.load_app()

    should_rebuild = remote.server_says_should_rebuild()
    server_changed = should_rebuild['should_rebuild']
    reason = should_rebuild['reason']
    stable_platform = should_rebuild['stable_platform']
    platform_state = should_rebuild['platform_state']

    if server_changed:
        # Need new generate dynamic - download it
        LOG.debug("Server requires rebuild: {reason}".format(reason=reason))
        LOG.info("Your Forge platform has been updated, downloading updated build instructions.")
        manager.fetch_instructions()

    config_changed = manager.need_new_templates_for_config()
    if config_changed:
        # Need new builds due to local config change
        LOG.info("Your local config has been changed, downloading updated build instructions.")
        manager.fetch_instructions()

    reload_result = remote.create_buildevent(app_config)

    if not has_target:
        # No need to go further if we aren't building a target
        return

    try:
        target = unhandled_args.pop(0)
        if target.startswith("-"):
            raise ForgeError("Target required for 'forge build'")
    except IndexError:
        raise ForgeError("Target required for 'forge build'")

    # Not all targets output into a folder by the same name.
    target_dirs = {
        'safari': 'forge.safariextension',
    }
    target_dir = target
    if target in target_dirs:
        target_dir = target_dirs[target]

    reload_config = json.loads(reload_result['config'])
    reload_config_hash = reload_result['config_hash']

    if target != "reload":  # Don't do a server side build for reload
        if not path.exists(path.join('.template', target_dir)):
            LOG.info("Your app configuration has changed since your last build of this platform, "
                    "performing a remote build of your app. Once this is downloaded future builds will be faster.")
            build = remote.build(config=reload_config, target=target)
            remote.fetch_unpackaged(build, to_dir=defaults.TEMPLATE_DIR, target=target)
        else:
            LOG.info('Config matches previously downloaded build, performing local build.')

    current_platform = app_config['platform_version']

    # Advise user about state of their current platform
    platform_category = classify_platform(stable_platform, current_platform)
    if platform_category == 'nonstandard':
        LOG.warning("Platform version: %s is a non-standard platform version, it may not be receiving updates and it is recommended you update to the stable platform version: %s" % (current_platform, stable_platform))
    elif platform_category == 'minor':
        LOG.warning("Platform version: %s is a minor platform version, it may not be receiving updates, it is recommended you update to a major platform version" % current_platform)
    elif platform_category == 'old':
        LOG.warning("Platform version: %s is no longer the current platform version, it is recommended you migrate to a newer version using the 'forge migrate' command. See http://current-docs.trigger.io/release-notes.html for more details" % current_platform)

    if platform_state == "deprecated":
        LOG.warning("Platform version: %s is deprecated, it is highly recommended you migrate to a newer version as soon as possible." % current_platform)

    def move_files_across():
        shutil.rmtree(path.join('development', target_dir), ignore_errors=True)
        if target != "reload":
            # Delete reload as other targets may build it
            shutil.rmtree(path.join('development', 'reload'), ignore_errors=True)
            # No reload server template
            shutil.copytree(path.join(defaults.TEMPLATE_DIR, target_dir), path.join('development', target_dir))

    # Windows often gives a permission error without a small wait
    try_a_few_times(move_files_across)

    # Put config hash in config object for local generation
    # copy first as mutating dict makes assertions about previous uses tricky
    reload_config_for_local = reload_config.copy()
    reload_config_for_local['config_hash'] = reload_config_hash

    # have templates and instructions - inject code
    generator = Generate()
    generator.all('development', defaults.SRC_DIR,
            extra_args=unhandled_args,
            config=reload_config_for_local,
            target=target)

    LOG.info("Development build created. Use {prog} run to run your app.".format(
        prog=ENTRY_POINT_NAME
    ))
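# Hedged invocation sketch for development_build above: the first unconsumed
# command-line argument names the target and the remainder is handed through
# to code generation as extra_args. The argument values are illustrative.
#
#     development_build(['android'])           # build the android target
#     development_build([], has_target=False)  # just refresh instructions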
def test_malformed_json(self):
    self.opened_file.read.return_value = '[{]'
    with mock.patch('forge.build_config.open_file', new=self.open_file):
        build_config.load_app()