Example 1
def run_command(build, args):
    global remote
    from forge import build_config
    import forge
    from forge.remote import Remote

    config = build_config.load()
    remote = Remote(config)
    remote._authenticate()

    if len(args) == 0:
        raise ReloadError("Expecting a command after 'forge reload'")
    if args[0] == 'list':
        if len(args) != 1:
            raise ReloadError(
                "Invalid number of arguments passed to 'forge reload list'")
        list_streams(build)
    elif args[0] == 'create':
        if len(args) != 2:
            raise ReloadError(
                "Invalid number of arguments passed to 'forge reload create'")
        create_stream(build, args[1])
    elif args[0] == 'push':
        if len(args) != 2:
            raise ReloadError(
                "Invalid number of arguments passed to 'forge reload push'")
        push_stream(build, args[1])
    else:
        raise ReloadError("Unknown command 'forge reload %s'" % args[0])
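
As an aside, the validate-then-dispatch shape above can also be written as a lookup table. The sketch below is illustrative only and is not part of forge; the handler names simply mirror the helpers used above.

# Standalone sketch (not from forge): table-driven version of the same dispatch.
SUBCOMMANDS = {
    'list': (1, 'list_streams'),
    'create': (2, 'create_stream'),
    'push': (2, 'push_stream'),
}

def resolve(args):
    if not args:
        raise ValueError("Expecting a command after 'forge reload'")
    name = args[0]
    if name not in SUBCOMMANDS:
        raise ValueError("Unknown command 'forge reload %s'" % name)
    expected_argc, handler = SUBCOMMANDS[name]
    if len(args) != expected_argc:
        raise ValueError("Invalid number of arguments passed to 'forge reload %s'" % name)
    return handler

print(resolve(['push', 'my-stream']))  # -> 'push_stream'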
Example 2
def run_command(build, args):
	global remote
	from forge import build_config
	import forge
	from forge.remote import Remote

	config = build_config.load()
	remote = Remote(config)
	remote._authenticate()

	if len(args) == 0:
		raise ReloadError("Expecting a command after 'forge reload'")
	if args[0] == 'list':
		if len(args) != 1:
			raise ReloadError("Invalid number of arguments passed to 'forge reload list'")
		list_streams(build)
	elif args[0] == 'create':
		if len(args) != 2:
			raise ReloadError("Invalid number of arguments passed to 'forge reload create'")
		create_stream(build, args[1])
	elif args[0] == 'push':
		if len(args) != 2:
			raise ReloadError("Invalid number of arguments passed to 'forge reload push'")
		push_stream(build, args[1])
	else:
		raise ReloadError("Unknown command 'forge reload %s'" % args[0])
Example 3
def create(unhandled_args):
	'Create a new development environment'
	_check_working_directory_is_safe()
	config = build_config.load()
	remote = Remote(config)
	remote.check_version()

	if os.path.exists(defaults.SRC_DIR):
		raise ForgeError('Source folder "%s" already exists; if you really want to create a new app you will need to remove it!' % defaults.SRC_DIR)
	else:
		if "name" in forge.settings and forge.settings["name"]:
			name = forge.settings["name"]
		else:
			event_id = async.current_call().emit('question', schema={
				'description': 'Enter details for app',
				'properties': {
					'name': {
						'type': 'string',
						'title': 'App Name',
						'description': 'This name is what your application will be called on devices. You can change it later through config.json.'
					}
				}
			})

			name = async.current_call().wait_for_response(event_id)['data']['name']
		uuid = remote.create(name)
		remote.fetch_initial(uuid)
		LOG.info("Building app for the first time...")
		development_build([], has_target=False)
		LOG.info('App structure created. To proceed:')
		LOG.info('1) Put your code in the "%s" folder' % defaults.SRC_DIR)
		LOG.info('2) Run %s build to make a build' % ENTRY_POINT_NAME)
		LOG.info('3) Run %s run to test out your build' % ENTRY_POINT_NAME)
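
For reference, here is a minimal sketch of the question/answer round-trip that create relies on. The payload values are hypothetical; only the shape matches how the response is unpacked above.

# Hypothetical event payloads (shape only): the 'question' event carries the
# JSON schema shown above, and the answer comes back keyed under 'data'.
question_event = {'eventId': 1, 'type': 'question', 'schema': {'properties': {'name': {'type': 'string'}}}}
response = {'eventId': 1, 'data': {'name': 'My App'}}
name = response['data']['name']
print(name)  # My App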
Example 4
def check_local_config_schema(build):
	log.info("Verifying your configuration settings...")
	# leave this import here: might not be on sys.path in some situations
	import validictory

	local_conf_filename = build.tool_config.get('general.local_config')
	if local_conf_filename is not None:
		# explicit conf file defined
		if not path.isfile(local_conf_filename):
			raise ConfigurationError("{file} does not exist!".format(file=local_conf_filename))
	else:
		local_conf_filename = 'local_config.json'
		if not path.isfile(local_conf_filename):
			log.warning("Local configuration file '{file}' does not exist!".format(file=local_conf_filename))
			# Nothing to validate if the default file is missing.
			return
	
	with open(local_conf_filename) as local_conf_file:
		local_conf = json.load(local_conf_file)

	from forge.remote import Remote
	from forge import build_config
	remote = Remote(build_config.load())
	local_conf_schema = remote._api_get('platform/{platform_version}/local_config_schema'.format(
			platform_version=build.config['platform_version']))
	
	try:
		validictory.validate(local_conf, local_conf_schema)
	except validictory.validator.UnexpectedPropertyError as e:
		log.warning('Unexpected setting: "{error}" in "{file}". This will be ignored.'.format(
			file=local_conf_filename,
			error=e)
		)
	log.info("Configuration settings check complete")
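
A minimal standalone sketch of the validictory call used above; the schema and settings here are made up and are not Forge's real local_config schema.

# Illustration only: validictory raises a ValueError subclass when the data
# does not match the schema.
import validictory

schema = {'type': 'object', 'properties': {'profile': {'type': 'string'}}}
validictory.validate({'profile': 'default'}, schema)  # passes silently
try:
	validictory.validate({'profile': 42}, schema)  # wrong type
except ValueError as error:
	print("validation failed: %s" % error)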
Example 5
	def __init__(self, config=None):
		# have to pass in expect_app_config=False otherwise this call fails when
		# looking for an app specific config.json
		if config is None:
			self.config = build_config.load(expect_app_config=False)
		else:
			self.config = config
		self.remote = Remote(self.config, use_cookie_file=False, enable_caching=True)
Example 6
def ensure_lib_available(build, file):
	# In case of forge-generate check for file
	server_path = path.abspath(path.join(path.split(path.abspath(__file__))[0], '..', '..', 'generate', 'lib', file))
	if path.isfile(server_path):
		return server_path

	lib_dir = path.join(path.dirname(build.source_dir), '.lib')
	hash_path = path.join(path.dirname(build.source_dir), '.template', 'lib', 'hash.json')
	if not path.exists(lib_dir):
		os.makedirs(lib_dir)
		
	# Hide directory on windows
	if sys.platform == 'win32':
		try:
			PopenWithoutNewConsole(['attrib', '+h', lib_dir]).wait()
		except Exception:
			# don't care if we fail to hide the templates dir
			pass
	
	hashes = {}
	if path.exists(hash_path):
		with open(hash_path, 'r') as hash_file:
			hashes = json.load(hash_file)
	
	file_path = path.join(lib_dir, file)

	if path.exists(file_path) and file in hashes:
		# Check hash
		with open(file_path, 'rb') as cur_file:
			hash = hashlib.md5(cur_file.read()).hexdigest()
			if hash == hashes[file]:
				# File exists and is correct
				build.log.debug("File: %s, already downloaded and correct." % file)
				return file_path

	# File doesn't exist, or has the wrong hash or has no known hash - download
	build.log.info("Downloading lib file: %s, this will only happen when a new file is available." % file)
	
	from forge.remote import Remote
	from forge import build_config
	config = build_config.load()
	remote = Remote(config)

	server_details = urlparse.urlparse(remote.server)
	url = "{protocol}://{netloc}/lib-static/{platform_version}/{file}".format(
		protocol=server_details.scheme,
		netloc=server_details.netloc,
		platform_version=build.config['platform_version'],
		file=file
	)
	remote._get_file(url, file_path)
	
	# Make file executable.
	os.chmod(file_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
	
	return file_path
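
The URL assembly above keeps only the scheme and host of the configured server; here is a standalone sketch of the same step (the server string, platform version and file name are hypothetical).

# Sketch only: rebuild the lib-static URL from a hypothetical server setting.
import urlparse  # Python 2 stdlib, as used in the example above

server = "https://trigger.io/api/v1/"
parts = urlparse.urlparse(server)
url = "{protocol}://{netloc}/lib-static/{platform_version}/{file}".format(
	protocol=parts.scheme,
	netloc=parts.netloc,
	platform_version="v2.0.0",
	file="build-tools.jar",
)
print(url)  # https://trigger.io/lib-static/v2.0.0/build-tools.jar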
Example 7
def populate_trigger_domain(build):
	try:
		from forge import build_config
		config = build_config.load()
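		# Assumption (not stated in the source): the configured server looks like
		# "https://trigger.io/api/", so slicing off the last five characters
		# ("/api/") leaves the bare trigger domain.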
		build.config['trigger_domain'] = config['main']['server'][:-5]
	except ImportError:
		build.config['trigger_domain'] = "TRIGGER_DOMAIN_HERE"

	if "config_hash" not in build.config:
		build.config['config_hash'] = "CONFIG_HASH_HERE"
Example 8
def populate_trigger_domain(build):
    try:
        from forge import build_config
        config = build_config.load()
        build.config['trigger_domain'] = config['main']['server'][:-5]
    except ImportError:
        build.config['trigger_domain'] = "TRIGGER_DOMAIN_HERE"

    if "config_hash" not in build.config:
        build.config['config_hash'] = "CONFIG_HASH_HERE"
Example 9
def ensure_lib_available(build, file):
    lib_dir = path.join(path.dirname(build.source_dir), '.lib')
    hash_path = path.join(path.dirname(build.source_dir), '.template', 'lib',
                          'hash.json')
    if not path.exists(lib_dir):
        os.makedirs(lib_dir)

    # Hide directory on windows
    if sys.platform == 'win32':
        try:
            lib.PopenWithoutNewConsole(['attrib', '+h', lib_dir]).wait()
        except Exception:
            # don't care if we fail to hide the templates dir
            pass

    hashes = {}
    if path.exists(hash_path):
        with open(hash_path, 'r') as hash_file:
            hashes = json.load(hash_file)

    file_path = path.join(lib_dir, file)
    if path.exists(file_path) and file in hashes:
        # Check hash
        with open(file_path, 'rb') as cur_file:
            hash = hashlib.md5(cur_file.read()).hexdigest()
            if hash == hashes[file]:
                # File exists and is correct
                build.log.debug("File: %s, already downloaded and correct." %
                                file)
                return file_path

    # File doesn't exist, or has the wrong hash or has no known hash - download
    build.log.info(
        "Downloading lib file: %s, this will only happen when a new file is available."
        % file)

    from forge.remote import Remote
    from forge import build_config
    config = build_config.load()
    remote = Remote(config)

    server_details = urlparse.urlparse(remote.server)
    url = "{protocol}://{netloc}/lib-static/{platform_version}/{file}".format(
        protocol=server_details.scheme,
        netloc=server_details.netloc,
        platform_version=build.config['platform_version'],
        file=file)
    remote._get_file(url, file_path)

    # Make file executable.
    os.chmod(
        file_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP
        | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)

    return file_path
Example 10
def log_build(build, action):
    '''
    Bundle together some stats and send it to the server for tracking
    This is called by every other function in this module, just before running
    the build.
    '''
    from forge import build_config
    import forge

    log = {}
    log['action'] = action
    log['platform'] = platform.platform()
    log['version'] = sys.version
    log['uuid'] = build.config['uuid']
    log['tools_version'] = forge.VERSION
    config = build_config.load()
Example 11
def init(kw):
	global singleton, singleton_params

	if singleton is not None:
		return

	with lock:
		if singleton is None:
			if not kw:
				singleton_params = {
					'config': build_config.load()
				}
			else:
				singleton_params = kw

			singleton = ForgeTool(**singleton_params)
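
The init function above is a double-checked locking singleton; here is a minimal standalone sketch of the same pattern (the names are illustrative, not from forge).

import threading

_lock = threading.Lock()
_instance = None

def get_instance(factory):
	# Cheap check without the lock, then re-check while holding it so the
	# factory runs at most once even with concurrent callers.
	global _instance
	if _instance is None:
		with _lock:
			if _instance is None:
				_instance = factory()
	return _instance

print(get_instance(dict))  # {}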
Example 12
def log_build(build, action):
	'''
	Bundle together some stats and send it to the server for tracking
	This is called by every other function in this module, just before running
	the build.
	'''
	from forge import build_config
	import forge

	log = {}
	log['action']	     = action
	log['platform']	     = platform.platform()
	log['version']	     = sys.version
	log['uuid']	     = build.config['uuid']
	log['tools_version'] = forge.VERSION
	config = build_config.load()
Example 13
def ensure_lib_available(build, file):
	lib_dir = path.join(path.dirname(build.source_dir), '.lib')
	hash_path = path.join(path.dirname(build.source_dir), '.template', 'lib', 'hash.json')
	if not path.exists(lib_dir):
		os.makedirs(lib_dir)
		
	# Hide directory on windows
	if sys.platform == 'win32':
		try:
			lib.PopenWithoutNewConsole(['attrib', '+h', lib_dir]).wait()
		except Exception:
			# don't care if we fail to hide the templates dir
			pass
	
	hashes = {}
	if path.exists(hash_path):
		with open(hash_path, 'r') as hash_file:
			hashes = json.load(hash_file)
	
	file_path = path.join(lib_dir, file)
	if path.exists(file_path) and file in hashes:
		# Check hash
		with open(file_path, 'rb') as cur_file:
			hash = hashlib.md5(cur_file.read()).hexdigest()
			if hash == hashes[file]:
				# File exists and is correct
				build.log.debug("File: %s, already downloaded and correct." % file)
				return file_path

	# File doesn't exist, or has the wrong hash or has no known hash - download
	build.log.info("Downloading lib file: %s, this will only happen when a new file is available." % file)
	
	from forge.remote import Remote
	from forge import build_config
	config = build_config.load()
	remote = Remote(config)

	remote._get_file("https://%s/lib-static/%s/%s" % (remote.hostname, build.config['platform_version'], file), file_path)
	
	# Make file executable.
	os.chmod(file_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
	
	return file_path
Example 14
def log_build(build, action):
	'''
	Bundle together some stats and send it to the server for tracking
	This is called by every other function in this module, just before running
	the build.
	'''
	from forge import build_config
	import forge
	from forge.remote import Remote

	log = {}
	log['action']	     = action
	log['platform']	     = platform.platform()
	log['version']	     = sys.version
	log['uuid']	     = build.config['uuid']
	log['tools_version'] = forge.VERSION
	target = None
	if len(build.enabled_platforms):
		target = list(build.enabled_platforms)[0]
		log['target'] = target
	log['package_name'] = dig(build.config, ['core', target, 'package_name'], "io.trigger.forge" + build.config["uuid"])
	config = build_config.load()
	remote = Remote(config)
	remote._authenticate()
	remote._api_post('track/', data=log)
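
dig is not defined in this example; a plausible stand-in (an assumption, not forge's actual helper) is a safe nested lookup that falls back to a default when any key is missing.

# Hypothetical helper with the same call shape as dig(mapping, keys, default).
def dig(mapping, keys, default=None):
	current = mapping
	for key in keys:
		try:
			current = current[key]
		except (KeyError, TypeError, IndexError):
			return default
	return current

print(dig({'core': {'ios': {'package_name': 'io.trigger.forge.demo'}}},
          ['core', 'ios', 'package_name'], 'io.trigger.forge.default'))  # -> io.trigger.forge.demo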
Example 15
def populate_trigger_domain(build):
    from forge import build_config
    config = build_config.load()
    build.config['trigger_domain'] = config['main']['server'][:-5]
Example 16
def development_build(unhandled_args, has_target=True):
	'''Pull down new version of platform code in a customised build, and create unpacked development add-on.

	:param has_target: If this is False, just fetch the generation instructions, don't build any targets.
	'''
	_check_working_directory_is_safe()

	if not os.path.isdir(defaults.SRC_DIR):
		raise ForgeError(
			'Source folder "{src}" does not exist - have you run {prog} create yet?'.format(
				src=defaults.SRC_DIR,
				prog=ENTRY_POINT_NAME,
			)
		)

	config = build_config.load()
	remote = Remote(config)
	remote.check_version()
	manager = Manager(config)

	instructions_dir = defaults.INSTRUCTIONS_DIR
	if forge.settings.get('full', False):
		# do this first, so that bugs in generate_dynamic can always be nuked with a -f
		LOG.debug("Full rebuild requested: removing previous templates")
		shutil.rmtree(instructions_dir, ignore_errors=True)

	app_config = build_config.load_app()
	
	should_rebuild = remote.server_says_should_rebuild()
	server_changed = should_rebuild['should_rebuild']
	reason = should_rebuild['reason']
	stable_platform = should_rebuild['stable_platform']
	platform_state = should_rebuild['platform_state']
	
	if server_changed:
		# Need new generate dynamic - download it
		LOG.debug("Server requires rebuild: {reason}".format(reason=reason))
		LOG.info("Your Forge platform has been updated, downloading updated build instructions.")
		manager.fetch_instructions()
		
	config_changed = manager.need_new_templates_for_config()
	if config_changed:
		# Need new builds due to local config change
		LOG.info("Your local config has been changed, downloading updated build instructions.")
		manager.fetch_instructions()

	reload_result = remote.create_buildevent(app_config)
	if not has_target:
		# No need to go further if we aren't building a target
		return
		
	try:
		target = unhandled_args.pop(0)
		if target.startswith("-"):
			raise ForgeError("Target required for 'forge build'")
	except IndexError:
		raise ForgeError("Target required for 'forge build'")
	
	# Not all targets output into a folder by the same name.
	target_dirs = {
		'safari': 'forge.safariextension',
	}
	target_dir = target
	if target in target_dirs:
		target_dir = target_dirs[target]

	reload_config = json.loads(reload_result['config'])
	reload_config_hash = reload_result['config_hash']

	
	if target != "reload": # Don't do a server side build for reload
		if not path.exists(path.join('.template', target_dir)):
			LOG.info("Your app configuration has changed since your last build of this platform, performing a remote build of your app. Once this is downloaded future builds will be faster.")

			build = remote.build(config=reload_config, target=target)
			remote.fetch_unpackaged(build, to_dir=defaults.TEMPLATE_DIR, target=target)
		else:
			LOG.info('Config matches previously downloaded build, performing local build.')
	
	current_platform = app_config['platform_version']
	
	# Advise user about state of their current platform
	platform_category = classify_platform(stable_platform, current_platform)
	if platform_category == 'nonstandard':
		LOG.warning("Platform version: %s is a non-standard platform version, it may not be receiving updates and it is recommended you update to the stable platform version: %s" % (current_platform, stable_platform))

	elif platform_category == 'minor':
		LOG.warning("Platform version: %s is a minor platform version, it may not be receiving updates, it is recommended you update to a major platform version" % current_platform)
	
	elif platform_category == 'old':
		LOG.warning("Platform version: %s is no longer the current platform version, it is recommended you migrate to a newer version using the 'forge migrate' command. See http://current-docs.trigger.io/release-notes.html for more details" % current_platform)
	
	if platform_state == "deprecated":
		LOG.warning("Platform version: %s is deprecated, it is highly recommended you migrate to a newer version as soon as possible." % current_platform)

	def move_files_across():
		shutil.rmtree(path.join('development', target_dir), ignore_errors=True)
		if target != "reload":
			# Delete reload as other targets may build it
			shutil.rmtree(path.join('development', 'reload'), ignore_errors=True)
			# No reload server template
			shutil.copytree(path.join(defaults.TEMPLATE_DIR, target_dir), path.join('development', target_dir))

	# Windows often gives a permission error without a small wait
	try_a_few_times(move_files_across)

	# Put config hash in config object for local generation
	# copy first as mutating dict makes assertions about previous uses tricky
	reload_config_for_local = reload_config.copy()
	reload_config_for_local['config_hash'] = reload_config_hash

	# have templates and instructions - inject code
	generator = Generate()
	generator.all('development', defaults.SRC_DIR, extra_args=unhandled_args, config=reload_config_for_local, target=target)

	LOG.info("Development build created. Use {prog} run to run your app.".format(
		prog=ENTRY_POINT_NAME
	))
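
try_a_few_times is not shown here; in the spirit of the comment about Windows permission errors, a simple retry wrapper might look like the following. This is an illustration, not forge's implementation.

import time

def try_a_few_times(action, attempts=5, delay=0.5):
	# Retry a flaky callable (e.g. file moves on Windows) a few times
	# before giving up and re-raising the last error.
	for attempt in range(attempts):
		try:
			return action()
		except Exception:
			if attempt == attempts - 1:
				raise
			time.sleep(delay)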
Example 17
def _dispatch_command(command, other_args):
	"""Runs our subcommand in a separate thread, and handles events emitted by it"""
	call = None
	task_thread = None
	try:
		other_other_args = handle_secondary_options(command, other_args)

		subcommand = COMMANDS[command]

		# setup enough stuff so the target function can communicate back using events
		call = async.Call(
			call_id=0,
			target=subcommand,
			args=(other_other_args, ),
			input=Queue.Queue(),
			output=Queue.Queue(),
		)
		async.set_current_call(call, thread_local=True)

		# capture logging on any thread but this one and turn it into events
		handler = async.CallHandler(call)
		handler.setLevel(logging.DEBUG)

		current_thread = threading.current_thread().name
		filtered_handler = FilterHandler(handler, lambda r: r.threadName != current_thread)
		filtered_handler.setLevel(logging.DEBUG)

		logging.root.addHandler(filtered_handler)
		logging.root.setLevel(logging.DEBUG)

		task_thread = threading.Thread(target=call.run)
		task_thread.daemon = True
		task_thread.start()

		while True:
			try:
				# KeyboardInterrupts aren't seen until the .get() completes :S
				# So we set a timeout here to make sure we receive it
				next_event = call._output.get(block=True, timeout=1)
			except Queue.Empty:
				continue
			event_type = next_event['type']

			if event_type == 'question':
				answer = cli.ask_question(next_event)

				call.input({
					'eventId': next_event['eventId'],
					'data': answer,
				})

			if event_type == 'progressStart':
				cli.start_progress(next_event)

			if event_type == 'progressEnd':
				cli.end_progress(next_event)

			if event_type == 'progress':
				cli.progress_bar(next_event)

			# TODO: handle situation of logging while progress bar is running
			# e.g. extra newline before using LOG.log
			if event_type == 'log':
				# all logging in our task thread comes out as events, which we then
				# plug back into the logging system, which then directs it to file/console output
				logging_level = getattr(logging, next_event.get('level', 'DEBUG'))
				LOG.log(logging_level, next_event.get('message', ''))

			elif event_type == 'success':
				return 0

			elif event_type == 'error':
				# re-raise exception originally from other thread/process
				try:
					raise call.exception

				except RunningInForgeRoot:
					LOG.error(
						"You're trying to run commands in the build tools directory.\n"
						"You need to move to another directory outside of this one first.\n"
					)

				except UpdateRequired:
					LOG.info("An update to these command line tools is required, downloading...")

					# TODO: refactor so that we don't need to instantiate Remote here
					config = build_config.load()
					remote = Remote(config)
					try:
						remote.update()
						LOG.info("Update complete, run your command again to continue")

					except Exception as e:
						LOG.error("Upgrade process failed: %s" % e)
						LOG.debug("%s" % traceback.format_exc())
						LOG.error("You can get the tools from https://trigger.io/api/latest_tools and extract them yourself")
						LOG.error("Contact [email protected] if you have any further issues")

				except ForgeError as e:
					# thrown by us, expected
					LOG.error(next_event.get('message'))
					LOG.debug(str(next_event.get('traceback')))

				except Exception:
					LOG.error("Something went wrong that we didn't expect:")
					LOG.error(next_event.get('message'))
					LOG.debug(str(next_event.get('traceback')))

					LOG.error("See %s for more details" % ERROR_LOG_FILE)
					LOG.error("Please contact [email protected]")

				return 1
	except KeyboardInterrupt:
		sys.stdout.write('\n')
		LOG.info('Exiting...')
		if call:
			call.interrupt()
			task_thread.join(timeout=5)
		return 1
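
FilterHandler is not defined in this example; a minimal wrapper with the behaviour used above (forward a record to another handler only when a predicate accepts it) could be sketched as follows. This is an assumption about its shape, not forge's actual class.

import logging

class FilterHandler(logging.Handler):
	def __init__(self, wrapped, predicate):
		logging.Handler.__init__(self)
		self.wrapped = wrapped
		self.predicate = predicate

	def emit(self, record):
		# Only pass records the predicate accepts on to the wrapped handler.
		if self.predicate(record):
			self.wrapped.handle(record)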
Example 18
def populate_trigger_domain(build):
	from forge import build_config
	config = build_config.load()
	build.config['trigger_domain'] = config['main']['server'][:-5]
Example 19
def populate_trigger_domain(build):
    from forge import build_config

    config = build_config.load()
    build.config["trigger_domain"] = config["main"]["server"][:-5]
Example 20
def embed():
	bottle.request.environ['HTTP_IF_MODIFIED_SINCE'] = None
	bottle.response.content_type = 'text/html'

	with codecs.open(os.path.join(workspace.path('static'), 'index.html'), 'r') as index_file:
		return pystache.render(index_file.read(), {'embed': True, 'debug': build_config.load().get('debug', False)})
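
For context, pystache.render takes a Mustache template string and a context dict; a tiny standalone example follows (the template text here is made up, not the real index.html).

import pystache

html = pystache.render("<title>{{#debug}}[debug] {{/debug}}Forge</title>",
                       {"debug": True})
print(html)  # <title>[debug] Forge</title>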