def __init__(self):
    """Configure the Servo build script: action list plus default VCS settings."""
    actions = [
        'setup-mock',
        'checkout-servo',
        'clobber-obj',
        'configure',
        'build',
        'check',
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        # other stuff
        all_actions=actions,
        # mock setup is opt-in: everything after it runs by default
        default_actions=actions[1:],
        config={
            'default_vcs': 'gittool',
            'backup_rust': True,
            'concurrency': 1,
        },
    )
def __init__(self):
    """Set up the push-to-releases script and point boto at our credentials file."""
    actions = [
        "create-virtualenv",
        "activate-virtualenv",
        "push-to-releases",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=False,
        config={
            "virtualenv_modules": ["boto", "redo"],
            "virtualenv_path": "venv",
        },
        all_actions=actions,
        default_actions=actions[:],
    )
    # Make boto read our dedicated config file rather than anything
    # else we have at ~/.boto.
    os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
def __init__(self):
    """Set up the MAR scanning script: virtualenv, actions, excludes, cache dir."""
    actions = [
        "create-virtualenv",
        "activate-virtualenv",
        "get-extract-script",
        "get-files",
        "scan-files",
        "cleanup-cache",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=False,
        config={
            "virtualenv_modules": ["boto", "redo", "mar"],
            "virtualenv_path": "venv",
        },
        all_actions=actions,
        default_actions=actions[:],
    )
    # Allow the config to override the class-level exclude list.
    self.excludes = self.config.get('excludes', self.DEFAULT_EXCLUDES)
    self.dest_dir = self.CACHE_DIR
def __init__(self):
    """Desktop partner repack script (buildbot flavour).

    Builds the fxbuild-style kwargs, initializes BaseScript, then fails
    fast if any required config key is missing.
    """
    # fxbuild style:
    buildscript_kwargs = {
        'all_actions': DesktopPartnerRepacks.actions,
        'default_actions': DesktopPartnerRepacks.actions,
        'config': {
            'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
            "log_name": "partner-repacks",
            "hashType": "sha512",
            'virtualenv_modules': [
                'requests==2.2.1',
                'PyHawk-with-a-single-extra-commit==0.1.5',
                'taskcluster==0.0.15',
                's3cmd==1.6.0',
            ],
            'virtualenv_path': 'venv',
            'workdir': 'partner-repacks',
        },
    }
    #
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        **buildscript_kwargs
    )
    # Data-driven required-key validation; messages and check order are
    # identical to the previous repeated if-blocks (fatal() aborts on the
    # first missing key).
    for key, message in (
        ('version', "Version (-v) not supplied."),
        ('buildnumber', "Build number (-n) not supplied."),
        ('repo_file', "repo_file not supplied."),
        ('repack_manifests_url', "repack_manifests_url not supplied."),
    ):
        if key not in self.config:
            self.fatal(message)
def __init__(self):
    """Initialize the push-to-releases beetmover-style script.

    Creates a boto/redo virtualenv and directs boto at the credentials
    file from our config.
    """
    virtualenv_actions = [
        "create-virtualenv",
        "activate-virtualenv",
        "push-to-releases",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=False,
        config={
            "virtualenv_modules": ["boto", "redo"],
            "virtualenv_path": "venv",
        },
        all_actions=virtualenv_actions,
        default_actions=list(virtualenv_actions),
    )
    # set the env var for boto to read our special config file
    # rather than anything else we have at ~/.boto
    os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
def __init__(self):
    """Desktop partner repack script (buildprops.json flavour)."""
    # fxbuild style:
    script_kwargs = {
        'all_actions': DesktopPartnerRepacks.actions,
        'default_actions': DesktopPartnerRepacks.actions,
        'config': {
            'buildbot_json_path': 'buildprops.json',
            "log_name": "partner-repacks",
            "hashType": "sha512",
            'virtualenv_modules': [
                'requests==2.2.1',
                'PyHawk-with-a-single-extra-commit==0.1.5',
                'taskcluster==0.0.15',
                's3cmd==1.6.0',
            ],
            'virtualenv_path': 'venv',
            'workdir': 'partner-repacks',
        },
    }
    #
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        **script_kwargs
    )
def __init__(self):
    """Checksums script: collect, aggregate, sign and upload checksum files."""
    actions = [
        "create-virtualenv",
        "activate-virtualenv",
        "collect-individual-checksums",
        "create-big-checksums",
        "sign",
        "get-upload-script",
        "upload",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=False,
        config={
            "virtualenv_modules": ["boto", "redo"],
            "virtualenv_path": "venv",
        },
        all_actions=actions,
        default_actions=actions[:],
    )
    # Collected checksums, keyed as they are gathered by the actions above.
    self.checksums = {}
def __init__(self):
    """MAR scan script init: boto/redo/mar virtualenv plus scan actions."""
    scan_actions = [
        "create-virtualenv",
        "activate-virtualenv",
        "get-extract-script",
        "get-files",
        "scan-files",
        "cleanup-cache",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=False,
        config={
            "virtualenv_modules": ["boto", "redo", "mar"],
            "virtualenv_path": "venv",
        },
        all_actions=scan_actions,
        default_actions=list(scan_actions),
    )
    self.excludes = self.config.get('excludes', self.DEFAULT_EXCLUDES)
    self.dest_dir = self.CACHE_DIR
def __init__(self):
    """Desktop partner repack script (PROPERTIES_FILE flavour) with repo checks."""
    # fxbuild style:
    script_kwargs = {
        'all_actions': DesktopPartnerRepacks.actions,
        'default_actions': DesktopPartnerRepacks.actions,
        'config': {
            'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
            "log_name": "partner-repacks",
            "hashType": "sha512",
            'virtualenv_modules': [
                'requests==2.2.1',
                'PyHawk-with-a-single-extra-commit==0.1.5',
                'taskcluster==0.0.15',
                's3cmd==1.6.0',
            ],
            'virtualenv_path': 'venv',
            'workdir': 'partner-repacks',
        },
    }
    #
    BaseScript.__init__(self, config_options=self.config_options, **script_kwargs)
    # These two settings have no usable default; abort early if absent.
    if 'repo_file' not in self.config:
        self.fatal("repo_file not supplied.")
    if 'repack_manifests_url' not in self.config:
        self.fatal("repack_manifests_url not supplied.")
def __init__(self):
    """Hazard-analysis build script (mock flavour).

    First builds an optimized JS shell for running the analysis, then a
    tree with the analysis plugin active.  The same checkout serves the
    shell build and the analyzed build (different objdirs), which is a
    little unnecessary — no need to rebuild the JS shell all the time.
    """
    actions = [
        "setup-mock",
        "reuse-mock",
        "checkout-tools",
        "checkout-source",
        "clobber-shell",
        "configure-shell",
        "build-shell",
        "clobber-analysis",
        "setup-analysis",
        "run-analysis",
        "collect-analysis-output",
        "upload-analysis",
        "check-expectations",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        # other stuff
        all_actions=actions,
        # 'reuse-mock' is available but not run by default
        default_actions=[a for a in actions if a != "reuse-mock"],
        config={
            "default_vcs": "hgtool",
            "vcs_share_base": os.environ.get("HG_SHARE_BASE_DIR"),
            "ccache": True,
            "buildbot_json_path": os.environ.get("PROPERTIES_FILE"),
            "tooltool_servers": None,
            "tools_repo": "http://hg.mozilla.org/build/tools",
            "upload_ssh_server": None,
            "upload_remote_basepath": None,
            "enable_try_uploads": True,
        },
    )
    # Environment with nuisance variables purged; mock-free by default.
    self.nonmock_env = self.query_env(purge_env=nuisance_env_vars)
    self.env = self.nonmock_env
    self.buildtime = None
def __init__(self, require_config_file=False, config=None,
             all_actions=all_actions, default_actions=default_actions):
    """Initialize the script with a merged default configuration.

    ``config`` previously defaulted to a shared mutable ``{}`` — the
    classic mutable-default-argument pitfall — and now defaults to
    ``None`` (treated as an empty dict), which is backward compatible.
    """
    # Default configuration
    default_config = {
        'debug_build': False,
        'pip_index': True,
        # this will pip install it automajically when we call the create-virtualenv action
        'virtualenv_modules': ['google-api-python-client'],
        "find_links": [
            # so mozharness knows where to look for the package
            "http://pypi.pvt.build.mozilla.org/pub",
            "http://pypi.pub.build.mozilla.org/pub",
        ],
        # the path inside the work_dir ('build') of where we will install the env.
        # pretty sure it's the default and not needed.
        'virtualenv_path': 'venv',
    }
    # Caller-supplied settings override the defaults.
    default_config.update(config or {})
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        config=default_config,
        all_actions=all_actions,
        default_actions=default_actions,
    )
def __init__(self, require_config_file=True):
    """Single-locale repack script (taskcluster 0.0.15 / requests 2.2.1 era)."""
    # fxbuild style:
    buildscript_kwargs = {
        'all_actions': [
            "clobber", "pull", "list-locales", "setup", "repack",
            "taskcluster-upload", "funsize-props", "submit-to-balrog",
            "summary",
        ],
        'config': {
            "buildbot_json_path": "buildprops.json",
            "ignore_locales": ["en-US"],
            "locales_dir": "browser/locales",
            "update_mar_dir": "dist/update",
            "buildid_section": "App",
            "buildid_option": "BuildID",
            "application_ini": "application.ini",
            "log_name": "single_locale",
            "clobber_file": 'CLOBBER',
            "appName": "Firefox",
            "hashType": "sha512",
            "taskcluster_credentials_file": "oauth.txt",
            'virtualenv_modules': [
                'requests==2.2.1',
                'PyHawk-with-a-single-extra-commit==0.1.5',
                'taskcluster==0.0.15',
            ],
            'virtualenv_path': 'venv',
        },
    }
    #
    LocalesMixin.__init__(self)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        **buildscript_kwargs
    )
    # Per-run state filled in by the actions above.
    self.buildid = self.make_ident_output = None
    self.bootstrap_env = self.upload_env = None
    self.revision = self.version = self.pushdate = None
    self.upload_urls = {}
    self.locales_property = {}
    self.package_urls = {}
    # upload_files is a dictionary of files to upload, keyed by locale.
    self.upload_files = {}
    if 'mock_target' in self.config:
        self.enable_mock()
def __init__(self):
    """Checksums script with S3 bucket state and boto credential wiring."""
    all_actions = [
        "create-virtualenv",
        "collect-individual-checksums",
        "create-big-checksums",
        "sign",
        "upload",
        "copy-info-files",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=False,
        config={
            "virtualenv_modules": ["boto"],
            "virtualenv_path": "venv",
            'buildbot_json_path': 'buildprops.json',
        },
        all_actions=all_actions,
        # copy-info-files is opt-in; everything before it runs by default
        default_actions=all_actions[:-1],
    )
    self.checksums = {}
    self.bucket = None
    self.bucket_name = self._get_bucket_name()
    self.file_prefix = self._get_file_prefix()
    # set the env var for boto to read our special config file
    # rather than anything else we have at ~/.boto
    os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
def __init__(self, require_config_file=False, config=None,
             all_actions=all_actions, default_actions=default_actions):
    """Initialize the l10n bumper-style script and build its API URLs.

    ``config`` previously defaulted to a shared mutable ``{}``; it now
    defaults to ``None`` (treated as an empty dict) to avoid the
    mutable-default-argument pitfall.  Backward compatible.
    """
    # Default configuration
    default_config = {
        'debug_build': False,
        'pip_index': True,
        # this will pip install it automajically when we call the
        # create-virtualenv action
        'virtualenv_modules': ['google-api-python-client'],
        "find_links": [
            "http://pypi.pvt.build.mozilla.org/pub",
            "http://pypi.pub.build.mozilla.org/pub",
        ],
        'virtualenv_path': 'venv',
    }
    default_config.update(config or {})
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        config=default_config,
        all_actions=all_actions,
        default_actions=default_actions,
    )
    # REST endpoints derived from the (required) l10n_api_url setting.
    self.all_locales_url = self.config['l10n_api_url'] + "api/?done&channel={channel}"
    self.locale_url = self.config['l10n_api_url'] + "api/?locale={locale}&channel={channel}"
    self.mapping_url = self.config['l10n_api_url'] + "api/?locale_mapping&reverse"
def __init__(self):
    """Checksum summary script: collect, aggregate, and summarize checksums."""
    actions = [
        "create-virtualenv",
        "collect-individual-checksums",
        "create-big-checksums",
        "create-summary",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=False,
        config={
            "virtualenv_modules": ["boto"],
            "virtualenv_path": "venv",
        },
        all_actions=actions,
        default_actions=actions[:],
    )
    self.checksums = {}
    self.file_prefix = self._get_file_prefix()
def __init__(self, **kwargs):
    """Talos-style test script: fill in default kwargs, then init BaseScript."""
    defaults = {
        'config_options': self.config_options,
        'all_actions': [
            'clobber',
            'read-buildbot-config',
            'download-and-extract',
            'create-virtualenv',
            'install',
            'generate-config',
            'run-tests',
        ],
        'default_actions': [
            'clobber',
            'download-and-extract',
            'create-virtualenv',
            'install',
            'generate-config',
            'run-tests',
        ],
        'config': {},
    }
    # Caller-supplied kwargs win over the defaults above.
    for key, value in defaults.items():
        kwargs.setdefault(key, value)
    kwargs['config'].setdefault('virtualenv_modules', ["talos", "mozinstall"])
    BaseScript.__init__(self, **kwargs)
    self.workdir = self.query_abs_dirs()['abs_work_dir']  # convenience
    # results output
    self.results_url = self.config.get('results_url')
    if self.results_url is None:
        # use a results_url by default based on the class name in the working directory
        self.results_url = 'file://%s' % os.path.join(
            self.workdir, self.__class__.__name__.lower() + '.txt')
    self.installer_url = self.config.get("installer_url")
def __init__(self, require_config_file=True):
    """Single-locale repack script (taskcluster 0.0.26 / requests 2.8.1 era)."""
    # fxbuild style:
    script_kwargs = {
        'all_actions': [
            "clobber", "pull", "list-locales", "setup", "repack",
            "taskcluster-upload", "funsize-props", "submit-to-balrog",
            "summary",
        ],
        'config': {
            "buildbot_json_path": "buildprops.json",
            "ignore_locales": ["en-US"],
            "locales_dir": "browser/locales",
            "update_mar_dir": "dist/update",
            "buildid_section": "App",
            "buildid_option": "BuildID",
            "application_ini": "application.ini",
            "log_name": "single_locale",
            "clobber_file": 'CLOBBER',
            "appName": "Firefox",
            "hashType": "sha512",
            "taskcluster_credentials_file": "oauth.txt",
            'virtualenv_modules': [
                'requests==2.8.1',
                'PyHawk-with-a-single-extra-commit==0.1.5',
                'taskcluster==0.0.26',
            ],
            'virtualenv_path': 'venv',
        },
    }
    #
    LocalesMixin.__init__(self)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        **script_kwargs
    )
    # Per-run state populated by the actions above.
    self.buildid = self.make_ident_output = None
    self.bootstrap_env = self.upload_env = None
    self.revision = self.version = self.pushdate = None
    self.upload_urls = {}
    self.locales_property = {}
    self.package_urls = {}
    # upload_files is a dictionary of files to upload, keyed by locale.
    self.upload_files = {}
    if 'mock_target' in self.config:
        self.enable_mock()
def __init__(self, require_config_file=False):
    """Gaia l10n build script: locales mixin plus pull/build/summary actions."""
    LocalesMixin.__init__(self)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        all_actions=['pull', 'build', 'summary'],
        require_config_file=require_config_file,
        # Default configuration
        config={
            'gaia_l10n_vcs': 'hg',
            'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
            'locales_dir': 'b2g/locales',
            'l10n_dir': 'gecko-l10n',
            # Copied from the Android multilocale setup; original purpose unclear.
            'ignore_locales': ["en-US", "multi"],
            # This file only has 2 locales in it.  We probably need files
            # mirroring gaia's locale lists: gaia's locale names differ from
            # gecko's (e.g. 'es' vs 'es-ES'), so two sets of locales files
            # are needed, and this must be overridden for localizer builds.
            'locales_file': 'build/b2g/locales/all-locales',
            'mozilla_dir': 'build',
            'objdir': 'obj-firefox',
            'merge_locales': True,
            'work_dir': '.',
            'vcs_output_timeout': 600,  # 10 minutes should be enough for anyone!
        },
    )
def __init__(self):
    """Servo build script init: mock setup is listed but not a default action."""
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        # other stuff
        all_actions=[
            'setup-mock',
            'checkout-servo',
            'clobber-obj',
            'configure',
            'build',
            'check',
        ],
        default_actions=[
            'checkout-servo',
            'clobber-obj',
            'configure',
            'build',
            'check',
        ],
        config={
            'default_vcs': 'gittool',
            'backup_rust': True,
            'concurrency': 1,
        },
    )
def __init__(self):
    """Hazard-analysis build script (purge flavour).

    First builds an optimized JS shell for running the analysis, then a
    tree with the analysis plugin active.  The same checkout serves both
    the shell build and the analyzed build (different objdirs), which is
    a little unnecessary — no need to rebuild the JS shell every time.
    """
    actions = [
        'purge',
        'checkout-tools',
        'checkout-source',
        'get-blobs',
        'clobber-shell',
        'configure-shell',
        'build-shell',
        'clobber-analysis',
        'setup-analysis',
        'run-analysis',
        'collect-analysis-output',
        'upload-analysis',
        'check-expectations',
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        # other stuff
        all_actions=actions,
        default_actions=actions[:],
        config={
            'default_vcs': 'hgtool',
            'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
            'ccache': True,
            'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
            'tools_repo': 'https://hg.mozilla.org/build/tools',
            'upload_ssh_server': None,
            'upload_remote_basepath': None,
            'enable_try_uploads': True,
            'source': None,
        },
    )
    self.buildid = None
    self.analysis = HazardAnalysis()
def __init__(self):
    """Hazard-analysis build script (stage_product flavour).

    First builds an optimized JS shell for running the analysis, then a
    tree with the analysis plugin active.  The same checkout serves both
    builds (different objdirs), which is a little unnecessary — no need
    to rebuild the JS shell every time.
    """
    actions = [
        "purge",
        "checkout-tools",
        "checkout-source",
        "get-blobs",
        "clobber-shell",
        "configure-shell",
        "build-shell",
        "clobber-analysis",
        "setup-analysis",
        "run-analysis",
        "collect-analysis-output",
        "upload-analysis",
        "check-expectations",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        # other stuff
        all_actions=actions,
        # upload-analysis is temporarily excluded from the defaults -
        # see bug 1211402
        default_actions=[a for a in actions if a != "upload-analysis"],
        config={
            "default_vcs": "hgtool",
            "vcs_share_base": os.environ.get("HG_SHARE_BASE_DIR"),
            "ccache": True,
            "buildbot_json_path": os.environ.get("PROPERTIES_FILE"),
            "tools_repo": "https://hg.mozilla.org/build/tools",
            "upload_ssh_server": None,
            "upload_remote_basepath": None,
            "enable_try_uploads": True,
            "source": None,
            "stage_product": "firefox",
        },
    )
    self.buildid = None
    self.analysis = HazardAnalysis()
def __init__(self, require_config_file=True):
    """Minimal single-locale repack script (hg l10n-central flavour)."""
    # fxbuild style:
    script_kwargs = {
        "all_actions": [
            "clone-locales",
            "list-locales",
            "setup",
            "repack",
            "summary",
        ],
        "config": {
            "ignore_locales": ["en-US"],
            "locales_dir": "browser/locales",
            "log_name": "single_locale",
            "hg_l10n_base": "https://hg.mozilla.org/l10n-central",
        },
    }
    LocalesMixin.__init__(self)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        **script_kwargs
    )
    # Per-run state populated by the actions above.
    self.bootstrap_env = self.upload_env = self.pushdate = None
    self.upload_urls = {}
    # upload_files is a dictionary of files to upload, keyed by locale.
    self.upload_files = {}
def __init__(self, require_config_file=False):
    """Gaia l10n build script (no VCS output timeout variant)."""
    LocalesMixin.__init__(self)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        all_actions=['pull', 'build', 'summary'],
        require_config_file=require_config_file,
        # Default configuration
        config={
            'gaia_l10n_vcs': 'hg',
            'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
            'locales_dir': 'b2g/locales',
            'l10n_dir': 'gecko-l10n',
            # Copied from the Android multilocale setup; original purpose unclear.
            'ignore_locales': ["en-US", "multi"],
            # This file only has 2 locales in it.  We probably need files
            # mirroring gaia's locale lists: gaia's locale names differ from
            # gecko's (e.g. 'es' vs 'es-ES'), so two sets of locales files
            # are needed, and this must be overridden for localizer builds.
            'locales_file': 'build/b2g/locales/all-locales',
            'mozilla_dir': 'build',
            'objdir': 'obj-firefox',
            'merge_locales': True,
            'work_dir': '.',
        },
    )
def __init__(self):
    """Release config generator: mozrelease virtualenv plus config actions."""
    actions = [
        "create-virtualenv",
        "activate-virtualenv",
        "gather-info",
        "create-config",
        "write-config",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        config={
            "virtualenv_modules": ["mozrelease"],
            "virtualenv_path": "venv",
        },
        all_actions=actions,
        default_actions=actions[:],
    )
def __init__(self, require_config_file=True):
    """Minimal single-locale repack script (no hg_l10n_base variant)."""
    # fxbuild style:
    script_kwargs = {
        'all_actions': [
            "clone-locales",
            "list-locales",
            "setup",
            "repack",
            "summary",
        ],
        'config': {
            "ignore_locales": ["en-US"],
            "locales_dir": "browser/locales",
            "log_name": "single_locale",
        },
    }
    LocalesMixin.__init__(self)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        **script_kwargs
    )
    # Per-run state populated by the actions above.
    self.bootstrap_env = self.upload_env = None
    self.revision = self.version = self.pushdate = None
    self.upload_urls = {}
    self.locales_property = {}
    # upload_files is a dictionary of files to upload, keyed by locale.
    self.upload_files = {}
def __init__(self, require_config_file=True):
    """Nightly l10n repack script with snippet generation and upload actions."""
    LocalesMixin.__init__(self)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        all_actions=[
            "clobber",
            "pull",
            "list-locales",
            "setup",
            "repack",
            # "generate-complete-mar" and "generate-partials" are
            # currently disabled
            "create-nightly-snippets",
            "upload-nightly-repacks",
            "upload-snippets",
            "summary",
        ],
        require_config_file=require_config_file
    )
    # Per-run state populated by the actions above.
    self.buildid = self.make_ident_output = self.repack_env = None
    self.revision = self.version = self.l10n_dir = None
    self.upload_urls = {}
    self.locales_property = {}
    if 'mock_target' in self.config:
        self.enable_mock()
def __init__(self):
    """Checksums script init: boto/redo virtualenv and upload pipeline actions."""
    checksum_actions = [
        "create-virtualenv",
        "activate-virtualenv",
        "collect-individual-checksums",
        "create-big-checksums",
        "sign",
        "get-upload-script",
        "upload",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=False,
        config={
            "virtualenv_modules": ["boto", "redo"],
            "virtualenv_path": "venv",
        },
        all_actions=checksum_actions,
        default_actions=list(checksum_actions),
    )
    self.checksums = {}
def __init__(self, require_config_file=False):
    """Source-tool script: a single 'source' action with its own usage text."""
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        all_actions=['source'],
        usage=SOURCE_TOOL_USAGE,
        require_config_file=require_config_file,
    )
def __init__(self):
    """Servo build script variant that forces debug-level logging."""
    actions = [
        "setup-mock",
        "checkout-servo",
        "clobber-obj",
        "configure",
        "build",
        "check",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        # other stuff
        all_actions=actions,
        # mock setup is excluded from the defaults
        default_actions=actions[1:],
        config={
            "default_vcs": "gittool",
            "backup_rust": True,
            "concurrency": 1,
            "log_level": DEBUG,
        },
    )
def __init__(self, require_config_file=False):
    """Config test script: list and validate JSON/Python config files."""
    self.config_files = []
    test_actions = ['test-json-configs', 'test-python-configs']
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        # listing is available but not run by default
        all_actions=['list-config-files'] + test_actions,
        default_actions=test_actions,
        require_config_file=require_config_file,
    )
def __init__(self, require_config_file=False):
    """Repackage script: setup then repackage.

    NOTE(review): unlike sibling scripts, no config_options is passed to
    BaseScript here — confirm that is intentional.
    """
    script_kwargs = {
        'all_actions': [
            "setup",
            "repackage",
        ],
    }
    BaseScript.__init__(
        self,
        require_config_file=require_config_file,
        **script_kwargs
    )
def __init__(self, require_config_file=True):
    """Single-locale repack script with partial-MAR machinery (repack_env era)."""
    # fxbuild style:
    script_kwargs = {
        'all_actions': [
            "clobber", "pull", "list-locales", "setup", "repack",
            "upload-repacks", "submit-to-balrog", "summary",
        ],
        'config': {
            "buildbot_json_path": "buildprops.json",
            "ignore_locales": ["en-US"],
            "locales_dir": "browser/locales",
            "previous_mar_dir": "previous",
            "current_mar_dir": "current",
            "update_mar_dir": "dist/update",
            "previous_mar_filename": "previous.mar",
            "current_work_mar_dir": "current.work",
            "buildid_section": "App",
            "buildid_option": "BuildID",
            "application_ini": "application.ini",
            "unpack_script": "tools/update-packaging/unwrap_full_update.pl",
            "log_name": "single_locale",
            "clobber_file": 'CLOBBER',
            "appName": "Firefox",
            "hashType": "sha512",
        },
    }
    #
    LocalesMixin.__init__(self)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        **script_kwargs
    )
    # Per-run state populated by the actions above.
    self.buildid = self.make_ident_output = None
    self.repack_env = self.upload_env = None
    self.revision = self.version = self.l10n_dir = None
    self.upload_urls = {}
    self.locales_property = {}
    self.package_urls = {}
    self.partials = {}
    if 'mock_target' in self.config:
        self.enable_mock()
def __init__(self, require_config_file=True):
    """Single-locale repack script with partial-MAR machinery (bootstrap_env era)."""
    # fxbuild style:
    script_kwargs = {
        'all_actions': [
            "clobber", "pull", "list-locales", "setup", "repack",
            "upload-repacks", "submit-to-balrog", "summary",
        ],
        'config': {
            "buildbot_json_path": "buildprops.json",
            "ignore_locales": ["en-US"],
            "locales_dir": "browser/locales",
            "previous_mar_dir": "previous",
            "current_mar_dir": "current",
            "update_mar_dir": "dist/update",
            "previous_mar_filename": "previous.mar",
            "current_work_mar_dir": "current.work",
            "buildid_section": "App",
            "buildid_option": "BuildID",
            "application_ini": "application.ini",
            "unpack_script": "tools/update-packaging/unwrap_full_update.pl",
            "log_name": "single_locale",
            "clobber_file": 'CLOBBER',
            "appName": "Firefox",
            "hashType": "sha512",
        },
    }
    #
    LocalesMixin.__init__(self)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        **script_kwargs
    )
    # Per-run state populated by the actions above.
    self.buildid = self.make_ident_output = None
    self.bootstrap_env = self.upload_env = None
    self.revision = self.version = self.l10n_dir = None
    self.upload_urls = {}
    self.locales_property = {}
    self.package_urls = {}
    self.partials = {}
    if 'mock_target' in self.config:
        self.enable_mock()
def __init__(self, require_config_file=False):
    """Repackage script: download the input artifact, set up, then repackage.

    NOTE(review): unlike sibling scripts, no config_options is passed to
    BaseScript here — confirm that is intentional.
    """
    script_kwargs = {
        'all_actions': [
            "download_input",
            "setup",
            "repackage",
        ],
    }
    BaseScript.__init__(self, require_config_file=require_config_file, **script_kwargs)
def __init__(self, require_config_file=False):
    """Config test script: validate JSON/Python configs and print a summary."""
    self.config_files = []
    test_actions = ['test-json-configs', 'test-python-configs', 'summary']
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        # listing is available but not run by default
        all_actions=['list-config-files'] + test_actions,
        default_actions=test_actions,
        require_config_file=require_config_file,
    )
def __init__(self, require_config_file=True):
    """Single-locale repack script (clone-locales flavour, requests 2.8.1)."""
    # fxbuild style:
    script_kwargs = {
        'all_actions': [
            "clobber", "pull", "clone-locales", "list-locales", "setup",
            "repack", "taskcluster-upload", "funsize-props",
            "submit-to-balrog", "summary",
        ],
        'config': {
            "ignore_locales": ["en-US"],
            "locales_dir": "browser/locales",
            "buildid_section": "App",
            "buildid_option": "BuildID",
            "application_ini": "application.ini",
            "log_name": "single_locale",
            "appName": "Firefox",
            "hashType": "sha512",
            'virtualenv_modules': [
                'requests==2.8.1',
            ],
            'virtualenv_path': 'venv',
        },
    }
    LocalesMixin.__init__(self)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        **script_kwargs
    )
    # Per-run state populated by the actions above.
    self.buildid = self.make_ident_output = None
    self.bootstrap_env = self.upload_env = None
    self.revision = self.version = self.pushdate = None
    self.upload_urls = {}
    self.locales_property = {}
    # upload_files is a dictionary of files to upload, keyed by locale.
    self.upload_files = {}
def __init__(self, require_config_file=False):
    """Repackage script requiring a signed input artifact.

    ``signed_input`` must come from --signed-input or the SIGNED_INPUT
    environment variable.  Validation now uses ``fatal()`` instead of
    ``assert``: asserts are stripped under ``python -O``, which would
    silently skip this check.  (Consistent with sibling scripts that use
    ``fatal()`` for missing config.)
    """
    script_kwargs = {
        'all_actions': [
            "download_input",
            "setup",
            "repackage",
        ],
    }
    BaseScript.__init__(self, config_options=self.config_options,
                        require_config_file=require_config_file,
                        **script_kwargs)
    # Require it either passed in or in environment
    if not self.config.get('signed_input'):
        self.fatal(
            "Must pass --signed-input or be set in the environment as SIGNED_INPUT")
def __init__(self):
    """Desktop partner repack script (minimal, no virtualenv, no buildbot)."""
    # fxbuild style:
    script_kwargs = {
        "all_actions": DesktopPartnerRepacks.actions,
        "default_actions": DesktopPartnerRepacks.actions,
        "config": {
            "log_name": "partner-repacks",
            "hashType": "sha512",
            "workdir": "partner-repacks",
        },
    }
    #
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        **script_kwargs
    )
def __init__(self):
    """Desktop partner repack script (minimal single-quoted flavour)."""
    # fxbuild style:
    kwargs = {
        'all_actions': DesktopPartnerRepacks.actions,
        'default_actions': DesktopPartnerRepacks.actions,
        'config': {
            "log_name": "partner-repacks",
            "hashType": "sha512",
            'workdir': 'partner-repacks',
        },
    }
    #
    BaseScript.__init__(self, config_options=self.config_options, **kwargs)
def __init__(self):
    """Release config generator (no virtualenv variant)."""
    actions = [
        "gather-info",
        "create-config",
        "write-config",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        config={},
        all_actions=actions,
        default_actions=actions[:],
    )
def __init__(self, require_config_file=True):
    """Shipped-locales submission script: clobber, download, submit."""
    actions = [
        'clobber',
        'download-shipped-locales',
        'submit',
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        # other stuff
        all_actions=actions,
        default_actions=actions[:],
    )
    self.locales = None
    self.credentials = None
def __init__(self, require_config_file=False, config=None,
             all_actions=all_actions, default_actions=default_actions):
    """Initialize the script with a merged default configuration.

    Fixes two defects: ``default_actions`` was accepted as a parameter
    but never forwarded to ``BaseScript.__init__`` (it is now passed,
    matching sibling scripts), and ``config`` used a shared mutable
    ``{}`` default (now ``None``, treated as an empty dict).
    """
    default_config = {
        # the path inside the work_dir ('build') of where we will install the env.
        # pretty sure it's the default and not needed.
        'virtualenv_path': 'venv',
    }
    # Caller-supplied settings override the defaults.
    default_config.update(config or {})
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        config=default_config,
        all_actions=all_actions,
        default_actions=default_actions,
    )
def __init__(self, require_config_file=False):
    """Repackage script requiring a signed input artifact.

    ``signed_input`` must be supplied via --signed-input or the
    SIGNED_INPUT environment variable.  Validation now uses ``fatal()``
    instead of ``assert``: asserts are stripped under ``python -O``,
    which would silently skip this check.  (Consistent with sibling
    scripts that use ``fatal()`` for missing config.)
    """
    script_kwargs = {
        'all_actions': [
            "download_input",
            "setup",
            "repackage",
        ],
    }
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        **script_kwargs
    )
    # Require it either passed in or in environment
    if not self.config.get('signed_input'):
        self.fatal(
            "Must pass --signed-input or be set in the environment as SIGNED_INPUT")
def __init__(self, require_config_file=False):
    """Config test script: validate configs and summarize (formatted flavour)."""
    self.config_files = []
    default_actions = [
        "test-json-configs",
        "test-python-configs",
        "summary",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        # listing is available but not part of the defaults
        all_actions=["list-config-files"] + default_actions,
        default_actions=default_actions,
        require_config_file=require_config_file,
    )
def __init__(self, aws_creds):
    """Push-to-releases script taking AWS credentials directly.

    Credentials must come from exactly one place: the ``aws_creds`` pair
    or the script config — never both, never neither.
    """
    actions = [
        "create-virtualenv",
        "activate-virtualenv",
        "push-to-releases",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=False,
        config={
            "virtualenv_modules": ["pip==1.5.5", "boto", "redo"],
            "virtualenv_path": "venv",
        },
        all_actions=actions,
        default_actions=actions[:],
    )
    # validate aws credentials
    if not (all(aws_creds) or self.config.get('credentials')):
        self.fatal(
            "aws creds not defined. please add them to your config or env."
        )
    if any(aws_creds) and self.config.get('credentials'):
        self.fatal(
            "aws creds found in env and self.config. please declare in one place only."
        )
    # set aws credentials
    if all(aws_creds):
        self.aws_key_id, self.aws_secret_key = aws_creds
    else:  # use
        self.aws_key_id = self.aws_secret_key = None
    # set the env var for boto to read our special config file
    # rather than anything else we have at ~/.boto
    os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
def __init__(self):
    """Certificate-transparency submitter: add a chain to a CT log."""
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        config={
            "virtualenv_modules": ["pem", "redo", "requests"],
            "virtualenv_path": "venv",
        },
        require_config_file=False,
        all_actions=["add-chain"],
        default_actions=["add-chain"],
    )
    # Required settings: certificate chain, CT log endpoint, SCT output file.
    self.chain_url = self.config["chain"]
    self.log_url = self.config["log"]
    self.sct_filename = self.config["sct"]
def __init__(self):
    """CT log submission script: single add-chain action."""
    chain_actions = ["add-chain"]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        config={
            "virtualenv_modules": [
                "pem",
                "redo",
                "requests",
            ],
            "virtualenv_path": "venv",
        },
        require_config_file=False,
        all_actions=chain_actions,
        default_actions=list(chain_actions),
    )
    self.chain_url = self.config["chain"]
    self.log_url = self.config["log"]
    self.sct_filename = self.config["sct"]
def __init__(self, require_config_file=False, config=None,
             all_actions=all_actions, default_actions=default_actions):
    """Initialize the l10n script, its API URLs, and translation management.

    ``config`` previously defaulted to a shared mutable ``{}`` — the
    mutable-default-argument pitfall, made worse here because the same
    shared dict was also handed to ``storel10n``.  It now defaults to
    ``None`` and is normalized to a fresh dict per call.
    """
    config = {} if config is None else config
    # Default configuration
    default_config = {
        'debug_build': False,
        'pip_index': True,
        # this will pip install it automajically when we call the
        # create-virtualenv action
        'virtualenv_modules': ['google-api-python-client'],
        "find_links": [
            "http://pypi.pvt.build.mozilla.org/pub",
            "http://pypi.pub.build.mozilla.org/pub",
        ],
        'virtualenv_path': 'venv',
    }
    default_config.update(config)
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=require_config_file,
        config=default_config,
        all_actions=all_actions,
        default_actions=default_actions,
    )
    # REST endpoints derived from the (required) l10n_api_url setting.
    self.all_locales_url = self.config[
        'l10n_api_url'] + "api/?done&channel={channel}"
    self.locale_url = self.config[
        'l10n_api_url'] + "api/?locale={locale}&channel={channel}"
    self.mapping_url = self.config[
        'l10n_api_url'] + "api/?locale_mapping&reverse"
    self.translationMgmt = storel10n(config, {})
def __init__(self, aws_creds):
    """Push-to-releases script taking an (access key, secret key) pair.

    AWS credentials must be declared in exactly one place: the
    ``aws_creds`` pair or the script config.
    """
    virtualenv_actions = [
        "create-virtualenv",
        "activate-virtualenv",
        "push-to-releases",
    ]
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        require_config_file=False,
        config={
            "virtualenv_modules": ["boto", "redo"],
            "virtualenv_path": "venv",
        },
        all_actions=virtualenv_actions,
        default_actions=list(virtualenv_actions),
    )
    # validate aws credentials
    if not (all(aws_creds) or self.config.get('credentials')):
        self.fatal("aws creds not defined. please add them to your config or env.")
    if any(aws_creds) and self.config.get('credentials'):
        self.fatal("aws creds found in env and self.config. please declare in one place only.")
    # set aws credentials
    if all(aws_creds):
        self.aws_key_id, self.aws_secret_key = aws_creds
    else:  # use
        self.aws_key_id = self.aws_secret_key = None
    # set the env var for boto to read our special config file
    # rather than anything else we have at ~/.boto
    os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
def __init__(self, require_config_file=False):
    # Android emulator/AVD build script: builds AOSP, a goldfish kernel and
    # the Orangutan su binary, then bakes and bundles AVD images.
    BaseScript.__init__(self,
                        config_options=self.config_options,
                        all_actions=[
                            'clobber',
                            'apt-get-dependencies',
                            'download-aosp',
                            'download-kernel',
                            'download-ndk',
                            'download-test-binaries',
                            'checkout-orangutan',
                            'patch-aosp',
                            'build-aosp',
                            'build-kernel',
                            'build-orangutan-su',
                            'make-base-avd',
                            'customize-avd',
                            'clone-customized-avd',
                            'bundle-avds',
                            'bundle-emulators'],
                        # clobber is available but not run by default
                        default_actions=[
                            'apt-get-dependencies',
                            'download-aosp',
                            'download-kernel',
                            'download-ndk',
                            'download-test-binaries',
                            'checkout-orangutan',
                            'patch-aosp',
                            'build-aosp',
                            'build-kernel',
                            'build-orangutan-su',
                            'make-base-avd',
                            'customize-avd',
                            'clone-customized-avd',
                            'bundle-avds',
                            'bundle-emulators'],
                        require_config_file=require_config_file,
                        # Default configuration; 'inferred' values are
                        # resolved below from the Android version/tag.
                        config={
                            'host_arch': sniff_host_arch(),
                            'target_arch': 'armv7a',
                            'android_version': 'gingerbread',
                            'android_tag': 'inferred',
                            'patch': 'inferred',
                            'android_apilevel': 'inferred',
                            'work_dir': 'android_emulator_build',
                            'android_url': 'https://android.googlesource.com/platform/manifest',
                            'ndk_version': 'r9',
                            'install_android_dir': '/home/cltbld/.android',
                            'avd_count': '4',
                            'jdk': 'sun'})
    # Hard platform requirements: Ubuntu 12.04 precise on x86/x86_64 only.
    if platform.system() != "Linux":
        self.fatal("this script only works on (ubuntu) linux")
    if platform.dist() != ('Ubuntu', '12.04', 'precise'):
        self.fatal("this script only works on ubuntu 12.04 precise")
    if not (platform.machine() in ['i386', 'i486', 'i586',
                                   'i686', 'x86_64']):
        self.fatal("this script only works on x86 and x86_64")
    # Resolve 'inferred' placeholders: tag from the Android version, then
    # patches and API level from the tag.
    self.tag = self.config['android_tag']
    if self.tag == 'inferred':
        self.tag = self.select_android_tag(self.config['android_version'])
    self.patches = self.config['patch']
    if self.patches == 'inferred':
        self.patches = self.select_patches(self.tag)
    else:
        # explicit patches arrive as a comma-separated list of
        # name=value pairs
        self.patches = [x.split('=') for x in self.patches.split(',')]
    self.apilevel = self.config['android_apilevel']
    if self.apilevel == 'inferred':
        self.apilevel = self.android_apilevel(self.tag)
    # Derived absolute paths used throughout the build.
    self.workdir = os.path.abspath(self.config['work_dir'])
    self.bindir = os.path.join(self.workdir, "bin")
    self.aospdir = os.path.join(self.workdir, "aosp")
    self.goldfishdir = os.path.join(self.workdir, "goldfish")
    self.ndkdir = os.path.join(self.workdir,
                               "android-ndk-" + self.config['ndk_version'])
    # parallelism for the AOSP/kernel builds
    self.ncores = multiprocessing.cpu_count()
    self.androiddir = os.path.join(self.workdir, ".android")
    self.avddir = os.path.join(self.androiddir, "avd")
    self.aosphostdir = os.path.join(self.aospdir, "out/host/linux-x86")
    self.aosphostbindir = os.path.join(self.aosphostdir, "bin")
    self.aospprodoutdir = os.path.join(self.aospdir,
                                       "out/target/product/generic")
    self.emu = os.path.join(self.aosphostbindir, "emulator")
    self.adb = os.path.join(self.aosphostbindir, "adb")
    # number of AVDs to create (avd_count arrives as a string)
    self.navds = int(self.config['avd_count'])
def __init__(self, require_config_file=False):
    """Source-tool script variant: only the 'source' action is available."""
    BaseScript.__init__(
        self,
        config_options=self.config_options,
        all_actions=['source'],
        usage=SOURCE_TOOL_USAGE,
        require_config_file=require_config_file,
    )