def remove_site(repo, branch, alias, mysql_config): # Drop DB... # 'build' and 'buildtype' can be none because only needed for revert which isn't relevant drush_runtime_location = "/var/www/live.%s.%s/www" % (repo, branch) drush_output = Drupal.drush_status(repo, branch, None, None, None, drush_runtime_location) dbname = Drupal.get_db_name(repo, branch, None, None, "default", drush_output) # If the dbname variable is still empty, fail the build early if not dbname: raise SystemExit( "###### Could not determine the database name, so we cannot proceed with tearing down the site." ) print "===> Dropping database and user: %s" % dbname sudo("mysql --defaults-file=%s -e 'DROP DATABASE IF EXISTS `%s`;'" % (mysql_config, dbname)) sudo("mysql --defaults-file=%s -e \"DROP USER \'%s\'@\'localhost\';\"" % (mysql_config, dbname)) with settings(warn_only=True): # Remove site directories print "===> Unlinking live symlink and removing site directories..." sudo("unlink /var/www/live.%s.%s" % (repo, branch)) sudo("rm -rf /var/www/%s_%s_*" % (repo, branch)) # Remove files directories print "===> Removing files directories..." sudo("rm -rf /var/www/shared/%s_%s_*" % (alias, branch)) # Remove shared settings file print "===> Removing settings.inc file..." sudo("rm /var/www/config/%s_%s.settings.inc" % (alias, branch))
def remove_site(repo, branch, alias, site, mysql_config, mysql_user_ip): # Drop DB... # 'build' and 'buildtype' can be none because only needed for revert which isn't relevant # This needs to be in a with settings(warn_only=True) to prevent the build from failing if the site is broken with settings(warn_only=True): drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site) drush_output = run("cd %s && drush -l %s status --format=yaml" % (drush_runtime_location, site)) dbname = Drupal.get_db_name(repo, branch, None, None, site, drush_output) dbuser = Drupal.get_db_user(repo, branch, site, drush_output) # If the dbname variable is still empty, fail the build early if not dbname: raise SystemExit("###### Could not determine the database name, so we cannot proceed with tearing down the site.") # If the dbuser variable is still empty, fail the build early if not dbuser: raise SystemExit("###### Could not determine the database username, so we cannot proceed with tearing down the site.") print "===> Dropping database %s and user %s" % (dbname, dbuser) sudo("mysql --defaults-file=%s -e 'DROP DATABASE IF EXISTS `%s`;'" % (mysql_config, dbname)) sudo("mysql --defaults-file=%s -e \"DROP USER IF EXISTS \'%s\'@\'%s\';\"" % (mysql_config, dbuser, mysql_user_ip)) with settings(warn_only=True): # Remove files directories print "===> Removing files directories..." sudo("rm -rf /var/www/shared/%s_%s_*" % (alias, branch)) # Remove shared settings file print "===> Removing settings.inc file..." sudo("rm /var/www/config/%s_%s.settings.inc" % (alias, branch))
def main(shortname, branch, command, backup=True): if backup == "False": backup = False if backup == "True": backup = True # Define server roles (if applicable) common.Utils.define_roles(None, False) # Run the command with settings(warn_only=True): if run("readlink /var/www/live.%s.%s" % (shortname, branch)).failed: raise SystemError( "You can't run a command on a site that doesn't exist! Alias @%s_%s not recognised." % (shortname, branch)) # Take a database backup first if told to. if backup: drush_runtime_location = "/var/www/live.%s.%s/www" % (shortname, branch) drush_output = Drupal.drush_status(shortname, branch, None, None, None, drush_runtime_location) db_name = Drupal.get_db_name(shortname, branch, None, branch, "default", drush_output) execute(common.MySQL.mysql_backup_db, db_name, 'drush_command', True) # Strip nastiness from the command command = command.replace(";", "") command = command.replace("&&", "") command = command.replace("&", "") command = command.replace("||", "") command = command.replace("|", "") command = command.replace("!", "") command = command.replace("<", "") command = command.replace(">", "") print "Command is drush %s against the %s site" % (command, branch) BLACKLISTED_CMDS = [ 'sql-drop', 'site-install', 'si', 'sudo', 'rm', 'shutdown', 'reboot', 'halt', 'chown', 'chmod', 'cp', 'mv', 'nohup', 'echo', 'cat', 'tee', 'php-eval', 'variable-set', 'vset' ] blacklisted = False blacklisted = common.Utils.detect_malicious_strings( BLACKLISTED_CMDS, command) if blacklisted: raise SystemError( "Surely you jest... I won't run drush %s against the %s site. Ask a sysadmin instead." % (command, branch)) run("cd /var/www/live.%s.%s/www && drush -y %s" % (shortname, branch, command))
def _revert_db(repo, branch, build, buildtype, site): print "===> Reverting the database for %s site..." % site drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % ( repo, branch, site) drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location) db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output) # Get Drupal version to pass to cache clear and _revert_go_online() drupal_version = run( "echo \"%s\" | grep \"drupal-version\" | cut -d\: -f2 | cut -d. -f1" % drush_output) drupal_version = drupal_version.strip() # Older versions of Drupal put version in single quotes drupal_version = drupal_version.strip("'") drupal_version = int(drupal_version) common.MySQL.mysql_revert_db(db_name, build) Drupal.drush_clear_cache(repo, branch, build, site, drupal_version) _revert_go_online(repo, branch, build, buildtype, site, drupal_version)
def existing_build_wrapper(url, www_root, site_root, site_link, repo, branch, build, buildtype, previous_build, alias, site, no_dev, config, config_export, drupal_version, readonlymode, notifications_email, autoscale, do_updates, import_config, fra, run_cron, feature_branches, php_ini_file):
    """Launch a new build of a site that already exists on the server.

    Flow: back up the current database, point settings/drushrc/files at the
    new build, run the 'mid' deploy hook, optionally export config, then
    either run the full database-update path (go offline, updatedb, features
    revert, cron, symlink swap, config import, go online) or the no-updates
    path (symlink swap and config import only). Several steps revert the
    database and/or symlink themselves on failure - see inline comments.
    All drush activity runs with PHPRC pointed at php_ini_file.

    NOTE(review): do_updates/fra/run_cron are compared with '== True' -
    presumably they can arrive as strings from Jenkins; confirm with callers.
    """
    print "===> Looks like the site %s exists already. We'll try and launch a new build..." % url
    with shell_env(PHPRC='%s' % php_ini_file):
        # Check Drupal status to retrieve database name
        drush_runtime_location = "%s/www/sites/%s" % (previous_build, site)
        drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
        db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
        # Backup database
        execute(common.MySQL.mysql_backup_db, db_name, build, True)
        # Build the location of the backup
        previous_db = common.Utils.get_previous_db(repo, branch, build)
        # Point settings.php, drushrc.php and the files symlink at the new build
        execute(AdjustConfiguration.adjust_settings_php, repo, branch, build, buildtype, alias, site)
        execute(AdjustConfiguration.adjust_drushrc_php, repo, branch, build, site)
        execute(AdjustConfiguration.adjust_files_symlink, repo, branch, build, alias, site)
        # Let's allow developers to perform some actions right after Drupal is built
        execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='mid', hosts=env.roledefs['app_all'])
        # Export the config if we need to (Drupal 8+)
        if config_export:
            execute(Drupal.config_export, repo, branch, build, drupal_version)
        execute(Drupal.drush_status, repo, branch, build, buildtype, site, None, alias, revert_settings=True)
        # Time to update the database!
        if do_updates == True:
            execute(Drupal.go_offline, repo, branch, site, alias, readonlymode, drupal_version)
            execute(Drupal.drush_clear_cache, repo, branch, build, site, drupal_version)
            # This will revert the database if it fails
            execute(Drupal.drush_updatedb, repo, branch, build, buildtype, site, alias, drupal_version)
            # Features revert only applies to designated feature branches
            if fra == True:
                if branch in feature_branches:
                    execute(Drupal.drush_fra, repo, branch, build, buildtype, site, alias, drupal_version)
            if run_cron == True:
                execute(Drupal.drush_cron, repo, branch, build, site, drupal_version)
            # This will revert the database if it fails (maybe hook_updates broke ability to bootstrap)
            execute(Drupal.drush_status, repo, branch, build, buildtype, site, None, alias, revert=True)
            # Cannot use try: because execute() return not compatible.
            # This will revert the database if fails
            execute(common.Utils.adjust_live_symlink, repo, branch, build, hosts=env.roledefs['app_all'])
            live_build = run("readlink %s/live.%s.%s" % (www_root, repo, branch))
            this_build = "%s/%s_%s_%s" % (www_root, repo, branch, build)
            # The above paths should match - something is wrong if they don't!
            if not this_build == live_build:
                common.MySQL.mysql_revert_db(db_name, build)
                execute(Revert._revert_settings, repo, branch, build, buildtype, site, alias)
                raise SystemExit("####### Could not successfully adjust the symlink pointing to the build! Could not take this build live. Database may have had updates applied against the newer build already. Reverting database")
            if import_config:
                # This will revert database, settings and live symlink if it fails.
                execute(Drupal.config_import, repo, branch, build, buildtype, site, alias, drupal_version, previous_build)
            # Let's allow developers to use other config management for imports, such as CMI
            execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='config', hosts=env.roledefs['app_primary'])
            execute(Drupal.secure_admin_password, repo, branch, build, site, drupal_version)
            # This will revert the database and switch the symlink back if it fails
            execute(Drupal.go_online, repo, branch, build, buildtype, alias, site, previous_build, readonlymode, drupal_version)
            execute(Drupal.check_node_access, repo, alias, branch, build, site, notifications_email)
        else:
            # No database updates requested: swap the symlink and import config,
            # but the node-access rebuild check cannot be performed.
            print "####### WARNING: by skipping database updates we cannot check if the node access table will be rebuilt. If it will this is an intrusive action that may result in an extended outage."
            # This will revert the database if it fails (maybe hook_updates broke ability to bootstrap)
            execute(Drupal.drush_status, repo, branch, build, buildtype, site, None, alias, revert=True)
            # Cannot use try: because execute() return not compatible.
            # This will revert the database if fails
            execute(common.Utils.adjust_live_symlink, repo, branch, build, hosts=env.roledefs['app_all'])
            live_build = run("readlink %s/live.%s.%s" % (www_root, repo, branch))
            this_build = "%s/%s_%s_%s" % (www_root, repo, branch, build)
            # The above paths should match - something is wrong if they don't!
            if not this_build == live_build:
                common.MySQL.mysql_revert_db(db_name, build)
                execute(Revert._revert_settings, repo, branch, build, buildtype, site, alias)
                raise SystemExit("####### Could not successfully adjust the symlink pointing to the build! Could not take this build live. Database may have had updates applied against the newer build already. Reverting database")
            if import_config:
                # This will revert database, settings and live symlink if it fails.
                execute(Drupal.config_import, repo, branch, build, buildtype, site, alias, drupal_version, previous_build)
            # Let's allow developers to use other config management for imports, such as CMI
            execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='config', hosts=env.roledefs['app_primary'])
            execute(Drupal.secure_admin_password, repo, branch, build, site, drupal_version)
        # Final clean up and run tests, if applicable
        execute(common.Services.clear_php_cache, hosts=env.roledefs['app_all'])
        execute(common.Services.clear_varnish_cache, hosts=env.roledefs['app_all'])
        execute(Drupal.generate_drush_cron, repo, branch, autoscale)
        # Let's allow developers to perform some post-build actions if they need to
        execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='post', hosts=env.roledefs['app_all'])
def main(repo, repourl, build, branch, buildtype, keepbuilds=10, url=None, freshdatabase="Yes", syncbranch=None, sanitise="no", import_config=False, statuscakeuser=None, statuscakekey=None, statuscakeid=None, restartvarnish="yes", cluster=False, sanitised_email=None, sanitised_password=None, webserverport='8080', mysql_version=5.5, rds=False, autoscale=None, mysql_config='/etc/mysql/debian.cnf', config_filename='config.ini', config_fullpath=False, php_ini_file=None, do_updates=True): if config_fullpath == "False": config_fullpath = False if config_fullpath == "True": config_fullpath = True if do_updates == "False": do_updates = False if do_updates == "True": do_updates = True if import_config == "False": import_config = False if import_config == "True": import_config = True # Read the config.ini file from repo, if it exists config = common.ConfigFile.buildtype_config_file(buildtype, config_filename, fullpath=config_fullpath) # Can be set in the config.ini [AWS] section aws_credentials = common.ConfigFile.return_config_item(config, "AWS", "aws_credentials", "string", "/home/jenkins/.aws/credentials") aws_autoscale_group = common.ConfigFile.return_config_item(config, "AWS", "aws_autoscale_group", "string", "prod-asg-prod") aws_package_all_builds = common.ConfigFile.return_config_item(config, "AWS", "aws_package_all_builds", "boolean", False) aws_build_tar = common.ConfigFile.return_config_item(config, "AWS", "aws_build_tar", "boolean", True) # Now we need to figure out what server(s) we're working with # Define primary host common.Utils.define_host(config, buildtype, repo) # Define server roles (if applicable) common.Utils.define_roles(config, cluster, autoscale, aws_credentials, aws_autoscale_group) # Check where we're deploying to - abort if nothing set in config.ini if env.host is None: raise ValueError("===> You wanted to deploy a build but we couldn't find a host in the map file for repo %s so we're aborting." 
% repo) # Set some default config options and variables user = "******" previous_build = "" previous_db = "" statuscake_paused = False www_root = "/var/www" application_directory = "www" site_root = www_root + '/%s_%s_%s' % (repo, branch, build) site_link = www_root + '/live.%s.%s' % (repo, branch) site_exists = None behat_config_file_default = "%s/%s_%s_%s/tests/behat/behat.yml" % (www_root, repo, branch, build) composer_lock_outdated = False # Set our host_string based on user@host env.host_string = '%s@%s' % (user, env.host) # Can be set in the config.ini [Build] section ssh_key = common.ConfigFile.return_config_item(config, "Build", "ssh_key") notifications_email = common.ConfigFile.return_config_item(config, "Build", "notifications_email") php_ini_file = common.ConfigFile.return_config_item(config, "Build", "php_ini_file", "string", php_ini_file) build_hook_version = common.ConfigFile.return_config_item(config, "Build", "build_hook_version", "string", "1") # If this is a multisite build, set the url to None so one is generated for every site in the multisite setup. This particular line will ensure the *first* site has its url generated. 
if config.has_section("Sites"): print "===> Config file has a [Sites] section, so we'll assume this is a multisite build and set url to None" url = None # Need to keep potentially passed in 'url' value as default else: url = common.ConfigFile.return_config_item(config, "Build", "url", "string", url) # Can be set in the config.ini [Database] section db_name = common.ConfigFile.return_config_item(config, "Database", "db_name") db_username = common.ConfigFile.return_config_item(config, "Database", "db_username") db_password = common.ConfigFile.return_config_item(config, "Database", "db_password") db_backup = common.ConfigFile.return_config_item(config, "Database", "db_backup", "boolean", True) # Need to keep potentially passed in MySQL version and config path as defaults mysql_config = common.ConfigFile.return_config_item(config, "Database", "mysql_config", "string", mysql_config) mysql_version = common.ConfigFile.return_config_item(config, "Database", "mysql_version", "string", mysql_version) dump_file = common.ConfigFile.return_config_item(config, "Database", "dump_file") # Can be set in the config.ini [Drupal] section ### @TODO: deprecated, can be removed later drupal_version = common.ConfigFile.return_config_item(config, "Version", "drupal_version", "string", None, True, True, replacement_section="Drupal") # This is the correct location for 'drupal_version' - note, respect the deprecated value as default drupal_version = common.ConfigFile.return_config_item(config, "Drupal", "drupal_version", "string", drupal_version) profile = common.ConfigFile.return_config_item(config, "Drupal", "profile", "string", "minimal") run_cron = common.ConfigFile.return_config_item(config, "Drupal", "run_cron", "boolean", False) import_config = common.ConfigFile.return_config_item(config, "Drupal", "import_config", "boolean", import_config) import_config_method = common.ConfigFile.return_config_item(config, "Drupal", "import_config_method", "string", "cim") ### @TODO: deprecated, can 
be removed later fra = common.ConfigFile.return_config_item(config, "Features", "fra", "boolean", False, True, True, replacement_section="Drupal") # This is the correct location for 'fra' - note, respect the deprecated value as default fra = common.ConfigFile.return_config_item(config, "Drupal", "fra", "boolean", fra) ### @TODO: deprecated, can be removed later readonlymode = common.ConfigFile.return_config_item(config, "Readonly", "readonly", "string", "maintenance", True, True, replacement_section="Drupal") # This is the correct location for 'readonly' - note, respect the deprecated value as default readonlymode = common.ConfigFile.return_config_item(config, "Drupal", "readonly", "string", readonlymode) ### @TODO: deprecated, can be removed later config_export = common.ConfigFile.return_config_item(config, "Hooks", "config_export", "boolean", False, True, True, replacement_section="Drupal") # This is the correct location for 'config_export' - note, respect the deprecated value as default config_export = common.ConfigFile.return_config_item(config, "Drupal", "config_export", "boolean", config_export) secure_user_one = common.ConfigFile.return_config_item(config, "Drupal", "secure_user_one", "boolean", True) # Can be set in the config.ini [Composer] section composer = common.ConfigFile.return_config_item(config, "Composer", "composer", "boolean", True) composer_lock = common.ConfigFile.return_config_item(config, "Composer", "composer_lock", "boolean", True) no_dev = common.ConfigFile.return_config_item(config, "Composer", "no_dev", "boolean", True) through_ssh = common.ConfigFile.return_config_item(config, "Composer", "through_ssh", "boolean", False) mark_unstable = common.ConfigFile.return_config_item(config, "Composer", "mark_unstable", "boolean", False) # Can be set in the config.ini [Testing] section # PHPUnit is in common/Tests because it can be used for any PHP application phpunit_run = common.ConfigFile.return_config_item(config, "Testing", "phpunit_run", 
"boolean", False) phpunit_fail_build = common.ConfigFile.return_config_item(config, "Testing", "phpunit_fail_build", "boolean", False) phpunit_revert_build = common.ConfigFile.return_config_item(config, "Testing", "phpunit_revert_build", "boolean", False) phpunit_group = common.ConfigFile.return_config_item(config, "Testing", "phpunit_group", "string", "unit") phpunit_test_directory = common.ConfigFile.return_config_item(config, "Testing", "phpunit_test_directory", "string", "%s/modules/custom" % application_directory) phpunit_path = common.ConfigFile.return_config_item(config, "Testing", "phpunit_path", "string", "vendor/phpunit/phpunit/phpunit") phpunit_install = common.ConfigFile.return_config_item(config, "Testing", "phpunit_install", "boolean", True) # CodeSniffer itself is in common/Tests, but standards used here are Drupal specific, see drupal/DrupalTests.py for the wrapper to apply them codesniffer = common.ConfigFile.return_config_item(config, "Testing", "codesniffer", "boolean") codesniffer_extensions = common.ConfigFile.return_config_item(config, "Testing", "codesniffer_extensions", "string", "php,module,inc,install,test,profile,theme,info,txt,md") codesniffer_ignore = common.ConfigFile.return_config_item(config, "Testing", "codesniffer_ignore", "string", "node_modules,bower_components,vendor") codesniffer_paths = common.ConfigFile.return_config_item(config, "Testing", "codesniffer_paths", "string", "%s/modules/custom %s/themes/custom" % (application_directory, application_directory)) # Regex check string_to_check = common.ConfigFile.return_config_item(config, "Testing", "string_to_check", "string") curl_options = common.ConfigFile.return_config_item(config, "Testing", "curl_options", "string", "sL") check_protocol = common.ConfigFile.return_config_item(config, "Testing", "check_protocol", "string", "https") # Behat config file location behat_config_file = common.ConfigFile.return_config_item(config, "Behat", "behat_config_file", "string", 
behat_config_file_default) # Set SSH key if needed # @TODO: this needs to be moved to config.ini for Code Enigma GitHub projects if "*****@*****.**" in repourl: ssh_key = "/var/lib/jenkins/.ssh/id_rsa_github" # Prepare Behat variables behat_config = None behat_tests_failed = False if config.has_section("Behat"): behat_config = DrupalTests.prepare_behat_tests(config, buildtype) # Pause StatusCake monitoring statuscake_paused = common.Utils.statuscake_state(statuscakeuser, statuscakekey, statuscakeid, "pause") # Run the tasks. # -------------- execute(common.Utils.clone_repo, repo, repourl, branch, build, None, ssh_key, hosts=env.roledefs['app_all']) # Gitflow workflow means '/' in branch names, need to clean up. branch = common.Utils.generate_branch_name(branch) print "===> Branch is %s" % branch # Check the php_ini_file string isn't doing anything naughty malicious_code = False malicious_code = common.Utils.detect_malicious_strings([';', '&&'], php_ini_file) # Set CLI PHP version, if we need to if php_ini_file and not malicious_code: run("export PHPRC='%s'" % php_ini_file) # Set branches to be treated as feature branches # Regardless of whether or not 'fra' is set, we need to set 'branches' # our our existing_build_wrapper() function gets upset later. 
feature_branches = Drupal.drush_fra_branches(config, branch) # Now we have the codebase and a clean branch name we can figure out the Drupal version # Don't use execute() because it returns an array of values returned keyed by hostname drupal_version = int(DrupalUtils.determine_drupal_version(drupal_version, repo, branch, build, config)) print "===> the drupal_version variable is set to %s" % drupal_version # Let's allow developers to perform some early actions if they need to execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='pre', build_hook_version="1", hosts=env.roledefs['app_all']) execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='pre-prim', build_hook_version="1", hosts=env.roledefs['app_primary']) # @TODO: This will be a bug when Drupal 9 comes out! # We need to cast version as an integer and use < 8 if drupal_version < 8: import_config = False import_config_method = "cim" if drupal_version > 7 and composer is True: # Sometimes people use the Drupal Composer project which puts Drupal 8's composer.json file in repo root. with shell_env(PHPRC='%s' % php_ini_file): with settings(warn_only=True): if run("find %s/composer.json" % site_root).return_code == 0: path = site_root else: path = site_root + "/" + application_directory if mark_unstable: composer_lock_outdated = common.PHP.composer_validate(path) execute(common.PHP.composer_command, path, "install", None, no_dev, composer_lock, through_ssh=through_ssh) # Compile a site mapping, which is needed if this is a multisite build # Just sets to 'default' if it is not mapping = {} mapping = Drupal.configure_site_mapping(repo, mapping, config) # Empty dictionary for sites that have been deployed. A site is added to the # dictionary at the *start* of its deployment so it is also reverted if a stage # of the deployment fails. Though it is only added if the build is an existing # build. 
sites_deployed = {} # Empty directory for site URLs that have been deployed. The structure will be # alias: url site_urls = {} # Record the link to the previous build previous_build = common.Utils.get_previous_build(repo, branch, build) # Take a backup of all sites, then take all sites offline before doing anything else. offline_site_exists = None for offline_alias,offline_site in mapping.iteritems(): offline_site_exists = DrupalUtils.check_site_exists(previous_build, offline_site) if offline_site_exists: drush_runtime_location = "%s/%s/sites/%s" % (previous_build, application_directory, offline_site) drush_output = Drupal.drush_status(repo, branch, build, buildtype, offline_site, drush_runtime_location) offline_db_name = Drupal.get_db_name(repo, branch, build, buildtype, offline_site, drush_output) # If database updates will run, take the site offline if do_updates: execute(Drupal.go_offline, repo, branch, offline_site, offline_alias, readonlymode, drupal_version) # Backup database if db_backup: previous_db = execute(common.MySQL.mysql_backup_db, offline_db_name, build, True) previous_db = previous_db[env.roledefs['app_primary'][0]] offline_site_exists = None # Run new installs for alias,site in mapping.iteritems(): # Compile variables for feature branch builds (if applicable) FeatureBranches.configure_feature_branch(buildtype, config, branch, alias) print "===> Feature branch debug information below:" print "httpauth_pass: %s" % FeatureBranches.httpauth_pass print "ssl_enabled: %s" % FeatureBranches.ssl_enabled print "ssl_cert: %s" % FeatureBranches.ssl_cert print "ssl_ip: %s" % FeatureBranches.ssl_ip print "drupal_common_config: %s" % FeatureBranches.drupal_common_config print "featurebranch_url: %s" % FeatureBranches.featurebranch_url print "featurebranch_vhost: %s" % FeatureBranches.featurebranch_vhost site_exists = DrupalUtils.check_site_exists(previous_build, site) cimy_mapping = {} if drupal_version > 7 and import_config_method == "cimy": cimy_mapping = 
DrupalConfig.configure_cimy_params(config, site) if freshdatabase == "Yes" and buildtype == "custombranch": # For now custombranch builds to clusters cannot work dump_file = Drupal.prepare_database(repo, branch, build, buildtype, alias, site, syncbranch, env.host_string, sanitise, sanitised_password, sanitised_email) # Need to make sure the env.host variable is set correctly, after potentially fetching a database dump from production env.host = env.roledefs['app_primary'][0] if FeatureBranches.featurebranch_url is not None: url = FeatureBranches.featurebranch_url url = common.Utils.generate_url(url, alias, branch) # Now check if we have a Drush alias with that name. If not, run an install with settings(hide('warnings', 'stderr'), warn_only=True): # Because this runs in Jenkins home directory, it will use 'system' drush if not site_exists: print "===> Didn't find a previous build so we'll install this new site %s" % url initial_build_wrapper(url, www_root, application_directory, repo, branch, build, site, alias, profile, buildtype, sanitise, config, db_name, db_username, db_password, mysql_version, mysql_config, dump_file, sanitised_password, sanitised_email, cluster, rds, drupal_version, import_config, import_config_method, cimy_mapping, webserverport, behat_config, autoscale, php_ini_file, build_hook_version, secure_user_one, previous_build) else: # Otherwise it's an existing build # This does not bring sites online that have been taken offline but not yet deployed sites_deployed[alias] = site existing_build_wrapper(url, www_root, application_directory, site_root, site_link, repo, branch, build, buildtype, previous_build, db_backup, alias, site, no_dev, config, config_export, drupal_version, readonlymode, notifications_email, autoscale, do_updates, import_config, import_config_method, cimy_mapping, fra, run_cron, feature_branches, php_ini_file, build_hook_version, secure_user_one, sites_deployed) # Now everything should be in a good state, let's enable environment 
indicator for this site, if present execute(Drupal.environment_indicator, www_root, repo, branch, build, buildtype, alias, site, drupal_version) site_urls[alias] = url # If this is a single site, we're done with the 'url' variable anyway # If this is a multisite, we have to set it to None so a new 'url' gets generated on the next pass url = None site_exists = None # Adjust the live symlink now that all sites have been deployed. Bring them online after this has happened. if previous_build is not None: execute(common.Utils.adjust_live_symlink, repo, branch, build, hosts=env.roledefs['app_all']) # This will revert the database if fails live_build = run("readlink %s/live.%s.%s" % (www_root, repo, branch)) this_build = "%s/%s_%s_%s" % (www_root, repo, branch, build) # The above paths should match - something is wrong if they don't! if not this_build == live_build: # Make sure the live symlink is pointing at the previous working build. sudo("ln -nsf %s %s/live.%s.%s" % (www_root, previous_build, repo, branch)) for revert_alias,revert_site in sites_deployed.iteritems(): if db_backup: common.MySQL.mysql_revert_db(db_name, build) else: print "####### Due to your config settings no database backup was taken so your database may be broken!" execute(Revert._revert_settings, repo, branch, build, buildtype, revert_site, revert_alias) execute(Revert._revert_go_online, repo, branch, build, buildtype, site, drupal_version) raise SystemExit("####### Could not successfully adjust the symlink pointing to the build! Could not take this build live. Database may have had updates applied against the newer build already. 
Reverting database") if do_updates and previous_build is not None: for online_alias,online_site in sites_deployed.iteritems(): execute(Drupal.go_online, repo, branch, build, buildtype, online_alias, online_site, previous_build, readonlymode, drupal_version, sites_deployed=sites_deployed) # This will revert the database and switch the symlink back if it fails # Clear the opcache again after the site has been brought online execute(common.Services.clear_php_cache, hosts=env.roledefs['app_all']) for test_alias,test_site in mapping.iteritems(): behat_url = site_urls[test_alias] # After any build we want to run all the available automated tests test_runner(www_root, repo, branch, build, test_alias, buildtype, behat_url, ssl_enabled, db_backup, config, behat_config, behat_config_file, import_config, import_config_method, cimy_mapping, drupal_version, phpunit_run, phpunit_group, phpunit_test_directory, phpunit_path, phpunit_fail_build, phpunit_revert_build, phpunit_install, test_site, codesniffer, codesniffer_extensions, codesniffer_ignore, codesniffer_paths, string_to_check, check_protocol, curl_options, notifications_email, build_hook_version, sites_deployed, previous_build) behat_url = None # Unset CLI PHP version if we need to if php_ini_file: run("export PHPRC=''") # Resume StatusCake monitoring if statuscake_paused: common.Utils.statuscake_state(statuscakeuser, statuscakekey, statuscakeid) # If this is autoscale at AWS, let's update the tarball in S3 if autoscale: # In some cases, you may not want to tarball up the builds. # For example, when all builds are tarballed up, you can't # reliably have multiple builds running for dev and stage # as it will cause an error when the contents of /var/www # change. if aws_build_tar: execute(common.Utils.tarball_up_to_s3, www_root, repo, branch, build, autoscale, aws_package_all_builds) else: print "Don't create a tarball after this build. Assume the tarballing is happening separately, such as in an overnight job." 
#commit_new_db(repo, repourl, url, build, branch) execute(common.Utils.remove_old_builds, repo, branch, keepbuilds, hosts=env.roledefs['app_all']) script_dir = os.path.dirname(os.path.realpath(__file__)) if put(script_dir + '/../util/revert', '/home/jenkins', mode=0755).failed: print "####### BUILD COMPLETE. Could not copy the revert script to the application server, revert will need to be handled manually" else: print "####### BUILD COMPLETE. If you need to revert this build, run the following command: sudo /home/jenkins/revert -b %s -d /home/jenkins/dbbackups/%s -s %s/live.%s.%s -a %s_%s" % (previous_build, previous_db, www_root, repo, branch, repo, branch) # We have two scenarios where a build might be marked as unstable: # 1) If the composer.lock file is outdated (r45198) # 2) If any of our tests failed, abort the job (r23697) unstable_text = "" unstable_build = False if behat_tests_failed: unstable_text = unstable_text + "####### Some tests failed. Aborting the job.\n" unstable_build = True if composer_lock_outdated: unstable_text = unstable_text + "####### composer.lock is outdated.\n" unstable_build = True if unstable_build: print "%s" % unstable_text sys.exit(3)