def _revert_go_online(repo, branch, build, buildtype, site, drupal_version=None): print "===> Bringing the %s site back online." % site drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % ( repo, branch, site) with settings(warn_only=True): if drupal_version is None: # Use "sync" method here so the check uses the live symlink to get the Drupal version rather than the new build, which just failed drupal_version = int( DrupalUtils.determine_drupal_version(None, repo, branch, build, None, "sync")) if drupal_version > 7: online_command = "state-set system.maintenance_mode 0" else: online_command = "vset site_offline 0" DrupalUtils.drush_command(online_command, site, drush_runtime_location) Drupal.drush_clear_cache(repo, branch, build, site, drupal_version)
def initial_db_and_config(repo, branch, build, site, import_config, drupal_version): with settings(warn_only=True): # Run database updates drush_runtime_location = "/var/www/%s_%s_%s/www/sites/default" % ( repo, branch, build) if DrupalUtils.drush_command("updatedb", site, drush_runtime_location, True, None, None, True).failed: raise SystemExit( "###### Could not run database updates! Everything else has been done, but failing the build to alert to the fact database updates could not be run." ) else: Drupal.drush_clear_cache(repo, branch, build, "default", drupal_version) # Run entity updates if drupal_version > 7: if DrupalUtils.drush_command("entity-updates", site, drush_runtime_location, True, None, None, True).failed: print "###### Could not carry out entity updates! Continuing anyway, as this probably isn't a major issue." # Import config if drupal_version > 7 and import_config: print "===> Importing configuration for Drupal 8 site..." if DrupalUtils.drush_command("cim", site, drush_runtime_location, True, None, None, True).failed: raise SystemExit( "###### Could not import configuration! Failing build.") else: print "===> Configuration imported. Running a cache rebuild..." Drupal.drush_clear_cache(repo, branch, build, "default", drupal_version)
def drush_fra(repo, branch, build, buildtype, site, alias, drupal_version): # Set drush variables drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % (repo, branch, build, site) drush_command = "pm-list --pipe --type=module --status=enabled --no-core" drush_output = DrupalUtils.drush_command(drush_command, site, drush_runtime_location, False, "yaml") if run("echo \"%s\" | grep -q ^features$" % drush_output).return_code != 0: print "===> Features module not installed, skipping feature revert" else: print "===> Reverting all features..." with settings(warn_only=True): if DrupalUtils.drush_command("fra", site, drush_runtime_location, True, None, None, True).failed: print "###### Could not revert features! Reverting database and settings..." execute(Revert._revert_db, repo, branch, build, buildtype, site) execute(Revert._revert_settings, repo, branch, build, buildtype, site, alias) raise SystemExit( "###### Could not revert features! Site remains on previous build" ) else: drush_clear_cache(repo, branch, build, site, drupal_version)
def drush_updatedb(repo, branch, build, buildtype, site, alias, drupal_version): print "===> Running any database hook updates" with settings(warn_only=True): # Clear the Drupal cache before running database updates, as sometimes there can be unexpected results drush_clear_cache(repo, branch, build, site, drupal_version) # Apparently APC cache can interfere with drush updatedb expected results here. Clear any chance of caches common.Services.clear_php_cache() common.Services.clear_varnish_cache() # Set drush location drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % ( repo, branch, build, site) if DrupalUtils.drush_command("updatedb", site, drush_runtime_location, True, None, None, True).failed: print "###### Could not apply database updates! Reverting this database" execute(Revert._revert_db, repo, branch, build, buildtype, site) execute(Revert._revert_settings, repo, branch, build, buildtype, site, alias) raise SystemExit( "###### Could not apply database updates! Reverted database. Site remains on previous build" ) if drupal_version > 7: if DrupalUtils.drush_command("entity-updates", site, drush_runtime_location, True, None, None, True).failed: print "###### Could not carry out entity updates! Continuing anyway, as this probably isn't a major issue." print "===> Database updates applied" drush_clear_cache(repo, branch, build, site, drupal_version)
def reenable_modules(repo, alias, branch, build, site, buildtype, drupal_version, enable_modules=[]): drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % (repo, branch, build, site) with settings(warn_only=True): if drupal_version > 7: if DrupalUtils.drush_command("cim", site, drush_runtime_location).failed: print "###### Cannot import config to enable modules. Manual investigation is required." else: print " ===> Modules re-enabled via config import." else: if enable_modules: for module in enable_modules: drush_command = "pm-enable %s" % module if DrupalUtils.drush_command( drush_command, site, drush_runtime_location).failed: print "###### Cannot enable %s. Manual investigation is required." % module else: print "===> %s re-enabled." % module
def go_offline(repo, branch, site, alias, readonlymode, drupal_version): # readonlymode can either be 'maintenance' (the default) or 'readonlymode' # which uses the readonlymode module print "===> go_offline mode is %s" % readonlymode # Set drush location drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % ( repo, branch, site) # If readonlymode is 'readonlymode', check that it exists if readonlymode == "readonlymode": print "===> First checking that the readonlymode module exists..." with settings(warn_only=True): if run("find /var/www/live.%s.%s/www -type d -name readonlymode | egrep '.*'" % (repo, branch)).return_code == 0: print "===> It does exist, so enable it if it's not already enabled" # Enable the module if it isn't already enabled DrupalUtils.drush_command("pm-enable readonlymode", site, drush_runtime_location) # Set the site_readonly mode variable to 1 print "===> Setting readonlymode so content cannot be changed while database updates are run..." DrupalUtils.drush_command("vset site_readonly 1", site, drush_runtime_location) else: print "###### The readonly flag in config.ini was set to readonly, yet the readonlymode module does not exist. We'll revert to normal maintenance mode..." readonlymode = 'maintenance' if readonlymode == "maintenance": print "===> Taking the site offline temporarily to do the drush updatedb..." if drupal_version > 7: DrupalUtils.drush_command("state-set system.maintenancemode 1", site, drush_runtime_location) else: DrupalUtils.drush_command("vset site_offline 1", site, drush_runtime_location) DrupalUtils.drush_command("vset maintenance_mode 1", site, drush_runtime_location)
def main(shortname, branch, bucket_name, method='zip', sanitise='yes', region='eu-west-1'): print "===> You want to download a database dump for %s %s. Let's start by fetching a fresh database..." % (shortname, branch) try: DrupalUtils.get_database(shortname, branch, sanitise) common.Utils.s3_upload(shortname, branch, method, "database_dump", bucket_name, "client-db-dumps", region) except: e = sys.exc_info()[1] raise SystemError(e)
def drush_clear_cache(repo, branch, build, site, drupal_version): print "===> Clearing Drupal cache..." with settings(warn_only=True): if drupal_version > 7: drush_command = "cr" else: drush_command = "cc all" drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % (repo, branch, build, site) DrupalUtils.drush_command(drush_command, site, drush_runtime_location, True, None, None, True)
def config_import(repo, branch, build, buildtype, site, alias, drupal_version, import_config_method, cimy_mapping, previous_build, sites_deployed=None): with settings(warn_only=True): # Check to see if this is a Drupal 8 build if drupal_version > 7: import_config_command = DrupalConfig.import_config_command( repo, branch, build, site, import_config_method, cimy_mapping) print "===> Importing configuration for Drupal 8 site..." drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % ( repo, branch, build, site) if DrupalUtils.drush_command("%s" % import_config_command, site, drush_runtime_location, True, None, None, True).failed: print "###### Could not import configuration! Reverting this database and settings" for revert_alias, revert_site in sites_deployed.iteritems(): execute(Revert._revert_db, repo, branch, build, buildtype, revert_site) execute(Revert._revert_settings, repo, branch, build, buildtype, revert_site, revert_alias) raise SystemExit( "###### Could not import configuration! Reverted database and settings. Site remains on previous build" ) else: print "===> Configuration imported. Running a cache rebuild..." drush_clear_cache(repo, branch, build, site, drupal_version)
def drush_fra(repo, branch, build, buildtype, site, alias, drupal_version, sites_deployed=None): # Set drush variables drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % (repo, branch, build, site) if run("cd %s && drush -y --format=yaml -l %s pm-list --pipe --type=module --status=enabled --no-core | grep -q ^features:$" % (drush_runtime_location, site)).return_code != 0: print "===> Features module not installed, skipping feature revert" else: print "===> Reverting all features..." with settings(warn_only=True): if DrupalUtils.drush_command("fra", site, drush_runtime_location, True, None, None, True).failed: print "###### Could not revert features! Reverting database and settings..." for revert_alias, revert_site in sites_deployed.iteritems(): execute(Revert._revert_db, repo, branch, build, buildtype, revert_site) execute(Revert._revert_settings, repo, branch, build, buildtype, revert_site, revert_alias) raise SystemExit( "###### Could not revert features! Site remains on previous build" ) else: drush_clear_cache(repo, branch, build, site, drupal_version)
def config_export(repo, branch, build, drupal_version):
    """Export site config (Drupal 8+) to a shared directory so it can be downloaded.

    NOTE(review): this function references `www_root` and `site` below, but
    neither is a parameter or local - unless they exist as module-level
    globals (not visible in this chunk), the export step raises NameError.
    Verify against the full file; if they are not globals, they should become
    parameters (e.g. www_root="/var/www", site="default").
    """
    if drupal_version > 7:
        print "===> Executing hook: config_export"
        print "===> Exporting site config, which will be downloadable"
        with settings(warn_only=True):
            print "First see if the directory /var/www/shared/%s_%s_exported_config exists." % (repo, branch)
            if run("stat /var/www/shared/%s_%s_exported_config" % (repo, branch)).return_code == 0:
                print "Exported config directory exists. Remove its contents"
                if sudo("rm -r /var/www/shared/%s_%s_exported_config" % (repo, branch)).failed:
                    # "Stop exporting" - the export below only runs when the old
                    # directory was removed successfully.
                    print "Warning: Cannot remove old exported config. Stop exporting, but proceed with rest of the build"
                else:
                    print "Exporting config"
                    # NOTE(review): the directory was just removed, so this chown
                    # targets a non-existent path (tolerated under warn_only) -
                    # presumably `drush cex --destination` recreates it. Confirm.
                    sudo("chown -R jenkins:www-data /var/www/shared/%s_%s_exported_config" % (repo, branch))
                    # Set drush variables
                    drush_runtime_location = "%s/%s_%s_%s/www/sites/%s" % (www_root, repo, branch, build, site)
                    drush_command = "cex --destination=/var/www/shared/%s_%s_exported_config" % (repo, branch)
                    if DrupalUtils.drush_command(drush_command, site, drush_runtime_location, True, None, None, True).failed:
                        print "###### Warning: Cannot export config. Stop exporting, but proceed with rest of the build"
                    else:
                        print "===> Exported config successfully. It will be available at /var/www/shared/%s_%s_exported_config" % (repo, branch)
def initial_build_updatedb(repo, branch, build, site, drupal_version): print "===> Running any database hook updates" with settings(warn_only=True): # Set drush location drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % ( repo, branch, build, site) if DrupalUtils.drush_command("updatedb", site, drush_runtime_location, True, None, None, True).failed: raise SystemExit( "###### Could not apply database updates! Everything else has been done, but failing the build to alert to the fact database updates could not be run." ) if drupal_version > 7: if DrupalUtils.drush_command("entity-updates", site, drush_runtime_location, True, None, None, True).failed: print "###### Could not carry out entity updates! Continuing anyway, as this probably isn't a major issue." print "===> Database updates applied"
def go_online(repo, branch, build, buildtype, alias, site, previous_build, readonlymode, drupal_version): drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % (repo, branch, build, site) # readonlymode can either be 'maintenance' (the default) or 'readonlymode', which uses the readonlymode module # If readonlymode is 'readonlymode', check that it exists if readonlymode == "readonlymode": print "===> First checking that the readonlymode module exists..." with settings(warn_only=True): if run("find /var/www/%s_%s_%s/www -type d -name readonlymode | egrep '.*'" % (repo, branch, build)).return_code == 0: print "It does exist, so enable it if it's not already enabled" # Enable the module if it isn't already enabled DrupalUtils.drush_command("pm-enable readonlymode", site, drush_runtime_location) # Set the site_readonly mode variable to 1 print "===> Setting readonlymode back to 0 so content can once again be edited..." if DrupalUtils.drush_command("vset site_readonly 0", site, drush_runtime_location).failed: print "###### Could not set the site out of read only mode! Reverting this build and database." sudo("unlink /var/www/live.%s.%s" % (repo, branch)) sudo("ln -s %s /var/www/live.%s.%s" % (previous_build, repo, branch)) execute(Revert._revert_db, repo, branch, build, buildtype, site) execute(Revert._revert_settings, repo, branch, build, buildtype, site, alias) else: print "###### The readonly flag in config.ini was set to readonly, yet the readonlymode module does not exist. We'll revert to normal maintenance mode..." readonlymode = 'maintenance' if readonlymode == "maintenance": print "===> Taking the site back online..." with settings(warn_only=True): if drupal_version > 7: if DrupalUtils.drush_command( "state-set system.maintenancemode 0", site, drush_runtime_location).failed: print "###### Could not set the site back online! 
Reverting this build and database" sudo("unlink /var/www/live.%s.%s" % (repo, branch)) sudo("ln -s %s /var/www/live.%s.%s" % (previous_build, repo, branch)) execute(Revert._revert_db, repo, branch, build, buildtype, site) execute(Revert._revert_settings, repo, branch, build, buildtype, site, alias) else: if DrupalUtils.drush_command("vset site_offline 0", site, drush_runtime_location).failed: print "###### Could not set the site back online! Reverting this build and database" sudo("unlink /var/www/live.%s.%s" % (repo, branch)) sudo("ln -s %s /var/www/live.%s.%s" % (previous_build, repo, branch)) execute(Revert._revert_db, repo, branch, build, buildtype, site) execute(Revert._revert_settings, repo, branch, build, buildtype, site, alias) else: DrupalUtils.drush_command("vset maintenance_mode 0", site, drush_runtime_location)
def drush_cron(repo, branch, build, site, drupal_version): print "===> Running Drupal cron..." drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % (repo, branch, build, site) with settings(warn_only=True): if DrupalUtils.drush_command("cron", site, drush_runtime_location).failed: print "###### Could not run cron!" raise SystemExit("###### Could not run cron! Site remains on previous build.") else: drush_clear_cache(repo, branch, build, site, drupal_version)
def check_node_access(repo, alias, branch, build, site, notifications_email):
    """Warn (and optionally email) if this build flagged the node access table
    for a rebuild.

    A content access rebuild can keep the site in maintenance mode for hours,
    so this check only warns - it never triggers the rebuild itself.
    """
    drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % (repo, branch, build, site)
    with settings(warn_only=True):
        # Ask Drupal whether node_access_needs_rebuild() is set
        node_access_needs_rebuild = DrupalUtils.drush_command("php-eval 'echo node_access_needs_rebuild();'", site, drush_runtime_location)
        # NOTE(review): drush_command presumably returns the command's output or
        # a result object; comparing it to the integer 1 only works if that value
        # really compares equal to 1 - a string result such as "1" would make
        # this branch unreachable. Verify against DrupalUtils.drush_command.
        if node_access_needs_rebuild == 1:
            print "####### WARNING: this release needs the content access table to be rebuilt. This is an intrusive operation that imply the site needs to stay in maintenance mode untill the whole process is finished."
            print "####### Depending on the number of nodes and the complexity of access rules, this can take several hours. Be sure to either plan the release appropriately, or when possible use alternative method that are not intrusive."
            print "####### We recommend you consider this module: https://www.drupal.org/project/node_access_rebuild_progressive"
            # Send an email if an address is provided in config.ini
            if notifications_email:
                local("echo 'Your build for %s of branch %s has triggered a warning of a possible content access table rebuild - this may cause an extended outage of your website. Please review!' | mail -s 'Content access table warning' %s" % (alias, branch, notifications_email))
                print "===> Sent warning email to %s" % notifications_email
        else:
            print "===> Node access rebuild check completed, as far as we can tell this build is safe"
def initial_build_config_import(repo, branch, build, site, drupal_version): with settings(warn_only=True): # Check to see if this is a Drupal 8 build if drupal_version > 7: # Set drush location drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % ( repo, branch, build, site) print "===> Importing configuration for Drupal 8 site..." if DrupalUtils.drush_command("cim", site, drush_runtime_location, True, None, None, True).failed: raise SystemExit( "###### Could not import configuration! Failing the initial build." ) else: print "===> Configuration imported."
def reenable_modules(repo, alias, branch, build, site, buildtype, import_config, import_config_method, cimy_mapping, drupal_version, enable_modules=[]): drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % (repo, branch, build, site) with settings(warn_only=True): if drupal_version > 7: if import_config: import_config_command = DrupalConfig.import_config_command( repo, branch, build, site, import_config_method, cimy_mapping) if DrupalUtils.drush_command("%s" % import_config_command, site, drush_runtime_location, True, None, None, True).failed: print "###### Cannot import config to enable modules. Manual investigation is required." else: print " ===> Modules re-enabled via config import." else: print "Enable modules using pm-enable instead." if enable_modules and not import_config: for module in enable_modules: drush_command = "pm-enable %s" % module if DrupalUtils.drush_command(drush_command, site, drush_runtime_location).failed: print "###### Cannot enable %s. Manual investigation is required." % module else: print "===> %s re-enabled." % module
def secure_admin_password(repo, branch, build, site, drupal_version): print "===> Setting secure username and password for uid 1" drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % (repo, branch, build, site) u1pass = common.Utils._gen_passwd(20) u1name = "codeenigma-%s-admin" % branch with cd('/var/www/%s_%s_%s/www/sites/%s' % (repo, branch, build, site)): with settings(warn_only=True): if drupal_version > 7: drush_command = 'sqlq "UPDATE users_field_data SET name = \'%s\' WHERE uid = 1"' % u1name DrupalUtils.drush_command(drush_command, site, drush_runtime_location) else: drush_command = 'sqlq "UPDATE users SET name = \'%s\' WHERE uid = 1"' % u1name DrupalUtils.drush_command(drush_command, site, drush_runtime_location) drush_clear_cache(repo, branch, build, site, drupal_version) drush_command = DrupalUtils.get_drush_user_password_command(u1name, u1pass) DrupalUtils.drush_command(drush_command, site, drush_runtime_location)
def drush_status(repo, branch, build, buildtype, site, drush_runtime_location=None, alias=None, db_backup=True, revert=False, revert_settings=False, sites_deployed=None): print "===> Running a drush status test" if not drush_runtime_location: drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % ( repo, branch, build, site) drush_output = DrupalUtils.drush_command("status", site, drush_runtime_location, False, "yaml") if run("echo \"%s\" | egrep 'Connected|Successful'" % drush_output).failed: print "###### Could not bootstrap the database!" if revert == False and revert_settings == True: for revert_alias, revert_site in sites_deployed.iteritems(): execute(Revert._revert_settings, repo, branch, build, buildtype, revert_site, revert_alias) execute(Revert._revert_go_online, repo, branch, build, buildtype, site) else: if revert: for revert_alias, revert_site in sites_deployed.iteritems(): if db_backup: execute(Revert._revert_db, repo, branch, build, buildtype, revert_site) else: print "####### Due to your config settings no database backup was taken so your database may be broken!" execute(Revert._revert_settings, repo, branch, build, buildtype, revert_site, revert_alias) raise SystemExit( "###### Could not bootstrap the database on this build! Aborting") else: # Send back the drush output in case we need it return drush_output
def drush_status(repo, branch, build, buildtype, site, drush_runtime_location=None, alias=None, revert=False, revert_settings=False): print "===> Running a drush status test" if not drush_runtime_location: drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % ( repo, branch, build, site) drush_output = DrupalUtils.drush_command("status", site, drush_runtime_location, False, "yaml") if run("echo \"%s\" | egrep 'Connected|Successful'" % drush_output).failed: print "###### Could not bootstrap the database!" if revert == False and revert_settings == True: if alias: execute(Revert._revert_settings, repo, branch, build, buildtype, site, alias) else: print "###### Could not revert settings, no alias provided" else: if revert == True: execute(Revert._revert_db, repo, branch, build, buildtype, site) if alias: execute(Revert._revert_settings, repo, branch, build, buildtype, site, alias) else: print "###### Could not revert settings, no alias provided" raise SystemExit( "###### Could not bootstrap the database on this build! Aborting") else: # Send back the drush output in case we need it return drush_output
def config_import(repo, branch, build, buildtype, site, alias, drupal_version, previous_build): with settings(warn_only=True): # Check to see if this is a Drupal 8 build if drupal_version > 7: print "===> Importing configuration for Drupal 8 site..." drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % ( repo, branch, build, site) if DrupalUtils.drush_command("cim", site, drush_runtime_location, True, None, None, True).failed: print "###### Could not import configuration! Reverting this database and settings" sudo("unlink /var/www/live.%s.%s" % (repo, branch)) sudo("ln -s %s /var/www/live.%s.%s" % (previous_build, repo, branch)) execute(Revert._revert_db, repo, branch, build, buildtype, site) execute(Revert._revert_settings, repo, branch, build, buildtype, site, alias) raise SystemExit( "###### Could not import configuration! Reverted database and settings. Site remains on previous build" ) else: print "===> Configuration imported. Running a cache rebuild..." drush_clear_cache(repo, branch, build, site, drupal_version)
def main(shortname, staging_branch, prod_branch, synctype='both', fresh_database='no', sanitise='yes', sanitised_password=None, sanitised_email=None, staging_shortname=None, remote_files_dir=None, staging_files_dir=None, sync_dir=None):
    """Sync a site's database and/or uploaded files from production to staging.

    synctype is 'db', 'files' or 'both'. The staging site must already exist
    (as a drush alias @<staging_shortname>_<staging_branch>).

    NOTE(review): this function reads `config` (e.g. config.has_section) which
    is not defined here - presumably a module-level parse of sync.ini; it is
    distinct from `drupal_config`, the per-site config.ini read below. Verify
    against the full file that `config` is really a global and that the mix of
    the two is intentional.
    """
    # Set the variables we need.
    drupal_version = None
    # If we didn't get a staging shortname, we should set it to shortname
    if staging_shortname is None:
        staging_shortname = shortname
    # Run the tasks.
    # --------------
    # If this is the first build, attempt to install the site for the first time.
    with settings(warn_only=True):
        if run('drush sa | grep ^@%s_%s$ > /dev/null' % (staging_shortname, staging_branch)).failed:
            raise SystemError("You can't sync to a site if it hasn't been set up yet in this environment.")
        else:
            print "===> We found the site, so we'll continue with the sync"
    # Derive the staging build's docroot from the drush alias
    path_to_drupal = run("drush @%s_%s st --fields=root | grep -oEi '\/var\/www\/%s_%s_build_[0-9]+' | sed 's/ //g'" % (staging_shortname, staging_branch, staging_shortname, staging_branch))
    print "===> Path is %s" % path_to_drupal
    path_to_config_file = path_to_drupal + '/config.ini'
    drupal_config = common.ConfigFile.read_config_file(path_to_config_file, False, True, True)
    if config.has_section(shortname):
        try:
            orig_host = "%s@%s" % (env.user, env.host)
            # Get Drupal version ### @TODO: deprecated, can be removed later
            drupal_version = common.ConfigFile.return_config_item(config, "Version", "drupal_version", "string", None, True, True, replacement_section="Drupal")
            # This is the correct location for 'drupal_version' - note, respect the deprecated value as default
            drupal_version = common.ConfigFile.return_config_item(config, "Drupal", "drupal_version", "string", drupal_version)
            drupal_version = int(DrupalUtils.determine_drupal_version(drupal_version, shortname, staging_branch, 0, drupal_config, 'sync'))
            # Allow developer to run a script prior to a sync
            common.Utils.perform_client_sync_hook(path_to_drupal, staging_branch, 'pre')
            # Database syncing
            if synctype == 'db' or synctype == 'both':
                Sync.backup_db(staging_shortname, staging_branch)
                Sync.sync_db(orig_host, shortname, staging_shortname, staging_branch, prod_branch, fresh_database, sanitise, sanitised_password, sanitised_email, config)
                # Allow developer to run a script mid-way through a sync
                common.Utils.perform_client_sync_hook(path_to_drupal, staging_branch, 'mid-db')
                Sync.drush_updatedb(orig_host, staging_shortname, staging_branch)
            # Files syncing (uploads)
            if synctype == 'files' or synctype == 'both':
                Sync.sync_assets(orig_host, shortname, staging_shortname, staging_branch, prod_branch, config, remote_files_dir, staging_files_dir, sync_dir)
                # Allow developer to run a script mid-way through a sync
                common.Utils.perform_client_sync_hook(path_to_drupal, staging_branch, 'mid-files')
            # Cleanup
            Sync.clear_caches(orig_host, staging_shortname, staging_branch, drupal_version)
            env.host_string = orig_host
            common.Services.clear_php_cache()
            common.Services.clear_varnish_cache()
            common.Services.reload_webserver()
            # Allow developer to run a script after a sync
            common.Utils.perform_client_sync_hook(path_to_drupal, staging_branch, 'post')
        # NOTE(review): bare except also traps SystemExit/KeyboardInterrupt;
        # consider `except Exception as e` instead.
        except:
            e = sys.exc_info()[1]
            raise SystemError(e)
    else:
        raise SystemError("Could not find this shortname %s in the sync.ini so we cannot proceed." % staging_shortname)
def environment_indicator(www_root, repo, branch, build, buildtype, alias, site, drupal_version):
    """Configure and enable the environment_indicator contrib module, if it is
    shipped in this build.

    Picks a banner colour from buildtype (green dev, orange stage, red for
    anything else), appends the module's settings to the shared
    <alias>_<branch>.settings.inc unless the buildtype settings.php already
    defines them, then enables the module (Drupal 7+ only).
    """
    # Check if the module exists in the build
    with settings(warn_only=True):
        if run("find %s/%s_%s_%s/www -type d -name environment_indicator | egrep '.*'" % (www_root, repo, branch, build)).return_code == 0:
            environment_indicator_module = True
            print "===> environment_indicator module exists"
        else:
            environment_indicator_module = False
            print "===> environment_indicator module does not exist"
    # The module exists, now check if it's configured
    if environment_indicator_module:
        # Set up colours
        if buildtype == "dev":
            environment_indicator_color = "#00E500"
        elif buildtype == "stage":
            environment_indicator_color = "#ff9b01"
        else:
            # We don't know this buildtype, let's assume the worst and treat it as prod
            environment_indicator_color = "#ff0101"
        # Append the config to settings.inc if not already present
        # Use of Fabfile's 'append()' is meant to silently ignore if the text already exists in the file. So we don't bother
        # checking for it - if it exists but with a different value, appending will overrule the previous entry (maybe a bit
        # ugly or confusing when reading the file, but saves a horrible amount of kludge here grepping for existing entries)
        if drupal_version == 7:
            # Unfortunately this can't check inside the $buildtype.settings.php include, if there is one, so we still need to
            # check for that.
            print "===> Drupal 7 site, checking in %s/%s_%s_%s/www/sites/%s/%s.settings.php for $conf['environment_indicator_overwritten_name']" % (www_root, repo, branch, build, site, buildtype)
            contain_string = "$conf['environment_indicator_overwritten_name']"
            settings_file = "%s/%s_%s_%s/www/sites/%s/%s.settings.php" % (www_root, repo, branch, build, site, buildtype)
            does_contain = contains(settings_file, contain_string, exact=False, use_sudo=True)
            if does_contain:
                print "===> Settings already exist in %s.settings.php, we will not write anything to %s/config/%s_%s.settings.inc" % (buildtype, www_root, alias, branch)
            else:
                print "===> Checking for and appending environment_indicator settings to %s/config/%s_%s.settings.inc" % (www_root, alias, branch)
                append("%s/config/%s_%s.settings.inc" % (www_root, alias, branch), "$conf['environment_indicator_overwrite'] = 'TRUE';", True)
                append("%s/config/%s_%s.settings.inc" % (www_root, alias, branch), "$conf['environment_indicator_overwritten_name'] = '%s';" % buildtype, True)
                append("%s/config/%s_%s.settings.inc" % (www_root, alias, branch), "$conf['environment_indicator_overwritten_color'] = '%s';" % environment_indicator_color, True)
                append("%s/config/%s_%s.settings.inc" % (www_root, alias, branch), "$conf['environment_indicator_overwritten_text_color'] = '#ffffff';", True)
        if drupal_version > 7:
            # Unfortunately this can't check inside the $buildtype.settings.php include, if there is one, so we still need to
            # check for that.
            print "===> Drupal 8+ site, checking in %s/%s_%s_%s/www/sites/%s/%s.settings.php for $config['environment_indicator.indicator']['name']" % (www_root, repo, branch, build, site, buildtype)
            contain_string = "$config['environment_indicator.indicator']['name']"
            settings_file = "%s/%s_%s_%s/www/sites/%s/%s.settings.php" % (www_root, repo, branch, build, site, buildtype)
            does_contain = contains(settings_file, contain_string, exact=False, use_sudo=True)
            if does_contain:
                print "===> Settings already exist in %s.settings.php, we will not write anything to %s/config/%s_%s.settings.inc" % (buildtype, www_root, alias, branch)
            else:
                print "===> Checking for and appending environment_indicator settings to %s/config/%s_%s.settings.inc" % (www_root, alias, branch)
                append("%s/config/%s_%s.settings.inc" % (www_root, alias, branch), "$config['environment_indicator.indicator']['name'] = '%s';" % buildtype, True)
                append("%s/config/%s_%s.settings.inc" % (www_root, alias, branch), "$config['environment_indicator.indicator']['bg_color'] = '%s';" % environment_indicator_color, True)
                append("%s/config/%s_%s.settings.inc" % (www_root, alias, branch), "$config['environment_indicator.indicator']['fg_color'] = '#ffffff';", True)
        # Enable the module for any Drupal newer than 6
        if drupal_version > 6:
            drush_runtime_location = "%s/%s_%s_%s/www/sites/%s" % (www_root, repo, branch, build, site)
            DrupalUtils.drush_command("pm-enable environment_indicator", site, drush_runtime_location, True, None, None, True)
        if drupal_version == 6:
            print "Drupal 6 site. Not setting up environment_indicator at this time.."
    else:
        print "The environment_indicator module was not present. Moving on..."
def main(repo, repourl, build, branch, buildtype, keepbuilds=10, url=None, freshdatabase="Yes", syncbranch=None, sanitise="no", import_config=False, statuscakeuser=None, statuscakekey=None, statuscakeid=None, restartvarnish="yes", cluster=False, sanitised_email=None, sanitised_password=None, webserverport='8080', mysql_version=5.5, rds=False, autoscale=None, mysql_config='/etc/mysql/debian.cnf', config_filename='config.ini', config_fullpath=False, php_ini_file=None, do_updates=True): if config_fullpath == "False": config_fullpath = False if config_fullpath == "True": config_fullpath = True if do_updates == "False": do_updates = False if do_updates == "True": do_updates = True if import_config == "False": import_config = False if import_config == "True": import_config = True # Read the config.ini file from repo, if it exists config = common.ConfigFile.buildtype_config_file(buildtype, config_filename, fullpath=config_fullpath) # Can be set in the config.ini [AWS] section aws_credentials = common.ConfigFile.return_config_item(config, "AWS", "aws_credentials", "string", "/home/jenkins/.aws/credentials") aws_autoscale_group = common.ConfigFile.return_config_item(config, "AWS", "aws_autoscale_group", "string", "prod-asg-prod") aws_package_all_builds = common.ConfigFile.return_config_item(config, "AWS", "aws_package_all_builds", "boolean", False) aws_build_tar = common.ConfigFile.return_config_item(config, "AWS", "aws_build_tar", "boolean", True) # Now we need to figure out what server(s) we're working with # Define primary host common.Utils.define_host(config, buildtype, repo) # Define server roles (if applicable) common.Utils.define_roles(config, cluster, autoscale, aws_credentials, aws_autoscale_group) # Check where we're deploying to - abort if nothing set in config.ini if env.host is None: raise ValueError("===> You wanted to deploy a build but we couldn't find a host in the map file for repo %s so we're aborting." 
% repo) # Set some default config options and variables user = "******" previous_build = "" previous_db = "" statuscake_paused = False www_root = "/var/www" application_directory = "www" site_root = www_root + '/%s_%s_%s' % (repo, branch, build) site_link = www_root + '/live.%s.%s' % (repo, branch) site_exists = None behat_config_file_default = "%s/%s_%s_%s/tests/behat/behat.yml" % (www_root, repo, branch, build) composer_lock_outdated = False # Set our host_string based on user@host env.host_string = '%s@%s' % (user, env.host) # Can be set in the config.ini [Build] section ssh_key = common.ConfigFile.return_config_item(config, "Build", "ssh_key") notifications_email = common.ConfigFile.return_config_item(config, "Build", "notifications_email") php_ini_file = common.ConfigFile.return_config_item(config, "Build", "php_ini_file", "string", php_ini_file) build_hook_version = common.ConfigFile.return_config_item(config, "Build", "build_hook_version", "string", "1") # If this is a multisite build, set the url to None so one is generated for every site in the multisite setup. This particular line will ensure the *first* site has its url generated. 
if config.has_section("Sites"): print "===> Config file has a [Sites] section, so we'll assume this is a multisite build and set url to None" url = None # Need to keep potentially passed in 'url' value as default else: url = common.ConfigFile.return_config_item(config, "Build", "url", "string", url) # Can be set in the config.ini [Database] section db_name = common.ConfigFile.return_config_item(config, "Database", "db_name") db_username = common.ConfigFile.return_config_item(config, "Database", "db_username") db_password = common.ConfigFile.return_config_item(config, "Database", "db_password") db_backup = common.ConfigFile.return_config_item(config, "Database", "db_backup", "boolean", True) # Need to keep potentially passed in MySQL version and config path as defaults mysql_config = common.ConfigFile.return_config_item(config, "Database", "mysql_config", "string", mysql_config) mysql_version = common.ConfigFile.return_config_item(config, "Database", "mysql_version", "string", mysql_version) dump_file = common.ConfigFile.return_config_item(config, "Database", "dump_file") # Can be set in the config.ini [Drupal] section ### @TODO: deprecated, can be removed later drupal_version = common.ConfigFile.return_config_item(config, "Version", "drupal_version", "string", None, True, True, replacement_section="Drupal") # This is the correct location for 'drupal_version' - note, respect the deprecated value as default drupal_version = common.ConfigFile.return_config_item(config, "Drupal", "drupal_version", "string", drupal_version) profile = common.ConfigFile.return_config_item(config, "Drupal", "profile", "string", "minimal") run_cron = common.ConfigFile.return_config_item(config, "Drupal", "run_cron", "boolean", False) import_config = common.ConfigFile.return_config_item(config, "Drupal", "import_config", "boolean", import_config) import_config_method = common.ConfigFile.return_config_item(config, "Drupal", "import_config_method", "string", "cim") ### @TODO: deprecated, can 
be removed later fra = common.ConfigFile.return_config_item(config, "Features", "fra", "boolean", False, True, True, replacement_section="Drupal") # This is the correct location for 'fra' - note, respect the deprecated value as default fra = common.ConfigFile.return_config_item(config, "Drupal", "fra", "boolean", fra) ### @TODO: deprecated, can be removed later readonlymode = common.ConfigFile.return_config_item(config, "Readonly", "readonly", "string", "maintenance", True, True, replacement_section="Drupal") # This is the correct location for 'readonly' - note, respect the deprecated value as default readonlymode = common.ConfigFile.return_config_item(config, "Drupal", "readonly", "string", readonlymode) ### @TODO: deprecated, can be removed later config_export = common.ConfigFile.return_config_item(config, "Hooks", "config_export", "boolean", False, True, True, replacement_section="Drupal") # This is the correct location for 'config_export' - note, respect the deprecated value as default config_export = common.ConfigFile.return_config_item(config, "Drupal", "config_export", "boolean", config_export) secure_user_one = common.ConfigFile.return_config_item(config, "Drupal", "secure_user_one", "boolean", True) # Can be set in the config.ini [Composer] section composer = common.ConfigFile.return_config_item(config, "Composer", "composer", "boolean", True) composer_lock = common.ConfigFile.return_config_item(config, "Composer", "composer_lock", "boolean", True) no_dev = common.ConfigFile.return_config_item(config, "Composer", "no_dev", "boolean", True) through_ssh = common.ConfigFile.return_config_item(config, "Composer", "through_ssh", "boolean", False) mark_unstable = common.ConfigFile.return_config_item(config, "Composer", "mark_unstable", "boolean", False) # Can be set in the config.ini [Testing] section # PHPUnit is in common/Tests because it can be used for any PHP application phpunit_run = common.ConfigFile.return_config_item(config, "Testing", "phpunit_run", 
"boolean", False) phpunit_fail_build = common.ConfigFile.return_config_item(config, "Testing", "phpunit_fail_build", "boolean", False) phpunit_revert_build = common.ConfigFile.return_config_item(config, "Testing", "phpunit_revert_build", "boolean", False) phpunit_group = common.ConfigFile.return_config_item(config, "Testing", "phpunit_group", "string", "unit") phpunit_test_directory = common.ConfigFile.return_config_item(config, "Testing", "phpunit_test_directory", "string", "%s/modules/custom" % application_directory) phpunit_path = common.ConfigFile.return_config_item(config, "Testing", "phpunit_path", "string", "vendor/phpunit/phpunit/phpunit") phpunit_install = common.ConfigFile.return_config_item(config, "Testing", "phpunit_install", "boolean", True) # CodeSniffer itself is in common/Tests, but standards used here are Drupal specific, see drupal/DrupalTests.py for the wrapper to apply them codesniffer = common.ConfigFile.return_config_item(config, "Testing", "codesniffer", "boolean") codesniffer_extensions = common.ConfigFile.return_config_item(config, "Testing", "codesniffer_extensions", "string", "php,module,inc,install,test,profile,theme,info,txt,md") codesniffer_ignore = common.ConfigFile.return_config_item(config, "Testing", "codesniffer_ignore", "string", "node_modules,bower_components,vendor") codesniffer_paths = common.ConfigFile.return_config_item(config, "Testing", "codesniffer_paths", "string", "%s/modules/custom %s/themes/custom" % (application_directory, application_directory)) # Regex check string_to_check = common.ConfigFile.return_config_item(config, "Testing", "string_to_check", "string") curl_options = common.ConfigFile.return_config_item(config, "Testing", "curl_options", "string", "sL") check_protocol = common.ConfigFile.return_config_item(config, "Testing", "check_protocol", "string", "https") # Behat config file location behat_config_file = common.ConfigFile.return_config_item(config, "Behat", "behat_config_file", "string", 
behat_config_file_default) # Set SSH key if needed # @TODO: this needs to be moved to config.ini for Code Enigma GitHub projects if "*****@*****.**" in repourl: ssh_key = "/var/lib/jenkins/.ssh/id_rsa_github" # Prepare Behat variables behat_config = None behat_tests_failed = False if config.has_section("Behat"): behat_config = DrupalTests.prepare_behat_tests(config, buildtype) # Pause StatusCake monitoring statuscake_paused = common.Utils.statuscake_state(statuscakeuser, statuscakekey, statuscakeid, "pause") # Run the tasks. # -------------- execute(common.Utils.clone_repo, repo, repourl, branch, build, None, ssh_key, hosts=env.roledefs['app_all']) # Gitflow workflow means '/' in branch names, need to clean up. branch = common.Utils.generate_branch_name(branch) print "===> Branch is %s" % branch # Check the php_ini_file string isn't doing anything naughty malicious_code = False malicious_code = common.Utils.detect_malicious_strings([';', '&&'], php_ini_file) # Set CLI PHP version, if we need to if php_ini_file and not malicious_code: run("export PHPRC='%s'" % php_ini_file) # Set branches to be treated as feature branches # Regardless of whether or not 'fra' is set, we need to set 'branches' # our our existing_build_wrapper() function gets upset later. 
feature_branches = Drupal.drush_fra_branches(config, branch) # Now we have the codebase and a clean branch name we can figure out the Drupal version # Don't use execute() because it returns an array of values returned keyed by hostname drupal_version = int(DrupalUtils.determine_drupal_version(drupal_version, repo, branch, build, config)) print "===> the drupal_version variable is set to %s" % drupal_version # Let's allow developers to perform some early actions if they need to execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='pre', build_hook_version="1", hosts=env.roledefs['app_all']) execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='pre-prim', build_hook_version="1", hosts=env.roledefs['app_primary']) # @TODO: This will be a bug when Drupal 9 comes out! # We need to cast version as an integer and use < 8 if drupal_version < 8: import_config = False import_config_method = "cim" if drupal_version > 7 and composer is True: # Sometimes people use the Drupal Composer project which puts Drupal 8's composer.json file in repo root. with shell_env(PHPRC='%s' % php_ini_file): with settings(warn_only=True): if run("find %s/composer.json" % site_root).return_code == 0: path = site_root else: path = site_root + "/" + application_directory if mark_unstable: composer_lock_outdated = common.PHP.composer_validate(path) execute(common.PHP.composer_command, path, "install", None, no_dev, composer_lock, through_ssh=through_ssh) # Compile a site mapping, which is needed if this is a multisite build # Just sets to 'default' if it is not mapping = {} mapping = Drupal.configure_site_mapping(repo, mapping, config) # Empty dictionary for sites that have been deployed. A site is added to the # dictionary at the *start* of its deployment so it is also reverted if a stage # of the deployment fails. Though it is only added if the build is an existing # build. 
sites_deployed = {} # Empty directory for site URLs that have been deployed. The structure will be # alias: url site_urls = {} # Record the link to the previous build previous_build = common.Utils.get_previous_build(repo, branch, build) # Take a backup of all sites, then take all sites offline before doing anything else. offline_site_exists = None for offline_alias,offline_site in mapping.iteritems(): offline_site_exists = DrupalUtils.check_site_exists(previous_build, offline_site) if offline_site_exists: drush_runtime_location = "%s/%s/sites/%s" % (previous_build, application_directory, offline_site) drush_output = Drupal.drush_status(repo, branch, build, buildtype, offline_site, drush_runtime_location) offline_db_name = Drupal.get_db_name(repo, branch, build, buildtype, offline_site, drush_output) # If database updates will run, take the site offline if do_updates: execute(Drupal.go_offline, repo, branch, offline_site, offline_alias, readonlymode, drupal_version) # Backup database if db_backup: previous_db = execute(common.MySQL.mysql_backup_db, offline_db_name, build, True) previous_db = previous_db[env.roledefs['app_primary'][0]] offline_site_exists = None # Run new installs for alias,site in mapping.iteritems(): # Compile variables for feature branch builds (if applicable) FeatureBranches.configure_feature_branch(buildtype, config, branch, alias) print "===> Feature branch debug information below:" print "httpauth_pass: %s" % FeatureBranches.httpauth_pass print "ssl_enabled: %s" % FeatureBranches.ssl_enabled print "ssl_cert: %s" % FeatureBranches.ssl_cert print "ssl_ip: %s" % FeatureBranches.ssl_ip print "drupal_common_config: %s" % FeatureBranches.drupal_common_config print "featurebranch_url: %s" % FeatureBranches.featurebranch_url print "featurebranch_vhost: %s" % FeatureBranches.featurebranch_vhost site_exists = DrupalUtils.check_site_exists(previous_build, site) cimy_mapping = {} if drupal_version > 7 and import_config_method == "cimy": cimy_mapping = 
DrupalConfig.configure_cimy_params(config, site) if freshdatabase == "Yes" and buildtype == "custombranch": # For now custombranch builds to clusters cannot work dump_file = Drupal.prepare_database(repo, branch, build, buildtype, alias, site, syncbranch, env.host_string, sanitise, sanitised_password, sanitised_email) # Need to make sure the env.host variable is set correctly, after potentially fetching a database dump from production env.host = env.roledefs['app_primary'][0] if FeatureBranches.featurebranch_url is not None: url = FeatureBranches.featurebranch_url url = common.Utils.generate_url(url, alias, branch) # Now check if we have a Drush alias with that name. If not, run an install with settings(hide('warnings', 'stderr'), warn_only=True): # Because this runs in Jenkins home directory, it will use 'system' drush if not site_exists: print "===> Didn't find a previous build so we'll install this new site %s" % url initial_build_wrapper(url, www_root, application_directory, repo, branch, build, site, alias, profile, buildtype, sanitise, config, db_name, db_username, db_password, mysql_version, mysql_config, dump_file, sanitised_password, sanitised_email, cluster, rds, drupal_version, import_config, import_config_method, cimy_mapping, webserverport, behat_config, autoscale, php_ini_file, build_hook_version, secure_user_one, previous_build) else: # Otherwise it's an existing build # This does not bring sites online that have been taken offline but not yet deployed sites_deployed[alias] = site existing_build_wrapper(url, www_root, application_directory, site_root, site_link, repo, branch, build, buildtype, previous_build, db_backup, alias, site, no_dev, config, config_export, drupal_version, readonlymode, notifications_email, autoscale, do_updates, import_config, import_config_method, cimy_mapping, fra, run_cron, feature_branches, php_ini_file, build_hook_version, secure_user_one, sites_deployed) # Now everything should be in a good state, let's enable environment 
indicator for this site, if present execute(Drupal.environment_indicator, www_root, repo, branch, build, buildtype, alias, site, drupal_version) site_urls[alias] = url # If this is a single site, we're done with the 'url' variable anyway # If this is a multisite, we have to set it to None so a new 'url' gets generated on the next pass url = None site_exists = None # Adjust the live symlink now that all sites have been deployed. Bring them online after this has happened. if previous_build is not None: execute(common.Utils.adjust_live_symlink, repo, branch, build, hosts=env.roledefs['app_all']) # This will revert the database if fails live_build = run("readlink %s/live.%s.%s" % (www_root, repo, branch)) this_build = "%s/%s_%s_%s" % (www_root, repo, branch, build) # The above paths should match - something is wrong if they don't! if not this_build == live_build: # Make sure the live symlink is pointing at the previous working build. sudo("ln -nsf %s %s/live.%s.%s" % (www_root, previous_build, repo, branch)) for revert_alias,revert_site in sites_deployed.iteritems(): if db_backup: common.MySQL.mysql_revert_db(db_name, build) else: print "####### Due to your config settings no database backup was taken so your database may be broken!" execute(Revert._revert_settings, repo, branch, build, buildtype, revert_site, revert_alias) execute(Revert._revert_go_online, repo, branch, build, buildtype, site, drupal_version) raise SystemExit("####### Could not successfully adjust the symlink pointing to the build! Could not take this build live. Database may have had updates applied against the newer build already. 
Reverting database") if do_updates and previous_build is not None: for online_alias,online_site in sites_deployed.iteritems(): execute(Drupal.go_online, repo, branch, build, buildtype, online_alias, online_site, previous_build, readonlymode, drupal_version, sites_deployed=sites_deployed) # This will revert the database and switch the symlink back if it fails # Clear the opcache again after the site has been brought online execute(common.Services.clear_php_cache, hosts=env.roledefs['app_all']) for test_alias,test_site in mapping.iteritems(): behat_url = site_urls[test_alias] # After any build we want to run all the available automated tests test_runner(www_root, repo, branch, build, test_alias, buildtype, behat_url, ssl_enabled, db_backup, config, behat_config, behat_config_file, import_config, import_config_method, cimy_mapping, drupal_version, phpunit_run, phpunit_group, phpunit_test_directory, phpunit_path, phpunit_fail_build, phpunit_revert_build, phpunit_install, test_site, codesniffer, codesniffer_extensions, codesniffer_ignore, codesniffer_paths, string_to_check, check_protocol, curl_options, notifications_email, build_hook_version, sites_deployed, previous_build) behat_url = None # Unset CLI PHP version if we need to if php_ini_file: run("export PHPRC=''") # Resume StatusCake monitoring if statuscake_paused: common.Utils.statuscake_state(statuscakeuser, statuscakekey, statuscakeid) # If this is autoscale at AWS, let's update the tarball in S3 if autoscale: # In some cases, you may not want to tarball up the builds. # For example, when all builds are tarballed up, you can't # reliably have multiple builds running for dev and stage # as it will cause an error when the contents of /var/www # change. if aws_build_tar: execute(common.Utils.tarball_up_to_s3, www_root, repo, branch, build, autoscale, aws_package_all_builds) else: print "Don't create a tarball after this build. Assume the tarballing is happening separately, such as in an overnight job." 
#commit_new_db(repo, repourl, url, build, branch) execute(common.Utils.remove_old_builds, repo, branch, keepbuilds, hosts=env.roledefs['app_all']) script_dir = os.path.dirname(os.path.realpath(__file__)) if put(script_dir + '/../util/revert', '/home/jenkins', mode=0755).failed: print "####### BUILD COMPLETE. Could not copy the revert script to the application server, revert will need to be handled manually" else: print "####### BUILD COMPLETE. If you need to revert this build, run the following command: sudo /home/jenkins/revert -b %s -d /home/jenkins/dbbackups/%s -s %s/live.%s.%s -a %s_%s" % (previous_build, previous_db, www_root, repo, branch, repo, branch) # We have two scenarios where a build might be marked as unstable: # 1) If the composer.lock file is outdated (r45198) # 2) If any of our tests failed, abort the job (r23697) unstable_text = "" unstable_build = False if behat_tests_failed: unstable_text = unstable_text + "####### Some tests failed. Aborting the job.\n" unstable_build = True if composer_lock_outdated: unstable_text = unstable_text + "####### composer.lock is outdated.\n" unstable_build = True if unstable_build: print "%s" % unstable_text sys.exit(3)
def run_behat_tests(repo, branch, build, alias, site, buildtype, url, ssl_enabled, behat_config_file, junit, import_config, import_config_method, cimy_mapping, drupal_version, tags=[], disable_modules=[]): cwd = os.getcwd() continue_tests = True tests_failed = False drush_runtime_location = "/var/www/%s_%s_%s/www/sites/%s" % (repo, branch, build, site) with settings(warn_only=True): while continue_tests: # Disable modules that enable HTTP auth. if disable_modules: if drupal_version > 7: for module in disable_modules: drush_command = "pm-uninstall %s" % module if DrupalUtils.drush_command( drush_command, site, drush_runtime_location).failed: print "###### Cannot disable %s. Stopping tests early..." % module continue_tests = False break else: for module in disable_modules: drush_command = "dis %s" % module if DrupalUtils.drush_command( drush_command, site, drush_runtime_location).failed: print "###### Cannot disable %s. Stopping tests early..." % module continue_tests = False break with cd("/var/www/%s_%s_%s/tests/behat" % (repo, branch, build)): if run("stat behat.yml").failed: # No behat.yml file, so let's move our buildtype specific behat file into place, if it exists. if run("stat %s.behat.yml" % buildtype).failed: # No buildtype.behat.yml either. In that case, don't run any tests and break out. print "No behat.yml or %s.behat.yml file. Stopping tests early..." % buildtype continue_tests = False break else: # We found a buildtype.behat.yml file, so move it into place print "Found a %s.behat.yml file. Moving it to behat.yml because behat.yml didn't exist." % buildtype sudo("mv %s.behat.yml behat.yml" % buildtype) else: # We found a behat.yml file. Let's see if there's a buildtype.behat.yml file before we move it out of the way. if run("stat %s.behat.yml" % buildtype).failed: # Didn't find a buildtype.behat.yml file. Nothing else to do. print "didn't find a %s.behat.yml file, so we'll use the behat.yml file that was found." 
% buildtype else: # We found a buildtype.behat.yml file, so we want to use that. Move the behat.yml file aside so the buildtype specific file can be used. print "Found %s.behat.yml, so we'll move behat.yml out of the way." % buildtype sudo("mv behat.yml behat.yml.backup") sudo("mv %s.behat.yml behat.yml" % buildtype) # If buildtype is 'custombranch', this is a feature branch deployment, so there're some special things we need to do. if buildtype == "custombranch": behat_file = "behat.yml" if run("grep \"base_url:\" %s" % behat_file).return_code == 0: # The behat.yml file does contain the base_url: string. Let's replace it with the url of our feature site. print "Replacing the old base_url value with the URL for the feature branch site..." scheme = 'https' if ssl_enabled else 'http' replace_string = "base_url: .*" replace_with = "base_url: %s://%s" % (scheme, url) replace_with = replace_with.lower() sed(behat_file, replace_string, replace_with, limit='', use_sudo=False, backup='.bak', flags="i", shell=False) else: # Seems like the behat.yml file doesn't contain the string we're looking for. Stop performing tests. print "It doesn't look like the behat.yml file has the string we're looking for. Stopping tests early..." continue_tests = False break # Now it's time to run the tests... print "===> Running behat tests (without Selenium)..." if tags: test_tags = '&&'.join(tags) print "Debug info - test_tags = %s" % test_tags else: test_tags = '~@javascript' if junit: test_method = '-f progress -o std -f junit -o xml' else: test_method = '-f pretty -o std' if run("/var/www/%s_%s_%s/vendor/bin/behat --config=%s -v --tags=\"%s\" %s" % (repo, branch, build, behat_config_file, test_tags, test_method)).failed: print "Behat tests seem to have failed!" tests_failed = True else: print "Looks like Behat tests were successful!" if junit: print "We need to copy the JUnit report to the Jenkins server so it can be processed." 
if sudo("stat /var/www/live.%s.%s/tests/behat/xml/default.xml" % (repo, branch)).failed: print "No xml file found in /var/www/live.%s.%s/tests/behat/xml. That's weird, but we'll carry on." % ( repo, branch) else: print "Found an xml in /var/www/live.%s.%s/tests/behat/xml. Going to copy it to the Jenkins server." % ( repo, branch) local("mkdir -p %s/tests/behat/xml" % cwd) if local( "scp jenkins@%s:/var/www/live.%s.%s/tests/behat/xml/default.xml %s/tests/behat/xml/" % (env.host, repo, branch, cwd)).failed: print "Could not copy JUnit report to Jenkins server. We won't fail the build here, though." else: print "Copied JUnit report to Jenkins server." continue_tests = False # End while loop # Re-enable modules if disable_modules: reenable_modules(repo, alias, branch, build, site, buildtype, import_config, import_config_method, cimy_mapping, drupal_version, disable_modules) # Send test status back to main fabfile return tests_failed
def main(repo, repourl, build, branch, buildtype, keepbuilds=10, url=None,
         freshdatabase="Yes", syncbranch=None, sanitise="no", import_config=False,
         statuscakeuser=None, statuscakekey=None, statuscakeid=None,
         restartvarnish="yes", cluster=False, sanitised_email=None,
         sanitised_password=None, webserverport='8080', mysql_version=5.5,
         rds=False, autoscale=None, mysql_config='/etc/mysql/debian.cnf',
         config_filename='config.ini', php_ini_file=None):
    """Top-level deployment task for a (possibly multisite) Drupal build.

    Reads config.ini from the cloned repo, resolves hosts/roles, clones the
    codebase, determines the Drupal version, then for each site in the mapping
    either runs an initial install (no previous build found) or an existing-build
    update, followed by automated tests and environment-indicator setup.
    Finishes with build housekeeping (old build removal, revert script copy)
    and aborts the job if Behat tests failed.
    """
    # Read the config.ini file from repo, if it exists
    config = common.ConfigFile.buildtype_config_file(buildtype, config_filename)

    # Now we need to figure out what server(s) we're working with
    # Define primary host
    common.Utils.define_host(config, buildtype, repo)
    # Define server roles (if applicable)
    common.Utils.define_roles(config, cluster, autoscale)
    # Check where we're deploying to - abort if nothing set in config.ini
    if env.host is None:
        raise ValueError("===> You wanted to deploy a build but we couldn't find a host in the map file for repo %s so we're aborting." % repo)

    # Set some default config options and variables
    user = "******"
    previous_build = ""
    previous_db = ""
    statuscake_paused = False
    www_root = "/var/www"
    site_root = www_root + '/%s_%s_%s' % (repo, branch, build)
    site_link = www_root + '/live.%s.%s' % (repo, branch)

    # Set our host_string based on user@host
    env.host_string = '%s@%s' % (user, env.host)

    # Can be set in the config.ini [Build] section
    ssh_key = common.ConfigFile.return_config_item(config, "Build", "ssh_key")
    notifications_email = common.ConfigFile.return_config_item(config, "Build", "notifications_email")
    php_ini_file = common.ConfigFile.return_config_item(config, "Build", "php_ini_file", "string", php_ini_file)

    # If this is a multisite build, set the url to None so one is generated for
    # every site in the multisite setup. This particular line will ensure the
    # *first* site has its url generated.
    if config.has_section("Sites"):
        print "===> Config file has a [Sites] section, so we'll assume this is a multisite build and set url to None"
        url = None
    # Need to keep potentially passed in 'url' value as default
    else:
        url = common.ConfigFile.return_config_item(config, "Build", "url", "string", url)

    # Can be set in the config.ini [Database] section
    db_name = common.ConfigFile.return_config_item(config, "Database", "db_name")
    db_username = common.ConfigFile.return_config_item(config, "Database", "db_username")
    db_password = common.ConfigFile.return_config_item(config, "Database", "db_password")
    # Need to keep potentially passed in MySQL version and config path as defaults
    mysql_config = common.ConfigFile.return_config_item(config, "Database", "mysql_config", "string", mysql_config)
    mysql_version = common.ConfigFile.return_config_item(config, "Database", "mysql_version", "string", mysql_version)
    dump_file = common.ConfigFile.return_config_item(config, "Database", "dump_file")

    # Can be set in the config.ini [Drupal] section
    ### @TODO: deprecated, can be removed later
    drupal_version = common.ConfigFile.return_config_item(config, "Version", "drupal_version", "string", None, True, True, replacement_section="Drupal")
    # This is the correct location for 'drupal_version' - note, respect the deprecated value as default
    drupal_version = common.ConfigFile.return_config_item(config, "Drupal", "drupal_version", "string", drupal_version)
    profile = common.ConfigFile.return_config_item(config, "Drupal", "profile", "string", "minimal")
    do_updates = common.ConfigFile.return_config_item(config, "Drupal", "do_updates", "boolean", True)
    run_cron = common.ConfigFile.return_config_item(config, "Drupal", "run_cron", "boolean", False)
    import_config = common.ConfigFile.return_config_item(config, "Drupal", "import_config", "boolean", import_config)
    ### @TODO: deprecated, can be removed later
    fra = common.ConfigFile.return_config_item(config, "Features", "fra", "boolean", False, True, True, replacement_section="Drupal")
    # This is the correct location for 'fra' - note, respect the deprecated value as default
    fra = common.ConfigFile.return_config_item(config, "Drupal", "fra", "boolean", fra)
    ### @TODO: deprecated, can be removed later
    readonlymode = common.ConfigFile.return_config_item(config, "Readonly", "readonly", "string", "maintenance", True, True, replacement_section="Drupal")
    # This is the correct location for 'readonly' - note, respect the deprecated value as default
    readonlymode = common.ConfigFile.return_config_item(config, "Drupal", "readonly", "string", readonlymode)
    ### @TODO: deprecated, can be removed later
    config_export = common.ConfigFile.return_config_item(config, "Hooks", "config_export", "boolean", False, True, True, replacement_section="Drupal")
    # This is the correct location for 'config_export' - note, respect the deprecated value as default
    config_export = common.ConfigFile.return_config_item(config, "Drupal", "config_export", "boolean", config_export)

    # Can be set in the config.ini [Composer] section
    composer = common.ConfigFile.return_config_item(config, "Composer", "composer", "boolean", True)
    composer_lock = common.ConfigFile.return_config_item(config, "Composer", "composer_lock", "boolean", True)
    no_dev = common.ConfigFile.return_config_item(config, "Composer", "no_dev", "boolean", True)

    # Can be set in the config.ini [Testing] section
    # PHPUnit is in common/Tests because it can be used for any PHP application
    phpunit_run = common.ConfigFile.return_config_item(config, "Testing", "phpunit_run", "boolean", False)
    phpunit_fail_build = common.ConfigFile.return_config_item(config, "Testing", "phpunit_fail_build", "boolean", False)
    phpunit_group = common.ConfigFile.return_config_item(config, "Testing", "phpunit_group", "string", "unit")
    phpunit_test_directory = common.ConfigFile.return_config_item(config, "Testing", "phpunit_test_directory", "string", "www/modules/custom")
    phpunit_path = common.ConfigFile.return_config_item(config, "Testing", "phpunit_path", "string", "vendor/phpunit/phpunit/phpunit")
    # CodeSniffer itself is in common/Tests, but standards used here are Drupal specific, see drupal/DrupalTests.py for the wrapper to apply them
    codesniffer = common.ConfigFile.return_config_item(config, "Testing", "codesniffer", "boolean")
    codesniffer_extensions = common.ConfigFile.return_config_item(config, "Testing", "codesniffer_extensions", "string", "php,module,inc,install,test,profile,theme,info,txt,md")
    codesniffer_ignore = common.ConfigFile.return_config_item(config, "Testing", "codesniffer_ignore", "string", "node_modules,bower_components,vendor")
    codesniffer_paths = common.ConfigFile.return_config_item(config, "Testing", "codesniffer_paths", "string", "www/modules/custom www/themes/custom")
    # Regex check
    string_to_check = common.ConfigFile.return_config_item(config, "Testing", "string_to_check", "string")
    curl_options = common.ConfigFile.return_config_item(config, "Testing", "curl_options", "string", "sL")
    check_protocol = common.ConfigFile.return_config_item(config, "Testing", "check_protocol", "string", "https")

    # Set SSH key if needed
    # @TODO: this needs to be moved to config.ini for Code Enigma GitHub projects
    if "*****@*****.**" in repourl:
        ssh_key = "/var/lib/jenkins/.ssh/id_rsa_github"

    # Prepare Behat variables
    behat_config = None
    behat_tests_failed = False
    if config.has_section("Behat"):
        behat_config = DrupalTests.prepare_behat_tests(config, buildtype)

    # Pause StatusCake monitoring
    statuscake_paused = common.Utils.statuscake_state(statuscakeuser, statuscakekey, statuscakeid, "pause")

    # Run the tasks.
    # --------------
    execute(common.Utils.clone_repo, repo, repourl, branch, build, None, ssh_key, hosts=env.roledefs['app_all'])

    # Gitflow workflow means '/' in branch names, need to clean up.
    branch = common.Utils.generate_branch_name(branch)
    print "===> Branch is %s" % branch

    # Check the php_ini_file string isn't doing anything naughty
    malicious_code = False
    malicious_code = common.Utils.detect_malicious_strings([';', '&&'], php_ini_file)
    # Set CLI PHP version, if we need to
    if php_ini_file and not malicious_code:
        run("export PHPRC='%s'" % php_ini_file)

    # Set branches to be treated as feature branches
    # Regardless of whether or not 'fra' is set, we need to set 'branches'
    # our our existing_build_wrapper() function gets upset later.
    feature_branches = Drupal.drush_fra_branches(config, branch)

    # Now we have the codebase and a clean branch name we can figure out the Drupal version
    # Don't use execute() because it returns an array of values returned keyed by hostname
    drupal_version = int(DrupalUtils.determine_drupal_version(drupal_version, repo, branch, build, config))
    print "===> the drupal_version variable is set to %s" % drupal_version

    # Let's allow developers to perform some early actions if they need to
    execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='pre', hosts=env.roledefs['app_all'])

    # @TODO: This will be a bug when Drupal 9 comes out!
    # We need to cast version as an integer and use < 8
    if drupal_version < 8:
        import_config = False
    if drupal_version > 7 and composer is True:
        # Sometimes people use the Drupal Composer project which puts Drupal 8's composer.json file in repo root.
        with shell_env(PHPRC='%s' % php_ini_file):
            with settings(warn_only=True):
                if run("find %s/composer.json" % site_root).return_code == 0:
                    path = site_root
                else:
                    path = site_root + "/www"
            execute(common.PHP.composer_command, path, "install", None, no_dev, composer_lock)

    # Compile a site mapping, which is needed if this is a multisite build
    # Just sets to 'default' if it is not
    mapping = {}
    mapping = Drupal.configure_site_mapping(repo, mapping, config)

    # Record the link to the previous build
    previous_build = common.Utils.get_previous_build(repo, branch, build)

    # Run new installs
    for alias, site in mapping.iteritems():
        # Compile variables for feature branch builds (if applicable)
        FeatureBranches.configure_feature_branch(buildtype, config, branch, alias)
        print "===> Feature branch debug information below:"
        print "httpauth_pass: %s" % FeatureBranches.httpauth_pass
        print "ssl_enabled: %s" % FeatureBranches.ssl_enabled
        print "ssl_cert: %s" % FeatureBranches.ssl_cert
        print "ssl_ip: %s" % FeatureBranches.ssl_ip
        print "drupal_common_config: %s" % FeatureBranches.drupal_common_config
        print "featurebranch_url: %s" % FeatureBranches.featurebranch_url
        print "featurebranch_vhost: %s" % FeatureBranches.featurebranch_vhost

        if freshdatabase == "Yes" and buildtype == "custombranch":
            # For now custombranch builds to clusters cannot work
            dump_file = Drupal.prepare_database(repo, branch, build, buildtype, alias, site, syncbranch, env.host_string, sanitise, sanitised_password, sanitised_email)

        if FeatureBranches.featurebranch_url is not None:
            url = FeatureBranches.featurebranch_url
        url = common.Utils.generate_url(url, alias, branch)

        # Now check if we have a Drush alias with that name. If not, run an install
        with settings(hide('warnings', 'stderr'), warn_only=True):
            # Because this runs in Jenkins home directory, it will use 'system' drush
            if previous_build is None:
                print "===> Didn't find a previous build so we'll install this new site %s" % url
                initial_build_wrapper(url, www_root, repo, branch, build, site, alias, profile, buildtype, sanitise, config, db_name, db_username, db_password, mysql_version, mysql_config, dump_file, sanitised_password, sanitised_email, cluster, rds, drupal_version, import_config, webserverport, behat_config, autoscale, php_ini_file)
            else:
                # Otherwise it's an existing build
                existing_build_wrapper(url, www_root, site_root, site_link, repo, branch, build, buildtype, previous_build, alias, site, no_dev, config, config_export, drupal_version, readonlymode, notifications_email, autoscale, do_updates, import_config, fra, run_cron, feature_branches, php_ini_file)

            # After any build we want to run all the available automated tests
            # NOTE(review): `ssl_enabled` is referenced unqualified here while the
            # debug output above uses FeatureBranches.ssl_enabled — presumably the
            # name is imported into this module's namespace; confirm.
            # NOTE(review): the return value (tests_failed) is discarded, so
            # `behat_tests_failed` below never becomes True — verify intent.
            test_runner(www_root, repo, branch, build, alias, buildtype, url, ssl_enabled, config, behat_config, drupal_version, phpunit_run, phpunit_group, phpunit_test_directory, phpunit_path, phpunit_fail_build, site, codesniffer, codesniffer_extensions, codesniffer_ignore, codesniffer_paths, string_to_check, check_protocol, curl_options, notifications_email)

            # Now everything should be in a good state, let's enable environment indicator for this site, if present
            execute(Drupal.environment_indicator, www_root, repo, branch, build, buildtype, alias, site, drupal_version)

        # If this is a single site, we're done with the 'url' variable anyway
        # If this is a multisite, we have to set it to None so a new 'url' gets generated on the next pass
        url = None

    # Unset CLI PHP version if we need to
    if php_ini_file:
        run("export PHPRC=''")

    # Resume StatusCake monitoring
    if statuscake_paused:
        common.Utils.statuscake_state(statuscakeuser, statuscakekey, statuscakeid)

    # If this is autoscale at AWS, let's update the tarball in S3
    if autoscale:
        execute(common.Utils.tarball_up_to_s3, www_root, repo, branch, build, autoscale)

    #commit_new_db(repo, repourl, url, build, branch)
    execute(common.Utils.remove_old_builds, repo, branch, keepbuilds, hosts=env.roledefs['app_all'])

    script_dir = os.path.dirname(os.path.realpath(__file__))
    if put(script_dir + '/../util/revert', '/home/jenkins', mode=0755).failed:
        print "####### BUILD COMPLETE. Could not copy the revert script to the application server, revert will need to be handled manually"
    else:
        print "####### BUILD COMPLETE. If you need to revert this build, run the following command: sudo /home/jenkins/revert -b %s -d %s -s /var/www/live.%s.%s -a %s_%s" % (previous_build, previous_db, repo, branch, repo, branch)

    # If any of our tests failed, abort the job
    # r23697
    if behat_tests_failed:
        print "####### Some tests failed. Aborting the job."
        sys.exit(3)
def prepare_database(repo, branch, build, buildtype, alias, site, syncbranch, orig_host, sanitise, sanitised_password, sanitised_email, freshinstall=True):
    """Fetch or sync a database from the `syncbranch` site for a custom-branch build.

    If the source site lives on the same server, either dump it into the build's
    db/ directory (freshinstall=True) or drush sql-sync it straight into the
    target (freshinstall=False). If it lives on another server, switch Fabric's
    host, dump (optionally obfuscating with drupal-obfuscate.rb when
    sanitise == "yes"), copy the dump back via the Jenkins server, and either
    stage it in db/ (freshinstall) or import it into the target database.

    Returns the dump filename when a dump was staged for later import, or None
    when the import was already performed here. Raises SystemError on any
    unrecoverable step (missing syncbranch, missing source site, failed sync or
    import — after reverting the target database).
    """
    # Read the config.ini file from repo, if it exists
    config = common.ConfigFile.read_config_file()
    now = common.Utils._gen_datetime()
    dump_file = None

    if syncbranch is None:
        raise SystemError("######## Sync branch cannot be empty when wanting a fresh database when deploying a custom branch for the first time. Aborting early.")

    # Remember which host we started on so we can detect same-server syncs.
    current_env = env.host

    if not freshinstall:
        # Existing site: we need its database name so we can revert on failure.
        drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
        drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
        db_name = get_db_name(repo, branch, build, buildtype, site, drush_output)

    # If freshinstall is True, this occurs during an initial build, so we need to check if there's
    # a db/ directory, remove all .sql.bz2 files. If a db/ directory doesn't exist create one. If
    # this isn't a freshinstall, we don't need to do anything with the db/ directory
    with settings(warn_only=True):
        if freshinstall:
            if run("find /var/www/%s_%s_%s -maxdepth 1 -type d -name db | egrep '.*'" % (repo, branch, build)).return_code == 0:
                sudo("rm /var/www/%s_%s_%s/db/*.sql.bz2" % (repo, branch, build))
                print "===> Found a /db directory, so removed all .sql.bz2 files."
            else:
                run("mkdir -p /var/www/%s_%s_%s/db" % (repo, branch, build))
                print "===> Could not find a /db directory, so one was created."

    # Let's first get the hostname of the server where the site we want a fresh db from resides
    # Typically, the stage site had a buildtype of [stage], but the master/dev site has [dev]
    if config.has_section(syncbranch):
        sync_branch_host = config.get(syncbranch, repo)
    else:
        # We cannot find a section with that buildtype, so abort
        raise SystemError("######## Cannot find a buildtype %s in config.ini. Aborting." % syncbranch)

    # If sync_branch_host and current_env match, we don't need to connect to another
    # server to get the dump
    if sync_branch_host == current_env:
        # Check that the syncbranch site exists on this server
        syncbranch_site = common.Utils.get_previous_build(repo, syncbranch, None)
        if syncbranch_site is None:
            raise SystemError("######## Cannot find a site to sync the database from. Aborting.")

        # If freshinstall is True, this occurs during the initial build, so we create a new database
        # dump in the db/ directory which will be imported
        if freshinstall:
            print "===> Database to get a fresh dump from is on the same server. Getting database dump now..."
            # Time to dump the database and save it to db/
            # NOTE(review): --result-file=/dev/stdout is passed twice — harmless
            # duplication, but probably unintended.
            dump_file = "%s_%s.sql.bz2" % (alias, syncbranch)
            run('cd /var/www/live.%s.%s/www/sites/%s && drush -l %s -y sql-dump --result-file=/dev/stdout --result-file=/dev/stdout | bzip2 -f > /var/www/%s_%s_%s/db/%s' % (repo, syncbranch, site, site, repo, branch, build, dump_file))
        else:
            # Because freshinstall is False and the site we're syncing from is on the same server,
            # we can use drush sql-sync to sync that database to this one
            print "===> Database to sync to site is on the same server. Syncing %s database now..." % syncbranch
            run("drush @%s_%s -y sql-drop" % (alias, branch))
            if run("drush sql-sync -y @%s_%s @%s_%s" % (alias, syncbranch, alias, branch)).failed:
                common.MySQL.mysql_revert_db(db_name, build)
                raise SystemError("######## Could not sync %s database. Reverting the %s database and aborting." % (syncbranch, branch))
            else:
                print "===> %s database synced successfully." % syncbranch
            # For cases where we processed the import, we do not want to send dump_file back
            dump_file = None

    # If sync_branch_host and current_env don't match, the database to fetch to on another server
    else:
        # Point Fabric at the remote server that holds the source database.
        env.host = sync_branch_host
        env.user = "******"
        env.host_string = '%s@%s' % (env.user, env.host)
        print "===> Switching host to %s to get database dump..." % env.host_string

        # Check that the syncbranch site exists on this server
        syncbranch_site = common.Utils.get_previous_build(repo, syncbranch, None)
        # Check the site exists on the host server. If not, abort
        if syncbranch_site is None:
            raise SystemError("######## Cannot find a site to sync the database from. Aborting.")

        if sanitise == "yes":
            script_dir = os.path.dirname(os.path.realpath(__file__))
            if put(script_dir + '/../util/drupal-obfuscate.rb', '/home/jenkins', mode=0755).failed:
                raise SystemExit("######## Could not copy the obfuscate script to the application server, aborting as we cannot safely sanitise the live data")
            else:
                print "===> Obfuscate script copied to %s:/home/jenkins/drupal-obfuscate.rb - obfuscating data" % env.host
                with settings(hide('running', 'stdout', 'stderr')):
                    # Extract database credentials via drush status so mysqldump
                    # can be piped through the obfuscator.
                    drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, syncbranch, site)
                    dbname_output = DrupalUtils.drush_command("status -l %s Database\ name" % site, drush_site=None, drush_runtime_location=drush_runtime_location, drush_sudo=False, drush_format=None, drush_path=None, www_user=False)
                    dbuser_output = DrupalUtils.drush_command("status -l %s Database\ user" % site, drush_site=None, drush_runtime_location=drush_runtime_location, drush_sudo=False, drush_format=None, drush_path=None, www_user=False)
                    dbpass_output = DrupalUtils.drush_command("--show-passwords status -l %s Database\ pass" % site, drush_site=None, drush_runtime_location=drush_runtime_location, drush_sudo=False, drush_format=None, drush_path=None, www_user=False)
                    dbhost_output = DrupalUtils.drush_command("status -l %s Database\ host" % site, drush_site=None, drush_runtime_location=drush_runtime_location, drush_sudo=False, drush_format=None, drush_path=None, www_user=False)
                    # Each drush line looks like "Database name : foo"; awk field 4 is the value.
                    dbname = run("echo \"%s\" | awk {'print $4'} | head -1" % dbname_output)
                    dbuser = run("echo \"%s\" | awk {'print $4'} | head -1" % dbuser_output)
                    dbpass = run("echo \"%s\" | awk {'print $4'} | head -1" % dbpass_output)
                    dbhost = run("echo \"%s\" | awk {'print $4'} | head -1" % dbhost_output)
                run('mysqldump --single-transaction -c --opt -Q --hex-blob -u%s -p%s -h%s %s | /home/jenkins/drupal-obfuscate.rb | bzip2 -f > ~jenkins/dbbackups/custombranch_%s_%s.sql.bz2' % (dbuser, dbpass, dbhost, dbname, alias, now))
        else:
            run('cd /var/www/live.%s.%s/www/sites/%s && drush -l %s -y sql-dump | bzip2 -f > ~jenkins/dbbackups/custombranch_%s_%s.sql.bz2' % (repo, syncbranch, site, site, alias, now))

        print "===> Fetching the database from the remote server..."
        dump_file = "custombranch_%s_%s_from_%s.sql.bz2" % (alias, now, syncbranch)
        # Pull the dump down to the Jenkins server, then tidy up on the remote.
        get('~/dbbackups/custombranch_%s_%s.sql.bz2' % (alias, now), '/tmp/dbbackups/%s' % dump_file)
        run('rm ~/dbbackups/custombranch_%s_%s.sql.bz2' % (alias, now))

        # Switch back to original host and send the database dump to it
        env.host_string = orig_host
        print "===> Host string is now %s..." % env.host_string
        print "===> Sending database dump to host..."
        # If freshinstall is True, this is for an initial build, so we just need to copy the database
        # into the db/ directory and do nothing else with it. If freshinstall is False, this is to
        # sync the chosen database to the custom branch site, so we copy it to /home/jenkins/dbbackups
        # then import it
        if freshinstall:
            local('scp /tmp/dbbackups/%s %s:/var/www/%s_%s_%s/db/' % (dump_file, env.host_string, repo, branch, build))
        else:
            local('scp /tmp/dbbackups/%s %s:~/dbbackups/' % (dump_file, env.host_string))
            print "===> Importing the %s database into %s..." % (syncbranch, branch)
            # Need to drop all tables first in case there are existing tables that have to be ADDED
            # from an upgrade
            run("drush @%s_%s -y sql-drop" % (alias, branch))
            with settings(warn_only=True):
                if run("bzcat ~/dbbackups/%s | drush @%s_%s sql-cli" % (dump_file, alias, branch)).failed:
                    common.MySQL.mysql_revert_db(db_name, build)
                    raise SystemError("######## Cannot import %s database into %s. Reverting database and aborting." % (syncbranch, alias))
                else:
                    if sanitise == "yes":
                        if sanitised_password is None:
                            sanitised_password = common.Utils._gen_passwd()
                        if sanitised_email is None:
                            sanitised_email = 'example.com'
                        print "===> Sanitising database..."
                        run("drush @%s_%s -y sql-sanitize --sanitize-email=%s+%%uid@%s --sanitize-password=%s" % (alias, branch, alias, sanitised_email, sanitised_password))
                        print "===> Data sanitised, email domain set to %s+%%uid@%s, passwords set to %s" % (alias, sanitised_email, sanitised_password)
                    print "===> %s database imported." % syncbranch
                    # For cases where we processed the import, we do not want to send dump_file back
                    # NOTE(review): dump_file is set to None here but the cleanup
                    # commands below interpolate it — they would run `rm .../None`.
                    # Presumably cleanup should use the filename before it is
                    # cleared; confirm against the original (un-mangled) source.
                    dump_file = None
            # Tidying up on host server
            run("rm ~/dbbackups/%s" % dump_file)
            # Tidying up on Jenkins server
            local('rm /tmp/dbbackups/%s' % dump_file)

    # Send the dump_file back for later use
    return dump_file
def main(repo, repourl, build, branch, buildtype, url=None, profile="minimal", keepbuilds=10, runcron="False", doupdates="Yes", freshdatabase="Yes", syncbranch=None, sanitise="no", statuscakeuser=None, statuscakekey=None, statuscakeid=None, importconfig="yes", restartvarnish="yes", cluster=False, webserverport='8080', rds=False, composer=True):
    """Multisite Drupal deployment task.

    Clones the repo, detects new site installs within the multisite mapping and
    provisions them (database, vhost, drush alias, cron, permissions), then runs
    updates/feature reverts, switches the live symlink and clears caches.
    StatusCake monitoring is paused for the duration of the build.

    NOTE(review): `config` is referenced throughout but never assigned in this
    function — presumably it is a module-level global populated elsewhere;
    confirm, otherwise this raises NameError at the first use.
    """
    dontbuild = False

    # Define variables
    drupal_version = None
    user = "******"
    mapping = {}

    global varnish_restart
    varnish_restart = restartvarnish

    readonlymode = "maintenance"
    fra = False
    config_export = False
    previous_build = ""
    previous_db = ""
    statuscake_paused = False
    behat_config = None
    tests_failed = False

    # Set SSH key if needed
    ssh_key = None
    if "*****@*****.**" in repourl:
        ssh_key = "/var/lib/jenkins/.ssh/id_rsa_github"

    # Define primary host
    common.Utils.define_host(config, buildtype, repo)
    # Define server roles (if applicable)
    common.Utils.define_roles(config, cluster)
    # Check where we're deploying to - abort if nothing set in config.ini
    if env.host is None:
        raise ValueError("===> You wanted to deploy a build but we couldn't find a host in the map file for repo %s so we're aborting." % repo)

    # Set our host_string based on user@host
    env.host_string = '%s@%s' % (user, env.host)

    # Make sure /var/www/config exists
    execute(Multisite.create_config_dir)

    # Compile variables for feature branch builds (if applicable)
    FeatureBranches.configure_feature_branch(buildtype, config, branch)
    print "Feature branch debug information below:"
    print "httpauth_pass: %s" % FeatureBranches.httpauth_pass
    print "ssl_enabled: %s" % FeatureBranches.ssl_enabled
    print "ssl_cert: %s" % FeatureBranches.ssl_cert
    print "ssl_ip: %s" % FeatureBranches.ssl_ip
    print "drupal_common_config: %s" % FeatureBranches.drupal_common_config

    # Prepare variables for various Drupal tasks
    if config.has_section("Features"):
        fra = config.getboolean("Features", "fra")
        if fra:
            branches = Drupal.drush_fra_branches(config)
    readonlymode = Drupal.configure_readonlymode(config)

    # Compile a site mapping, which is needed if this is a multisite build
    mapping = Multisite.configure_site_mapping(repo, mapping, config)

    # These are our standard deployment hooks, such as config_export
    # All of the standard hooks are in hooks/StandardHooks.py
    # First, declare the variables that relate to our hooks
    # An example would be:
    # [Hooks]
    # config_export: True
    #
    config_export = Drupal.configure_config_export(config)

    # Prepare Behat variables
    if config.has_section("Behat"):
        behat_config = DrupalTests.prepare_behat_tests(config, buildtype)

    # Set a URL if one wasn't already provided and clean it up if it was
    url = common.Utils.generate_url(url, repo, branch)

    # Pause StatusCake monitoring
    statuscake_paused = common.Utils.statuscake_state(statuscakeuser, statuscakekey, statuscakeid, "pause")

    # Run the tasks.
    # --------------
    # If this is the first build, attempt to install the site for the first time.
    if dontbuild:
        print "===> Not actually doing a proper build. This is a debugging build."
    else:
        execute(common.Utils.clone_repo, repo, repourl, branch, build, None, ssh_key)

        # Gitflow workflow means '/' in branch names, need to clean up.
        branch = common.Utils.generate_branch_name(branch)
        print "===> Branch is %s" % branch

        # Let's allow developers to perform some early actions if they need to
        execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='pre', hosts=env.roledefs['app_all'])

        # Because execute() returns an array of values returned keyed by hostname
        drupal_version = DrupalUtils.determine_drupal_version(drupal_version, repo, branch, build, config)
        print "===> Set drupal_version variable to %s" % drupal_version

        # Note: drupal_version is a *string* here (no int() cast in this task).
        if drupal_version != '8':
            importconfig = "no"
        if drupal_version == '8' and composer is True:
            execute(Drupal.run_composer_install, repo, branch, build)

        # Provision any sites in the mapping that have never been installed here.
        new_sites = Multisite.check_for_new_installs(repo, branch, build, mapping)
        if new_sites is not None:
            execute(Multisite.new_site_live_symlink, repo, branch, build, mapping, new_sites)
            execute(Multisite.new_site_files, repo, branch, build, mapping, new_sites)
            execute(Multisite.new_site_create_database, repo, branch, build, buildtype, profile, mapping, new_sites, drupal_version, cluster, rds, config)
            execute(Multisite.new_site_copy_settings, repo, branch, build, mapping, new_sites)
            execute(Multisite.new_site_force_dbupdate, repo, branch, build, mapping, new_sites)
            execute(Multisite.new_site_build_vhost, repo, branch, mapping, new_sites, webserverport)
            execute(Multisite.generate_drush_alias, repo, branch, mapping, new_sites)
            execute(Multisite.generate_drush_cron, repo, branch, mapping, new_sites)
            execute(Multisite.new_site_fix_perms, repo, branch, mapping, new_sites, drupal_version)

        # NOTE(review): in the mangled original the nesting of the next three
        # calls under `if new_sites is not None:` was ambiguous; they are kept
        # unconditional here as they apply to existing sites too — confirm.
        execute(Multisite.backup_db, repo, branch, build, mapping, new_sites)
        execute(Multisite.adjust_files_symlink, repo, branch, build, mapping, new_sites)
        execute(Multisite.adjust_settings_php, repo, branch, build, buildtype, mapping, new_sites)

        # Let's allow developers to perform some actions right after Drupal is built
        execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='mid', hosts=env.roledefs['app_all'])

        #environment_indicator(repo, branch, build, buildtype)
        execute(Multisite.drush_status, repo, branch, build, buildtype, mapping, new_sites, revert_settings=True)

        if doupdates == 'yes':
            execute(Multisite.drush_updatedb, repo, branch, build, buildtype, mapping, new_sites, drupal_version)
        if fra:
            if branch in branches:
                execute(Multisite.drush_fra, repo, branch, build, buildtype, mapping, new_sites, drupal_version)

        #drush_status(repo, branch, build, revert=True)
        # This will revert the database if it fails (maybe hook_updates broke ability to bootstrap)
        execute(common.Utils.adjust_live_symlink, repo, branch, build, hosts=env.roledefs['app_all'])
        execute(Multisite.secure_admin_password, repo, branch, build, mapping, drupal_version)
        execute(common.Services.clear_php_cache, hosts=env.roledefs['app_all'])
        execute(common.Services.clear_varnish_cache, hosts=env.roledefs['app_all'])
        for alias, buildsite in mapping.iteritems():
            execute(Multisite.drush_cache_clear, repo, branch, build, buildsite, drupal_version, hosts=env.roledefs['app_primary'])

        # Let's allow developers to perform some post-build actions if they need to
        execute(common.Utils.perform_client_deploy_hook, repo, branch, build, buildtype, config, stage='post', hosts=env.roledefs['app_all'])

    # Resume StatusCake monitoring
    if statuscake_paused:
        common.Utils.statuscake_state(statuscakeuser, statuscakekey, statuscakeid)

    execute(common.Utils.remove_old_builds, repo, branch, keepbuilds, hosts=env.roledefs['app_all'])
def main(repo, repourl, build, branch, buildtype, keepbuilds=10, config_filename='config.ini'):
    """Lightweight deployment task: clone, composer install, adjust config, relink.

    Unlike the full deploy task this does not install or update Drupal — it
    clones the new build, runs composer (Drupal 8+), adjusts settings.php,
    drushrc.php and the files symlink for every site in the mapping, flips the
    live symlink and clears PHP/Varnish caches. The deploy host is expected to
    be defined by the CI job, not resolved here.
    """
    # Read the config.ini file from repo, if it exists
    config = common.ConfigFile.buildtype_config_file(buildtype, config_filename)

    # We don't need to define a host, as that should be defined in the Jenkins job (or whatever CI is being used)
    # Define server roles (if applicable)
    common.Utils.define_roles(config, False, None)

    user = "******"
    application_directory = "www"
    www_root = "/var/www"
    site_root = www_root + '/%s_%s_%s' % (repo, branch, build)
    site_link = www_root + '/live.%s.%s' % (repo, branch)

    # Set our host_string based on user@host
    env.host_string = '%s@%s' % (user, env.host)

    ssh_key = common.ConfigFile.return_config_item(config, "Build", "ssh_key")

    # Can be set in the config.ini [Drupal] section
    ### @TODO: deprecated, can be removed later
    drupal_version = common.ConfigFile.return_config_item(config, "Version", "drupal_version", "string", None, True, True, replacement_section="Drupal")
    # This is the correct location for 'drupal_version' - note, respect the deprecated value as default
    drupal_version = common.ConfigFile.return_config_item(config, "Drupal", "drupal_version", "string", drupal_version)

    # Can be set in the config.ini [Composer] section
    composer = common.ConfigFile.return_config_item(config, "Composer", "composer", "boolean", True)
    composer_lock = common.ConfigFile.return_config_item(config, "Composer", "composer_lock", "boolean", True)
    no_dev = common.ConfigFile.return_config_item(config, "Composer", "no_dev", "boolean", True)

    # Set SSH key if needed
    # @TODO: this needs to be moved to config.ini for Code Enigma GitHub projects
    if "*****@*****.**" in repourl:
        ssh_key = "/var/lib/jenkins/.ssh/id_rsa_github"

    # Run the tasks.
    # --------------
    execute(common.Utils.clone_repo, repo, repourl, branch, build, None, ssh_key, hosts=env.roledefs['app_all'])

    # Gitflow workflow means '/' in branch names, need to clean up.
    branch = common.Utils.generate_branch_name(branch)
    print "===> Branch is %s" % branch

    drupal_version = int(DrupalUtils.determine_drupal_version(drupal_version, repo, branch, build, config))
    print "===> the drupal_version variable is set to %s" % drupal_version

    if drupal_version > 7 and composer is True:
        # Sometimes people use the Drupal Composer project which puts Drupal 8's composer.json file in repo root.
        with settings(warn_only=True):
            if run("find %s/composer.json" % site_root).return_code == 0:
                path = site_root
            else:
                path = site_root + "/" + application_directory
        execute(common.PHP.composer_command, path, "install", None, no_dev, composer_lock)

    # Compile a site mapping, which is needed if this is a multisite build
    # Just sets to 'default' if it is not
    mapping = {}
    mapping = Drupal.configure_site_mapping(repo, mapping, config)

    # Per-site configuration adjustments for the new build directory.
    for alias, site in mapping.iteritems():
        execute(AdjustConfiguration.adjust_settings_php, repo, branch, build, buildtype, alias, site, www_root, application_directory)
        execute(AdjustConfiguration.adjust_drushrc_php, repo, branch, build, site, www_root, application_directory)
        execute(AdjustConfiguration.adjust_files_symlink, repo, branch, build, alias, site, www_root, application_directory)

    execute(common.Utils.adjust_live_symlink, repo, branch, build, hosts=env.roledefs['app_all'])

    # Final clean up and run tests, if applicable
    execute(common.Services.clear_php_cache, hosts=env.roledefs['app_all'])
    execute(common.Services.clear_varnish_cache, hosts=env.roledefs['app_all'])
    execute(common.Utils.remove_old_builds, repo, branch, keepbuilds, hosts=env.roledefs['app_all'])
def main(shortname, staging_branch, prod_branch, synctype='both', fresh_database='no', sanitise='yes', sanitised_password=None, sanitised_email=None, staging_shortname=None, remote_files_dir=None, staging_files_dir=None, sync_dir=None, config_filename='config.ini', db_import_method='drush'): # Set the variables we need. drupal_version = None app_dir = "www" # If we didn't get a staging shortname, we should set it to shortname if staging_shortname is None: staging_shortname = shortname # Run the tasks. # -------------- # If this is the first build, attempt to install the site for the first time. with settings(warn_only=True): site_exists = common.Utils.get_previous_build(staging_shortname, staging_branch, 0) if site_exists is None: raise SystemError( "You can't sync to a site if it hasn't been set up yet in this environment." ) else: print "===> We found the site, so we'll continue with the sync" path_to_drupal = site_exists print "===> Path is %s" % path_to_drupal path_to_config_file = path_to_drupal + '/' + config_filename drupal_config = common.ConfigFile.read_config_file( path_to_config_file, False, True, True) if config.has_section(shortname): orig_host = "%s@%s" % (env.user, env.host) # Get Drupal version ### @TODO: deprecated, can be removed later drupal_version = common.ConfigFile.return_config_item( config, "Version", "drupal_version", "string", None, True, True, replacement_section="Drupal") # This is the correct location for 'drupal_version' - note, respect the deprecated value as default drupal_version = common.ConfigFile.return_config_item( config, "Drupal", "drupal_version", "string", drupal_version) drupal_version = int( DrupalUtils.determine_drupal_version(drupal_version, shortname, staging_branch, 0, drupal_config, 'sync')) # Allow developer to run a script prior to a sync common.Utils.perform_client_sync_hook(path_to_drupal, staging_branch, 'pre', config_filename) stage_drupal_root = path_to_drupal + '/' + app_dir mapping = {} mapping = 
Drupal.configure_site_mapping(shortname, mapping, drupal_config, method="sync", branch=staging_branch) for alias, site in mapping.iteritems(): # Need to check invididual sites are setup on both stage and prod # There could be a time where a site is setup on stage, but hasn't been # setup on prod yet, so we can't sync that particular site stage_site_exists = DrupalUtils.check_site_exists( site_exists, site) env.host = config.get(shortname, 'host') env.user = config.get(shortname, 'user') env.host_string = '%s@%s' % (env.user, env.host) prod_site_exists = DrupalUtils.check_site_exists( '/var/www/live.%s.%s' % (shortname, prod_branch), site) env.host_string = orig_host if stage_site_exists and prod_site_exists: print "Both the stage and prod sites exist. Continue with sync." else: SystemError( "One of the sites does not exist. Fail the sync early.") # Database syncing if synctype == 'db' or synctype == 'both': Sync.backup_db(staging_shortname, staging_branch, stage_drupal_root, site) Sync.sync_db(orig_host, shortname, staging_shortname, staging_branch, prod_branch, fresh_database, sanitise, sanitised_password, sanitised_email, config, drupal_version, stage_drupal_root, app_dir, site, db_import_method) # Allow developer to run a script mid-way through a sync common.Utils.perform_client_sync_hook(path_to_drupal, staging_branch, 'mid-db', config_filename) Sync.drush_updatedb(orig_host, staging_shortname, staging_branch, stage_drupal_root, site) # Files syncing (uploads) if synctype == 'files' or synctype == 'both': Sync.sync_assets(orig_host, shortname, staging_shortname, staging_branch, prod_branch, config, app_dir, remote_files_dir, staging_files_dir, sync_dir, site, alias) # Allow developer to run a script mid-way through a sync common.Utils.perform_client_sync_hook(path_to_drupal, staging_branch, 'mid-files') # Cleanup Sync.clear_caches(orig_host, staging_shortname, staging_branch, drupal_version, stage_drupal_root, site) env.host_string = orig_host 
common.Services.clear_php_cache() common.Services.clear_varnish_cache() common.Services.reload_webserver() # Allow developer to run a script after a sync common.Utils.perform_client_sync_hook(path_to_drupal, staging_branch, 'post', config_filename) else: raise SystemError( "Could not find this shortname %s in the sync.ini so we cannot proceed." % staging_shortname)