def install_gtfs_update_crontab(self):
    """Install and start a crontab that downloads and builds a GTFS data bundle nightly."""
    # Context for rendering the refresh shell script template.
    script_context = {
        'gtfs_dl_file': unix_path_join(self.data_dir, 'google_transit.zip'),
        'gtfs_static_url': self.gtfs_conf.get('gtfs_static_url'),
        'gtfs_dl_logfile': unix_path_join(self.data_dir, 'nightly_dl.out'),
        'federation_builder_folder': self.federation_builder_folder,
        'bundle_dir': self.bundle_dir,
        'extra_args': self.gtfs_conf.get('extra_bundle_build_args'),
        'user': self.user,
        'cron_email': self.aws_conf.get('cron_email'),
        'from_mailer': env.host_string,
    }

    # Make sure the script folder is present on the remote host.
    if not exists(self.script_dir):
        run('mkdir {0}'.format(self.script_dir))

    self.populate_and_upload_template_file(script_context, conf_helper,
                                           'gtfs_refresh.sh', self.script_dir)
    with cd(self.script_dir):
        run('chmod 755 gtfs_refresh.sh')

    # Render the crontab entry from its template and install it.
    with open(os.path.join(CONFIG_TEMPLATE_DIR, 'gtfs_refresh_crontab')) as template_file:
        crontab_template = template_file.read()
    crontab_context = {
        'cron_email': self.aws_conf.get('cron_email'),
        'logfile': unix_path_join(self.data_dir, 'nightly_bundle.out'),
        'script_folder': self.script_dir,
    }
    crontab_update(crontab_template.format(**crontab_context), 'gtfs_refresh_cron')
def crontabs():
    """Register the Zendesk cron jobs on production environments."""
    if env.environment_class not in ['production']:
        return

    # Fragments shared by both cron commands.
    workdir = '%s/%s' % (env.remote_project_path, 'current')
    python = '%sbin/python' % env.virtualenv_path

    # Refresh the open-ticket list hourly on weekdays.
    crontab.crontab_update(
        '0 * * * 1,2,3,4,5 cd %s && %s manage.py update_open_zendesk_tickets' % (workdir, python),
        'update-open-zendesk-tickets')

    # Must always run 30 mins after the update_open_zendesk_tickets
    # as it will read from the latest tickets results list generated by: update_open_zendesk_tickets
    crontab.crontab_update(
        '30 8 * * 1,2,3,4,5 cd %s && %s manage.py zendesk_ticket_report' % (workdir, python),
        'zendesk-ticket-report')
def install_monitor(self):
    """Clone, configure and launch the otvia2 monitoring app and its data-collection cron."""
    run('git clone https://github.com/evansiroky/otvia2-monitor.git')
    run('virtualenv -p /usr/bin/python otvia2-monitor')

    # gcc is needed so pycrypto can compile
    sudo('yum -y install gcc')

    monitor_conf = ConfHelper.get_config('monitor')
    with cd('otvia2-monitor'):
        # upload monitoring config
        run('mkdir config')
        put(os.path.join(CONFIG_DIR, 'monitor.ini'), 'config/')

        # install run scripts
        self.venv('python setup.py develop')

        # install node.js server
        server_cfg = {
            'server_admin_username': monitor_conf.get('server_admin_username'),
            'server_admin_password': monitor_conf.get('server_admin_password'),
            'server_access_username': monitor_conf.get('server_access_username'),
            'server_access_password': monitor_conf.get('server_access_password'),
        }
        put(ConfHelper.write_template(server_cfg, 'server.js'), 'config')
        web_cfg = {
            'google_analytics_tracking_id': monitor_conf.get('google_analytics_tracking_id'),
        }
        put(ConfHelper.write_template(web_cfg, 'web.js'), 'config')
        run('npm install')
        run('npm run build')

        # redirect port 80 to 3000 for node app
        sudo('iptables -t nat -I PREROUTING -p tcp --dport 80 -j REDIRECT --to-port 3000')
        sudo('service iptables save')

        # start server
        run('forever start -a --uid "otvia2-monitor" server/index.js')

    # install cron to run the monitor collection script
    with open(os.path.join(TEMPLATE_DIR, 'monitor_crontab')) as template_file:
        crontab_template = template_file.read()
    collect_script = unix_path_join(self.user_home, 'otvia2-monitor', 'bin', 'monitor')
    crontab_entry = crontab_template.format(cron_email=self.conf.get('cron_email'),
                                            path_to_monitor_script=collect_script)
    crontab_update(crontab_entry, 'otvia2_data_collection')
def install_custom_monitoring(self):
    """Installs a custom monitoring script to monitor memory and disk utilization."""
    # perl helpers required by the CloudWatch monitoring scripts
    sudo('yum -y install perl-DateTime perl-Sys-Syslog perl-LWP-Protocol-https')

    # download and unpack the scripts, then tidy up the archive
    run('wget http://aws-cloudwatch.s3.amazonaws.com/downloads/CloudWatchMonitoringScripts-1.2.1.zip')
    sudo('unzip CloudWatchMonitoringScripts-1.2.1.zip -d /usr/local')
    run('rm CloudWatchMonitoringScripts-1.2.1.zip')

    # render the monitoring crontab (reports every 5 minutes)
    cron_template = ('MAILTO={cron_email}\n'
                     '*/5 * * * * /usr/local/aws-scripts-mon/mon-put-instance-data.pl '
                     '--mem-util --disk-space-util --disk-path=/ --from-cron '
                     '--aws-access-key-id={aws_access_key_id} --aws-secret-key={aws_secret_key}')
    aws_logging_cron = cron_template.format(
        aws_access_key_id=self.conf.get('aws_access_key_id'),
        aws_secret_key=self.conf.get('aws_secret_access_key'),
        cron_email=self.conf.get('cron_email'))

    # start crontab for aws monitoring
    crontab_update(aws_logging_cron, 'aws_monitoring')
def setup():
    """
    Create container structure if it does not exist.
    Create the virtual environment if it does not exist.
    Upload configuration files for services.
    """
    _dynamic_env()

    # Create the on-disk layout; mkdir -p is idempotent, and warn_only
    # downgrades any failure to a warning so setup can continue.
    with warn_only():
        for path in ("%(container_path)s" % env,
                     "%(media)s" % env,
                     "%(container_path)s/media/ckeditor" % env,
                     "%(container_path)s/static" % env,
                     "%(container_path)s/logs" % env,
                     "%(container_path)s/etc" % env,
                     "%(container_path)s/tmp" % env,
                     "%(source_path)s" % env):
            env.run("mkdir -p %s" % path)

    # Nightly cron to purge expired Django sessions.
    clearsessions = "%(python)s %(source_path)s/manage.py clearsessions" \
        % env
    crontab_update("0 0 */1 * * %s > /dev/null 2>&1" % clearsessions,
                   "clearsessions_%(project_fullname)s" % env)

    if not exists(env.env_path):
        env.run("virtualenv %(env_path)s --python=%(bin_python)s" % env)

    # Upload every service config template from the local ./etc folder.
    # This mkdir runs outside warn_only, guaranteeing the target exists
    # even if the best-effort block above failed.
    etc_out_folder = "%s/etc" % env.container_path
    env.run("mkdir -p %s" % etc_out_folder)
    etc_in_folder = os.path.join(os.path.dirname(__file__), 'etc')
    for filename in os.listdir(etc_in_folder):
        conf_out = os.path.join(etc_out_folder, filename)
        upload_template(filename, conf_out, template_dir=etc_in_folder,
                        context={'env': env}, use_jinja=True)