def deploy_regression_tests():
    """Sync the regression test feeds to the demo data area on the target host."""
    _needs_targetenv()
    target_dir = env.demo_data_root + "/regression"
    if not exists(target_dir):
        # First deploy: create the directory and hand it to the deploy user.
        sudo("mkdir -p %(target_dir)s" % venv())
        sudo("chown %(user)s %(target_dir)s" % venv())
    rsync_project(target_dir,
                  "%s/testfeeds/regression/" % env.projectroot,
                  exclude=".*")
def ping_start_collect_feed(default_feed=None):
    """Ask the rinfo-main collector to schedule collect of source feeds.

    If *default_feed* is given it is scheduled first; on failure a message is
    printed and the function returns early. For the 'regression' target the
    demosource feed is scheduled (failure raises); otherwise all feeds listed
    in the target's datasources.n3 are scheduled, skipping the canonical
    rinfo current feed.
    """
    _needs_targetenv()
    # Fix: compute collector_url before any use. The original built the
    # default_feed curl arguments via vars() before collector_url was
    # assigned, which raised KeyError whenever default_feed was supplied.
    main_host_and_port = (env.roledefs['main'][0] if env.target != 'dev_unix'
                          else "%s:8180" % env.roledefs['main'][0])
    collector_url = "http://%s/collector" % main_host_and_port
    if default_feed:
        if not verify_url_content(
                " --data 'feed=%(default_feed)s' %(collector_url)s" % vars(),
                "Scheduled collect of",
                alternate_string_exists_in_content="is already scheduled for collect"):
            print("Failed to start collect of '%s'" % default_feed)
            return
    if env.target == 'regression':
        feed_url = "http://%s/feed/current.atom" % env.roledefs['demosource'][0]
        if not verify_url_content(
                " --data 'feed=%(feed_url)s' %(collector_url)s" % vars(),
                "Scheduled collect of"):
            raise Exception("Scheduled collect failed")
    else:
        filename = "%(resources)s/%(target)s/datasources.n3" % venv()
        # Fix: close the datasources file deterministically.
        with open(filename, 'r') as read_file:
            for line in read_file:
                line = line.strip()
                if not line.startswith('iana:current'):
                    continue
                # Feed URL is enclosed in <...> on the iana:current line.
                start_index = line.index('<') + 1
                end_index = line.index('>')
                feed_url = line[start_index:end_index].strip()
                if feed_url == 'http://rinfo.lagrummet.se/feed/current':
                    continue
                if not verify_url_content(
                        " --data 'feed=%(feed_url)s' %(collector_url)s" % vars(),
                        "Scheduled collect of",
                        alternate_string_exists_in_content="is already scheduled for collect"):
                    print("Failed to start collect of '%s'" % feed_url)
def package(deps="1", test="1"):
    """Builds and packages the rinfo-checker war, configured for the target env.

    deps: "1" to (re)build the local rinfo library packages first.
    test: "1" to run maven tests; "0" to skip them.
    """
    if int(deps):
        local_lib_rinfo_pkg(test)
    _needs_targetenv()
    # NOTE: venv() appears to expose caller locals, so the template below
    # depends on this local being named `flags`.
    flags = "" if int(test) else "-Dmaven.test.skip=true"
    local("cd %(java_packages)s/rinfo-checker/ && "
          "mvn %(flags)s -P%(target)s clean package war:war" % venv(),
          capture=False)
def package(deps="1", test="1"):
    """Builds and packages the rinfo-service war, configured for the target env."""
    if int(deps):
        app.local_lib_rinfo_pkg(test)
    _needs_targetenv()
    # NOTE: venv() appears to expose caller locals — the command template
    # below references this local by the name `flags`.
    flags = "" if int(test) else "-Dmaven.test.skip=true"
    command = ("cd %(java_packages)s/rinfo-service/ && "
               "mvn %(flags)s -P%(target)s clean package war:war" % venv())
    local(command, capture=False)
def upload(dataset):
    """Upload the transformed demo data depot to the demo server."""
    _can_handle_dataset(dataset)
    _needs_targetenv()
    if not exists(env.demo_data_root):
        # First upload: create the root and hand it to the deploy user.
        sudo("mkdir -p %(demo_data_root)s" % env)
        sudo("chown %(user)s %(demo_data_root)s" % env)
    rsync_project(env.demo_data_root,
                  "%(demodata_dir)s/%(dataset)s" % venv(),
                  exclude=".*",
                  delete=True)
def get_rdf_list_in_depot(type=""):
    """List .rdf entries under the main store depot, fetch and sort them locally.

    type: optional publ subdirectory to restrict the listing to.
    """
    _needs_targetenv()
    file_name = 'rdf_list_%s.txt' % env.target
    remote_file_and_path = '/tmp/%s' % file_name
    depot_dir = "%s/depot/publ/%s" % (env.rinfo_main_store, type)
    with cd(depot_dir):
        # NOTE(review): the '.' in the grep pattern is unescaped, so it
        # matches any character — confirm whether '\.rdf' was intended.
        run("find | grep .rdf > %s" % remote_file_and_path)
    get(remote_file_and_path, '/tmp/%s' % file_name)
    local('sort /tmp/%s > /tmp/sorted_%s' % (file_name, file_name))
def install_skrapat_source():
    """Fetch the skrapat test-feed archive over FTP and unpack it into demo data."""
    _needs_targetenv()
    create_path(env.mgr_workdir)
    create_path(env.demo_data_root, use_sudo=True)
    take_ownership(env.demo_data_root, use_sudo=True)
    # FTP credentials come from the local password store.
    ftp_username = get_value_from_password_store(PASSWORD_FILE_FTP_USERNAME_PARAM_NAME)
    ftp_password = get_value_from_password_store(PASSWORD_FILE_FTP_PASSWORD_PARAM_NAME)
    ftp_fetch(env.skrapat_compressed_file_name,
              env.testfeed_ftp_path,
              env.mgr_workdir,
              ftp_username,
              ftp_password)
    archive = '%s/%s' % (env.mgr_workdir, env.skrapat_compressed_file_name)
    untar(archive, env.demo_data_root)
def test():
    """Smoke-test the rinfo-main feed and dataset; write a JUnit report on failure."""
    _needs_targetenv()
    report = JUnitReport()
    # NOTE: venv() appears to expose caller locals, so the URL templates
    # below depend on this local being named `url`.
    url = "http://" + env.roledefs['main'][0]
    test_url(report,
             "Verify feed exists and contains 'tag:lagrummet.se,2009:rinfo'",
             "main.feed",
             "%(url)s/feed/current" % venv(),
             "tag:lagrummet.se,2009:rinfo")
    test_url(report,
             "Verify dataset exists and contains 'tag:lagrummet.se,2009:rinfo'",
             "main.dataset",
             "%(url)s/sys/dataset/rdf" % venv(),
             "tag:lagrummet.se,2009:rinfo")
    if report.empty():
        return
    file_name = "%(projectroot)s/testreport/main_test_report.log" % venv()
    report.create_report(file_name)
    print("Created report '%s'" % file_name)
def setup():
    """Creates necessary directories for rinfo-service runtime data."""
    _needs_targetenv()
    if not exists(env.dist_dir):
        run("mkdir %(dist_dir)s" % env)
    if not exists(env.rinfo_dir):
        sudo("mkdir %(rinfo_dir)s" % env)
    if not exists(env.rinfo_rdf_repo_dir):
        # The RDF repository is written by tomcat, so it must own the dir.
        sudo("mkdir %(rinfo_rdf_repo_dir)s" % env)
        sudo("chown %(tomcat_user)s %(rinfo_rdf_repo_dir)s" % env)
    if not exists(env.target_config_dir):
        sudo("mkdir %(target_config_dir)s" % env)
    # NOTE(review): the destination concatenates target_config_dir and the
    # filename with no '/' — assumes target_config_dir ends in '/'; confirm.
    put("%(java_packages)s/rinfo-service/src/environments/%(target)s/rinfo-service.properties" % env,
        "%(target_config_dir)srinfo-service.properties" % env,
        use_sudo=True)
def ping_start_collect_admin():
    """Schedule collect of the admin feed on the rinfo-main collector."""
    _needs_targetenv()
    # Pick the admin feed URL for the current target environment.
    if env.target == 'regression':
        feed_url = "http://%s/feed/current.atom" % env.roledefs['demosource'][0]
    elif env.target == 'dev_unix':
        feed_url = "http://%s:8280/feed/current.atom" % env.roledefs['admin'][0]
    else:
        feed_url = "http://%s/feed/current" % env.roledefs['admin'][0]
    # dev_unix runs rinfo-main on a non-default port.
    if env.target == 'dev_unix':
        main_host_and_port = "%s:8180" % env.roledefs['main'][0]
    else:
        main_host_and_port = env.roledefs['main'][0]
    collector_url = "http://%s/collector" % main_host_and_port
    scheduled = verify_url_content(
        " --data 'feed=%(feed_url)s' %(collector_url)s" % vars(),
        "Scheduled collect of")
    if not scheduled:
        raise Exception("Test failed")
def install_varnish():
    """Install varnish 3.0 from repo.varnish-cache.org and deploy its config files."""
    _needs_targetenv()
    # Trust the varnish apt repository, add it, and install a pinned version.
    gpg_key = local("curl -s 'http://repo.varnish-cache.org/debian/GPG-key.txt'",
                    capture=True)
    sudo("echo '%s' | apt-key add -" % gpg_key)
    sudo("echo 'deb http://repo.varnish-cache.org/debian/ wheezy varnish-3.0' >> /etc/apt/sources.list")
    sudo("apt-get update")
    sudo("apt-get install varnish=3.0.6-1~wheezy -y")
    stop_varnish()  # stop default daemon started by installation
    workdir = "%(workdir_varnish)s" % env
    for needed_dir in (workdir, workdir + "/cache"):
        if not exists(needed_dir):
            sudo("mkdir %s" % needed_dir)
    # Shared VCL, then target-specific VCL fragments.
    put(p.join(env.manageroot, "sysconf", "common", "varnish", "rinfo-service.vcl"),
        workdir, use_sudo=True)
    for vcl_name in ("backend.vcl", "host.vcl"):
        put(p.join(env.manageroot, "sysconf", "%(target)s" % env, "varnish", vcl_name),
            workdir, use_sudo=True)
    # Daemon defaults (listen address, storage, etc.).
    put(p.join(env.manageroot, "sysconf", "%(target)s" % env, "etc", "default", "varnish"),
        "/etc/default", use_sudo=True)
def _deploy_war_norestart(localwar, warname, headless=False):
    """Upload a war and drop it into tomcat's webapps without restarting tomcat.

    Touching the copied war lets tomcat's auto-deploy pick up the change.
    """
    _needs_targetenv()
    rsync_project("%(dist_dir)s/%(warname)s.war" % venv(), localwar, '--progress')
    for template in ("chmod 644 %(dist_dir)s/%(warname)s.war",
                     "cp %(dist_dir)s/%(warname)s.war %(tomcat_webapps)s/.",
                     "touch %(tomcat_webapps)s/%(warname)s.war"):
        run(template % venv())
def _prepare_mgr_work():
    """Stage install scripts and tomcat config in the manager work directory."""
    _needs_targetenv()
    install_dir = "%(mgr_workdir)s/install" % env
    mkdirpath(install_dir)
    put(p.join(env.manageroot, "sysconf", "install", "*.sh"), install_dir)
    put(p.join(env.manageroot, "sysconf", "common", "tomcat", "server.xml"),
        install_dir)
    mkdirpath("%(mgr_workdir)s/tomcat_pkg" % env)
def _deploy_war(localwar, warname, headless=False):
    """Upload a war and redeploy it, restarting tomcat around the swap."""
    _needs_targetenv()
    rsync_project("%(dist_dir)s/%(warname)s.war" % venv(), localwar, '--progress')
    with _managed_tomcat_restart(5, headless):
        # Replace the exploded webapp with a fresh unpack of the new war.
        run("rm -rf %(tomcat_webapps)s/%(warname)s/" % venv())
        run("unzip -q %(dist_dir)s/%(warname)s.war -d %(tomcat_webapps)s/%(warname)s"
            % venv())
def destroy_service_repository(default_repository='rinfo'):
    """Clear all statements from the given Sesame repository on the target host."""
    _needs_targetenv()
    clear_url = ("http://localhost:8080/sesame-workbench/repositories/%s/clear"
                 % default_repository)
    run("curl -X POST %s" % clear_url)
def setup():
    """Create the admin webroot and hand ownership to the deploy user."""
    _needs_targetenv()
    if exists(env.admin_webroot):
        return
    sudo("mkdir %(admin_webroot)s" % env)
    sudo("chown %(user)s %(admin_webroot)s" % env)
def ping_start_collect():
    """Schedule collect of the main feed on the rinfo-service collector."""
    _needs_targetenv()
    feed_url = "http://%s/feed/current" % env.roledefs['main'][0]
    collector_url = "http://%s/collector" % env.roledefs['service'][0]
    scheduled = verify_url_content(
        " --data 'feed=%(feed_url)s' %(collector_url)s" % vars(),
        "Scheduled collect of")
    if not scheduled:
        raise Exception("Test failed")
def ping_main():
    """Ping rinfo-main to (re-)collect the admin feed"""
    _needs_targetenv()
    feed_url = "http://%s/feed/current" % env.roledefs['admin'][0]
    collector_url = "http://%s/collector" % env.roledefs['main'][0]
    result = local("curl --data 'feed=%(feed_url)s' %(collector_url)s" % vars())
    print(result)
def test():
    """Run the casperjs regression suite against the rinfo-service host."""
    _needs_targetenv()
    # NOTE: venv() appears to expose caller locals — the command template
    # below depends on this local being named `url`.
    url = "http://" + env.roledefs['service'][0]
    regression_dir = env.projectroot + "/packages/java/rinfo-service/src/regression"
    with lcd(regression_dir):
        local("casperjs test . --xunit=%(projectroot)s/testreport/service_test_report.log --url=%(url)s"
              " --target=%(target)s --output=%(projectroot)s/testreport/" % venv())
def ban_varnish(ban_path=''):
    """Issue a BAN request to varnish for the given path, if varnish is configured."""
    _needs_targetenv()
    if not env.listen_ip_varnish:
        return
    sudo("curl -X BAN %s:%s/%s" % (env.listen_ip_varnish,
                                   env.listen_port_varnish,
                                   ban_path))
def start_varnish():
    """Start the varnish daemon via its init script on the target host."""
    _needs_targetenv()
    sudo("/etc/init.d/varnish start")
def start_elasticsearch():
    """Start the elasticsearch daemon via its init script on the target host."""
    _needs_targetenv()
    sudo("/etc/init.d/elasticsearch start")
def delete_elasticsearch_index(index_name='rinfo'):
    """Delete the given elasticsearch index on the target host."""
    _needs_targetenv()
    run("curl -XDELETE 'http://localhost:9200/%s/'" % index_name)