def package(vcs='hg', all_deps="False"):
    """Build the magma gateway package on the target host.

    vcs: version control system used, "hg" or "git".
    all_deps: "True" to also download all apt dependency packages and
        upload them to AWS; any other value skips that step.

    Exits with status 1 if no deploy target (dev/test) selected `env.debug_mode`.
    """
    # Fabric passes CLI task arguments as strings, so coerce the flag here.
    all_deps = False if all_deps == "False" else True

    # If a host list isn't specified, default to the magma vagrant vm
    if not env.hosts:
        setup_env_vagrant()

    if not hasattr(env, 'debug_mode'):
        print("Error: The Deploy target isn't specified. Specify one with\n\n"
              "\tfab [dev|test] package")
        exit(1)

    # NOTE: renamed from `hash`, which shadowed the `hash` builtin.
    commit_hash = pkg.get_commit_hash(vcs)

    with cd('~/magma/lte/gateway'):
        print("Building magma package, picking up commit %s..." % commit_hash)
        run('make clean')
        build_type = "Debug" if env.debug_mode else "RelWithDebInfo"
        run('./release/build-magma.sh -h %s -t %s' % (commit_hash, build_type))

        # Generate magma dependency packages
        print("Generating magma dependency packages")
        with cd('python'):
            run('../release/pydep finddep -b ../python/setup.py')
            run('mv *.deb ../')

        # Collect all built .deb files into a fresh ~/magma-packages dir.
        run('rm -rf ~/magma-packages')
        run('mkdir -p ~/magma-packages')
        run('mv *.deb ~/magma-packages')

        if all_deps:
            pkg.download_all_pkgs()
            run('cp /var/cache/apt/archives/*.deb ~/magma-packages')
            pkg.upload_pkgs_to_aws()
def _push_archive_to_s3(vcs, target):
    """Zip the remote *target* directory, pull the archive to the local
    machine, and upload it to the magma-images S3 bucket.

    vcs: version control system used, "hg" or "git" (names the archive
        after the current commit hash).
    target: directory, on both remote and local hosts, used for staging.

    Returns the uploaded zip file's name.
    """
    base_name = "magma_feg_%s" % pkg.get_commit_hash(vcs)
    zip_name = "%s.zip" % base_name

    # Build the archive on the remote host, inside the staging dir.
    with cd(target):
        run('zip -r %s *' % (base_name))

    # Recreate a clean local staging dir, then fetch the archive into it.
    local("rm -rf %s" % target)
    local("mkdir -p %s" % target)
    get('%s/%s' % (target, zip_name), '%s/%s' % (target, zip_name))

    # Upload from the local staging dir to S3.
    with lcd(target):
        local('%s s3 cp %s s3://magma-images/feg/' % (AWS, zip_name))

    puts("Deployment bundle: s3://magma-images/feg/%s" % zip_name)
    return zip_name
def package(service, cloud_host="", vcs="hg", force="False",
            docker="False", version="latest"):
    """ Create deploy package and push to S3. This defaults to running
    on local vagrant cloud VM machines, but can also be pointed to an
    arbitrary host (e.g. amazon) by specifying a VM.

    cloud_host: The ssh address string of the machine to run the package
        command. Formatted as "<user>@<host>:<port>". If not specified,
        defaults to the `cloud` vagrant VM.

    vcs: version control system used, "hg" or "git".

    force: Bypass local commits or changes check if set to True.

    docker: Build package for deploying using docker

    version: Package version (used for docker pull)
    """
    # Fabric passes CLI task arguments as strings; decode the flags once.
    forced = (force == "True")
    use_docker = (docker == "True")

    # Refuse to package uncommitted work unless the caller forces it.
    if not forced and pkg.check_commit_changes():
        abort("Local changes or commits not allowed")

    _validate_service(service)

    # Use same temp folder name for local and VM operations
    folder = "/tmp/magmadeploy_%s" % service
    commit_hash = pkg.get_commit_hash(vcs)
    local("rm -rf %s" % folder)
    local("mkdir -p %s" % folder)

    if use_docker:
        zip_name = _package_docker_zip(service, folder, commit_hash, version)
    else:
        zip_name = _package_vagrant_zip(service, folder, cloud_host,
                                        commit_hash)

    # Push the zip archive to s3
    _push_archive_to_s3(service, folder, zip_name)
    local('rm -rf %s' % folder)
    return zip_name
def package(service, cloud_host=None, vcs="hg", force=False):
    """ Create deploy package and push to S3. This defaults to running
    on local vagrant cloud VM machines, but can also be pointed to an
    arbitrary host (e.g. amazon) by specifying a VM.

    cloud_host: The ssh address string of the machine to run the package
        command. Formatted as "<user>@<host>:<port>". If not specified,
        defaults to the `cloud` vagrant VM.

    vcs: version control system used, "hg" or "git".

    force: Bypass local commits or changes check if set to True.

    Returns the name of the uploaded zip file ("magma_<service>_<hash>.zip").
    """
    # Check that we have no local changes or commits at this point
    if not force and pkg.check_commit_changes():
        abort("Local changes or commits not allowed")

    _validate_service(service)

    # Point Fabric at the explicit host, or fall back to the vagrant VM.
    if cloud_host:
        env.host_string = cloud_host
        (env.user, _, _) = split_hoststring(cloud_host)
    else:
        _vagrant()

    # Use same temp folder name for local and VM operations
    folder = "/tmp/magmadeploy_%s" % service
    local("rm -rf %s" % folder)
    local("mkdir -p %s" % folder)
    run("rm -rf %s" % folder)
    run("mkdir -p %s" % folder)

    # Stage the CodeDeploy appspec, scripts and ansible roles for the
    # requested service from the deploy tree into the staging folder.
    with cd('magma/orc8r/cloud/deploy'):
        run('cp -pr aws/%s_appspec.yml %s/appspec.yml' % (service, folder))
        run('cp -pr aws/scripts %s/.' % folder)
        run("mkdir -p %s/ansible/roles" % folder)
        run('cp -pr %s.yml %s/ansible/main.yml' % (service, folder))
        run('cp -pr roles/%s %s/ansible/roles/.' % (service, folder))
        run('cp -pr roles/aws_setup %s/ansible/roles/.' % folder)
        run('cp -pr roles/osquery %s/ansible/roles/.' % folder)
        run('cp -pr roles/service_registry %s/ansible/roles/.' % folder)
        # Service-specific extras on top of the common roles above.
        if service == "controller":
            run('cp -pr /etc/magma %s/configs' % folder)
            run('cp -pr files/scripts/setup_swagger_ui %s/scripts/.' % folder)
            run('cp -pr files/static/apidocs %s/.' % folder)
        if service == "proxy":
            run('cp -pr /etc/magma %s/configs' % folder)
            run('cp -pr roles/disk_metrics %s/ansible/roles/.' % folder)
            run('cp -pr ../../../orc8r/tools/ansible/roles/pkgrepo '
                '%s/ansible/roles/.' % folder)
        if service == 'prometheus':
            run('cp -pr roles/prometheus %s/ansible/roles/.' % folder)
        run('mkdir -p %s/bin' % folder)  # To make CodeDeploy happy

    # Build Go binaries and plugins
    build()
    run('cp -pr go/plugins %s' % folder)
    _copy_go_binaries(service, folder)

    # Zip and push to s3
    pkg_name = "magma_%s_%s" % (service, pkg.get_commit_hash(vcs))
    _push_archive_to_s3(service, folder, pkg_name)
    # Clean up the staging folder on both the VM and the local machine.
    run('rm -rf %s' % folder)
    local('rm -rf %s' % folder)
    return "%s.zip" % pkg_name