Example #1
def debug():
    """
    Find files containing leftover debug statements (print / console.log)
    """
    clean()
    local('grep -ir "print" *')
    local('grep -ir "console.log" *')
Example #2
def delete_branch(branch_name):
    """ Delete branch """
    if branch_name == get_current_branch():
        abort('You have to switch to another branch')
    with fab_settings(warn_only=True):
        local("git branch -D %s" % branch_name)
        local("git push origin --delete %s" % branch_name)
Example #3
def migrate():
    for app in LOCAL_APPS:
        try:
            local('python manage.py schemamigration {app} --auto'.format(app=app), capture=False)
        except:
            # a failed schemamigration (e.g. nothing new to migrate) makes
            # local() abort; swallow it and continue with the next app
            pass
    local('python manage.py migrate', capture=False)
def upload_singal_file(filename=""):
    date=time.strftime("%m.%d.%H.%M.%S")
    with hide('stdout', 'stderr','running'):
        file_value = sudo("awk -F'|' '/\<%s\>/{print $1}' %s/revision.swf " %(filename,dest_path))
        if file_value.isdigit():
            new_file_value = int(file_value) + 1 
        elif len(file_value)==0:
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            print "\033[1;40;31mcan't find the file %s\033[0m" %filename
            print "\033[1;40;31mupdate file %s manually\033[0m" %filename
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            return 1
        else:
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            print "\033[1;40;31mfind more than one file %s in revision.swf\033[0m" %filename
            print "\033[1;40;31mupdate file %s manually\033[0m"  %filename
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            return 1
    with hide('stdout', 'stderr','running'):
        sudo("chmod -R 777 %s" %client_dest_dir)
    filename_path=sudo("find %s -name %s" %(dest_path,filename))
    if filename_path.count('\n'):
        print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
        print "\033[1;40;31mFind more than one file %s in %s\033[0m" %(filename,dest_path)
        print "\033[1;40;31mupdate file %s manually\033[0m"  %filename
        print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
        return 1
    sudo("cp %s %s/version_backup/%s.%s" %(filename_path,client_dest_dir,filename,date))
    sudo("cp %s/revision.swf %s/version_backup/revision.swf.%s" %(dest_path,client_dest_dir,date))
    sudo("rm -f %s" %filename_path)
    put("%s/%s" %(singal_file_dir,filename),os.path.dirname(filename_path))
    local("rm %s/%s" %(singal_file_dir,filename))
    with hide('stdout', 'stderr','running'):
        output=sudo("sed -i 's/%s\(.*%s.*\)/%s\\1/' %s/revision.swf" %(file_value,filename,new_file_value,dest_path))
    print "\033[1;40;33mUpdate file revision.swf successful\033[0m"
def rsync(fromDirectory, toDirectory, toServer=env.host, toUser=env.user):
    """
    Rsyncs from localhost to a remote directory
    
    :type fromDirectory: String
    :param fromDirectory: The directory which is to be copied from localhost
    :type toDirectory: String
    :param toDirectory: The directory to which to rsync remotely
    :type toServer: String
    :param toServer: The remote server to which the file is to be copied [Optional].
    :type toUser: String
    :param toUser: The remote user to use on the remote server [Optional].
        
    
    >>> fab -H test.uk rsync:fromDirectory=/tmp/licence.jar,toDirectory=/tmp
    >>> fab dev_server rsync:fromDirectory=/tmp/licence.jar,toDirectory=/tmp
    >>> fab -H test.uk rsync:fromDirectory=/tmp/licence.jar,toDirectory=/tmp,toServer=test2.uk,toUser=user2
        
    """

    if toServer is None:
        toServer = env.host

    rsync = _rsyncCommand_() + fromDirectory + " " + toUser + "@" + toServer + ":" + toDirectory
    local(rsync)
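The _rsyncCommand_() helper used above is not shown in this example. A minimal sketch of what it might return, assuming the usual archive/compress flags (the exact flags are an assumption, not part of the original example):

def _rsyncCommand_():
    """Hypothetical helper: build the rsync invocation prefix used by rsync().

    Assumes archive mode plus compression; the trailing space matters because
    rsync() concatenates the source path directly after this string.
    """
    return "rsync -avz "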
def _getLocationOfFile_(message):
    """
    Prompts the user for a path and checks that the file exists on the localhost.
    Returns the filepath supplied by the user. If the filepath does not exist, the function loops
    and asks the user again.
    
    :type message: String
    :param message: The prompt message shown to the user
    
    .. warning::
        Internal function not to be called directly from command line
    
    
    >>> licenceFile = _getLocationOfFile_("What is the location of the licence file on the localhost?")
    """

    fileExists = False
    iFile = None  #: holds the input filepath from the user

    while not (fileExists):
        iFile = prompt(message)
        fileExists = os.path.isfile(iFile)

        if not (fileExists):
            print(iFile + " does not exist. Please provide absolute path on the localhost")
        else:
            local('echo "File found"; ls -lrt ' + iFile, capture=False)

    return iFile
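For illustration only, a hypothetical task combining _getLocationOfFile_() with the rsync() task above (the task name and prompt text are made up for this sketch):

def push_licence():
    """Hypothetical example: prompt for a local file, then rsync it to the host."""
    licence_file = _getLocationOfFile_(
        "What is the location of the licence file on the localhost?")
    rsync(fromDirectory=licence_file, toDirectory="/tmp")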
Example #7
def update_go_the_distance_offices():

    print "started downloading Go the Distance Offices"

    local('cd d; curl "https://sheetsu.com/apis/b835e696" > go-the-distance-offices.csv')

    print "Finished Downloading Go the Distance offices"
Example #8
def todo():
    """
    Find all TODO and XXX
    """
    clean()
    local('grep -ir "TODO" *')
    local('grep -ir "XXX" *')
Example #9
def _register_deployment():
    branch = local('git rev-parse --abbrev-ref HEAD', capture=True)
    author = local('git log -1 --pretty=format:"%an"', capture=True)
    commit = local('git log -1 --pretty=format:"%B"', capture=True)
    git_url = f'https://github.com/CDE-UNIBE/qcat/tree/{branch}'

    sc = SlackClient(settings.SLACK_TOKEN)

    sc.api_call(
        'chat.postMessage',
        channel='server-info',
        username='******',
        text=f'Branch "{branch}" deployed: {git_url}',
        attachments=[
            {
                'pretext': 'Great success!',
                'title': commit,
                'title_link': git_url,
                'fields': [
                    {
                        'title': 'Branch',
                        'value': 'develop',
                        'short': False
                    },
                    {
                        'title': 'Author',
                        'value': author,
                        'short': False
                    }
                ],
                'image_url': 'https://qcat.wocat.net/static/assets/favicons/favicon-32x32.png'
            }
        ]
    )
Example #10
def docs(rebuild=False):
    # local('cp example_apps/*_app.py docs/source/includes/.')
    options=''
    if rebuild:
        options += ' -E'
    local('sphinx-build -b html -a {options} docs/source ../builds/flask_user1/docs'.format(options=options))
    local('cd ../builds/flask_user1/docs && zip -u -r flask_user1_docs *')
Example #11
def selenium_test():
    require('environment', provided_by=('staging', 'preview', 'demo', 'production', 'india'))
    prompt("Jenkins username:"******"jenkins_user", default="selenium")
    prompt("Jenkins password:"******"jenkins_password")
    url = env.selenium_url % {"token": "foobar", "environment": env.environment}
    local("curl --user %(user)s:%(pass)s '%(url)s'" % \
          {'user': env.jenkins_user, 'pass': env.jenkins_password, 'url': url})
Example #12
def create_schema():
    pass_parameter = ("-p" + mysql_pass) if mysql_pass != "" else ""
    local('mysql -u%(mysql_user)s %(pass_parameter)s -e "CREATE SCHEMA %(mysql_db_name)s"' % {
        "mysql_user": mysql_user,
        "pass_parameter": pass_parameter,
        "mysql_db_name": mysql_db_name,
        })
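create_schema() and drop_schema() below assume module-level MySQL settings; a minimal sketch of what those globals might look like (the values are placeholders, not part of the original example):

# Hypothetical module-level settings assumed by create_schema()/drop_schema().
# Real fabfiles usually read these from env or a separate config module.
mysql_user = "root"
mysql_pass = ""            # empty string means no -p<password> parameter is added
mysql_db_name = "myproject"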
Example #13
def create_release_archive(head='HEAD'):
    with lcd(PROJECT_ROOT_DIR):
        local('mkdir -p %s' % RELEASES_RELATIVE_PATH_DIR)
        local('git archive --worktree-attributes --format=tar.gz %s:ohr > %s' % (
            head,
            get_release_filepath()
        ))
Example #14
def copy():
    default_ignore_list = ['build.tar.gz', ]
    ignore_list = []

    if 'scp_ignore_list' in env:
        ignore_list = env.scp_ignore_list

    ignore_list = ignore_list + default_ignore_list

    path = get_local_app_path()
    release_path = paths.get_deploy_path(env.current_release)

    env.run('mkdir -p {}'.format(release_path))

    with lcd(path), cd(release_path):
        build_filename = 'build.tar.gz'
        build_remote_path = "/".join([env.current_release, build_filename])

        exclude_args = map(lambda x: '--exclude="{}"'.format(x), ignore_list)

        local('tar {} -czf {} *'.format(
            ' '.join(exclude_args),
            build_filename
        ))

        put(build_filename, build_remote_path)
        env.run('tar -xzf {}'.format(build_filename))

        env.run('rm {}'.format(build_remote_path))
        local("rm build.tar.gz")
Example #15
def drop_schema():
    pass_parameter = ("-p" + mysql_pass) if mysql_pass != "" else ""
    local('mysql -u%(mysql_user)s %(pass_parameter)s -e "DROP SCHEMA IF EXISTS %(mysql_db_name)s"' % {
        "mysql_user": mysql_user,
        "pass_parameter": pass_parameter,
        "mysql_db_name": mysql_db_name,
        })
def ipawho(ipafile=None, variant=None):
    """
    show the users in the ipa's file provisioning profile
    
    gets the device list from your ios dev account, then
    parses the IPA file's provisioning profile to show
    all the user names / emails that can install the ipa.
    
    Usage:
        ipawho:/path/to/ipa
    
    alias ipawho = "find . -name *ipa | xargs -I/ fab ipawho:/"
    """
    import csv
    from StringIO import StringIO
    execute('check_iosappdev_credentials', variant)
    appdevuser = os.environ.get('IOS_APPDEVUSER_%s_%s' % (app, variant))
    with settings(warn_only=True):
        device_csv = local('ios devices:list --format csv', capture=True)
    csvlist = StringIO(device_csv)
    devlist = {}
    for device in (csv.DictReader(csvlist) if device_csv != 'No devices found' else []):
        devid = device.get('Device Identifier').strip()
        user = device.get('Device Name')
        devlist[devid] = user
    ipainfo = local('ipa info %s' % ipafile, capture=True)
    parsing = False
    for line in ipainfo.split('\n'):
        if 'ProvisionedDevices' in line:
            parsing = True
        if 'TeamIdentifier' in line:
            parsing = False
        if parsing:
            uuid = line.replace('ProvisionedDevices', '').split('|')[2].strip()
            print uuid, devlist[uuid]
Example #17
def _get_version():
    with hide('running', 'stdout', 'stderr'):
        local('git fetch --tags', capture=True)
        tags = local('git tag -l', capture=True).splitlines()
        tags.sort(key=StrictVersion)
        if tags:
            return tags[-1]
Example #18
    def _run_task(self, task_command):
        options = {
            'pwd': self.working_directory(),
            'command': task_command
        }

        with fabric_settings(warn_only=True):
            with lcd(self.working_directory()):
                if _platform == "darwin":
                    script_command = "script %(pwd)s/frigg_testlog %(command)s"
                else:
                    script_command = "script %(pwd)s/frigg_testlog -c \"%(command)s\" -q "

                run_result = local(script_command % options)
                run_result = local(task_command)

                self.result.succeeded = run_result.succeeded
                self.result.return_code += "%s," % run_result.return_code

                log = 'Task: %(command)s\n' % options
                log += '------------------------------------\n'

                with file("%(pwd)s/frigg_testlog" % options, "r") as f:
                    log += f.read() + "\n"

                log += '------------------------------------\n'
                log += 'Exited with exit code: %s\n\n' % run_result.return_code
                self.result.result_log += log
                self.result.save()
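_run_task() expects self.result to expose succeeded, return_code, result_log and a save() method. A minimal stand-in for illustration (the real project presumably uses a persistent model):

class BuildResult(object):
    """Hypothetical result holder matching the attributes _run_task() touches."""

    def __init__(self):
        self.succeeded = True
        self.return_code = ""
        self.result_log = ""

    def save(self):
        # persist to a database or file in a real implementation
        pass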
Example #19
def publish():
    """
    Publish content
    """
    html()
    local('ghp-import _output')
    local('git push origin gh-pages')
Example #20
    def setUpClass(cls):
        BaseTestCase.setUpClass()

        # reload/reset configuration of VSM
        local('sudo apt-get install -y expect')
        cmd_reload = "{script} {vsm_ip} {login} {password}".format(
            script=os.path.join(PARENT_FOLDER_PATH,
                                'files/n1kv/telnet_vsm_reload.exp'),
            vsm_ip = cls.vsm_ip, login=cls.vsm_login,
            password=cls.vsm_password)
        local(cmd_reload)
        time.sleep(60*1)

        local_conf = LOCAL_CONF.format(
            neutron_repo=cls.neutron_repo,
            neutron_branch=cls.neutron_ref,
            net_cisco_repo=cls.net_cisco_repo,
            net_cisco_ref=cls.net_cisco_ref,
            Q_PLUGIN_EXTRA_CONF_PATH=Q_PLUGIN_EXTRA_CONF_PATH,
            Q_PLUGIN_EXTRA_CONF_FILES=Q_PLUGIN_EXTRA_CONF_FILES,
            VSM_IP=cls.vsm_ip,
            VSM_LOGIN=cls.vsm_login,
            VSM_PASSWORD=cls.vsm_password,
            UVEM_DEB=os.path.join(WORKSPACE, UVEM_DEB))

        cls.devstack.local_conf = local_conf
        cls.devstack.clone()
Example #21
def run_django(port="0.0.0.0:8000"):
    """
        Run django test server on open port, so it's accessible outside Vagrant.
    """
    commands = []

    if settings.RUN_TASKS_ASYNC:
        print("Starting background celery process. Warning: this has a documented memory leak, and developing with"
              " RUN_TASKS_ASYNC=False is usually easier unless you're specifically testing a Django-Celery interaction.")
        commands.append('celery -A perma worker --loglevel=info -B')

    # Only run the webpack background process in debug mode -- with debug False, dev server uses static assets,
    # and running webpack just messes up the webpack stats file.
    if settings.DEBUG:
        commands.append('npm start')

    proc_list = [subprocess.Popen(command, shell=True, stdout=sys.stdout, stderr=sys.stderr) for command in commands]

    try:
        try:
            # use runserver_plus if installed
            import django_extensions  # noqa
            # use --reloader-type stat because:
            #  (1) we have to have watchdog installed for pywb, which causes runserver_plus to attempt to use it as the reloader, which depends on inotify, but
            #  (2) we are using a Vagrant NFS mount, which does not support inotify
            # see https://github.com/django-extensions/django-extensions/pull/1041
            local("python manage.py runserver_plus %s --threaded --reloader-type stat" % port)
        except ImportError:
            local("python manage.py runserver %s" % port)
    finally:
        for proc in proc_list:
            os.kill(proc.pid, signal.SIGKILL)
Example #22
def deploy():
    docker_host = 'tcp://%s:%d' % (env['host'], options.port)
    with shell_env(DOCKER_TLS_VERIFY="1",
                   DOCKER_HOST=docker_host):
        local('docker-compose %s' % options.extra)
Example #23
def setup_website():
    """
    Setup the website

    Copy the config files and restart the httpd daemon
    """
    local('sudo cp /home/ec2-user/projects/{0}/{0}.httpd.conf /etc/httpd/conf.d'.format(env.project_name))
Example #24
def start_daemons():
    """
    Start the BOINC daemons

    Run the BOINC script to start the daemons
    """
    local('cd /home/ec2-user/projects/{0}; bin/start'.format(env.project_name))
Example #25
def sauce_tunnel():
    """
        Set up Sauce tunnel before running functional tests targeted at localhost.
    """
    if subprocess.call(['which','sc']) == 1: # error return code -- program not found
        sys.exit("Please check that the `sc` program is installed and in your path. To install: https://docs.saucelabs.com/reference/sauce-connect/")
    local("sc -u %s -k %s" % (settings.SAUCE_USERNAME, settings.SAUCE_ACCESS_KEY))
Example #26
def initial_create_databases():
    """Create databases and/or db users."""

    databases = config('databases')

    # TODO: Currently we only do the 'default' database
    # TODO: And we don't copy it from production to staging yet.
    for dbname in databases:
        if dbname != 'default':
            print("Skipped database '%s'." % (dbname,))
            continue

        dbinfo = databases[dbname]

        if is_production_database(dbname):
            warning = "Database '{name}' on '{host} is a production database."
            print(red(warning.format(name=dbinfo['NAME'], host=['HOST'])))

        if confirm("Create user %s on %s?" % (dbinfo['USER'], dbinfo['HOST']),
               default=False):
            print(green("You will be prompted for a password. " +
                  "The site wants to use  %s .")
                  % (dbinfo['PASSWORD'],))
            print(green("\nIf it asks for another password, then that is " +
                        "postgres'\npassword. On the staging server, that " +
                        "is 'postgres'.\nIt might be different elsewhere.\n"))
            # Use warn-only so that the script doesn't halt if the user
            # exists already
            with settings(warn_only=True):
                local('createuser -h {host} -U postgres --pwprompt {user}'.
                      format(host=dbinfo['HOST'], user=dbinfo['USER']))
        create_individual_database(dbname)
def upload_client(host="",server_name="" ):
    '''upload file 
    '''

    with hide('stdout', 'stderr'):
        exist_client=sudo("if test -e %s ;then echo True;else echo False;fi" %dest_path )
        exist_bakdir=sudo("if test -e %s/version_backup ;then echo True;else echo False;fi" %client_dest_dir )

    if os.path.exists(local_path):
        with hide('stdout', 'stderr'):
            with lcd(client_src_dir):
                print "entering dir %s" %client_src_dir
                local("tar czvf new_client.tar.gz client",capture=False)
                put("new_client.tar.gz" , client_dest_dir)
    else:
        print "\033[1;40;31mdirectory %s is not exist in %s\033[0m" %(local_path,env.host_string)
        sys.exit(1)

    if exist_bakdir=="True":
        pass
    else:
        sudo("mkdir %s/version_backup" %client_dest_dir)

    if exist_client=="True":
        with hide('stdout', 'stderr'):
            with cd(client_dest_dir):
                print "entering dir %s" %client_dest_dir
                date=time.strftime("%m.%d.%H.%M.%S")
                sudo("mv client version_backup/client.%s" %date)
                sudo("tar xzvf new_client.tar.gz")
    else:
        print "\033[1;40;31mnot exists client in directory %s\033[0m" %client_dest_dir
        with hide('stdout', 'stderr'):
            with cd(client_dest_dir):
                sudo("tar xzvf new_client.tar.gz")
Example #28
def create_individual_database(dbname):
    """Create a single database. Used by initial_create_databases
    and copy_databases."""

    dbinfo = config('databases')[dbname]

    if 'postgis' in dbinfo['ENGINE']:
        if confirm("Create database %s on %s with template postgis?" % (
                dbinfo['NAME'], dbinfo['HOST']), default=False):
            print(green("The password required is that of user 'postgres'." +
                        " Often equal to 'postgres'."))
            cmd = ('createdb -h {host} -U postgres ' +
                   '--template=template_postgis --owner={user} {database}')
            # Use warn-only so that the script doesn't halt if the db
            # exists already
            with settings(warn_only=True):
                local(cmd.format(host=dbinfo['HOST'], user=dbinfo['USER'],
                                 database=dbinfo['NAME']))
    else:
        if confirm("Create database %s on %s?" % (
                dbinfo['NAME'], dbinfo['HOST']), default=False):
            print(green("The password required is that of user 'postgres'." +
                        " Often equal to 'postgres'."))
            cmd = ('createdb -h {host} -U postgres ' +
                   '--owner={user} {database}')
            # Use warn-only so that the script doesn't halt if the db
            # exists already
            with settings(warn_only=True):
                local(cmd.format(host=dbinfo['HOST'], user=dbinfo['USER'],
                                 database=dbinfo['NAME']))
Example #29
def commit_locale_changes():
    """Commit locale changes on the remote server and pull them in locally"""
    fix_locale_perms()
    with cd(env.code_root):
        sudo('-H -u %s git add commcare-hq/locale' % env.sudo_user, user=env.sudo_user)
        sudo('-H -u %s git commit -m "updating translation"' % env.sudo_user, user=env.sudo_user)
    local('git pull ssh://%s%s' % (env.host, env.code_root))
Example #30
def test():
    """
    Run unit tests for this Django Application
    """
    if len(APPS) == 0:
        return
    local('./manage.py test %s' % TESTS)
Example #31
def clone(repo_url=''):
    if len(repo_url) == 0:
        print "Usage: fab new:<repo_url>"
        return

    if not create_vagrantfile():
        return False

    # We'll create a temporary copy to get the appropriate settings for deployment
    if exists("temp"):
        if not confirm("This project already exists, continue anyways?"):
            return False
    else:
        local("mkdir temp")
        local("git clone %s temp/." % repo_url)

    #TODO: We still won't have any vagrant_settings.py or FABRIC in settings.py after we deploy and delete temp
    # Check to see if this project is ready for vagrant deployment - if not we'll 'upgrade' it
    if not exists("temp/deploy/vagrant_settings.py"):
        local("cp %s/vagrant_settings.py temp/deploy/vagrant_settings.py" % os.path.dirname(os.path.realpath(__file__)))

    # Add the current directory to our sys path so we can import fabric_settings
    sys.path.append(os.getcwd())

    os.chdir("temp")
    sys.path.append(os.getcwd())

    #TODO: Auto add fabric_settings.py if it doesn't exist
    # fabric_settings = import_module("fabric_settings")
    fabric_settings = __import__("fabric_settings", globals(), locals(), [], 0)

    #TODO: Auto add vagrant to fabric_settings.py if it doesn't exist
    env.settings = fabric_settings.FABRIC
    if 'vagrant' not in env.settings:
        print 'Please set up "vagrant" mode in fabric_settings.py and rerun this command'
        return False

    # And we're ready to go
    up()

    # Now clean up temporary copy
    os.chdir("..")
    local("rm -rf temp")
Example #32
def storeValue(server, probe, timestamp, server_up):
    try:
        messages = []
        current_ip = str(env.host_string).split('@')[-1]
        start = datetime.datetime.utcnow().replace(tzinfo=utc)

        messages.append("Executing sensor '%s'" % probe.display_name)

        with settings(warn_only=True):
            if current_ip in ["127.0.0.1", "localhost"]:
                output = local(probe.ssh_command, capture=True)
            elif probe.use_sudo:
                output = sudo(probe.ssh_command, shell=False, pty=False)
            else:
                output = run(probe.ssh_command, shell=False, pty=False)

        success = output.succeeded

        if not success:
            messages.append(output.stderr)
        try:
            for python_command in probe.python_parse.splitlines():
                exec(python_command)
        except:
            messages.append(traceback.format_exc())
            success = False

        if not success:
            output = "No data" if probe.graph_type.name == 'text' else "0"

        if probe.graph_type.name == 'text':
            Measure.objects.filter(server=server, probe=probe).delete()
            messages.append("\t-> Extracted value is %d characters long" % len(output))
        else:
            messages.append("\t-> Extracted values : %s" % output)
        Measure.objects.create(timestamp=timestamp, server=server, probe=probe, value=str(output))
    except:
        messages.append(traceback.format_exc())
        success = False
    finally:
        total_time = datetime.datetime.utcnow().replace(tzinfo=utc) - start
        duration = (total_time.seconds * 1000000) + total_time.microseconds
        messages.append("\t-> Sensor executed %s in %.2f seconds" % ("successfully" if success else "with errors", duration / 1000000.0))
        messages.append("-" * 40)
        return success, messages
Example #33
def releases():
    """List a releases made"""
    env.releases = sorted(
        local('ls -x %(releases_path)s' % {
            'releases_path': env.releases_path
        }, capture=True).split())
    if len(env.releases) >= 1:
        env.current_revision = env.releases[-1]
        env.current_release = "%(releases_path)s/%(current_revision)s" % {
            'releases_path': env.releases_path,
            'current_revision': env.current_revision
        }
    if len(env.releases) > 1:
        env.previous_revision = env.releases[-2]
        env.previous_release = "%(releases_path)s/%(previous_revision)s" % {
            'releases_path': env.releases_path,
            'previous_revision': env.previous_revision
        }
Example #34
def test_python(apps=_default_tests, travis=False):
    """ Run Python tests. """
    if os.environ.get('DOCKERIZED'):
        print(
            "\n\n\n!!!!!!\n!!!!!!\nWarning! Webrecorder requires test-specific settings in this context.\n"
            +
            "Be sure to edit services/docker/webrecorder/wr.env appropriately and\n"
            +
            "reload the WR containers by re-running `docker-compose up` before running tests.\n!!!!!!\n!!!!!!\n\n\n"
        )

    # .pyc files can contain filepaths; this permits easy switching
    # between a Vagrant- and Docker-based dev environment
    local("find . -name '*.pyc' -delete")

    # In order to run functional_tests, we have to run collectstatic, since functional tests use DEBUG=False
    # For speed we use the default Django STATICFILES_STORAGE setting here, which also has to be set in settings_testing.py
    if "functional_tests" in apps and not os.environ.get('SERVER_URL'):
        local(
            "DJANGO__STATICFILES_STORAGE=django.contrib.staticfiles.storage.StaticFilesStorage python manage.py collectstatic --noinput"
        )

    # temporarily set MEDIA_ROOT to a tmp directory, in a way that lets us clean up after ourselves
    tmp = tempfile.mkdtemp()
    try:
        shell_envs = {
            'DJANGO__MEDIA_ROOT':
            os.path.join(tmp, '')  #join ensures path ends in /
        }
        with shell_env(**shell_envs):
            # NB: all arguments to Fabric tasks are interpreted as strings
            if travis == 'True':
                local(
                    "pytest %s --no-migrations --ds=perma.settings --cov --cov-report= "
                    % (apps))
            else:
                local(
                    "pytest %s --no-migrations --ds=perma.settings.deployments.settings_testing --cov --cov-report= "
                    % (apps))
    finally:
        # clean up after ourselves
        shutil.rmtree(tmp)
Example #35
def get_instances(properties_file):
    """Returns list of all instances in the cluster specified by properties_file"""
    result = local(
        "whirr list-cluster --config={0} --quiet".format(properties_file),
        capture=True)
    instances = []
    for line in result.split('\n'):
        instance_info = line.split('\t')
        instance = Instance()
        instance.identity = instance_info[0]
        instance.ami = instance_info[1]
        instance.external_ip = instance_info[2]
        instance.internal_ip = instance_info[3]
        instance.state = instance_info[4]
        instance.zone = instance_info[5]
        instance.roles = instance_info[6]
        instances.append(instance)
    return instances
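get_instances() assumes an Instance container with plain attributes; a minimal sketch (the field names are taken from the assignments above, everything else is an assumption):

class Instance(object):
    """Hypothetical value object for one whirr cluster instance."""

    def __init__(self):
        self.identity = None
        self.ami = None
        self.external_ip = None
        self.internal_ip = None
        self.state = None
        self.zone = None
        self.roles = None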
Example #36
def _transfer_all_artifacts():
    services = "sessiond session_proxy pcrf ocs pipelined ingress"
    run(
        f'fab transfer_artifacts:services="{services}",'
        'get_core_dump=True',
    )
    # Copy log files out from the node
    local('mkdir cwf-artifacts')
    get('*.log', 'cwf-artifacts')
    if exists("coredump.tar.gz"):
        get('coredump.tar.gz', 'cwf-artifacts')
    local('sudo mkdir -p /tmp/logs/')
    local('sudo mv cwf-artifacts/* /tmp/logs/')
Example #37
def _find_matching_opensource_commit(
    magma_root: str,
    oss_repo: str = 'https://github.com/facebookincubator/magma.git',
) -> str:
    # Find corresponding hash in opensource repo by grabbing the message of the
    # latest commit to the magma root directory of the current repository then
    # searching for it in the open source repo
    commit_subj = local(
        f'git --no-pager log --oneline --pretty=format:"%s" '
        f'-- {magma_root} | head -n 1',
        capture=True)
    local('rm -rf /tmp/ossmagma')
    local('mkdir -p /tmp/ossmagma')
    local(f'git clone {oss_repo} /tmp/ossmagma/magma')
    with lcd('/tmp/ossmagma/magma'):
        oss_hash = local(
            f'git --no-pager log --oneline --pretty=format:"%h" '
            f'--grep=\'{commit_subj}\' | head -n 1',
            capture=True)
        return oss_hash
Example #38
def coverage() -> None:
    """Code coverage report."""
    if os.path.exists(COVERAGE_REPORT_HTML_DIR):
        shutil.rmtree(COVERAGE_REPORT_HTML_DIR)

    if os.path.isfile(COVERAGE_REPORT_FILE):
        os.remove(COVERAGE_REPORT_FILE)

    # initialize_db()
    local("coverage run --source='.' -m py.test -v")

    local("coverage report --skip-covered")
    local("coverage html")
Example #39
def _check_git_branch():
    env.revision = None
    with cd(env.vcs_root_dir):
        with settings(warn_only=True):
            # get branch information
            server_branch = sudo_or_run('git rev-parse --abbrev-ref HEAD')
            server_commit = sudo_or_run('git rev-parse HEAD')
            local_branch = local('git rev-parse --abbrev-ref HEAD',
                                 capture=True)
            default_branch = env.default_branch.get(env.environment, 'master')
            git_branch_r = sudo_or_run('git branch --color=never -r')
            git_branch_r = git_branch_r.split('\n')
            branches = [
                b.split('/')[-1].strip() for b in git_branch_r
                if 'HEAD' not in b
            ]

        # if all branches are the same, just stick to this branch
        if server_branch == local_branch == default_branch:
            env.revision = server_branch
        else:
            if server_branch == 'HEAD':
                # not on a branch - just print a warning
                print 'The server git repository is not on a branch'

            print 'Branch mismatch found:'
            print '* %s is the default branch for this server' % default_branch
            if server_branch == 'HEAD':
                print '* %s is the commit checked out on the server.' % server_commit
            else:
                print '* %s is the branch currently checked out on the server' % server_branch
            print '* %s is the current branch of your local git repo' % local_branch
            print ''
            print 'Available branches are:'
            for branch in branches:
                print '* %s' % branch
            print ''
            escaped_branches = [re.escape(b) for b in branches]
            validate_branch = '^(' + '|'.join(escaped_branches) + ')$'

            env.revision = prompt(
                'Which branch would you like to use on the server? (or hit Ctrl-C to exit)',
                default=default_branch,
                validate=validate_branch)
Example #40
def deploy_event_data():
    update_event_data()

    local(
        "aws s3 cp js/event-data.gz s3://map.ourrevolution.com/js/event-data.gz --metadata-directive REPLACE --content-encoding \"gzip\" --content-type \"text/javascript\" --region \"us-west-2\" --profile \"events-map\""
    )
    local(
        "aws s3 cp d/events.json s3://map.ourrevolution.com/d/events.json --metadata-directive REPLACE --content-type \"text/plain\" --region \"us-west-2\" --profile \"events-map\""
    )
    local(
        "aws s3 cp d/registration-data.csv s3://map.ourrevolution.com/d/registration-data.csv --metadata-directive REPLACE --content-type \"text/plain\" --region \"us-west-2\" --acl \"public-read\" --profile \"events-map\""
    )

    invalidate_cloudfront_event_cache()
Example #41
def extracting():
    for r in os.listdir(DIR):
        if r.lower().endswith(".rar"):

            if re.search(PATTERN, r):
                if r.lower().endswith("part1.rar") or r.lower().endswith(
                        "part01.rar") or r.lower().endswith("part001.rar"):
                    local(f"cd {DIR} && unrar x {r}")

            else:
                local(f"cd {DIR} && unrar x {r}")

        if r.lower().endswith(".zip"):
            local(f'cd {DIR} && powershell -command "Expand-Archive {r}"')
Example #42
def coverage() -> None:
    """Code coverage report."""
    if os.path.exists(COVERAGE_REPORT_HTML_DIR):
        shutil.rmtree(COVERAGE_REPORT_HTML_DIR)

    if os.path.isfile(COVERAGE_REPORT_FILE):
        os.remove(COVERAGE_REPORT_FILE)

    local("coverage run --source='.' manage.py {0}".format(
        get_test_command(parallel=False, fake_migrations=False,
                         keep_db=False)))

    local("coverage report --skip-covered")
    local("coverage html")
Example #43
def prepareFluidinfo(runTests):
    """Prepare a Fluidinfo source tarball using the local branch.

    @param runTests: A flag to determine if the test suite should be run.
    @return: The name of the revision being deployed as a string matching the
        format, C{<date>-<time>}.
    """
    if runTests:
        local('make build-clean build', capture=False)
        local('make check-all', capture=False)

    local('git archive --prefix=fluidinfo/ -v --format tar HEAD | '
          'bzip2 > fluidinfo.tar.bz2')
    return datetime.utcnow().strftime('%Y%m%d-%H%M')
Example #44
def deploy_dev_image():
    # create vagrant box
    nginx_ppth = "/home/%s/conf" % PROJECT
    box = os.path.join(LOCAL_PROJECT_DIR, 'package.box')
    remotebox = Path(env.home, 'www', 'app', 'devbox')
    sudo("rm -rf %s" % remotebox, user="******")
    sudo("mkdir -p %s" % remotebox, user="******")
    sudo("mkdir -p %s" % nginx_ppth, user="******")
    local(
        'cd %s && vagrant halt && vagrant package --base "vision_development" '
        % LOCAL_PROJECT_DIR)
    local('cd %s && vagrant up' % LOCAL_PROJECT_DIR)
    put(box, os.path.join(remotebox, 'package.box'))
    local('rm -f %s' % box)
Example #45
def launch_command(probes):
    outputs = {}

    if env.host_string == "127.0.0.1":
        server_up = True
    else:
        try:
            server_up = run("echo", shell=False, pty=False).succeeded
        except:
            server_up = False

    for probe in probes:
        if server_up:
            logger.debug("---> Sensor '%s'" %
                         probe.display_name.encode('utf-8'))
            try:
                if env.host_string == "127.0.0.1":
                    output = local(probe.ssh_command, capture=True)
                elif probe.use_sudo:
                    output = sudo(probe.ssh_command, shell=False, pty=False)
                else:
                    output = run(probe.ssh_command, shell=False, pty=False)
                for python_command in probe.python_parse.splitlines():
                    exec(python_command)
            except Exception as e:
                logger.exception(e)
                if probe.graph_type.name == 'linegraph':
                    output = 0
                elif probe.graph_type.name == 'pie':
                    output = "Inconnu;100"
                else:
                    output = "Aucune donnée"
        else:
            logger.debug("---> Sensor '%s' : server is unreachable..." %
                         probe.display_name.encode('utf-8'))
            if probe.graph_type.name == 'linegraph':
                output = 0
            elif probe.graph_type.name == 'pie':
                output = "Inconnu;100"
            else:
                output = "Aucune donnée"
        outputs[probe.id] = output
    return outputs
Example #46
def do_pack():

    if ((os.path.exists('./versions') is True)
            and (os.path.isfile('./versions') is False)):
        pass
    elif ((os.path.exists('./versions') is True)
          and (os.path.isfile('./versions') is True)):
        local("rm versions && mkdir versions")
    else:
        local("mkdir versions")

    current_date = datetime.datetime.now()
    file_name = current_date.strftime("web_static_%Y%m%d%H%M%S.tgz")
    command = "tar -cavf " + file_name + " web_static"
    move_file = "mv " + file_name + " versions"

    local(command)
    local(move_file)
    if (os.path.exists('./versions/' + file_name) is True):
        return (os.path.abspath('./versions/' + file_name))
    else:
        return None
Example #47
def chk_data_dir():
    """
    Distro-agnostic checking postgresql's data directory
    """
    mod = __import__('fabric_colors.distro.{0}.postgresql'.format(env.distro), fromlist=['{0}.postgresql'.format(env.distro)])
    DEFAULT_DATA_DIR = getattr(mod, 'DEFAULT_DATA_DIR')
    cmd = """if [ -e "{0}" ]; then echo 1; else echo ""; fi"""\
            .format(DEFAULT_DATA_DIR)

    if env.run == local:
        result = local(cmd, capture=True)
    else:
        result = env.run(cmd)

    if result:
        print(green("Postgresql server's data directory is present."))
    else:
        print(red("Postgresql server's data directory is not present."))

    return result
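chk_data_dir() expects each distro module under fabric_colors.distro to expose a DEFAULT_DATA_DIR constant. A sketch of what such a module might contain (the path is an assumption and varies by distro and PostgreSQL version):

# Hypothetical contents of fabric_colors/distro/debian/postgresql.py
DEFAULT_DATA_DIR = "/var/lib/postgresql/9.1/main"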
Example #48
    def _prepare_fs(self, cookbook_repo, branch, debug):
        ChefManager.install_chef_dk()
        ChefManager.create_chef_repo()
        with hide(*self.hidden_outputs):
            local('if [ ! -d eucalyptus-cookbook ]; then '
                  'git clone '
                  '{0} eucalyptus-cookbook;'
                  'fi'.format(cookbook_repo))
            local('cd eucalyptus-cookbook; git checkout {0};'.format(branch))
            local(
                'cd eucalyptus-cookbook; git pull origin {0};'.format(branch))
        ChefManager.download_cookbooks('eucalyptus-cookbook/Berksfile',
                                       os.path.join(self.chef_repo_dir +
                                                    '/cookbooks'),
                                       debug=debug)
Example #49
def local_git_branch_exists(branch):
    """Determines if a branch exists in the current git repository on your local
  machine.

  **NOTE:** The current working directory is assumed to be inside the git
  repository of interest.

  Args:
    branch(str): Name of the branch

  Returns:
    bool: True if the given branch exists, False otherwise

  >>> local_git_branch_exists("master")
  True
  >>> local_git_branch_exists("non_existent_branch")
  False
  """
    with settings(warn_only=True):
        return local("git show-ref --verify --quiet refs/heads/{}".format(
            branch)).succeeded
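For illustration only, a hypothetical wrapper that pairs this helper with delete_branch() from Example #2 (the wrapper itself is not part of either example):

def safe_delete_branch(branch_name):
    """Hypothetical glue task: delete the branch only if it exists locally."""
    if local_git_branch_exists(branch_name):
        delete_branch(branch_name)
    else:
        print("Branch {} does not exist locally".format(branch_name))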
Example #50
def babel(command):
    # Generate the .pot file from source code files
    if command == 'extract':
        local(
            'pybabel extract -F flask_user/translations/babel.cfg -k lazy_gettext -c NOTE -o flask_user/translations/flask_user.pot --project Flask-User --version v1.0.0.0 flask_user flask_user'
        )

    # Update .po files from the .pot file
    elif command == 'update':
        local(
            'pybabel update -i flask_user/translations/flask_user.pot --domain=flask_user --output-dir flask_user/translations'
        )
    elif command == 'compile':
        local(
            'pybabel compile -f --domain=flask_user --directory flask_user/translations'
        )
Example #51
def babel(command):
    # Generate the .pot file from source code files
    if command == 'extract':
        local(
            'pybabel extract -F flask_auth/translations/babel.cfg -k lazy_gettext -c NOTE -o flask_auth/translations/flask_auth.pot flask_auth flask_auth'
        )

    # Update .po files from the .pot file
    elif command == 'update':
        local(
            'pybabel update -i flask_auth/translations/flask_auth.pot --domain=flask_auth --output-dir flask_auth/translations'
        )
    elif command == 'compile':
        local(
            'pybabel compile -f --domain=flask_auth --directory flask_auth/translations'
        )
Example #52
def deploy(commit=False):
    if commit:
        print(cyan("Enter your git commit message"))
        msg = raw_input()
        local('git add .')
        local('git commit -am "%s"' % msg)
        print(green("Listing Branches"))
        local('git branch -a')
        print(cyan("Enter a branch name to push:"))
        branch = raw_input()
        local('git push origin %s' % branch)
        print(green("Deployment complete"))
    with cd(dir_in_server):
        print(magenta("Inside server"))
        run("git reset --hard || true")
        run("git pull origin master")
        run("source /home/cmpe273/env/bin/activate && pip install -r requirements.txt")
        with shell_env(DJANGO_SETTINGS_MODULE='twitter_analytics.settings.production'):
            run("source /home/cmpe273/env/bin/activate && ./manage.py collectstatic --noinput")
            run("source /home/cmpe273/env/bin/activate && ./manage.py migrate --no-initial-data")
        sudo("service apache2 restart")
        # sudo("supervisorctl restart celery")
    print(green("Deployment complete"))
Example #53
def _deploy_packages(repo: str, magma_root: str):
    repo_name = _get_repo_name(repo)

    # Grab all the build artifacts we need from the CI node
    get('/tmp/packages.tar.gz', 'packages.tar.gz')
    get('/tmp/packages.txt', 'packages.txt')
    get('/tmp/magma_version', 'magma_version')
    get(f'{repo_name}/{magma_root}/lte/gateway/release/magma.lockfile',
        'magma.lockfile')

    with open('magma_version') as f:
        magma_version = f.readlines()[0].strip()
    s3_path = f's3://magma-images/gateway/{magma_version}'
    local(f'aws s3 cp packages.txt {s3_path}.deplist')
    local(f'aws s3 cp magma.lockfile {s3_path}.lockfile')
    local(f'aws s3 cp packages.tar.gz {s3_path}.deps.tar.gz')
Example #54
def _env_set_distro():
    # Set env.distro

    with hide('running', 'stdout'):
        if env.run == local:
            name = local('uname -s', capture=True)
        else:
            name = env.run('uname -s')

        env.distro = None
        if name == "Darwin":
            env.distro = "mac"
        elif name == "Linux":
            result = env.run('cat /etc/*-release')
            import re
            result_list = re.findall(r'([^=\s,]+)=([^=\s,]+)', result)
            for item in result_list:
                if item[0] == 'ID':
                    env.distro = item[1]
                    return env.distro

    return env.distro
Example #55
def reset_local_db():
    """ Reset local database from remote host """
    require('code_root', provided_by=('production', 'staging'))
    if env.environment == 'production':
        utils.abort('Local DB reset is for staging environment only')
    question = 'Are you sure you want to reset your local '\
               'database with the %(environment)s database?' % env
    sys.path.append('.')
    if not console.confirm(question, default=False):
        utils.abort('Local database reset aborted.')
    local_db = loc['default']['NAME']
    remote_db = remote['default']['NAME']
    with settings(warn_only=True):
        local('dropdb %s' % local_db)
    local('createdb %s' % local_db)
    host = '%s@%s' % (env.user, env.hosts[0])
    local('ssh -C %s sudo -u commcare-hq pg_dump -Ox %s | psql %s' % (host, remote_db, local_db))
Example #56
def prepare_deploy():
    make_favicons()
    local("rm -rf stage")
    local("mkdir stage")
    local("cp -fR app/* stage/")
    asset_groups = {
        "css": config_stage.css,
        "javascripts_header": config_stage.javascripts_header,
        "javascripts_footer": config_stage.javascripts_footer
    }
    processed_groups, date = process_file_groups(asset_groups)

    process_templates(config_stage.source_path,
                      config_stage.dest_path,
                      config=processed_groups)

    minify(config_stage.minify_names, config_dev, date, source_dir="stage/")
Example #57
def generate_monitor_dist():
    local('cd %s && npm install -d' % (env.monitor_local_path))
    local('cd %s && bower install -d' % (env.monitor_local_path))
    local('cd %s && POLIGO_API_URL=http://%s grunt build' %
          (env.monitor_local_path, env.poliglo_api_domain))
    upload_file('%s/dist/' % env.monitor_local_path,
                '%s/' % env.deploy_path,
                user=env.deploy_user)
    monitor_path = '%s/monitor' % env.deploy_path
    if exists(monitor_path):
        sudo('mv %s/monitor %s/monitor.old' %
             (env.deploy_path, env.deploy_path),
             user=env.deploy_user)
    sudo('mv %s/dist %s/monitor' % (env.deploy_path, env.deploy_path),
         user=env.deploy_user)
    if exists(monitor_path + '.old'):
        sudo('rm -rf %s/monitor.old' % env.deploy_path)
Example #58
def pyflakes_file(filenames, verbose=False):
    """
    _pyflakes_file_

    Appyly pyflakes to file specified,
    return (filenames, score)
    """
    command = 'pyflakes ' + ' '.join(filenames)

    # we use fabric to run the pyflakes command, hiding the normal fab
    # output and warnings
    with hide('output', 'running', 'warnings'), settings(warn_only=True):
        result = local(command, capture=True)

    flakes = 0
    data = [x for x in result.split('\n') if x.strip()]
    if len(data) != 0:
        #We have at least one flake, find the rest
        flakes = count_flakes(data, verbose) + 1
    else:
        flakes = 0

    return filenames, flakes
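pyflakes_file() relies on a count_flakes() helper that is not shown. A plausible sketch, assuming it counts the warning lines after the first one (which the caller already adds) and optionally prints them:

def count_flakes(data, verbose=False):
    """Hypothetical helper: count the remaining pyflakes warnings.

    data is the list of non-blank pyflakes output lines; the caller counts the
    first flake itself, so only the rest are tallied here.
    """
    remaining = data[1:]
    if verbose:
        for line in remaining:
            print(line)
    return len(remaining)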
Example #59
def save_article():
    current_file = __file__
    current_dir = os.path.dirname(current_file)
    root_dir = os.path.abspath(
        os.path.join(os.path.join(current_dir, os.pardir), os.pardir))
    web_dir = os.path.join(root_dir, "validator")
    # fetching the data from the article form
    url_name = request.forms.get('url_name')
    article_data = request.forms.get('article_data')
    title = request.forms.get('title')
    description = request.forms.get('description')
    keywords = request.forms.get('keywords')

    #New Web Form
    new_post = url_name.replace(" ", "-") + ".html"

    # check whether the file already exists
    if os.path.isfile(os.path.join(web_dir, new_post)):
        pass

    #load the template
    util.jinja_render_file(
        web_dir, "template_render.html", new_post, {
            "title": title,
            "content": article_data,
            "description": description,
            "keywords": keywords
        })

    #Save the template
    from fabric.operations import local
    git_commit_path = local(
        "git add -A && git commit -m \"Added New Article\"")
    if git_commit_path.succeeded:
        print(git_commit_path.return_code)

    return redirect("file_share_util.html")
Example #60
    def atualizar(self, area_interesse):
        from speed.osm.models import AtualizacaoOpenStreetMaps
        atualizacao_osm = AtualizacaoOpenStreetMaps.objects.create(
            area_interesse=area_interesse, sucesso=False)

        caminho_osm = self.download(area_interesse)

        if caminho_osm:

            try:

                comando = self.COMANDO_IMPORTACAO_PADRAO.format(
                    self.password, self.host, self.user, self.style,
                    caminho_osm, self.database, self.cache_size)

                saida = local(comando)

                atualizacao_osm.sucesso = True

            except:
                atualizacao_osm.sucesso = False

            finally:
                atualizacao_osm.save()