Example #1
 def put_file(self, path):
     # Create a temporary file in self.tmp to receive the copy.
     temp_file = str(sh.mktemp("-p", self.tmp).stdout,'utf8').strip()
     path = path.strip()
     # Copy and hash in one pass: dd reads the source, tee writes the temp
     # copy, and sha1sum hashes the stream.  Quote the source path with
     # whichever quote character it does not itself contain.
     if "'" in path:
         returncode, stdout, stderr = launch_command(
             "dd if=\"{0}\" iflag=nofollow bs=4k | tee {1} | sha1sum".format(
                 path,
                 temp_file
             )
         )
     else:
         returncode, stdout, stderr = launch_command(
             "dd if='{0}' iflag=nofollow bs=4k | tee {1} | sha1sum".format(
                 path,
                 temp_file
             )
         )
     if returncode != 0:
         print(stdout)
         print(stderr)
         raise UnableToHashFile("File : {0}".format(path))
     # sha1sum prints "<hex digest>  -"; keep only the leading hex digest.
     hash_str = re.search("^[0-9a-f]*", str(stdout,'utf8')).group(0)
     destination_folder = self.create_destination_folder(hash_str)
     destination_path = os.path.join(destination_folder, hash_str)
     if not self.is_stored(hash_str):
         # First copy of this content: move it into place and make it read-only.
         sh.mv(temp_file, destination_path)
         sh.chmod("444", destination_path)
     else:
         # Content already stored: discard the redundant temporary copy.
         sh.rm(temp_file)
     return destination_path
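
The example above copies a file and hashes it in a single pass by piping `dd` through `tee` into `sha1sum`, then files the copy under its digest so identical content is stored only once. A minimal pure-Python sketch of the same content-addressed idea, using `hashlib` and `tempfile` instead of a shell pipeline (the `put_file_sketch` name and 4 KiB block size are illustrative assumptions, not the project's API):

import hashlib
import os
import shutil
import tempfile

def put_file_sketch(path, store_dir, block_size=4096):
    """Copy `path` into `store_dir`, naming the copy after its SHA-1 digest."""
    sha1 = hashlib.sha1()
    fd, temp_file = tempfile.mkstemp(dir=store_dir)
    with os.fdopen(fd, 'wb') as dst, open(path, 'rb') as src:
        # Hash and copy in one pass, mirroring dd | tee | sha1sum.
        for block in iter(lambda: src.read(block_size), b''):
            sha1.update(block)
            dst.write(block)
    destination = os.path.join(store_dir, sha1.hexdigest())
    if os.path.exists(destination):
        os.remove(temp_file)          # content already stored: drop the copy
    else:
        shutil.move(temp_file, destination)
        os.chmod(destination, 0o444)  # make the stored copy read-only
    return destination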
Example #2
File: build.py  Project: fernyb/vim
def BuildYcmdLibs(cmake_args):
    build_dir = unicode(sh.mktemp('-d', '-t', 'ycm_build.XXXXXX')).strip()

    try:
        full_cmake_args = ['-G', 'Unix Makefiles']
        if OnMac():
            full_cmake_args.extend(CustomPythonCmakeArgs())
        full_cmake_args.extend(cmake_args)
        full_cmake_args.append(p.join(DIR_OF_THIS_SCRIPT, 'cpp'))

        sh.cd(build_dir)
        sh.cmake(*full_cmake_args, _out=sys.stdout)

        build_target = ('ycm_support_libs' if 'YCM_TESTRUN' not in os.environ
                        else 'ycm_core_tests')
        sh.make('-j',
                NumCores(),
                build_target,
                _out=sys.stdout,
                _err=sys.stderr)

        if 'YCM_TESTRUN' in os.environ:
            RunYcmdTests(build_dir)
    finally:
        sh.cd(DIR_OF_THIS_SCRIPT)
        sh.rm('-rf', build_dir)
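
The pattern above is a throwaway build tree: create it with `mktemp -d`, work inside `try`, and always `cd` back and `rm -rf` it in `finally`. For comparison, Python 3's standard library wraps the same create/use/remove lifecycle in a context manager; a minimal sketch (the marker file is just a stand-in for the cmake/make steps):

import os
import tempfile

# The directory is created up front and removed when the block exits,
# even if the body raises -- the same guarantee the try/finally above gives.
with tempfile.TemporaryDirectory(prefix='ycm_build.') as build_dir:
    marker = os.path.join(build_dir, 'configured')
    open(marker, 'w').close()           # stand-in for the cmake/make steps
assert not os.path.exists(build_dir)    # the whole tree is gone afterwards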
Example #3
 def create_output_path(self):
     if not self.output_path:
         sh.mkdir("-p", WORK_DIR)
         sh.chmod("700", WORK_DIR)
         self.output_path = str(
             sh.mktemp("-p", WORK_DIR, "-d").stdout, 'utf8').split("\n")[0]
         log.debug("Output path -> {0}".format(self.output_path))
         sh.chmod("700", self.output_path)
Example #4
    def get_env(self):
        include_dirs = [
            "-I{}/{}".format(
                self.ctx.include_dir,
                d.format(arch=self))
            for d in self.ctx.include_dirs]

        env = {}
        ccache = sh.which('ccache')
        cc = sh.xcrun("-find", "-sdk", self.sdk, "clang").strip()
        if ccache:
            ccache = ccache.strip()
            use_ccache = environ.get("USE_CCACHE", "1")
            if use_ccache != '1':
                env["CC"] = cc
            else:
                if not self._ccsh:
                    self._ccsh = ccsh = sh.mktemp().strip()
                    with open(ccsh, 'w') as f:
                        f.write('#!/bin/sh\n')
                        f.write(ccache + ' ' + cc + ' "$@"\n')
                    sh.chmod('+x', ccsh)
                else:
                    ccsh = self._ccsh
                env["USE_CCACHE"] = '1'
                env["CCACHE"] = ccache
                env["CC"] = ccsh

                env.update({k: v for k, v in environ.items() if k.startswith('CCACHE_')})
                env.setdefault('CCACHE_MAXSIZE', '10G')
                env.setdefault('CCACHE_HARDLINK', 'true')
                env.setdefault('CCACHE_SLOPPINESS', ('file_macro,time_macros,'
                    'include_file_mtime,include_file_ctime,file_stat_matches'))
        else:
            env["CC"] = cc
        env["AR"] = sh.xcrun("-find", "-sdk", self.sdk, "ar").strip()
        env["LD"] = sh.xcrun("-find", "-sdk", self.sdk, "ld").strip()
        env["OTHER_CFLAGS"] = " ".join(include_dirs)
        env["OTHER_LDFLAGS"] = " ".join([
            "-L{}/{}".format(self.ctx.dist_dir, "lib"),
        ])
        env["CFLAGS"] = " ".join([
            "-arch", self.arch,
            "-pipe", "-no-cpp-precomp",
            "--sysroot", self.sysroot,
            #"-I{}/common".format(self.ctx.include_dir),
            #"-I{}/{}".format(self.ctx.include_dir, self.arch),
            "-O3",
            self.version_min
        ] + include_dirs)
        env["LDFLAGS"] = " ".join([
            "-arch", self.arch,
            "--sysroot", self.sysroot,
            "-L{}/{}".format(self.ctx.dist_dir, "lib"),
            "-lsqlite3",
            self.version_min
        ])
        return env
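
The ccache branch above works by writing a tiny wrapper script into a `mktemp` file (`#!/bin/sh` followed by `ccache <real compiler> "$@"`), marking it executable, and exporting it as `CC`, so build systems that expect a single compiler path still route every compile through ccache. A standalone sketch of that wrapper-script idea using only the standard library (the `make_ccache_wrapper` name is an illustrative assumption, not the project's API):

import os
import stat
import tempfile

def make_ccache_wrapper(real_cc, ccache='ccache'):
    """Write an executable shell shim that prefixes every call with ccache."""
    fd, wrapper = tempfile.mkstemp(suffix='.sh')
    with os.fdopen(fd, 'w') as f:
        f.write('#!/bin/sh\n')
        f.write('{} {} "$@"\n'.format(ccache, real_cc))
    # chmod +x: add execute permission for user, group, and others.
    mode = os.stat(wrapper).st_mode
    os.chmod(wrapper, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
    return wrapper

# env = {"CC": make_ccache_wrapper("/usr/bin/clang")}  # then hand env to the build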
Example #5
File: build.py  Project: arakashic/ycmd
def BuildYcmdLibs( cmake_args ):
  build_dir = unicode( sh.mktemp( '-d', '-t', 'ycm_build.XXXXXX' ) ).strip()

  try:
    full_cmake_args = [ '-G', 'Unix Makefiles' ]
    if OnMac():
      full_cmake_args.extend( CustomPythonCmakeArgs() )
    full_cmake_args.extend( cmake_args )
    full_cmake_args.append( p.join( DIR_OF_THIS_SCRIPT, 'cpp' ) )

    sh.cd( build_dir )
    sh.cmake( *full_cmake_args, _out = sys.stdout )

    build_target = ( 'ycm_support_libs' if 'YCM_TESTRUN' not in os.environ else
                     'ycm_core_tests' )
    sh.make( '-j', NumCores(), build_target, _out = sys.stdout, _err = sys.stderr )

    if 'YCM_TESTRUN' in os.environ:
      RunYcmdTests( build_dir )
  finally:
    sh.cd( DIR_OF_THIS_SCRIPT )
    sh.rm( '-rf', build_dir )
Example #6
def main():
    log = logging.getLogger("zulip-provisioner")
    # TODO: support other architectures
    if platform.architecture()[0] == '64bit':
        arch = 'amd64'
    else:
        log.critical("Only amd64 is supported.")

    vendor, version, codename = platform.dist()

    if not (vendor in SUPPORTED_PLATFORMS and codename in SUPPORTED_PLATFORMS[vendor]):
        log.critical("Unsupported platform: {} {}".format(vendor, codename))

    with sh.sudo:
        sh.apt_get.update(**LOUD)

        sh.apt_get.install(*APT_DEPENDENCIES["trusty"], assume_yes=True, **LOUD)

    temp_deb_path = sh.mktemp("package_XXXXXX.deb", tmpdir=True)

    sh.wget(
        "{}/{}_{}_{}.deb".format(
            TSEARCH_URL_BASE,
            TSEARCH_PACKAGE_NAME["trusty"],
            TSEARCH_VERSION,
            arch,
        ),
        output_document=temp_deb_path,
        **LOUD
    )

    with sh.sudo:
        sh.dpkg("--install", temp_deb_path, **LOUD)

    with sh.sudo:
        PHANTOMJS_PATH = "/srv/phantomjs"
        PHANTOMJS_TARBALL = os.path.join(PHANTOMJS_PATH, "phantomjs-1.9.8-linux-x86_64.tar.bz2")
        sh.mkdir("-p", PHANTOMJS_PATH, **LOUD)
        sh.wget("https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-1.9.8-linux-x86_64.tar.bz2",
                output_document=PHANTOMJS_TARBALL, **LOUD)
        sh.tar("xj", directory=PHANTOMJS_PATH, file=PHANTOMJS_TARBALL, **LOUD)
        sh.ln("-sf", os.path.join(PHANTOMJS_PATH, "phantomjs-1.9.8-linux-x86_64", "bin", "phantomjs"),
              "/usr/local/bin/phantomjs", **LOUD)

    with sh.sudo:
        sh.rm("-rf", VENV_PATH, **LOUD)
        sh.mkdir("-p", VENV_PATH, **LOUD)
        sh.chown("{}:{}".format(os.getuid(), os.getgid()), VENV_PATH, **LOUD)

    sh.virtualenv(VENV_PATH, **LOUD)

    # Add the ./tools and ./scripts/setup directories inside the repository root to
    # the system path; we'll reference them later.
    orig_path = os.environ["PATH"]
    os.environ["PATH"] = os.pathsep.join((
            os.path.join(ZULIP_PATH, "tools"),
            os.path.join(ZULIP_PATH, "scripts", "setup"),
            orig_path
    ))


    # Put Python virtualenv activation in our .bash_profile.
    with open(os.path.expanduser('~/.bash_profile'), 'w+') as bash_profile:
        bash_profile.writelines([
            "source .bashrc\n",
            "source %s\n" % (os.path.join(VENV_PATH, "bin", "activate"),),
        ])

    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(VENV_PATH, "bin", "activate_this.py")
    execfile(activate_this, dict(__file__=activate_this))

    sh.pip.install(requirement=os.path.join(ZULIP_PATH, "requirements.txt"), **LOUD)

    with sh.sudo:
        sh.cp(REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH, **LOUD)

    # npm install and management commands expect to be run from the root of the project.
    os.chdir(ZULIP_PATH)

    sh.npm.install(**LOUD)

    os.system("tools/download-zxcvbn")
    os.system("tools/emoji_dump/build_emoji")
    os.system("generate_secrets.py -d")
    if "--travis" in sys.argv:
        os.system("sudo service rabbitmq-server restart")
        os.system("sudo service redis-server restart")
        os.system("sudo service memcached restart")
    sh.configure_rabbitmq(**LOUD)
    sh.postgres_init_dev_db(**LOUD)
    sh.do_destroy_rebuild_database(**LOUD)
    sh.postgres_init_test_db(**LOUD)
    sh.do_destroy_rebuild_test_database(**LOUD)
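
A note on `temp_deb_path = sh.mktemp("package_XXXXXX.deb", tmpdir=True)` above: `sh` turns keyword arguments into command-line options (the same mechanism turns `output_document=` into `--output-document` for the wget calls), so the call runs roughly `mktemp --tmpdir package_XXXXXX.deb`, creating the file under $TMPDIR or /tmp. A small sketch of the two equivalent spellings (illustrative, not from the script above):

import sh

# Both ask mktemp for a template-named file in the default temp directory:
deb_a = sh.mktemp("package_XXXXXX.deb", tmpdir=True)  # keyword arg -> --tmpdir
deb_b = sh.mktemp("--tmpdir", "package_XXXXXX.deb")   # explicit long option
print(str(deb_a).strip(), str(deb_b).strip())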
Example #7
from glob import glob
import os
import sys
import sh
from sh import mktemp


# Display the help if any arguments are provided.
if len(sys.argv) > 1:
	print(HELP)
	sys.exit(0)

# Determine the location of this script and the .scr file
here   = os.path.dirname(os.path.realpath(__file__))
pngscr = os.path.join(here, '..', 'scr', 'png.scr')
scr    = os.path.join(here, '..', 'scr', 'script.scr')
tmppng = os.path.join(str(mktemp('/tmp/pngXXXXXXXXXX')).strip() + '.scr')
tmpscr = os.path.join(str(mktemp('/tmp/scrXXXXXXXXXX')).strip() + '.scr')

# Get the text from the png script file so that we can replace the 
# schematic name later.
pngscr_contents = ''
with open(pngscr, 'r') as f:
	pngscr_contents = f.read()

# Create script to run the pdf script
contents = ''
with open(scr, 'r') as f:
	contents = f.read()
contents = contents.replace('%SCRIPT_PATH%', tmppng)
with open(tmpscr, 'w') as f:
	f.write(contents)
Example #8
args = parser.parse_args()

if os.path.splitext(args.subtitulos)[-1].lower() != '.srt':
  raise ValueError ('El archivo de subtitulos no parece un archivo valido')

for fname in (args.video, args.subtitulos):
  if not os.path.exists(fname):
    raise ValueError ('El archivo %s no existe' % fname)

if os.path.exists(args.output):
  if not args.force:
    raise ValueError ('El archivo de salida %s ya existe (use -f para sobreescribirlo)' % args.output)

assert(os.path.splitext(args.output)[-1].lower() == '.avi')

tmp = mktemp().strip()
tmp_avi = mktemp('--tmpdir=/var/tmp').strip()
try:
  for line in nice.mencoder(args.video, '-oac', 'mp3lame', '-ovc', 'xvid', '-vf', 'scale', '-zoom', '-xy', 720, '-xvidencopts', 'bitrate=1500:me_quality=6:threads=2:pass=1', '-sub', args.subtitulos.decode('latin1'), '-subfont-text-scale', 2.8, '-subcp', args.encoding, '-subpos', 85, '-passlogfile', tmp, '-o', '/dev/null', '-quiet', _iter=True):
    print line.strip()

  for line in nice.mencoder(args.video, '-oac', 'mp3lame', '-ovc', 'xvid', '-vf', 'scale', '-zoom', '-xy', 720, '-xvidencopts', 'bitrate=1500:me_quality=6:threads=2:pass=2', '-sub', args.subtitulos.decode('latin1'), '-subfont-text-scale', 2.8, '-subcp', args.encoding, '-subpos', 85, '-passlogfile', tmp, '-o', tmp_avi, '-quiet', _iter=True):
    print line.strip()
    
  shutil.move (tmp_avi, args.output)

  mail ('-s', "encode de %s terminado" % os.path.basename(args.output), '*****@*****.**', _in='Proceso terminado')
finally:
  if os.path.exists(tmp):
    os.remove (tmp)
  if os.path.exists(tmp_avi):
    os.remove (tmp_avi)
Example #9
def create_temp_file(suffix):
    create_dir_cmd = mktemp("/tmp/" + suffix + "-XXXXX")
    return create_dir_cmd.stdout.strip()
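
A short usage sketch for the helper above, assuming `from sh import mktemp` is in scope as the call implies. `mktemp` replaces the trailing run of X characters with random ones, so the result looks like /tmp/report-k3Zq7; note that under Python 3 the `sh` module exposes `.stdout` as bytes, so callers may prefer `str(create_dir_cmd).strip()` inside the helper to get text back:

path = create_temp_file("report")   # e.g. /tmp/report-k3Zq7 (five random chars)
with open(path, "w") as f:          # the file already exists; just write to it
    f.write("scratch data\n")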
Example #10
from glob import glob
import os
import sys
import sh
from sh import mktemp

# Display the help if any arguments are provided.
if len(sys.argv) > 1:
    print(HELP)
    sys.exit(0)

# Determine the location of this script and the .scr file
here = os.path.dirname(os.path.realpath(__file__))
pngscr = os.path.join(here, '..', 'scr', 'png.scr')
scr = os.path.join(here, '..', 'scr', 'script.scr')
tmppng = os.path.join(str(mktemp('/tmp/pngXXXXXXXXXX')).strip() + '.scr')
tmpscr = os.path.join(str(mktemp('/tmp/scrXXXXXXXXXX')).strip() + '.scr')

# Get the text from the png script file so that we can replace the
# schematic name later.
pngscr_contents = ''
with open(pngscr, 'r') as f:
    pngscr_contents = f.read()

# Create script to run the pdf script
contents = ''
with open(scr, 'r') as f:
    contents = f.read()
contents = contents.replace('%SCRIPT_PATH%', tmppng)
with open(tmpscr, 'w') as f:
    f.write(contents)
Example #11
def create_temp_directory(suffix):
    create_dir_cmd = mktemp("-d", "/tmp/" + suffix + "-XXXXX")
    return create_dir_cmd.stdout.strip()
Example #12
            if len(line.strip()) == 0:
                continue
            opts = line.split(':', 1)
            d[opts[0].strip().lower()] = opts[1].strip()


# Display the help if any arguments are provided.
if len(sys.argv) > 1:
    print(HELP)
    sys.exit(0)

# Determine the location of this script and the .ulp file
here = os.path.dirname(os.path.realpath(__file__))
ulp = os.path.join(here, '..', 'ulp', 'attributes.ulp')
scr = os.path.join(here, '..', 'scr', 'ulp-sch.scr')
tmpscr = str(mktemp(os.path.join('/', 'tmp', 'ulp.scrXXXXXXXXXX'))).strip()

contents = ''
with open(scr, 'r') as f:
    contents = f.read()

contents = contents.replace('%ULP_PATH%', ulp)

with open(tmpscr, 'w') as f:
    f.write(contents)

# Figure out the name of the schematic to run this on.
for sch in glob.glob('*.sch'):
    attrs = {}

    sch_name = sch[:-4]
Example #13
def main():
    log = logging.getLogger("zulip-provisioner")
    # TODO: support other architectures
    if platform.architecture()[0] == '64bit':
        arch = 'amd64'
    else:
        log.critical("Only amd64 is supported.")

    vendor, version, codename = platform.dist()

    if not (vendor in SUPPORTED_PLATFORMS and codename in SUPPORTED_PLATFORMS[vendor]):
        log.critical("Unsupported platform: {} {}".format(vendor, codename))

    with sh.sudo:
        sh.apt_get.update(**LOUD)

        sh.apt_get.install(*APT_DEPENDENCIES["trusty"], assume_yes=True, **LOUD)

    temp_deb_path = sh.mktemp("package_XXXXXX.deb", tmpdir=True)

    sh.wget(
        "{}/{}_{}_{}.deb".format(
            TSEARCH_URL_BASE,
            TSEARCH_PACKAGE_NAME["trusty"],
            TSEARCH_VERSION,
            arch,
        ),
        output_document=temp_deb_path,
        **LOUD
    )

    with sh.sudo:
        sh.dpkg("--install", temp_deb_path, **LOUD)

    with sh.sudo:
        PHANTOMJS_PATH = "/srv/phantomjs"
        PHANTOMJS_TARBALL = os.path.join(PHANTOMJS_PATH, "phantomjs-1.9.8-linux-x86_64.tar.bz2")
        sh.mkdir("-p", PHANTOMJS_PATH, **LOUD)
        sh.wget("https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-1.9.8-linux-x86_64.tar.bz2",
                output_document=PHANTOMJS_TARBALL, **LOUD)
        sh.tar("xj", directory=PHANTOMJS_PATH, file=PHANTOMJS_TARBALL, **LOUD)
        sh.ln("-sf", os.path.join(PHANTOMJS_PATH, "phantomjs-1.9.8-linux-x86_64", "bin", "phantomjs"),
              "/usr/local/bin/phantomjs", **LOUD)

    with sh.sudo:
        sh.rm("-rf", VENV_PATH, **LOUD)
        sh.mkdir("-p", VENV_PATH, **LOUD)
        sh.chown("{}:{}".format(os.getuid(), os.getgid()), VENV_PATH, **LOUD)

    sh.virtualenv(VENV_PATH, **LOUD)

    # Add the ./tools and ./scripts/setup directories inside the repository root to
    # the system path; we'll reference them later.
    orig_path = os.environ["PATH"]
    os.environ["PATH"] = os.pathsep.join((
            os.path.join(ZULIP_PATH, "tools"),
            os.path.join(ZULIP_PATH, "scripts", "setup"),
            orig_path
    ))


    # Put Python virtualenv activation in our .bash_profile.
    with open(os.path.expanduser('~/.bash_profile'), 'w+') as bash_profile:
        bash_profile.writelines([
            "source .bashrc\n",
            "source %s\n" % (os.path.join(VENV_PATH, "bin", "activate"),),
        ])

    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(VENV_PATH, "bin", "activate_this.py")
    execfile(activate_this, dict(__file__=activate_this))

    sh.pip.install(requirement=os.path.join(ZULIP_PATH, "requirements.txt"), **LOUD)

    with sh.sudo:
        sh.cp(REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH, **LOUD)

    # Add additional node packages for test-js-with-node.
    with sh.sudo:
        sh.npm.install(*NPM_DEPENDENCIES["trusty"], g=True, prefix="/usr", **LOUD)

    # Management commands expect to be run from the root of the project.
    os.chdir(ZULIP_PATH)

    os.system("tools/download-zxcvbn")
    os.system("tools/emoji_dump/build_emoji")
    os.system("generate_secrets.py -d")
    sh.configure_rabbitmq(**LOUD)
    sh.postgres_init_db(**LOUD)
    sh.do_destroy_rebuild_database(**LOUD)
    sh.postgres_init_test_db(**LOUD)
    sh.do_destroy_rebuild_test_database(**LOUD)
Example #14
    def organize(self, mfile, root_call=True):
        loaded_mfiles = set()

        if root_call:
            log.info("Organizing {0}".format(mfile))
        try:
            if mfile.load():
                log.info("Organizing childs of {0}".format(mfile))
                if mfile.is_source_container():
                    log.debug("{0} is source".format(mfile.path))
                    dump_dir_path = os.path.join(WORK_DIR, SW_PROJECTS_OUTPUT)
                    sh.mkdir("-p", dump_dir_path)

                    dump_dir_path = sh.mktemp(
                        "-d", "-p", dump_dir_path, "--suffix",
                        os.path.basename(mfile.path)).stdout.strip()
                    try:
                        sh.rsync("-rat", mfile.path, dump_dir_path)
                    except sh.ErrorReturnCode_23:
                        ## Rsync errs related with attrs or others
                        pass

                else:
                    loaded_mfiles.add(mfile)
                    #self.dive(mfile)
                    p = Process(target=Organizer.dive, args=[self, mfile])
                    p.start()
                    p.join()
            else:
                destination_path = self.index.put_file(mfile.path)
                metapath_file = open(
                    "{}.{}".format(destination_path, METAFPATHFILE), 'ab')
                metapath_file.write(bytes(mfile.path + "\n", 'utf8'))
                metapath_file.close()
                try:
                    ordered_path = mfile.get_ordered_path()
                    sh.mkdir("-p", os.path.join(ordered_path, 'NoMeta'))
                    fname = os.path.basename(mfile.path)
                    destination_fname = os.path.basename(destination_path)
                    for link in mfile.gen_ordered_paths():
                        log.debug("{} to {}".format(mfile.path, link))
                        sh.mkdir("-p", link)
                        try:
                            has_ext = re.search(r"(\..*)", fname)
                            extension = has_ext.group(1)
                            link = os.path.join(
                                link, u"{0}{1}".format(destination_fname,
                                                       extension))
                        except AttributeError:
                            link = os.path.join(
                                link, u"{0}".format(destination_fname))
                        log.info(u"File {0} @ {1}".format(
                            str(mfile), ordered_path))
                        sh.ln("-s", destination_path, link)
                except sh.ErrorReturnCode_1:
                    pass
                except sh.ErrorReturnCode as e:
                    log.exception(e)
        except Exception as e:
            log.error("Organizer error {0}".format(mfile.path))
            log.exception(e)
        finally:
            for loaded_mfile in loaded_mfiles:
                try:
                    loaded_mfile.unload()
                except Exception as e:
                    log.error("Error unloading {0}".format(mfile.path))
                    log.exception(e)
            return True
Example #15
    def get_env(self):
        include_dirs = [
            "-I{}/{}".format(self.ctx.include_dir, d.format(arch=self))
            for d in self.ctx.include_dirs
        ]

        env = {}
        ccache = sh.which('ccache')
        cc = sh.xcrun("-find", "-sdk", self.sdk, "clang").strip()
        cxx = sh.xcrun("-find", "-sdk", self.sdk, "clang++").strip()

        # we put the flags in CC / CXX as sometimes the ./configure test
        # with the preprocessor (aka CC -E) without CFLAGS, which fails for
        # cross compiled projects
        flags = " ".join([
            "--sysroot",
            self.sysroot,
            "-arch",
            self.arch,
            "-pipe",
            "-no-cpp-precomp",
        ])
        cc += " " + flags
        cxx += " " + flags
        if ccache:
            ccache = ccache.strip()
            use_ccache = environ.get("USE_CCACHE", "1")
            if use_ccache != '1':
                env["CC"] = cc
                env["CXX"] = cxx
            else:
                if not self._ccsh:
                    self._ccsh = ccsh = sh.mktemp().strip()
                    with open(ccsh, 'w') as f:
                        f.write('#!/bin/sh\n')
                        f.write(ccache + ' ' + cc + ' "$@"\n')
                    sh.chmod('+x', ccsh)
                    self._cxxsh = cxxsh = sh.mktemp().strip()
                    with open(cxxsh, 'w') as f:
                        f.write('#!/bin/sh\n')
                        f.write(ccache + ' ' + cxx + ' "$@"\n')
                    sh.chmod('+x', cxxsh)
                else:
                    ccsh = self._ccsh
                    cxxsh = self._cxxsh
                env["USE_CCACHE"] = '1'
                env["CCACHE"] = ccache
                env["CC"] = ccsh
                env["CXX"] = cxxsh

                env.update({
                    k: v
                    for k, v in environ.items() if k.startswith('CCACHE_')
                })
                env.setdefault('CCACHE_MAXSIZE', '10G')
                env.setdefault('CCACHE_HARDLINK', 'true')
                env.setdefault('CCACHE_SLOPPINESS', (
                    'file_macro,time_macros,'
                    'include_file_mtime,include_file_ctime,file_stat_matches'))
        else:
            env["CC"] = cc
            env["CXX"] = cxx
        env["AR"] = sh.xcrun("-find", "-sdk", self.sdk, "ar").strip()
        env["LD"] = sh.xcrun("-find", "-sdk", self.sdk, "ld").strip()
        env["OTHER_CFLAGS"] = " ".join(include_dirs)
        env["OTHER_LDFLAGS"] = " ".join([
            "-L{}/{}".format(self.ctx.dist_dir, "lib"),
        ])
        env["CFLAGS"] = " ".join(["-O3", self.version_min] + include_dirs)
        env["LDFLAGS"] = " ".join([
            "-arch", self.arch, "--sysroot", self.sysroot,
            "-L{}/{}".format(self.ctx.dist_dir,
                             "lib"), "-lsqlite3", self.version_min
        ])
        return env
Example #16
def main():
    log = logging.getLogger("zulip-provisioner")

    if platform.architecture()[0] == '64bit':
        arch = 'amd64'
        phantomjs_arch = 'x86_64'
    elif platform.architecture()[0] == '32bit':
        arch = "i386"
        phantomjs_arch = 'i686'
    else:
        log.critical("Only x86 is supported; ping [email protected] if you want another architecture.")
        sys.exit(1)

    vendor, version, codename = platform.dist()

    if not (vendor in SUPPORTED_PLATFORMS and codename in SUPPORTED_PLATFORMS[vendor]):
        log.critical("Unsupported platform: {} {}".format(vendor, codename))

    with sh.sudo:
        sh.apt_get.update(**LOUD)

        sh.apt_get.install(*APT_DEPENDENCIES["trusty"], assume_yes=True, **LOUD)

    temp_deb_path = sh.mktemp("package_XXXXXX.deb", tmpdir=True)

    sh.wget(
        "{}/{}_{}_{}.deb".format(
            TSEARCH_URL_BASE,
            TSEARCH_PACKAGE_NAME["trusty"],
            TSEARCH_VERSION,
            arch,
        ),
        output_document=temp_deb_path,
        **LOUD
    )

    with sh.sudo:
        sh.dpkg("--install", temp_deb_path, **LOUD)

    with sh.sudo:
        PHANTOMJS_PATH = "/srv/phantomjs"
        PHANTOMJS_BASENAME = "phantomjs-1.9.8-linux-%s" % (phantomjs_arch,)
        PHANTOMJS_TARBALL_BASENAME = PHANTOMJS_BASENAME + ".tar.bz2"
        PHANTOMJS_TARBALL = os.path.join(PHANTOMJS_PATH, PHANTOMJS_TARBALL_BASENAME)
        PHANTOMJS_URL = "https://bitbucket.org/ariya/phantomjs/downloads/%s" % (PHANTOMJS_TARBALL_BASENAME,)
        sh.mkdir("-p", PHANTOMJS_PATH, **LOUD)
        if not os.path.exists(PHANTOMJS_TARBALL):
            sh.wget(PHANTOMJS_URL, output_document=PHANTOMJS_TARBALL, **LOUD)
        sh.tar("xj", directory=PHANTOMJS_PATH, file=PHANTOMJS_TARBALL, **LOUD)
        sh.ln("-sf", os.path.join(PHANTOMJS_PATH, PHANTOMJS_BASENAME, "bin", "phantomjs"),
              "/usr/local/bin/phantomjs", **LOUD)

    with sh.sudo:
        sh.rm("-rf", VENV_PATH, **LOUD)
        sh.mkdir("-p", VENV_PATH, **LOUD)
        sh.chown("{}:{}".format(os.getuid(), os.getgid()), VENV_PATH, **LOUD)

    sh.virtualenv(VENV_PATH, **LOUD)

    # Add the ./tools and ./scripts/setup directories inside the repository root to
    # the system path; we'll reference them later.
    orig_path = os.environ["PATH"]
    os.environ["PATH"] = os.pathsep.join((
            os.path.join(ZULIP_PATH, "tools"),
            os.path.join(ZULIP_PATH, "scripts", "setup"),
            orig_path
    ))


    # Put Python virtualenv activation in our .bash_profile.
    with open(os.path.expanduser('~/.bash_profile'), 'w+') as bash_profile:
        bash_profile.writelines([
            "source .bashrc\n",
            "source %s\n" % (os.path.join(VENV_PATH, "bin", "activate"),),
        ])

    # Switch current Python context to the virtualenv.
    activate_this = os.path.join(VENV_PATH, "bin", "activate_this.py")
    execfile(activate_this, dict(__file__=activate_this))

    sh.pip.install(requirement=os.path.join(ZULIP_PATH, "requirements.txt"), **LOUD)

    with sh.sudo:
        sh.cp(REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH, **LOUD)

    # npm install and management commands expect to be run from the root of the project.
    os.chdir(ZULIP_PATH)

    sh.npm.install(**LOUD)

    os.system("tools/download-zxcvbn")
    os.system("tools/emoji_dump/build_emoji")
    os.system("generate_secrets.py -d")
    if "--travis" in sys.argv:
        os.system("sudo service rabbitmq-server restart")
        os.system("sudo service redis-server restart")
        os.system("sudo service memcached restart")
    elif "--docker" in sys.argv:
        os.system("sudo service rabbitmq-server restart")
        os.system("sudo pg_dropcluster --stop 9.3 main")
        os.system("sudo pg_createcluster -e utf8 --start 9.3 main")
        os.system("sudo service redis-server restart")
        os.system("sudo service memcached restart")
    sh.configure_rabbitmq(**LOUD)
    sh.postgres_init_dev_db(**LOUD)
    sh.do_destroy_rebuild_database(**LOUD)
    sh.postgres_init_test_db(**LOUD)
    sh.do_destroy_rebuild_test_database(**LOUD)
    return 0
Example #17
		for line in f:
			if len(line.strip()) == 0:
				continue
			opts = line.split(':', 1)
			d[opts[0].strip().lower()] = opts[1].strip()

# Display the help if any arguments are provided.
if len(sys.argv) > 1:
	print(HELP)
	sys.exit(0)

# Determine the location of this script and the .ulp file
here   = os.path.dirname(os.path.realpath(__file__))
ulp    = os.path.join(here, '..', 'ulp', 'attributes.ulp')
scr    = os.path.join(here, '..', 'scr', 'ulp-sch.scr')
tmpscr = str(mktemp(os.path.join('/', 'tmp', 'ulp.scrXXXXXXXXXX'))).strip()

contents = ''
with open(scr, 'r') as f:
	contents = f.read()

contents = contents.replace('%ULP_PATH%', ulp)

with open(tmpscr, 'w') as f:
	f.write(contents)

# Figure out the name of the schematic to run this on.
for sch in glob.glob('*.sch'):
	attrs = {}

	sch_name = sch[:-4]