Example #1
0
File: sift.py Project: jguinet/s2p
def image_keypoints(im, x, y, w, h, max_nb=None, extra_params=''):
    """
    Runs SIFT (keypoints detection and description only, no matching).

    It uses Ives Rey Otero's implementation published in IPOL:
    http://www.ipol.im/pub/pre/82/

    Args:
        im: path to the input image
        x, y, w, h: rectangular region of interest in the image
        max_nb (optional): maximal number of keypoints. If more keypoints are
            detected, those at smallest scales are discarded
        extra_params (optional): extra parameters to be passed to the sift
            binary

    Returns:
        path to the file containing the list of descriptors
    """
    keyfile = common.tmpfile('.txt')
    # optional cap on the number of detected points; empty string when unused
    max_nb_opt = "--max-nb-pts %d " % max_nb if max_nb else ""
    cmd = "sift_roi %s %d %d %d %d %s%s -o %s" % (im, x, y, w, h, max_nb_opt,
                                                  extra_params, keyfile)
    common.run(cmd)
    return keyfile
def getExistingMasterInstance(current_master_ip=None):
    """Locate the EC2 instance of the current Jenkins master.

    Verifies ssh access to the master, asks for confirmation if the jenkins
    service appears to be running there, then resolves the instance through
    the AWS CLI by its public IP.

    NOTE(review): the ``current_master_ip`` parameter is unused; the global
    ``g_args.current_master_ip`` is read instead — confirm before relying on
    the argument.

    Returns:
        dict describing the instance, parsed from ``describe-instances``.
    """
    say('Making sure jenkins is not running on the master...', banner="*")
    master_ip = g_args.current_master_ip

    # Sanity check: make sure a trivial command works over ssh first.
    probe_cmd = "%s %s@%s 'echo hello'" % (g_ssh_cmd, g_args.ssh_user,
                                           master_ip)
    run(probe_cmd, hide_command=g_hide_command)
    say('Script was able to ssh onto master and run "echo hello world"...')

    # Check the jenkins service. The extra "-t" allocates a tty, otherwise:
    # sudo: sorry, you must have a tty to run sudo
    status_cmd = "%s -t %s@%s 'sudo service jenkins status'" % (
        g_ssh_cmd, g_args.ssh_user, master_ip)
    output, returncode = run(status_cmd,
                             raise_on_failure=False,
                             hide_command=g_hide_command,
                             debug=g_args.debug)
    if 'jenkins' in output and 'is running...' in output:
        answer = input(
            'Jenkins is running on the master.  Are you sure you want to continue? (y|n)'
        )
        if answer != 'y':
            say('goodbye!')
            sys.exit(0)

    say('Getting instance from IP: ' + master_ip)
    output, returncode = run(
        'aws ec2 describe-instances --filters "Name=ip-address,Values=' +
        master_ip + '"',
        hide_command=g_hide_command,
        debug=g_args.debug)
    instance = json.loads(output)['Reservations'][0]['Instances'][0]
    say('Instance-id of existing Jenkins master: ' + instance['InstanceId'])
    return instance
Example #3
0
def loop_zhang(F, w, h):
    """
    Computes rectifying homographies from a fundamental matrix, with Loop-Zhang.

    Args:
        F: 3x3 numpy array containing the fundamental matrix
        w, h: images size. The two images are supposed to have same size

    Returns:
        The two rectifying homographies.

    The rectifying homographies are computed with the Pascal Monasse binary
    named rectify_mindistortion, which implements the Loop-Zhang algorithm.
    """
    # temporary files: the input matrix and the two output homographies
    f_txt = common.tmpfile('.txt')
    ha_txt = common.tmpfile('.txt')
    hb_txt = common.tmpfile('.txt')
    common.matrix_write(f_txt, F)
    common.run('rectify_mindistortion %s %d %d %s %s > /dev/null' %
               (f_txt, w, h, ha_txt, hb_txt))
    Ha = common.matrix_read(ha_txt, size=(3, 3))
    Hb = common.matrix_read(hb_txt, size=(3, 3))

    # if both homographies flip the vertical direction, compose each of them
    # with a 180 degree rotation about the origin to undo the flip
    flipped_a = does_this_homography_change_the_vertical_direction(Ha)
    flipped_b = does_this_homography_change_the_vertical_direction(Hb)
    if flipped_a and flipped_b:
        rot180 = np.diag([-1, -1, 1])
        Ha = np.dot(rot180, Ha)
        Hb = np.dot(rot180, Hb)
    return Ha, Hb
Example #4
0
def mosaic_gdal(fout, w, h, list_tiles, tw, th, ov):
    """
    Compose several tiles of the same size into a bigger image (using gdal vrt)

    Args:
        fout: path to the output image
        w, h: output image dimensions
        list_tiles: list containing paths to the input tiles
        tw, th: dimensions of a tile (they must all have the same dimensions)
        ov: overlap between tiles (in pixels)

    Returns:
        nothing

    Raises:
        ValueError: if the number of tiles does not match the tiling grid
            implied by the image and tile dimensions.
    """
    N = len(list_tiles)
    # number of tiles along each axis; consecutive tiles advance by
    # (tile size - overlap) pixels
    ntx = np.ceil(float(w - ov) / (tw - ov)).astype(int)
    nty = np.ceil(float(h - ov) / (th - ov)).astype(int)
    # raise instead of assert: assert statements are stripped under -O
    if ntx * nty != N:
        raise ValueError("expected %d tiles (%d x %d), got %d" %
                         (ntx * nty, ntx, nty, N))

    vrtfilename = fout + '.vrt'

    # 'with' guarantees the vrt file is closed even if a write fails
    # (the original leaked the handle on exception)
    with open(vrtfilename, 'w') as vrtfile:
        vrtfile.write("<VRTDataset rasterXSize=\"%i\" rasterYSize=\"%i\">\n" %
                      (w, h))
        vrtfile.write("\t<VRTRasterBand dataType=\"Float32\" band=\"1\">\n")
        vrtfile.write("\t\t<ColorInterp>Gray</ColorInterp>\n")

        # loop over all the tiles
        for j in range(nty):
            for i in range(ntx):
                x0 = i * (tw - ov)
                y0 = j * (th - ov)
                # clip the last row/column of tiles to the image borders
                x1 = min(x0 + tw, w)
                y1 = min(y0 + th, h)
                f = list_tiles[j * ntx + i]
                if os.path.isfile(f):
                    # keep only the parent dir name so the path stays
                    # relative to the vrt file location
                    tile_fname = os.path.join(
                        os.path.split(os.path.dirname(f))[1],
                        os.path.basename(f))
                    vrtfile.write("\t\t<SimpleSource>\n")
                    vrtfile.write(
                        "\t\t\t<SourceFilename relativeToVRT=\"1\">%s</SourceFilename>\n"
                        % tile_fname)
                    vrtfile.write("\t\t\t<SourceBand>1</SourceBand>\n")
                    vrtfile.write(
                        "\t\t\t<SrcRect xOff=\"%i\" yOff=\"%i\" xSize=\"%i\" ySize=\"%i\"/>\n"
                        % (0, 0, x1 - x0, y1 - y0))
                    vrtfile.write(
                        "\t\t\t<DstRect xOff=\"%i\" yOff=\"%i\" xSize=\"%i\" ySize=\"%i\"/>\n"
                        % (x0, y0, x1 - x0, y1 - y0))
                    vrtfile.write("\t\t</SimpleSource>\n")

        vrtfile.write("\t</VRTRasterBand>\n")
        vrtfile.write("</VRTDataset>\n")

    common.run('gdal_translate %s %s' % (vrtfilename, fout))

    return
Example #5
0
def plot_vectors(p, v, x, y, w, h, f=1, out_file=None):
    """
    Plots vectors on an image, using gnuplot

    Args:
        p: points (origins of vectors), represented as a numpy Nx2 array
        v: vectors, represented as a numpy Nx2 array
        x, y, w, h: rectangular ROI
        f: (optional, default is 1) exageration factor
        out_file: (optional, default is None) path to the output file. When
            None, a temporary png is written and opened in a viewer.

    Returns:
        nothing, but writes a png file and, when out_file was not provided,
        opens it in a display.
    """
    tmp = common.tmpfile('.txt')
    data = np.hstack((p, v))
    np.savetxt(tmp, data, fmt='%6f')
    gp_string = 'set term png size %d,%d;unset key;unset tics;plot [%d:%d] [%d:%d] "%s" u($1):($2):(%d*$3):(%d*$4) w vectors head filled' % (w, h, x, x+w, y, y+h, tmp, f, f)

    # Remember whether the caller asked for an on-screen display *before*
    # out_file is overwritten: the original re-tested 'out_file is None'
    # after assigning it, so the viewer branch was unreachable.
    display = out_file is None
    if display:
        out_file = common.tmpfile('.png')

    common.run("gnuplot -p -e '%s' > %s" % (gp_string, out_file))
    # parenthesized print works on both Python 2 and 3
    print(out_file)

    if display:
        os.system("v %s &" % out_file)
Example #6
0
def compute_point_cloud(cloud, heights, rpc, H=None, crop_colorized='',
                        off_x=None, off_y=None, ascii_ply=False,
                        with_normals=False):
    """
    Computes a color point cloud from a height map.

    Args:
        cloud: path to the output points cloud (ply format)
        heights: height map, sampled on the same grid as the crop_colorized
            image. In particular, its size is the same as crop_colorized.
        rpc: path to xml file containing RPC data for the current Pleiade image
        H (optional, default None): path to the file containing the
            coefficients of the homography mapping the coordinate system of
            the original full size image onto the coordinate system of the
            crop we are dealing with.
        crop_colorized (optional, default ''): path to a colorized crop of a
            Pleiades image
        off_{x,y} (optional, default None): coordinates of the point used as
            origin in the local coordinate system of the computed cloud
        ascii_ply (optional, default false): if True, the output ply file is
            encoded in plain text (ascii)
        with_normals (optional, default false): if True, ask colormesh to
            compute normals as well
    """
    # flatten the homography coefficients into one space-separated string
    if H:
        hij = " ".join(str(coeff) for coeff in np.loadtxt(H).flatten())
    else:
        hij = ""
    asc_flag = "--ascii" if ascii_ply else ""
    nrm_flag = "--with-normals" if with_normals else ""
    command = 'colormesh %s %s %s %s -h "%s" %s %s' % (cloud, heights, rpc,
                                                       crop_colorized, hij,
                                                       asc_flag, nrm_flag)
    # optional offsets for the origin of the local coordinate system
    if off_x:
        command += " --offset_x %d" % off_x
    if off_y:
        command += " --offset_y %d" % off_y
    common.run(command)
def attacheVolume(volume_id=None, instance_id=None, region=None):
    """Attach an EBS volume to an instance and wait until it is 'in-use'.

    Polls ``describe-volumes`` every 15 seconds until the volume state is
    'in-use', then re-describes the instance (it now has a new volume).

    Returns:
        dict describing the instance, parsed from ``describe-instances``.
    """
    say('Attaching volume...', banner="*")
    attach_cmd = ('aws ec2 attach-volume --volume-id %s'
                  ' --instance-id %s --device /dev/sdb'
                  % (str(volume_id), str(instance_id)))
    output, returncode = run(attach_cmd,
                             hide_command=g_hide_command,
                             debug=g_args.debug)
    say(output)

    # Poll until the volume reports the 'in-use' state.
    while True:
        output, returncode = run(
            'aws ec2 describe-volumes --volume-ids ' + str(volume_id),
            hide_command=g_hide_command,
            debug=g_args.debug)
        state = json.loads(output)['Volumes'][0]['State']
        if state == 'in-use':
            say('Volume has been attached: ' + str(volume_id))
            break
        say('Current State: ' + str(state))
        time.sleep(15)

    # Re-describe the instance, since it now has a new volume:
    describe_cmd = 'aws ec2 describe-instances --instance-ids {} --region {}'.format(
        instance_id, region)
    output, returncode = run(describe_cmd,
                             hide_command=g_hide_command,
                             debug=g_args.debug)
    return json.loads(output)['Reservations'][0]['Instances'][0]
Example #8
0
def playbook(install_path,
             playbook='playbook',
             ask_sudo_pass=False,
             ask_vault_pass=False,
             extras=None):
    """Run an ansible playbook found in install_path or the custom dir.

    Args:
        install_path: directory holding ansible.cfg, inventory and playbooks
        playbook: playbook name without the '.yml' extension
        ask_sudo_pass / ask_vault_pass: prompt for the respective passwords
            (the OSXSTRAP_ASK_*_PASS=1 environment variables force them on)
        extras: extra arguments appended verbatim to the command line
    """
    common.get_dotenv()
    check_roles_dir(install_path)
    os.environ["ANSIBLE_CONFIG"] = os.path.join(install_path, 'ansible.cfg')

    cmd = 'ansible-playbook'
    cmd += ' -i "%s"' % os.path.join(install_path, 'inventory')
    # environment variables act as a fallback for the keyword flags
    if ask_sudo_pass or os.environ.get("OSXSTRAP_ASK_SUDO_PASS") == '1':
        cmd += ' --ask-sudo-pass'
    if ask_vault_pass or os.environ.get("OSXSTRAP_ASK_VAULT_PASS") == '1':
        cmd += ' --ask-vault-pass'
    if extras:
        cmd += ' ' + extras

    # prefer the playbook shipped in install_path, then the custom dir
    default_playbook_path = os.path.join(install_path, playbook) + '.yml'
    custom_playbook_path = os.path.join(custom_playbooks_path,
                                        playbook) + '.yml'
    if os.path.exists(default_playbook_path):
        cmd += ' "%s"' % default_playbook_path
    elif os.path.exists(custom_playbook_path):
        cmd += ' "%s"' % custom_playbook_path
    else:
        output.abort(
            "Cannot find playbook %s.yml, looked for it at\n%s\n%s" %
            (playbook, default_playbook_path, custom_playbook_path))
    common.run(cmd)
def createSnapshot(volume_id=None):
    """Snapshot an EBS volume, wait for completion, and tag the snapshot.

    Polls ``describe-snapshots`` every 15 seconds until the snapshot state
    is 'completed', then tags it with Name=jenkins-master-snapshot.

    Returns:
        the snapshot id string.
    """
    say('Creating snapshot...', banner="*")
    output, returncode = run(
        'aws ec2 create-snapshot --description jenkins-master-snapshot --volume-id ' + volume_id,
        hide_command=g_hide_command,
        debug=g_args.debug)
    snapshot_id = json.loads(output)['SnapshotId']

    while True:
        output, returncode = run(
            'aws ec2 describe-snapshots --snapshot-ids ' + str(snapshot_id),
            hide_command=g_hide_command,
            debug=g_args.debug)
        # parse once and pick both fields out of the same document
        snap = json.loads(output)['Snapshots'][0]
        if snap['State'] == 'completed':
            say('Snapshot has been created!')
            break
        say('Current Status: ' + str(snap['State']) + '. Current Progress: ' +
            str(snap['Progress']))
        time.sleep(15)

    output, returncode = run(
        'aws ec2 create-tags --resources ' + str(snapshot_id) +
        ' --tags Key=Name,Value=jenkins-master-snapshot',
        hide_command=g_hide_command,
        debug=g_args.debug)
    return snapshot_id
def main():
  """Archive 'payload' plus a hand-built .isolated file, then run it on swarming.

  Returns:
    0 on success, or the failing subprocess's return code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    common.note(
        'Archiving directory \'payload\' to %s' % options.isolate_server)
    # The isolated hash is the first whitespace-separated token of the
    # archive command's output.
    payload_isolated_sha1 = common.capture(
        [
          'isolateserver.py',
          'archive',
          '--isolate-server', options.isolate_server,
          'payload',
        ]).split()[0]

    common.note(
        'Archiving custom .isolated file to %s' % options.isolate_server)
    # Build a minimal .isolated file by hand that includes the payload and
    # specifies the command to run.
    handle, isolated = tempfile.mkstemp(
        prefix=u'hello_world', suffix=u'.isolated')
    os.close(handle)
    try:
      data = {
        'algo': 'sha-1',
        'command': ['python', 'hello_world.py', 'Custom'],
        'includes': [payload_isolated_sha1],
        'version': '1.0',
      }
      # NOTE(review): 'wb' + json.dump is fine on Python 2; Python 3 would
      # need a text-mode file here — confirm the target interpreter.
      with open(isolated, 'wb') as f:
        json.dump(data, f, sort_keys=True, separators=(',',':'))
      isolated_sha1 = common.capture(
          [
            'isolateserver.py',
            'archive',
            '--isolate-server', options.isolate_server,
            isolated,
          ]).split()[0]
    finally:
      common.note('Deleting temporary file, it is not necessary anymore.')
      os.remove(isolated)

    # Now trigger as usual. You could look at run_exmaple_swarming_involved for
    # the involved way but use the short way here.

    common.note('Running %s on %s' % (isolated_sha1, options.swarming))
    cmd = [
      'swarming.py',
      'run',
      '--swarming', options.swarming,
      '--isolate-server', options.isolate_server,
      '--dimension', 'os', options.swarming_os,
      '--dimension', 'pool', 'default',
      '--task-name', options.task_name,
      isolated_sha1,
    ]
    if options.idempotent:
      cmd.append('--idempotent')
    if options.priority is not None:
      cmd.extend(('--priority', str(options.priority)))
    common.run(cmd, options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    # Propagate the child's exit code to our caller.
    return e.returncode
Example #11
0
def run(*, verbose=False):
    """Recreate the project's pipenv environment (dev packages included)."""
    common.run(_LOGGER, verbose=verbose)
    # keep the virtualenv inside the project directory
    os.environ['PIPENV_VENV_IN_PROJECT'] = "1"

    for pipenv_args in (["pipenv", "uninstall", "--all"],
                        ["pipenv", "install", "--dev"],
                        ["pipenv", "--venv"]):
        common.run_cmd(_LOGGER, pipenv_args)
Example #12
0
def transfer_map(in_map, H, x, y, w, h, zoom, out_map):
    """
    Transfer the heights computed on the rectified grid to the original
    Pleiades image grid.

    Args:
        in_map: path to the input map, usually a height map or a mask, sampled
            on the rectified grid
        H: path to txt file containing a numpy 3x3 array representing the
            rectifying homography
        x, y, w, h: four integers defining the rectangular ROI in the original
            image. (x, y) is the top-left corner, and (w, h) are the dimensions
            of the rectangle.
        zoom: zoom factor (usually 1, 2 or 4) used to produce the input height
            map
        out_map: path to the output map
    """
    # write the inverse of the resampling transform matrix. In brief it is:
    # homography * translation * zoom
    # This matrix transports the coordinates of the original cropped and
    # zoomed grid (the one desired for out_height) to the rectified cropped and
    # zoomed grid (the one we have for height)
    Z = np.diag([zoom, zoom, 1])
    A = common.matrix_translation(x, y)
    HH = np.dot(np.loadtxt(H), np.dot(A, Z))

    # apply the homography
    # write the 9 coefficients of the homography to a string, then call synflow
    # to produce the flow, then backflow to apply it
    # zero:256x256 is the iio way to create a 256x256 image filled with zeros
    # NOTE(review): w/zoom is integer division on Python 2 but float division
    # on Python 3 (then truncated by %d) — confirm the intended rounding.
    hij = ' '.join(['%r' % num for num in HH.flatten()])
    common.run('synflow hom "%s" zero:%dx%d /dev/null - | BILINEAR=1 backflow - %s %s' % (
        hij, w/zoom, h/zoom, in_map, out_map))
Example #13
0
def merge(im1, im2, thresh, out):
    """
    Args:
        im1, im2: paths to the two input images
        thresh: distance threshold on the intensity values
        out: path to the output image

    This function merges two images. They are supposed to be two height maps,
    sampled on the same grid. If a pixel has a valid height (ie not inf) in
    only one of the two maps, then we keep this height. When two heights are
    available, if they differ less than the threshold we take the mean, if not
    we discard the pixel (ie assign NAN to the output pixel).
    """
    # first register the second image on the first
    # NOTE(review): im2 is rebound to whatever register_heights returns —
    # presumably the path of a registered copy; confirm against its def.
    im2 = register_heights(im1, im2)

    # then merge
    # the following plambda expression implements:
    # if isfinite x
    #   if isfinite y
    #     if fabs(x - y) < t
    #       return (x+y)/2
    #     return nan
    #   return x
    # return y
    common.run("""
        plambda %s %s "x isfinite y isfinite x y - fabs %f < x y + 2 / nan if x
        if y if" -o %s
        """ % ( im1, im2, thresh, out))
Example #14
0
    def _set_default_hosts(self):
        """Clears hosts and sets default hostname."""
        if "Linux" in plugin.get_os():
            hostname = "CADSHOST"

            # replace /etc/hostname with the default name (backup first)
            common.backup("/etc/hostname")
            with open("/etc/hostname", "w") as hostname_file:
                hostname_file.write(hostname + "\n")

            # render the hosts template with the chosen hostname
            with open("policies/hosts") as template_file:
                rendered_hosts = template_file.read().format(hostname)

            common.backup("/etc/hosts")
            with open("/etc/hosts", "w") as hosts_file:
                hosts_file.write(rendered_hosts)

            # apply the new hostname immediately
            common.run("hostname {}".format(hostname))
        else:
            # Windows: overwrite the system hosts file with the policy copy
            with open("policies/hosts.win") as template_file:
                win_hosts = template_file.read()
            win_path = "C:\\Windows\\System32\\drivers\\etc\\hosts"
            # NOTE(review): unlike the Linux branch, no backup is taken here
            with open(win_path, "w") as hosts_file:
                hosts_file.write(win_hosts)
Example #15
0
def transfer_map(in_map, H, x, y, w, h, zoom, out_map):
    """
    Transfer the heights computed on the rectified grid to the original
    Pleiades image grid.

    Args:
        in_map: path to the input map, usually a height map or a mask, sampled
            on the rectified grid
        H: path to txt file containing a numpy 3x3 array representing the
            rectifying homography
        x, y, w, h: four integers defining the rectangular ROI in the original
            image. (x, y) is the top-left corner, and (w, h) are the dimensions
            of the rectangle.
        zoom: zoom factor (usually 1, 2 or 4) used to produce the input height
            map
        out_map: path to the output map
    """
    # write the inverse of the resampling transform matrix. In brief it is:
    # homography * translation * zoom
    # This matrix transports the coordinates of the original cropped and
    # zoomed grid (the one desired for out_height) to the rectified cropped and
    # zoomed grid (the one we have for height)
    Z = np.diag([zoom, zoom, 1])
    A = common.matrix_translation(x, y)
    HH = np.dot(np.loadtxt(H), np.dot(A, Z))

    # apply the homography
    # write the 9 coefficients of the homography to a string, then call synflow
    # to produce the flow, then backflow to apply it
    # zero:256x256 is the iio way to create a 256x256 image filled with zeros
    # NOTE(review): w / zoom is integer division on Python 2 but float
    # division on Python 3 (then truncated by %d) — confirm intended rounding.
    hij = ' '.join(['%r' % num for num in HH.flatten()])
    common.run(
        'synflow hom "%s" zero:%dx%d /dev/null - | BILINEAR=1 backflow - %s %s'
        % (hij, w / zoom, h / zoom, in_map, out_map))
Example #16
0
def shared_graph_worker(args):
    """Run the gene classifier or shared-graph helper script for one bam file.

    Args:
        args: parsed command-line options. Exactly one of ``classify_gene``
            or ``plot_shared_graph`` must be set to select the script;
            ``simulation_profile`` optionally adds a --simulation argument.

    Raises:
        ValueError: if neither ``classify_gene`` nor ``plot_shared_graph``
            is set (the original code crashed later with a NameError).
    """
    f = args.filename
    cuffcompare_folder = get_cuffcompare_folder(args, f)
    jeweler_folder = get_jeweler_folder(args, f)
    bracelet_folder = get_bracelet_folder(args, f)
    mismatch_analyzer_folder = get_mismatch_analyzer(args, f)
    shared_graph_folder = get_shared_graph_folder(args, f)
    sample_id = os.path.basename(f.strip().replace('.bam', ''))
    cufflinks_folder = get_cufflinks_folder(args, f)
    ##stupid python evoke a R program that cannot read a file
    if args.classify_gene:
        command = "python shop/classifier2.py "
    elif args.plot_shared_graph:
        command = "python shop/shared_graph.py "
    else:
        # previously 'command' stayed undefined and common.run raised NameError
        raise ValueError(
            "shared_graph_worker requires classify_gene or plot_shared_graph")
    # default to empty: previously 'extra' was undefined (NameError) when no
    # simulation profile was given
    extra = ""
    if args.simulation_profile:
        extra = " --simulation " + args.simulation_profile
    common.run(command +
               cuffcompare_folder + "/ " +
               jeweler_folder + "/ " +
               bracelet_folder + "/ " +
               mismatch_analyzer_folder + "/ " +
               shared_graph_folder + "/ " +
               cufflinks_folder + "/ " +
               sample_id + extra)
Example #17
0
def compute_height_map(rpc1,
                       rpc2,
                       H1,
                       H2,
                       disp,
                       mask,
                       height,
                       rpc_err,
                       A=None):
    """
    Computes a height map from a disparity map, using rpc.

    Args:
        rpc1, rpc2: paths to the xml files
        H1, H2: paths to txt files, each containing a 3x3 numpy array that
            defines a rectifying homography
        disp, mask: paths to the disparity and mask maps
        height: path to the output height map
        rpc_err: path to the output rpc_error of triangulation
        A (optional): pointing correction matrix for im2
    """
    # when a pointing correction is given, compose its inverse with H2 and
    # feed the corrected homography to disp_to_h instead of H2 itself
    if A is None:
        corrected_h2 = H2
    else:
        corrected_h2 = common.tmpfile('.txt')
        np.savetxt(corrected_h2, np.dot(np.loadtxt(H2), np.linalg.inv(A)))

    common.run("disp_to_h %s %s %s %s %s %s %s %s" %
               (rpc1, rpc2, H1, corrected_h2, disp, mask, height, rpc_err))
    return
def main():
  """Archive 'payload' plus a hand-built .isolated file, then run it on swarming.

  Returns:
    0 on success, or the failing subprocess's return code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    common.note(
        'Archiving directory \'payload\' to %s' % options.isolate_server)
    # The isolated hash is the first whitespace-separated token of the
    # archive command's output.
    payload_isolated_sha1 = common.capture(
        [
          'isolateserver.py',
          'archive',
          '--isolate-server', options.isolate_server,
          'payload',
        ]).split()[0]

    common.note(
        'Archiving custom .isolated file to %s' % options.isolate_server)
    # Build a minimal .isolated file by hand that includes the payload and
    # specifies the command to run.
    handle, isolated = tempfile.mkstemp(
        prefix=u'hello_world', suffix=u'.isolated')
    os.close(handle)
    try:
      data = {
        'algo': 'sha-1',
        'command': ['python', 'hello_world.py', 'Custom'],
        'includes': [payload_isolated_sha1],
        'version': '1.0',
      }
      # NOTE(review): 'wb' + json.dump is fine on Python 2; Python 3 would
      # need a text-mode file here — confirm the target interpreter.
      with open(isolated, 'wb') as f:
        json.dump(data, f, sort_keys=True, separators=(',',':'))
      isolated_sha1 = common.capture(
          [
            'isolateserver.py',
            'archive',
            '--isolate-server', options.isolate_server,
            isolated,
          ]).split()[0]
    finally:
      common.note('Deleting temporary file, it is not necessary anymore.')
      os.remove(isolated)

    # Now trigger as usual. You could look at run_exmaple_swarming_involved for
    # the involved way but use the short way here.

    common.note('Running %s on %s' % (isolated_sha1, options.swarming))
    cmd = [
      'swarming.py',
      'run',
      '--swarming', options.swarming,
      '--isolate-server', options.isolate_server,
      '--dimension', 'os', options.swarming_os,
      '--dimension', 'pool', 'default',
      '--task-name', options.task_name,
      isolated_sha1,
    ]
    if options.idempotent:
      cmd.append('--idempotent')
    if options.priority is not None:
      cmd.extend(('--priority', str(options.priority)))
    common.run(cmd, options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    # Propagate the child's exit code to our caller.
    return e.returncode
Example #19
0
def image_keypoints(im, x, y, w, h, max_nb=None, extra_params=''):
    """
    Runs SIFT (the keypoints detection and description only, no matching).

    It uses Ives Rey Otero's implementation published in IPOL:
    http://www.ipol.im/pub/pre/82/

    Args:
        im: path to the input image
        x, y, w, h: rectangular region of interest in the image
        max_nb (optional): maximal number of keypoints. If more keypoints are
            detected, those at smallest scales are discarded
        extra_params (optional): extra parameters to be passed to the sift
            binary

    Returns:
        path to the file containing the list of descriptors
    """
    keyfile = common.tmpfile('.txt')
    # add the --max-nb-pts cap only when the caller asked for one
    if max_nb:
        cmd = "sift_roi %s %d %d %d %d --max-nb-pts %d %s -o %s" % (
            im, x, y, w, h, max_nb, extra_params, keyfile)
    else:
        cmd = "sift_roi %s %d %d %d %d %s -o %s" % (im, x, y, w, h,
                                                    extra_params, keyfile)
    common.run(cmd)
    return keyfile
Example #20
0
def main():
    """Archive a script to the isolate server and run it locally.

    Uses run_isolated.py to download the archived tree into a temporary
    directory and execute it there.

    Returns:
        0 on success, or the failing subprocess's return code.
    """
    args = common.parse_args(use_isolate_server=True, use_swarming=False)
    # NOTE(review): `unicode` implies Python 2 — confirm before porting.
    tempdir = unicode(tempfile.mkdtemp(prefix=u'hello_world'))
    try:
        isolated_sha1 = common.archive(tempdir, args.isolate_server,
                                       args.verbose, args.which)

        common.note(
            'Downloading from %s and running in a temporary directory' %
            args.isolate_server)
        # separate cache dirs for isolated files and named caches
        cachei = os.path.join(tempdir, u'cachei')
        cachen = os.path.join(tempdir, u'cachen')
        common.run([
            'run_isolated.py',
            '--cache',
            cachei.encode('utf-8'),
            '--named-cache-root',
            cachen.encode('utf-8'),
            '--isolate-server',
            args.isolate_server,
            '--isolated',
            isolated_sha1,
            '--no-log',
            '--',
            args.which + u'.py',
            'Dear 💩',
            '${ISOLATED_OUTDIR}',
        ], args.verbose)
        return 0
    except subprocess.CalledProcessError as e:
        # propagate the child's exit code to our caller
        return e.returncode
    finally:
        # always clean up the temporary directory
        shutil.rmtree(tempdir)
Example #21
0
def loop_zhang(F, w, h):
    """
    Computes rectifying homographies from a fundamental matrix, with Loop-Zhang.

    Args:
        F: 3x3 numpy array containing the fundamental matrix
        w, h: images size. The two images are supposed to have same size

    Returns:
        The two rectifying homographies.

    The rectifying homographies are computed using the Pascal Monasse binary
    named rectify_mindistortion. It uses the Loop-Zhang algorithm.
    """
    # temporary files: the input matrix and the two output homographies
    Ffile = common.tmpfile('.txt')
    Haf = common.tmpfile('.txt')
    Hbf = common.tmpfile('.txt')
    common.matrix_write(Ffile, F)
    common.run('rectify_mindistortion %s %d %d %s %s > /dev/null' %
               (Ffile, w, h, Haf, Hbf))
    Ha = common.matrix_read(Haf, size=(3, 3))
    Hb = common.matrix_read(Hbf, size=(3, 3))

    # check if both the images are rotated
    a = does_this_homography_change_the_vertical_direction(Ha)
    b = does_this_homography_change_the_vertical_direction(Hb)
    if a and b:
        # compose both homographies with a 180 degree rotation about the
        # origin to restore the vertical direction
        R = np.array([[-1, 0, 0], [0, -1, 0], [0, 0, 1]])
        Ha = np.dot(R, Ha)
        Hb = np.dot(R, Hb)
    return Ha, Hb
Example #22
0
def set_repeat_key(state):
    """Toggle keyboard key-repeat via gconf; returns the boolean new state.

    Also toggles the gnome a11y keyboard plugin, but only when that plugin
    was enabled the first time this function ran (its original value is
    cached in the module-level GNOME_A11Y_DEFAULT_STATE).
    """
    global GNOME_A11Y_DEFAULT_STATE
    # Cache the session's original a11y-keyboard state on first use.
    if GNOME_A11Y_DEFAULT_STATE is None:
        GNOME_A11Y_DEFAULT_STATE = common.value_as_bool(common.run([
            'gconftool',
            '--get', '/apps/gnome_settings_daemon/plugins/a11y-keyboard/active'
        ])[0])

    state = common.value_as_bool(state)
    gconf_bool = str(state).lower()
    # Only enable/disable a11y if it was previously enabled.
    if GNOME_A11Y_DEFAULT_STATE:
        common.run([
            'gconftool',
            '--set', '/apps/gnome_settings_daemon/plugins/a11y-keyboard/active',
            '--type', 'bool',
            gconf_bool
        ])
    common.run([
        'gconftool',
        '--set', '/desktop/gnome/peripherals/keyboard/repeat',
        '--type', 'bool',
        gconf_bool
    ])
    return state
Example #23
0
def main():
    """Archive hello_world to a local hashtable and run it from there.

    Everything happens inside a temporary directory which is removed at the
    end, whatever happens.

    Returns:
        0 on success, or the failing subprocess's return code.
    """
    options = common.parse_args(use_isolate_server=False, use_swarming=False)
    tempdir = tempfile.mkdtemp(prefix='hello_world')
    try:
        # All the files are put in a temporary directory. This is optional and
        # simply done so the current directory doesn't have the following files
        # created:
        # - hello_world.isolated
        # - hello_world.isolated.state
        # - cache/
        # - hashtable/
        cachedir = os.path.join(tempdir, 'cache')
        hashtabledir = os.path.join(tempdir, 'hashtable')
        isolateddir = os.path.join(tempdir, 'isolated')
        isolated = os.path.join(isolateddir, 'hello_world.isolated')

        os.mkdir(isolateddir)

        common.note('Archiving to %s' % hashtabledir)
        # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
        common.run([
            'isolate.py',
            'hashtable',
            '--isolate',
            os.path.join('payload', 'hello_world.isolate'),
            '--isolated',
            isolated,
            '--outdir',
            hashtabledir,
            '--config-variable',
            'OS',
            'Yours',
        ], options.verbose)

        common.note(
            'Running the executable in a temporary directory from the hash table'
        )
        # the hash of the .isolated file identifies the tree to run
        with open(isolated, 'rb') as f:
            isolated_sha1 = hashlib.sha1(f.read()).hexdigest()
        common.run(
            [
                'run_isolated.py',
                '--cache',
                cachedir,
                '--indir',
                hashtabledir,
                '--hash',
                isolated_sha1,
                # TODO(maruel): Should not require this.
                '--namespace',
                'default',
                '--no-log',
            ],
            options.verbose)
        return 0
    except subprocess.CalledProcessError as e:
        # propagate the child's exit code to our caller
        return e.returncode
    finally:
        shutil.rmtree(tempdir)
Example #24
0
def galaxy_install(install_path):
    """Install ansible-galaxy role requirements, base and (optional) custom."""
    install_tmpl = 'ansible-galaxy install -f -r "%s" -p "%s"'
    common.mkdir(os.path.join(install_path, 'roles'))
    common.run(install_tmpl % (os.path.join(install_path, 'requirements.yml'),
                               common.roles_path))
    # custom requirements are optional; install them only when present
    if os.path.exists(custom_requirements_path):
        if not os.path.exists(custom_roles_path):
            common.mkdir(custom_roles_path)
        common.run(install_tmpl % (custom_requirements_path,
                                   custom_roles_path))
Example #25
0
 def _create_users(self, users):
     """Create each name in *users* as a local account.

     On Linux, uses ``useradd`` with a bash login shell and a home
     directory; on Windows, shells out to ``net user /add``. Other
     platforms are silently skipped.
     """
     for user in users:
         common.info("Adding {}...".format(user))
         if "Linux" in plugin.get_os():
             common.run("useradd -s /bin/bash -m {}".format(user))
             common.info("Added user {}".format(user))
         elif "Windows" in plugin.get_os():
             # NOTE(review): unlike the Linux branch, no "Added user"
             # confirmation is logged here — confirm this is intentional
             os.system("net user \"{}\" /add".format(user))
Example #26
0
def main():
    """Trigger a sharded hello_world task on Swarming, then collect its output.

    Returns 0 on success, or the child process's return code on failure.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=True)
    try:
        tempdir = tempfile.mkdtemp(prefix=u'hello_world')
        try:
            _, hashval = common.isolate(tempdir, options.isolate_server,
                                        options.swarming_os, options.verbose)

            task_json = os.path.join(tempdir, 'task.json')
            common.note('Running on %s' % options.swarming)
            trigger_cmd = [
                'swarming.py', 'trigger',
                '--swarming', options.swarming,
                '--isolate-server', options.isolate_server,
                '--dimension', 'os', options.swarming_os,
                '--task-name', options.task_name,
                '--dump-json', task_json,
                '--isolated', hashval,
                '--shards', '2',
            ]
            if options.idempotent:
                trigger_cmd.append('--idempotent')
            if options.priority is not None:
                trigger_cmd.extend(('--priority', str(options.priority)))
            trigger_cmd.extend(('--', '${ISOLATED_OUTDIR}'))
            common.run(trigger_cmd, options.verbose)

            common.note('Getting results from %s' % options.swarming)
            common.run([
                'swarming.py', 'collect',
                '--swarming', options.swarming,
                '--json', task_json,
                '--task-output-dir', 'example_result',
            ], options.verbose)
            # Dump every collected output file to stdout.
            for root, _, files in os.walk('example_result'):
                for fname in files:
                    path = os.path.join(root, fname)
                    with open(path, 'rb') as fh:
                        print('%s content:' % path)
                        print(fh.read())
            return 0
        finally:
            shutil.rmtree(tempdir)
    except subprocess.CalledProcessError as e:
        return e.returncode
Example #27
0
    def _set_shadow(self):
        """Harden system accounts found in /etc/passwd.

        Two shell pipelines (run via common.run_full) use UID_MIN from
        /etc/login.defs to identify system accounts, give them a non-login
        shell, and lock the non-root ones; root's primary group id is then
        forced to 0.
        """
        # sets all system accounts to a no log on shell
        common.run_full("awk -F: '($1!=\"root\" && $1!=\"sync\" && $1!=\"shutdown\" && $1!=\"halt\" && $1!~/^\\+/ && $3<'\"$(awk '/^\\s*UID_MIN/{print $2}' /etc/login.defs)\"' && $7!=\"'\"$(which nologin)\"'\" && $7!=\"/bin/false\") {print $1}' /etc/passwd | while read user; do usermod -s $(which nologin) $user; done")

        # locks all non root system accounts
        common.run_full("awk -F: '($1!=\"root\" && $1!~/^\\+/ && $3<'\"$(awk '/^\\s*UID_MIN/{print $2}' /etc/login.defs)\"') {print $1}' /etc/passwd | xargs -I '{}' passwd -S '{}' | awk '($2!=\"L\" && $2!=\"LK\") {print $1}' | while read user; do usermod -L $user; done")

        # sets root group uid to 0
        common.run("usermod -g 0 root")
Example #28
0
def run_multidelta(check, level, files):
  """Flatten each file to `level`, verify it still passes `check`, then
  delta-reduce every file.

  A pristine copy of each input is kept at `<file>-orig` before flattening.

  NOTE(review): Python 2 syntax (print statement); `topformflat` and
  `run_delta` are presumably defined elsewhere in this module -- confirm.
  """
  for f in files:
    shutil.copy(f, f + '-orig')
    print 'file %s at level %s' % (f, level)
    topformflat (f, level, True)
    common.run([check, f], stdout = None)

  for f in files:
    run_delta (check, f);
Example #29
0
    def _set_password_lockout(self):
        """Configure PAM password lockout via pam_tally2 in /etc/pam.d/common-auth.

        Warns (instead of editing) when fedora-style pam_faillock files or
        rules are present.  Inserts the tally rule just before the
        pam_permit.so line, resets the current user's tally, and aliases
        sudo so the tally is reset on every sudo call (prevents locking
        out the administrator).
        """
        # Fedora/RHEL keep auth rules in separate files; flag for manual review.
        paths = ["/etc/pam.d/system-authand", "/etc/pam.d/password-auth"]
        for path in paths:
            if os.path.isfile(path):
                common.warn(
                    "{} exists, needs checking (password lockout)".format(
                        path))

        path = "/etc/pam.d/common-auth"
        common.backup(path)

        with open(path) as in_file:
            lines = in_file.read().split("\n")

        permit_index = None

        text = """auth required pam_tally2.so onerr=fail audit silent deny=5 unlock_time=900"""

        for index, line in enumerate(lines):
            if "pam_faillock.so" in line:
                common.warn(
                    "Found faillock, needs checking (password lockout)")
            elif "pam_permit.so" in line:
                permit_index = index

        # BUG FIX: the None check must come before indexing -- the original
        # evaluated lines[permit_index - 1] first, raising TypeError whenever
        # no pam_permit.so line was found.
        if permit_index is None:
            common.error("Error {} not formatted as expected".format(path))
            return
        if text == lines[permit_index - 1]:
            common.debug("Tally already exists")
        else:
            lines.insert(permit_index, text)

        with open(path, "w") as out_file:
            out_file.write("\n".join(lines))

        # Ensure user doesn't get locked out
        user = common.input_text("Enter current user")
        common.run("pam_tally2 -u {} --reset".format(user))
        # Everytime they use sudo the tally gets reset
        with open("/home/{}/.bashrc".format(user), "a") as out_file:
            out_file.write(
                "\nalias sudo='sudo pam_tally2 -u {} --reset >/dev/null; sudo '\n"
                .format(user))
        common.reminder(
            "You need to reload .bashrc in all currently used terminals: source ~/.bashrc"
        )

        common.debug("Set Password Lockout")
Example #30
0
def colorize(crop_panchro, im_color, x, y, zoom, out_colorized, rmin,rmax):
    """
    Colorizes a Pleiades gray crop using low-resolution color information.

    Args:
        crop_panchro: path to the panchro (ie gray) crop
        im_color: path to the full color image (tiff or jp2)
        x, y: coordinates of the top-left corner of crop_panchro, in the full
            Pleiade image frame.
        zoom: subsampling zoom-factor that was used to generate crop_panchro
        out_colorized: path to the output file
        rmin, rmax: intensity bounds passed to image_rescaleintensities for
            small outputs -- presumably derived from the panchro histogram;
            TODO confirm at the call site.
    """
    # get a translated and zoomed crop from the color image. It has to be
    # sampled on exactly the same grid as the panchro crop.
    # To do that we compose the translation + zoom transformation with a 4x
    # zoom (because color pleiades images have 4x lower resolution).  There is
    # also a small horizontal translation (4 pixels at the panchro resolution)
    # The resulting transformation is the composition of:
    #   translation (-1 - x/4, -y/4)
    #   zoom 4/z
    w, h = common.image_size_tiffinfo(crop_panchro)
    # integer crop window in the 4x-subsampled color frame, rounded outward
    xx = np.floor(x / 4.0)
    yy = np.floor(y / 4.0)
    ww = np.ceil((x + w * zoom) / 4.0) - xx
    hh = np.ceil((y + h * zoom) / 4.0) - yy
    crop_ms = common.image_crop_TIFF(im_color, xx, yy, ww, hh)
    crop_ms = common.image_zoom_gdal(crop_ms, zoom/4.0)
    # crop_ms = common.image_safe_zoom_fft(crop_ms, zoom/4.0)

    # crop the crop_ms image to remove the extra-pixels due to the integer crop
    # followed by zoom
    x0 = max(0,x - 4*xx)
    y0 = max(0,y - 4*yy)
    crop_ms = common.image_crop_TIFF(crop_ms, x0, y0, w, h)
    assert(common.image_size_tiffinfo(crop_panchro) ==
           common.image_size_tiffinfo(crop_ms))

    # convert rgbi to rgb
    rgb = common.rgbi_to_rgb(crop_ms, out=None, tilewise=True)

    # blend intensity and color to obtain the result
    # each channel value r, g or b is multiplied by 3*y / (r+g+b), where y
    # denotes the panchro intensity
    tmp = common.tmpfile('.tif')
    pcmd = "dup split + + / * 3 *"
    # NOTE(review): mutates the process-wide TMPDIR; this affects any other
    # code using temp files after this call.
    os.environ['TMPDIR'] = os.path.join(cfg['temporary_dir'], 'meta/')
    cmd = 'tiffu meta \"plambda ^ ^1 \\\"%s\\\" -o @\" %s %s -- %s' % (pcmd,
                                                                      crop_panchro,
                                                                      rgb, tmp)
    common.run(cmd)
    if w * h > 25e6:  # image larger than 5000 x 5000 pixels
        common.image_qauto_otb(out_colorized, tmp)
    else:
        #common.image_qauto(tmp, out_colorized)
        common.image_rescaleintensities(tmp, out_colorized, rmin, rmax)
    return
Example #31
0
def colorize(crop_panchro, im_color, x, y, zoom, out_colorized, rmin, rmax):
    """
    Colorizes a Pleiades gray crop using low-resolution color information.

    Args:
        crop_panchro: path to the panchro (ie gray) crop
        im_color: path to the full color image (tiff or jp2)
        x, y: coordinates of the top-left corner of crop_panchro, in the full
            Pleiade image frame.
        zoom: subsampling zoom-factor that was used to generate crop_panchro
        out_colorized: path to the output file
        rmin, rmax: intensity bounds passed to image_rescaleintensities for
            small outputs -- presumably derived from the panchro histogram;
            TODO confirm at the call site.
    """
    # get a translated and zoomed crop from the color image. It has to be
    # sampled on exactly the same grid as the panchro crop.
    # To do that we compose the translation + zoom transformation with a 4x
    # zoom (because color pleiades images have 4x lower resolution).  There is
    # also a small horizontal translation (4 pixels at the panchro resolution)
    # The resulting transformation is the composition of:
    #   translation (-1 - x/4, -y/4)
    #   zoom 4/z
    w, h = common.image_size_tiffinfo(crop_panchro)
    # integer crop window in the 4x-subsampled color frame, rounded outward
    xx = np.floor(x / 4.0)
    yy = np.floor(y / 4.0)
    ww = np.ceil((x + w * zoom) / 4.0) - xx
    hh = np.ceil((y + h * zoom) / 4.0) - yy
    crop_ms = common.image_crop_tif(im_color, xx, yy, ww, hh)
    crop_ms = common.image_zoom_gdal(crop_ms, zoom / 4.0)
    # crop_ms = common.image_safe_zoom_fft(crop_ms, zoom/4.0)

    # crop the crop_ms image to remove the extra-pixels due to the integer crop
    # followed by zoom
    x0 = max(0, x - 4 * xx)
    y0 = max(0, y - 4 * yy)
    crop_ms = common.image_crop_tif(crop_ms, x0, y0, w, h)
    assert (common.image_size_tiffinfo(crop_panchro) ==
            common.image_size_tiffinfo(crop_ms))

    # convert rgbi to rgb
    rgb = common.rgbi_to_rgb(crop_ms, out=None, tilewise=True)

    # blend intensity and color to obtain the result
    # each channel value r, g or b is multiplied by 3*y / (r+g+b), where y
    # denotes the panchro intensity
    tmp = common.tmpfile('.tif')
    pcmd = "dup split + + / * 3 *"
    # NOTE(review): mutates the process-wide TMPDIR; this affects any other
    # code using temp files after this call.
    os.environ['TMPDIR'] = os.path.join(cfg['temporary_dir'], 'meta/')
    cmd = 'tiffu meta \"plambda ^ ^1 \\\"%s\\\" -o @\" %s %s -- %s' % (
        pcmd, crop_panchro, rgb, tmp)
    common.run(cmd)
    if w * h > 25e6:  # image larger than 5000 x 5000 pixels
        common.image_qauto_otb(out_colorized, tmp)
    else:
        #common.image_qauto(tmp, out_colorized)
        common.image_rescaleintensities(tmp, out_colorized, rmin, rmax)
    return
Example #32
0
def mosaic_gdal(fout, w, h, list_tiles, tw, th, ov):
    """
    Compose several tiles of the same size into a bigger image (using gdal vrt)

    Args:
        fout: path to the output image
        w, h: output image dimensions
        list_tiles: list containing paths to the input tiles, in row-major
            order (missing tiles may simply not exist on disk)
        tw, th: dimensions of a tile (they must all have the same dimensions)
        ov: overlap between tiles (in pixels)

    Returns:
        nothing
    """
    N = len(list_tiles)
    ntx = np.ceil(float(w - ov) / (tw - ov)).astype(int)
    nty = np.ceil(float(h - ov) / (th - ov)).astype(int)
    assert ntx * nty == N

    vrtfilename = fout + '.vrt'

    # FIX: write the VRT with a context manager so the handle is closed even
    # if an exception occurs (the original leaked the open file on error).
    with open(vrtfilename, 'w') as vrtfile:
        vrtfile.write("<VRTDataset rasterXSize=\"%i\" rasterYSize=\"%i\">\n" % (w,
                                                                                h))
        vrtfile.write("\t<VRTRasterBand dataType=\"Float32\" band=\"1\">\n")
        vrtfile.write("\t\t<ColorInterp>Gray</ColorInterp>\n")

        # loop over all the tiles
        for j in range(nty):
            for i in range(ntx):
                x0 = i * (tw - ov)
                y0 = j * (th - ov)
                x1 = min(x0 + tw, w)  # clip the last row/column to the mosaic
                y1 = min(y0 + th, h)
                f = list_tiles[j * ntx + i]
                if os.path.isfile(f):
                    # remove first dir name from path: the VRT references each
                    # tile as <parent-dir>/<basename>, relative to the VRT
                    tile_fname = os.path.join(os.path.split(os.path.dirname(f))[1],
                                              os.path.basename(f))
                    vrtfile.write("\t\t<SimpleSource>\n")
                    vrtfile.write("\t\t\t<SourceFilename relativeToVRT=\"1\">%s</SourceFilename>\n" % tile_fname)
                    vrtfile.write("\t\t\t<SourceBand>1</SourceBand>\n")
                    vrtfile.write("\t\t\t<SrcRect xOff=\"%i\" yOff=\"%i\" xSize=\"%i\" ySize=\"%i\"/>\n" % (0, 0, x1-x0, y1-y0))
                    vrtfile.write("\t\t\t<DstRect xOff=\"%i\" yOff=\"%i\" xSize=\"%i\" ySize=\"%i\"/>\n" % (x0, y0, x1-x0, y1-y0))
                    vrtfile.write("\t\t</SimpleSource>\n")

        vrtfile.write("\t</VRTRasterBand>\n")
        vrtfile.write("</VRTDataset>\n")

    common.run('gdal_translate %s %s' % (vrtfilename, fout))

    return
Example #33
0
def main():
    """Check the hello_world isolate locally, then run it remotely via swarming.

    Returns 0 on success, or the child process's return code on failure.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=True)
    tempdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
        # Keep hello_world.isolated and hello_world.isolated.state out of the
        # current directory by working in a throwaway temp dir.
        isolated = os.path.join(tempdir, 'hello_world.isolated')

        common.note(
            'Creating hello_world.isolated. Note that this doesn\'t archives '
            'anything.')
        common.run([
            'isolate.py', 'check',
            '--isolate', os.path.join('payload', 'hello_world.isolate'),
            '--isolated', isolated,
            '--config-variable', 'OS', options.swarming_os,
        ], options.verbose)

        common.note('Running the job remotely. This:\n'
                    ' - archives to %s\n'
                    ' - runs and collect results via %s' %
                    (options.isolate_server, options.swarming))
        run_cmd = [
            'swarming.py', 'run',
            '--swarming', options.swarming,
            '--isolate-server', options.isolate_server,
            '--dimension', 'os', options.swarming_os,
            '--task-name', options.task_name,
            isolated,
        ]
        if options.idempotent:
            run_cmd.append('--idempotent')
        if options.priority is not None:
            run_cmd.extend(('--priority', str(options.priority)))
        common.run(run_cmd, options.verbose)
        return 0
    except subprocess.CalledProcessError as e:
        return e.returncode
    finally:
        shutil.rmtree(tempdir)
Example #34
0
def apiRun(params):
	"""Run the RESNET50 tflite benchmark with a backend chosen by precision.

	Args:
		params: raw parameter dict; preprocessed by common.pre.

	Returns:
		(params, results) on success, (None, None) when preprocessing fails,
		or None (implicitly, as before) for an unsupported precision.
	"""
	model = models.RESNET50
	params = common.pre(model, params)
	# FIX: identity test for None instead of the `== None` anti-pattern.
	if params is None:
		return None, None
	precision = params[const.PRECISION]
	if precision == const.FP32:
		net = ResNet50(os.path.join(paths.MODELS, 'tensorflow_lite', model, 'converted_model.tflite'), params[const.HARDWARE])
		return params, common.run(model, net, params)
	if precision == const.INT8:
		net = ResNet50(os.path.join(paths.MODELS, 'tensorflow_lite_quantised', model, 'quantised_PTIQ.tflite'), params[const.HARDWARE])
		return params, common.run(model, net, params)
Example #35
0
def erosion(out, msk, radius):
    """Erode the accepted regions of a mask, i.e. eliminate more pixels.

    Args:
        out: path to the output mask image file
        msk: path to the input mask image file
        radius (in pixels): size of the disk used for the erosion; values
            below 2 are a no-op
    """
    if radius < 2:
        return
    common.run('morsi disk%d erosion %s %s' % (int(radius), msk, out))
Example #36
0
def galaxy_install(install_path):
    """Install the Ansible Galaxy roles listed in install_path/requirements.yml,
    then any custom requirements file that exists alongside them."""
    common.mkdir(os.path.join(install_path, 'roles'))
    requirements = os.path.join(install_path, 'requirements.yml')
    common.run('ansible-galaxy install -f -r "%s" -p "%s"' %
               (requirements, common.roles_path))
    if not os.path.exists(custom_requirements_path):
        return
    if not os.path.exists(custom_roles_path):
        common.mkdir(custom_roles_path)
    common.run('ansible-galaxy install -f -r "%s" -p "%s"' %
               (custom_requirements_path, custom_roles_path))
Example #37
0
def download(config):
	"""Download config['url'] with aria2c, verifying an optional .sha1 checksum,
	and unzip it when the file is a .zip.

	Args:
		config: dict with keys 'url', 'name' (output filename, '' to derive
			from the URL), 'force' (re-download even if present) and 'unzip'
			(force re-extraction).

	Returns:
		The unzipped filename for .zip downloads, otherwise the filename.

	NOTE(review): Python 2 syntax (print statements); `run` comes from this
	module's helpers.
	"""
	assert config.get('url', None), 'No URL specified'

	# Follow redirects to learn the final URL; the body is not consumed.
	r = requests.get(config['url'], allow_redirects=True, stream=True)
	url = r.url
	r.close()

	filename = os.path.basename(url)
	name, ext = os.path.splitext(filename)
	dirname = os.path.dirname(url)

	def __download():
		run('aria2c -x 16 {} -o {}'.format(url, config['name']))

	if config['name'] == '':
		config['name'] = filename
	print 'filename={}'.format(filename)
	exists = os.path.exists(config['name'])
	if not exists:
		print 'Downloading {}'.format(url)
		__download()
	elif exists:
		if config['force']:
			print 'Force downloading {}'.format(url)
		else:
			# Compare the remote .sha1 sidecar against the local file's digest.
			checksum_url = os.path.join(dirname, filename + '.sha1')
			r = requests.get(checksum_url)
			url_checksum = r.text.strip().split(' ')[0]

			sha1 = hashlib.sha1()
			with open(config['name'], 'rb') as f:
				while True:
					data = f.read(4096)
					if not data:
						break
					sha1.update(data)
			file_checksum = sha1.hexdigest()
			if url_checksum != file_checksum:
				print '''Checksums don't match..Downloading ...'''
				__download()
			else:
				print 'Checksums match. Not downloading'

	# Now handle unzip
	if ext == '.zip':
		ret, stdout, stderr = run('zipinfo -1 {}'.format(filename), stdout=subprocess.PIPE)
		unzipped_filename = stdout.strip()
		if not os.path.exists(unzipped_filename) or config['unzip']:
			ret, stdout, stderr = run('unzip {}'.format(filename))
		return unzipped_filename
	else:
		return filename
Example #38
0
async def fortune(self, chan, src, msg, args, opts):
    """
    :name: fortune
    :hook: cmd
    :help: get a fortune
    :args: @search:str
    :aliases:
    """
    # Docstring above is plugin metadata and must stay unchanged.
    fortune_cmd = ["fortune", "-sm", msg] if len(msg) > 0 else ["fortune", "-s"]
    fort = common.run(fortune_cmd, "")
    await out.msg(self, modname, chan, [fort])
Example #39
0
def cloud_water_image_domain(out, w, h, H, rpc, roi_gml=None, cld_gml=None):
    """
    Computes a mask for pixels masked by clouds, water, or out of image domain.

    Args:
        out: path to the output image file.
        w, h: (w, h) are the dimensions of the output image mask.
        H: 3x3 numpy array representing the homography that transforms the
            original full image into the rectified tile.
        rpc: paths to the xml file containing the rpc coefficients of the image.
            RPC model is used with SRTM data to derive the water mask.
        roi_gml (optional, default None): path to a gml file containing a mask
            defining the area contained in the full image.
        cld_gml (optional, default None): path to a gml file containing a mask
            defining the areas covered by clouds.

    Returns:
        True if the tile is completely masked, False otherwise.
    """
    # put the coefficients of the homography in a string
    hij = ' '.join(['%f' % x for x in H.flatten()])

    # image domain mask
    if roi_gml is None:  # initialize to 255
        common.run('plambda zero:%dx%d "x 255 +" -o %s' % (w, h, out))
    else:
        common.run('cldmask %d %d -h "%s" %s %s' % (w, h, hij, roi_gml, out))
        if common.is_image_black(out):  # if we are already out, return
            return True

    # cloud mask
    if cld_gml is not None:
        cld_msk = common.tmpfile('.png')
        common.run('cldmask %d %d -h "%s" %s %s' % (w, h, hij, cld_gml,
                                                    cld_msk))
        # cld msk has to be inverted.
        # TODO: add flag to the cldmask binary, to avoid using read/write the
        # msk one more time for this
        common.run('plambda %s "255 x -" -o %s' % (cld_msk, cld_msk))

        intersection(out, out, cld_msk)

    # water mask
    water_msk = common.tmpfile('.png')
    env = os.environ.copy()
    # watermask presumably reads its SRTM tiles from this cache dir -- confirm
    env['SRTM4_CACHE'] = cfg['srtm_dir']
    common.run('watermask %d %d -h "%s" %s %s' % (w, h, hij, rpc, water_msk),
               env)
    intersection(out, out, water_msk)

    return common.is_image_black(out)
Example #40
0
def intersection(out, in1, in2):
    """Write to `out` the pixelwise intersection of binary masks `in1` and `in2`.

    Masks hold 0 (rejection) or 255 (acceptance).  A pixel rejected in either
    input is rejected in the output, so the intersection is a pixelwise
    product (one operand divided by 255 to keep values in range).
    """
    cmd = 'plambda {} {} "x y 255 / *" -o {}'.format(in1, in2, out)
    common.run(cmd)
Example #41
0
    def get_commits_between(base_version, new_version):
        """Return a Commit for every commit in merge-base..new, oldest first.

        Each commit starts categorized as 'Uncategorized'/'Untopiced'.
        An empty range yields [].
        """
        cmd = f'git merge-base {base_version} {new_version}'
        rc, merge_base, _ = run(cmd)
        assert rc == 0

        # Returns a list of something like
        # b33e38ec47 Allow a higher-precision step type for Vec256::arange (#34555)
        cmd = f'git log --reverse --oneline {merge_base}..{new_version}'
        rc, commits, _ = run(cmd)
        assert rc == 0

        # BUG FIX: when the range is empty, `commits` is '' and the original
        # zip(*[...]) unpacking raised ValueError; skip blank lines instead.
        log_lines = [line for line in commits.split('\n') if line.strip()]
        pairs = (line.split(' ', 1) for line in log_lines)
        return [Commit(commit_hash, 'Uncategorized', 'Untopiced', title)
                for commit_hash, title in pairs]
Example #42
0
def edit(editor):
    """Open the osxstrap config file in `editor`.

    When editor == 'webapp', the config is base64-encoded and passed to the
    osxstrap web editor via the URL fragment; any other value is treated as
    an editor command to run on the config file.

    NOTE(review): Python 2 syntax (print statement, b64encode on str).
    """
    if editor == 'webapp':
        config_file_path = os.path.join(config_path, config_filename)
        if os.path.exists(config_file_path):
            f = open(config_file_path)
            config_string = f.read()
            f.close()
            encoded_config_string = base64.b64encode(config_string)
            print encoded_config_string
            common.run('open https://osxstrap.github.io#%s' % encoded_config_string)
        else:
            output.abort("%s does not exist." % config_file_path)
    else:
        common.run('%s %s' % (editor, os.path.join(config_path, config_filename)))
def main():
  """Isolate hello_world into a local hash table, then run it via run_isolated.

  Returns 0 on success, or the child process's return code on failure.
  """
  options = common.parse_args(use_isolate_server=False, use_swarming=False)
  tempdir = tempfile.mkdtemp(prefix='hello_world')
  try:
    # Work inside a throwaway directory so the current directory is not
    # polluted with hello_world.isolated(.state), cache/ and hashtable/.
    cachedir = os.path.join(tempdir, 'cache')
    hashtabledir = os.path.join(tempdir, 'hashtable')
    isolateddir = os.path.join(tempdir, 'isolated')
    isolated = os.path.join(isolateddir, 'hello_world.isolated')

    os.mkdir(isolateddir)

    common.note('Archiving to %s' % hashtabledir)
    # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
    common.run([
        'isolate.py', 'hashtable',
        '--isolate', os.path.join('payload', 'hello_world.isolate'),
        '--isolated', isolated,
        '--outdir', hashtabledir,
        '--config-variable', 'OS', 'Yours',
    ], options.verbose)

    common.note(
        'Running the executable in a temporary directory from the hash table')
    with open(isolated, 'rb') as f:
      digest = hashlib.sha1(f.read()).hexdigest()
    common.run([
        'run_isolated.py',
        '--cache', cachedir,
        '--indir', hashtabledir,
        '--hash', digest,
        # TODO(maruel): Should not require this.
        '--namespace', 'default',
        '--no-log',
    ], options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
  """Trigger a sharded hello_world task on Swarming, then collect its output.

  Returns 0 on success, or the child process's return code on failure.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    tempdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
      _, hashval = common.isolate(
          tempdir, options.isolate_server, options.swarming_os, options.verbose)

      task_json = os.path.join(tempdir, 'task.json')
      common.note('Running on %s' % options.swarming)
      trigger_cmd = [
          'swarming.py', 'trigger',
          '--swarming', options.swarming,
          '--isolate-server', options.isolate_server,
          '--dimension', 'os', options.swarming_os,
          '--dimension', 'pool', 'default',
          '--task-name', options.task_name,
          '--dump-json', task_json,
          '--isolated', hashval,
          '--shards', '2',
      ]
      if options.idempotent:
        trigger_cmd.append('--idempotent')
      if options.priority is not None:
        trigger_cmd.extend(('--priority', str(options.priority)))
      trigger_cmd.extend(('--', '${ISOLATED_OUTDIR}'))
      common.run(trigger_cmd, options.verbose)

      common.note('Getting results from %s' % options.swarming)
      common.run([
          'swarming.py', 'collect',
          '--swarming', options.swarming,
          '--json', task_json,
          '--task-output-dir', 'example_result',
      ], options.verbose)
      # Dump every collected output file to stdout.
      for root, _, files in os.walk('example_result'):
        for fname in files:
          path = os.path.join(root, fname)
          with open(path, 'rb') as fh:
            print('%s content:' % path)
            print(fh.read())
      return 0
    finally:
      shutil.rmtree(tempdir)
  except subprocess.CalledProcessError as e:
    return e.returncode
Example #45
0
def register_heights(im1, im2):
    """
    Affine registration of heights.

    Args:
        im1: first height map
        im2: second height map, to be registered on the first one

    Returns
        path to the registered second height map

    NOTE(review): Python 2 syntax (print statements).
    """
    # remove high frequencies with a morphological zoom out
    im1_low_freq = common.image_zoom_out_morpho(im1, 4)
    im2_low_freq = common.image_zoom_out_morpho(im2, 4)

    # first read the images and store them as numpy 1D arrays, removing all the
    # nans and inf
    i1 = piio.read(im1_low_freq).ravel() #np.ravel() gives a 1D view
    i2 = piio.read(im2_low_freq).ravel()
    # keep only the pixels that are finite in both maps
    ind = np.logical_and(np.isfinite(i1), np.isfinite(i2))
    h1 = i1[ind]
    h2 = i2[ind]

    # for debug
    print np.shape(i1)
    print np.shape(h1)

#    # 1st option: affine
#    # we search the (u, v) vector that minimizes the following sum (over
#    # all the pixels):
#    #\sum (im1[i] - (u*im2[i]+v))^2
#    # it is a least squares minimization problem
#    A = np.vstack((h2, h2*0+1)).T
#    b = h1
#    z = np.linalg.lstsq(A, b)[0]
#    u = z[0]
#    v = z[1]
#
#    # apply the affine transform and return the modified im2
#    out = common.tmpfile('.tif')
#    common.run('plambda %s "x %f * %f +" > %s' % (im2, u, v, out))

    # 2nd option: translation only
    # shift im2 by the mean height difference over the valid pixels
    v = np.mean(h1 - h2)
    out = common.tmpfile('.tif')
    common.run('plambda %s "x %f +" -o %s' % (im2, v, out))

    return out
Example #46
0
def get_mounted_drives():
    """Parse the output of `mount` into a list of dicts with keys
    'device', 'dir', 'type' and 'options'.  Malformed lines are skipped.
    """
    info = common.run('mount')[0]
    drives = []
    for line in filter(len, info.split('\n')):
        line_parts = line.split(' ')

        if len(line_parts) < 6:
            debug('Malformed mount info line: %s' % line)
            continue

        drive = {}
        try:
            drive['options'] = line_parts[-1][1:-1]  # strip surrounding parens
            drive['type'] = line_parts[-2]
            # Mount dirs may contain spaces: rejoin everything after the
            # first ' /' separator.
            drive['dir'] = '/{0}'.format(
                ' /'.join(' '.join(line_parts[0:-3]).split(' /')[1:])
            )
            drive['device'] = ' '.join((
                line.split(drive['dir'])[0]
            ).split(' ')[:-2])
        # BUG FIX: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception while keeping the
        # skip-malformed-line behaviour.
        except Exception:
            print('Malformed mount info line: %s' % line)
            continue

        drives.append(drive)
    return drives
Example #47
0
def appraiser_worker(args):
    """Run ./appraiser on args.filename, writing its outputs under
    result/<filelist>/(new_)appraiser/<bam-basename>/."""
    variant = 'new_appraiser' if args.is_new_cufflinks else 'appraiser'
    resultfolder = 'result/' + os.path.basename(args.filelist) + '/' + variant + '/'
    if not os.path.exists(resultfolder):
        os.makedirs(resultfolder)

    f = args.filename.strip()
    resultsubfolder = resultfolder + '/' + os.path.basename(f.replace('.bam', ''))
    if not os.path.exists(resultsubfolder):
        os.makedirs(resultsubfolder)
    common.run('./appraiser -mamf {0}/sm -l {0}/log -qf {0}/qf -b {1}'.format(
        resultsubfolder, f.strip()))
Example #48
0
def get_gh_api():
    """Return an authenticated github3 session.

    Prompts once for the Github username and API token and caches them in
    the dotenv store; subsequent calls read them from the environment.
    """
    if not os.environ.get("OSXSTRAP_GITHUB_USERNAME"):
        username = click.prompt('Please enter your Github username')
        common.set_dotenv_key("OSXSTRAP_GITHUB_USERNAME", username)
    else:
        username = os.environ.get("OSXSTRAP_GITHUB_USERNAME")

    if not os.environ.get("OSXSTRAP_GITHUB_API_TOKEN"):
        token = click.prompt('Please create a Github access token by going to https://github.com/settings/tokens/new?scopes=gist&description=osxstrap+gist+cli and enter it here')
        common.set_dotenv_key("OSXSTRAP_GITHUB_API_TOKEN", token)
    else:
        token = os.environ.get("OSXSTRAP_GITHUB_API_TOKEN")

    gh = login(token=token)
    return gh
    # BUG FIX: removed the unreachable gist-creation code that followed this
    # return statement -- it could never execute.
Example #49
0
def cuffcompare_worker(args):
    """Run cuffcompare against the reference annotation for one cufflinks
    transcript set derived from args.filename."""
    base = os.path.basename(args.filelist)
    resultfolder = 'result/' + base + args.cuffcompare_folder
    cuffresultfolder = 'result/' + base + args.cufflinks_folder
    if not os.path.exists(resultfolder):
        os.makedirs(resultfolder)

    alias = os.path.basename(args.filename.strip().replace('.bam', ''))
    resultsubfolder = resultfolder + '/' + alias
    if not os.path.exists(resultsubfolder):
        os.makedirs(resultsubfolder)

    common.run('cuffcompare -r ' + reffile + ' -o ' +
               resultsubfolder + "/cuffcompare " +
               cuffresultfolder + '/' + alias + '/transcripts.gtf')
def main():
  """Check the hello_world isolate locally, then run it remotely via swarming.

  Returns 0 on success, or the child process's return code on failure.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  tempdir = tempfile.mkdtemp(prefix=u'hello_world')
  try:
    # Keep hello_world.isolated(.state) out of the current directory.
    isolated = os.path.join(tempdir, 'hello_world.isolated')

    common.note(
        'Creating hello_world.isolated. Note that this doesn\'t archives '
        'anything.')
    common.run([
        'isolate.py', 'check',
        '--isolate', os.path.join('payload', 'hello_world.isolate'),
        '--isolated', isolated,
        '--config-variable', 'OS', options.swarming_os,
    ], options.verbose)

    common.note(
        'Running the job remotely. This:\n'
        ' - archives to %s\n'
        ' - runs and collect results via %s' %
        (options.isolate_server, options.swarming))
    run_cmd = [
        'swarming.py', 'run',
        '--swarming', options.swarming,
        '--isolate-server', options.isolate_server,
        '--dimension', 'os', options.swarming_os,
        '--task-name', options.task_name,
        isolated,
    ]
    if options.idempotent:
      run_cmd.append('--idempotent')
    if options.priority is not None:
      run_cmd.extend(('--priority', str(options.priority)))
    common.run(run_cmd, options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
  """Archives the hello_world isolate, then downloads and runs it locally.

  Returns:
    0 on success, or the exit code of the failing child process.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=False)
  work_dir = tempfile.mkdtemp(prefix='hello_world')
  try:
    # Generating inside a temporary directory keeps these artifacts out of
    # the current directory:
    # - hello_world.isolated
    # - hello_world.isolated.state
    # - cache/
    cache_dir = os.path.join(work_dir, 'cache')
    isolated_dir = os.path.join(work_dir, 'isolated')
    isolated_path = os.path.join(isolated_dir, 'hello_world.isolated')

    os.mkdir(isolated_dir)

    common.note('Archiving to %s' % options.isolate_server)
    # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
    # Note that --config-variable OS is not specified and nobody cares.
    archive_cmd = [
        'isolate.py',
        'archive',
        '--isolate', os.path.join('payload', 'hello_world.isolate'),
        '--isolated', isolated_path,
        '--isolate-server', options.isolate_server,
    ]
    common.run(archive_cmd, options.verbose)

    common.note(
        'Downloading from %s and running in a temporary directory' %
        options.isolate_server)
    # The server addresses content by its SHA-1, so hash the generated
    # .isolated file to know what to fetch.
    with open(isolated_path, 'rb') as fh:
      digest = hashlib.sha1(fh.read()).hexdigest()
    run_cmd = [
        'run_isolated.py',
        '--cache', cache_dir,
        '--isolate-server', options.isolate_server,
        '--hash', digest,
        '--no-log',
    ]
    common.run(run_cmd, options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(work_dir)
Example #52
0
File: sift.py Project: jguinet/s2p
def keypoints_match(k1, k2, method='relative', sift_thresh=0.6, F=None,
                    model=None):
    """
    Find matches among two lists of sift keypoints.

    Args:
        k1, k2: paths to text files containing the lists of sift descriptors
        method (optional, default is 'relative'): flag ('relative' or
            'absolute') indicating whether to use absolute distance or relative
            distance
        sift_thresh (optional, default is 0.6): threshold for distance between SIFT
            descriptors. These descriptors are 128-vectors, whose coefficients
            range from 0 to 255, thus with absolute distance a reasonable value
            for this threshold is between 200 and 300. With relative distance
            (ie ratio between distance to nearest and distance to second
            nearest), the commonly used value for the threshold is 0.6.
        F (optional): affine fundamental matrix
        model (optional, default is None): model imposed by RANSAC when
            searching the set of inliers. If None all matches are considered as
            inliers.

    Returns:
        a numpy 2D array containing the list of inliers matches.
    """
    # compute matches
    mfile = common.tmpfile('.txt')
    cmd = "matching %s %s -%s %f -o %s" % (k1, k2, method, sift_thresh, mfile)
    if F is not None:
        # only five coefficients of an affine fundamental matrix are non-trivial
        fij = ' '.join(str(x) for x in [F[0, 2], F[1, 2], F[2, 0],
                                        F[2, 1], F[2, 2]])
        cmd = "%s -f \"%s\"" % (cmd, fij)
    common.run(cmd)

    # filter outliers with ransac
    # NOTE: string comparison must use '==', not 'is' -- identity comparison
    # against a literal is implementation-dependent and a SyntaxWarning in
    # modern CPython.
    if model == 'fundamental':
        common.run("ransac fmn 1000 .3 7 %s < %s" % (mfile, mfile))
    if model == 'homography':
        common.run("ransac hom 1000 1 4 /dev/null /dev/null %s < %s" % (mfile,
                                                                        mfile))
    if model == 'hom_fund':
        common.run("ransac hom 1000 2 4 /dev/null /dev/null %s < %s" % (mfile,
                                                                        mfile))
        common.run("ransac fmn 1000 .2 7 %s < %s" % (mfile, mfile))

    # return numpy array of matches
    return np.loadtxt(mfile)
Example #53
0
def topformflat(fname, level, write = False):
  """Flattens fname with the 'topformflat' tool at the given nesting level.

  Args:
    fname: path of the source file to flatten.
    level: nesting level passed through to topformflat.
    write: if True, the flattened lines are written back to fname.

  Returns:
    the number of non-blank lines in the flattened output.
  """
  clean(fname)
  # 'file(...)' is Python-2-only and leaked the handle; use a context-managed
  # open() so the descriptor is closed even if the subprocess fails.
  with open(fname) as src:
    data = common.run(['topformflat', str(level)], stdin=src)
  # drop blank lines, normalize each remaining line to end with exactly one \n
  lines = [x.strip() + '\n' for x in data.splitlines() if x.strip() != '']
  if write:
    write_all(fname, lines)
  return len(lines)
Example #54
0
def get_pids_using_file(file_name):
    """Returns the PIDs of processes that hold file_name open.

    Shells out to 'lsof -t', which prints one PID per line.

    Args:
        file_name: path (or path-like) of the file to inspect.

    Returns:
        list of ints, one per process using the file; empty if none.
    """
    output = common.run(['lsof', '-t', '-f', '--', str(file_name)])[0].strip()
    pids_list = []
    for token in output.split():
        try:
            pids_list.append(int(token))
        except ValueError:
            # A bare 'except:' previously swallowed everything (including
            # KeyboardInterrupt); only non-numeric tokens should be skipped.
            continue
    return pids_list
Example #55
0
def compute_point_cloud(cloud, heights, rpc, H=None, crop_colorized='',
                        off_x=None, off_y=None, ascii_ply=False,
                        with_normals=False):
    """
    Computes a color point cloud from a height map.

    Args:
        cloud: path to the output points cloud (ply format)
        heights: height map, sampled on the same grid as the crop_colorized
            image. In particular, its size is the same as crop_colorized.
        rpc: path to xml file containing RPC data for the current Pleiade image
        H (optional, default None): path to the file containing the coefficients
            of the homography transforming the coordinates system of the
            original full size image into the coordinates system of the crop we
            are dealing with.
        crop_colorized (optional, default ''): path to a colorized crop of a
            Pleiades image
        off_{x,y} (optional, default None): coordinates of the point we want to
            use as origin in the local coordinate system of the computed cloud
        ascii_ply (optional, default false): boolean flag to tell if the output
            ply file should be encoded in plain text (ascii).
        with_normals (optional, default false): boolean flag to ask colormesh
            to also output normals.
    """
    hij = " ".join([str(x) for x in np.loadtxt(H).flatten()]) if H else ""
    asc = "--ascii" if ascii_ply else ""
    nrm = "--with-normals" if with_normals else ""
    command = "colormesh %s %s %s %s -h \"%s\" %s %s" % (cloud, heights, rpc,
                                                         crop_colorized, hij,
                                                         asc, nrm)
    # Test against None, not truthiness: an offset of 0 is a legitimate
    # origin coordinate and used to be silently dropped by 'if off_x:'.
    if off_x is not None:
        command += " --offset_x %d" % off_x
    if off_y is not None:
        command += " --offset_y %d" % off_y
    common.run(command)

    # if LidarViewer is installed, convert the point cloud to its format
    # this is useful for huge point clouds
    if crop_colorized and common.which('LidarPreprocessor'):
        tmp = cfg['temporary_dir']
        nthreads = multiprocessing.cpu_count()
        cloud_lidar_viewer = "%s.lidar_viewer" % os.path.splitext(cloud)[0]
        common.run("LidarPreprocessor -to %s/LidarO -tp %s/LidarP -nt %d %s -o %s" % (
            tmp, tmp, nthreads, cloud, cloud_lidar_viewer))
    return