Esempio n. 1
0
def obfuscate(input_path, output_path, obfuscation_combinations,
              no_of_variants):
    '''
    Iterates through each c file in a folder or just a single c file and
    calls the variant function.

    Arguments:
        input_path {str} -- Non-obfuscated c file/files path
        output_path {str} -- Obfuscated c file path
        obfuscation_combinations {list} -- the list of combinations of
                                           obfuscation techniques
        no_of_variants {int} -- The number of variants for each obfuscation
    '''
    # Use a distinct loop variable: the original reused `input_path`,
    # shadowing the parameter of the same name.
    for file_path in fs.ls(input_path, EXT['c']):
        input_file = fs.details(file_path)
        output_dir_path = os.path.join(output_path, input_file['name'])

        fs.mkdir(output_dir_path, False)

        variant(file_path, output_dir_path, obfuscation_combinations,
                no_of_variants)

        # Drop the per-combination working directories once the variants
        # have been generated.
        fs.rmdirs(output_dir_path, obfuscation_combinations)
Esempio n. 2
0
def variant(original_input_path,
            output_path,
            obfuscation_combinations,
            no_of_variants=1):
    '''
    Iterates through the number of variants and the obfuscation combinations
    to generate the necessary output path and calls the obscure function.

    Arguments:
        original_input_path {str} -- Single non-obfuscated file path
        output_path {str} -- obfuscated c file path
        obfuscation_combinations {list} -- List of combinations of each
                                           obfuscation technique

    Keyword Arguments:
        no_of_variants {int} -- The number of variants to be generated (default: {1})
    '''
    for index in range(1, no_of_variants + 1):
        for combination in obfuscation_combinations:
            # Only process combinations matching the expected pattern.
            if re.search(RE_OBFUSCATION, combination):
                v_n = VN
                file_name = ''
                input_path = original_input_path

                # Chain the techniques: each obscure() output becomes the
                # input of the next technique in the combination.
                for obfuscation in combination:
                    file_name += obfuscation
                    target_path = os.path.join(output_path, file_name)

                    fs.mkdir(target_path, False)

                    input_path = obscure(input_path, output_path,
                                         obfuscation.upper(), file_name,
                                         str(index), v_n)
                    v_n += 1
Esempio n. 3
0
def file_iterator(input_path, output_path, tool, func_name, func_args):
    '''
    Recursively iterate c files from a given path. Call given function on each
    file and append the per-file results to a shared analysis CSV.

    Arguments:
        input_path {str} -- Input path of source files
        output_path {str} -- Output path to store test results
        tool {str} -- Tool to run on each file iteration
        func_name {str} -- Name of function to call in each iternation
        func_args {dict} -- Arguments requried for function call
    '''
    results_root = fs.mkdir(output_path)

    # A single CSV file aggregates the results of every iterated file.
    analysis_file_path = os.path.join(results_root, FILE_NAME['analysis'])
    fs.write_csv(analysis_file_path,
                 [get_csv_header(tool, func_args['credentials'])])

    for source_path in fs.ls(input_path, EXT['c']):
        details = fs.details(source_path)
        per_file_dir = fs.mkdir(os.path.join(results_root, details['name']))
        # TODO: Remove file permissions on copy
        copy2(source_path, per_file_dir)

        fs.append_csv(analysis_file_path,
                      func_name(source_path, per_file_dir, func_args))
Esempio n. 4
0
def run_tha_test(isolated_hash, storage, cache, leak_temp_dir, result_json,
                 root_dir, hard_timeout, grace_period, extra_args):
    """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    result_json: file path to dump result metadata into. If set, the process
                 exit code is always 0 unless an internal error occured.
    root_dir: directory to the path to use to create the temporary directory. If
              not specified, a random temporary directory is created.
    hard_timeout: kills the process if it lasts more than this amount of
                  seconds.
    grace_period: number of seconds to wait between SIGTERM and SIGKILL.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.

  Returns:
    Process exit code that should be used.
  """
    # run_isolated exit code. Depends on if result_json is used or not.
    result = map_and_run(isolated_hash, storage, cache, leak_temp_dir,
                         root_dir, hard_timeout, grace_period, extra_args)
    logging.info('Result:\n%s', tools.format_json(result, dense=True))
    if result_json:
        # We've found tests to delete 'work' when quitting, causing an exception
        # here. Try to recreate the directory if necessary.
        work_dir = os.path.dirname(result_json)
        if not fs.isdir(work_dir):
            fs.mkdir(work_dir)
        tools.write_json(result_json, result, dense=True)
        # Only return 1 if there was an internal error.
        return int(bool(result['internal_failure']))

    # Marshall into old-style inline output.
    if result['outputs_ref']:
        data = {
            'hash': result['outputs_ref']['isolated'],
            'namespace': result['outputs_ref']['namespace'],
            'storage': result['outputs_ref']['isolatedserver'],
        }
        # Flush around the marker line so it is not interleaved with other
        # buffered stdout output; consumers parse the [..._hack] markers.
        sys.stdout.flush()
        print('[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
              tools.format_json(data, dense=True))
        sys.stdout.flush()
    # Prefer the task's own exit code; report 1 only on internal failure.
    return result['exit_code'] or int(bool(result['internal_failure']))
Esempio n. 5
0
 def test_rmtree_unicode(self):
     # rmtree() must handle a directory holding a non-ASCII (Arabic) file
     # name, and must accept a plain `str` path argument.
     subdir = os.path.join(self.tempdir, "hi")
     fs.mkdir(subdir)
     filepath = os.path.join(subdir, u"\u0627\u0644\u0635\u064A\u0646\u064A\u0629")
     with fs.open(filepath, "wb") as f:
         # The handle is binary ("wb"): write bytes, not str — writing
         # "hi" here raises TypeError on Python 3.
         f.write(b"hi")
     # In particular, it fails when the input argument is a str.
     file_path.rmtree(str(subdir))
 def test_file_to_metadata_path_case_simple(self):
     # Ensure the symlink dest is saved in the right path case.
     subdir = os.path.join(self.cwd, u'subdir')
     fs.mkdir(subdir)
     linkdir = os.path.join(self.cwd, u'linkdir')
     # The link target deliberately uses the wrong case ('subDir') to
     # exercise case normalization against the on-disk name.
     fs.symlink('subDir', linkdir)
     # Querying with an upper-cased path must still yield the on-disk case.
     actual = isolated_format.file_to_metadata(linkdir.upper(), False)
     self.assertEqual({'l': u'subdir'}, actual)
Esempio n. 7
0
 def test_rmtree_unicode(self):
   # rmtree() must handle a directory holding a non-ASCII (Arabic) file
   # name, and must accept a plain `str` path argument.
   subdir = os.path.join(self.tempdir, 'hi')
   fs.mkdir(subdir)
   filepath = os.path.join(
       subdir, u'\u0627\u0644\u0635\u064A\u0646\u064A\u0629')
   with fs.open(filepath, 'wb') as f:
     # Python 2 snippet: writing a native str to a binary handle is valid.
     f.write('hi')
   # In particular, it fails when the input argument is a str.
   file_path.rmtree(str(subdir))
Esempio n. 8
0
 def test_cleanup_unexpected(self):
     # cleanup() must delete files it does not recognize from the cache
     # directory, leaving only the state file behind.
     fs.mkdir(self.cache_dir)
     junk_path = os.path.join(self.cache_dir, u'junk')
     with fs.open(junk_path, 'w') as junk_file:
         junk_file.write('random')
     cache = self.get_cache(_get_policies())
     self.assertEqual(['junk'], fs.listdir(cache.cache_dir))
     self.assertEqual(True, cache.cleanup())
     self.assertEqual([cache.STATE_FILE], fs.listdir(cache.cache_dir))
Esempio n. 9
0
 def test_hard_link_mode(self):
   # Creates a hard link, see if the file mode changed on the node or the
   # directory entry.
   # NOTE: legacy 0NNN octal literals — this snippet is Python 2 only.
   dir_foo = os.path.join(self.tempdir, 'foo')
   file_bar = os.path.join(dir_foo, 'bar')
   file_link = os.path.join(dir_foo, 'link')
   fs.mkdir(dir_foo, 0777)
   write_content(file_bar, 'bar')
   file_path.hardlink(file_bar, file_link)
   self.assertFileMode(file_bar, 0100666)
   self.assertFileMode(file_link, 0100666)
   # Making one name read-only must be visible through the other hard
   # link as well, since both names share the same inode.
   file_path.set_read_only(file_bar, True)
   self.assertMaskedFileMode(file_bar, 0100444)
   self.assertMaskedFileMode(file_link, 0100444)
Esempio n. 10
0
 def test_hard_link_mode(self):
     # Hard-link a file and verify the mode lives on the inode rather than
     # on the directory entry: both names must always report the same mode.
     dir_foo = os.path.join(self.tempdir, 'foo')
     file_bar = os.path.join(dir_foo, 'bar')
     file_link = os.path.join(dir_foo, 'link')
     fs.mkdir(dir_foo, 0o777)
     write_content(file_bar, b'bar')
     file_path.hardlink(file_bar, file_link)
     for path in (file_bar, file_link):
         self.assertFileMode(path, 0o100666)
     # Setting read-only through one name must show through the other.
     file_path.set_read_only(file_bar, True)
     for path in (file_bar, file_link):
         self.assertMaskedFileMode(path, 0o100444)
Esempio n. 11
0
 def test_delete_rd_rf(self):
     # A read-only file inside a read-only directory must not be removable.
     ro_dir = os.path.join(self.tempdir, 'foo')
     ro_file = os.path.join(ro_dir, 'bar')
     fs.mkdir(ro_dir, 0o777)
     write_content(ro_file, b'bar')
     for target in (ro_dir, ro_file):
         file_path.set_read_only(target, True)
     self.assertMaskedFileMode(ro_dir, 0o40555)
     self.assertMaskedFileMode(ro_file, 0o100444)
     # The precise errno differs per OS; only the OSError type is checked.
     with self.assertRaises(OSError):
         fs.remove(ro_file)
Esempio n. 12
0
 def test_delete_rd_rf(self):
   # Confirms that a RO file in a RO directory can't be deleted.
   # NOTE: legacy 0NNN octal literals — this snippet is Python 2 only.
   dir_foo = os.path.join(self.tempdir, 'foo')
   file_bar = os.path.join(dir_foo, 'bar')
   fs.mkdir(dir_foo, 0777)
   write_content(file_bar, 'bar')
   file_path.set_read_only(dir_foo, True)
   file_path.set_read_only(file_bar, True)
   # 040555 = read-only directory; 0100444 = read-only regular file.
   self.assertMaskedFileMode(dir_foo, 040555)
   self.assertMaskedFileMode(file_bar, 0100444)
   with self.assertRaises(OSError):
     # It fails for different reason depending on the OS. See the test cases
     # above.
     fs.remove(file_bar)
Esempio n. 13
0
 def test_delete_wd_rf(self):
     # A read-only file in a writable directory is deletable everywhere
     # except Windows, where the read-only bit blocks removal.
     parent = os.path.join(self.tempdir, 'foo')
     target = os.path.join(parent, 'bar')
     fs.mkdir(parent, 0o777)
     write_content(target, b'bar')
     file_path.set_read_only(parent, False)
     file_path.set_read_only(target, True)
     self.assertFileMode(parent, 0o40777)
     self.assertMaskedFileMode(target, 0o100444)
     if sys.platform != 'win32':
         fs.remove(target)
     else:
         # On Windows, a read-only file can't be deleted.
         with self.assertRaises(OSError):
             fs.remove(target)
Esempio n. 14
0
 def test_delete_wd_rf(self):
   # Confirms that a RO file in a RW directory can be deleted on non-Windows.
   # NOTE: legacy 0NNN octal literals — this snippet is Python 2 only.
   dir_foo = os.path.join(self.tempdir, 'foo')
   file_bar = os.path.join(dir_foo, 'bar')
   fs.mkdir(dir_foo, 0777)
   write_content(file_bar, 'bar')
   file_path.set_read_only(dir_foo, False)
   file_path.set_read_only(file_bar, True)
   self.assertFileMode(dir_foo, 040777)
   self.assertMaskedFileMode(file_bar, 0100444)
   if sys.platform == 'win32':
     # On Windows, a read-only file can't be deleted.
     with self.assertRaises(OSError):
       fs.remove(file_bar)
   else:
     # On POSIX, unlinking only needs write access to the (writable)
     # parent directory, so the read-only file is removed successfully.
     fs.remove(file_bar)
Esempio n. 15
0
    def test_symlink_absolute(self):
        # A symlink to an absolute path is valid.
        # /dir
        # /dir/file
        # /ld -> /dir
        # /lf -> /ld/file
        dirpath = os.path.join(self.tempdir, 'dir')
        filepath = os.path.join(dirpath, 'file')
        fs.mkdir(dirpath)
        write_content(filepath, b'hello')

        linkfile = os.path.join(self.tempdir, 'lf')
        linkdir = os.path.join(self.tempdir, 'ld')
        dstfile = os.path.join(linkdir, 'file')
        fs.symlink(dstfile, linkfile)
        fs.symlink(dirpath, linkdir)

        # readlink() must return the raw target, not the resolved path.
        self.assertEqual(True, fs.islink(linkfile))
        self.assertEqual(True, fs.islink(linkdir))
        self.assertEqual(dstfile, fs.readlink(linkfile))
        self.assertEqual(dirpath, fs.readlink(linkdir))
        self.assertEqual(['file'], fs.listdir(linkdir))
        # /lf resolves to /dir/file.
        with fs.open(linkfile) as f:
            self.assertEqual('hello', f.read())

        # Ensures that followlinks is respected in walk().
        # With followlinks=False, walk() must not descend into /ld.
        expected = [
            (self.tempdir, ['dir', 'ld'], ['lf']),
            (dirpath, [], ['file']),
        ]
        actual = [
            (r, sorted(d), sorted(f))
            for r, d, f in sorted(fs.walk(self.tempdir, followlinks=False))
        ]
        self.assertEqual(expected, actual)
        # With followlinks=True, /ld is walked as a directory too.
        expected = [
            (self.tempdir, ['dir', 'ld'], ['lf']),
            (dirpath, [], ['file']),
            (linkdir, [], ['file']),
        ]
        actual = [
            (r, sorted(d), sorted(f))
            for r, d, f in sorted(fs.walk(self.tempdir, followlinks=True))
        ]
        self.assertEqual(expected, actual)
Esempio n. 16
0
    def test_cleanup_incorrect_link(self):
        # cleanup() repairs broken symlink in named/.
        cache = self.get_cache(_get_policies())
        self._add_one_item(cache, 1)
        self._add_one_item(cache, 2)
        # Replace the two valid named entries with a dangling symlink and a
        # plain directory — both are invalid contents for named/.
        fs.remove(os.path.join(self.cache_dir, cache.NAMED_DIR, u'1'))
        fs.remove(os.path.join(self.cache_dir, cache.NAMED_DIR, u'2'))
        fs.symlink('invalid_dest',
                   os.path.join(self.cache_dir, cache.NAMED_DIR, u'1'))
        fs.mkdir(os.path.join(self.cache_dir, cache.NAMED_DIR, u'2'))

        # Reload the cache: both bogus entries are still listed on disk.
        cache = self.get_cache(_get_policies())
        self.assertEqual(
            ['1', '2'],
            sorted(fs.listdir(os.path.join(cache.cache_dir, cache.NAMED_DIR))))
        # cleanup() must remove both invalid entries from named/.
        self.assertEqual(True, cache.cleanup())
        self.assertEqual([],
                         fs.listdir(
                             os.path.join(cache.cache_dir, cache.NAMED_DIR)))
Esempio n. 17
0
        def test_expand_symlinks_path_case(self):
            # Ensures that the resulting path case is fixed on case insensitive file
            # system.
            fs.symlink('dest', os.path.join(self.cwd, u'link'))
            fs.mkdir(os.path.join(self.cwd, u'Dest'))
            fs.open(os.path.join(self.cwd, u'Dest', u'file.txt'), 'w').close()

            # '.' expands to itself and crosses no symlink.
            relfile, symlinks = isolated_format._expand_symlinks(
                self.cwd, u'.')
            self.assertEqual((u'.', []), (relfile, symlinks))

            # 'link' resolves to the on-disk case 'Dest' and records the
            # traversed symlink.
            relfile, symlinks = isolated_format._expand_symlinks(
                self.cwd, u'link')
            self.assertEqual((u'Dest', [u'link']), (relfile, symlinks))

            # Components after the symlink get their case fixed as well
            # ('File.txt' -> 'file.txt').
            relfile, symlinks = isolated_format._expand_symlinks(
                self.cwd, u'link/File.txt')
            self.assertEqual((u'Dest/file.txt', [u'link']),
                             (relfile, symlinks))
Esempio n. 18
0
    def fuck_yourself(self):
        """Prepare runtime state before the client starts.

        Resolves the IPv6 address (if enabled), picks a port, generates the
        key / peer id / user-agent for the emulated BT client, creates the
        up/down directories and logs the effective configuration.
        """
        # set ipv6 address if needed
        if self.use_ipv6:
            ipv6 = ipv6_addr()
            if not ipv6:
                # NOTE(review): presumably ptl_error() aborts execution;
                # otherwise str_ipv6 below would be built from an empty
                # address — confirm.
                ptl_error("cannot get ipv6 address")
            self.str_ipv6 = "&ipv6=%s" % urlencode(ipv6)

        # generate a port number if not given
        if not self.port:
            self.port = randint(MIN_PORT, MAX_PORT)

        # generate client key
        self.client_key = client_key()
        client_info = BT_CLIENTS[self.client_id]

        # generate peer_id : based on client chosen
        prefix = client_info["prefix"]
        pid = peer_id(prefix)
        self.quoted_peer_id = urlencode(pid)

        # generate http header[user-agent] : based on client chosen
        user_agent = client_info["user-agent"]
        self.headers.update({"User-Agent": user_agent})

        # supports scrape?
        self.scrapable = not self.no_scrape and client_info["scrape"]

        # create directories if not exist
        for up_down in (UP, DOWN):
            mkdir(DIR[up_down])

        # Log the effective configuration at startup for diagnostics.
        log.setLevel(DEBUG)
        log.debug("ptliar started, version: %s" % __version__)
        log.info("verbose            : %s" % (self.logging_level == DEBUG))
        log.info("ipv6               : %s" % self.use_ipv6)
        log.info("zero_rate          : %s" % self.use_zero_rate)
        log.info("timer              : %s" % ptime(self.timer))
        log.info("max up bandwidth   : %s/s" % psize(self.max_up_speed))
        log.info("max down bandwidth : %s/s" % psize(self.max_down_speed))
        log.info("max torrent speed  : %s/s" % psize(self.max_torrent_speed))
        log.info("fake bt client     : %s" % self.client_id)
Esempio n. 19
0
    def test_rmtree_win(self):
      # Mock our sleep for faster test case execution.
      # NOTE: Python 2 snippet (StringIO.StringIO).
      sleeps = []
      self.mock(time, 'sleep', sleeps.append)
      self.mock(sys, 'stderr', StringIO.StringIO())

      # Open a child process, so the file is locked.
      subdir = os.path.join(self.tempdir, 'to_be_deleted')
      fs.mkdir(subdir)
      # The child keeps 'a' open for 60s, holding the directory locked.
      script = 'import time; open(\'a\', \'w\'); time.sleep(60)'
      proc = subprocess.Popen([sys.executable, '-c', script], cwd=subdir)
      try:
        # Wait until the file exist.
        while not fs.isfile(os.path.join(subdir, 'a')):
          self.assertEqual(None, proc.poll())
        file_path.rmtree(subdir)
        # rmtree retried with 2s, 4s, then 2s (mocked) sleeps before success.
        self.assertEqual([2, 4, 2], sleeps)
        # sys.stderr.getvalue() would return a fair amount of output but it is
        # not completely deterministic so we're not testing it here.
      finally:
        proc.wait()
Esempio n. 20
0
    def test_rmtree_win(self):
      # Mock our sleep for faster test case execution.
      # NOTE: Python 2 snippet (StringIO.StringIO).
      sleeps = []
      self.mock(time, 'sleep', sleeps.append)
      self.mock(sys, 'stderr', StringIO.StringIO())

      # Open a child process, so the file is locked.
      subdir = os.path.join(self.tempdir, 'to_be_deleted')
      fs.mkdir(subdir)
      # The child keeps 'a' open for 60s, holding the directory locked.
      script = 'import time; open(\'a\', \'w\'); time.sleep(60)'
      proc = subprocess.Popen([sys.executable, '-c', script], cwd=subdir)
      try:
        # Wait until the file exist.
        while not fs.isfile(os.path.join(subdir, 'a')):
          self.assertEqual(None, proc.poll())
        file_path.rmtree(subdir)
        # rmtree retried with 2s, 4s, then 2s (mocked) sleeps before success.
        self.assertEqual([2, 4, 2], sleeps)
        # sys.stderr.getvalue() would return a fair amount of output but it is
        # not completely deterministic so we're not testing it here.
      finally:
        proc.wait()
Esempio n. 21
0
 def test_delete_rd_wf(self):
   # Confirms that a Rw file in a RO directory can be deleted on Windows only.
   # NOTE: legacy 0NNN octal literals — this snippet is Python 2 only.
   dir_foo = os.path.join(self.tempdir, 'foo')
   file_bar = os.path.join(dir_foo, 'bar')
   fs.mkdir(dir_foo, 0777)
   write_content(file_bar, 'bar')
   file_path.set_read_only(dir_foo, True)
   file_path.set_read_only(file_bar, False)
   self.assertMaskedFileMode(dir_foo, 040555)
   self.assertFileMode(file_bar, 0100666)
   if sys.platform == 'win32':
     # A read-only directory has a convoluted meaning on Windows, it means that
     # the directory is "personalized". This is used as a signal by Windows
     # Explorer to tell it to look into the directory for desktop.ini.
     # See http://support.microsoft.com/kb/326549 for more details.
     # As such, it is important to not try to set the read-only bit on
     # directories on Windows since it has no effect other than trigger
     # Windows Explorer to look for desktop.ini, which is unnecessary.
     fs.remove(file_bar)
   else:
     # On POSIX, unlinking requires write access to the parent directory,
     # which is read-only here, so the removal must fail.
     with self.assertRaises(OSError):
       fs.remove(file_bar)
Esempio n. 22
0
 def test_delete_rd_wf(self):
     # A writable file inside a read-only directory is deletable on
     # Windows only; POSIX requires a writable parent to unlink.
     parent = os.path.join(self.tempdir, 'foo')
     target = os.path.join(parent, 'bar')
     fs.mkdir(parent, 0o777)
     write_content(target, b'bar')
     file_path.set_read_only(parent, True)
     file_path.set_read_only(target, False)
     self.assertMaskedFileMode(parent, 0o40555)
     self.assertFileMode(target, 0o100666)
     if sys.platform != 'win32':
         with self.assertRaises(OSError):
             fs.remove(target)
     else:
         # On Windows the read-only bit on a directory merely marks it as
         # "personalized" (a hint for Explorer to read desktop.ini); it
         # does not prevent removing entries, so the deletion succeeds.
         # See http://support.microsoft.com/kb/326549 for more details.
         fs.remove(target)
Esempio n. 23
0
        def test_file_to_metadata_path_case_complex(self):
            # Ensure the symlink dest is saved in the right path case. This includes 2
            # layers of symlinks.
            basedir = os.path.join(self.cwd, u'basebir')
            fs.mkdir(basedir)

            linkeddir2 = os.path.join(self.cwd, u'linkeddir2')
            fs.mkdir(linkeddir2)

            # First layer: basedir/linkeddir1 -> ../linkedDir2 (wrong case
            # on purpose).
            linkeddir1 = os.path.join(basedir, u'linkeddir1')
            fs.symlink('../linkedDir2', linkeddir1)

            # Second layer: basedir/symlinkdir -> linkedDir1 (wrong case too).
            subsymlinkdir = os.path.join(basedir, u'symlinkdir')
            fs.symlink('linkedDir1', subsymlinkdir)

            # Both link destinations must be stored with the on-disk case,
            # even when queried via an upper-cased path.
            actual = isolated_format.file_to_metadata(subsymlinkdir.upper(),
                                                      True, False)
            self.assertEqual({'l': u'linkeddir1'}, actual)

            actual = isolated_format.file_to_metadata(linkeddir1.upper(), True,
                                                      False)
            self.assertEqual({'l': u'../linkeddir2'}, actual)
Esempio n. 24
0
    def test_load_corrupted_state(self):
        # cleanup() handles a broken state file.
        fs.mkdir(self.cache_dir)
        c = local_caching.NamedCache
        with fs.open(os.path.join(self.cache_dir, c.STATE_FILE), 'w') as f:
            # Deliberately invalid JSON to simulate a corrupted state file.
            f.write('}}}}')
        # 0o777 replaces the legacy 0777 literal, which is a syntax error
        # on Python 3 (same value on Python 2.6+).
        fs.makedirs(os.path.join(self.cache_dir, '1'), 0o777)

        cache = self.get_cache(_get_policies())
        self._add_one_item(cache, 1)
        self.assertTrue(
            fs.exists(os.path.join(cache.cache_dir, cache.NAMED_DIR, '1')))
        self.assertTrue(
            fs.islink(os.path.join(cache.cache_dir, cache.NAMED_DIR, '1')))
        # trim() has nothing to evict and must not touch the entry.
        self.assertEqual([], cache.trim())
        self.assertTrue(
            fs.exists(os.path.join(cache.cache_dir, cache.NAMED_DIR, '1')))
        self.assertTrue(
            fs.islink(os.path.join(cache.cache_dir, cache.NAMED_DIR, '1')))
        # cleanup() succeeds despite the corrupted state it started from.
        self.assertEqual(True, cache.cleanup())
        self.assertEqual(
            sorted([cache.NAMED_DIR, cache.STATE_FILE, cache._lru[u'1'][0]]),
            sorted(fs.listdir(cache.cache_dir)))
Esempio n. 25
0
 def test_upgrade(self):
     # Make sure upgrading works. This is temporary as eventually all bots will
     # be updated.
     # NOTE: Python 2 snippet (dict.iteritems()).
     now = time.time()
     fs.mkdir(self.cache_dir)
     fs.mkdir(os.path.join(self.cache_dir, 'f1'))
     with fs.open(os.path.join(self.cache_dir, 'f1', 'hello'), 'wb') as f:
         f.write('world')
     # v1
     # NOTE(review): the comment says v1 but the payload carries
     # 'version': 2 — confirm the version numbering of the legacy format.
     old = {
         'version': 2,
         'items': [
             ['cache1', ['f1', now]],
         ],
     }
     c = local_caching.NamedCache
     with fs.open(os.path.join(self.cache_dir, c.STATE_FILE), 'w') as f:
         json.dump(old, f)
     # It automatically upgrades to v2.
     cache = self.get_cache(_get_policies())
     # The upgraded entry records the directory name and the file size.
     expected = {u'cache1': ((u'f1', len('world')), now)}
     self.assertEqual(expected, dict(cache._lru._items.iteritems()))
     self.assertEqual([u'f1', cache.STATE_FILE],
                      sorted(fs.listdir(cache.cache_dir)))
Esempio n. 26
0
 def test_symlink_input_absolute_path(self):
     # Layout under test:
     #   .../src
     #   .../src/out -> .../tmp   (absolute-path symlink)
     #   .../tmp
     #   .../tmp/foo
     # A symlink pointing outside of the checkout must be treated as a
     # normal directory, so the relative path comes back untouched.
     src = os.path.join(self.cwd, u'src')
     src_out = os.path.join(src, u'out')
     tmp = os.path.join(self.cwd, u'tmp')
     tmp_foo = os.path.join(tmp, u'foo')
     for directory in (src, tmp, tmp_foo):
         fs.mkdir(directory)
     # The dest is an absolute path, so it must be considered a normal
     # directory rather than a symlink to expand.
     fs.symlink(tmp, src_out)
     fs.open(os.path.join(tmp_foo, u'bar.txt'), 'w').close()
     relfile, symlinks = isolated_format._expand_symlinks(
         src, u'out/foo/bar.txt')
     self.assertEqual((u'out/foo/bar.txt', []), (relfile, symlinks))
Esempio n. 27
0
    def test_cleanup_unexpected_named(self):
        # cleanup() deletes unexpected symlink and directory in named/.
        fs.mkdir(self.cache_dir)
        c = local_caching.NamedCache
        fs.mkdir(os.path.join(self.cache_dir, c.NAMED_DIR))
        # Seed named/ with three kinds of invalid entries: a regular file,
        # a directory and a dangling symlink.
        p = os.path.join(self.cache_dir, c.NAMED_DIR, u'junk_file')
        with fs.open(p, 'w') as f:
            f.write('random')
        fs.mkdir(os.path.join(self.cache_dir, c.NAMED_DIR, u'junk_dir'))
        fs.symlink('invalid_dest',
                   os.path.join(self.cache_dir, c.NAMED_DIR, u'junk_link'))

        cache = self.get_cache(_get_policies())
        self.assertEqual([cache.NAMED_DIR], fs.listdir(cache.cache_dir))
        self.assertEqual(
            ['junk_dir', 'junk_file', 'junk_link'],
            sorted(fs.listdir(os.path.join(cache.cache_dir, cache.NAMED_DIR))))
        # cleanup() must remove all three and leave named/ empty.
        self.assertEqual(True, cache.cleanup())
        self.assertEqual([cache.NAMED_DIR, cache.STATE_FILE],
                         sorted(fs.listdir(cache.cache_dir)))
        self.assertEqual([],
                         fs.listdir(
                             os.path.join(cache.cache_dir, cache.NAMED_DIR)))
Esempio n. 28
0
        def test_file_to_metadata_path_case_collapse(self):
            # Ensure setting the collapse_symlink option doesn't include the symlinks
            basedir = os.path.join(self.cwd, u'basedir')
            fs.mkdir(basedir)
            subdir = os.path.join(basedir, u'subdir')
            fs.mkdir(subdir)
            linkdir = os.path.join(basedir, u'linkdir')
            fs.mkdir(linkdir)

            # An empty file, reachable through a symlink in linkdir/.
            foo_file = os.path.join(subdir, u'Foo.txt')
            fs.open(foo_file, 'w').close()
            sym_file = os.path.join(basedir, u'linkdir', u'Sym.txt')
            fs.symlink('../subdir/Foo.txt', sym_file)

            # With collapse enabled, the metadata describes the target file
            # itself ('h'/'m'/'s'), not an 'l' symlink entry.
            actual = isolated_format.file_to_metadata(sym_file, True, True)
            actual['h'] = isolated_format.hash_file(sym_file, ALGO)
            expected = {
                # SHA-1 of empty string
                'h': 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
                'm': 256,
                's': 0,
            }
            self.assertEqual(expected, actual)
Esempio n. 29
0
def run(argv):
    '''
    Selects which tool to be used by user.

    Arguments:
        argv {list} -- List of arguments
    '''
    # No arguments at all: show usage and bail out.
    if len(argv) <= 1:
        print_help()
        sys.exit(1)

    try:
        args = PARSER.parse_args()
    except BaseException:
        sys.exit(1)

    tool = args.option
    if args.output:
        output_path = os.path.abspath(args.output)
    else:
        print_help()
        # sys.exit for consistency with the other exit paths (was exit(1)).
        sys.exit(1)

    if tool == GENERATE:
        codes = args.codes
        passwords = args.passwords
        generate(output_path, codes, passwords)

    elif tool == OBFUSCATE:
        num_variants = args.num_variants
        input_path = os.path.abspath(args.input)
        fs.mkdir(output_path, False)
        obfuscation_combinations = args.obfuscation_list

        # Validate every combination before doing any work.
        for obs in obfuscation_combinations:
            if not re.search(RE_OBFUSCATION, obs):
                # Fixed message wording (was "One more more items ...").
                print(
                    'One or more items in the list does not contain [A, C, D, V]')
                print('Error at: ' + obs)
                sys.exit(1)

        obfuscate(
            input_path,
            output_path,
            obfuscation_combinations,
            num_variants)

    elif tool == RUN:
        input_path = os.path.abspath(args.input)
        fn_args = {
            'credentials': credentials(args),
            'levels': args.optimization_levels,
        }
        file_iterator(input_path, output_path, RUN, run_code, fn_args)

    elif (tool == ANGR or tool == KLEE or tool == ALL):
        input_path = os.path.abspath(args.input)
        fn_args = {
            'tool': tool,
            'stdin': stdin(args),
            'options': se_options(args),
            'credentials': credentials(args),
        }
        file_iterator(input_path, output_path, tool, run_se, fn_args)
Esempio n. 30
0
def run_tha_test(
    isolated_hash, storage, cache, leak_temp_dir, result_json, root_dir,
    hard_timeout, grace_period, extra_args):
  """Downloads the dependencies in the cache, hardlinks them into a temporary
  directory and runs the executable from there.

  A temporary directory is created to hold the output files. The content inside
  this directory will be uploaded back to |storage| packaged as a .isolated
  file.

  Arguments:
    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
                   recreate the tree of files to run the target executable.
    storage: an isolateserver.Storage object to retrieve remote objects. This
             object has a reference to an isolateserver.StorageApi, which does
             the actual I/O.
    cache: an isolateserver.LocalCache to keep from retrieving the same objects
           constantly by caching the objects retrieved. Can be on-disk or
           in-memory.
    leak_temp_dir: if true, the temporary directory will be deliberately leaked
                   for later examination.
    result_json: file path to dump result metadata into. If set, the process
                 exit code is always 0 unless an internal error occured.
    root_dir: directory to the path to use to create the temporary directory. If
              not specified, a random temporary directory is created.
    hard_timeout: kills the process if it lasts more than this amount of
                  seconds.
    grace_period: number of seconds to wait between SIGTERM and SIGKILL.
    extra_args: optional arguments to add to the command stated in the .isolate
                file.

  Returns:
    Process exit code that should be used.
  """
  # run_isolated exit code. Depends on if result_json is used or not.
  result = map_and_run(
      isolated_hash, storage, cache, leak_temp_dir, root_dir, hard_timeout,
      grace_period, extra_args)
  logging.info('Result:\n%s', tools.format_json(result, dense=True))
  if result_json:
    # We've found tests to delete 'work' when quitting, causing an exception
    # here. Try to recreate the directory if necessary.
    work_dir = os.path.dirname(result_json)
    if not fs.isdir(work_dir):
      fs.mkdir(work_dir)
    tools.write_json(result_json, result, dense=True)
    # Only return 1 if there was an internal error.
    return int(bool(result['internal_failure']))

  # Marshall into old-style inline output.
  if result['outputs_ref']:
    data = {
      'hash': result['outputs_ref']['isolated'],
      'namespace': result['outputs_ref']['namespace'],
      'storage': result['outputs_ref']['isolatedserver'],
    }
    # Flush around the marker line so it is not interleaved with other
    # buffered stdout output; consumers parse the [..._hack] markers.
    sys.stdout.flush()
    print(
        '[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
        tools.format_json(data, dense=True))
    sys.stdout.flush()
  # Prefer the task's own exit code; report 1 only on internal failure.
  return result['exit_code'] or int(bool(result['internal_failure']))
Esempio n. 31
0
def CMDreproduce(parser, args):
  """Runs a task locally that was triggered on the server.

  This running locally the same commands that have been run on the bot. The data
  downloaded will be in a subdirectory named 'work' of the current working
  directory.

  You can pass further additional arguments to the target command by passing
  them after --.
  """
  options, args = parser.parse_args(args)
  extra_args = []
  if not args:
    parser.error('Must specify exactly one task id.')
  if len(args) > 1:
    if args[1] == '--':
      if len(args) > 2:
        extra_args = args[2:]
    else:
      extra_args = args[1:]

  url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
  request = net.url_read_json(url)
  if not request:
    print >> sys.stderr, 'Failed to retrieve request data for the task'
    return 1

  workdir = unicode(os.path.abspath('work'))
  if not fs.isdir(workdir):
    fs.mkdir(workdir)

  properties = request['properties']
  env = None
  if properties.get('env'):
    env = os.environ.copy()
    logging.info('env: %r', properties['env'])
    for i in properties['env']:
      key = i['key'].encode('utf-8')
      if not i['value']:
        env.pop(key, None)
      else:
        env[key] = i['value'].encode('utf-8')

  if properties.get('inputs_ref'):
    # Create the tree.
    with isolateserver.get_storage(
          properties['inputs_ref']['isolatedserver'],
          properties['inputs_ref']['namespace']) as storage:
      bundle = isolateserver.fetch_isolated(
          properties['inputs_ref']['isolated'],
          storage,
          isolateserver.MemoryCache(file_mode_mask=0700),
          workdir,
          False)
      command = bundle.command
      if bundle.relative_cwd:
        workdir = os.path.join(workdir, bundle.relative_cwd)
  else:
    command = properties['command']
  try:
    return subprocess.call(command + extra_args, env=env, cwd=workdir)
  except OSError as e:
    print >> sys.stderr, 'Failed to run: %s' % ' '.join(command)
    print >> sys.stderr, str(e)
    return 1
Esempio n. 32
0
def CMDreproduce(parser, args):
  """Runs a task locally that was triggered on the server.

  This running locally the same commands that have been run on the bot. The data
  downloaded will be in a subdirectory named 'work' of the current working
  directory.

  You can pass further additional arguments to the target command by passing
  them after --.
  """
  parser.add_option(
      '--output-dir', metavar='DIR', default='',
      help='Directory that will have results stored into')
  options, args = parser.parse_args(args)
  extra_args = []
  if not args:
    parser.error('Must specify exactly one task id.')
  if len(args) > 1:
    if args[1] == '--':
      if len(args) > 2:
        extra_args = args[2:]
    else:
      extra_args = args[1:]

  url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
  request = net.url_read_json(url)
  if not request:
    print >> sys.stderr, 'Failed to retrieve request data for the task'
    return 1

  workdir = unicode(os.path.abspath('work'))
  if fs.isdir(workdir):
    parser.error('Please delete the directory \'work\' first')
  fs.mkdir(workdir)

  properties = request['properties']
  env = None
  if properties.get('env'):
    env = os.environ.copy()
    logging.info('env: %r', properties['env'])
    for i in properties['env']:
      key = i['key'].encode('utf-8')
      if not i['value']:
        env.pop(key, None)
      else:
        env[key] = i['value'].encode('utf-8')

  if properties.get('inputs_ref'):
    # Create the tree.
    with isolateserver.get_storage(
          properties['inputs_ref']['isolatedserver'],
          properties['inputs_ref']['namespace']) as storage:
      bundle = isolateserver.fetch_isolated(
          properties['inputs_ref']['isolated'],
          storage,
          isolateserver.MemoryCache(file_mode_mask=0700),
          workdir)
      command = bundle.command
      if bundle.relative_cwd:
        workdir = os.path.join(workdir, bundle.relative_cwd)
      command.extend(properties.get('extra_args') or [])
    # https://github.com/luci/luci-py/blob/master/appengine/swarming/doc/Magic-Values.md
    new_command = run_isolated.process_command(command, options.output_dir)
    if not options.output_dir and new_command != command:
      parser.error('The task has outputs, you must use --output-dir')
    command = new_command
  else:
    command = properties['command']
  try:
    return subprocess.call(command + extra_args, env=env, cwd=workdir)
  except OSError as e:
    print >> sys.stderr, 'Failed to run: %s' % ' '.join(command)
    print >> sys.stderr, str(e)
    return 1