Example #1
    def get_current_best(self):
        """
        Current best results
        """

        data = read_json(self.output_file)
        return data.get('results') or 0
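
All of these examples assume a read_json helper that decodes a JSON file. A minimal sketch (the real tools.read_json in these codebases may differ in details such as encoding handling), raising the same IOError/ValueError pair the callers below catch:

import json

def read_json(filepath):
    """Returns the decoded content of a JSON file.

    Raises IOError if the file cannot be opened and ValueError if its
    content is not valid JSON.
    """
    with open(filepath, 'r') as f:
        return json.load(f)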
Example #2
 def load_file(cls, filename, *args, **kwargs):
     """Loads the data from a file or return an empty instance."""
     try:
         out = cls.load(tools.read_json(filename), *args, **kwargs)
         logging.debug("Loaded %s(%s)", cls.__name__, filename)
     except (IOError, ValueError) as e:
         # On failure, loads the default instance.
         out = cls(*args, **kwargs)
         logging.warning("Failed to load %s: %s", filename, e)
     return out
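
A minimal caller for this pattern could look like the following; Config, its load() classmethod, and state.json are hypothetical stand-ins:

class Config(object):
    """Hypothetical state object restored from JSON."""

    def __init__(self, verbose=False):
        self.verbose = verbose

    @classmethod
    def load(cls, data, *args, **kwargs):
        out = cls(*args, **kwargs)
        out.verbose = data.get('verbose', out.verbose)
        return out

    # Reuse the load_file function above as an alternate constructor.
    load_file = classmethod(load_file)

cfg = Config.load_file('state.json')  # falls back to Config() if unreadable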
Example #4
 def __init__(self, currency_pair):
     self.ticker = currency_pair
     self.input_file = join(dirname(realpath(__file__)),
                            f"input/{self.ticker}.json")
     self.output_file = join(dirname(realpath(__file__)),
                             f"output/{self.ticker}.json")
     self.btc = 1000
     self.current_best = self.get_current_best()
     self.data = read_json(self.input_file)
     self.portfolio = []
     self.actions = {
         'BUY': lambda coin: self.buy(coin),
         'SELL': lambda coin: self.sell(coin['close'])
     }
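
The actions table maps trade signals to method calls. A hypothetical driver method (the buy/sell rule here is made up for illustration) could dispatch through it like this:

def run(self):
    """Hypothetical driver: walk the candles and dispatch each signal."""
    for coin in self.data:
        signal = 'BUY' if coin['close'] < self.current_best else 'SELL'
        self.actions[signal](coin)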
Example #5
  def test_main_json(self):
    # Instruct the Popen mock to write a file in ISOLATED_OUTDIR so it will be
    # archived back on termination.
    self.mock(tools, 'disable_buffering', lambda: None)
    sub_cmd = [
      self.temp_join(u'foo.exe'), u'cmd with space',
      '${ISOLATED_OUTDIR}/out.txt',
    ]
    isolated = json_dumps({'command': sub_cmd})
    isolated_hash = isolateserver_mock.hash_content(isolated)
    def get_storage(_isolate_server, _namespace):
      return StorageFake({isolated_hash:isolated})
    self.mock(isolateserver, 'get_storage', get_storage)

    out = os.path.join(self.tempdir, 'res.json')
    cmd = [
        '--no-log',
        '--isolated', isolated_hash,
        '--cache', self.tempdir,
        '--isolate-server', 'https://localhost:1',
        '--json', out,
    ]
    ret = run_isolated.main(cmd)
    self.assertEqual(0, ret)
    # Replace ${ISOLATED_OUTDIR} with the temporary directory.
    sub_cmd[2] = self.popen_calls[0][0][2]
    self.assertNotIn('ISOLATED_OUTDIR', sub_cmd[2])
    self.assertEqual([(sub_cmd, {'detached': True})], self.popen_calls)
    expected = {
      u'exit_code': 0,
      u'internal_failure': None,
      u'outputs_ref': {
        u'isolated': u'e0a0fffa0910dd09e7ef4c89496116f60317e6c4',
        u'isolatedserver': u'http://localhost:1',
        u'namespace': u'default-gzip',
      },
      u'version': 1,
    }
    self.assertEqual(expected, tools.read_json(out))
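
json_dumps above is assumed to be a deterministic serializer, so hash_content always sees identical bytes for the same logical document; a minimal stand-in:

import json

def json_dumps(data):
    # Sorted keys and fixed separators keep the encoding byte-stable.
    return json.dumps(data, sort_keys=True, separators=(',', ':'))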
Example #6
    expected = {
      # Fragment: the test setup and the leading expected fields (exit_code,
      # outputs_ref, and so on) are missing from the original snippet;
      # isolated_in_json/isolated_out_json are defined in the elided part.
      u'stats': {
        u'download': {
          u'initial_number_items': 0,
          u'initial_size': 0,
          u'items_cold': [len(isolated_in_json)],
          u'items_hot': [],
        },
        u'upload': {
          u'items_cold': [len(isolated_out_json)],
          u'items_hot': [15],
        },
      },
      u'version': 3,
    }
    actual = tools.read_json(out)
    # duration can be exactly 0 due to low timer resolution, especially but not
    # exclusively on Windows.
    self.assertLessEqual(0, actual.pop(u'duration'))
    self.assertLessEqual(0, actual[u'stats'][u'download'].pop(u'duration'))
    self.assertLessEqual(0, actual[u'stats'][u'upload'].pop(u'duration'))
    for i in (u'download', u'upload'):
      for j in (u'items_cold', u'items_hot'):
        actual[u'stats'][i][j] = large.unpack(
            base64.b64decode(actual[u'stats'][i][j]))
    self.assertEqual(expected, actual)


if __name__ == '__main__':
  fix_encoding.fix_encoding()
  logging.basicConfig(
      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
  unittest.main()
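
The stats assertions above turn a base64 blob back into a list of item sizes via large.unpack. The general idea behind such packing (a self-contained sketch of delta plus varint coding for a sorted list of ints, not the actual luci large module) is:

def pack(values):
    """Packs a sorted list of non-negative ints as delta-encoded varints."""
    out = bytearray()
    last = 0
    for value in sorted(values):
        delta = value - last
        last = value
        while True:
            byte = delta & 0x7f
            delta >>= 7
            if delta:
                out.append(byte | 0x80)  # high bit set: more bytes follow
            else:
                out.append(byte)
                break
    return bytes(out)

def unpack(data):
    """Reverses pack(): accumulates varint deltas back into absolute values."""
    values = []
    last = 0
    delta = 0
    shift = 0
    for byte in bytearray(data):
        delta |= (byte & 0x7f) << shift
        shift += 7
        if not byte & 0x80:
            last += delta
            values.append(last)
            delta = 0
            shift = 0
    return values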
Example #7
def CMDbatcharchive(parser, args):
    """Archives multiple isolated trees at once.

  Using a single command instead of multiple sequential invocations cuts
  redundant work when isolated trees share common files (e.g. file hashes are
  checked only once, their presence on the server is checked only once, and
  so on).

  Takes a list of paths to *.isolated.gen.json files that describe which trees
  to isolate. The format of these files is:
  {
    "version": 1,
    "dir": <absolute path to a directory all other paths are relative to>,
    "args": [list of command line arguments for single 'archive' command]
  }
  """
    isolateserver.add_isolate_server_options(parser)
    isolateserver.add_archive_options(parser)
    auth.add_auth_options(parser)
    parser.add_option(
        "--dump-json", metavar="FILE", help="Write isolated hashes of archived trees to this file as JSON"
    )
    options, args = parser.parse_args(args)
    auth.process_auth_options(parser, options)
    isolateserver.process_isolate_server_options(parser, options, True)

    # Validate all incoming options, prepare what needs to be archived as a list
    # of tuples (archival options, working directory).
    work_units = []
    for gen_json_path in args:
        # Validate JSON format of a *.isolated.gen.json file.
        data = tools.read_json(gen_json_path)
        if data.get("version") != ISOLATED_GEN_JSON_VERSION:
            parser.error("Invalid version in %s" % gen_json_path)
        cwd = data.get("dir")
        if not isinstance(cwd, unicode) or not os.path.isdir(cwd):
            parser.error("Invalid dir in %s" % gen_json_path)
        args = data.get("args")
        if not isinstance(args, list) or not all(isinstance(x, unicode) for x in args):
            parser.error("Invalid args in %s" % gen_json_path)
        # Convert command line (embedded in JSON) to Options object.
        work_units.append((parse_archive_command_line(args, cwd), cwd))

    # Perform the archival, all at once.
    isolated_hashes = isolate_and_archive(work_units, options.isolate_server, options.namespace)

    # TODO(vadimsh): isolate_and_archive returns None on upload failure, there's
    # no way currently to figure out which *.isolated files from a batch were
    # successfully uploaded, so consider them all failed (and emit empty dict
    # as JSON result).
    if options.dump_json:
        tools.write_json(options.dump_json, isolated_hashes or {}, False)

    if isolated_hashes is None:
        return EXIT_CODE_UPLOAD_ERROR

    # isolated_hashes[x] is None if 'x.isolate' contains an error.
    if not all(isolated_hashes.itervalues()):
        return EXIT_CODE_ISOLATE_ERROR

    return 0
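
For reference, a *.isolated.gen.json file matching the documented format could be produced like this (the directory and the archive arguments are illustrative):

import json

spec = {
    'version': 1,                # must match ISOLATED_GEN_JSON_VERSION
    'dir': '/src/out/Release',   # hypothetical absolute base directory
    'args': [                    # arguments for a single 'archive' invocation
        '--isolate', 'browser_tests.isolate',
        '--isolated', 'browser_tests.isolated',
    ],
}
with open('browser_tests.isolated.gen.json', 'w') as f:
    json.dump(spec, f, sort_keys=True)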
Example #8
def CMDbatcharchive(parser, args):
    """Archives multiple isolated trees at once.

  Using a single command instead of multiple sequential invocations cuts
  redundant work when isolated trees share common files (e.g. file hashes are
  checked only once, their presence on the server is checked only once, and
  so on).

  Takes a list of paths to *.isolated.gen.json files that describe which trees
  to isolate. The format of these files is:
  {
    "version": 1,
    "dir": <absolute path to a directory all other paths are relative to>,
    "args": [list of command line arguments for single 'archive' command]
  }
  """
    isolateserver.add_isolate_server_options(parser)
    isolateserver.add_archive_options(parser)
    auth.add_auth_options(parser)
    parser.add_option(
        '--dump-json',
        metavar='FILE',
        help='Write isolated hashes of archived trees to this file as JSON')
    options, args = parser.parse_args(args)
    auth.process_auth_options(parser, options)
    isolateserver.process_isolate_server_options(parser, options, True, True)

    # Validate all incoming options, prepare what needs to be archived as a list
    # of tuples (archival options, working directory).
    work_units = []
    for gen_json_path in args:
        # Validate JSON format of a *.isolated.gen.json file.
        try:
            data = tools.read_json(gen_json_path)
        except IOError as e:
            parser.error('Failed to open %s: %s' % (gen_json_path, e))
        if data.get('version') != ISOLATED_GEN_JSON_VERSION:
            parser.error('Invalid version in %s' % gen_json_path)
        cwd = data.get('dir')
        if not isinstance(cwd, unicode) or not fs.isdir(cwd):
            parser.error('Invalid dir in %s' % gen_json_path)
        args = data.get('args')
        if (not isinstance(args, list)
                or not all(isinstance(x, unicode) for x in args)):
            parser.error('Invalid args in %s' % gen_json_path)
        # Convert command line (embedded in JSON) to Options object.
        work_units.append((parse_archive_command_line(args, cwd), cwd))

    # Perform the archival, all at once.
    isolated_hashes = isolate_and_archive(work_units, options.isolate_server,
                                          options.namespace)

    # TODO(vadimsh): isolate_and_archive returns None on upload failure, there's
    # no way currently to figure out which *.isolated files from a batch were
    # successfully uploaded, so consider them all failed (and emit empty dict
    # as JSON result).
    if options.dump_json:
        tools.write_json(options.dump_json, isolated_hashes or {}, False)

    if isolated_hashes is None:
        return EXIT_CODE_UPLOAD_ERROR

    # isolated_hashes[x] is None if 'x.isolate' contains an error.
    if not all(isolated_hashes.itervalues()):
        return EXIT_CODE_ISOLATE_ERROR

    return 0
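
Downstream tooling can read the --dump-json output to map each archived tree to its hash; a minimal consumer sketch (hashes.json is a hypothetical output path):

import json

with open('hashes.json') as f:
    isolated_hashes = json.load(f)
# A value of None means that tree's *.isolate file contained an error.
for name, isolated_hash in sorted(isolated_hashes.items()):
    print('%s -> %s' % (name, isolated_hash or 'FAILED'))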
Example #9
        expected = {
            # Fragment: the test setup and the leading expected fields are
            # missing from the original snippet.
            u'stats': {
                u'isolated': {
                    u'download': {
                        u'initial_number_items': 0,
                        u'initial_size': 0,
                        u'items_cold': [len(isolated_in_json)],
                        u'items_hot': [],
                    },
                    u'upload': {
                        u'items_cold': [len(isolated_out_json)],
                        u'items_hot': [15],
                    },
                },
            },
            u'version': 5,
        }
        actual = tools.read_json(out)
        # duration can be exactly 0 due to low timer resolution, especially but not
        # exclusively on Windows.
        self.assertLessEqual(0, actual.pop(u'duration'))
        actual_isolated_stats = actual[u'stats'][u'isolated']
        self.assertLessEqual(
            0, actual_isolated_stats[u'download'].pop(u'duration'))
        self.assertLessEqual(0,
                             actual_isolated_stats[u'upload'].pop(u'duration'))
        for i in (u'download', u'upload'):
            for j in (u'items_cold', u'items_hot'):
                actual_isolated_stats[i][j] = large.unpack(
                    base64.b64decode(actual_isolated_stats[i][j]))
        self.assertEqual(expected, actual)

Example #10
  def test_chromium_split(self):
    # Create an .isolate file and a tree of random stuff.
    isolate_file = os.path.join(
        ROOT_DIR, 'tests', 'isolate', 'split.isolate')
    options = self._get_option(isolate_file)
    options.path_variables = {
      'DEPTH': '.',
      'PRODUCT_DIR': os.path.join('files1'),
    }
    options.config_variables = {
      'OS': 'linux',
    }
    complete_state = isolate.load_complete_state(
        options, os.path.join(ROOT_DIR, 'tests', 'isolate'), None, False)
    # By saving the files, it forces splitting the data up.
    complete_state.save_files()

    actual_isolated_master = tools.read_json(
        os.path.join(self.directory, 'foo.isolated'))
    expected_isolated_master = {
      u'algo': u'sha-1',
      u'command': [u'python', u'split.py'],
      u'files': {
        u'split.py': {
          u'm': 488,
          u'h': unicode(hash_file('tests', 'isolate', 'split.py')),
          u's': _size('tests', 'isolate', 'split.py'),
        },
      },
      u'includes': [
        unicode(hash_file(os.path.join(self.directory, 'foo.0.isolated'))),
        unicode(hash_file(os.path.join(self.directory, 'foo.1.isolated'))),
      ],
      u'relative_cwd': u'.',
      u'version': unicode(isolate.isolateserver.ISOLATED_FILE_VERSION),
    }
    self._cleanup_isolated(expected_isolated_master)
    self.assertEqual(expected_isolated_master, actual_isolated_master)

    actual_isolated_0 = tools.read_json(
        os.path.join(self.directory, 'foo.0.isolated'))
    expected_isolated_0 = {
      u'algo': u'sha-1',
      u'files': {
        os.path.join(u'test', 'data', 'foo.txt'): {
          u'm': 416,
          u'h': unicode(
              hash_file('tests', 'isolate', 'test', 'data', 'foo.txt')),
          u's': _size('tests', 'isolate', 'test', 'data', 'foo.txt'),
        },
      },
      u'version': unicode(isolate.isolateserver.ISOLATED_FILE_VERSION),
    }
    self._cleanup_isolated(expected_isolated_0)
    self.assertEqual(expected_isolated_0, actual_isolated_0)

    actual_isolated_1 = tools.read_json(
        os.path.join(self.directory, 'foo.1.isolated'))
    expected_isolated_1 = {
      u'algo': u'sha-1',
      u'files': {
        os.path.join(u'files1', 'subdir', '42.txt'): {
          u'm': 416,
          u'h': unicode(
              hash_file('tests', 'isolate', 'files1', 'subdir', '42.txt')),
          u's': _size('tests', 'isolate', 'files1', 'subdir', '42.txt'),
        },
      },
      u'version': unicode(isolate.isolateserver.ISOLATED_FILE_VERSION),
    }
    self._cleanup_isolated(expected_isolated_1)
    self.assertEqual(expected_isolated_1, actual_isolated_1)

    actual_saved_state = tools.read_json(
        isolate.isolatedfile_to_state(options.isolated))
    isolated_base = unicode(os.path.basename(options.isolated))
    expected_saved_state = {
      u'OS': unicode(sys.platform),
      u'algo': u'sha-1',
      u'child_isolated_files': [
        isolated_base[:-len('.isolated')] + '.0.isolated',
        isolated_base[:-len('.isolated')] + '.1.isolated',
      ],
      u'command': [u'python', u'split.py'],
      u'config_variables': {
        u'OS': u'linux',
      },
      u'extra_variables': {
        u'foo': u'bar',
      },
      u'files': {
        os.path.join(u'files1', 'subdir', '42.txt'): {
          u'm': 416,
          u'h': unicode(
              hash_file('tests', 'isolate', 'files1', 'subdir', '42.txt')),
          u's': _size('tests', 'isolate', 'files1', 'subdir', '42.txt'),
        },
        u'split.py': {
          u'm': 488,
          u'h': unicode(hash_file('tests', 'isolate', 'split.py')),
          u's': _size('tests', 'isolate', 'split.py'),
        },
        os.path.join(u'test', 'data', 'foo.txt'): {
          u'm': 416,
          u'h': unicode(
              hash_file('tests', 'isolate', 'test', 'data', 'foo.txt')),
          u's': _size('tests', 'isolate', 'test', 'data', 'foo.txt'),
        },
      },
      u'isolate_file': file_path.safe_relpath(
          file_path.get_native_path_case(isolate_file),
          unicode(os.path.dirname(options.isolated))),
      u'path_variables': {
        u'DEPTH': u'.',
        u'PRODUCT_DIR': u'files1',
      },
      u'relative_cwd': u'.',
      u'root_dir': file_path.get_native_path_case(
          os.path.dirname(isolate_file)),
      u'version': unicode(isolate.SavedState.EXPECTED_VERSION),
    }
    self._cleanup_isolated(expected_saved_state)
    self._cleanup_saved_state(actual_saved_state)
    self.assertEqual(expected_saved_state, actual_saved_state)
    self.assertEqual(
        [
          'foo.0.isolated', 'foo.1.isolated',
          'foo.isolated', 'foo.isolated.state',
        ],
        sorted(os.listdir(self.directory)))
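
The hash_file and _size helpers this test relies on are not shown; plausible shapes, assuming relative path parts resolve against the test's working directory (the real test module defines its own versions):

import hashlib
import os

def hash_file(*path_parts):
    """SHA-1 hex digest of a file, matching the u'algo': u'sha-1' entries."""
    with open(os.path.join(*path_parts), 'rb') as f:
        return hashlib.sha1(f.read()).hexdigest()

def _size(*path_parts):
    """Size of a file in bytes, as stored under the u's' key."""
    return os.path.getsize(os.path.join(*path_parts))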
Example #11
        isolated_out = {
            # Fragment: the test setup and the leading isolated_out fields are
            # missing from the original snippet.
            'files': {
                'out.txt': {
                    'h': isolateserver_mock.hash_content('generated data\n'),
                    's': 15,
                    'm': 0640,
                },
            },
            'version': isolated_format.ISOLATED_FILE_VERSION,
        }
        if sys.platform == 'win32':
            del isolated_out['files']['out.txt']['m']
        isolated_out_hash = isolateserver_mock.hash_content(
            json_dumps(isolated_out))
        expected = {
            u'exit_code': 0,
            u'had_hard_timeout': False,
            u'internal_failure': None,
            u'outputs_ref': {
                u'isolated': isolated_out_hash,
                u'isolatedserver': u'http://localhost:1',
                u'namespace': u'default-gzip',
            },
            u'version': 2,
        }
        self.assertEqual(expected, tools.read_json(out))


if __name__ == '__main__':
    fix_encoding.fix_encoding()
    logging.basicConfig(
        level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
    unittest.main()