    def assert_downloadable_files(self, prepared_files, original_files):
        prepared_files_dict = {os.path.basename(f): f for f in prepared_files}
        original_files_dict = {os.path.basename(f): f for f in original_files}

        self.assertEqual(original_files_dict.keys(), prepared_files_dict.keys())

        for filename in original_files_dict.keys():
            prepared_file = prepared_files_dict[filename]
            original_file = original_files_dict[filename]

            prepared_content = file_utils.read_file(prepared_file)
            original_content = file_utils.read_file(original_file)

            self.assertEqual(original_content, prepared_content, 'Different content for file ' + filename)
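
Every snippet on this page calls a project-local helper module (file_utils, fu or f) instead of open() directly. For orientation, here is a minimal sketch of what read_file and write_file presumably look like, inferred only from the keyword arguments used in the examples below (byte_content, keep_newlines); it is an assumption, not any project's actual implementation:

def read_file(filename, byte_content=False, keep_newlines=False):
    # byte_content=True returns the file's raw bytes
    if byte_content:
        with open(filename, 'rb') as f:
            return f.read()
    # newline='' disables newline translation in text mode, matching how
    # keep_newlines=True is used by the log-rewriting examples below
    with open(filename, 'r', newline='' if keep_newlines else None) as f:
        return f.read()

def write_file(filename, content, byte_content=False):
    with open(filename, 'wb' if byte_content else 'w') as f:
        f.write(content)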
Example #2
def __introduce_access_config(context):
    conf_folder = os.path.join(context.conf_folder, 'runners')

    if not os.path.exists(conf_folder):
        return

    conf_files = [os.path.join(conf_folder, file)
                  for file in os.listdir(conf_folder)
                  if file.lower().endswith('.json')]

    for conf_file in conf_files:
        content = file_utils.read_file(conf_file)
        json_object = json.loads(content, object_pairs_hook=OrderedDict)

        if ('output_files' not in json_object) or ('parameters' not in json_object):
            continue

        output_files = json_object['output_files']
        parameter_names = [p['name'] for p in json_object['parameters'] if not is_blank(p.get('name'))]

        changed = False

        for i in range(len(output_files)):
            output_file = output_files[i]

            for param_name in parameter_names:
                output_file = re.sub(r'\$\$\$' + param_name, '${' + param_name + '}', output_file)

            if output_file != output_files[i]:
                output_files[i] = output_file
                changed = True

        if changed:
            _write_json(conf_file, json_object, content)
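
The migration above rewrites legacy $$$-style parameter placeholders in output_files to the newer ${...} syntax. A self-contained illustration of that substitution, using a hypothetical parameter name and path:

import re

param_name = 'file_name'  # hypothetical parameter
output_file = '/var/results/$$$file_name.txt'
print(re.sub(r'\$\$\$' + param_name, '${' + param_name + '}', output_file))
# prints: /var/results/${file_name}.txt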
Example #3
    def _visit_script_configs(self, visitor):
        configs_dir = self._script_configs_folder
        files = os.listdir(configs_dir)

        configs = [file for file in files if file.lower().endswith(".json")]

        result = []

        for config_path in configs:
            path = os.path.join(configs_dir, config_path)

            try:
                content = file_utils.read_file(path)

                visit_result = visitor(path, content)
                if visit_result is not None:
                    result.append(visit_result)

            except StopIteration as e:
                if e.value is not None:
                    result.append(e.value)

            except Exception:
                LOGGER.exception("Couldn't read the file: " + config_path)

        return result
Example #4
    def _assert_files(self, expected_files, actual_files, expected_path_names=None):
        if expected_path_names is None:
            expected_path_names = {}

        expected_form_files = {}
        for key, file in expected_files.items():
            if key in expected_path_names:
                path_name = expected_path_names[key]
            else:
                path_name = file[0]

            form_file = HttpFormFile(file[0], os.path.join(_UPLOADS_FOLDER, path_name))

            put_multivalue(expected_form_files, key, form_file)

        self.assertEqual(expected_form_files, actual_files)

        for key, file in expected_files.items():
            actual_file = actual_files[key]
            if isinstance(actual_file, list):
                actual_file = find_any(actual_file, lambda f: f.filename == file[0])
                if actual_file is None:
                    self.fail('Failed to find actual file for ' + str(file))

            actual_path = actual_file.path
            expected_content = file[1]

            byte_content = isinstance(expected_content, bytes)
            actual_content = file_utils.read_file(actual_path, byte_content=byte_content)

            self.assertEqual(expected_content, actual_content)
Example #5
def bld_to_list(file_path):
  """Takes a BLD file path and returns an OrderedList of comments, new lines
  and dict(rule) in the file."""
  # Abort quickly if its a bad JSON file.
  bld_as_json = su.read_build_file(file_path)

  # Format each rule one by one. Preserve the comments only outside the body
  # of rule dictionary.
  lines = [l.strip() for l in fu.read_file(file_path).split('\n')]
  formatted = []
  while lines:
    line = lines.pop(0)
    if not line or line.startswith(COMMENT_CHAR):
      formatted.append(line)
    elif line.startswith(QUOTE_CHAR):
      name = _rule_name(line)
      #TODO: Improve it to retain comments inside a BLD rule as well.
      formatted.append({RULE_NAME_KEY: name, RULE_BODY_KEY: bld_as_json[name]})
      # Skip remaining lines of this rule now.
      _strip_rule_body(lines)
    else:
      raise ValueError('Illegal text %s found in file %s.' % (line, file_path))

  # Do a sanity check, formatting shouldn't change build file's semantics.
  temp_file = fu.get_temp_file()
  fu.write_file(temp_file, list_to_bld_string(formatted))
  formatted_bld_as_json = su.read_build_file(temp_file)
  assert formatted_bld_as_json == bld_as_json
  return formatted
Example #6
def __introduce_access_config(context):
    file_path = context.conf_file

    if not os.path.exists(file_path):
        return

    content = file_utils.read_file(file_path)
    json_object = json.loads(content, object_pairs_hook=OrderedDict)

    def move_to_access(field, parent_object):
        if 'access' not in json_object:
            json_object['access'] = {}

        json_object['access'][field] = parent_object[field]
        del parent_object[field]

    changed = False

    if 'auth' in json_object:
        auth_object = json_object['auth']
        if 'allowed_users' in auth_object:
            move_to_access('allowed_users', auth_object)
            changed = True

    fields = ['admin_users', 'trusted_ips']
    for field in fields:
        if field in json_object:
            changed = True
            move_to_access(field, json_object)

    if changed:
        _write_json(file_path, json_object, content)
Example #7
    def _write_post_execution_info(log_file_path, exit_code):
        file_content = file_utils.read_file(log_file_path, keep_newlines=True)

        file_parts = file_content.split(OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]
        parameters_text += 'exit_code:' + str(exit_code) + os.linesep

        new_content = parameters_text + OUTPUT_STARTED_MARKER + os.linesep + file_parts[1]
        file_utils.write_file(log_file_path, new_content.encode(ENCODING), byte_content=True)
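
As the code above implies, these execution logs consist of key:value header lines, a marker line, and then the raw process output; _write_post_execution_info re-splits the file on the marker and appends exit_code to the header part. A hypothetical file after it has run (the concrete OUTPUT_STARTED_MARKER value is an assumption):

id:1
script:my_script
user_name:alice
exit_code:0
OUTPUT_STARTED
...captured process output...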
Example #8
    def test_simple_trace(self):
        sys_log_file = open(self.vi.sys_log, "w")
        subprocess.Popen("strace ls",
                         stderr=sys_log_file, stdout=sys_log_file,
                         shell=True)
        sys_log_file.close()
        sleep(5)  # wait for strace output to be available
        sys_logs = fu.read_file(self.vi.sys_log)
        for sys_call in SYS_CALLS:
            self.assertIn(sys_call, sys_logs)
Example #9
def get_secret(temp_folder):
    secret_file = os.path.join(temp_folder, 'secret.dat')
    if os.path.exists(secret_file):
        secret = file_utils.read_file(secret_file, byte_content=True)
        if secret:
            return secret

    secret = os.urandom(256)
    file_utils.write_file(secret_file, secret, byte_content=True)
    return secret
Example #10
def get_tornado_secret():
    secret_file = os.path.join("temp", "secret.dat")
    if os.path.exists(secret_file):
        secret = file_utils.read_file(secret_file, byte_content=True)
        if secret:
            return secret

    secret = os.urandom(256)
    file_utils.write_file(secret_file, secret, byte_content=True)
    return secret
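
Examples #9 and #10 lazily create and cache a 256-byte random secret. A typical (hypothetical) use is as the cookie-signing key of a Tornado application:

import tornado.web

# hypothetical wiring: sign cookies with the cached secret
application = tornado.web.Application(handlers=[],
                                      cookie_secret=get_tornado_secret())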
Example #11
def _validate_config(test_case, expected_filename, expected_body):
    configs_path = os.path.join(test_utils.temp_folder, 'runners')
    path = os.path.join(configs_path, expected_filename)
    all_paths = str(os.listdir(configs_path))
    test_case.assertTrue(
        os.path.exists(path),
        'Failed to find path ' + path + '. Existing paths: ' + all_paths)

    actual_body = json.loads(file_utils.read_file(path))
    test_case.assertEqual(expected_body, actual_body)
Example #12
def _read_old_migrations(temp_folder):
    file_path = os.path.join(temp_folder, 'migrations.txt')

    if not os.path.exists(file_path):
        return []

    content = file_utils.read_file(file_path)
    if not content:
        return []

    return [id.strip() for id in content.split('\n') if id.strip()]
Example #13
    def test_simple_trace(self):
        sys_log_file = open(self.vi.sys_log, "w")
        subprocess.Popen("strace ls",
                         stderr=sys_log_file,
                         stdout=sys_log_file,
                         shell=True)
        sys_log_file.close()
        sleep(5)  # wait for strace output to be available
        sys_logs = fu.read_file(self.vi.sys_log)
        for sys_call in SYS_CALLS:
            self.assertIn(sys_call, sys_logs)
Example #14
def _read_old_migrations(temp_folder):
    file_path = os.path.join(temp_folder, 'migrations.txt')

    if not os.path.exists(file_path):
        return []

    content = file_utils.read_file(file_path)
    if not content:
        return []

    return [id.strip() for id in content.split('\n') if id.strip()]
Example #15
def from_json(conf_path):
    if os.path.exists(conf_path):
        file_content = file_utils.read_file(conf_path)
    else:
        file_content = "{}"

    config = ServerConfig()

    json_object = json.loads(file_content)

    address = "0.0.0.0"
    port = 5000

    ssl = json_object.get("ssl")
    if ssl is not None:
        key_path = model_helper.read_obligatory(ssl, 'key_path', ' for ssl')
        cert_path = model_helper.read_obligatory(ssl, 'cert_path', ' for ssl')

        config.ssl = True
        config.ssl_key_path = key_path
        config.ssl_cert_path = cert_path
        port = 5443

    if json_object.get("address"):
        address = json_object.get("address")
    config.address = address

    if json_object.get("port"):
        port = json_object.get("port")
    config.port = port

    if json_object.get('title'):
        config.title = json_object.get('title')

    auth_config = json_object.get('auth')
    admin_users = _parse_admin_users(json_object)
    if auth_config:
        config.authenticator = create_authenticator(auth_config)

        allowed_users = auth_config.get('allowed_users')

        auth_type = config.authenticator.auth_type
        if auth_type == 'google_oauth' and allowed_users is None:
            raise Exception('auth.allowed_users field is mandatory for ' +
                            auth_type)

        config.authorizer = _create_authorizer(allowed_users, admin_users)
    else:
        config.authorizer = _create_authorizer('*', admin_users)

    config.alerts_config = parse_alerts_config(json_object)
    config.logging_config = parse_logging_config(json_object)

    return config
Example #16
    def find_log(self, execution_id):
        self._renew_files_cache()

        file = self._ids_to_file_map.get(execution_id)
        if file is None:
            LOGGER.warning('find_log: file for %s id not found', execution_id)
            return None

        file_content = file_utils.read_file(os.path.join(self._output_folder, file),
                                            keep_newlines=True)
        log = file_content.split(OUTPUT_STARTED_MARKER, 1)[1]
        return _lstrip_any_linesep(log)
Example #17
    def _write_post_execution_info(log_file_path, exit_code):
        file_content = file_utils.read_file(log_file_path, keep_newlines=True)

        file_parts = file_content.split(OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]
        parameters_text += 'exit_code:' + str(exit_code) + os.linesep

        new_content = parameters_text + OUTPUT_STARTED_MARKER + os.linesep + file_parts[
            1]
        file_utils.write_file(log_file_path,
                              new_content.encode(ENCODING),
                              byte_content=True)
Example #18
    def verify_config_files(self, expected_jobs: Sequence[SchedulingJob]):
        expected_files = [get_job_filename(job) for job in expected_jobs]

        schedules_dir = os.path.join(test_utils.temp_folder, 'schedules')
        test_utils.assert_dir_files(expected_files, schedules_dir, self)

        for job in expected_jobs:
            job_path = os.path.join(schedules_dir, get_job_filename(job))
            content = file_utils.read_file(job_path)
            restored_job = json.loads(content)

            self.assertEqual(restored_job, job.as_serializable_dict())
Example #19
    def get(self):
        try:
            s_id = self.get_query_argument('strategy_id')
            strategy = self.session.query(Strategy).filter(
                Strategy.id == s_id).one()
        except KeyError:
            return self.set_status(404)
        strategy = Strategy.to_dict(strategy)
        strategy['code'] = f.read_file(strategy['code_location'])
        print(strategy['code'])
        resp = {'status': True, 'data': strategy}
        self.finish(resp)
Example #20
def decrypt_file(cipher: str, file_path: str, key: str):
    clean_file = file_utils.unmerk(file_path)

    file_bytes = file_utils.read_file(clean_file)
    # mux returns the chosen cipher class
    algo = mux(cipher).Cipher(key)
    if algo:
        # decrypt the file's bytes
        print("DECRYPTING FILE BYTES", '\n')
        new_bytes = bytes(algo.decrypt(bytes(file_bytes)))
        # write the decrypted bytes back to the unMERKed file
        file_utils.write_file(clean_file, new_bytes)
        print(f"{clean_file} has been unMERKed")
Example #21
def from_json(conf_path):
    if os.path.exists(conf_path):
        file_content = file_utils.read_file(conf_path)
    else:
        file_content = "{}"

    config = ServerConfig()

    json_object = json.loads(file_content)

    port = 5000

    ssl = json_object.get("ssl")
    if ssl is not None:
        key_path = ssl.get("key_path")
        cert_path = ssl.get("cert_path")

        if not key_path:
            raise Exception("key_path is required for ssl")

        if not cert_path:
            raise Exception("cert_path is required for ssl")

        config.ssl = True
        config.ssl_key_path = key_path
        config.ssl_cert_path = cert_path
        port = 5443

    if json_object.get("port"):
        port = json_object.get("port")
    config.port = port

    if json_object.get("auth"):
        auth_object = json_object.get("auth")
        auth_type = auth_object.get("type")

        if not auth_type:
            raise Exception("Auth type should be specified")

        auth_type = auth_type.strip().lower()
        if auth_type == "ldap":
            from auth.auth_ldap import LdapAuthorizer
            config.authorizer = LdapAuthorizer(auth_object)

        else:
            raise Exception(auth_type + " auth is not supported")

    config.alerts_config = parse_alerts_config(json_object)

    return config
Example #22
def encrypt_file(cipher: str, file_path: str, key: str):
    file_bytes = file_utils.read_file(file_path)
    # mux returns the chosen cipher class
    algo = mux(cipher).Cipher(key)
    if algo:
        # encrypt the file's bytes
        print("ENCRYPTING FILE BYTES", '\n')
        new_bytes = bytes(algo.encrypt(bytes(file_bytes)))
        # write the encrypted bytes and rename the file
        file_utils.write_file(file_path, new_bytes)
        MERKed_file = file_utils.merk(file_path)
        print(f"{file_path} has been MERKed:", MERKed_file)
Example #23
    def _write_post_execution_info(execution_id, log_file_path,
                                   post_execution_info_provider):
        exit_code = post_execution_info_provider.get_exit_code(execution_id)
        if exit_code is None:
            return

        file_content = file_utils.read_file(log_file_path)

        file_parts = file_content.split(OUTPUT_STARTED_MARKER + '\n', 1)
        parameters_text = file_parts[0]
        parameters_text += 'exit_code:' + str(exit_code) + '\n'

        new_content = parameters_text + OUTPUT_STARTED_MARKER + '\n' + file_parts[
            1]
        file_utils.write_file(log_file_path, new_content)
Example #24
def encrypt_folder(cipher: str, folder_path: str, key: str):
    # Create a new zip and delete the old folder
    old_folder, new_zip = file_utils.zip_folder(folder_path)
    file_utils.delete_folder(old_folder)

    file_bytes = file_utils.read_file(new_zip)
    # mux returns the chosen cipher class
    algo = mux(cipher).Cipher(key)
    if algo:
        # encrypt the zipped folder
        new_bytes = bytes(algo.encrypt(bytes(file_bytes)))
        # write the encrypted bytes and rename the zip
        file_utils.write_file(new_zip, new_bytes)
        MERKed_file = file_utils.merk(new_zip)
        print(f"{folder_path} has been MERKed:", MERKed_file)
Example #25
    def _path_to_json(self, path):
        if path is None:
            return None

        path = file_utils.normalize_path(path, self._config_folder)

        if os.path.exists(path):
            try:
                file_content = file_utils.read_file(path)
                return json.loads(file_content)
            except Exception:
                LOGGER.exception('Failed to load included file ' + path)
                return None
        else:
            LOGGER.warning('Failed to load included file, path does not exist: ' + path)
            return None
Example #26
    def _path_to_json(self, path):
        if path is None:
            return None

        path = file_utils.normalize_path(path, self._config_folder)

        if os.path.exists(path):
            try:
                file_content = file_utils.read_file(path)
                return json.loads(file_content)
            except Exception:
                LOGGER.exception('Failed to load included file ' + path)
                return None
        else:
            LOGGER.warning('Failed to load included file, path does not exist: ' + path)
            return None
Example #27
def visit_configs(visitor):
    configs_dir = CONFIGS_FOLDER
    files = os.listdir(configs_dir)

    configs = [file for file in files if file.lower().endswith(".json")]

    result = []

    for config_path in configs:
        path = os.path.join(configs_dir, config_path)
        content = file_utils.read_file(path)

        visit_result = visitor(path, content)
        if visit_result is not None:
            result.append(visit_result)

    return result
Example #28
    def _write_post_execution_info(execution_id, log_file_path,
                                   post_execution_info_provider):
        exit_code = post_execution_info_provider.get_exit_code(execution_id)
        if exit_code is None:
            return

        file_content = file_utils.read_file(log_file_path, keep_newlines=True)

        file_parts = file_content.split(OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]
        parameters_text += 'exit_code:' + str(exit_code) + os.linesep

        new_content = parameters_text + OUTPUT_STARTED_MARKER + os.linesep + file_parts[
            1]
        file_utils.write_file(log_file_path,
                              new_content.encode(ENCODING),
                              byte_content=True)
Example #29
    def test_update_script_config(self):
        self.start_server(12345, '127.0.0.1')

        xsrf_token = self.get_xsrf_token(self._admin_session)

        script_path = test_utils.create_file('my_script.py')
        test_utils.write_script_config(
            {
                'name': 's1',
                'script_path': script_path
            }, 's1', self.runners_folder)

        response = self._admin_session.put(
            'http://127.0.0.1:12345/admin/scripts',
            data={
                'filename':
                's1.json',
                'config':
                json.dumps({
                    'name': 'new name',
                    'script': {
                        'mode': 'new_code',
                        'path': script_path,
                        'code': 'abcdef'
                    }
                })
            },
            headers={'X-XSRFToken': xsrf_token},
        )

        self.assertEqual(200, response.status_code)

        conf_response = self.request(
            'get', 'http://127.0.0.1:12345/admin/scripts/new%20name',
            self._admin_session)
        self.assertEqual(
            {
                'config': {
                    'name': 'new name',
                    'script_path': script_path
                },
                'filename': 's1.json'
            }, conf_response)

        script_content = file_utils.read_file(script_path)
        self.assertEqual('abcdef', script_content)
Example #30
def get_import_paths(project_path):
    imports = set()

    class HtmlImportSearcher(HTMLParser):
        def handle_starttag(self, tag, attrs):
            if tag == 'script':
                for attr in attrs:
                    if attr[0] == 'src':
                        imports.add(attr[1])

            if tag == 'link':
                for attr in attrs:
                    if attr[0] == 'href':
                        imports.add(attr[1])

    web_folder = os.path.join(project_path, 'web')
    for file in os.listdir(web_folder):
        if not file.endswith('.html'):
            continue

        file_path = os.path.join(web_folder, file)

        parser = HtmlImportSearcher()
        parser.feed(file_utils.read_file(file_path))

    css_folder = os.path.join(web_folder, 'css')
    for file in os.listdir(css_folder):
        if not file.endswith('.css'):
            continue

        file_path = os.path.join(css_folder, file)

        fonts_paths = extract_font_urls_from_css(file_path)
        for path in fonts_paths:
            imports.add(os.path.join('css', path))

    lib_paths = []
    for import_path in imports:
        if '/libs/' in import_path:
            lib_path = import_path.replace('/', os.path.sep)
            lib_path = os.path.join(web_folder, lib_path)

            lib_paths.append(lib_path)

    return lib_paths
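
The HtmlImportSearcher above is plain html.parser usage: handle_starttag receives each tag name plus its attributes as (name, value) pairs. A self-contained sketch of the same pattern:

from html.parser import HTMLParser

class SrcCollector(HTMLParser):
    # collects src/href values from script and link tags
    def __init__(self):
        super().__init__()
        self.urls = []

    def handle_starttag(self, tag, attrs):
        wanted = {'script': 'src', 'link': 'href'}.get(tag)
        for name, value in attrs:
            if name == wanted:
                self.urls.append(value)

parser = SrcCollector()
parser.feed('<script src="js/app.js"></script><link href="css/main.css">')
print(parser.urls)  # ['js/app.js', 'css/main.css']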
Example #31
def __introduce_access_config(context):
    conf_folder = os.path.join(context.conf_folder, 'runners')

    if not os.path.exists(conf_folder):
        return

    conf_files = [
        os.path.join(conf_folder, file) for file in os.listdir(conf_folder)
        if file.lower().endswith('.json')
    ]

    for conf_file in conf_files:
        content = file_utils.read_file(conf_file)
        json_object = json.loads(content, object_pairs_hook=OrderedDict)

        if ('output_files' not in json_object) or ('parameters'
                                                   not in json_object):
            continue

        output_files = json_object['output_files']
        parameter_names = [
            p['name'] for p in json_object['parameters']
            if not is_blank(p.get('name'))
        ]

        changed = False

        for i in range(len(output_files)):
            output_file = output_files[i]

            if not isinstance(output_file, str):
                continue

            for param_name in parameter_names:
                output_file = re.sub('\\$\\$\\$' + param_name,
                                     '${' + param_name + '}', output_file)

            if output_file != output_files[i]:
                output_files[i] = output_file
                changed = True

        if changed:
            _write_json(conf_file, json_object, content)
Example #32
def restore_jobs(schedules_folder):
    files = [file for file in os.listdir(schedules_folder) if file.endswith('.json')]

    job_dict = {}
    ids = []  # list of ALL ids, including broken configs

    for file in files:
        try:
            content = file_utils.read_file(os.path.join(schedules_folder, file))
            job_json = json.loads(content)
            ids.append(job_json['id'])

            job = scheduling_job.from_dict(job_json)

            job_dict[job.id] = job
        except Exception:
            LOGGER.exception('Failed to parse schedule file: ' + file)

    return job_dict, ids
Example #33
def __migrate_user_id(context):
    output_folder = os.path.join(context.log_folder, 'processes')
    if not os.path.exists(output_folder):
        return

    log_files = [
        os.path.join(output_folder, file) for file in os.listdir(output_folder)
        if file.lower().endswith('.log')
    ]

    for log_file in log_files:
        (correct, parameters_text
         ) = ExecutionLoggingService._read_parameters_text(log_file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(
            parameters_text)
        if not parameters or ('user' not in parameters):
            continue

        if ('user_id' in parameters) and ('user_name' in parameters):
            continue

        file_content = file_utils.read_file(log_file, keep_newlines=True)

        file_parts = file_content.split(
            execution.logging.OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]

        user = parameters['user']

        if 'user_id' not in parameters:
            parameters_text += 'user_id:' + user + os.linesep

        if 'user_name' not in parameters:
            parameters_text += 'user_name:' + user + os.linesep

        new_content = parameters_text + execution.logging.OUTPUT_STARTED_MARKER + os.linesep + file_parts[
            1]
        file_utils.write_file(log_file,
                              new_content.encode(execution.logging.ENCODING),
                              byte_content=True)
Example #34
    def _restore_state(self):
        if not os.path.exists(self.dump_file):
            LOGGER.info('OAuth dump file is missing. Nothing to restore')
            return

        dump_data = file_utils.read_file(self.dump_file)
        dump_json = json.loads(dump_data)

        for user_state in dump_json:
            username = user_state.get('username')
            if not username:
                LOGGER.warning('Missing username in ' + str(user_state))
                continue

            state = _UserState(username)
            self._users[username] = state
            state.groups = user_state.get('groups', [])
            state.last_auth_update = user_state.get('last_auth_update')
            state.last_visit = user_state.get('last_visit')
Example #35
    def test_create_script_config(self):
        self.start_server(12345, '127.0.0.1')

        xsrf_token = self.get_xsrf_token(self._admin_session)

        response = self._admin_session.post(
            'http://127.0.0.1:12345/admin/scripts',
            data={
                'filename':
                'whatever',
                'config':
                json.dumps({
                    'name': 'test conf',
                    'script': {
                        'mode': 'upload_script',
                        'path': 'whatever'
                    }
                })
            },
            files={'uploadedScript': ('my.py', b'script content')},
            headers={'X-XSRFToken': xsrf_token},
        )

        self.assertEqual(200, response.status_code)

        expected_script_path = os.path.join(test_utils.temp_folder, 'conf',
                                            'scripts', 'my.py')

        conf_response = self.request(
            'get', 'http://127.0.0.1:12345/admin/scripts/test%20conf',
            self._admin_session)
        self.assertEqual(
            {
                'config': {
                    'name': 'test conf',
                    'script_path': expected_script_path
                },
                'filename': 'test_conf.json'
            }, conf_response)

        script_content = file_utils.read_file(expected_script_path)
        self.assertEqual('script content', script_content)
Example #36
    def _load_script_code_by_config(self, plain_config):
        script_path = plain_config.get(SCRIPT_PATH_FIELD)
        if is_blank(script_path):
            raise InvalidFileException('', 'Script path is not specified')

        command = process_utils.split_command(
            script_path, plain_config.get(WORKING_DIR_FIELD))
        binary_files = []
        for argument in command:
            if file_utils.exists(argument):
                if file_utils.is_binary(argument):
                    binary_files.append(argument)
                    continue

                return {
                    'code': file_utils.read_file(argument),
                    'file_path': argument
                }

        if binary_files:
            if len(binary_files) == 1:
                return {
                    'code': None,
                    'file_path': binary_files[0],
                    'code_edit_error': 'Cannot edit binary file'
                }

            raise InvalidFileException(
                'command', 'Cannot choose which binary file to edit: ' +
                str(binary_files))

        if len(command) == 1:
            return {
                'code': None,
                'file_path': command[0],
                'code_edit_error': 'Script path does not exist'
            }

        raise InvalidFileException(
            'command',
            'Failed to find script path in command "' + script_path + '"')
Example #37
    def test_attach_trace(self):
        sys_log_file = open(self.vi.sys_log, "w")
        vdisplay = ffm.start_xvfb()
        driver, _, proc = ffm.get_browser(self.vi.ff_log)
        subprocess.Popen("strace -p %d" % proc.pid,
                         stderr=sys_log_file, stdout=sys_log_file,
                         shell=True)
        driver.get("https://google.com")
        sleep(1)
        driver.close()
        proc.terminate()
        driver.quit()
        ffm.stop_xvfb(vdisplay)
        sys_log_file.close()
        sleep(5)  # wait for strace output to be available
        sys_logs = fu.read_file(self.vi.sys_log)
        for sys_call in SYS_CALLS:
            self.assertIn(sys_call, sys_logs,
                          """Cannot trace process, make sure you set up
                          ptrace permissions in /proc/sys/kernel/yama/ptrace_scope as
                          described in etc/setup.sh.""")
Example #38
def __introduce_access_config():
    file_path = os.path.join('conf', 'conf.json')

    if not os.path.exists(file_path):
        return

    content = file_utils.read_file(file_path)
    json_object = json.loads(content)

    def move_to_access(field, parent_object):
        if 'access' not in json_object:
            json_object['access'] = {}

        json_object['access'][field] = parent_object[field]
        del parent_object[field]

    changed = False

    if 'auth' in json_object:
        auth_object = json_object['auth']
        if 'allowed_users' in auth_object:
            move_to_access('allowed_users', auth_object)
            changed = True

    fields = ['admin_users', 'trusted_ips']
    for field in fields:
        if field in json_object:
            changed = True
            move_to_access(field, json_object)

    if changed:
        space_matches = re.findall(r'^\s+', content, flags=re.MULTILINE)
        if space_matches:
            indent_string = space_matches[0].replace('\t', '    ')
            indent = min(len(indent_string), 8)
        else:
            indent = 4

        with open(file_path, 'w') as fp:
            json.dump(json_object, fp, indent=indent)
Example #39
def visit_script_configs(visitor):
    configs_dir = SCRIPT_CONFIGS_FOLDER
    files = os.listdir(configs_dir)

    configs = [file for file in files if file.lower().endswith(".json")]

    result = []

    for config_path in configs:
        path = os.path.join(configs_dir, config_path)

        try:
            content = file_utils.read_file(path)

            visit_result = visitor(path, content)
            if visit_result is not None:
                result.append(visit_result)

        except Exception:
            LOGGER.exception("Couldn't read the file: " + config_path)

    return result
Example #40
def _load_runner_files(conf_folder):
    runners_folder = os.path.join(conf_folder, 'runners')

    if not os.path.exists(runners_folder):
        return

    conf_files = [os.path.join(runners_folder, file)
                  for file in os.listdir(runners_folder)
                  if file.lower().endswith('.json')]

    result = []

    for conf_file in conf_files:
        content = file_utils.read_file(conf_file)
        try:
            json_object = custom_json.loads(content, object_pairs_hook=OrderedDict)
            result.append((conf_file, json_object, content))
        except Exception:
            LOGGER.exception('Failed to load file for migration: ' + conf_file)
            continue

    return result
Example #41
    def test_attach_trace(self):
        sys_log_file = open(self.vi.sys_log, "w")
        vdisplay = ffm.start_xvfb()
        driver, _, proc = ffm.get_browser(self.vi.ff_log)
        subprocess.Popen("strace -p %d" % proc.pid,
                         stderr=sys_log_file,
                         stdout=sys_log_file,
                         shell=True)
        driver.get("https://google.com")
        sleep(1)
        driver.close()
        proc.terminate()
        driver.quit()
        ffm.stop_xvfb(vdisplay)
        sys_log_file.close()
        sleep(5)  # wait for strace output to be available
        sys_logs = fu.read_file(self.vi.sys_log)
        for sys_call in SYS_CALLS:
            self.assertIn(
                sys_call, sys_logs,
                """Cannot trace process, make sure you set up
                ptrace permissions in /proc/sys/kernel/yama/ptrace_scope as
                described in etc/setup.sh.""")
Example #42
def __migrate_user_id(context):
    output_folder = os.path.join(context.log_folder, 'processes')
    if not os.path.exists(output_folder):
        return

    log_files = [os.path.join(output_folder, file)
                 for file in os.listdir(output_folder)
                 if file.lower().endswith('.log')]

    for log_file in log_files:
        (correct, parameters_text) = ExecutionLoggingService._read_parameters_text(log_file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(parameters_text)
        if not parameters or ('user' not in parameters):
            continue

        if ('user_id' in parameters) and ('user_name' in parameters):
            continue

        file_content = file_utils.read_file(log_file, keep_newlines=True)

        file_parts = file_content.split(execution.logging.OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]

        user = parameters['user']

        if 'user_id' not in parameters:
            parameters_text += 'user_id:' + user + os.linesep

        if 'user_name' not in parameters:
            parameters_text += 'user_name:' + user + os.linesep

        new_content = parameters_text + execution.logging.OUTPUT_STARTED_MARKER + os.linesep + file_parts[1]
        file_utils.write_file(log_file, new_content.encode(execution.logging.ENCODING), byte_content=True)
Example #43
            break

        if parent_path == ROOT_PROJECT_PATH:
            break

        parent_path = os.path.dirname(parent_path)

new_in_progress = set(pom_paths)

home_folder = os.path.expanduser('~')
unique_name = get_unique_name(ROOT_PROJECT_PATH)
in_progress_file = os.path.join(home_folder, '.incremaven', unique_name)

prev_in_progress = []
if os.path.exists(in_progress_file):
    prev_in_progress = file_utils.read_file(in_progress_file).split("\n")
    prev_in_progress = filter(lambda line: line != "", prev_in_progress)

for pom_path in prev_in_progress:
    if os.path.exists(pom_path):
        pom_paths.add(pom_path)

file_utils.write_file(in_progress_file, "\n".join(pom_paths))

projects = common.to_mvn_projects(pom_paths, ROOT_PROJECT_PATH, ROOT_ONLY)

to_rebuild = []
to_install = []

for project in projects:
    build_date = mvn_utils.target_build_date(project)
Example #44
def __migrate_old_files(context):
    output_folder = os.path.join(context.log_folder, 'processes')
    if not os.path.exists(output_folder):
        return

    log_files = [os.path.join(output_folder, file)
                 for file in os.listdir(output_folder)
                 if file.lower().endswith('.log')]

    def is_new_format(log_file):
        with open(log_file, 'r') as f:
            first_line = f.readline().strip()

            if not first_line.startswith('id:'):
                return False

            for line in f:
                if line.strip() == execution.logging.OUTPUT_STARTED_MARKER:
                    return True

        return False

    old_files = [log_file for log_file in log_files if not is_new_format(log_file)]

    if not old_files:
        return

    existing_ids = set()
    for file in log_files:
        correct, parameters_text = ExecutionLoggingService._read_parameters_text(file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(parameters_text)
        if not parameters or 'id' not in parameters:
            continue

        existing_ids.add(parameters['id'])

    id_generator = (str(id) for id in itertools.count())
    id_generator = filter(lambda id: id not in existing_ids, id_generator)

    for old_file in old_files:
        log_basename = os.path.basename(old_file)
        filename = os.path.splitext(log_basename)[0]

        match = re.fullmatch(r'(.+)_([^_]+)_((\d\d)(\d\d)(\d\d)_(\d\d)(\d\d)(\d\d))', filename)
        if match:
            script_name = match.group(1)
            username = match.group(2)
            start_time = datetime.strptime(match.group(3), '%y%m%d_%H%M%S')
            id = next(id_generator)
        else:
            script_name = 'unknown'
            username = '******'
            start_time = sec_to_datetime(os.path.getctime(old_file))
            id = next(id_generator)

        new_begin = ''
        new_begin += 'id:' + id + '\n'
        new_begin += 'user_name:' + username + '\n'
        new_begin += 'user_id:' + username + '\n'
        new_begin += 'script:' + script_name + '\n'
        new_begin += 'start_time:' + str(to_millis(start_time)) + '\n'
        new_begin += 'command:unknown' + '\n'
        new_begin += execution.logging.OUTPUT_STARTED_MARKER + '\n'

        file_content = file_utils.read_file(old_file)
        file_content = new_begin + file_content
        file_utils.write_file(old_file, file_content)
Example #45
    def read_logs_only(self, log_file):
        content = file_utils.read_file(log_file, keep_newlines=True)
        self.assertIn(OUTPUT_STARTED_MARKER, content)
        log_start = content.index(OUTPUT_STARTED_MARKER) + len(OUTPUT_STARTED_MARKER) + 1
        return content[log_start:]
Example #46
def _replace_line_separators(files, original, new):
    for file in files:
        content = file_utils.read_file(file, byte_content=True)
        replaced_content = content.decode('utf-8').replace(original, new).encode('utf-8')
        file_utils.write_file(file, replaced_content, byte_content=True)
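
A hypothetical call, normalizing Windows line endings to Unix ones in place:

_replace_line_separators(['notes.txt', 'config.ini'], '\r\n', '\n')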
Example #47
    def read_log(self):
        if self.file_path and os.path.exists(self.file_path):
            return file_utils.read_file(self.file_path, keep_newlines=True)

        return None
Example #48
def create_db_from_schema(db_file, schema_file=cm.DB_SCHEMA):
    schema = fu.read_file(schema_file)
    with sq.connect(db_file, timeout=cm.DB_CONN_TIMOEUT) as conn:
        cursor = conn.cursor()
        cursor.executescript(schema)
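
sqlite3's executescript runs the whole multi-statement schema in one call, so a hypothetical invocation only needs the target path (assuming cm.DB_SCHEMA points at a file of CREATE TABLE statements):

create_db_from_schema('/tmp/example.db')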
Example #49
        if not mvn_utils.is_built(unchanged_project):
            print('project ' + str(unchanged_project) + ' was cleaned, sending to rebuild')
            changed_projects.append(unchanged_project)
            continue

        mvn_utils.fast_install(unchanged_project, MAVEN_REPO_PATH)

    mvn_utils.rebuild(ROOT_PROJECT_PATH, changed_projects, MVN_OPTS, silent=False)


current_revision = vcs_gateway.get_revision(ROOT_PROJECT_PATH)

info_file_path = os.path.join(ROOT_PROJECT_PATH, "_ci_rebuild.info")
if os.path.exists(info_file_path):
    last_revision = file_utils.read_file(info_file_path).strip()

    if last_revision != current_revision:
        try:
            incremental_rebuild(last_revision, current_revision)
        except mvn_utils.IncorrectConfigException as e:
            print('ERROR! {}'.format(e))
            sys.exit(-1)
    else:
        print("Svn revision is the same. Skipping rebuild")
else:
    print("No previous revision found, rebuilding the whole root project...")
    mvn_utils.rebuild_root(ROOT_PROJECT_PATH, MVN_OPTS, silent=False)

file_utils.write_file(info_file_path, current_revision)
Example #50
def from_json(conf_path, temp_folder):
    if os.path.exists(conf_path):
        file_content = file_utils.read_file(conf_path)
    else:
        file_content = "{}"

    config = ServerConfig()

    json_object = json.loads(file_content)

    address = "0.0.0.0"
    port = 5000

    ssl = json_object.get("ssl")
    if ssl is not None:
        key_path = model_helper.read_obligatory(ssl, 'key_path', ' for ssl')
        cert_path = model_helper.read_obligatory(ssl, 'cert_path', ' for ssl')

        config.ssl = True
        config.ssl_key_path = key_path
        config.ssl_cert_path = cert_path
        port = 5443

    if json_object.get("address"):
        address = json_object.get("address")
    config.address = address

    if json_object.get("port"):
        port = json_object.get("port")
    config.port = port

    if json_object.get('title'):
        config.title = json_object.get('title')

    access_config = json_object.get('access')
    if access_config:
        allowed_users = access_config.get('allowed_users')
        user_groups = model_helper.read_dict(access_config, 'groups')
    else:
        allowed_users = None
        user_groups = {}

    auth_config = json_object.get('auth')
    if auth_config:
        config.authenticator = create_authenticator(auth_config, temp_folder)

        auth_type = config.authenticator.auth_type
        if auth_type == 'google_oauth' and allowed_users is None:
            raise Exception('auth.allowed_users field is mandatory for ' + auth_type)

        def_trusted_ips = []
        def_admins = []
    else:
        def_trusted_ips = ['127.0.0.1', '::1']
        def_admins = def_trusted_ips

    if access_config:
        config.trusted_ips = strip(read_list(access_config, 'trusted_ips', default=def_trusted_ips))
        admin_users = _parse_admin_users(access_config, default_admins=def_admins)
    else:
        config.trusted_ips = def_trusted_ips
        admin_users = def_admins

    config.allowed_users = _prepare_allowed_users(allowed_users, admin_users, user_groups)
    config.alerts_config = json_object.get('alerts')
    config.callbacks_config = json_object.get('callbacks')
    config.logging_config = parse_logging_config(json_object)
    config.user_groups = user_groups
    config.admin_users = admin_users

    config.max_request_size_mb = read_int_from_config('max_request_size', json_object, default=10)

    return config
Example #51
    def test_write_to_file(self):
        filename = self.new_temp_file('write_test.txt')
        random_str = ut.rand_str(100)
        fu.write_to_file(filename, random_str)
        self.assertEqual(random_str, fu.read_file(filename))
Example #52
    def test_read_file(self):
        file_content = fu.read_file(os.path.realpath(__file__))
        if 'whatever written here' not in file_content:
            self.fail('Cannot read itself')
Example #53
    def _load_groups(self, groups_file):
        if not os.path.exists(groups_file):
            return {}

        content = file_utils.read_file(groups_file)
        return json.loads(content)