Example #1
def create_version_file():
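    # Writes a version string to VERSION_FILE (a module-level constant in the
    # source project): on the 'stable' branch a plain x.y.z semver, bumping the
    # patch when the last git tag matches the npm major.minor; on other
    # branches '<major>.<minor+1>.0-<branch>@<git hash>'.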
    if 'TRAVIS_BRANCH' in os.environ:
        current_branch = os.environ['TRAVIS_BRANCH']
    else:
        current_branch = process_utils.invoke(
            'git rev-parse --abbrev-ref HEAD').strip()

    npm_version = get_npm_version()
    if current_branch == 'stable':
        last_tag = process_utils.invoke(
            'git describe --exclude dev --abbrev=0 --tags').strip()
        last_tag_version = parse_semver_str(last_tag)
        if (last_tag_version[0] == npm_version[0]) and (last_tag_version[1]
                                                        == npm_version[1]):
            new_version = [
                last_tag_version[0], last_tag_version[1],
                last_tag_version[2] + 1
            ]
        else:
            new_version = npm_version
        new_version = '.'.join([str(v) for v in new_version])
    else:
        git_hash = process_utils.invoke('git rev-parse --short HEAD').strip()

        new_version = str(npm_version[0])
        new_version += '.' + str(npm_version[1] + 1)
        new_version += '.0-'
        new_version += current_branch + '@' + git_hash
    file_utils.write_file(VERSION_FILE, new_version)
Example #2
def save_data_list_to_disk2(page_index):
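    # Fetches one page of list data and writes it to disk. config_dict, cf
    # (presumably a ConfigParser) and total_page_no are module-level globals
    # in the source project; they are not defined in this snippet.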

    page_size = 1000
    print("page_size=%s,page_index=%s" % (page_size, page_index))
    # print config_dict

    data_type = int(config_dict["data_type"])
    data_list_folder_name = config_dict["data_list_folder_name"]

    # data_list filename (one file per page, PAGE_SIZE records each)
    data_list_filename = "data_list_%s_%s.json" % (data_type, page_index)
    data_list_filename = data_list_folder_name + data_list_filename

    # list page URL (read from the config file)
    data_list_url = cf.get("access_url", "data_list_url")
    data_list_url = data_list_url.format(data_type, page_index, page_size)
    logging.debug("Data collection URL: %s" % (data_list_url))

    # Collect the data and save it locally
    data_list_data = access_data_utils.get_data(data_list_url)
    file_utils.write_file(data_list_filename, data_list_data)
    logging.debug("File written successfully: %s" % (data_list_filename))
    logging.info("Page %s collected, %s pages remaining, saved to: %s" %
                 (page_index,
                  (total_page_no - page_index), data_list_filename))
    time.sleep(2)
Example #3
def bld_to_list(file_path):
  """Takes a BLD file path and returns an OrderedList of comments, new lines
  and dict(rule) entries in the file."""
  # Abort quickly if it's a bad JSON file.
  bld_as_json = su.read_build_file(file_path)

  # Format each rule one by one. Preserve the comments only outside the body
  # of rule dictionary.
  lines = [l.strip() for l in fu.read_file(file_path).split('\n')]
  formatted = []
  while lines:
    line = lines.pop(0)
    if not line or line.startswith(COMMENT_CHAR):
      formatted.append(line)
    elif line.startswith(QUOTE_CHAR):
      name = _rule_name(line)
      #TODO: Improve it to retain comments inside a BLD rule as well.
      formatted.append({RULE_NAME_KEY: name, RULE_BODY_KEY: bld_as_json[name]})
      # Skip remaining lines of this rule now.
      _strip_rule_body(lines)
    else:
      raise ValueError('Illegal text %s found in file %s.' % (line, file_path))

  # Do a sanity check, formatting shouldn't change build file's semantics.
  temp_file = fu.get_temp_file()
  fu.write_file(temp_file, list_to_bld_string(formatted))
  formatted_bld_as_json = su.read_build_file(temp_file)
  assert formatted_bld_as_json == bld_as_json
  return formatted
Example #4
def save_data_list_to_disk():
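    # NOTE: total_count, dataTypeConfig and DATA_LIST_PATH are presumably
    # module-level globals in the source project; they are not defined here.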
    # Records per page (i.e. each file stores 1000 records)
    page_size = 1000
    if total_count < page_size:
        page_size = total_count

    # Compute the total number of pages (integer ceiling division)
    if total_count % page_size == 0:
        total_page_no = total_count // page_size
    else:
        total_page_no = total_count // page_size + 1

    for index in range(total_page_no):
        page_index = index + 1

        # data_list filename
        data_list_filename = "data_list_%s_%s.json" % (
            dataTypeConfig.get_data_type(), page_index)
        data_list_filename = DATA_LIST_PATH + data_list_filename

        # list page URL
        app_list_url = dataTypeConfig.get_data_list_url()
        app_list_url = app_list_url.format(dataTypeConfig.get_data_type(),
                                           page_index, page_size)

        # Collect the data and save it locally
        data_list_data = access_data_utils.get_data(app_list_url)
        file_utils.write_file(data_list_filename, data_list_data)

        time.sleep(2)
Example #5
def main():
    global config
    config, base_dir_dict = setup_utils.setup()
    tag_omit_list = ['html', 'head', 'body', 'p', 'br', 'em', 'time', 'strong', 'i', 'b', 'code', 'pre', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'h7', 'table', 'thead', 'tbody', 'tr', 'td', 'tfoot']
    xml_parser = etree.HTMLParser()

    # re_ulol = regex.compile(r'(\<(ul|ol)(?!href).*href[ ="]+https*\:\/\/[^=\>]+=http[^\>]+\>(?!</\2\>).*\<(\/\2)\>)')
    # re_li_a = regex.compile(r'(<li[^\>]*\>\s*\<a[^\>]+\shref\=("(javascript|http|\/)[^"]+")[^\>]*\>(?!\<\/a\>).*?\<\/a\>.*?\<\/li\>)')
    # re_href = regex.compile(r'(\<a[^\>]+\shref\=("http[^"]+")[^\>]*\>(?!\<\/a\>).*?\<\/a\>)')
    # re_tags = regex.compile(r'(\<(\w+)[^\>]*\>((?!</\2\>).*?)?\<(\/\2)\>)')
    re_empty = regex.compile(r'(\<([^ /">]+)[^\>]*\>\s*\<\/\2\>)')  # elements with empty/whitespace-only bodies
    re_head_foot = regex.compile(r'(\<(header|footer|form|script|noscript|iframe|button)[^\>]*\>((?!</\2\>).*?)?\<\/\2\>)')  # entire header/footer/form/script/... blocks
    re_input = regex.compile(r'(\<(input)[^\>]*\>((?!</\2\>).*?)?\/>)')  # <input ...> elements up to a self-closing '/>'
    re_comment = regex.compile(r'(\<\!\-\-((?!\-\-\>).*?)?\-\-\>)')  # HTML comments <!-- ... -->
    re_tag_name = regex.compile(r'^<([^ /">]+)')  # tag name at the start of an element string
    re_reverse = regex.compile(r'((?r)\<(\w+)(?!\w).*?\/>)', regex.DOTALL)  # (?r) = regex module's right-to-left scan; spans ending in '/>'
    if not config:
        print('Failed to complete setup, exiting.')
        sys.exit()
    else:
        if 'data_list' in config:
            for data_dir in config['data_list']:
                if 'input' in base_dir_dict:
                    i_dir = base_dir_dict['input'].joinpath(data_dir).resolve()
                if 'work' in base_dir_dict:
                    w_dir = base_dir_dict['work'].joinpath(data_dir).resolve()
                if 'output' in base_dir_dict:
                    o_dir = base_dir_dict['output'].joinpath(data_dir).resolve()
                if not w_dir.is_dir():
                    w_dir.mkdir(parents=True)
                if not o_dir.is_dir():
                    o_dir.mkdir(parents=True)
                #
                #
                #
                print('\nPretty printing in: ' + str(i_dir))
                input_file_list = file_utils.get_file_list(root_path_str=str(i_dir))
                working_file_list = []
                seq_2 = None
                common_elements = None
                for pp_file in input_file_list:
                    print('Working with: ' + str(pp_file))
                    pp_soup = html_utils.make_a_soup(filename=pp_file)
                    base_string = str(pp_soup).replace('\n', '')
                    rever = re_reverse.findall(base_string)
                    for r in rever:
                        print(r[0].replace('/>', '>' + '</' + r[1] + '>'))
                    break
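                    # NOTE: the `break` above stops after the first file, so the
                    # lines below are unreachable in this snippet; they also use
                    # working_string/working_filename, which are never defined here.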
                    working_soup = html_utils.make_a_soup(html_doc_string=working_string)
                    output_filename = o_dir.joinpath('minimized_' + working_filename.stem.replace('_pp', '') + '.html')
                    print('output filename: ' + str(output_filename))
                    file_utils.write_file(fn=output_filename,
                                                overwrite=True,
                                                content=working_soup.prettify())
                    
                print('Done with: ' + str(w_dir))
                # break
        else:
            print('data list not in config.')
Example #6
    def test_list_configs_when_one_broken(self):
        broken_conf_path = _create_script_config_file('broken')
        file_utils.write_file(broken_conf_path, '{ hello ?')
        _create_script_config_file('correct')

        configs = self.config_service.list_configs(self.user)
        self.assertEqual(1, len(configs))
        self.assertEqual('correct', configs[0].name)
Example #7
def write_script_config(conf_object, filename, config_folder=None):
    if config_folder is None:
        config_folder = os.path.join(temp_folder, 'runners')
    file_path = os.path.join(config_folder, filename + '.json')

    config_json = json.dumps(conf_object)
    file_utils.write_file(file_path, config_json)
    return file_path
Example #9
    def save_job(self, job: SchedulingJob):
        user = job.user
        script_name = job.script_name

        filename = file_utils.to_filename(
            '%s_%s_%s.json' % (script_name, user.get_audit_name(), job.id))
        file_utils.write_file(os.path.join(self._schedules_folder, filename),
                              json.dumps(job.as_serializable_dict(), indent=2))
Example #10
    def test_list_configs_when_one_broken(self):
        broken_conf_path = _create_script_config('broken')
        file_utils.write_file(broken_conf_path, '{ hello ?')
        _create_script_config('correct')

        configs = self.config_service.list_configs(self.user)
        self.assertEqual(1, len(configs))
        self.assertEqual('correct', configs[0].name)
Example #11
    def test_static_include_corrupted_file(self):
        included_path = os.path.join(test_utils.temp_folder, 'file.json')
        file_utils.write_file(included_path, 'Hello world!')

        config_model = _create_config_model('main_conf', config={
            'include': included_path,
            'parameters': [create_script_param_config('param1', type='text')]})

        self.assertEqual(1, len(config_model.parameters))
Example #12
    def save_file(self, filename, body, username) -> str:
        upload_folder = self.user_file_storage.prepare_new_folder(
            username, self.folder)
        pref_result_path = os.path.join(upload_folder, filename)

        result_path = file_utils.create_unique_filename(pref_result_path)
        file_utils.write_file(result_path, body, True)

        return file_utils.normalize_path(result_path)
Example #13
def get_secret(secret_file):
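    # Reuse the persisted secret if the file exists and is non-empty;
    # otherwise generate 256 random bytes and store them for next time.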
    if os.path.exists(secret_file):
        secret = file_utils.read_file(secret_file, byte_content=True)
        if secret:
            return secret

    secret = os.urandom(256)
    file_utils.write_file(secret_file, secret, byte_content=True)
    return secret
Example #14
    def _write_post_execution_info(log_file_path, exit_code):
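        # Splices an 'exit_code:<code>' line into the parameters header of the
        # log file (the part before OUTPUT_STARTED_MARKER), then rewrites it.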
        file_content = file_utils.read_file(log_file_path, keep_newlines=True)

        file_parts = file_content.split(OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]
        parameters_text += 'exit_code:' + str(exit_code) + os.linesep

        new_content = parameters_text + OUTPUT_STARTED_MARKER + os.linesep + file_parts[1]
        file_utils.write_file(log_file_path, new_content.encode(ENCODING), byte_content=True)
Example #16
def get_secret(temp_folder):
    secret_file = os.path.join(temp_folder, 'secret.dat')
    if os.path.exists(secret_file):
        secret = file_utils.read_file(secret_file, byte_content=True)
        if secret:
            return secret

    secret = os.urandom(256)
    file_utils.write_file(secret_file, secret, byte_content=True)
    return secret
Example #17
def get_tornado_secret():
    secret_file = os.path.join("temp", "secret.dat")
    if os.path.exists(secret_file):
        secret = file_utils.read_file(secret_file, byte_content=True)
        if secret:
            return secret

    secret = os.urandom(256)
    file_utils.write_file(secret_file, secret, byte_content=True)
    return secret
Example #18
    def test_get_values_when_multiple_parameters(self):
        files_path = os.path.join(test_utils.temp_folder, 'path1', 'path2')
        for i in range(0, 5):
            file_utils.write_file(os.path.join(files_path, 'f' + str(i) + '.txt'), 'test')

        values_provider = DependantScriptValuesProvider(
            'ls ' + test_utils.temp_folder + '/${param1}/${param2}',
            self.create_parameters_supplier('param1', 'param2'))
        self.assertEqual(['f0.txt', 'f1.txt', 'f2.txt', 'f3.txt', 'f4.txt'],
                         values_provider.get_values({'param1': 'path1', 'param2': 'path2'}))
Example #19
    def create_file(self, filepath):
        if not os.path.exists(test_utils.temp_folder):
            os.makedirs(test_utils.temp_folder)

        filename = os.path.basename(filepath)
        folder = os.path.join(test_utils.temp_folder,
                              os.path.dirname(filepath))
        if not os.path.exists(folder):
            os.makedirs(folder)

        file_utils.write_file(os.path.join(folder, filename), 'test text')
Example #20
def renew_metadata(projects, repo_path):
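    # Refreshes the lastUpdated timestamps in each project's local Maven
    # metadata; if the file is missing or unparseable, rewrites it from scratch.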
    if not projects:
        return

    now = datetime.datetime.now()
    current_time = datetime.datetime.strftime(now, "%Y%m%d%H%M%S")

    for project in projects:
        project_repo_path = repo_folder_path(project, repo_path)

        metadata_path = os.path.join(project_repo_path,
                                     "maven-metadata-local.xml")

        update_file = os.path.exists(metadata_path)
        if update_file:
            try:
                xml_utils.replace_in_tree(
                    metadata_path, {
                        "versioning/lastUpdated":
                        current_time,
                        "versioning/snapshotVersions/snapshotVersion/updated":
                        current_time
                    })
            except xml.etree.ElementTree.ParseError:
                print(project.artifact_id + ' metadata is broken, rewriting')
                update_file = False

        if not update_file:
            local_metadata = '<metadata modelVersion="1.1.0">' + \
                             '  <groupId>' + project.group + '</groupId>' + \
                             '  <artifactId>' + project.artifact_id + '</artifactId>' + \
                             '  <version>' + project.version + '</version>' + \
                             '  <versioning>' + \
                             '    <snapshot>' + \
                             '      <localCopy>true</localCopy>' + \
                             '    </snapshot>' + \
                             '    <lastUpdated>' + current_time + '</lastUpdated>' + \
                             '    <snapshotVersions>' + \
                             '      <snapshotVersion>' + \
                             '        <extension>pom</extension>' + \
                             '        <value>' + project.version + '</value>' + \
                             '        <updated>' + current_time + '</updated>' + \
                             '      </snapshotVersion>'
            if requires_archive(project):
                local_metadata += '      <snapshotVersion>' + \
                                  '        <extension>' + project.get_packaging() + '</extension>' + \
                                  '        <value>' + project.version + '</value>' + \
                                  '        <updated>' + current_time + '</updated>' + \
                                  '      </snapshotVersion>'
            local_metadata += '    </snapshotVersions>' + \
                              '  </versioning>' + \
                              '</metadata>'
            file_utils.write_file(metadata_path, local_metadata)
Example #21
    def post(self):
        message = self.get_body_argument('message')
        LOGGER.info('ReceiveAlertHandler. Received alert: ' + message)

        log_files = self.request.files['log']
        if log_files:
            file = log_files[0]
            filename = str(time.time()) + '_' + file.filename

            LOGGER.info('ReceiveAlertHandler. Writing file ' + filename)

            file_path = os.path.join('logs', 'alerts', filename)
            file_utils.write_file(file_path, file.body.decode('utf-8'))
Example #22
    def test_get_parameter_values_cached(self):
        parameters = [
            _create_parameter('p1'),
            _create_parameter('dependant', type='list', script='echo "${p1}"')
        ]
        config_path = _create_config('conf_x', parameters=parameters)
        self.config_service.load_config('conf_x', self.user)

        file_utils.write_file(config_path, '{}')

        values = self.config_service.get_parameter_values(
            'conf_x', 'dependant', {'p1': 'ABC'}, self.user)
        self.assertEqual(['ABC'], values)
Example #23
def encrypt_file(cipher: str, file_path: str, key: str):

    fileBytes = file_utils.read_file(file_path)
    # returns chosen cipher class
    algo = mux(cipher).Cipher(key)
    # Bytes to write to modified file
    if algo:
        # encrypt zipped folder
        print("ENCRYPTING FILE BYTES",'\n')
        newBytes = bytes(algo.encrypt(bytes(fileBytes)))
        # write encrypted bytes and rename zip 
        file_utils.write_file(file_path,newBytes)
        MERKed_file = file_utils.merk(file_path)
        print(f"{file_path} has been MERKed:",MERKed_file)
Example #24
    def setUp(self) -> None:
        super().setUp()
        test_utils.setup()

        authorizer = Authorizer([], ['admin_user', 'admin_non_editor'], [],
                                ['admin_user'], EmptyGroupProvider())
        self.admin_user = User('admin_user', {})
        self.config_service = ConfigService(authorizer, test_utils.temp_folder)

        for pair in [('script.py', b'123'), ('another.py', b'xyz'),
                     ('binary 1.bin', bytes.fromhex('300000004000000a')),
                     ('my_python', bytes.fromhex('7F454C46'))]:
            path = os.path.join(test_utils.temp_folder, pair[0])
            file_utils.write_file(path, pair[1], byte_content=True)
Example #25
def _create_script_config_file(filename, *, name=None, **kwargs):
    conf_folder = os.path.join(test_utils.temp_folder, 'runners')
    file_path = os.path.join(conf_folder, filename + '.json')

    config = {'script_path': 'echo 123'}
    if name is not None:
        config['name'] = name

    if kwargs:
        config.update(kwargs)

    config_json = json.dumps(config)
    file_utils.write_file(file_path, config_json)
    return file_path
Example #26
def decrypt_file(cipher: str, file_path: str, key: str):
    clean_file = file_utils.unmerk(file_path)
    
    fileBytes = file_utils.read_file(clean_file)
    # returns chosen cipher class
    algo = mux(cipher).Cipher(key)
    # Bytes to write to modified file
    if algo:
        # decrypt zipped folder
        print("DECRYPTING FILE BYTES",'\n')
        newBytes = bytes(algo.decrypt(bytes(fileBytes)))
        # write encrypted bytes and rename zip 
        file_utils.write_file(clean_file,newBytes)
        print(f"{clean_file} has been unMERKed")
Example #27
def save_data_list_to_disk(config_dict):
    data_type = int(config_dict["data_type"])
    get_type = int(config_dict["get_type"])
    data_list_folder_name = config_dict["data_list_folder_name"]
    if file_utils.clear_folder(data_list_folder_name):
        logging.info("Cleared folder: %s" % (data_list_folder_name))

    begin_time = time.time()
    logging.info("Data collection start time: %s" %
                 (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
    # Total record count currently on the official site
    total_count = comm_utils.get_curr_nmpa_total_count(data_type)
    # Records per page (i.e. each file stores 1000 records)
    page_size = 1000
    if total_count < page_size:
        page_size = total_count

    # Compute the total number of pages (integer ceiling division)
    if total_count % page_size == 0:
        total_page_no = total_count // page_size
    else:
        total_page_no = total_count // page_size + 1
    # logging("Total=%s, page size=%s, %s pages in total:" % (total_count, page_size, total_page_no))
    logging.info("Current NMPA site data: data_type=%s, total=%s, page size=%s, %s pages in total" %
                 (data_type, total_count, page_size, total_page_no))
    for index in range(total_page_no):
        page_index = index + 1

        # data_list filename (one file per page, PAGE_SIZE records each)
        data_list_filename = "data_list_%s_%s.json" % (data_type, page_index)
        data_list_filename = data_list_folder_name + data_list_filename

        # list page URL (read from the config file)
        data_list_url = cf.get("access_url", "data_list_url")
        data_list_url = data_list_url.format(data_type, page_index, page_size)
        logging.debug("Data collection URL: %s" % (data_list_url))

        # Collect the data and save it locally
        data_list_data = access_data_utils.get_data(data_list_url)
        file_utils.write_file(data_list_filename, data_list_data)
        logging.debug("File written successfully: %s" % (data_list_filename))
        logging.info("Page %s collected, %s pages remaining, saved to: %s" %
                     (page_index,
                      (total_page_no - page_index), data_list_filename))
        time.sleep(2)
    end_time = time.time()
    logging.info("Data collection end time: %s" %
                 (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
    logging.info("Total collection time: %s seconds" % (end_time - begin_time))
Example #28
def renew_metadata(projects, repo_path):
    if not projects:
        return

    now = datetime.datetime.now()
    current_time = datetime.datetime.strftime(now, "%Y%m%d%H%M%S")

    for project in projects:
        project_repo_path = repo_folder_path(project, repo_path)

        metadata_path = os.path.join(project_repo_path, "maven-metadata-local.xml")

        update_file = os.path.exists(metadata_path)
        if update_file:
            try:
                xml_utils.replace_in_tree(metadata_path, {
                    "versioning/lastUpdated": current_time,
                    "versioning/snapshotVersions/snapshotVersion/updated": current_time
                })
            except xml.etree.ElementTree.ParseError:
                print(project.artifact_id + ' metadata is broken, rewriting')
                update_file = False

        if not update_file:
            local_metadata = '<metadata modelVersion="1.1.0">' + \
                             '  <groupId>' + project.group + '</groupId>' + \
                             '  <artifactId>' + project.artifact_id + '</artifactId>' + \
                             '  <version>' + project.version + '</version>' + \
                             '  <versioning>' + \
                             '    <snapshot>' + \
                             '      <localCopy>true</localCopy>' + \
                             '    </snapshot>' + \
                             '    <lastUpdated>' + current_time + '</lastUpdated>' + \
                             '    <snapshotVersions>' + \
                             '      <snapshotVersion>' + \
                             '        <extension>pom</extension>' + \
                             '        <value>' + project.version + '</value>' + \
                             '        <updated>' + current_time + '</updated>' + \
                             '      </snapshotVersion>'
            if requires_archive(project):
                local_metadata += '      <snapshotVersion>' + \
                                  '        <extension>' + project.get_packaging() + '</extension>' + \
                                  '        <value>' + project.version + '</value>' + \
                                  '        <updated>' + current_time + '</updated>' + \
                                  '      </snapshotVersion>'
            local_metadata += '    </snapshotVersions>' + \
                              '  </versioning>' + \
                              '</metadata>'
            file_utils.write_file(metadata_path, local_metadata)
Example #29
    def _write_post_execution_info(execution_id, log_file_path,
                                   post_execution_info_provider):
        exit_code = post_execution_info_provider.get_exit_code(execution_id)
        if exit_code is None:
            return

        file_content = file_utils.read_file(log_file_path)

        file_parts = file_content.split(OUTPUT_STARTED_MARKER + '\n', 1)
        parameters_text = file_parts[0]
        parameters_text += 'exit_code:' + str(exit_code) + '\n'

        new_content = parameters_text + OUTPUT_STARTED_MARKER + '\n' + file_parts[1]
        file_utils.write_file(log_file_path, new_content)
Example #30
    def post(self):
        body = tornado_utils.get_request_body(self)

        files = body.get('files', {})
        if files:
            del body['files']

        LOGGER.info('ReceiveAlertHandler. Received alert: ' + str(body))

        for key, value in files.items():
            filename = str(time.time()) + '_' + key

            LOGGER.info('ReceiveAlertHandler. Writing file ' + filename)

            file_path = os.path.join('logs', 'alerts', filename)
            file_utils.write_file(file_path, value)
Example #31
def create_file(filepath, overwrite=False, text='test text'):
    if not os.path.exists(temp_folder):
        os.makedirs(temp_folder)

    filename = os.path.basename(filepath)
    folder = os.path.join(temp_folder, os.path.dirname(filepath))
    if not os.path.exists(folder):
        os.makedirs(folder)

    file_path = os.path.join(folder, filename)
    if os.path.exists(file_path) and not overwrite:
        raise Exception('File ' + file_path + ' already exists')

    file_utils.write_file(file_path, text)

    return file_path
Example #33
def encrypt_folder(cipher: str, folder_path: str, key: str):
    # Create new zip and delete old folder
    oldFolder, newZip = file_utils.zip_folder(folder_path)
    file_utils.delete_folder(oldFolder)

    fileBytes = file_utils.read_file(newZip)
    # returns chosen cipher class
    algo = mux(cipher).Cipher(key)
    # Bytes to write to modified file
    if algo:
        # encrypt zipped folder
        newBytes = bytes(algo.encrypt(bytes(fileBytes)))
        # write encrypted bytes and rename zip 
        file_utils.write_file(newZip,newBytes)
        MERKed_file = file_utils.merk(newZip)
        print(f"{folder_path} has been MERKed:",MERKed_file)
Example #34
def _create_script_config(filename, *, name=None, allowed_users=None, hidden=None):
    conf_folder = os.path.join(test_utils.temp_folder, 'runners')
    file_path = os.path.join(conf_folder, filename + '.json')

    config = {}
    if name is not None:
        config['name'] = name

    if allowed_users is not None:
        config['allowed_users'] = allowed_users

    if hidden is not None:
        config['hidden'] = hidden

    config_json = json.dumps(config)
    file_utils.write_file(file_path, config_json)
    return file_path
Example #35
    def _write_post_execution_info(execution_id, log_file_path,
                                   post_execution_info_provider):
        exit_code = post_execution_info_provider.get_exit_code(execution_id)
        if exit_code is None:
            return

        file_content = file_utils.read_file(log_file_path, keep_newlines=True)

        file_parts = file_content.split(OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]
        parameters_text += 'exit_code:' + str(exit_code) + os.linesep

        new_content = parameters_text + OUTPUT_STARTED_MARKER + os.linesep + file_parts[1]
        file_utils.write_file(log_file_path,
                              new_content.encode(ENCODING),
                              byte_content=True)
Example #36
def create_file(filepath, overwrite=False, text=None):
    if not os.path.exists(temp_folder):
        os.makedirs(temp_folder)

    filename = os.path.basename(filepath)
    folder = os.path.join(temp_folder, os.path.dirname(filepath))
    if not os.path.exists(folder):
        os.makedirs(folder)

    file_path = os.path.join(folder, filename)
    if os.path.exists(file_path) and not overwrite:
        raise Exception('File ' + file_path + ' already exists')

    if text is None:
        text = 'test text'

    file_utils.write_file(file_path, text)

    return file_path
Example #37
def __migrate_user_id(context):
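    # Migration: older execution logs stored only a 'user' field; copy its
    # value into missing 'user_id'/'user_name' entries in the log header.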
    output_folder = os.path.join(context.log_folder, 'processes')
    if not os.path.exists(output_folder):
        return

    log_files = [
        os.path.join(output_folder, file) for file in os.listdir(output_folder)
        if file.lower().endswith('.log')
    ]

    for log_file in log_files:
        correct, parameters_text = ExecutionLoggingService._read_parameters_text(log_file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(
            parameters_text)
        if not parameters or ('user' not in parameters):
            continue

        if ('user_id' in parameters) and ('user_name' in parameters):
            continue

        file_content = file_utils.read_file(log_file, keep_newlines=True)

        file_parts = file_content.split(
            execution.logging.OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]

        user = parameters['user']

        if 'user_id' not in parameters:
            parameters_text += 'user_id:' + user + os.linesep

        if 'user_name' not in parameters:
            parameters_text += 'user_name:' + user + os.linesep

        new_content = parameters_text + execution.logging.OUTPUT_STARTED_MARKER + os.linesep + file_parts[1]
        file_utils.write_file(log_file,
                              new_content.encode(execution.logging.ENCODING),
                              byte_content=True)
Example #38
def _create_script_config(filename,
                          *,
                          name=None,
                          allowed_users=None,
                          hidden=None):
    conf_folder = os.path.join(test_utils.temp_folder, 'runners')
    file_path = os.path.join(conf_folder, filename + '.json')

    config = {}
    if name is not None:
        config['name'] = name

    if allowed_users is not None:
        config['allowed_users'] = allowed_users

    if hidden is not None:
        config['hidden'] = hidden

    config_json = json.dumps(config)
    file_utils.write_file(file_path, config_json)
    return file_path
Example #39
def main(program_name, cmd_line):
  """Main function for this module."""
  args = _parse_command_line(program_name, cmd_line)
  match_indexes = []
  bld_list = bp.bld_to_list(args.bld_file)
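  # A rule matches either by exact rule name or, when args.rule_name is None,
  # by maven group + artifact id, optionally narrowed by classifier and old version.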

  for index in range(len(bld_list)):
    item = bld_list[index]
    if isinstance(item, dict):
      if su.MAVEN_SPECS_KEY in item[bp.RULE_BODY_KEY]:
        specs = item[bp.RULE_BODY_KEY][su.MAVEN_SPECS_KEY]
        match = any([
            item[bp.RULE_NAME_KEY] == args.rule_name,
            all([args.rule_name is None,
                 specs[su.MAVEN_ARTIFACT_ID_KEY] == args.artifact,
                 specs[su.MAVEN_GROUP_ID_KEY] == args.group,
                 (args.classifier is None or
                  specs.get(su.MAVEN_CLASSIFIER_KEY, '') == args.classifier),
                 (args.old_version is None or
                  specs[su.MAVEN_VERSION_KEY] == args.old_version)])])
        if match:
          match_indexes.append(index)

  if len(match_indexes) == 0:
    raise su.Error('Couldn\'t find requested build rule in {} file.'.format(
                   args.bld_file))
  if len(match_indexes) > 1:
    raise su.Error('Update failed, more than 1 match found. '
                   'Provide more info!!')
  index = match_indexes[0]
  rule_body = bld_list[index][bp.RULE_BODY_KEY]
  old_version = rule_body[su.MAVEN_SPECS_KEY][su.MAVEN_VERSION_KEY]
  rule_body[su.MAVEN_SPECS_KEY][su.MAVEN_VERSION_KEY] = args.new_version
  bld_list[index][bp.RULE_BODY_KEY] = rule_body
  fu.write_file(args.bld_file, bp.list_to_bld_string(bld_list))
  msg = 'Successfully updated version from {} to {}'.format(old_version,
                                                            args.new_version)
  return (0, msg)
Example #40
def __migrate_user_id(context):
    output_folder = os.path.join(context.log_folder, 'processes')
    if not os.path.exists(output_folder):
        return

    log_files = [os.path.join(output_folder, file)
                 for file in os.listdir(output_folder)
                 if file.lower().endswith('.log')]

    for log_file in log_files:
        (correct, parameters_text) = ExecutionLoggingService._read_parameters_text(log_file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(parameters_text)
        if not parameters or ('user' not in parameters):
            continue

        if ('user_id' in parameters) and ('user_name' in parameters):
            continue

        file_content = file_utils.read_file(log_file, keep_newlines=True)

        file_parts = file_content.split(execution.logging.OUTPUT_STARTED_MARKER + os.linesep, 1)
        parameters_text = file_parts[0]

        user = parameters['user']

        if 'user_id' not in parameters:
            parameters_text += 'user_id:' + user + os.linesep

        if 'user_name' not in parameters:
            parameters_text += 'user_name:' + user + os.linesep

        new_content = parameters_text + execution.logging.OUTPUT_STARTED_MARKER + os.linesep + file_parts[1]
        file_utils.write_file(log_file, new_content.encode(execution.logging.ENCODING), byte_content=True)
Example #41
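# Merges pom paths left over from a previous (possibly interrupted) run into
# the current set, persists the combined list, then checks which projects need
# a rebuild.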
new_in_progress = set(pom_paths)

home_folder = os.path.expanduser('~')
unique_name = get_unique_name(ROOT_PROJECT_PATH)
in_progress_file = os.path.join(home_folder, '.incremaven', unique_name)

prev_in_progress = []
if os.path.exists(in_progress_file):
    prev_in_progress = file_utils.read_file(in_progress_file).split("\n")
    prev_in_progress = filter(lambda line: line != "", prev_in_progress)

for pom_path in prev_in_progress:
    if os.path.exists(pom_path):
        pom_paths.add(pom_path)

file_utils.write_file(in_progress_file, "\n".join(pom_paths))

projects = common.to_mvn_projects(pom_paths, ROOT_PROJECT_PATH, ROOT_ONLY)

to_rebuild = []
to_install = []

for project in projects:
    build_date = mvn_utils.target_build_date(project)
    if build_date is None:
        print(str(project) + ' needs rebuild. Artifact is missing in target')
        to_rebuild.append(project)
        continue

    project_src_paths = mvn_utils.get_buildable_paths(project)
    src_modification = file_utils.last_modification(project_src_paths)
Example #42
def _from_json(content):
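    # Despite the name, this first serializes `content` to a temp conf.json,
    # then parses it back through server_conf.from_json.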
    json_obj = json.dumps(content)
    conf_path = os.path.join(test_utils.temp_folder, 'conf.json')
    file_utils.write_file(conf_path, json_obj)
    return server_conf.from_json(conf_path, test_utils.temp_folder)
Example #43
def _write_migrations(temp_folder, migrations):
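    # Persists the given migration names to migrations.txt, one per line.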
    file_path = os.path.join(temp_folder, 'migrations.txt')
    file_utils.write_file(file_path, '\n'.join(migrations))
Example #44
def __migrate_old_files(context):
    output_folder = os.path.join(context.log_folder, 'processes')
    if not os.path.exists(output_folder):
        return

    log_files = [os.path.join(output_folder, file)
                 for file in os.listdir(output_folder)
                 if file.lower().endswith('.log')]

    def is_new_format(log_file):
        with open(log_file, 'r') as f:
            first_line = f.readline().strip()

            if not first_line.startswith('id:'):
                return False

            for line in f:
                if line.strip() == execution.logging.OUTPUT_STARTED_MARKER:
                    return True

        return False

    old_files = [log_file for log_file in log_files if not is_new_format(log_file)]

    if not old_files:
        return

    existing_ids = set()
    for file in log_files:
        correct, parameters_text = ExecutionLoggingService._read_parameters_text(file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(parameters_text)
        if not parameters or 'id' not in parameters:
            continue

        existing_ids.add(parameters['id'])

    # Infinite stream of string ids, skipping any id already used by an existing log
    id_generator = (str(id) for id in itertools.count())
    id_generator = filter(lambda id: id not in existing_ids, id_generator)

    for old_file in old_files:
        log_basename = os.path.basename(old_file)
        filename = os.path.splitext(log_basename)[0]

        match = re.fullmatch(r'(.+)_([^_]+)_((\d\d)(\d\d)(\d\d)_(\d\d)(\d\d)(\d\d))', filename)
        if match:
            script_name = match.group(1)
            username = match.group(2)
            start_time = datetime.strptime(match.group(3), '%y%m%d_%H%M%S')
            id = next(id_generator)
        else:
            script_name = 'unknown'
            username = '******'
            start_time = sec_to_datetime(os.path.getctime(old_file))
            id = next(id_generator)

        new_begin = ''
        new_begin += 'id:' + id + '\n'
        new_begin += 'user_name:' + username + '\n'
        new_begin += 'user_id:' + username + '\n'
        new_begin += 'script:' + script_name + '\n'
        new_begin += 'start_time:' + str(to_millis(start_time)) + '\n'
        new_begin += 'command:unknown' + '\n'
        new_begin += execution.logging.OUTPUT_STARTED_MARKER + '\n'

        file_content = file_utils.read_file(old_file)
        file_content = new_begin + file_content
        file_utils.write_file(old_file, file_content)
Example #45
def _replace_line_separators(files, original, new):
    for file in files:
        content = file_utils.read_file(file, byte_content=True)
        replaced_content = content.decode('utf-8').replace(original, new).encode('utf-8')
        file_utils.write_file(file, replaced_content, byte_content=True)
Example #46
    def test_no_history_for_wrong_file(self):
        log_path = os.path.join(test_utils.temp_folder, 'wrong.log')
        file_utils.write_file(log_path, 'log\ntext\n')

        logs = self.logging_service.get_history_entries()
        self.assertEqual(0, len(logs))
Example #47
        if not mvn_utils.is_built(unchanged_project):
            print('project ' + str(unchanged_project) + ' was cleaned, sending to rebuild')
            changed_projects.append(unchanged_project)
            continue

        mvn_utils.fast_install(unchanged_project, MAVEN_REPO_PATH)

    mvn_utils.rebuild(ROOT_PROJECT_PATH, changed_projects, MVN_OPTS, silent=False)


current_revision = vcs_gateway.get_revision(ROOT_PROJECT_PATH)

info_file_path = os.path.join(ROOT_PROJECT_PATH, "_ci_rebuild.info")
if os.path.exists(info_file_path):
    last_revision = file_utils.read_file(info_file_path).strip()

    if last_revision != current_revision:
        try:
            incremental_rebuild(last_revision, current_revision)
        except mvn_utils.IncorrectConfigException as e:
            print('ERROR! {}'.format(e))
            sys.exit(-1)
    else:
        print("Svn revision is the same. Skipping rebuild")
else:
    print("No previous revision found, rebuilding the whole root project...")
    mvn_utils.rebuild_root(ROOT_PROJECT_PATH, MVN_OPTS, silent=False)

file_utils.write_file(info_file_path, current_revision)
Example #48
    def _set_user_groups(self, user, groups):
        self._user_groups[user] = groups

        new_groups_content = json.dumps(self._user_groups, indent=2)
        file_utils.write_file(self._groups_file, new_groups_content)