Example #1
def merge_public_file_with_old(public_xml_path, ids_xml_path, module, config):
    # rdir = get_res_dir(dirname)
    res_dirs = config['project_source_sets'][module]['main_res_directory']
    for rdir in res_dirs:
        old_public = get_file_content(os.path.join(rdir, 'values', 'public.xml'))
        write_merge_result(public_xml_path, old_public)

        old_ids = get_file_content(os.path.join(rdir, 'values', 'ids.xml'))
        write_merge_result(ids_xml_path, old_ids)
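Most snippets on this page treat get_file_content as a small text-reading helper that returns None for missing files. A minimal sketch of that assumption (write_merge_result is hypothetical and project-specific):

import os

def get_file_content(path):
    # Return the file's text content, or None when the file does not exist.
    if not os.path.isfile(path):
        return None
    with open(path, 'r') as f:
        return f.read()

def write_merge_result(path, old_content):
    # Hypothetical merge step: append the previously generated content, if any.
    if old_content:
        with open(path, 'a') as f:
            f.write(old_content)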
Example #3
def post(text_path=None, image_path=None):
    FB_GROUP_ID = os.getenv("fb_group_id")
    TOKEN = os.getenv("fb_token")
    image_file = utils.get_file_content(image_path, file_type="image")
    text = utils.get_file_content(text_path)
    upload = upload_image(image_file, token=TOKEN, group_id=FB_GROUP_ID)
    if upload.ok:
        id = upload.json()["id"]
        post_image_to_group(message=text,
                            id=id,
                            token=TOKEN,
                            group_id=FB_GROUP_ID)
    else:
        print(upload.status_code)
Example #4
def preconfigure_nginx():

    target_runtime_props = ctx.target.instance.runtime_properties

    # This is used by nginx's default.conf to select the relevant configuration
    external_rest_protocol = target_runtime_props['external_rest_protocol']
    internal_rest_port = target_runtime_props['internal_rest_port']

    src_runtime_props['external_rest_protocol'] = external_rest_protocol
    src_runtime_props['internal_cert_path'] = utils.INTERNAL_CERT_PATH
    src_runtime_props['internal_key_path'] = utils.INTERNAL_KEY_PATH
    src_runtime_props['internal_rest_port'] = internal_rest_port
    src_runtime_props['file_server_root'] = utils.MANAGER_RESOURCES_HOME

    # Pass on the path to the certificate to manager_configuration
    target_runtime_props['internal_cert_path'] = utils.INTERNAL_CA_CERT_PATH

    utils.deploy_or_generate_external_ssl_cert(
        [
            target_runtime_props['external_rest_host'],
            target_runtime_props['internal_rest_host']
        ], target_runtime_props['external_rest_host'],
        src_runtime_props['rest_certificate'], src_runtime_props['rest_key'])

    src_runtime_props['external_cert_path'] = utils.EXTERNAL_CERT_PATH
    src_runtime_props['external_key_path'] = utils.EXTERNAL_KEY_PATH

    # The public cert content is used in the outputs later
    target_runtime_props['external_rest_cert_content'] = \
        utils.get_file_content(utils.EXTERNAL_CERT_PATH)

    _deploy_nginx_config_files()
    utils.systemd.enable(NGINX_SERVICE_NAME, append_prefix=False)
Example #5
    def __init__(self, param_obj):
        """Initiate APIClassMethod instance

        """
        self.http_method = param_obj['http_method']
        self.path = param_obj['path']
        self.file_name = param_obj['file_name']
        self.lang = param_obj['lang']
        self.lang_tools = param_obj['lang_tools']
        self.method_params = []
        self.required_id = False
        self.param_obj = None
        self.api_summary = ''
        self.method_summary = ''
        self.method_name = ''

        obj_id_re = re.compile('\{\w+\}')
        if obj_id_re.search(self.path):
            self.required_id = True
            self.path = re.sub(obj_id_re, '%s', self.path)

        self.param_lines = [
            get_file_content('%s/templates/method_params_def.proto' %
                             self.lang)
        ]
Example #6
    def __modify_other_modules_r(self, package_name, finder=None):
        if not finder:
            finder = self._finder

        r_path = android_tools.find_r_file(finder.get_dst_r_dir(),
                                           package_name=package_name)
        if r_path and os.path.exists(r_path):
            target_dir = os.path.join(self.__get_freeline_backup_r_dir(),
                                      package_name.replace('.', os.sep))
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)
            target_path = os.path.join(target_dir, 'R.java')
            if not os.path.exists(target_path):
                self.debug('copy {} to {}'.format(r_path, target_path))
                shutil.copy(r_path, target_path)

                content = get_file_content(target_path)
                content = GradleIncBuildInvoker.remove_final_tag(content)
                content = GradleIncBuildInvoker.extend_main_r(
                    content, self._config['package'])
                content = android_tools.fix_unicode_parse_error(
                    content, target_path)
                write_file_content(target_path, content)

            return target_path
Example #7
def is_main_project(module):
    config_path = os.path.join(module, 'build.gradle')
    if os.path.exists(config_path):
        content = get_file_content(config_path)
        if "apply plugin: 'com.antfortune.freeline'" in content:
            return True
    return False
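A hedged usage sketch: the module directory names below are made up; the function simply probes each candidate's build.gradle for the freeline plugin.

candidate_modules = ['app', 'library', 'sample']  # hypothetical module directories
main_modules = [m for m in candidate_modules if is_main_project(m)]
print(main_modules)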
Example #8
    def check_other_modules_resources(self):
        if self._name == self._config[
                'main_project_name'] and self._all_module_info is not None:
            changed_modules = self._changed_modules

            if len(changed_modules) > 0:
                main_r_fpath = os.path.join(
                    self._finder.get_backup_dir(),
                    self._module_info['packagename'].replace('.',
                                                             os.sep), 'R.java')
                self.debug('modify {}'.format(main_r_fpath))
                write_file_content(
                    main_r_fpath,
                    GradleIncBuildInvoker.remove_final_tag(
                        get_file_content(main_r_fpath)))

                target_main_r_dir = os.path.join(
                    self.__get_freeline_backup_r_dir(),
                    self._module_info['packagename'].replace('.', os.sep))
                if not os.path.exists(target_main_r_dir):
                    os.makedirs(target_main_r_dir)

                target_main_r_path = os.path.join(target_main_r_dir, 'R.java')
                self.debug('copy {} to {}'.format(main_r_fpath,
                                                  target_main_r_path))
                shutil.copy(main_r_fpath, target_main_r_path)

                for module in changed_modules:
                    fpath = self.__modify_other_modules_r(
                        self._all_module_info[module]['packagename'])
                    self.debug('modify {}'.format(fpath))
Example #9
def make_param_string(method):
    """Make the string containing the parameter definition for each method
    in a class

    """
    t_attr = get_file_content('%s/templates/method_params_attr.proto' %
                              method.lang)
    if method.param_obj is None:
        return ''

    for p in method.param_obj:
        if p['name'] == "%sId" % (method.file_name):
            continue

        param_name = "%s%s" % (p['name'],
                               p['dataType'][0].upper() + p['dataType'][1:])

        if 'allowMultiple' in p and p['allowMultiple']:
            param_name = param_name + "Array"

        attr = re.sub('\{ATTR_NAME\}', param_name, t_attr)
        attr = re.sub('\{ATTR_ORIG_NAME\}', p['name'], attr)
        method.param_lines.append(attr)

        method.method_params.append(param_name)

    return ', '.join(method.method_params)
Example #10
    def append_r_file(self):
        if self._name != self._config['main_project_name']:
            backupdir = self.__get_freeline_backup_r_dir()
            main_r_path = os.path.join(backupdir, self._config['package'].replace('.', os.sep), 'R.java')

            # main_r_path existing means resources were modified, so R.java needs to be added to the classpath
            if os.path.exists(main_r_path):
                pns = [self._config['package'], self._module_info['packagename']]

                for m in self._module_info['local_module_dep']:
                    pns.append(self._all_module_info[m]['packagename'])

                for pn in pns:
                    rpath = os.path.join(backupdir, pn.replace('.', os.sep), 'R.java')
                    if os.path.exists(rpath) and rpath not in self._changed_files['src']:
                        self._changed_files['src'].append(rpath)
                        self.debug('add R.java to changed list: ' + rpath)
                    elif pn == self._module_info['packagename']:
                        fpath = self.__modify_other_modules_r(pn)
                        self.debug('modify {}'.format(fpath))
                        if fpath and os.path.exists(fpath):
                            self._changed_files['src'].append(fpath)
                            self.debug('add R.java to changed list: ' + fpath)
        else:
            if is_windows_system():
                main_r_path = os.path.join(self._finder.get_backup_dir(),
                                           self._module_info['packagename'].replace('.', os.sep), 'R.java')
                if os.path.exists(main_r_path):
                    content = android_tools.fix_unicode_parse_error(get_file_content(main_r_path), main_r_path)
                    write_file_content(main_r_path, content)
Example #11
def check_error_codes(file_path, error_codes, string_codes, errors):
    content = get_file_content(file_path)
    if content is None:
        return
    for index, line in enumerate(content.splitlines()):
        line = line.strip()
        start_log = 'LOGS.' in line
        start_loc = 'LOCALIZATION.' in line
        if start_loc or start_log:
            line = line.split('LOGS.')[1] if start_log else line.split(
                'LOCALIZATION.')[1]
            code = line.split("'")
            if len(code) < 2:
                continue
            code = code[1].split("'")[0]
            if code.startswith('E'):
                if code not in error_codes:
                    errors.append('[{}:{}]: Unknown error code "{}"'.format(
                        file_path, index + 1, code))
                else:
                    error_codes[code]['usage'] += 1
                    if error_codes[code]['string'] not in string_codes:
                        errors.append(
                            '[{}:{}]: Unknown string code "{}" used in error code "{}"'
                            .format(file_path, index + 1,
                                    error_codes[code]['string'], code))
            elif code.startswith('S'):
                if code not in string_codes:
                    errors.append('[{}:{}]: Unknown string code "{}"'.format(
                        file_path, index + 1, code))
                else:
                    string_codes[code]['usage'] += 1
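The lookup tables are dictionaries keyed by code; a minimal sketch with made-up codes, matching the fields the function reads and updates:

error_codes = {'E001': {'string': 'S001', 'usage': 0}}
string_codes = {'S001': {'usage': 0}}
errors = []
check_error_codes('src/app.js', error_codes, string_codes, errors)  # hypothetical file path
print(errors, error_codes['E001']['usage'], string_codes['S001']['usage'])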
Example #13
def update_repo_version(repo_name, crate_name, crate_dir_path, temp_dir,
                        update_type, badges_only):
    file_path = join(join(join(temp_dir, repo_name), crate_dir_path),
                     "Cargo.toml")
    output = file_path.replace(temp_dir, "")
    if output.startswith('/'):
        output = output[1:]
    write_msg('=> Updating versions for {}'.format(file_path))
    content = get_file_content(file_path)
    if content is None:
        return False
    toml = TomlHandler(content)
    versions_update = []
    for section in toml.sections:
        if (section.name == 'package'
                or (section.name.startswith('dependencies.')
                    and find_crate(section.name[13:]))):
            version = section.get('version', None)
            if version is None:
                continue
            new_version = None
            if badges_only is False:
                new_version = update_version(version, update_type,
                                             section.name)
            else:
                new_version = version
            if new_version is None:
                return False
            # Print the status directly if it's the crate's version.
            if section.name == 'package':
                write_msg('\t{}: {} => {}'.format(
                    output.split(os_sep)[-2], version, new_version))
                CRATES_VERSION[crate_name] = new_version
            else:  # Otherwise add it to the list to print later.
                versions_update.append({
                    'dependency_name': section.name[13:],
                    'old_version': version,
                    'new_version': new_version
                })
            section.set('version', new_version)
        elif section.name == 'dependencies':
            for entry in section.entries:
                if find_crate(entry):
                    new_version = check_and_update_version(
                        section.entries[entry], update_type, entry)
                    section.set(entry, new_version)
    for up in versions_update:
        write_msg('\t{}: {} => {}'.format(up['dependency_name'],
                                          up['old_version'],
                                          up['new_version']))
    out = str(toml)
    if not out.endswith("\n"):
        out += '\n'
    result = True
    if badges_only is False:
        # We only write into the file if we're not just getting the crates version.
        result = write_into_file(file_path, out)
    write_msg('=> {}: {}'.format(
        output.split(os_sep)[-2], 'Failure' if result is False else 'Success'))
    return result
Example #14
def update_crate_version(repo_name, crate_name, crate_dir_path, temp_dir,
                         specified_crate):
    file_path = join(join(join(temp_dir, repo_name), crate_dir_path),
                     "Cargo.toml")
    output = file_path.replace(temp_dir, "")
    if output.startswith('/'):
        output = output[1:]
    write_msg('=> Updating crate versions for {}'.format(file_path))
    content = get_file_content(file_path)
    if content is None:
        return False
    toml = TomlHandler(content)
    for section in toml.sections:
        if section.name == 'package':
            section.set('version', CRATES_VERSION[crate_name])
        elif specified_crate is not None:
            continue
        elif section.name.startswith('dependencies.') and find_crate(
                section.name[13:]):
            if specified_crate is None and section.name[
                    13:] not in CRATES_VERSION:
                input(
                    '"{}" dependency not found in versions for crate "{}"...'.
                    format(section.name[13:], crate_name))
                continue
            section.set('version', CRATES_VERSION[section.name[13:]])
        elif section.name == 'dependencies':
            for entry in section.entries:
                if find_crate(entry['key']):
                    section.set(entry['key'], CRATES_VERSION[entry['key']])
    result = write_into_file(file_path, str(toml))
    write_msg('=> {}: {}'.format(
        output.split(os_sep)[-2], 'Failure' if result is False else 'Success'))
    return result
Example #15
    def append_r_file(self):
        if self._name != self._config['main_project_name']:
            backupdir = self.__get_freeline_backup_r_dir()
            main_r_path = os.path.join(backupdir, self._config['package'].replace('.', os.sep), 'R.java')

            # main_r_path existing means resources were modified, so R.java needs to be added to the classpath
            if os.path.exists(main_r_path):
                pns = [self._config['package'], self._module_info['packagename']]

                for m in self._module_info['local_module_dep']:
                    pns.append(self._all_module_info[m]['packagename'])

                for pn in pns:
                    rpath = os.path.join(backupdir, pn.replace('.', os.sep), 'R.java')
                    if os.path.exists(rpath) and rpath not in self._changed_files['src']:
                        self._changed_files['src'].append(rpath)
                        self.debug('add R.java to changed list: ' + rpath)
                    elif pn == self._module_info['packagename']:
                        fpath = self.__modify_other_modules_r(pn)
                        self.debug('modify {}'.format(fpath))
                        if os.path.exists(fpath):
                            self._changed_files['src'].append(fpath)
                            self.debug('add R.java to changed list: ' + fpath)
        else:
            if is_windows_system():
                main_r_path = os.path.join(self._finder.get_backup_dir(),
                                           self._module_info['packagename'].replace('.', os.sep), 'R.java')
                content = self.__fix_unicode_parse_error(get_file_content(main_r_path), main_r_path)
                write_file_content(main_r_path, content)
Example #16
def test_data(model, dataset_dir):
    confusion_matrix = defaultdict(Counter)
    doc_count = 0
    count_correct_classification = 0
    print("Testing Data...")
    print("Please wait...")
    for topic in os.listdir(dataset_dir):

        if topic.startswith('.'):
            continue
        topic_dir = dataset_dir + "/" + topic

        for test_file in os.listdir(topic_dir):
            doc_count += 1
            file_path = topic_dir + "/" + test_file
            words = utils.get_file_content(file_path)
            # Topic classified using our model.
            classified_topic = utils.find_topic(model, words)

            # Is it the same as our ground truth?
            if classified_topic == topic:
                count_correct_classification += 1

            # Tabulate results into our confusion matrix.
            confusion_matrix[topic][classified_topic] += 1

    # Return the result object.
    return utils.Result(confusion_matrix, count_correct_classification,
                        doc_count)
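utils.Result only needs to carry the three values returned here; a minimal stand-in (an assumption, not the project's actual class) could be:

from collections import namedtuple

# Hypothetical stand-in for utils.Result as used by test_data().
Result = namedtuple('Result', ['confusion_matrix',
                               'count_correct_classification',
                               'doc_count'])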
Example #17
def fix_package_name(config, manifest):
    if config and config['package'] != config['debug_package']:
        finder = GradleDirectoryFinder(config['main_project_name'],
                                       config['main_project_dir'],
                                       config['build_cache_dir'],
                                       config=config)
        target_manifest_path = os.path.join(finder.get_backup_dir(),
                                            'AndroidManifest.xml')
        if os.path.exists(target_manifest_path):
            return target_manifest_path

        if manifest and os.path.isfile(manifest):
            Logger.debug(
                'find app has debug package name, freeline will fix the package name in manifest'
            )
            content = get_file_content(manifest)
            result = re.sub('package=\"(.*)\"',
                            'package=\"{}\"'.format(config['package']),
                            content)
            Logger.debug('change package name from {} to {}'.format(
                config['debug_package'], config['package']))
            from utils import write_file_content
            write_file_content(target_manifest_path, result)
            Logger.debug(
                'save new manifest to {}'.format(target_manifest_path))
            return target_manifest_path
    return manifest
Example #18
def make_param_string(method):
    """Make the string containing the parameter definition for each method
    in a class

    """
    t_attr = get_file_content('%s/templates/method_params_attr.proto'
                              % method.lang)
    if method.param_obj is None:
        return ''

    for p in method.param_obj:
        if p['name'] == "%sId" % (method.file_name):
            continue

        param_name = "%s%s" % (p['name'],
                     p['dataType'][0].upper() + p['dataType'][1:])

        if 'allowMultiple' in p and p['allowMultiple']:
            param_name = param_name + "Array"

        attr = re.sub('\{ATTR_NAME\}', param_name, t_attr)
        attr = re.sub('\{ATTR_ORIG_NAME\}', p['name'], attr)
        method.param_lines.append(attr)

        method.method_params.append(param_name)

    return ', '.join(method.method_params)
Example #19
def preconfigure_nginx():

    target_runtime_props = ctx.target.instance.runtime_properties

    # This is used by nginx's default.conf to select the relevant configuration
    external_rest_protocol = target_runtime_props['external_rest_protocol']
    internal_cert_path, internal_key_path = utils.generate_internal_ssl_cert(
        target_runtime_props['internal_rest_host']
    )

    src_runtime_props['external_rest_protocol'] = external_rest_protocol
    src_runtime_props['internal_cert_path'] = internal_cert_path
    src_runtime_props['internal_key_path'] = internal_key_path
    src_runtime_props['file_server_root'] = utils.MANAGER_RESOURCES_HOME

    # Pass on the path to the certificate to manager_configuration
    target_runtime_props['internal_cert_path'] = internal_cert_path

    if external_rest_protocol == 'https':
        external_cert_path, external_key_path = \
            utils.deploy_or_generate_external_ssl_cert(
                target_runtime_props['external_rest_host']
            )

        src_runtime_props['external_cert_path'] = external_cert_path
        src_runtime_props['external_key_path'] = external_key_path

        # The public cert content is used in the outputs later
        external_rest_cert_content = utils.get_file_content(external_cert_path)
        target_runtime_props['external_rest_cert_content'] = \
            external_rest_cert_content

    _deploy_nginx_config_files(external_rest_protocol)
    utils.systemd.enable(NGINX_SERVICE_NAME, append_prefix=False)
Example #20
def get_project_info(config):
    Logger.debug("collecting project info, please wait a while...")
    project_info = {}
    if 'modules' in config:
        modules = config['modules']
    else:
        modules = get_all_modules(os.getcwd())

    jar_dependencies_path = os.path.join(config['build_cache_dir'],
                                         'jar_dependencies.json')
    jar_dependencies = []
    if os.path.exists(jar_dependencies_path):
        jar_dependencies = load_json_cache(jar_dependencies_path)

    for module in modules:
        if module['name'] in config['project_source_sets']:
            module_info = {}
            module_info['name'] = module['name']
            module_info['path'] = module['path']
            module_info['relative_dir'] = module['path']
            module_info['dep_jar_path'] = jar_dependencies
            module_info['packagename'] = get_package_name(
                config['project_source_sets'][
                    module['name']]['main_manifest_path'])

            if 'module_dependencies' in config:
                module_info['local_module_dep'] = config[
                    'module_dependencies'][module['name']]
            else:
                gradle_content = remove_comments(
                    get_file_content(
                        os.path.join(module['path'], 'build.gradle')))
                module_info['local_module_dep'] = get_local_dependency(
                    gradle_content)

            project_info[module['name']] = module_info

    for module in modules:
        if module['name'] in config['project_source_sets']:
            if 'module_dependencies' not in config:
                local_deps = project_info[module['name']]['local_module_dep']
                for dep in project_info[module['name']]['local_module_dep']:
                    if dep in project_info:
                        local_deps.extend(
                            project_info[dep]['local_module_dep'])
                local_deps = list(set(local_deps))
                project_info[module['name']]['local_module_dep'] = []
                for item in local_deps:
                    local_dep_name = get_module_name(item)
                    if local_dep_name in project_info:
                        project_info[module['name']][
                            'local_module_dep'].append(local_dep_name)

            project_info[module['name']]['dep_res_path'], project_info[module['name']]['local_dep_res_path'] = \
                get_local_resources_dependencies('resources', config, module, project_info)
            project_info[module['name']]['dep_assets_path'], project_info[module['name']]['local_dep_assets_path'] = \
                get_local_resources_dependencies('assets', config, module, project_info)

    return project_info
Example #21
def test_circular_ref():
    content = parse_file('test4.txt')
    actual_content = "\n".join(content)

    control = get_file_content('test5.txt')

    error_msg = error_msg_tpl % (actual_content, control)
    assert actual_content == control, error_msg
Example #22
def test_parse_file():
    content = parse_file('test1.txt')
    control = get_file_content('test3.txt')

    actual_content = "\n".join(content)

    error_msg = error_msg_tpl % (actual_content, control)    
    assert actual_content == control, error_msg
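Both tests format their assertion message through an error_msg_tpl defined elsewhere in the test module; a plausible definition (an assumption) is:

error_msg_tpl = "Parsed content does not match the control file.\nActual:\n%s\nExpected:\n%s"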
Example #23
    def get_resources_from_dir(self, path, lang):
        """Get JSON Swagger resources from files and
        appends APIClass created from them to self.classes

        """
        resources_json = get_file_content("%s/resources.json" % (path))
        resources = json.loads(resources_json)
        for each_res in resources['apis']:
            print each_res
            each_res['path'] = re.sub('\{format\}', 'json', each_res['path'])
            json_string = get_file_content(path + each_res['path'])
            # Allow invalid JSON exception to be raised
            res = json.loads(json_string)

            if res is not None:
                res['lang'] = lang
                self.classes.append(APIClass(res))
Example #24
    def _body_seg(self, filename):
        image = get_file_content(filename)
        res = self.client.bodySeg(image)
        labelmap = base64.b64decode(res['labelmap'])
        nparr_labelmap = np.fromstring(labelmap, np.uint8)
        labelmapimg = cv2.imdecode(nparr_labelmap, 1)
        im_new_labelmapimg = np.where(labelmapimg == 1, 255, labelmapimg)
        return im_new_labelmapimg
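Here get_file_content must return raw bytes, because the Baidu AIP client expects binary image data; a minimal sketch matching that assumption:

def get_file_content(file_path):
    # Read the image as raw bytes, as expected by client.bodySeg().
    with open(file_path, 'rb') as fp:
        return fp.read()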
Example #25
    def getContainerState(self, container):
        # print(utils.get_file_content_cpu('/proc/stat'))
        CPU_BASE_DIR = "/sys/fs/cgroup/cpuacct/docker/"
        p = utils.get_path_by_container(CPU_BASE_DIR, container, 'cpuacct.usage')
        now = time.time()
        value = float(utils.get_file_content(p))

        MEMORY_BASE_DIR = "/sys/fs/cgroup/memory/docker/"
        p = utils.get_path_by_container(MEMORY_BASE_DIR, container, 'memory.usage_in_bytes')
        usages = float(utils.get_file_content(p))
        p = utils.get_path_by_container(MEMORY_BASE_DIR, container, 'memory.limit_in_bytes')
        limits = float(utils.get_file_content(p))
        p = utils.get_path_by_container(MEMORY_BASE_DIR, container, 'memory.stat')
        d = utils.get_file_content_kv(p)
        rss = d["rss"]
        cache = d["cache"]

        return {'time': now, 'cpu_usage': value / 1e9, 'rss': rss,
                'cache': cache, 'usage': usages, 'limit': limits}
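get_file_content_kv is assumed to parse a cgroup stat file (one "key value" pair per line) into a dict; a minimal sketch:

def get_file_content_kv(path):
    # Parse lines such as "rss 123456" into {'rss': 123456.0}.
    result = {}
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) == 2:
                result[parts[0]] = float(parts[1])
    return result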
Example #26
    def construct_file_contents(self):
        """Construct and return the class definition for the file
        We can't construct methods here, because we need to move some
        methods to the Asterisk class.

        """
        template = get_file_content('%s/templates/class_def.proto' % self.lang)
        template = re.sub('\{CLASS_NAME\}', self.class_name, template)
        template = re.sub('\{FILE_NAME\}', self.file_name, template)
        return template
Example #28
def test_ignore_lines():
    ignore_lines = [
        "^\#.*",
    ]
    content = parse_file('test6.txt', ignore_lines=ignore_lines)
    actual_content = "\n".join(content)

    control = get_file_content('test7.txt')

    error_msg = error_msg_tpl % (actual_content, control)
    assert actual_content == control, error_msg
Example #29
def generate_public_files_by_r(dst_r_path, public_path, ids_path):
    buf = get_file_content(dst_r_path)

    temp = re.findall('<tr><td><code>([^<]+)</code></td>', buf)
    diykv = []
    for i in temp:
        if "{" not in i:
            diykv.append(i)
    dstbuf = ''
    idbuf = '<?xml version="1.0" encoding="utf-8"?>\n'
    idbuf += '<resources>\n'
    dstbuf += idbuf

    result = buf.split('\n')
    type_char = ''
    for r in result:
        if 'public static final class' in r:
            type_char = r.replace('public static final class ', '').replace(
                ' {', '').replace(' ', '').replace('\n', '').replace('\r', '')
        elif 'public static class' in r:
            type_char = r.replace('public static class ', '').replace(
                ' {', '').replace(' ', '').replace('\n', '').replace('\r', '')
            type_char = type_char.replace(' ',
                                          '').replace('\n',
                                                      '').replace('\r', '')
        elif 'public static final int' in r and type_char != '' and '[]' not in r:
            kv = r.replace('public static final int ',
                           '').replace(';', '').split('=')
            name = kv[0].replace(' ', '').replace('\n', '').replace('\r', '')
            id_char = kv[1].replace(' ', '').replace('\n',
                                                     '').replace('\r', '')
            dstbuf += '    <public type="%s" name="%s" id="%s" />\n' % (
                type_char, name, id_char)
            if type_char == 'id' and name not in diykv:
                idbuf += '    <item name="%s" type="id"/>\n' % name

        elif 'public static int' in r and type_char != '' and '[]' not in r:
            kv = r.replace('public static int ', '').replace(';',
                                                             '').split('=')
            name = kv[0].replace(' ', '').replace('\n', '').replace('\r', '')
            id_char = kv[1].replace(' ', '').replace('\n',
                                                     '').replace('\r', '')
            dstbuf += '    <public type="%s" name="%s" id="%s" />\n' % (
                type_char, name, id_char)
            if type_char == 'id' and name not in diykv:
                idbuf += '    <item name="%s" type="id"/>\n' % name

        elif type_char != '' and '}' in r:
            type_char = ''

    dstbuf += '</resources>'
    idbuf += '</resources>'
    write_file_content(public_path, dstbuf)
    write_file_content(ids_path, idbuf)
Example #30
def end_docs_build(temp_dir):
    path = join(temp_dir, consts.DOC_REPO)
    revert_changes(consts.DOC_REPO, temp_dir,
                   ['COPYRIGHT.txt', 'LICENSE-APACHE.txt', 'LICENSE-MIT.txt'])
    lines = get_file_content(join(path,
                                  'target/doc/search-index.js')).split('\n')
    with open(join(path, 'search-index.js'), 'w') as f:
        f.write('\n'.join(SEARCH_INDEX_BEFORE))
        f.write('\n'.join(SEARCH_INDEX))
        f.write('\n'.join(SEARCH_INDEX_AFTER))
    add_to_commit(consts.DOC_REPO, temp_dir, ['.'])
Example #31
def build_docs(repo_name, temp_dir):
    path = join(temp_dir, repo_name)
    features = get_features(join(path, 'Cargo.toml'))
    command = [
        'bash', '-c',
        'cd {} && cargo doc --no-default-features --features "{}"'.format(
            path, features)
    ]
    if not exec_command_and_print_error(command):
        input(
            "Couldn't generate docs! Try to fix it and then press ENTER to continue..."
        )
    doc_folder = join(path, 'target/doc')
    try:
        file_list = ' '.join([
            '"{}"'.format(f) for f in listdir(doc_folder)
            if isfile(join(doc_folder, f))
        ])
    except Exception as e:
        write_error('Error occurred in build docs: {}'.format(e))
        input(
            "It seems like the \"{}\" folder doesn't exist. Try to fix it then press ENTER..."
            .format(doc_folder))
    command = [
        'bash', '-c', 'cd {} && cp -r "{}" src/{} {} "{}"'.format(
            doc_folder, repo_name.replace('-', '_'),
            repo_name.replace('-', '_'), file_list,
            join(temp_dir, consts.DOC_REPO))
    ]
    if not exec_command_and_print_error(command):
        input(
            "Couldn't copy docs! Try to fix it and then press ENTER to continue..."
        )
    lines = get_file_content(join(path,
                                  'target/doc/search-index.js')).split('\n')
    before = True
    fill_extras = len(SEARCH_INDEX_BEFORE) == 0
    for line in lines:
        if line.startswith('searchIndex['):
            before = False
            # We need to be careful in here if we're in a sys repository (which should never be the
            # case!).
            if line.startswith('searchIndex["{}"]'.format(
                    repo_name.replace('-', '_'))):
                SEARCH_INDEX.append(line)
        elif fill_extras is True:
            if before is True:
                SEARCH_INDEX_BEFORE.append(line)
            else:
                SEARCH_INDEX_AFTER.append(line)
    # Only prompt if the crate's entry was never found in the search index.
    if not SEARCH_INDEX:
        input(
            "Couldn't find \"{}\" in `search-index.js`! Try to fix it and then press "
            "ENTER to continue...".format(repo_name.replace('-', '_')))
Example #32
def set_groups(fname, data, fproperty):
    fgroups = []
    # Assign groups whose keyword appears in the file name.
    for group in groups:
        if has_keyword(fname, group):
            fgroups.append(group)

    # Fall back to scanning the file content for each group keyword.
    if not fgroups:
        file_data = get_file_content(fname)
        for group in groups:
            if has_keyword(file_data, group):
                fgroups.append(group)

    fproperty["groups"] = fgroups
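A sketch of the module-level names this function relies on; the group names and matching rule are assumptions:

groups = ['network', 'storage', 'ui']  # hypothetical group names

def has_keyword(text, keyword):
    # Case-insensitive containment check, as the usage above suggests.
    return text is not None and keyword.lower() in text.lower()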
Example #33
def get_project_info(config):
    Logger.debug("collecting project info, please wait a while...")
    project_info = {}
    if 'modules' in config:
        modules = config['modules']
    else:
        modules = get_all_modules(os.getcwd())

    jar_dependencies_path = os.path.join(config['build_cache_dir'], 'jar_dependencies.json')
    jar_dependencies = []
    if os.path.exists(jar_dependencies_path):
        jar_dependencies = load_json_cache(jar_dependencies_path)

    for module in modules:
        if module['name'] in config['project_source_sets']:
            module_info = {}
            module_info['name'] = module['name']
            module_info['path'] = module['path']
            module_info['relative_dir'] = module['path']
            module_info['dep_jar_path'] = jar_dependencies
            module_info['packagename'] = get_package_name(
                config['project_source_sets'][module['name']]['main_manifest_path'])

            if 'module_dependencies' in config:
                module_info['local_module_dep'] = config['module_dependencies'][module['name']]
            else:
                gradle_content = remove_comments(get_file_content(os.path.join(module['path'], 'build.gradle')))
                module_info['local_module_dep'] = get_local_dependency(gradle_content)

            project_info[module['name']] = module_info

    for module in modules:
        if module['name'] in config['project_source_sets']:
            if 'module_dependencies' not in config:
                local_deps = project_info[module['name']]['local_module_dep']
                for dep in project_info[module['name']]['local_module_dep']:
                    if dep in project_info:
                        local_deps.extend(project_info[dep]['local_module_dep'])
                local_deps = list(set(local_deps))
                project_info[module['name']]['local_module_dep'] = []
                for item in local_deps:
                    local_dep_name = get_module_name(item)
                    if local_dep_name in project_info:
                        project_info[module['name']]['local_module_dep'].append(local_dep_name)

            project_info[module['name']]['dep_res_path'], project_info[module['name']]['local_dep_res_path'] = \
                get_local_resources_dependencies('resources', config, module, project_info)
            project_info[module['name']]['dep_assets_path'], project_info[module['name']]['local_dep_assets_path'] = \
                get_local_resources_dependencies('assets', config, module, project_info)

    return project_info
Example #34
def get_all_modules(dir_path):
    settings_path = os.path.join(dir_path, 'settings.gradle')
    if os.path.isfile(settings_path):
        data = get_file_content(settings_path)
        modules = []
        for item in re.findall(r'''['"]:(.*?)['"]''', data):
            index = item.rfind(':')
            if index == -1:
                modules.append({'name': item, 'path': item})
            else:
                modules.append({'name': item[index + 1:], 'path': item.replace(":", os.sep)})
        # modules = [item.replace(":", os.sep) for item in re.findall(r'''['"]:(.*?)['"]''', data)]
        return filter(lambda module: os.path.isdir(os.path.join(dir_path, module['path'])), modules)
    return []
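On Python 3, filter() returns a lazy iterator, so callers typically materialize it; a usage sketch (the project root path is made up):

modules = list(get_all_modules('/path/to/android/project'))
for m in modules:
    print(m['name'], '->', m['path'])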
Example #36
    def __modify_main_r(self):
        main_r_fpath = os.path.join(self._finder.get_backup_dir(),
                                    self._module_info['packagename'].replace('.', os.sep), 'R.java')

        self.debug('modify {}'.format(main_r_fpath))
        write_file_content(main_r_fpath, GradleIncBuildInvoker.remove_final_tag(get_file_content(main_r_fpath)))

        target_main_r_dir = os.path.join(self.__get_freeline_backup_r_dir(),
                                         self._module_info['packagename'].replace('.', os.sep))
        if not os.path.exists(target_main_r_dir):
            os.makedirs(target_main_r_dir)

        target_main_r_path = os.path.join(target_main_r_dir, 'R.java')
        self.debug('copy {} to {}'.format(main_r_fpath, target_main_r_path))
        shutil.copy(main_r_fpath, target_main_r_path)
Example #37
def train_data(dataset_dir, fraction):
    unclassified = {}
    model = defaultdict(Counter)
    topics = Counter()
    word_counter = Counter()
    doc_topic_counter = Counter()
    for topic in os.listdir(dataset_dir):
        if topic.startswith('.'):
            continue
        topic_dir = dataset_dir + "/" + topic

        for cur_file in os.listdir(topic_dir):
            file_path = topic_dir + "/" + cur_file
            words = utils.get_file_content(file_path)

            # Flip a coin to check if the program can see the classification of the document.
            if read_classification(fraction):
                topics[topic] += len(words)
                doc_topic_counter[topic] += 1
                for word in words:
                    model[topic][word] += 1
                    word_counter[word] += 1
            else:
                topics[topic] += 0
                doc_topic_counter[topic] += 0
                if cur_file not in unclassified:
                    unclassified[cur_file] = (words, 'None')
                else:
                    new_name = cur_file + random_name()
                    while new_name in unclassified:
                        print("More duplicates found!")
                        new_name = cur_file + random_name()
                    unclassified[new_name] = (words, 'None')
    # Create our Model.
    model_obj = utils.Model(model, topics, word_counter, doc_topic_counter)

    # Iteratively classify unclassified documents till the loop ends or when there are no more
    # changes to the model.
    for i in range(12):
        print("Iteration %d" % i)
        model_obj, count_changed = train_unclassified_documents(
            unclassified, model_obj)
        if count_changed == 0:
            break

    # Return the object for serialization.
    return model_obj
Example #38
    def __modify_main_r(self):
        main_r_fpath = os.path.join(self._finder.get_backup_dir(),
                                    self._module_info['packagename'].replace('.', os.sep), 'R.java')

        self.debug('modify {}'.format(main_r_fpath))
        buf = GradleIncBuildInvoker.remove_final_tag(get_file_content(main_r_fpath))
        buf = self.__fix_unicode_parse_error(buf, main_r_fpath)
        write_file_content(main_r_fpath, buf)

        target_main_r_dir = os.path.join(self.__get_freeline_backup_r_dir(),
                                         self._module_info['packagename'].replace('.', os.sep))
        if not os.path.exists(target_main_r_dir):
            os.makedirs(target_main_r_dir)

        target_main_r_path = os.path.join(target_main_r_dir, 'R.java')
        self.debug('copy {} to {}'.format(main_r_fpath, target_main_r_path))
        shutil.copy(main_r_fpath, target_main_r_path)
Example #39
    def construct_file_contents(self):
        """Construct and return the contents of the method definition"""
        t_method = get_file_content('%s/templates/method_def.proto' %
                                    self.lang)
        t_method = re.sub('\{API_METHOD_NAME\}', self.method_name, t_method)

        string = self.lang_tools.make_param_string(self)
        t_method = re.sub('\{PARAMS\}', string, t_method)

        call_param_string = self.lang_tools.make_api_call_params(self)
        t_method = re.sub('\{API_CALL_PARAMS\}', call_param_string, t_method)
        method_comment = self.lang_tools.make_method_comment(
            self.api_summary, self.method_summary)
        t_method = re.sub('\{METHOD_COMMENTS\}', method_comment, t_method)
        t_method = re.sub('\{BUILD_API_CALL_PARAMS\}',
                          '\n'.join(self.param_lines), t_method)

        return t_method
Example #40
    def _body_part(self, filename):
        image = get_file_content(filename)
        para = self.client.bodyAnalysis(image)
        if DEBUG:
            print(para)
        person_num = para.get('person_num', 0)
        if person_num < 1:
            raise NoBodyException("no person detected in file %s, details: %s" % (filename, para))
        person = para['person_info'][0]
        # score = person['location']['score']
        # if score < 0.5:
        #     raise NoBodyException()
        loc = person['location']
        x_left = int(loc['left'])
        y_top = int(loc['top'])
        w = int(loc['width'])
        h = int(loc['height'])
        return person['body_parts'], (x_left, y_top, w, h)
Example #41
    def construct_file_contents(self):
        """Construct and return the contents of the method definition"""
        t_method = get_file_content('%s/templates/method_def.proto'
                                    % self.lang)
        t_method = re.sub('\{API_METHOD_NAME\}', self.method_name, t_method)

        string = self.lang_tools.make_param_string(self)
        t_method = re.sub('\{PARAMS\}', string, t_method)

        call_param_string = self.lang_tools.make_api_call_params(self)
        t_method = re.sub('\{API_CALL_PARAMS\}', call_param_string, t_method)
        method_comment = self.lang_tools.make_method_comment(
            self.api_summary, self.method_summary)
        t_method = re.sub('\{METHOD_COMMENTS\}', method_comment, t_method)
        t_method = re.sub('\{BUILD_API_CALL_PARAMS\}',
                          '\n'.join(self.param_lines), t_method)

        return t_method
Example #42
def post(text_path=None, image_path=None, ):
    TOKEN = os.getenv("tg_token")
    CHAT_ID = os.getenv("tg_chat_id")
    if not text_path or not image_path:
        return None
    text = utils.get_file_content(text_path)
    bot = telegram.Bot(TOKEN)
    if len(text) < 100:
        caption = text
    else:
        caption = None
        bot.send_message(chat_id=CHAT_ID, text=text)
    with open(image_path, "rb") as file:
        bot.send_photo(
            chat_id=CHAT_ID,
            photo=file,
            caption=caption
        )
Example #43
def fix_package_name(config, manifest):
    if config and config['package'] != config['debug_package']:
        finder = GradleDirectoryFinder(config['main_project_name'], config['main_project_dir'],
                                       config['build_cache_dir'], config=config)
        target_manifest_path = os.path.join(finder.get_backup_dir(), 'AndroidManifest.xml')
        if os.path.exists(target_manifest_path):
            return target_manifest_path

        if manifest and os.path.isfile(manifest):
            Logger.debug('find app has debug package name, freeline will fix the package name in manifest')
            content = get_file_content(manifest)
            result = re.sub('package=\"(.*)\"', 'package=\"{}\"'.format(config['package']), content)
            Logger.debug('change package name from {} to {}'.format(config['debug_package'], config['package']))
            from utils import write_file_content
            write_file_content(target_manifest_path, result)
            Logger.debug('save new manifest to {}'.format(target_manifest_path))
            return target_manifest_path
    return manifest
Example #44
def generate_public_files_by_r(dst_r_path, public_path, ids_path):
    buf = get_file_content(dst_r_path)

    temp = re.findall('<tr><td><code>([^<]+)</code></td>', buf)
    diykv = []
    for i in temp:
        if "{" not in i:
            diykv.append(i)
    dstbuf = ''
    idbuf = '<?xml version="1.0" encoding="utf-8"?>\n'
    idbuf += '<resources>\n'
    dstbuf += idbuf

    result = buf.split('\n')
    type_char = ''
    for r in result:
        if 'public static final class' in r:
            type_char = r.replace('public static final class ', '').replace(' {', '').replace(' ', '').replace('\n', '').replace('\r', '')
        elif 'public static class' in r:
            type_char = r.replace('public static class ', '').replace(' {', '').replace(' ', '').replace('\n', '').replace('\r', '')
            type_char = type_char.replace(' ', '').replace('\n', '').replace('\r', '')
        elif 'public static final int' in r and type_char != '' and '[]' not in r:
            kv = r.replace('public static final int ', '').replace(';', '').split('=')
            name = kv[0].replace(' ', '').replace('\n', '').replace('\r', '')
            id_char = kv[1].replace(' ', '').replace('\n', '').replace('\r', '')
            dstbuf += '    <public type="%s" name="%s" id="%s" />\n' % (type_char, name, id_char)
            if type_char == 'id' and name not in diykv:
                idbuf += '    <item name="%s" type="id"/>\n' % name

        elif 'public static int' in r and type_char != '' and '[]' not in r:
            kv = r.replace('public static int ', '').replace(';', '').split('=')
            name = kv[0].replace(' ', '').replace('\n', '').replace('\r', '')
            id_char = kv[1].replace(' ', '').replace('\n', '').replace('\r', '')
            dstbuf += '    <public type="%s" name="%s" id="%s" />\n' % (type_char, name, id_char)
            if type_char == 'id' and name not in diykv:
                idbuf += '    <item name="%s" type="id"/>\n' % name

        elif type_char != '' and '}' in r:
            type_char = ''

    dstbuf += '</resources>'
    idbuf += '</resources>'
    write_file_content(public_path, dstbuf)
    write_file_content(ids_path, idbuf)
Example #45
def scan_data_test_ids_project(
    project_path,
    project_options,
):
    data_report = {}

    errors = []

    stats = {
        "scanned_files": 0,
        "errors": 0,
        "keys_found": 0,
    }

    project_found_files = find_files(project_path,
                                     project_options["files_extensions"])

    log.debug("Project files list [{}]: {}".format(len(project_found_files),
                                                   project_found_files))

    for project_file in project_found_files:
        log.info("Scanning {}".format(project_file))

        project_file_absolute_path = project_file.absolute()
        project_file_content = get_file_content(project_file_absolute_path)

        project_file_data_test_id_used = re.findall(DATA_TEST_IDS_REGEX,
                                                    project_file_content)

        sanitize_keys = [(test_id_value)
                         for (garbage_quotes,
                              test_id_value) in project_file_data_test_id_used]
        keys_cardinal = len(sanitize_keys)
        data_report[str(project_file)] = {
            'path': str(project_file),
            'name': str(project_file.name),
            'keys': sanitize_keys,
            'cardinal': keys_cardinal
        }
        stats["keys_found"] = stats["keys_found"] + keys_cardinal

    log.info("Project files list [{}]".format(len(project_found_files)))
    stats["scanned_files"] = len(project_found_files)
    return data_report, errors, stats
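DATA_TEST_IDS_REGEX is defined elsewhere; the two-group unpacking above implies a pattern along these lines (an assumption, not the project's actual regex):

import re

# Hypothetical: group 1 captures the quote character, group 2 the data-testid value.
DATA_TEST_IDS_REGEX = re.compile(r'''data-testid=(["'])([^"']+)\1''')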
Example #46
    def __modify_other_modules_r(self, package_name, finder=None):
        if not finder:
            finder = self._finder

        r_path = android_tools.find_r_file(finder.get_dst_r_dir(), package_name=package_name)
        if os.path.exists(r_path):
            target_dir = os.path.join(self.__get_freeline_backup_r_dir(), package_name.replace('.', os.sep))
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)
            target_path = os.path.join(target_dir, 'R.java')
            if not os.path.exists(target_path):
                self.debug('copy {} to {}'.format(r_path, target_path))
                shutil.copy(r_path, target_path)

                content = get_file_content(target_path)
                content = GradleIncBuildInvoker.remove_final_tag(content)
                content = GradleIncBuildInvoker.extend_main_r(content, self._config['package'])
                content = self.__fix_unicode_parse_error(content, target_path)
                write_file_content(target_path, content)

            return target_path
Example #47
def update_badges(repo_name, temp_dir, specified_crate):
    path = join(join(temp_dir, repo_name), "_data/crates.json")
    content = get_file_content(path)
    current = None
    out = []
    for line in content.split("\n"):
        if line.strip().startswith('"name": "'):
            current = line.split('"name": "')[-1].replace('",', '')
            if specified_crate is not None and current != specified_crate:
                current = None
        elif line.strip().startswith(
                '"max_version": "') and current is not None:
            version = line.split('"max_version": "')[-1].replace('"',
                                                                 '').replace(
                                                                     ',', '')
            out.append(
                line.replace('": "{}"'.format(version), '": {}'.format(
                    CRATES_VERSION[current])) + '\n')
            current = None
            continue
        out.append(line + '\n')
    return write_into_file(path, ''.join(out).replace('\n\n', '\n'))
Example #48
def make_param_string(method):
    """Make the string containing the parameter definition for each method
    in a class

    """
    t_attr = get_file_content('%s/templates/method_params_attr.proto'
                              % method.lang)
    if method.param_obj is None:
        return 'self'

    method.method_params.insert(0, 'self')
    for p in method.param_obj:
        if p['name'] == "%sId" % (method.file_name):
            continue

        #param_name = re.sub('([A-Z]{1,1})', r'_\1', param_name)
        param_name = "%s_%s" % (p['name'], p['dataType'])

        if 'allowMultiple' in p and p['allowMultiple']:
            param_name = param_name + "_list"

        param_name = re.sub('([A-Z]{1,1})', r'_\1', param_name)
        param_name = param_name.lower()
        attr = re.sub('\{ATTR_NAME\}', param_name, t_attr)
        attr = re.sub('\{ATTR_ORIG_NAME\}', p['name'], attr)
        method.param_lines.append(attr)

        if 'defaultValue' in p:
            p['defaultValue'] = "'%s'" % (p['defaultValue'])
        else:
            p['defaultValue'] = None

        param = "%s=%s" % (param_name, p['defaultValue'])

        method.method_params.append(param)

    return ', '.join(method.method_params)
Example #49
    def __init__(self, param_obj):
        """Initiate APIClassMethod instance

        """
        self.http_method = param_obj['http_method']
        self.path = param_obj['path']
        self.file_name = param_obj['file_name']
        self.lang = param_obj['lang']
        self.lang_tools = param_obj['lang_tools']
        self.method_params = []
        self.required_id = False
        self.param_obj = None
        self.api_summary = ''
        self.method_summary = ''
        self.method_name = ''

        obj_id_re = re.compile('\{\w+\}')
        if obj_id_re.search(self.path):
            self.required_id = True
            self.path = re.sub(obj_id_re, '%s', self.path)

        self.param_lines = [get_file_content(
                            '%s/templates/method_params_def.proto'
                            % self.lang)]
Example #50
def preconfigure_nginx():

    target_runtime_props = ctx.target.instance.runtime_properties

    # This is used by nginx's default.conf to select the relevant configuration
    external_rest_protocol = target_runtime_props['external_rest_protocol']
    internal_cert_path, internal_key_path = utils.generate_internal_ssl_cert(
        target_runtime_props['internal_rest_host']
    )

    src_runtime_props['external_rest_protocol'] = external_rest_protocol
    src_runtime_props['internal_cert_path'] = internal_cert_path
    src_runtime_props['internal_key_path'] = internal_key_path
    src_runtime_props['file_server_root'] = utils.MANAGER_RESOURCES_HOME

    # Pass on the path to the certificate to manager_configuration
    target_runtime_props['internal_cert_path'] = internal_cert_path

    if external_rest_protocol == 'https':
        external_cert_path, external_key_path = \
            utils.deploy_or_generate_external_ssl_cert(
                [target_runtime_props['external_rest_host'],
                 target_runtime_props['internal_rest_host']],
                target_runtime_props['external_rest_host']
            )

        src_runtime_props['external_cert_path'] = external_cert_path
        src_runtime_props['external_key_path'] = external_key_path

        # The public cert content is used in the outputs later
        external_rest_cert_content = utils.get_file_content(external_cert_path)
        target_runtime_props['external_rest_cert_content'] = \
            external_rest_cert_content

    _deploy_nginx_config_files(external_rest_protocol)
    utils.systemd.enable(NGINX_SERVICE_NAME, append_prefix=False)
Example #51
def get_package_name(manifest):
    if manifest and os.path.isfile(manifest):
        result = re.search('package=\"(.*)\"', get_file_content(manifest))
        if result:
            return result.group(1)
    return ''
Example #52
rest_host = target_runtime_props['internal_rest_host']
rest_protocol = target_runtime_props['rest_protocol']
rest_port = target_runtime_props['rest_port']
security_enabled = target_runtime_props['security_enabled']
ssl_enabled = target_runtime_props['ssl_enabled']
verify_rest_certificate = \
    target_runtime_props.get('agent_verify_rest_certificate', '')
agent_rest_cert_path = target_runtime_props['agent_rest_cert_path']
broker_ssl_cert_path = target_runtime_props['broker_ssl_cert_path']
internal_rest_cert_content = ''
local_rest_cert_file = ''

if verify_rest_certificate.lower() == 'true':
    local_rest_cert_file = INTERNAL_REST_CERT_PATH
    internal_rest_cert_raw = utils.get_file_content(INTERNAL_REST_CERT_PATH)
    internal_rest_cert_content = \
        utils.escape_for_systemd(internal_rest_cert_raw)

# the file server is accessed through the same host and protocol as the rest
# service, but on a different port
file_server_host = target_runtime_props['file_server_host']
file_server_port = target_runtime_props['file_server_port']
file_server_protocol = target_runtime_props['file_server_protocol']


debug_message = 'mgmtworker configuration: \n' \
                'rest_host: {0}\n' \
                'rest_protocol: {1}\n' \
                'rest_port: {2}\n' \
                'security_enabled: {3}\n' \
    def run(self, argv):
        """Make API self.classes

        Parse Swagger JSON files and make API classes.

        Uses:
        copyright_notice.bit
        proper_object_def.proto
        proper_object_method_def.proto

        """
        args = parse_args(argv)
        methods_to_move = ['get', 'gets']
        asterisk_class = None
        if ((args['dir'] is None or args['dir'] == '')
                and (args['resource'] is None or args['resource'] == '')) \
                or args['lang'] is None or args['lang'] == '':
            print "Usage: ./generate_library --lang=language ", \
                  "[--dir=/path/to/resources/ | ", \
                  "--resource=", \
                  "http://localhost:8088/stasis/api-docs/resources.json] "
            return 1

        self.lang_tools = __import__(args['lang'])

        def remove_moved(method):
            """Remove get* methods from this class and add to Asterisk"""
            if method.method_name in methods_to_move:
                # Add these to the Asterisk class instead
                asterisk_class.methods.append(method)
                return False
            else:
                return True

        for class_ in self.classes:
            if class_.class_name == "Asterisk":
                asterisk_class = class_
            class_.methods[:] = [m for m in class_.methods if remove_moved(m)]

        template_copyright = get_file_content(
            '%s/templates/copyright.proto' % (args['lang'])
        ) + '\n'

        if args['dir']:
            self.get_resources_from_dir(args['dir'], args['lang'])
        elif args['resource']:
            self.get_resources_from_url(args['resource'], args['lang'])

        if len(self.classes) == 0:
            print "No resources found. Are you using Asterisk 12 or later?"
            return 1

        self.classes = sorted(self.classes, cmp=sort_asterisk_first)

        for class_ in self.classes:
            method_texts = []
            print "Generating class %s" % (class_.class_name)
            class_def = class_.construct_file_contents()

            for method in class_.methods:
                if method.method_name in methods_to_move:
                    if class_.class_name != 'Asterisk':
                        continue
                    else:
                        # Rename from get/gets to get_channel, get_channels
                        method.method_name = re.sub('(s*)$', r'_%s\1'
                                                    % (method.file_name),
                                                    method.method_name)
                        method.file_name = 'asterisk'

                print "  method %s.%s" \
                    % (class_.class_name, method.method_name)
                filebit = method.construct_file_contents()
                method_texts.append(filebit)

            methods_blob = '\n\n'.join(method_texts)
            if methods_blob != '':
                # Handle different number of newlines if we have no methods
                # to add.
                methods_blob = '\n' + methods_blob

            class_def = re.sub(r'\{CLASS_METHODS\}', methods_blob, class_def)
            file_contents = '\n\n'.join([template_copyright, class_def])
            file_contents = self.lang_tools.wrap(file_contents)
            write_file('%s/lib/%s.%s' % (args['lang'], class_.file_name,
                       self.lang_tools.FILE_EXTENSION), file_contents)

        license_content = get_file_content('LICENSE')
        write_file('%s/lib/LICENSE' % args['lang'], license_content)
Exemplo n.º 54
0
Arquivo: nodes.py Projeto: kuxi/lisk
    def _get_content(self):
        self.entry.st_atime = time()
        if not self._content:
            self._content = utils.get_file_content(self.doc_id)
        return self._content
def preconfigure_nginx():

    target_runtime_props = ctx.target.instance.runtime_properties
    # this is used by nginx's default.conf to select the relevant configuration
    rest_protocol = target_runtime_props['rest_protocol']
    file_server_protocol = target_runtime_props['file_server_protocol']

    # TODO: NEED TO IMPLEMENT THIS IN CTX UTILS
    ctx.source.instance.runtime_properties['rest_protocol'] = rest_protocol
    ctx.source.instance.runtime_properties['file_server_protocol'] = \
        file_server_protocol
    if rest_protocol == 'https':
        utils.deploy_rest_certificates(
            internal_rest_host=target_runtime_props['internal_rest_host'],
            external_rest_host=target_runtime_props['external_rest_host'])

        # get rest public certificate for output later
        external_rest_cert_content = \
            utils.get_file_content(EXTERNAL_REST_CERT_PATH)
        target_runtime_props['external_rest_cert_content'] = \
            external_rest_cert_content

    ctx.logger.info('Deploying Nginx configuration files...')
    utils.deploy_blueprint_resource(
        '{0}/{1}-rest-server.cloudify'.format(CONFIG_PATH, rest_protocol),
        '/etc/nginx/conf.d/{0}-rest-server.cloudify'.format(rest_protocol),
        NGINX_SERVICE_NAME, load_ctx=False)
    utils.deploy_blueprint_resource(
        '{0}/{1}-file-server.cloudify'
        .format(CONFIG_PATH, file_server_protocol),
        '/etc/nginx/conf.d/{0}-file-server.cloudify'
        .format(file_server_protocol),
        NGINX_SERVICE_NAME, load_ctx=False)
    utils.deploy_blueprint_resource(
        '{0}/nginx.conf'.format(CONFIG_PATH),
        '/etc/nginx/nginx.conf',
        NGINX_SERVICE_NAME, load_ctx=False)
    utils.deploy_blueprint_resource(
        '{0}/default.conf'.format(CONFIG_PATH),
        '/etc/nginx/conf.d/default.conf',
        NGINX_SERVICE_NAME, load_ctx=False)
    utils.deploy_blueprint_resource(
        '{0}/rest-location.cloudify'.format(CONFIG_PATH),
        '/etc/nginx/conf.d/rest-location.cloudify',
        NGINX_SERVICE_NAME, load_ctx=False)
    utils.deploy_blueprint_resource(
        '{0}/fileserver-location.cloudify'.format(CONFIG_PATH),
        '/etc/nginx/conf.d/fileserver-location.cloudify',
        NGINX_SERVICE_NAME, load_ctx=False)
    utils.deploy_blueprint_resource(
        '{0}/redirect-to-fileserver.cloudify'.format(CONFIG_PATH),
        '/etc/nginx/conf.d/redirect-to-fileserver.cloudify',
        NGINX_SERVICE_NAME, load_ctx=False)
    utils.deploy_blueprint_resource(
        '{0}/ui-locations.cloudify'.format(CONFIG_PATH),
        '/etc/nginx/conf.d/ui-locations.cloudify',
        NGINX_SERVICE_NAME, load_ctx=False)
    utils.deploy_blueprint_resource(
        '{0}/logs-conf.cloudify'.format(CONFIG_PATH),
        '/etc/nginx/conf.d/logs-conf.cloudify',
        NGINX_SERVICE_NAME, load_ctx=False)

    utils.systemd.enable(NGINX_SERVICE_NAME,
                         append_prefix=False)
Exemplo n.º 56
0
def get_last_sync_ticket(cache_dir):
    ticket_path = get_last_sync_ticket_path(cache_dir)
    data = get_file_content(ticket_path)
    return 0 if len(data) == 0 else int(data)
Exemplo n.º 57
0
def get_apk_created_ticket(apktime_path):
    data = get_file_content(apktime_path)
    return 0 if len(data) == 0 else int(data)
Exemplo n.º 58
0
def fix_for_windows(path):
    if is_windows_system():
        buf = fix_unicode_parse_error(get_file_content(path), path)
        write_file_content(path, buf)
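The examples above all read files through a project-specific get_file_content helper whose exact implementation differs from project to project. A minimal sketch of the common text-file case, under the assumption (consistent with the ticket helpers above) that a missing file yields an empty string:

import os

def get_file_content(path):
    # Hedged sketch only; not any particular project's implementation.
    if not path or not os.path.isfile(path):
        return ''
    with open(path, 'r') as f:
        return f.read()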