Example #1
            } for k, v in techniques_mappings.items()],
            "gradient": {
                "colors": ["#ffffff", "#66fff3"],
                "minValue": 0,
                "maxValue": 1
            },
            "legendItems": [{
                "label": "Datasets researched",
                "color": "#66fff3"
            }]
        }
        open('../docs/notebooks/small/{}/{}.json'.format(PLATFORM, PLATFORM),
             'w').write(json.dumps(thp_layer))

print("\n[+] Creating dataset summary tables for each platform..")
summary_template = Template(open('templates/summary_template.md').read())
for summary in summary_table:
    if len(summary['dataset']) > 0:
        print("  [>>] Creating summary table for {} datasets..".format(
            summary['platform']))
        summary_for_render = copy.deepcopy(summary)
        markdown = summary_template.render(summary=summary_for_render)
        open(
            '../docs/notebooks/small/{}/{}.md'.format(
                summary['platform'].lower(), summary['platform'].lower()),
            'w').write(markdown)

# ******* Update Jupyter Book TOC File *************
print("\n[+] Writing final TOC file for Jupyter book..")
with open(r'../docs/_toc.yml', 'w') as file:
    yaml.dump(toc_template, file, sort_keys=False)
Example #2
from jinja2 import Template

data = '''
{% raw %}
His name is {{ name }}
{% endraw %}
'''

tm = Template(data)
msg = tm.render(name="Peter")

print(msg)
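
For contrast, a minimal sketch (not part of the example above): inside {% raw %} the placeholder is emitted literally, so render() leaves it untouched, while without it the value is substituted.

from jinja2 import Template

raw_tpl = Template("{% raw %}His name is {{ name }}{% endraw %}")
plain_tpl = Template("His name is {{ name }}")

print(raw_tpl.render(name="Peter"))    # His name is {{ name }}
print(plain_tpl.render(name="Peter"))  # His name is Peter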
Example #3
@pytest.fixture
def test_plot():
    from bokeh.plotting import figure
    test_plot = figure()
    test_plot.circle([1, 2], [2, 3])
    return test_plot


PAGE = Template("""
<!DOCTYPE html>
<html lang="en">
<head>
</head>

<body>
  <script>
  {{js}}
  </script>
  {{tag}}
</body>
""")

#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------


class Test_autoload_static(object):
    def test_return_type(self, test_plot) -> None:
        r = bes.autoload_static(test_plot, CDN, "some/path")
Example #4
                        response = client.label_detection(image=image,
                                                          max_results=20)
                        labels = response.label_annotations

                        print('Labels for : ' + file_name)
                        labelscores = []
                        for label in labels:
                            if label.score > 0.7:
                                labelscores.append(
                                    label.description + ' | ' +
                                    str(round(label.score * 100, 2)) + '%')

                    directory_map["ltks"].append({
                        "name": ltk_name,
                        "url": ltk_url,
                        "image_url": image_url,
                        "scores": labelscores
                    })

        categories.append(directory_map)

#print(categories)
with open('../template.html') as file_:
    template = Template(file_.read())
rendered_file = template.render(categories=categories)

bytes = rendered_file.encode(encoding='UTF-8')

with open("../results.html", "wb") as f:
    f.write(bytes)
Example #5
  - sudo /bin/bash /opt/autoscale/files/expand_disk.sh
  - sudo /bin/sh /opt/autoscale/files/dynatrace.sh "{{dynatrace_api_url}}/deployment/installer/agent/unix/default/latest?Api-Token={{dynatrace_api_token}}&arch=x86&flavor=default" "/tmp/one_agent.sh" "/opt/autoscale"
  - echo "{{component}}" > /opt/autoscale/python/component_name.txt
  - /bin/python3.6 /opt/autoscale/python/create_silent_config.py -m "{{ms_ip}}" -e "{{username}}" -p "{{password}}" -o "{{port}}" -w "{{protocol}}" -r "{{region}}" -n "{{pod}}" -u "{{dynatrace_api_url}}/events" -a "{{dynatrace_api_token}}"
  - sudo /opt/autoscale/custom_prop.sh
  - sudo /bin/python3.6 /opt/autoscale/python/azure_helpers.py
  - sudo /opt/apigee/apigee-setup/bin/setup.sh -p {{component}} -f /opt/autoscale/python/silent_config
  - sudo /opt/apigee/apigee-service/bin/apigee-all restart
  - /bin/python3.6 /opt/autoscale/python/uuid_generator.py -w http -o "{{uuid_port}}" -i localhost -u "{{dynatrace_api_url}}/events" -a "{{dynatrace_api_token}}" -c {{uuid_retry}}
  - sudo systemctl daemon-reload
  - sudo systemctl enable start_stop.service
  - sudo systemctl -l start start_stop.service
  - sudo /bin/sh /opt/autoscale/files/monit_setup.sh
  - sudo rm -rf /opt/autoscale/python/silent_config
"""
template = Template(cloud_init_template)

def get_auth_token(username,password):
    auth_token = base64.b64encode('{}:{}'.format(username,password).encode('utf-8'))
    auth_token = 'Basic {}'.format(auth_token.decode('utf-8'))
    return auth_token

def get_apigee_region(baseUrl,auth_token):
    headers = {
    'Content-Type': 'application/json',
    'Accept': 'application/json',
    'Authorization': auth_token
    }
    response = requests.get(baseUrl+"/v1/regions",headers=headers,verify=False)
    regions = response.json()
    if len(regions) > 0:
Example #6
 def _get_say_template(list_say_template, message_dict):
     if isinstance(list_say_template, list):
         # then we pick randomly one template
         list_say_template = random.choice(list_say_template)
     t = Template(list_say_template)
     return t.render(**message_dict)
Example #7
 def __init__(self):
     self.template = Template('')
     self.context = ''
Example #8
def get_template(templ):
    if templ not in templates:
        t = Template(templ)
        templates[templ] = t
    return templates[templ]
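
A minimal usage sketch, assuming `templates` is a module-level dict acting as a compile cache (that setup is implied above, not shown):

from jinja2 import Template  # used internally by get_template

templates = {}  # assumed module-level cache, keyed by the raw template string

greeting = get_template("Hello {{ who }}!")
print(greeting.render(who="world"))                   # Hello world!
print(get_template("Hello {{ who }}!") is greeting)   # True: compiled once, then reused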
Example #9
def renderApps():
    appsTemplate = Template(open("apps.html").read())
    appsdata = apps.getAllApps()
    appsHTML = appsTemplate.render(appdata=appsdata)
    with open("apps-report.html", "w") as f:
        f.write(appsHTML)
Example #10
class Search(MacroElement):
    """
    Adds a search tool to your map.

    Parameters
    ----------
    data: str/JSON
        GeoJSON strings
    search_zoom: int
        zoom level when searching features, default 12
    search_label: str
        label to index the search, default 'name'
    geom_type: str
        geometry type, default 'Point'
    position: str
        Change the position of the search bar, can be:
        'topleft', 'topright', 'bottomright' or 'bottomleft',
        default 'topleft'

    See https://github.com/stefanocudini/leaflet-search for more information.

    """
    _template = Template("""
        {% macro script(this, kwargs) %}

            var {{this.get_name()}} = new L.GeoJSON({{this.data}});

            {{this._parent.get_name()}}.addLayer({{this.get_name()}});

            var searchControl = new L.Control.Search({
                layer: {{this.get_name()}},
                propertyName: '{{this.search_label}}',
            {% if this.geom_type == 'Point' %}
                initial: false,
                zoom: {{this.search_zoom}},
                position:'{{this.position}}',
                hideMarkerOnCollapse: true
            {% endif %}
            {% if this.geom_type == 'Polygon' %}
                marker: false,
                moveToLocation: function(latlng, title, map) {
                var zoom = {{this._parent.get_name()}}.getBoundsZoom(latlng.layer.getBounds());
                    {{this._parent.get_name()}}.setView(latlng, zoom); // access the zoom
                }
            {% endif %}
                });
                searchControl.on('search:locationfound', function(e) {

                    e.layer.setStyle({fillColor: '#3f0', color: '#0f0'});
                    if(e.layer._popup)
                        e.layer.openPopup();

                }).on('search:collapsed', function(e) {

                    {{this.get_name()}}.eachLayer(function(layer) {   //restore feature color
                        {{this.get_name()}}.resetStyle(layer);
                    });
                });
            {{this._parent.get_name()}}.addControl( searchControl );

        {% endmacro %}
        """)  # noqa

    def __init__(self,
                 data,
                 search_zoom=12,
                 search_label='name',
                 geom_type='Point',
                 position='topleft'):
        super(Search, self).__init__()
        self.position = position
        self.data = data
        self.search_label = search_label
        self.search_zoom = search_zoom
        self.geom_type = geom_type

    def render(self, **kwargs):
        super(Search, self).render()

        figure = self.get_root()
        assert isinstance(figure, Figure), ('You cannot render this Element '
                                            'if it is not in a Figure.')

        figure.header.add_child(
            JavascriptLink(
                'https://cdn.jsdelivr.net/npm/leaflet-search/dist/leaflet-search.min.js'
            ),  # noqa
            name='Leaflet.Search.js')

        figure.header.add_child(
            CssLink(
                'https://cdn.jsdelivr.net/npm/leaflet-search/dist/leaflet-search.min.css'
            ),  # noqa
            name='Leaflet.Search.css')
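
A hedged usage sketch (not from the original module): attaching this Search element to a folium map, assuming a GeoJSON string of point features carrying a 'name' property and the original module's imports (MacroElement, Figure, JavascriptLink, CssLink) are in place.

import folium

m = folium.Map(location=[45.5, -122.6], zoom_start=10)

# hypothetical GeoJSON source; any point features with a 'name' property work
with open('points.json') as f:
    geojson_data = f.read()

m.add_child(Search(data=geojson_data, search_label='name', geom_type='Point'))
m.save('map_with_search.html')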
Example #11
def _codegen(service, out):
    logger.info(service)
    tpl = Template(RPC_TEMPLATE)
    with open(out, 'w') as f:
        f.write(tpl.render(service))
Example #12
plots = dict(Red=p1, Blue=p2, Green=p3)

script, div = components(plots)

template = Template('''<!DOCTYPE html>
<html lang="en">
    <head>
        <meta charset="utf-8">
        <title>Bokeh Scatter Plots</title>
        {{ resources }}
        {{ script }}
        <style>
            .embed-wrapper {
                display: flex;
                justify-content: space-evenly;
            }
        </style>
    </head>
    <body>
        <div class="embed-wrapper">
            {% for key in div.keys() %}
                {{ div[key] }}
            {% endfor %}
        </div>
    </body>
</html>
''')

resources = INLINE.render()

filename = 'embed_multiple.html'
Example #13
import os
try:
    from jinja2 import Template
except:
    print('Something went wrong finding jinja!')
    print('Please run `sudo easy_install jinja2` and try again.')

try: # Py2 hacks
    input = raw_input
except:
    pass

PATH = os.path.dirname(os.path.abspath(__file__))
coc_file = os.path.join(PATH, 'code-of-conduct.md')
with open(coc_file, 'r') as f:
    template = Template(f.read())
    name = input('Event name: ')
    description = input('Event description: ')
    type = input('Enter the type of event (user group, conference): ')
    organiser_name = input('Enter an organiser name: ')
    organiser_number = input('Enter their contact number: ')
    organiser_2_name = input('Enter another organiser name: ')
    organiser_2_number = input('Enter their contact number: ')
    law = input('Enter local law enforcement number: ')
    template = template.render(
        name=name,
        description=description,
        type=type,
        contact_name=organiser_name,
        contact_number=organiser_number,
        second_contact_name=organiser_2_name,
Example #14
def build_erddap_catalog_chunk(data_root, deployment):
    """
    Builds an ERDDAP dataset xml chunk.

    :param str data_root: The root directory where netCDF files are read from
    :param mongo.Deployment deployment: Mongo deployment model
    """
    deployment_dir = deployment.deployment_dir
    logger.info("Building ERDDAP catalog chunk for {}".format(deployment_dir))

    # grab template for dataset fragment
    template_path = os.path.join(template_dir, 'dataset.deployment.xml')
    with open(template_path) as f:
        template = Template("".join(f.readlines()))

    dir_path = os.path.join(data_root, deployment_dir)

    checksum = (deployment.checksum or '').strip()
    completed = deployment.completed
    delayed_mode = deployment.delayed_mode

    # look for a file named extra_atts.json that provides
    # variable and/or global attributes to add and/or modify
    # An example of extra_atts.json file is in the module docstring
    extra_atts = {"_global_attrs": {}}
    extra_atts_file = os.path.join(dir_path, "extra_atts.json")
    if os.path.isfile(extra_atts_file):
        try:
            with open(extra_atts_file) as f:
                extra_atts = json.load(f)
        except Exception:
            logger.error("Error loading file: {}".format(extra_atts_file))

    # Get the latest file from the DB (and double check just in case)
    latest_file = deployment.latest_file or get_latest_nc_file(dir_path)
    if latest_file is None:
        raise IOError(
            'No nc files found in deployment {}'.format(deployment_dir))

    # variables which need to have the variable {var_name}_qc present in the
    # template.  Right now these are all the same, so are hardcoded
    required_qc_vars = {
        "conductivity_qc", "density_qc", "depth_qc", "latitude_qc",
        "lat_uv_qc", "longitude_qc", "lon_uv_qc", "profile_lat_qc",
        "profile_lon_qc", "pressure_qc", "salinity_qc", "temperature_qc",
        "time_qc", "time_uv_qc", "profile_time_qc", "u_qc", "v_qc"
    }

    # any destinationNames that need to have a different name.
    # by default the destinationName will equal the sourceName
    dest_var_remaps = {
        'longitude_qc': 'precise_lon_qc',
        'latitude_qc': 'precise_lat_qc',
        'profile_lon_qc': 'longitude_qc',
        'profile_lat_qc': 'latitude_qc',
        'time_qc': 'precise_time_qc',
        'profile_time_qc': 'time_qc'
    }

    existing_varnames = {
        'trajectory', 'wmo_id', 'profile_id', 'profile_time', 'profile_lat',
        'profile_lon', 'time', 'depth', 'pressure', 'temperature',
        'conductivity', 'salinity', 'density', 'lat', 'lon', 'time_uv',
        'lat_uv', 'lon_uv', 'u', 'v', 'platform', 'instrument_ctd'
    }

    # need to explicitly cast keys to set in Python 2
    exclude_vars = (existing_varnames | set(dest_var_remaps.keys())
                    | required_qc_vars | {'latitude', 'longitude'})

    nc_file = os.path.join(data_root, deployment_dir, latest_file)
    with Dataset(nc_file, 'r') as ds:
        qc_var_types = check_for_qc_vars(ds)
        all_other_vars = ds.get_variables_by_attributes(
            name=lambda n: n not in exclude_vars)
        gts_ingest = getattr(ds, 'gts_ingest',
                             'true')  # Set default value to true
        templ = template.render(dataset_id=deployment.name,
                                dataset_dir=dir_path,
                                checksum=checksum,
                                completed=completed,
                                reqd_qc_vars=required_qc_vars,
                                dest_var_remaps=dest_var_remaps,
                                qc_var_types=qc_var_types,
                                gts_ingest=gts_ingest,
                                delayed_mode=delayed_mode)
        # Add any of the extra variables and attributes
        try:
            tree = etree.fromstring(templ)
            for identifier, mod_attrs in extra_atts.items():
                add_extra_attributes(tree, identifier, mod_attrs)
            # append all the 'other' variables to etree
            for var in all_other_vars:
                tree.append(add_erddap_var_elem(var))
            return etree.tostring(tree, encoding=str)
        except Exception:
            logger.exception(
                "Exception occurred while adding atts to template: {}".format(
                    deployment_dir))
            return templ
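
The module docstring referenced above is not shown here; a plausible shape for extra_atts.json, inferred only from how it is consumed (the special "_global_attrs" key holds global attributes, any other key names a variable), with hypothetical values:

import json

extra_atts_example = {
    "_global_attrs": {"title": "Overridden dataset title"},   # hypothetical global attribute
    "temperature": {"long_name": "Sea Water Temperature"},    # hypothetical per-variable attribute
}
with open("extra_atts.json", "w") as f:
    json.dump(extra_atts_example, f, indent=4)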
Example #15
    left=Side("thin", "FF000000"),
    right=Side("thin", "FF000000"),
    top=Side("thin", "FF000000"),
    bottom=Side("thin", "FF000000"),
    vertical=Side("thin", "FF000000"),
    horizontal=Side("thin", "FF000000")
)

MailTemplate = Template("""<html>
    <body>
        <center><h1>{{ subject }}</h1></center><br />
        {% for ipath in imglist %}
        <h2>Image: {{ ipath }}</h2><br />
        <img src="cid:{{ ipath }}" alt="{{ ipath }}"><br />
        {% endfor %}
        <br />
        #Note: task ID {{ eid }}<br />
        #Reply command format (do not include the original mail content in the reply):<br /><br />
        ##Send selected items by index:<br />
        M{{ eid }},1,3,5<br /><br />
        ##Send all:<br />
        M{{ eid }}<br />
    </body>
</html>""")


class MmsDaily:

    def __init__(self, Id, config, path, subject, port=8989):
        os.makedirs("datas/logs/", exist_ok=True)
        os.makedirs("datas/tmps/", exist_ok=True)
        self.logger = initLogger("datas/logs/" + Id, Id)
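
A hedged usage sketch: rendering the mail body with hypothetical values for the variables the template references (the cid: references would normally match images attached inline to the message).

html_body = MailTemplate.render(subject="Daily MMS report",            # hypothetical values
                                imglist=["chart1.png", "chart2.png"],
                                eid="20240101")
print(html_body)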
Example #16
from typing import List, Tuple, Any, Dict

from language_templates.language import Language
from jinja2 import Template

SOLUTION_TEMPLATE = Template("""
#include <cctype>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
using namespace std;

{{returnType}} {{funcName}}({{arguments}}) {
{{body}}
}

""")

CALL_TEMPLATE = Template("""{{funcName}}({{arguments}})""")


class CPP(Language):
    def typeString(self):
        return "string"

    def typeDouble(self):
        return "double"

    def typeInteger(self):
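
A hedged usage sketch (separate from the class above): filling the two templates with hypothetical values for the fields they reference.

print(SOLUTION_TEMPLATE.render(returnType="int",
                               funcName="add",
                               arguments="int a, int b",
                               body="    return a + b;"))
print(CALL_TEMPLATE.render(funcName="add", arguments="1, 2"))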
Example #17
hash = {}
result = []
for contributors in result1:
    for user in contributors:
        if user.login not in hash.keys():
            result.append(user)
        hash[user.login] = 1

for contributors in result2:
    for user in contributors:
        if user.login not in hash.keys():
            result.append(user)
        hash[user.login] = 1

s = """
{% for user in contributor %}
<li class="span2">
    <a href="{{ user.html_url }}" target="_blank" title="{{ user.login }} contributions">{{ user.login }}</a>
    <a href="{{ user.html_url }}" target="_blank" title="{{ user.login }} contributions"><img src="{{ user.avatar_url }}" alt="{{ user.login }}'s avatar"></a>
</li>
{% endfor %}
"""

print "{# created "+str(datetime.date.today())+" #}"
print "{# do not modify it, this is a read-only file created by bin/extract_contributor.py #}"

template = Template(s)
print(template.render(contributor=result))

Example #18
def render(id_pub):
    id_pub = os.path.basename(id_pub)
    with open('Dockerfile.template') as input, \
         open('Dockerfile', 'w') as output:
        template = Template(input.read())
        output.write(template.render(id_pub=id_pub))
Example #19
API_ENDPOINT = 'https://torrentapi.org/pubapi_v2.php'
API_RATE_LIMIT = 2  # seconds/request
TOKEN_LIFESPAN = timedelta(minutes=15)
APP_ID = 'github.com/banteg/rarbg'

TEMPLATE = Template('''\
<?xml version="1.0" encoding="utf-8"?>
<rss version="2.0">
    <channel>
        <title>{{title}}</title>
        <link>https://torrentapi.org/apidocs_v2.txt</link>
        <ttl>15</ttl>
        {% for entry in entries %}
        <item>
            <title>{{entry.title}} ({{entry.hsize}})</title>
            <description/>
            <guid>{{entry.hash}}</guid>
            <pubDate>{{entry.pubdate}}</pubDate>
            <link>{{entry.download}}</link>
            <enclosure
                url="{{entry.download | e}}"
                length="{{entry.size}}"
                type="application/x-bittorrent" />
        </item>
        {% endfor %}
    </channel>
</rss>''')

app = web.Application()
app.token = None
app.token_got = datetime.now()
app.counter = 0
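
A hedged usage sketch (not from the original app): rendering the feed template with a hypothetical entry shaped like the fields it references.

entries = [{
    "title": "Example Release",             # hypothetical entry
    "hsize": "1.4 GB",
    "hash": "0123456789abcdef",
    "pubdate": "Mon, 01 Jan 2024 00:00:00 +0000",
    "download": "magnet:?xt=urn:btih:0123456789abcdef",
    "size": 1500000000,
}]
print(TEMPLATE.render(title="rarbg feed", entries=entries))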
Example #20
 def render(self, variables):
     """
     Render the template using Jinja2.
     """
     return Template(self.template).render(variables)
Example #21
    def inject_credential(self, credential, env, safe_env, args, safe_args,
                          private_data_dir):
        """
        Inject credential data into the environment variables and arguments
        passed to `ansible-playbook`

        :param credential:       a :class:`awx.main.models.Credential` instance
        :param env:              a dictionary of environment variables used in
                                 the `ansible-playbook` call.  This method adds
                                 additional environment variables based on
                                 custom `env` injectors defined on this
                                 CredentialType.
        :param safe_env:         a dictionary of environment variables stored
                                 in the database for the job run
                                 (`UnifiedJob.job_env`); secret values should
                                 be stripped
        :param args:             a list of arguments passed to
                                 `ansible-playbook` in the style of
                                 `subprocess.call(args)`.  This method appends
                                 additional arguments based on custom
                                 `extra_vars` injectors defined on this
                                 CredentialType.
        :param safe_args:        a list of arguments stored in the database for
                                 the job run (`UnifiedJob.job_args`); secret
                                 values should be stripped
        :param private_data_dir: a temporary directory to store files generated
                                 by `file` injectors (like config files or key
                                 files)
        """
        if not self.injectors:
            if self.managed_by_tower and credential.kind in dir(
                    builtin_injectors):
                injected_env = {}
                getattr(builtin_injectors,
                        credential.kind)(credential, injected_env,
                                         private_data_dir)
                env.update(injected_env)
                safe_env.update(build_safe_env(injected_env))
            return

        class TowerNamespace:
            pass

        tower_namespace = TowerNamespace()

        # maintain a normal namespace for building the ansible-playbook arguments (env and args)
        namespace = {'tower': tower_namespace}

        # maintain a sanitized namespace for building the DB-stored arguments (safe_env and safe_args)
        safe_namespace = {'tower': tower_namespace}

        # build a normal namespace with secret values decrypted (for
        # ansible-playbook) and a safe namespace with secret values hidden (for
        # DB storage)
        for field_name, value in credential.inputs.items():

            if type(value) is bool:
                # boolean values can't be secret/encrypted
                safe_namespace[field_name] = namespace[field_name] = value
                continue

            value = credential.get_input(field_name)

            if field_name in self.secret_fields:
                safe_namespace[field_name] = '**********'
            elif len(value):
                safe_namespace[field_name] = value
            if len(value):
                namespace[field_name] = value

        # default missing boolean fields to False
        for field in self.inputs.get('fields', []):
            if field['type'] == 'boolean' and field[
                    'id'] not in credential.inputs.keys():
                namespace[field['id']] = safe_namespace[field['id']] = False

        file_tmpls = self.injectors.get('file', {})
        # If any file templates are provided, render the files and update the
        # special `tower` template namespace so the filename can be
        # referenced in other injectors
        for file_label, file_tmpl in file_tmpls.items():
            data = Template(file_tmpl).render(**namespace)
            _, path = tempfile.mkstemp(dir=private_data_dir)
            with open(path, 'w') as f:
                f.write(data)
            os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)

            # determine if filename indicates single file or many
            if file_label.find('.') == -1:
                tower_namespace.filename = path
            else:
                if not hasattr(tower_namespace, 'filename'):
                    tower_namespace.filename = TowerNamespace()
                file_label = file_label.split('.')[1]
                setattr(tower_namespace.filename, file_label, path)

        injector_field = self._meta.get_field('injectors')
        for env_var, tmpl in self.injectors.get('env', {}).items():
            try:
                injector_field.validate_env_var_allowed(env_var)
            except ValidationError as e:
                logger.error(
                    'Ignoring prohibited env var {}, reason: {}'.format(
                        env_var, e))
                continue
            env[env_var] = Template(tmpl).render(**namespace)
            safe_env[env_var] = Template(tmpl).render(**safe_namespace)

        if 'INVENTORY_UPDATE_ID' not in env:
            # awx-manage inventory_update does not support extra_vars via -e
            extra_vars = {}
            for var_name, tmpl in self.injectors.get('extra_vars', {}).items():
                extra_vars[var_name] = Template(tmpl).render(**namespace)

            def build_extra_vars_file(vars, private_dir):
                handle, path = tempfile.mkstemp(dir=private_dir)
                f = os.fdopen(handle, 'w')
                f.write(safe_dump(vars))
                f.close()
                os.chmod(path, stat.S_IRUSR)
                return path

            path = build_extra_vars_file(extra_vars, private_data_dir)
            if extra_vars:
                args.extend(['-e', '@%s' % path])
                safe_args.extend(['-e', '@%s' % path])
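
A minimal standalone sketch of the env-injector idea above (not AWX code): the same Jinja template is rendered twice, once against the real namespace for the process environment and once against the sanitized namespace for DB-safe storage. The env var name and values are hypothetical.

from jinja2 import Template

namespace = {"username": "svc-account", "password": "s3cr3t"}            # decrypted values
safe_namespace = {"username": "svc-account", "password": "**********"}   # secrets masked

tmpl = "{{ username }}:{{ password }}"
env = {"MY_SERVICE_AUTH": Template(tmpl).render(**namespace)}            # hypothetical env var
safe_env = {"MY_SERVICE_AUTH": Template(tmpl).render(**safe_namespace)}

print(env["MY_SERVICE_AUTH"])       # svc-account:s3cr3t
print(safe_env["MY_SERVICE_AUTH"])  # svc-account:**********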
Example #22
def setup_teardown(request, duthosts, fib_info_files, duts_running_config_facts):

    is_multi_asic = duthosts[0].sonichost.is_multi_asic

    # Initialize parameters
    if "201811" in duthosts[0].os_version or "201911" in duthosts[0].os_version:
        dscp_mode = "pipe"
    else:
        dscp_mode = "uniform"

    ecn_mode = "copy_from_outer"
    ttl_mode = "pipe"

    # The hostvars dict has definitions defined in ansible/group_vars/sonic/variables
    hostvars = duthosts[0].host.options["variable_manager"]._hostvars[duthosts[0].hostname]
    sonic_hwsku = duthosts[0].sonichost.facts["hwsku"]
    mellanox_hwskus = hostvars.get("mellanox_hwskus", [])

    if sonic_hwsku in mellanox_hwskus:
        dscp_mode = "uniform"
        ecn_mode = "standard"

    setup_info = {
        "outer_ipv4": to_bool(request.config.getoption("outer_ipv4")),
        "outer_ipv6": to_bool(request.config.getoption("outer_ipv6")),
        "inner_ipv4": to_bool(request.config.getoption("inner_ipv4")),
        "inner_ipv6": to_bool(request.config.getoption("inner_ipv6")),
        "dscp_mode": dscp_mode,
        "ecn_mode": ecn_mode,
        "ttl_mode": ttl_mode,
        "fib_info_files": fib_info_files[:3],  # Test at most 3 DUTs in case of multi-DUT
        "ignore_ttl": True if is_multi_asic else False,
        "max_internal_hops": 3 if is_multi_asic else 0,
    }

    # config decap
    decap_conf_template = Template(open("../ansible/roles/test/templates/decap_conf.j2").read())

    lo_ips = []
    lo_ipv6s = []
    for duthost in duthosts:
        cfg_facts = duts_running_config_facts[duthost.hostname]
        lo_ip = None
        lo_ipv6 = None
        # Loopback0 ip is same on all ASICs
        for addr in cfg_facts[0]["LOOPBACK_INTERFACE"]["Loopback0"]:
            ip = IPNetwork(addr).ip
            if ip.version == 4 and not lo_ip:
                lo_ip = str(ip)
            elif ip.version == 6 and not lo_ipv6:
                lo_ipv6 = str(ip)
        lo_ips.append(lo_ip)
        lo_ipv6s.append(lo_ipv6)

        decap_conf_vars = {
            "lo_ip": lo_ip,
            "lo_ipv6": lo_ipv6,
            "op": "SET"
        }
        decap_conf_vars.update(setup_info)

        duthost.copy(content=decap_conf_template.render(
            **decap_conf_vars), dest="/tmp/decap_conf.json")

        decap_conf_vars["op"] = "DEL"
        duthost.copy(content=decap_conf_template.render(
            **decap_conf_vars), dest="/tmp/decap_conf_del.json")

        for asic_id in duthost.get_frontend_asic_ids():
            duthost.shell("docker cp /tmp/decap_conf.json swss{}:/decap_conf.json"
                        .format(asic_id if asic_id is not None else ""))
            duthost.shell('docker exec swss{} sh -c "swssconfig /decap_conf.json"'
                        .format(asic_id if asic_id is not None else ""))

    setup_info['lo_ips'] = lo_ips
    setup_info['lo_ipv6s'] = lo_ipv6s
    setup_info['router_macs'] = [duthost.facts['router_mac'] for duthost in duthosts]

    logger.info(json.dumps(setup_info, indent=2))

    yield setup_info

    # Remove decap configuration
    for duthost in duthosts:
        for asic_id in duthost.get_frontend_asic_ids():
            duthost.shell("docker cp /tmp/decap_conf_del.json swss{}:/decap_conf_del.json"
                        .format(asic_id if asic_id is not None else ""))
            duthost.shell('docker exec swss{} sh -c "swssconfig /decap_conf_del.json"'
                        .format(asic_id if asic_id is not None else ""))
Example #23
 def render(self):
     path = Path(self.template)
     template = Template(path.read_text())
     return template.render(**self.params)
Example #24
MILESTONE_REPORT_TMP = Template('''\
> This file is auto generated by `ays` services. Please don't modify manually.

# Summary
|Milestone|ETA|
|---------|---|
{% for milestone in milestones.values() -%}
|[{{ milestone.title }}](#milestone-{{ milestone.title | replace(' ', '-')| replace('.', '')| lower }})|{{ summary(milestone.title) }}|
{% endfor -%}
|[No milestone](#no-milestone)|{{ summary('__no_milestone__') }}|

{% for key, milestone in milestones.items() -%}
## [Milestone {{ milestone.title }}](milestones/{{ key }}.md)

{% set issues = report.get(milestone.title, []) %}
|Issue|Title|State|Owner|ETA|
|-----|-----|-----|-----|---|
{% for issue in issues -%}
|[#{{ issue.number }}](https://github.com/{{ repo.fullname }}/issues/{{ issue.number }})|\
{{ issue.title }}|\
{{ state(issue.state) }}|\
{% if issue.assignee %}[{{ issue.assignee }}](https://github.com/{{ issue.assignee }}){% endif %}|\
{% set eta, id = issue.story_estimate %}{% if eta %}[{{ eta|trim }}]({{ issue.url }}#issuecomment-{{ id }}){% else %}N/A{% endif %}|
{% endfor %}
{% endfor %}


## No milestone
|Issue|Title|State|Owner|ETA|
|-----|-----|-----|-----|---|
{% for issue in report.get('__no_milestone__', []) -%}
|[#{{ issue.number }}](https://github.com/{{ repo.fullname }}/issues/{{ issue.number }})|\
{{ issue.title }}|\
{{ state(issue.state) }}|\
{% if issue.assignee %}[{{ issue.assignee }}](https://github.com/{{ issue.assignee }}){% endif %}|\
{% set eta, id = issue.story_estimate %}{% if eta %}[{{ eta|trim }}]({{ issue.url }}#issuecomment-{{ id }}){% else %}N/A{% endif %}|
{% endfor %}
''')
Example #25
def main(data, template, output_file):
    open(output_file, 'w').write(
        Template(open(template).read()).render(json.load(open(data))))
Example #26
service_template = Template('''
@ImplementedBy(classOf[{{entityName}}ServiceImpl])
trait {{entityName}}Service {
  def search{{entityName}}(hqId: Long): Future[Seq[{{entityName}}]] 
  def search{{entityName}}(hqId: Long,  searchPage: SearchPage ): Future[SearchResult[{{entityName}}]] 
  def find{{entityName}}ById(hqId: Long, id: Long) : Future[{{entityName}}]
  def save{{entityName}}(m: {{entityName}}): Future[Long]
  def saveAndUpdate{{entityName}}(m: {{entityName}}): Future[Unit]
  def update{{entityName}}(m: {{entityName}}): Future[Unit]
}

@Singleton
class {{entityName}}ServiceImpl @Inject()(dbConfigProvider: DatabaseConfigProvider) extends BaseDao(dbConfigProvider)
  with {{entityName}}Service {

  private[this] val _{{tableClassNames}} = Tables.{{tableClassNames}}
 
  override def search{{entityName}}(hqId: Long) = { 
    val query = _{{tableClassNames}}.filter(t=>t.hqId === hqId)
    runDBAction(query.sortBy(_.id.desc).result)
  }

  override def search{{entityName}}(hqId: Long, searchPage: SearchPage) = { 
    val query = _{{tableClassNames}}.filter(t=>t.hqId === hqId)
    runDBAction(query.sortBy(_.id.desc).drop((searchPage.page - 1) * searchPage.size).take(searchPage.size).result.zip(
				query.length.result)).map { case (items, count) =>
				DaoHelper.createSearchResult(items, searchPage, count)
		}
  }

  override def find{{entityName}}ById(hqId: Long, id: Long) = {
    val query = _{{tableClassNames}}.filter(t=>t.hqId === hqId && t.id === id)
    runDBAction(query.result.head)
  }
  

  override def save{{entityName}}(m: {{entityName}}) = {
    runDBAction((_{{tableClassNames}} returning _{{tableClassNames}}.map(_.id)) += m)
  }

  override def saveAndUpdate{{entityName}}(m: {{entityName}}) = {
    (m.id match {
      case 0L => runDBAction((_{{tableClassNames}} returning _{{tableClassNames}}.map(_.id)) += m)
      case _ => runDBAction(_{{tableClassNames}}.filter(_.id === m.id).update(m))
    }).map { _
      => Unit
    }
  }

  override def update{{entityName}}(m: {{entityName}}) = {
    runDBAction(_{{tableClassNames}}.filter(_.id === m.id).update(m)).map {
      case _ => Unit
    }
  }
}
''')
Example #27
def convert(path, node):
    """
    
    Parameters
    ----------
    path: String
        Path of the Java source file
    node: javalang node
    
    """
    root = node.package.name.replace(".", os.path.sep)
    if not exists(root):
        os.makedirs(root)
    if not exists(join(root, '__init__.py')):
        with open(join(root, '__init__.py'), 'w') as f:
            pass
    if not node.types or not isinstance(node.types[0], ClassDeclaration):
        print("No class in {}".format(path))
        return
    # Class
    cls = node.types[0]

    with open(join(root, cls.name + '.py'), 'w') as f:
        # Format imports
        scope = set()
        for imp in node.imports:
            imp.pkg = ".".join(imp.path.split(".")[:-1])
            imp.name = "*" if imp.wildcard else imp.path.split(".")[-1]
            scope.add(imp.name)

        # Import from local package if needed
        if cls.extends and cls.extends.name not in scope:
            node.imports.append(Import(pkg='.', name=cls.extends.name))

        # Format fields
        for fld in cls.fields:  # do not reuse `f`, which is the open file handle above
            if not hasattr(fld, 'name'):
                fld.name = fld.decorators[0].name

        if cls.documentation:
            docs = '"""{}"""'.format("\n    ".join([
                l.lstrip().lstrip("*").replace("/*", "").replace("*/", "")
                for l in cls.documentation.split("\n")
            ]))
        else:
            docs = "# No docs"

        f.write(
            Template(
                dedent("""
        from atom.api import set_default
        {% for imp in node.imports %}
        from {{ imp.pkg }} import {{ imp.name }} 
        {% endfor %}
        
        class {{cls.name}}({{base.name}}):
            {{ docs }}
            __nativeclass__ = set_default('{{node.package.name}}.{{cls.name}}')
        {% for f in cls.fields %}
            {{ f.name }} = JavaField({{f.type.name}}){% endfor %}
        {% for m in cls.methods %}{% if 'public' in m.modifiers %}
            {{ m.name }} = {% if 'static' in m.modifiers %}JavaStaticMethod{% else %}JavaMethod{% endif %}({% for p in m.parameters %}'{{p.type.name}}',{% endfor %}){% endif %}{% endfor %}
        
        """)).render(imports=node.imports,
                     node=node,
                     docs=docs,
                     cls=cls,
                     base=cls.extends or RefType()))
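
A hedged usage sketch (not from the original script): parsing a Java source file with javalang and handing the resulting compilation unit to convert(); the path is hypothetical and the original module's other imports (ClassDeclaration, Import, RefType, dedent, Template) are assumed to be present.

import javalang

path = "src/com/example/Widget.java"   # hypothetical Java source file
with open(path) as src:
    tree = javalang.parse.parse(src.read())
convert(path, tree)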
Example #28
import paramiko
import yaml
from pprint import pprint
from jinja2 import Template
import time

file_input = input("Masukan file yaml: ") or 'data.yml'
file_yaml = open(file_input, 'r')
data_yaml = yaml.safe_load(file_yaml)

template_file_input = input("Masukkan file template: ") or 'template.j2'
template_file = open(template_file_input, "r").read()
template = Template(template_file)

for router in data_yaml:
    ip = router['ip']
    username = router['username']
    password = router['password']

    interface_list = router['interface_list']
    bgp_network = router["bgp"]['network']
    as_number = router['bgp']['local_as']
    router_id = router['bgp']['router_id']
    bgp_neighbor_list = router['bgp']['neighbor']

    dhcp_list = router['dhcp']

    cmds = template.render(interface_list=interface_list,
                           bgp_network=bgp_network,
                           dhcp_list=dhcp_list,
                           as_number=as_number,
Example #29
import re
import os
from pathlib import Path

from ruamel.yaml import YAML
from jinja2 import Template

from publist.publist import update as update_publist

PREPRINT_REGEX = r"arXiv:(.+?[/\.]\d+)"
FULL_TEMPLATE = Template(
    """{{preprint.title}} ([arXiv:{{preprint.id}}](https://arxiv.org/abs/{{preprint.id}}))

{{preprint.authors | join(', ')}}

> {{preprint.abstract}}""")
SHORT_TEMPLATE = Template(
    """**{{preprint.title}}** ([arXiv:{{preprint.id}}](https://arxiv.org/abs/{{preprint.id}}))
  {{preprint.authors | join(', ')}}""")
INLINE_TEMPLATE = Template(
    '[arXiv:{{preprint.id}}](https://arxiv.org/abs/{{preprint.id}} "{{preprint.title}}")'
)


def expand_syllabus(toc, template, out):
    """Plug the TOC data into a syllabus template."""
    Path(out).write_text(
        Template(Path(template).read_text()).render(
            chapters=YAML().load(Path(toc))))
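
A hedged usage sketch for expand_syllabus: the file names and contents below are hypothetical; the YAML table of contents is loaded and passed to the template as `chapters`.

from pathlib import Path

Path("toc.yml").write_text("- Introduction\n- Methods\n- Results\n")             # hypothetical TOC
Path("syllabus.md.j2").write_text(
    "# Syllabus\n{% for chapter in chapters %}- {{ chapter }}\n{% endfor %}")    # hypothetical template
expand_syllabus("toc.yml", "syllabus.md.j2", "syllabus.md")
print(Path("syllabus.md").read_text())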

Example #30
def loadTemplate(page):
    templatePath = "static/{}.html".format(page)
    with open(templatePath, "r") as my_file:
        template = Template(my_file.read())
    return template
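
A hedged usage sketch: assuming a hypothetical static/index.html containing Jinja placeholders such as {{ title }}.

template = loadTemplate("index")
print(template.render(title="Home"))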