Example #1
def node(*args):
    """PyYAML wrapper constructor for creating nodes.

    This allows the YAML node to have 'labels' and 'properties' instead of
    'args' and 'kwargs', at the expense of having to do a little massaging of
    the parameters (which come in as positional variables with a single-key
    mapping).

    >>> import yaml
    >>> result = yaml.load('''
    ... !!python/object/apply:gryaml.node2
    ... - labels:
    ...     - 'person'
    ... - properties:
    ...     name: 'Bob Newhart'
    ...     occupation: 'Comedian'
    ... ''')
    >>> isinstance(result, Node)
    True
    """
    labels = first(arg['labels']
                   for arg in args if is_label_map(arg)) or []
    properties = first(arg['properties']
                       for arg in args if is_properties_map(arg)) or {}

    return compat_node(labels, properties)
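For reference, a minimal self-contained sketch of the parameter massaging described in the docstring, with stand-in is_label_map/is_properties_map helpers (the real helpers live elsewhere in gryaml; these are assumptions for illustration):

from boltons.iterutils import first

# Stand-in helpers: each positional arg is a single-key mapping.
def is_label_map(arg):
    return isinstance(arg, dict) and set(arg) == {'labels'}

def is_properties_map(arg):
    return isinstance(arg, dict) and set(arg) == {'properties'}

args = ({'labels': ['person']},
        {'properties': {'name': 'Bob Newhart', 'occupation': 'Comedian'}})

labels = first(arg['labels'] for arg in args if is_label_map(arg)) or []
properties = first(arg['properties'] for arg in args if is_properties_map(arg)) or {}

assert labels == ['person']
assert properties['occupation'] == 'Comedian'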
Example #2
def play(cups, rounds):
    starting_cup = cups[0]
    cups = Cups(cups)
    node = cups[starting_cup]
    min_cup = min(cups)
    max_cup = max(cups)

    for _ in range(rounds):
        # Pick up the three cups immediately clockwise of the current cup.
        removed_nodes = node.next, node.next.next, node.next.next.next
        removed_cups = [node.value for node in removed_nodes]
        cups.remove(removed_nodes)

        # The destination is the next-lowest cup still in the circle,
        # wrapping around to the highest cup when we pass the lowest.
        destination = node.value - 1
        while destination not in cups:
            # pylint: disable=cell-var-from-loop
            destination -= 1
            new_min_cup = iterutils.first(
                range(min_cup, min_cup + 4),
                key=lambda cup: cup not in removed_cups,
            )
            new_max_cup = iterutils.first(
                range(max_cup, max_cup - 4, -1),
                key=lambda cup: cup not in removed_cups,
            )

            if destination < new_min_cup:
                destination = new_max_cup

        # Splice the removed cups back in after the destination cup.
        destination = cups.get_node(destination)
        cups.insert(destination, removed_nodes)
        node = node.next

    return cups
Example #3
    def test_empty_iterables(self):
        """
        Empty iterables return None.
        """
        s = set()
        l = []
        assert first(s) is None
        assert first(l) is None
Example #4
    def test_default_value(self):
        """
        Empty iterables + a default value return the default value.
        """
        s = set()
        l = []
        assert first(s, default=42) == 42
        assert first(l, default=3.14) == 3.14

        l = [0, False, []]
        assert first(l, default=3.14) == 3.14
Example #5
def get_repo(remote):
    # Fetch the URL for the given remote.
    url = Repo().remote(remote).url

    # List the patterns a GitHub repository URL can assume.
    patterns = [
        "[email protected]:{}/{}.git",
        "https://github.com/{}/{}.git",
    ]

    # Attempt to parse the URL.
    if not (match := first(map(partial(search, string=url), patterns))):
        exit(f"Invalid remote URL: {url}")
Example #6
    def initialize(cls):
        db_entries = config.get("hosts.mongo", {})
        missing = []
        log.info("Initializing database connections")

        override_connection_string = getenv(OVERRIDE_CONNECTION_STRING_ENV_KEY)
        override_hostname = first(map(getenv, OVERRIDE_HOST_ENV_KEY), None)
        override_port = first(map(getenv, OVERRIDE_PORT_ENV_KEY), None)

        if override_connection_string:
            log.info(
                f"Using override mongodb connection string {override_connection_string}"
            )
        else:
            if override_hostname:
                log.info(f"Using override mongodb host {override_hostname}")
            if override_port:
                log.info(f"Using override mongodb port {override_port}")

        for key, alias in get_items(Database).items():
            if key not in db_entries:
                missing.append(key)
                continue

            entry = cls._create_db_entry(alias=alias,
                                         settings=db_entries.get(key))

            if override_connection_string:
                entry.host = override_connection_string
            else:
                if override_hostname:
                    entry.host = furl(
                        entry.host).set(host=override_hostname).url
                if override_port:
                    entry.host = furl(entry.host).set(port=override_port).url

            try:
                entry.validate()
                log.info("Registering connection to %(alias)s (%(host)s)" %
                         entry.to_struct())
                register_connection(**entry.to_struct())

                cls._entries.append(entry)
            except ValidationError as ex:
                raise Exception("Invalid database entry `%s`: %s" %
                                (key, ex.args[0]))
        if missing:
            raise ValueError("Missing database configuration for %s" %
                             ", ".join(missing))
Example #7
def get_entries(
    backstop: typing.Optional[datetime.date] = None
) -> typing.Iterator[typing.Tuple[datetime.date, block.BulletedListBlock]]:
    journal = iterutils.first(
        client.notion_client().get_top_level_pages(),
        key=lambda page: page.title == 'Journal',
    )

    if not journal:
        raise ValueError('No journal found with name \'Journal\'')

    for year in journal.children:
        for month in year.children:
            for week in month.children:
                for entry in week.children:
                    match = re.fullmatch(ENTRY_TITLE_REGEX, entry.title)
                    assert match is not None, (year, month, week, entry)

                    date = datetime.date(
                        int(year.title),
                        int(match.group('month')),
                        int(match.group('day')),
                    )
                    if backstop and date < backstop:
                        continue

                    yield date, entry
Example #8
File: cli.py Project: mcgyver5/apatite
def set_repo_added_dates(pfile, plist, targets, dry_run):
    project_list = plist.project_list
    if targets:
        project_list = [
            proj for proj in project_list
            if (proj.name in targets or proj.name_slug in targets)
        ]
    results = []
    updated_proj_list = []
    for project in tqdm(project_list):
        if project._orig_data.get('date_added'):
            continue
        # rstrip in case it was added in a denormalized form
        repo_url = str(project.repo_url).rstrip('/')
        res = run_cap([
            'git', 'log', '--date=iso-strict',
            '--pretty=format:"%h%x09%an%x09%ad%x09%s"', '--reverse',
            '--source', '-S', repo_url, '--', pfile
        ])
        first_line = first(res.stdout.splitlines())
        if not first_line:
            print('nothing for', repo_url)
            continue  # TODO
        parts = [p.strip() for p in first_line.strip('"').split('\t')]
        dt_text = parts[2]
        parsed_date = _get_commit_dt(dt_text)
        results.append((project.name, parsed_date))
        updated_proj_list.append(attr.evolve(project, date_added=parsed_date))
    results.sort(key=lambda x: x[1])
    plist.update_projects(updated_proj_list)
    if not dry_run and updated_proj_list:
        normalize(pfile=pfile, plist=plist)
    return
Example #9
    def _get_queries_for_order_field(
            cls, query: Q, order_field: str) -> Union[None, Tuple[Q, Q]]:
        """
        In case the order_field is one of the cls fields and the sorting is ascending
        then return the tuple of 2 queries:
        1. original query with not empty constraint on the order_by field
        2. original query with empty constraint on the order_by field
        """
        if not order_field or order_field.startswith(
                "-") or "[" in order_field:
            return

        mongo_field_name = order_field.replace(".", "__")
        mongo_field = first(v for k, v in cls.get_all_fields_with_instance()
                            if k == mongo_field_name)

        if isinstance(mongo_field, ListField):
            params = {"is_list": True}
        elif isinstance(mongo_field, StringField):
            params = {"empty_value": ""}
        else:
            params = {}
        non_empty = query & field_exists(mongo_field_name, **params)
        empty = query & field_does_not_exist(mongo_field_name, **params)
        return non_empty, empty
Example #10
def assemble_tiles(graph):
    corner = set_top_left_corner(graph)
    positions = {corner: (0, 0)}

    for edge in nx.bfs_edges(graph, corner):
        # pylint: disable=cell-var-from-loop
        edges = graph.edges[edge]['edge']
        tile_id_1, tile_id_2 = edge
        tile_1, tile_2 = graph.nodes[tile_id_1]['tile'], graph.nodes[tile_id_2]['tile']

        assert tile_id_1 in positions

        tile_1_edges = tile_1.edges()
        edge_1 = iterutils.first(range(4), key=lambda i: tile_1_edges[i] in edges)
        positions[tile_id_2] = utils.add_vector(positions[tile_id_1], EDGE_TO_VECTOR[edge_1])
        orient_tile_to_edge(tile_1, edge_1, tile_2)

    # Now that positions have been determined, form grid of sub-images
    dims = utils.add_vector(max(positions.values()), (1, 1))
    grid = [
        [None for _ in range(dims[1])]
        for _ in range(dims[0])
    ]

    for tile_id, (i, j) in positions.items():
        tile = graph.nodes[tile_id]['tile']
        grid[i][j] = tile
        # Truncate tile borders
        tile.points = tile.points[1:-1, 1:-1]

    # Merge truncated tiles into final image
    return np.vstack([
        np.hstack([tile.points for tile in row])
        for row in grid
    ])
Example #11
    def test_get_all_with_stats(self):
        project4, _ = self._temp_project_with_tasks(
            name="project1/project3/project4")
        project5, _ = self._temp_project_with_tasks(
            name="project1/project3/project5")
        project2 = self._temp_project(name="project2")
        res = self.api.projects.get_all(shallow_search=True).projects
        self.assertTrue(any(p for p in res if p.id == project2))
        self.assertFalse(any(p for p in res if p.id in [project4, project5]))

        project1 = first(p.id for p in res if p.name == "project1")
        res = self.api.projects.get_all_ex(id=[project1, project2],
                                           include_stats=True).projects
        self.assertEqual(set(p.id for p in res), {project1, project2})
        res1 = next(p for p in res if p.id == project1)
        self.assertEqual(res1.stats["active"]["status_count"]["created"], 0)
        self.assertEqual(res1.stats["active"]["status_count"]["stopped"], 2)
        self.assertEqual(res1.stats["active"]["total_runtime"], 2)
        self.assertEqual(
            {sp.name
             for sp in res1.sub_projects},
            {
                "project1/project3",
                "project1/project3/project4",
                "project1/project3/project5",
            },
        )
        res2 = next(p for p in res if p.id == project2)
        self.assertEqual(res2.stats["active"]["status_count"]["created"], 0)
        self.assertEqual(res2.stats["active"]["status_count"]["stopped"], 0)
        self.assertEqual(res2.stats["active"]["total_runtime"], 0)
        self.assertEqual(res2.sub_projects, [])
Example #12
def py2neo20_node(*labels, **properties):
    """Py2neo `node` factory.

    Requires a module-global `graphdb` connection.
    """
    global graphdb

    return first(graphdb.create(py2neo_node(*labels, **properties)))
Example #13
def test_complex_related_graph(graphdb):
    """Test loading a graph with multiple nodes & relationships."""
    result = yaml.load(open('tests/samples/nodes-and-relationships.yaml'))
    assert len(result) == 21
    result = graphdb.cypher.execute(
        'MATCH (p)-[r:DIRECTED]->(m{title:"The Matrix"}) RETURN p,r,m')
    assert len(result) == 1
    person, relationship, movie = first(result)
    assert person['name'] == 'Lana Wachowski'
    assert relationship.type == 'DIRECTED'
    assert movie['title'] == 'The Matrix'
Example #14
def _get(prop_name, env_suffix=None, default=""):
    suffix = env_suffix or prop_name
    keys = [f"{p}_SERVER_{suffix}" for p in ("CLEARML", "TRAINS")]
    value = first(map(getenv, keys))
    if value:
        return value

    try:
        return (root / prop_name).read_text().strip()
    except FileNotFoundError:
        return default
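first(map(getenv, keys)) above returns the value of the first environment variable in the fallback chain that is set and non-empty. A standalone illustration of the idiom, with hypothetical variable names:

import os
from boltons.iterutils import first

keys = ["CLEARML_SERVER_HOST", "TRAINS_SERVER_HOST"]  # hypothetical fallback chain
os.environ["TRAINS_SERVER_HOST"] = "example.internal"

# getenv returns None for unset variables, so the first set,
# non-empty value in the chain wins.
value = first(map(os.getenv, keys))
assert value == "example.internal"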
Example #15
def py2neo16_node(*labels, **properties):
    """Implement a Py2neo 2.0-compatible `node` factory.

    Version 1.6 did not support adding labels (which ordinarily would only be
    possible to add *after* node creation).
    """
    global graphdb

    new_node = first(graphdb.create(py2neo_node(**properties)))
    new_node.add_labels(*labels)

    return new_node
Example #16
    def host(self, alias):
        r = self.connection(alias)
        if isinstance(r, RedisCluster):
            connections = first(
                r.connection_pool._available_connections.values())
        else:
            connections = r.connection_pool._available_connections

        if not connections:
            return None

        return connections[0].host
Example #17
def run():
    width = 25
    height = 6

    layers = chunked(data, width * height)

    # '2' is transparent: the visible pixel is the first '0' or '1'
    # encountered across the stacked layers.
    image = [
        first(pixel_layers, key=lambda x: x in ['0', '1'])
        for pixel_layers in zip(*layers)
    ]
    for r in chunked(image, width):
        print(''.join(r).replace('0', ' ').replace('1', '#'))
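A self-contained sketch of the layering logic with toy data (four stacked 2x2 layers; '2' marks a transparent pixel), assuming boltons' chunked and first:

from boltons.iterutils import chunked, first

data = '0222112222120000'      # four 2x2 layers, flattened
layers = chunked(data, 2 * 2)  # ['0222', '1122', '2212', '0000']

# For each pixel position, the visible value is the first opaque
# ('0' or '1') pixel found walking down through the layers.
image = [first(px, key=lambda x: x in '01') for px in zip(*layers)]
assert image == ['0', '1', '1', '0']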
Example #18
def cli(backstop: datetime.datetime):
    journal = iterutils.first(
        client.notion_client().get_top_level_pages(),
        key=lambda page: page.title == 'Journal (New)',
    )

    if not journal:
        raise ValueError('No journal found with name \'Journal (New)\'')

    for date, entry in get_entries(backstop=backstop.date()):
        migrate_entry(journal.collection, date, entry)
        click.echo(f'Migrated {date}')
Example #19
def test_relationship_structures(graphdb):
    """Test relationship representation."""
    result = yaml.load(open('tests/samples/relationships.yaml'))
    assert len(result) == 5
    result = graphdb.cypher.execute('MATCH (n) RETURN n')
    assert len(result) == 3  # 3 nodes
    result = graphdb.cypher.execute('MATCH (n)-[r]->(o) RETURN *')
    assert len(result) == 2  # 2 relationships
    result = graphdb.cypher.execute('MATCH (p)-[r:DIRECTED]->(m) RETURN p,r,m')
    assert len(result) == 1
    person, relationship, movie = first(result)
    assert person['name'] == 'Lana Wachowski'
    assert relationship.type == 'DIRECTED'
    assert movie['title'] == 'The Matrix'
Example #20
    def test_last_scalar_metrics(self):
        metric = "Metric1"
        variant = "Variant1"
        iter_count = 100
        task = self._temp_task()
        events = [
            {
                **self._create_task_event("training_stats_scalar", task, iteration),
                "metric": metric,
                "variant": variant,
                "value": iteration,
            }
            for iteration in range(iter_count)
        ]
        # send 2 batches to check the interaction with already stored db value
        # each batch contains multiple iterations
        self.send_batch(events[:50])
        self.send_batch(events[50:])

        task_data = self.api.tasks.get_by_id(task=task).task
        metric_data = first(first(task_data.last_metrics.values()).values())
        self.assertEqual(iter_count - 1, metric_data.value)
        self.assertEqual(iter_count - 1, metric_data.max_value)
        self.assertEqual(0, metric_data.min_value)
Example #21
def foremost(iterable, default=None, key=lambda x: x is not None):
    # type: (Iterable[E], Optional[D], Optional[ElementPredicate]) -> Union[E, D]
    """Return first non-*None* element from an iterable or *default*.

    *foremost* is a specialized version of :func:`boltons.iterutils.first`,
    with its *key* parameter defaulting to a predicate for non-*None* instead
    of *True*.

    This is especially useful with py2neo, because an entity without properties
    evaluates to *False*, whereas we usually just want the first non-*None*
    item from a result.

    :param Iterable iterable: Iterable to work on.
    :param default: Default if all elements of *iterable* are *None*. Default is *None*.
    :param key: Predicate used to select an element; defaults to a non-*None* test.
    :return: First element of *iterable* satisfying *key*, or *default*.
    """
    return first(iterable, default=default, key=key)
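A quick contrast with plain first, using throwaway values (assumes the foremost definition above):

from boltons.iterutils import first

values = [None, 0, '', 3]
assert foremost(values) == 0  # first non-None element, even though it is falsy
assert first(values) == 3     # first truthy element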
Example #22
    def test_selection(self):
        """
        Success cases with and without a key function.
        """
        l = [(), 0, False, 3, []]

        assert first(l, default=42) == 3
        assert first(l, key=isint) == 0
        assert first(l, key=isbool) is False
        assert first(l, key=odd) == 3
        assert first(l, key=even) == 0
        assert first(l, key=is_meaning_of_life) is None
Example #23
        def merge_with_updated_task_states(
                old_state: TaskScrollState,
                updates: Sequence[TaskScrollState]) -> TaskScrollState:
            task = old_state.task
            updated_state = first(uts for uts in updates if uts.task == task)
            if not updated_state:
                old_state.reset()
                return old_state

            updated_metrics = [m.metric for m in updated_state.metrics]
            return TaskScrollState(
                task=task,
                metrics=[
                    *updated_state.metrics,
                    *(old_metric for old_metric in old_state.metrics
                      if old_metric.metric not in updated_metrics),
                ],
            )
Example #24
    def _read_env_paths(self):
        value = first(map(getenv, self.extra_config_path_env_key),
                      DEFAULT_EXTRA_CONFIG_PATH)
        if value is None:
            return
        paths = [
            Path(expandvars(v)).expanduser()
            for v in value.split(EXTRA_CONFIG_PATH_SEP)
        ]
        invalid = [
            path for path in paths
            if not path.is_dir() and str(path) != DEFAULT_EXTRA_CONFIG_PATH
        ]
        if invalid:
            print(
                f"WARNING: Invalid paths in {self.extra_config_path_env_key} env var: "
                f"{' '.join(map(str, invalid))}"
            )
        return [path for path in paths if path.is_dir()]
Example #25
    def _get_paths(self) -> List[Path]:
        default_paths = EXTRA_CONFIG_PATH_SEP.join(EXTRA_CONFIG_PATHS)
        value = first(map(getenv, self.extra_config_path_override_var),
                      default_paths)

        paths = [
            Path(expandvars(v)).expanduser()
            for v in value.split(EXTRA_CONFIG_PATH_SEP)
        ]

        if value is not default_paths:
            invalid = [path for path in paths if not path.is_dir()]
            if invalid:
                print(
                    f"WARNING: Invalid paths in {self.extra_config_path_override_var} env var: {' '.join(map(str, invalid))}"
                )

        return [path for path in paths if path.is_dir()]
Example #26
    def _import(
        cls,
        reader: ZipFile,
        company_id: str = "",
        user_id: str = None,
        metadata: Mapping[str, Any] = None,
        sort_tasks_by_last_updated: bool = True,
    ):
        """
        Import entities and events from the zip file
        Start from entities since event import will require the tasks already in DB
        """
        event_file_ending = cls.events_file_suffix + ".json"
        entity_files = (
            fi
            for fi in reader.filelist
            if not fi.orig_filename.endswith(event_file_ending)
            and fi.orig_filename != cls.metadata_filename
        )
        metadata = metadata or {}
        tasks = []
        for entity_file in entity_files:
            with reader.open(entity_file) as f:
                full_name = splitext(entity_file.orig_filename)[0]
                print(f"Reading {reader.filename}:{full_name}...")
                res = cls._import_entity(f, full_name, company_id, user_id, metadata)
                if res:
                    tasks = res

        if sort_tasks_by_last_updated:
            tasks = sorted(tasks, key=attrgetter("last_update"))

        for task in tasks:
            events_file = first(
                fi
                for fi in reader.filelist
                if fi.orig_filename.endswith(task.id + event_file_ending)
            )
            if not events_file:
                continue
            with reader.open(events_file) as f:
                full_name = splitext(events_file.orig_filename)[0]
                print(f"Reading {reader.filename}:{full_name}...")
                cls._import_events(f, full_name, company_id, user_id)
Example #27
def set_top_left_corner(graph):
    # Start with an arbitrary corner, make this the top-left corner
    corner = iterutils.first(node for node in graph.nodes if graph.degree(node) == 2)
    corner_tile = graph.nodes[corner]['tile']

    interior_edges = set(
        itertools.chain.from_iterable(
            graph.edges[edge]['edge']
            for edge in graph.edges(corner)
        )
    )

    corner_edges = corner_tile.edges()
    interior_indices = sorted([i for i, edge in enumerate(corner_edges) if edge in interior_edges])

    # Rotate corner until it's in place (i.e. the interior borders face to the right and below)
    while interior_indices != [2, 3]:
        corner_tile.rotate()
        corner_edges = corner_tile.edges()
        interior_indices = sorted([i for i, edge in enumerate(corner_edges) if edge in interior_edges])

    return corner
Example #28
def _get_pkg_info(plist, project, repo_dir):
    # snap: search for snapcraft.yaml
    # appimage: find -iname "appimage" -type d
    # flatpak: find -iname "flatpak" -type d  # maybe exclude test dirs, e.g., what ansible has
    # docker: find -name "Dockerfile"
    ret = {}
    container_stacks = OMD()
    for path in iter_find_files(repo_dir, CONTAINER_FILES, include_dirs=True):
        container_stacks.add(
            os.path.splitext(os.path.basename(path))[0].lower(), path)
    has_docker = bool(container_stacks.pop('dockerfile', None))
    container_stack = first(container_stacks.keys(),
                            None) or ('docker' if has_docker else '')
    ret['container'] = container_stack

    # TODO: split into mac/windows/linux? for linux I'll need to look
    # at deb/rpm, and I'm not sure the best strategy there. rpm maybe
    # .spec files? might have to check inside as other tools
    # (pyinstaller) uses .spec, too.

    # freezers -> pyInstaller, cx_Freeze, py2exe, py2app, pynsist
    # (bbFreeze phased out, osnap/constructor not yet adopted, harder
    # to search for). conda and omnibus also not adopted.

    freezer_res_map = OMD()
    for freezer_name in FREEZERS:
        search_output = search_files(freezer_name, '*', repo_dir)
        if search_output:
            freezer_res_map.add(freezer_name, len(search_output.splitlines()))
    if freezer_res_map:
        top, top_res = sorted(freezer_res_map.items(), key=lambda x: x[1])[-1]
        ret['freezer'] = top

    return ret
Example #29
def part_2():
    foods = get_foods()
    hypoallergenic = get_hypoallergenic_ingredients(foods)
    allergen_candidates = collections.defaultdict(set)

    for food in foods:
        for ingredient, allergen in itertools.product(food.ingredients,
                                                      food.allergens):
            if ingredient not in hypoallergenic:
                allergen_candidates[allergen].add(ingredient)

    # Remove invalid candidates
    for allergen, ingredients in allergen_candidates.items():
        for ingredient in list(ingredients):
            if not all(ingredient in food.ingredients
                       for food in foods if allergen in food.allergens):
                ingredients.remove(ingredient)

    allergen_map = {}

    while allergen_candidates:
        # Find the next allergen that has been solved, i.e. only 1 candidate left
        solved_allergen = iterutils.first(
            allergen_candidates,
            key=lambda allergen: len(allergen_candidates[allergen]) == 1,
        )
        ingredient = allergen_candidates[solved_allergen].pop()
        allergen_map[solved_allergen] = ingredient

        # Remove allergen from candidate map and remove ingredient from other candidate lists
        del allergen_candidates[solved_allergen]
        for allergen, ingredients in allergen_candidates.items():
            ingredients.discard(ingredient)

    allergens = sorted(allergen_map.keys())
    print(','.join(allergen_map[allergen] for allergen in allergens))
Example #30
import threading

from os import getenv
from time import sleep

from boltons.iterutils import first
from redis import StrictRedis
from redis.sentinel import Sentinel, SentinelConnectionPool

from apierrors.errors.server_error import ConfigError, GeneralError
from config import config

log = config.logger(__file__)

OVERRIDE_HOST_ENV_KEY = ("TRAINS_REDIS_SERVICE_HOST", "REDIS_SERVICE_HOST")
OVERRIDE_PORT_ENV_KEY = ("TRAINS_REDIS_SERVICE_PORT", "REDIS_SERVICE_PORT")

OVERRIDE_HOST = first(filter(None, map(getenv, OVERRIDE_HOST_ENV_KEY)))
if OVERRIDE_HOST:
    log.info(f"Using override redis host {OVERRIDE_HOST}")

OVERRIDE_PORT = first(filter(None, map(getenv, OVERRIDE_PORT_ENV_KEY)))
if OVERRIDE_PORT:
    log.info(f"Using override redis port {OVERRIDE_PORT}")


class MyPubSubWorkerThread(threading.Thread):
    def __init__(self, sentinel, on_new_master, msg_sleep_time, daemon=True):
        super(MyPubSubWorkerThread, self).__init__()
        self.daemon = daemon
        self.sentinel = sentinel
        self.on_new_master = on_new_master
        self.sentinel_host = sentinel.connection_pool.connection_kwargs["host"]
Example #31
def get_version(repo):
    return first(repo.get_releases()).title
Example #32
import os

from flask import Flask, request
from flask_compress import Compress
from flask_cors import CORS
from werkzeug.exceptions import NotFound
from werkzeug.security import safe_join

from config import config
from utils import get_env_bool

DEFAULT_UPLOAD_FOLDER = "/mnt/fileserver"

app = Flask(__name__)
CORS(app, **config.get("fileserver.cors"))

if get_env_bool("CLEARML_COMPRESS_RESP", default=True):
    Compress(app)

app.config["UPLOAD_FOLDER"] = first(
    (os.environ.get(f"{prefix}_UPLOAD_FOLDER") for prefix in ("CLEARML", "TRAINS")),
    default=DEFAULT_UPLOAD_FOLDER,
)
app.config["SEND_FILE_MAX_AGE_DEFAULT"] = config.get(
    "fileserver.download.cache_timeout_sec", 5 * 60
)


@app.before_request
def before_request():
    if request.content_encoding:
        return f"Content encoding is not supported ({request.content_encoding})", 415


@app.after_request
def after_request(response):
    response.headers["server"] = config.get(
Example #33
    "CLEARML_ELASTIC_SERVICE_HOST",
    "TRAINS_ELASTIC_SERVICE_HOST",
    "ELASTIC_SERVICE_HOST",
    "ELASTIC_SERVICE_SERVICE_HOST",
)
OVERRIDE_PORT_ENV_KEY = (
    "CLEARML_ELASTIC_SERVICE_PORT",
    "TRAINS_ELASTIC_SERVICE_PORT",
    "ELASTIC_SERVICE_PORT",
)

OVERRIDE_USERNAME_ENV_KEY = ("CLEARML_ELASTIC_SERVICE_USERNAME", )

OVERRIDE_PASSWORD_ENV_KEY = ("CLEARML_ELASTIC_SERVICE_PASSWORD", )

OVERRIDE_HOST = first(filter(None, map(getenv, OVERRIDE_HOST_ENV_KEY)))
if OVERRIDE_HOST:
    log.info(f"Using override elastic host {OVERRIDE_HOST}")

OVERRIDE_PORT = first(filter(None, map(getenv, OVERRIDE_PORT_ENV_KEY)))
if OVERRIDE_PORT:
    log.info(f"Using override elastic port {OVERRIDE_PORT}")

OVERRIDE_USERNAME = first(filter(None, map(getenv, OVERRIDE_USERNAME_ENV_KEY)))
if OVERRIDE_USERNAME:
    log.info(f"Using override elastic username {OVERRIDE_USERNAME}")

OVERRIDE_PASSWORD = first(filter(None, map(getenv, OVERRIDE_PASSWORD_ENV_KEY)))
if OVERRIDE_PASSWORD:
    log.info("Using override elastic password ********")
Example #34
    def _deduce_provides_from_fn_name(self, fn_name):
        # Trim prefix from function-name to derive a singular "provides".
        provides = first(
            self._match_fn_name_pattern(fn_name, p) for p in self.out_patterns)
        return provides
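The idiom here is "return the first pattern that yields a match". A standalone sketch with hypothetical patterns and a stand-in matcher:

import re
from boltons.iterutils import first

out_patterns = [r'^calc_(.+)$', r'^get_(.+)$']  # hypothetical prefixes

def match_fn_name_pattern(fn_name, pattern):
    # Return the trimmed name on a match, None otherwise.
    m = re.match(pattern, fn_name)
    return m.group(1) if m else None

provides = first(match_fn_name_pattern('get_temperature', p)
                 for p in out_patterns)
assert provides == 'temperature'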
Example #35
    def __init__(self, route, app, **kwargs):
        # TODO: maybe two constructors, one for initial binding, one for rebinding?

        # keep a reference to the unbound version
        self.unbound_route = unbound_route = getattr(route, 'unbound_route',
                                                     route)
        self.bound_apps = getattr(route, 'bound_apps', []) + [app]

        prefix = kwargs.pop('prefix', '')
        rebind_render = kwargs.pop('rebind_render', True)
        inherit_slashes = kwargs.pop('inherit_slashes', True)
        rebind_render_error = kwargs.pop('rebind_render_error', True)
        if kwargs:
            raise TypeError('unexpected keyword args: %r' % kwargs.keys())

        self.pattern = prefix + route.pattern
        self.slash_mode = app.slash_mode if inherit_slashes else route.slash_mode
        self.methods = route.methods

        self.regex, self.converters = _compile_path_pattern(
            self.pattern, self.slash_mode)
        self.path_args = self.converters.keys()
        self.endpoint_args = get_arg_names(unbound_route.endpoint)

        app_resources = getattr(app, 'resources', {})
        self.resources = dict(app_resources)
        self.resources.update(getattr(route, 'resources', {}))
        app_mws = getattr(app, 'middlewares', [])
        self.middlewares = tuple(
            merge_middlewares(getattr(route, 'middlewares', []), app_mws))

        # rebind_render=True is basically a way of making the
        # generated render function sticky to the most-recently bound
        # application which can fulfill it.
        bind_render = rebind_render or route.render is _noop_render or not callable(
            route.render)

        render_factory_list = [
            getattr(ba, 'render_factory', None) for ba in self.bound_apps
        ]
        render_factory = first(reversed(render_factory_list), key=callable)

        if callable(unbound_route.render):
            # explicit callable renders always take precedence
            render = unbound_route.render
            render_factory = None
        elif bind_render and render_factory and unbound_route.render is not None:
            render = render_factory(unbound_route.render)
        else:
            # default to carrying through values from the route
            render = route.render if callable(route.render) else _noop_render
            render_factory = getattr(route, 'render_factory', None)
        self.render_factory = render_factory
        self.render = render

        if rebind_render_error:
            render_error = getattr(app.error_handler, 'render_error', None)
        else:
            render_error = route.render_error
        if callable(render_error):
            check_render_error(render_error, self.resources)
        self.render_error = render_error

        src_provides_map = {
            'url': set(self.converters),
            'builtins': set(RESERVED_ARGS),
            'resources': set(self.resources)
        }
        check_middlewares(self.middlewares, src_provides_map)
        provided = set.union(*src_provides_map.values())

        self._execute = make_middleware_chain(self.middlewares,
                                              unbound_route.endpoint, render,
                                              provided)

        self._required_args = self._resolve_required_args()