Beispiel #1
0
import pathlib

from dffml import op, DataFlow, Input, Definition, GetSingle
from dffml_feature_git.feature.operations import clone_git_repo


@op(
    inputs={"repo": clone_git_repo.op.outputs["repo"]},
    outputs={
        "result": Definition(name="has_package_cargo_result", primitive="bool")
    },
)
def has_package_cargo(repo: clone_git_repo.op.outputs["repo"].spec):
    """Report whether the cloned repo looks like a Rust/Cargo project.

    Checks for a top-level Cargo manifest under both capitalizations,
    since the canonical name is ``Cargo.toml`` but some repos use
    ``cargo.toml``.
    """
    repo_dir = pathlib.Path(repo.directory)
    manifest_names = ("cargo.toml", "Cargo.toml")
    found = any((repo_dir / name).is_file() for name in manifest_names)
    return {"result": found}


# Build the dataflow: run has_package_cargo, then let GetSingle collect
# its result.
DATAFLOW = DataFlow.auto(has_package_cargo, GetSingle)
# Seed input telling GetSingle which output definition names to gather.
_seed_spec = Input(
    value=[has_package_cargo.op.outputs["result"].name],
    definition=GetSingle.op.inputs["spec"],
)
DATAFLOW.seed.append(_seed_spec)


@op(
Beispiel #2
0
import json
import os
import tempfile
import urllib.parse
from pathlib import Path
from typing import Dict, Any

from dffml import op, Definition, run_command

# Input: path to the package source directory to scan.
package_src_dir = Definition(name="package_src_dir", primitive="str")
# Output: parsed dependency-check report as a dict.
dependency_check_output = Definition(name="dependency_check_output",
                                     primitive="Dict[str, Any]")


class DependencyCheckError(Exception):
    """Signals that a dependency-check run failed."""


@op(
    inputs={"pkg": package_src_dir},
    outputs={"report": dependency_check_output},
)
async def run_dependency_check(self, pkg: str) -> Dict[str, Any]:
    """
    CLI usage: dffml service dev run -log debug shouldi.dependency_check:run_dependency_check -pkg .
    """
    with tempfile.TemporaryDirectory() as tempdir:
        # Define command
        cmd = [
Beispiel #3
0
import asyncio
import tempfile

from dffml import op, Definition


@op(
    inputs={
        "input_file": Definition(name="input_file", primitive="bytes"),
        "resolution": Definition(name="resolution", primitive="int"),
    },
    outputs={"output_file": Definition(name="output_file", primitive="bytes")},
)
async def convert_to_gif(input_file, resolution):
    temp_input_file = tempfile.NamedTemporaryFile(prefix="ffmpeg-")
    temp_input_file.write(input_file)
    temp_input_file.seek(0)
    proc = await asyncio.create_subprocess_exec(
        "ffmpeg",
        "-ss",
        "0.3",
        "-t",
        "10",
        "-i",
        temp_input_file.name,
        "-y",
        "-vf",
        f"fps=10,scale={resolution}:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse",
        "-loop",
        "0",
        "-f",
Beispiel #4
0
from dffml import Definition

# Shared dffml Definitions used as operation input/output types.
UUID = Definition(name="uuid", primitive="string")
NAME = Definition(name="name", primitive="string")
STATIC_ANALYSIS = Definition(name="staticAnalysis", primitive="string")
LEGEL = Definition(name="legal", primitive="string")
# Correctly spelled alias for the misspelled LEGEL above; both names are
# kept so existing importers of LEGEL continue to work.
LEGAL = LEGEL
Beispiel #5
0
import pathlib

from dffml import op, DataFlow, Input, Definition, GetSingle
from dffml_feature_git.feature.operations import clone_git_repo


@op(
    inputs={"repo": clone_git_repo.op.outputs["repo"]},
    outputs={
        "result": Definition(name="has_setup_py_result", primitive="bool")
    },
)
def has_setup_py(repo: clone_git_repo.op.outputs["repo"].spec):
    """Report whether the cloned repo has a top-level setup.py file."""
    setup_py = pathlib.Path(repo.directory) / "setup.py"
    return {"result": setup_py.is_file()}


# Dataflow that identifies Python projects: run has_setup_py, then let
# GetSingle collect its result.
DATAFLOW_ID_PYTHON = DataFlow.auto(has_setup_py, GetSingle)
# Seed input telling GetSingle which output definition names to gather.
_id_python_spec = Input(
    value=[has_setup_py.op.outputs["result"].name],
    definition=GetSingle.op.inputs["spec"],
)
DATAFLOW_ID_PYTHON.seed.append(_id_python_spec)


@op(
    inputs={"repo": clone_git_repo.op.outputs["repo"]},
    outputs={"python": Definition(name="repo_is_python", primitive="string")},
)
async def check_python(self, repo):
Beispiel #6
0
    async def test_config_default_label(self):
        """Unknown CLI args should yield a config whose labeled kvstore
        ('withargs') is constructed with the given filename."""
        cli_args = [
            "--rchecker-memory-kvstore",
            "withargs",
            "--rchecker-memory-kvstore-withargs-filename",
            "somefile",
        ]
        with patch.object(BaseKeyValueStore, "load", load_kvstore_with_args):
            parsed = MemoryRedundancyChecker.config(
                await parse_unknown(*cli_args))
            self.assertEqual(type(parsed), MemoryRedundancyCheckerConfig)
            self.assertEqual(type(parsed.kvstore), KeyValueStoreWithArguments)
            self.assertEqual(
                type(parsed.kvstore.config), KeyValueStoreWithArgumentsConfig)
            self.assertEqual(parsed.kvstore.config.filename, "somefile")


# Boolean definition used to gate operations via @op(conditions=[...]).
CONDITION = Definition(name="condition", primitive="boolean")


class TestMemoryOrchestrator(AsyncTestCase):
    async def test_condition_does_run(self):
        ran = []

        @op(conditions=[CONDITION])
        async def condition_test(hi: str):
            ran.append(True)

        async with MemoryOrchestrator() as orchestrator:
            async with orchestrator(DataFlow(condition_test)) as octx:
                async for _ in octx.run([
                        Input(
                            value=True,
Beispiel #7
0
    },
    "Washington": {
        "url":
        "https://www2.census.gov/programs-surveys/popest/datasets/2010-2019/cities/totals/sub-est2019_53.csv",
        "expected_sha384_hash":
        "b426810c8438585c67057c5073281fce6b20d6bf013370256d6dbdcc4ad0b92c7d673c1e7d6e2a1d14e59f7bbc6599ad",
    },
    "Illinois": {
        "url":
        "https://www2.census.gov/programs-surveys/popest/datasets/2010-2019/cities/totals/sub-est2019_17.csv",
        "expected_sha384_hash":
        "a55edf7f31ccdc792d183bb0c1dccbc55f6cfb5d518502e3fc5278d230a0174a741ae625d2b00e650dc1d8cd39f2e989",
    },
}

# Generic-typed values flowing out of the dataset lookup operations.
temperature_def = Definition(name="temperature", primitive="generic")
population_def = Definition(name="population", primitive="generic")


@op(outputs={"temperature": temperature_def})
async def lookup_temperature(self, city: str, month: int):
    if city not in temperature_dataset_urls:
        raise Exception(f"City: {city} not found in dataset")

    cache_dir = (pathlib.Path("~", ".cache", "dffml", "datasets",
                              "temperature").expanduser().resolve())

    filepath = await cached_download(
        temperature_dataset_urls[city]["url"],
        cache_dir / f"{city}.json",
        temperature_dataset_urls[city]["expected_sha384_hash"],
Beispiel #8
0
from dffml import op, Definition

from .definitions import UUID


class ExampleAPIServer(httptest.Handler):
    """Test HTTP server that answers every GET with a random PASS/FAIL."""

    def do_GET(self):
        verdict = random.choice(["PASS", "FAIL"])
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.end_headers()
        self.wfile.write(verdict.encode())


@httptest.Server(ExampleAPIServer)
async def make_request_to_example_server(session, ts=httptest.NoServer()):
    """GET the test server's root URL and return the decoded body."""
    async with session.get(ts.url()) as resp:
        body = await resp.read()
        return body.decode()


@op(
    inputs={"uuid": UUID},
    outputs={"result": Definition(name="api_result", primitive="string")},
    imp_enter={
        "session": (lambda self: aiohttp.ClientSession(trust_env=True))
    },
)
async def query_an_api(self, uuid: str, ts=httptest.NoServer()) -> str:
    """Query the example API server, returning its PASS/FAIL body.

    NOTE(review): ``uuid`` and ``ts`` are unused here — the
    @httptest.Server decorator on make_request_to_example_server binds
    its own test server. Confirm whether ``ts`` can be dropped.
    """
    response_text = await make_request_to_example_server(self.parent.session)
    return {"result": response_text}
Beispiel #9
0
import asyncio
from dffml import Definition, DataFlow, Input, op
from dffml.noasync import run

# Plain shared mapping; no locking, so concurrently running operations
# may interleave their writes to it.
OBJ = Definition(name="obj", primitive="mapping")
# Same mapping shape, but lock=True makes the orchestrator serialize
# operations that take this object as an input.
LOCKED_OBJ = Definition(name="locked_obj", primitive="mapping", lock=True)
# NOTE(review): declared "int" but seeded with floats 0.1/0.2 below —
# consider primitive="float"; confirm nothing validates the primitive.
SLEEP_TIME = Definition(name="sleep_time", primitive="int")
INTEGER = Definition(name="integer", primitive="int")


@op(inputs={"obj": OBJ, "sleep_for": SLEEP_TIME, "i": INTEGER})
async def run_me(obj: dict, sleep_for: int, i: int) -> None:
    """Write ``i`` into the shared mapping, sleep, then print what survived.

    Demonstrates the race on an unlocked object: while this operation
    sleeps, a concurrent run_me may overwrite obj["i"], so the value
    read back can differ from the one written.
    """
    obj["i"] = i
    await asyncio.sleep(sleep_for)
    print(f"set i = {i}, got i = {obj['i']}")


print("Running dataflow without locked object")
# Run the dataflow over every (sleep_time, integer) input combination;
# with the unlocked OBJ both runs mutate the same dict concurrently.
for ctx, result in run(
        DataFlow(run_me),
    [
        Input(value={}, definition=OBJ),
        Input(value=0.1, definition=SLEEP_TIME),
        Input(value=0.2, definition=SLEEP_TIME),
        Input(value=1, definition=INTEGER),
        Input(value=2, definition=INTEGER),
    ],
):
    pass

print("Running dataflow with locked object")
Beispiel #10
0
import json
import asyncio
from typing import Dict, Any

from dffml import op, Definition

# Input: path to the Go package source to lint.
package_src_dir = Definition(name="package_src_dir", primitive="str")
# Output: parsed JSON report produced by golangci-lint.
golangci_lint_output = Definition(
    name="golangci_lint_output", primitive="Dict[str, Any]"
)


class GoLangCILintError(Exception):
    """Signals that a golangci-lint run failed."""


@op(inputs={"pkg": package_src_dir}, outputs={"report": golangci_lint_output})
async def run_golangci_lint(pkg: str) -> Dict[str, Any]:
    """
    CLI usage: dffml service dev run -log debug shouldi.golangci_lint:run_golangci_lint -pkg .
    """
    proc = await asyncio.create_subprocess_exec(
        "golangci-lint",
        "run",
        "--out-format",
        "json",
        "./...",
        cwd=pkg,
        stdout=asyncio.subprocess.PIPE,
Beispiel #11
0
import shutil
import tempfile
from typing import Dict, Any

import aiohttp

from dffml import op, Definition, Stage

from .safety import package, package_version
from .bandit import package_src_dir

# JSON metadata for a package (as fetched from the PyPI API below).
package_json = Definition(name="package_json", primitive="Dict[str, Any]")
# Download URL for a package artifact — presumably the release tarball;
# verify against the operations that consume it.
package_url = Definition(name="package_url", primitive="str")


@op(
    inputs={"package": package},
    outputs={"response_json": package_json},
    # imp_enter allows us to create instances of objects which are async context
    # managers and assign them to self.parent which is an object of type
    # OperationImplementation which will be alive for the lifetime of the
    # Orchestrator which runs all these operations.
    imp_enter={
        "session": (lambda self: aiohttp.ClientSession(trust_env=True))
    },
)
async def pypi_package_json(self, package: str) -> Dict[str, Any]:
    """
    Download the information on the package in JSON format.
    """
    url = f"https://pypi.org/pypi/{package}/json"
Beispiel #12
0
import io
import re
import json
import tempfile
import contextlib
from aiohttp import ClientSession, ClientTimeout

from dffml.cli.cli import CLI
from dffml import op, config, Definition, BaseSecret

# Gitter API credential and addressing values passed between operations.
ACCESSTOKEN = Definition(name="access_token", primitive="str")
ROOMNAME = Definition(name="room_name", primitive="str")
ROOMID = Definition(name="room_id", primitive="str")
MESSAGE = Definition(name="message", primitive="str")
TOSEND = Definition(name="to_send", primitive="str")


@config
class GitterChannelConfig:
    """Configuration for the Gitter channel operations."""

    # Secret store; entered as the operation's "secret" context below.
    secret: BaseSecret


@op(
    inputs={"room_uri": ROOMNAME},
    outputs={"room_id": ROOMID},
    config_cls=GitterChannelConfig,
    imp_enter={
        "secret": lambda self: self.config.secret,
        "session": lambda self: ClientSession(trust_env=True),
    },
    ctx_enter={"sctx": lambda self: self.parent.secret()},
Beispiel #13
0
import pathlib

from dffml import op, DataFlow, Input, Definition, GetSingle
from dffml_feature_git.feature.operations import clone_git_repo


@op(
    inputs={"repo": clone_git_repo.op.outputs["repo"]},
    outputs={
        "result": Definition(name="has_package_json_result", primitive="bool")
    },
)
def has_package_json(repo: clone_git_repo.op.outputs["repo"].spec):
    """Report whether the cloned repo has a top-level package.json."""
    manifest = pathlib.Path(repo.directory) / "package.json"
    return {"result": manifest.is_file()}


# Build the dataflow: run has_package_json, then let GetSingle collect
# its result.
DATAFLOW = DataFlow.auto(has_package_json, GetSingle)
# Seed input telling GetSingle which output definition names to gather.
_pkg_json_spec = Input(
    value=[has_package_json.op.outputs["result"].name],
    definition=GetSingle.op.inputs["spec"],
)
DATAFLOW.seed.append(_pkg_json_spec)


@op(
    inputs={"repo": clone_git_repo.op.outputs["repo"]},
    outputs={
        "javascript": Definition(name="repo_is_javascript", primitive="string")
    },
Beispiel #14
0
import sys
import json
import asyncio
from typing import Dict, Any

from dffml import op
from dffml import Definition

# Input: path to the Python package source to scan with bandit.
package_src_dir = Definition(name="package_src_dir", primitive="str")
# Output: parsed JSON report produced by bandit.
bandit_output = Definition(name="bandit_output", primitive="Dict[str, Any]")


@op(inputs={"pkg": package_src_dir}, outputs={"report": bandit_output})
async def run_bandit(pkg: str) -> Dict[str, Any]:
    """
    CLI usage: dffml service dev run -log debug shouldi.bandit:run_bandit -pkg .
    """
    proc = await asyncio.create_subprocess_exec(
        sys.executable,
        "-m",
        "bandit",
        "-r",
        "-f",
        "json",
        pkg,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )

    stdout, _stderr = await proc.communicate()
    if len(stdout) == 0:
Beispiel #15
0
import sys
import json
import asyncio
from typing import Dict, Any

from dffml import op
from dffml import Definition

# PyPI package name and version pinned for the safety check.
package = Definition(name="package", primitive="str")
package_version = Definition(name="package_version", primitive="str")
# Number of known-vulnerability issues reported by safety.
safety_check_number_of_issues = Definition(
    name="safety_check_number_of_issues", primitive="int")


@op(
    name="safety_check",
    inputs={
        "package": package,
        "version": package_version
    },
    outputs={"issues": safety_check_number_of_issues},
    conditions=[],
)
async def safety_check(package: str, version: str) -> Dict[str, Any]:
    pinned = f"{package}=={version}"

    proc = await asyncio.create_subprocess_exec(
        sys.executable,
        "-m",
        "safety",
        "check",
Beispiel #16
0
import json
import asyncio
from typing import Dict, Any

from dffml import op, Definition

# Input: path to the Rust package source to audit.
package_src_dir = Definition(name="package_src_dir", primitive="str")
# Output: parsed output of cargo-audit.
cargo_audit_output = Definition(name="cargo_audit_output",
                                primitive="Dict[str, Any]")


class CargoAuditError(Exception):
    """Signals that a cargo-audit run failed."""


async def run_cargo_build(pkg_input: str):
    """Build the Rust package at ``pkg_input`` in release mode.

    Runs ``cargo build --release`` with the package directory as the
    working directory so later audit steps see compiled artifacts.

    Args:
        pkg_input: Path to the package source directory (used as cwd).

    Raises:
        Exception: with cargo's captured stderr when the build exits
            non-zero.
    """
    proc = await asyncio.create_subprocess_exec(
        "cargo",
        "build",
        "--release",
        cwd=pkg_input,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    # stdout is captured only to keep the pipe drained; the build log
    # itself is not inspected — failures are reported via stderr.
    _stdout, stderr = await proc.communicate()
    if proc.returncode != 0:
        raise Exception(stderr.decode())
Beispiel #17
0
from dffml import Definition

from typing import NamedTuple


class SAResultsSpec(NamedTuple):
    """
    Static analysis results for a language.
    """

    # Issue counts bucketed by severity.
    critical: int
    high: int
    medium: int
    low: int
    # Raw tool report backing the counts above.
    report: dict


# Definition whose payload values follow SAResultsSpec (via spec=), even
# though the declared primitive is "string".
SA_RESULTS = Definition(
    name="static_analysis", primitive="string", spec=SAResultsSpec,
)