    def save_dashboard(self, data):
        if "id" in data:
            existing_dashboard = self.get_dashboard(data["id"])
            if existing_dashboard:
                raise DashboardAlreadyExistsError(existing_dashboard, "id")

        name = data["name"]
        slug = slugify(name)
        file_path = self.project.analyze_dir("dashboards",
                                             f"{slug}.dashboard.m5o")

        if os.path.exists(file_path):
            with file_path.open() as f:
                existing_dashboard = json.load(f)
            raise DashboardAlreadyExistsError(existing_dashboard, "slug")

        data = MeltanoAnalysisFileParser.fill_base_m5o_dict(
            file_path.relative_to(self.project.root), slug, data)
        data["version"] = DashboardsService.VERSION
        data["description"] = data["description"] or ""
        data["report_ids"] = []

        with self.project.file_update():
            with file_path.open("w") as f:
                json.dump(data, f)

        return data
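
For context, the duplicate guard above keys off the slugified dashboard name, so two names that collapse to the same slug map to the same .dashboard.m5o file. A minimal sketch of that naming convention, assuming the python-slugify package (the sample names are invented):

from slugify import slugify

# Hypothetical dashboard names; both collapse to the same slug, so saving the
# second one would hit the DashboardAlreadyExistsError("slug") branch above.
print(slugify("Sales Overview"))    # -> "sales-overview"
print(slugify("Sales  Overview!"))  # -> "sales-overview"

# File name handed to self.project.analyze_dir("dashboards", ...)
print(f"{slugify('Sales Overview')}.dashboard.m5o")  # -> "sales-overview.dashboard.m5o"
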
Example n. 2
    def update_report(self, data):
        id = data["id"]
        existing_report = self.get_report(id)
        slug = existing_report["slug"]

        file_path = self.project.analyze_dir("reports", f"{slug}.report.m5o")

        new_name = data["name"]
        new_slug = slugify(new_name)
        new_file_path = self.project.analyze_dir("reports",
                                                 f"{new_slug}.report.m5o")
        is_same_file = new_slug == slug
        if not is_same_file and os.path.exists(new_file_path):
            with new_file_path.open() as f:
                existing_report = json.load(f)
            raise ReportAlreadyExistsError(existing_report, "slug")

        with self.project.file_update():
            os.remove(file_path)

        data["slug"] = new_slug
        data["path"] = str(new_file_path.relative_to(self.project.root))

        with self.project.file_update():
            with new_file_path.open("w") as f:
                json.dump(data, f)

        return data
Example n. 3
    def update_dashboard(self, data):
        dashboard = self.get_dashboard(data["dashboard"]["id"])
        slug = dashboard["slug"]

        file_path = self.project.analyze_dir("dashboards", f"{slug}.dashboard.m5o")
        if not os.path.exists(file_path):
            raise DashboardDoesNotExistError(data)

        new_settings = data["new_settings"]
        new_name = new_settings["name"]
        new_slug = slugify(new_name)
        new_file_path = self.project.analyze_dir(
            "dashboards", f"{new_slug}.dashboard.m5o"
        )
        is_same_file = new_slug == slug
        if not is_same_file and os.path.exists(new_file_path):
            with new_file_path.open() as f:
                existing_dashboard = json.load(f)
            raise DashboardAlreadyExistsError(existing_dashboard, "slug")

        os.remove(file_path)

        dashboard["slug"] = new_slug
        dashboard["name"] = new_name
        dashboard["description"] = new_settings["description"]
        dashboard["path"] = str(new_file_path.relative_to(self.project.root))

        if "report_ids" in new_settings:
            dashboard["report_ids"] = new_settings["report_ids"]

        with new_file_path.open("w") as f:
            json.dump(dashboard, f)

        return dashboard
Example n. 4
    @staticmethod
    def fill_base_m5o_dict(file, name, file_dict=None):
        if file_dict is None:
            file_dict = {"name": name}

        file_dict["path"] = str(file)
        file_dict["id"] = encode_id_from_file_path(file_dict["path"])
        file_dict["slug"] = slugify(name)
        file_dict["createdAt"] = time.time()
        return file_dict
Example n. 5
    @staticmethod
    def fill_base_m5o_dict(file, name, file_dict=None):
        if file_dict is None:
            file_dict = {"name": name}

        file_dict["path"] = str(file)
        file_dict["slug"] = slugify(name)

        if "id" not in file_dict:
            file_dict["id"] = str(uuid.uuid4())

        # Legacy reports and dashboards may carry a camelCase `createdAt` key; it
        # would sit alongside the new `created_at` key rather than be overwritten,
        # so drop it here.
        file_dict.pop("createdAt", None)

        file_dict["created_at"] = datetime.now(timezone.utc).timestamp()

        return file_dict
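
The two fill_base_m5o_dict variants above differ mainly in how they mint the record's id and timestamp: the first derives the id from the file path and stamps a camelCase createdAt, while the second assigns a random UUID and a snake_case created_at, discarding any legacy createdAt. A self-contained sketch of the newer behaviour, using an invented report path and name:

import uuid
from datetime import datetime, timezone
from pathlib import Path

from slugify import slugify

file = Path("reports/weekly-sales.report.m5o")  # hypothetical relative path
name = "Weekly Sales"

file_dict = {"name": name}
file_dict["path"] = str(file)
file_dict["slug"] = slugify(name)  # -> "weekly-sales"

if "id" not in file_dict:
    file_dict["id"] = str(uuid.uuid4())  # random id rather than one derived from the path

file_dict.pop("createdAt", None)  # drop any legacy camelCase timestamp
file_dict["created_at"] = datetime.now(timezone.utc).timestamp()

print(file_dict)
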
Example n. 6
    def save_report(self, data):
        report_name = data["name"]

        # guard if it already exists
        existing_report = self.get_report_by_name(report_name)
        if existing_report:
            raise ReportAlreadyExistsError(existing_report)

        project = Project.find()
        slug = slugify(report_name)
        file_path = project.analyze_dir("reports", f"{slug}.report.m5o")

        data = MeltanoAnalysisFileParser.fill_base_m5o_dict(
            file_path, slug, data)
        data["version"] = ReportsHelper.VERSION

        with file_path.open("w") as f:
            json.dump(data, f)

        return data
Example n. 7
def save_pipeline_schedule() -> Response:
    """
    Endpoint for persisting a pipeline schedule
    """
    incoming = request.get_json()
    # Airflow requires alphanumeric characters, dashes, dots and underscores exclusively
    name = slugify(incoming["name"])
    extractor = incoming["extractor"]
    loader = incoming["loader"]
    transform = incoming["transform"]
    interval = incoming["interval"]

    project = Project.find()
    schedule_service = ScheduleService(project)

    try:
        schedule = schedule_service.add(db.session, name, extractor, loader,
                                        transform, interval)
        return jsonify(schedule._asdict()), 201
    except ScheduleAlreadyExistsError as e:
        raise ScheduleAlreadyExistsError(e.schedule)
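
As the comment in the handler notes, the incoming name is passed through slugify so that Airflow only ever sees alphanumerics, dashes, dots and underscores. A quick illustration of that normalisation, assuming the python-slugify package (the sample names are invented):

from slugify import slugify

print(slugify("Nightly GitLab Load"))        # -> "nightly-gitlab-load"
print(slugify("Carbon Intensity: Hourly!"))  # -> "carbon-intensity-hourly"
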
Example n. 8
    def save_dashboard(self, data):
        dashboard_name = data["name"]

        # guard if it already exists
        existing_dashboard = self.get_dashboard_by_name(dashboard_name)
        if existing_dashboard:
            raise DashboardAlreadyExistsError(existing_dashboard)

        project = Project.find()
        slug = slugify(dashboard_name)
        file_path = project.analyze_dir("dashboards", f"{slug}.dashboard.m5o")
        data = MeltanoAnalysisFileParser.fill_base_m5o_dict(
            file_path, slug, data)
        data["version"] = DashboardsHelper.VERSION
        data["description"] = data["description"] or ""
        data["report_ids"] = []

        with file_path.open("w") as f:
            json.dump(data, f)

        return data
Example n. 9
def add_plugin_configuration_profile(plugin_ref) -> Response:
    """
    Endpoint for adding a configuration profile to a plugin
    """
    payload = request.get_json()
    project = Project.find()
    config = ConfigService(project)
    plugin = config.get_plugin(plugin_ref)
    settings = PluginSettingsService(project)

    # create the new profile for this plugin
    name = payload["name"]
    profile = plugin.add_profile(slugify(name), label=name)

    config.update_plugin(plugin)

    profile_config = settings.profile_with_config(db.session,
                                                  plugin,
                                                  profile,
                                                  redacted=True)
    freeze_profile_config_keys(profile_config)

    return jsonify(profile_config)
Example n. 10
    def save_report(self, data):
        if "id" in data:
            existing_report = self.get_report(data["id"])
            if existing_report:
                raise ReportAlreadyExistsError(existing_report, "id")

        name = data["name"]
        slug = slugify(name)
        file_path = self.project.analyze_dir("reports", f"{slug}.report.m5o")

        if os.path.exists(file_path):
            with file_path.open() as f:
                existing_report = json.load(f)
            raise ReportAlreadyExistsError(existing_report, "slug")

        data = MeltanoAnalysisFileParser.fill_base_m5o_dict(
            file_path.relative_to(self.project.root), slug, data)
        data["version"] = ReportsService.VERSION

        with file_path.open("w") as f:
            json.dump(data, f)

        return data
Example n. 11
def save_pipeline_schedule() -> Response:
    """
    Endpoint for persisting a pipeline schedule
    """
    payload = request.get_json()
    # Airflow requires alphanumeric characters, dashes, dots and underscores exclusively
    name = payload["name"]
    slug = slugify(name)
    extractor = payload["extractor"]
    loader = payload["loader"]
    transform = payload["transform"]
    interval = payload["interval"]

    project = Project.find()
    schedule_service = ScheduleService(project)

    schedule = schedule_service.add(db.session, slug, extractor, loader,
                                    transform, interval)

    schedule = dict(schedule)
    schedule["start_date"] = (schedule["start_date"].date().isoformat()
                              if schedule["start_date"] else None)

    return jsonify(schedule), 201
Example n. 12
    def legacy_logs_dir(self, job_id, *joinpaths):
        job_dir = self.project.run_dir("elt").joinpath(slugify(job_id), *joinpaths)
        return job_dir if job_dir.exists() else None
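
For context, legacy_logs_dir slugifies the job id before probing the project's run directory and returns None when the old-style directory is gone. A small sketch of the lookup with an invented job id; the .meltano/run root is an assumption about how run_dir resolves:

from slugify import slugify

job_id = "GitLab to Postgres, daily"  # hypothetical job id
print(slugify(job_id))                # -> "gitlab-to-postgres-daily"

# A call like service.legacy_logs_dir(job_id, "elt.log") would then look for
# <project_root>/.meltano/run/elt/gitlab-to-postgres-daily/elt.log and return
# None if that legacy path no longer exists.
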