Example 1
def connect(connection: ConnectionSftp) -> Tuple[Transport, SFTPClient]:
    """Connect to sftp server."""
    try:
        # there is no login timeout in paramiko, so continue to
        # attempt to log in during the time limit if we are
        # getting timeout exceptions
        timeout = time.time() + 60 * 3  # 3 mins from now
        while True:
            try:
                transport = paramiko.Transport(
                    f"{connection.address}:{(connection.port or 22)}")

                # build ssh key file
                key = None
                if connection.key:
                    with tempfile.NamedTemporaryFile(mode="w+",
                                                     newline="") as key_file:
                        key_file.write(
                            em_decrypt(connection.key, app.config["PASS_KEY"]))
                        key_file.seek(0)

                        key = paramiko.RSAKey.from_private_key_file(
                            key_file.name,
                            password=em_decrypt(connection.password,
                                                app.config["PASS_KEY"]),
                        )

                transport.connect(
                    username=str(connection.username),
                    password=(em_decrypt(connection.password,
                                         app.config["PASS_KEY"])
                              if key is None else ""),
                    pkey=key,
                )

                conn = paramiko.SFTPClient.from_transport(transport)
                if conn is None:
                    raise ValueError("Failed to create connection.")

                break
            except (paramiko.ssh_exception.AuthenticationException,
                    EOFError) as e:
                # pylint: disable=no-else-continue
                if str(e) == "Authentication timeout." and time.time(
                ) <= timeout:
                    time.sleep(10)  # wait 10 sec before retrying
                    continue
                elif time.time() > timeout:
                    raise ValueError("Connection timeout.")

                raise ValueError(f"Connection failed.\n{e}")

        return (transport, conn)

    except BaseException as e:
        raise ValueError(f"Connection failed.\n{e}")
Example 2
def connect(connection: ConnectionFtp) -> FTP:
    """Connect to ftp server."""
    try:
        # continue to attempt to log in during the time limit
        # if we are getting error replies
        timeout = time.time() + 60 * 3  # 3 mins from now
        while True:
            try:
                conn = FTP(connection.address or "")  # noqa: S321
                conn.login(
                    user=(connection.username or ""),
                    passwd=(em_decrypt(connection.password,
                                       app.config["PASS_KEY"]) or ""),
                )
                break
            except ftplib.error_reply as e:
                # pylint: disable=no-else-continue
                if time.time() <= timeout:
                    time.sleep(10)  # wait 10 sec before retrying
                    continue
                elif time.time() > timeout:
                    # pylint: disable=raise-missing-from
                    raise ValueError("Connection timeout.")

                else:
                    raise ValueError(f"Connection failed.\n{e}")

        return conn

    except BaseException as e:
        raise ValueError(f"Connection failed.\n{e}")
Example 3
    def build_connect() -> SMBConnection:
        """Build an SMB connection object and cache it in redis."""

        conn = SMBConnection(
            username,
            em_decrypt(password, app.config["PASS_KEY"]),
            "EM2.0 Webapp",
            server_name,
            use_ntlm_v2=True,
        )

        redis_client.set("smb_connection_" + str(server_name),
                         pickle.dumps(conn))

        return conn
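Example 3 pickles the SMBConnection into redis so it can be recycled. A sketch of how the cached object might be read back before falling through to build_connect; the key format, redis_client, and server_name come from the snippet above, while the reuse logic itself is an assumption.

# hedged sketch: reuse a cached SMBConnection if one was pickled earlier,
# otherwise build (and cache) a fresh one via build_connect()
cached = redis_client.get("smb_connection_" + str(server_name))
conn = pickle.loads(cached) if cached else build_connect()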
Example 4
def smb_online(smb_id: int) -> str:
    """Check if connection is online."""
    try:
        smb_connection = ConnectionSmb.query.filter_by(id=smb_id).first()
        smb_connect(
            smb_connection.username,
            em_decrypt(smb_connection.password, app.config["PASS_KEY"]),
            smb_connection.server_name,
            smb_connection.server_ip,
        )
        # we do not close smb connections. they are recycled.
        return '<span class="tag is-success is-light">Online</span>'
    except BaseException as e:
        return f'<span class="has-tooltip-arrow has-tooltip-right has-tooltip-multiline tag is-danger is-light" data-tooltip="{e}">Offline</span>'
Example 5
def database_online(database_id: int) -> str:
    """Check if connection is online."""
    try:
        database_connection = ConnectionDatabase.query.filter_by(id=database_id).first()
        if database_connection.type_id == 2:
            conn, _ = sql_connect(
                em_decrypt(
                    database_connection.connection_string, app.config["PASS_KEY"]
                ).strip(),
                database_connection.timeout or app.config["DEFAULT_SQL_TIMEOUT"],
            )
            conn.close()
        else:
            conn, _ = pg_connect(
                em_decrypt(
                    database_connection.connection_string, app.config["PASS_KEY"]
                ).strip(),
                database_connection.timeout or app.config["DEFAULT_SQL_TIMEOUT"],
            )
            conn.close()

        return '<span class="tag is-success is-light">Online</span>'
    except BaseException as e:
        return f'<span class="has-tooltip-arrow has-tooltip-right has-tooltip-multiline tag is-danger is-light" data-tooltip="{e}">Offline</span>'
Example 6
def connect(connection: ConnectionSsh) -> paramiko.SSHClient:
    """Connect to SSH server."""
    session = paramiko.SSHClient()
    session.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    session.connect(
        hostname=str(connection.address),
        port=(connection.port or 22),
        username=connection.username,
        password=em_decrypt(connection.password, app.config["PASS_KEY"]),
        timeout=5000,
        allow_agent=False,
        look_for_keys=False,
    )

    return session
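A usage sketch for the SSH helper, assuming a hypothetical ssh_conn ConnectionSsh record; exec_command and close are standard paramiko SSHClient calls.

# hedged usage sketch: ssh_conn is a hypothetical ConnectionSsh record
session = connect(ssh_conn)
try:
    # run a command and read its output with the standard paramiko API
    _stdin, stdout, _stderr = session.exec_command("uptime")
    print(stdout.read().decode())
finally:
    session.close()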
Example 7
    def __load_file(self, file_name: str) -> IO[str]:
        """Load a file from the SMB share into a local temp file."""

        director = urllib.request.build_opener(SMBHandler)

        password = em_decrypt(self.password, app.config["PASS_KEY"])
        open_file_for_read = director.open(
            f"smb://{self.username}:{password}@{self.server_name},{self.server_ip}/{self.share_name}/{file_name}"
        )

        def load_data(file_obj: TextIOWrapper) -> Generator:
            with file_obj as this_file:
                while True:
                    data = this_file.read(1024)
                    if not data:
                        break
                    yield data

        # send back contents

        with tempfile.NamedTemporaryFile(mode="wb+",
                                         delete=False,
                                         dir=self.dir) as data_file:
            for data in load_data(open_file_for_read):
                if (self.task.source_smb_ignore_delimiter != 1
                        and self.task.source_smb_delimiter):
                    my_delimiter = self.task.source_smb_delimiter or ","

                    csv_reader = csv.reader(
                        data.splitlines(),
                        delimiter=my_delimiter,
                    )
                    writer = csv.writer(data_file)
                    writer.writerows(csv_reader)

                else:
                    data_file.write(data)

            original_name = str(self.dir.joinpath(file_name.split("/")[-1]))
            if os.path.islink(original_name):
                os.unlink(original_name)
            elif os.path.isfile(original_name):
                os.remove(original_name)
            os.link(data_file.name, original_name)
            data_file.name = original_name  # type: ignore[misc]

        open_file_for_read.close()

        return data_file
Example 8
    def __get_source(self) -> None:
        """Load the source data for the task (sql, smb, sftp, ftp, or ssh)."""

        if self.task.source_type_id == 1:  # sql

            external_db = self.task.source_database_conn
            try:
                RunnerLog(self.task, self.run_id, 8, "Loading query...")
                query = self.__get_query()
            except BaseException as e:
                raise RunnerException(self.task, self.run_id, 8,
                                      f"Failed to load query.\n{e}")

            RunnerLog(self.task, self.run_id, 8,
                      "Starting query run, waiting for results...")

            if external_db.database_type.id == 1:  # postgres
                try:
                    self.query_output_size, self.source_files = Postgres(
                        task=self.task,
                        run_id=self.run_id,
                        connection=em_decrypt(external_db.connection_string,
                                              app.config["PASS_KEY"]),
                        timeout=external_db.timeout
                        or app.config["DEFAULT_SQL_TIMEOUT"],
                        directory=self.temp_path,
                    ).run(query)

                except ValueError as message:
                    raise RunnerException(self.task, self.run_id, 21, message)

                except BaseException as message:
                    raise RunnerException(self.task, self.run_id, 21,
                                          f"Failed to run query.\n{message}")

            elif external_db.database_type.id == 2:  # mssql
                try:
                    self.query_output_size, self.source_files = SqlServer(
                        task=self.task,
                        run_id=self.run_id,
                        connection=em_decrypt(external_db.connection_string,
                                              app.config["PASS_KEY"]),
                        timeout=external_db.timeout
                        or app.config["DEFAULT_SQL_TIMEOUT"],
                        directory=self.temp_path,
                    ).run(query)

                except ValueError as message:
                    raise RunnerException(self.task, self.run_id, 20, message)

                except BaseException as message:
                    raise RunnerException(self.task, self.run_id, 20,
                                          f"Failed to run query.\n{message}")

            RunnerLog(
                self.task,
                self.run_id,
                8,
                f"Query completed.\nData file {self.source_files[0].name} created. Data size: {file_size(str(Path(self.source_files[0].name).stat().st_size))}.",
            )

        elif self.task.source_type_id == 2:  # smb file
            file_name = self.param_loader.insert_file_params(
                self.task.source_smb_file)
            file_name = DateParsing(
                task=self.task,
                run_id=self.run_id,
                date_string=file_name,
            ).string_to_date()

            self.source_files = Smb(
                task=self.task,
                run_id=self.run_id,
                connection=self.task.source_smb_conn,
                directory=self.temp_path,
            ).read(file_name=file_name)

        elif self.task.source_type_id == 3:  # sftp file
            RunnerLog(self.task, self.run_id, 9, "Loading data from server...")
            file_name = self.param_loader.insert_file_params(
                self.task.source_sftp_file)
            file_name = DateParsing(
                task=self.task,
                run_id=self.run_id,
                date_string=file_name,
            ).string_to_date()

            self.source_files = Sftp(
                task=self.task,
                run_id=self.run_id,
                connection=self.task.source_sftp_conn,
                directory=self.temp_path,
            ).read(file_name=file_name)

        elif self.task.source_type_id == 4:  # ftp file
            RunnerLog(self.task, self.run_id, 13,
                      "Loading data from server...")
            file_name = self.param_loader.insert_file_params(
                self.task.source_ftp_file)
            file_name = DateParsing(
                task=self.task,
                run_id=self.run_id,
                date_string=file_name,
            ).string_to_date()

            self.source_files = Ftp(
                task=self.task,
                run_id=self.run_id,
                connection=self.task.source_ftp_conn,
                directory=self.temp_path,
            ).read(file_name=file_name)

        elif self.task.source_type_id == 6:  # ssh command
            query = self.__get_query()

            Ssh(
                task=self.task,
                run_id=self.run_id,
                connection=self.task.source_ssh_conn,
                command=query,
            ).run()
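Example 8 branches on task.source_type_id (1 sql, 2 smb file, 3 sftp file, 4 ftp file, 6 ssh command). Purely as an illustration of that dispatch, the same routing could be sketched with a mapping of type ids to handlers; the _load_* names below are hypothetical stand-ins for the branches above, not project code.

# hedged sketch (inside the runner class, so self.task is available):
# dispatch source loading by type id instead of an if/elif chain
SOURCE_HANDLERS = {
    1: _load_sql_source,   # query an external database
    2: _load_smb_source,   # read a file from an smb share
    3: _load_sftp_source,  # read a file over sftp
    4: _load_ftp_source,   # read a file over ftp
    6: _run_ssh_source,    # run a remote ssh command
}

handler = SOURCE_HANDLERS.get(self.task.source_type_id)
if handler is not None:
    handler()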
Example 9
    def save(self) -> Tuple[str, str, str]:
        """Create and save the file.

        returns [filename, filepath, file hash] of the final file.
        """
        if (self.task.destination_file_name is None
                or self.task.destination_file_name == ""):
            RunnerLog(
                self.task,
                self.run_id,
                11,
                f"No filename specified, {Path(self.data_file.name).name} will be used.",
            )

        if (self.task.destination_file_name != ""
                and self.task.destination_file_name is not None):

            # insert params
            self.file_name = self.params.insert_file_params(
                self.task.destination_file_name.strip())

            # parse python dates
            self.file_name = DateParsing(self.task, self.run_id,
                                         self.file_name).string_to_date()

        else:
            self.file_name = Path(self.data_file.name).name

        # 4 is other
        if self.task.destination_file_type_id != 4 and self.task.file_type is not None:
            self.file_name += "." + (self.task.file_type.ext or "csv")

        self.file_path = str(Path(self.base_path).joinpath(self.file_name))

        # if the source name matches the destination name, rename the source and update tmp file name.
        if self.data_file.name == self.file_path:
            data_file_as_path = Path(self.data_file.name)
            new_data_file_name = str(
                data_file_as_path.parent /
                (data_file_as_path.stem + "_tmp" + data_file_as_path.suffix))
            os.rename(self.data_file.name, new_data_file_name)
            self.data_file.name = new_data_file_name  # type: ignore[misc]

        with open(self.data_file.name, "r", newline="") as data_file:
            reader = csv.reader(data_file)

            with open(self.file_path, mode="w") as myfile:
                # if csv (1), text (2), or other (4) and the delimiter is not ignored

                if (self.task.destination_file_type_id in (1, 2, 4)
                        and self.task.destination_ignore_delimiter != 1):
                    # use the custom delimiter for txt (2) or other (4) files,
                    # otherwise fall back to a plain csv writer
                    if (self.task.destination_file_delimiter
                            and self.task.destination_file_type_id in (2, 4)):
                        wrtr = csv.writer(
                            myfile,
                            delimiter=str(self.task.destination_file_delimiter)
                            .encode("utf-8").decode("unicode_escape"),
                            quoting=self.__quote_level(),
                        )
                    else:
                        wrtr = csv.writer(
                            myfile,
                            quoting=self.__quote_level(),
                        )

                    for row in reader:
                        new_row = [
                            x.strip('"').strip("'") if isinstance(x, str) else x
                            for x in row
                        ]

                        # append the custom line terminator when one is set
                        if (self.task.destination_file_type_id in (1, 2, 4)
                                and self.task.destination_file_line_terminator):
                            new_row.append(
                                self.task.destination_file_line_terminator)

                        wrtr.writerow(new_row)

                # if xlsx (3)
                elif self.task.destination_file_type_id == 3:
                    wrtr = csv.writer(
                        myfile,
                        dialect="excel",
                        quoting=self.__quote_level(),
                    )
                    for row in reader:
                        new_row = [(x.strip('"').strip("'") if isinstance(
                            x, str) else x) for x in row]
                        wrtr.writerow(new_row)

                else:
                    for line in data_file:
                        myfile.write(line)

        RunnerLog(
            self.task,
            self.run_id,
            11,
            f"File {self.file_name} created. Size: {file_size(Path(self.file_path).stat().st_size)}.\n{self.file_path}",
        )

        # encrypt file
        if self.task.file_gpg == 1:
            gpg = gnupg.GPG("/usr/local/bin/gpg")

            # import the key
            keychain = gpg.import_keys(
                em_decrypt(self.task.file_gpg_conn.key,
                           app.config["PASS_KEY"]))

            # set it to trusted
            gpg.trust_keys(keychain.fingerprints, "TRUST_ULTIMATE")

            # encrypt file
            with open(self.file_path, "rb") as my_file:
                encrypt_status = gpg.encrypt_file(
                    file=my_file,
                    recipients=keychain.fingerprints,
                    output=self.file_path + ".gpg",
                )

            # remove key
            gpg.delete_keys(keychain.fingerprints)

            # fail the run if encryption did not succeed
            if not encrypt_status.ok:
                raise RunnerException(
                    self.task,
                    self.run_id,
                    11,
                    "File failed to encrypt.\n%s\n%s\n%s" % (
                        self.file_path,
                        encrypt_status.status,
                        encrypt_status.stderr,
                    ),
                )

            self.file_path = self.file_path + ".gpg"
            self.file_name = self.file_name + ".gpg"

            RunnerLog(
                self.task,
                self.run_id,
                11,
                "File encrypted.\n%s\n%s\n%s" %
                (self.file_path, encrypt_status.status, encrypt_status.stderr),
            )

        # get the file hash (after encrypting)
        with open(self.file_path, "rb") as my_file:
            while True:
                chunk = my_file.read(8192)
                if not chunk:
                    break
                self.file_hash.update(chunk)

        RunnerLog(self.task, self.run_id, 11,
                  f"File md5 hash: {self.file_hash.hexdigest()}")

        # create zip
        if self.task.destination_create_zip == 1:

            self.zip_name = DateParsing(
                self.task, self.run_id,
                str(self.task.destination_zip_name)).string_to_date()

            # parse params
            self.zip_name = self.params.insert_file_params(self.zip_name)

            self.zip_name = self.zip_name.replace(".zip", "") + ".zip"

            with zipfile.ZipFile(
                    str(Path(self.base_path).joinpath(self.zip_name)),
                    "w") as zip_file:
                zip_file.write(
                    self.file_path,
                    compress_type=zipfile.ZIP_DEFLATED,
                    arcname=self.file_name,
                )

            # point the file name and path at the zip archive

            self.file_name = self.zip_name
            self.file_path = str(Path(self.base_path).joinpath(self.zip_name))

            RunnerLog(self.task, self.run_id, 11,
                      f"ZIP archive created.\n{self.file_path}")

        return self.file_name, self.file_path, self.file_hash.hexdigest()
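Example 9 calls self.__quote_level(), which is not shown in this listing. A hypothetical sketch of what such a helper might look like: mapping a task-level quoting setting to the csv module's quoting constants (the id-to-constant mapping is an assumption, not taken from the project).

import csv

# hedged sketch of a quote-level helper; the mapping below is assumed
def quote_level(quote_level_id: int) -> int:
    """Map a task quote level id to a csv quoting constant."""
    return {
        1: csv.QUOTE_ALL,
        2: csv.QUOTE_MINIMAL,
        3: csv.QUOTE_NONE,
    }.get(quote_level_id, csv.QUOTE_MINIMAL)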