Example #1
async def process_data_source(s3, source: Dict, target: AnyStr):
    start = time.time()
    ftp = FTP(**source)
    ftp_files = ftp.list_dir(source.get("ftp_dir"))
    LOG.debug(f"All ftp files - {ftp_files}")
    current_s3_files = s3.check_state(target)
    LOG.debug(f"Files on S3 - {current_s3_files}")

    files_to_sync = []
    for file in ftp_files:
        if file not in current_s3_files:
            files_to_sync.append(file)
            LOG.debug(f"New file - {file.name}")
        else:
            if file.size > current_s3_files[current_s3_files.index(file)].size \
                    or file.mdate > current_s3_files[current_s3_files.index(file)].mdate:
                files_to_sync.append(file)
                LOG.debug(f"File with changed timestamp/size - {file.name}")
            else:
                LOG.debug(f"File identical to existing - {file.name}")

    for file in files_to_sync:
        await sync_file(s3, file, ftp, target)

    s3.save_state(target, ftp_files)

    LOG.debug(
        f"Finished processing {source} in {round(time.time() - start)} seconds."
    )
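A minimal usage sketch for the coroutine above. The stub S3 helper and the connection values are hypothetical; only the check_state/save_state interface already used in the example is assumed:

import asyncio

class _StubS3:
    """Placeholder for the real S3 helper; it only needs check_state/save_state."""
    def check_state(self, target):
        return []

    def save_state(self, target, files):
        pass

# Hypothetical FTP connection settings; ftp_dir is read back via source.get("ftp_dir").
source = {"host": "ftp.example.com", "user": "sync", "passwd": "secret",
          "ftp_dir": "/exports"}

asyncio.run(process_data_source(_StubS3(), source, "exports/"))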
Example #2
    def __init__(self):
        self.sql = MSSQL()
        self.ftp = FTP()
        self.localdir = "files"
        self.remotedir = "seis"
        self.table_prefix = "SEIS"
        self.schools = self.ftp.get_directory_names(self.remotedir)
Example #3
def main():
    parser = argparse.ArgumentParser(
        usage='{} [OPTIONS]'.format(os.path.basename(sys.argv[0])),
        description='FTP client. Uses passive ASCII mode and port 21 by '
        'default.')
    parser.add_argument('address', help='address to connect to')
    parser.add_argument('port',
                        help='port',
                        nargs='?',
                        type=int,
                        default=FTP_PORT)
    parser.add_argument('-e',
                        '--encoding',
                        type=str,
                        help="Choose server's "
                        "encoding")
    parser.add_argument('--active',
                        dest='active',
                        action='store_true',
                        help='use active mode')

    args = parser.parse_args()
    if args.encoding:
        ftp.ENCODING = args.encoding
    con = FTP(args.address, args.port, args.active)
    print(con.connect())
    con.run_batch(download_func=download_batch, load_func=load_batch)
Example #4
    def init_ftp(self, path):
        conf = config().get_cfg(path, 'ftp.conf')
        try:
            self.ftp = FTP(conf['host'], conf['port'], conf['user'],
                           conf['password'], conf['path'], conf['local_path'],
                           conf['protocol'])
        except Exception as e:
            raise e
Example #5
    def bruteforce(self, target, port, method, passlist, username):

        parser = Passlist(passlist)
        passwords = parser.get_list()

        if method in ("http", "https"):
            attack = Basic_Auth()
        elif method == "ssh":
            attack = SSH()
        elif method == "ftp":
            attack = FTP()

        attack.brute_force(target, port, username, passwords, method)
Example #6
def main():
    try:
        school_year = os.getenv("CURRENT_SCHOOL_YEAR")
        conn = MSSQL()
        ftp = FTP()

        ftp.archive_remote_files(SOURCEDIR)
        ftp.delete_old_archive_files(SOURCEDIR)

        api_suffixes = os.getenv("API_SUFFIXES").split(",")
        API(api_suffixes).request_reports()
        if int(os.getenv("DELETE_LOCAL_FILES")):
            delete_data_files(LOCALDIR)
        files = download_from_ftp(ftp)

        process_application_data(conn, files, school_year)

        process_change_tracking(conn)

        if args.targets:
            sync_enrollment_targets(conn, school_year)
            conn.exec_sproc("sproc_SchoolMint_LoadTargetsWide")
            conn.exec_sproc("sproc_Schoolmint_create_intercepts")
            conn.exec_sproc("sproc_Schoolmint_load_Fact_PM")

        process_fact_daily_status(conn)

        success_message = read_logs("app.log")
        mailer = Mailer()
        mailer.notify(results=success_message)

    except Exception as e:
        logging.exception(e)
        stack_trace = traceback.format_exc()
        mailer = Mailer()
        mailer.notify(success=False, error_message=stack_trace)
Example #7
    def __init__(self, address=None, port=None, passive=True):
        self.__ftp = FTP(address, port, passive)
Example #8
class FTPAdapter:
    def __init__(self, address=None, port=None, passive=True):
        self.__ftp = FTP(address, port, passive)

    def login(self, username, password):
        return self.__ftp.login(username, password)

    def connect(self, address=None, port=None):
        return self.__ftp.connect(address, port)

    def get_files_list(self):
        files = self.__ftp.ftp_nlst().split('\r\n')

        for item in ["", ".", ".."]:
            if item in files:
                files.remove(item)

        return files

    def check_if_file(self, path):
        try:
            self.__ftp.ftp_cwd(path)
            self.__ftp.ftp_cwd("..")
            return False
        except PermanentError:
            return True

    def delete_file(self, path):
        return self.__ftp.ftp_dele(path)

    def delete_folder(self, path):
        return self.__ftp.ftp_rmd(path)

    def get_last_modified_time(self, path):
        return self.__ftp.ftp_mdtm(path)

    def get_size(self, path):
        return self.__ftp.ftp_size(path).strip('\n')

    def rename(self, old_path, new_path):
        self.__ftp.ftp_rnfr(old_path)
        return self.__ftp.ftp_rnto(new_path)

    def make_new_folder(self, path):
        return self.__ftp.ftp_mkd(path)

    def change_folder(self, path):
        return self.__ftp.ftp_cwd(path)

    def get_data(self, server_path, local_path=None,
                 output_func=None, download_func=None):
        return self.__ftp.ftp_retr(server_path, local_path,
                                   output_func, download_func)

    def store_data(self, local_path, server_path=None,
                   load_func=None, output_func=None):
        return self.__ftp.ftp_stor(local_path, server_path,
                                   load_func, output_func)
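A short usage sketch for the adapter above. The host, credentials, and file names are hypothetical; only methods defined on FTPAdapter are called:

adapter = FTPAdapter('ftp.example.com', 21)      # hypothetical host and port
print(adapter.connect())
print(adapter.login('user', 'secret'))           # hypothetical credentials

for name in adapter.get_files_list():
    if adapter.check_if_file(name):
        print(name, adapter.get_size(name), adapter.get_last_modified_time(name))

adapter.get_data('report.csv', 'report.csv')     # download a file (hypothetical name)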
Example #9
con_ = False
login_ = False

while True:

    while con_ == False:
        ip_port = raw_input('input (IP:port) : ')
        ip_port_list = ip_port.split(':')
        try:
            ip = ip_port_list[0]
            port = ip_port_list[1]
        except IndexError:
            print 'Illegal input! Use the IP:port format.'
            continue
        myftp = FTP(ip, port)
        if myftp.Connect() == False:
            print "Error, check the target IP."
            continue
        else:
            con_ = True

    while login_ == False:
        username = raw_input('Username: ')
        passwd = raw_input('Password: ')
        if myftp.Login(username, passwd) == False:
            continue
        else:
            login_ = True

    while True:
        pass  # interactive command loop continues in the original source (truncated here)
Example #10
class TestWithStubServer(unittest.TestCase):
    def setUp(self):
        self.server = FTPStubServer(0)
        self.server.run()
        self.port = self.server.server.server_address[1]
        self.ftp = FTP()
        self.ftp.connect('localhost', self.port)

    def tearDown(self):
        self.ftp.ftp_quit()
        self.server.stop()

    def test_list(self):
        fileA = "A.png"
        fileB = "B.png"
        self.server.add_file(fileA, "")
        self.server.add_file(fileB, "asd")
        listing = self.ftp.ftp_list()
        self.assertEqual(listing, fileA + '\n' + fileB)

    # Sockets unclosed
    def test_retr(self):
        fileB = "B.png"
        self.server.add_file(fileB, "asd")
        temp = tempfile.NamedTemporaryFile(delete=False)
        with mock.patch.object(self.ftp, 'ftp_size', return_value=12345):
            self.ftp.ftp_retr(fileB, temp.name, download_func=download_batch)
        with open(temp.name, 'r') as file:
            data = file.read()
        self.assertEqual(data, "asd")
        temp.close()

    def test_pasv(self):
        reply = self.ftp.ftp_pasv()
        self.assertEqual(reply.startswith('227 Entering Passive Mode'), True)

    def test_cwd_pwd(self):

        dir_name = "new_dir"
        expected = '257 "' + dir_name + '" is your current location' + '\r\n'
        self.ftp.ftp_cwd(dir_name)
        self.assertEqual(self.ftp.ftp_pwd(), expected)

    def test_welcome(self):
        value = '220 (FtpStubServer 0.1a)\r\n'
        self.assertEqual(self.ftp.welcome, value)

    # def test_error(self):
    #     text = '530 Please login with USER and PASS'
    #     with mock.patch.object(self.ftp, '_FTP__get_full_reply',
    #                            return_value=text):
    #         with self.assertRaises(PermanentError):
    #             self.ftp.ftp_list()

    def test_extract_file_name(self):
        fn = self.ftp._FTP__get_filename(os.path.join("C", "test.txt"))
        self.assertEqual("test.txt", fn)
        with self.assertRaises(Exception):
            path = self.ftp._FTP__get_filename(os.getcwd())

    def test_size(self):
        size = "123"
        response = "213 " + size
        with mock.patch.object(self.ftp, 'send', return_value=response):
            self.assertEqual(size, self.ftp.ftp_size("asd"))

    def test_type(self):
        self.ftp.ftp_type("A")
        self.assertFalse(self.ftp.binary)
        self.ftp.ftp_type("I")
        self.assertTrue(self.ftp.binary)
        with self.assertRaises(Exception):
            self.ftp.ftp_type("E")
Example #11
class Connector:
    """ETL connector class"""
    def __init__(self):
        self.data_dir = "data"
        self.sql = MSSQL()
        self.ftp = FTP(self.data_dir)

    def sync_all_ftp_data(self):
        for table_name, directory_name in data_reports.items():
            self.ftp.download_files(directory_name)
            self._load_new_records_into_table(table_name, directory_name)

    def _load_new_records_into_table(self, table_name, report_name):
        """Find and insert new records into the data warehouse."""
        start_date = self._get_latest_date(table_name) + timedelta(days=1)
        yesterday = datetime.today() - timedelta(days=1)
        if start_date > yesterday:
            logging.info(
                f"Clever_{table_name} is up to date. No records inserted.")
            return
        else:
            file_names = self._generate_file_names(start_date, yesterday,
                                                   report_name)
            df = self._read_and_concat_files(file_names)
            self.sql.insert_into(f"Clever_{table_name}",
                                 df,
                                 if_exists="append")
            logging.info(
                f"Inserted {len(df)} records into Clever_{table_name}.")

    def _get_latest_date(self, table_name):
        """Get the latest date record in this table."""
        date = self.sql.query(
            f"SELECT TOP(1) [date] FROM custom.Clever_{table_name} ORDER BY [date] DESC"
        )
        latest_date = date["date"][0]
        return datetime.strptime(latest_date, "%Y-%m-%d")

    def _generate_file_names(self, start_date, yesterday, report_name):
        file_names = []
        while start_date <= yesterday:  # iterate day by day up to and including yesterday
            formatted_date = start_date.strftime("%Y-%m-%d")
            file_names.append(f"{formatted_date}-{report_name}-students.csv")
            start_date += timedelta(days=1)
        return file_names

    def _read_and_concat_files(self, file_names):
        dfs = []
        for file_name in file_names:
            df = pd.read_csv(f"{self.data_dir}/{file_name}")
            logging.info(f"Read {len(df)} records from '{file_name}'.")
            dfs.append(df)
        data = pd.concat(dfs)
        return data

    def sync_student_google_accounts(self):
        """Get student emails from Google Accounts Manager app."""
        browser = Browser(self.data_dir)
        browser.export_student_google_accounts()
        # Transform and load csv data into database table
        df = self._get_data_from_csv_by_name("Student_export")
        df.rename(columns={"ID": "SIS_ID"}, inplace=True)
        self.sql.insert_into("Clever_StudentGoogleAccounts",
                             df,
                             if_exists="replace")
        logging.info(
            f"Inserted {len(df)} new records into Clever_StudentGoogleAccounts."
        )

    def _get_data_from_csv_by_name(self, string_to_match):
        """Get the downloaded csv BY NAME and store it in a dataframe."""
        file_path = None
        for filename in os.listdir(self.data_dir):
            if fnmatch(filename, f"*{string_to_match}*"):
                file_path = f"{self.data_dir}/{filename}"
                break
        if file_path is None:
            raise FileNotFoundError(
                f"No file matching '*{string_to_match}*' was found in {self.data_dir}.")
        df = pd.read_csv(file_path)
        logging.info(f"Loaded {len(df)} records from downloaded file.")
        return df
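The data_reports mapping used by sync_all_ftp_data is not part of this excerpt; judging from how it is iterated, it maps warehouse table names to FTP directory names, for example (hypothetical values):

data_reports = {
    "DailyParticipation": "daily-participation",
    "ResourceUsage": "resource-usage",
}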
Example #12
class Connector:
    """
    Data connector for Extracting data, Transforming into dataframes,
    and Loading into a database.
    """
    def __init__(self):
        self.sql = MSSQL()
        self.ftp = FTP()
        self.localdir = "files"
        self.remotedir = "seis"
        self.table_prefix = "SEIS"
        self.schools = self.ftp.get_directory_names(self.remotedir)

    def remove_local_files(self):
        """Remove any leftover files from local project directory."""
        filelist = [f for f in os.listdir(self.localdir)]
        for filename in filelist:
            if "gitkeep" not in filename:
                os.remove(os.path.join(self.localdir, filename))

    def get_files_from_ftp(self):
        """Loop through all sub-folders and download files from FTP."""
        self.remove_local_files()
        self.ftp.download_all(self.remotedir, self.localdir)
        self.filenames = [
            f for f in os.listdir(self.localdir) if f.endswith(".csv")
        ]
        logging.info(f"{len(self.filenames)} files downloaded.")

    def read_files_into_df(self, file_name):
        """
        Given the file name (e.g. Student or Service), read the matching files and concatenate them into one DataFrame.

        Params:
            file_name (str): name of the file type that you are trying to combine.

        Return:
            DataFrame: combined data from all files with the same name (i.e. the same type of data).
        """
        dfs = []
        for school in self.schools:
            path = os.path.join(self.localdir, f"{school}_{file_name}.csv")
            df = pd.read_csv(path,
                             sep=",",
                             quotechar='"',
                             doublequote=True,
                             dtype=str,
                             header=0)
            dfs.append(df)
        merged = pd.concat(dfs)
        merged.replace(np.nan, "", regex=True, inplace=True)
        return merged

    def insert_df_into_db(self, df, table_name):
        """
        Insert DataFrame into database with given table name.

        Params:
            df (DataFrame): data to insert into the database.
            table_name (str): name of the database table that you want to update.

        Return:
            none
        """
        table = f"{self.table_prefix}_{table_name}"
        self.sql.insert_into(table, df, if_exists="replace")
        logging.info(f"Inserted {len(df)} records into {table}.")
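A short usage sketch tying the methods above together; "Student" follows the docstring's own example, and the rest uses only what the class defines:

connector = Connector()
connector.get_files_from_ftp()                       # download all CSVs from the FTP sub-folders
students = connector.read_files_into_df("Student")   # combine the <school>_Student.csv files
connector.insert_df_into_db(students, "Student")     # load into the SEIS_Student table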
Example #13
#!/usr/bin/env python


from ftp import FTP

import sys, getpass, os.path

host, username, localfile, remotepath = sys.argv[1:]
password = getpass.getpass("Enter password for %s on %s: " % (username, host))

f = FTP(host)
f.login(username, password)
f.cwd(remotepath)
with open(localfile, 'rb') as fd:
    f.storbinary('STOR %s' % os.path.basename(localfile), fd)

f.quit()
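A download counterpart would be symmetric. The sketch below assumes the custom FTP class also mirrors ftplib's retrbinary, which is not shown in the excerpt, so treat that call as an assumption:

from ftp import FTP

import sys, getpass, os.path

host, username, remotefile, localdir = sys.argv[1:]
password = getpass.getpass("Enter password for %s on %s: " % (username, host))

f = FTP(host)
f.login(username, password)
# retrbinary is assumed by analogy with ftplib; the excerpt above only shows storbinary.
with open(os.path.join(localdir, os.path.basename(remotefile)), 'wb') as fd:
    f.retrbinary('RETR %s' % remotefile, fd.write)
f.quit()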
Example #14
class Connector:
    """ETL connector class"""
    def __init__(self):
        self.data_dir = "data"
        self.sql = MSSQL()
        self.ftp = FTP(self.data_dir)

    def sync_all_ftp_data(self):
        for table_name, directory_name in data_reports.items():
            self.ftp.download_files(directory_name)
            self._load_new_records_into_table(table_name, directory_name)

    def _load_new_records_into_table(self, table_name, report_name):
        """Find and insert new records into the data warehouse."""
        if report_name == "idm-reports":
            # this folder contains student emails file, which has no datestamp in the file name
            self._process_files_without_datestamp(table_name, report_name)
        else:
            self._process_files_with_datestamp(table_name, report_name)

    def _process_files_without_datestamp(self, table_name, report_name):
        # Student Emails file doesn't contain a datestamp in the file name
        # This table should be truncated and replaced.
        df = self._read_file(f"{self.data_dir}/google-student-emails.csv")
        self.sql.insert_into(f"Clever_{table_name}", df, if_exists="replace")
        logging.info(f"Inserted {len(df)} records into Clever_{table_name}.")

    def _process_files_with_datestamp(self, table_name, report_name):
        # Generate names for files with datestamps in the file name and process those files
        # These tables should be appended to, not truncated.
        start_date = self._get_latest_date(table_name) + timedelta(days=1)
        yesterday = datetime.today() - timedelta(days=1)
        if start_date > yesterday:
            logging.info(
                f"Clever_{table_name} is up to date. No records inserted.")
            return
        else:
            file_names = self._generate_file_names(start_date, yesterday,
                                                   report_name)
            df = self._read_and_concat_files(file_names)
            self.sql.insert_into(f"Clever_{table_name}",
                                 df,
                                 if_exists="append")
            logging.info(
                f"Inserted {len(df)} records into Clever_{table_name}.")

    def _get_latest_date(self, table_name):
        """Get the latest date record in this table."""
        date = self.sql.query(
            f"SELECT TOP(1) [date] FROM custom.Clever_{table_name} ORDER BY [date] DESC"
        )
        latest_date = date["date"][0]
        return datetime.strptime(latest_date, "%Y-%m-%d")

    def _generate_file_names(self, start_date, yesterday, report_name):
        file_names = []
        while start_date <= yesterday:  # iterate day by day up to and including yesterday
            formatted_date = start_date.strftime("%Y-%m-%d")
            file_names.append(f"{formatted_date}-{report_name}-students.csv")
            start_date += timedelta(days=1)
        return file_names

    def _read_and_concat_files(self, file_names):
        dfs = []
        for file_name in file_names:
            df = pd.read_csv(f"{self.data_dir}/{file_name}")
            logging.info(f"Read {len(df)} records from '{file_name}'.")
            dfs.append(df)
        data = pd.concat(dfs)
        return data

    def _read_file(self, file_name):
        df = pd.read_csv(file_name)
        logging.info(f"Read {len(df)} records from '{file_name}'.")
        return df
Example #15
    def setUp(self):
        self.server = FTPStubServer(0)
        self.server.run()
        self.port = self.server.server.server_address[1]
        self.ftp = FTP()
        self.ftp.connect('localhost', self.port)
Example #16
    def shodan_search(self, args):

        shodan = Shodan_Search()

        if shodan.validateapi():
            results = shodan.search(args.shodan)
            response = raw_input(
                'Do you want to test the Shodan results? Y/N: ').upper().strip()

            if response == "Y":

                parser = Passlist(args.passlist)

                for result in results['matches']:

                    passwords = parser.get_list()

                    try:

                        if args.method is None:

                            if result['port'] == 21:
                                method = "ftp"
                                Attack = FTP()

                            elif result['port'] == 22:
                                method = "ssh"
                                Attack = SSH()

                            elif result['port'] == 443:
                                method = "https"
                                Attack = Basic_Auth()

                            else:
                                method = "http"
                                Attack = Basic_Auth()

                        else:

                            if args.method == "http":
                                method = "http"
                                Attack = Basic_Auth()

                            elif args.method == "https":
                                method = "https"
                                Attack = Basic_Auth()

                            elif args.method == "ftp":
                                method = "ftp"
                                Attack = FTP()

                            elif args.method == "ssh":
                                method = "ssh"
                                Attack = SSH()

                        Attack.brute_force(result['ip_str'], result['port'],
                                           args.username, passwords, method)

                    except Exception:
                        # Skip hosts that fail and move on to the next result.
                        pass

            else:
                sys.exit()

        else:
            sys.exit()
Example #17
    def __init__(self):
        self.data_dir = "data"
        self.sql = MSSQL()
        self.ftp = FTP(self.data_dir)
Example #18
def test():
    logger = log.setup('ftp')
    logger.info("testing with 104.238.181.33:21")
    ftp = FTP("104.238.181.33", 21, "vtta", "***")
    ftp.send("NOOP")
    ftp.recv(200)
    ftp.download("foo")
    ftp.upload("bar")
    ftp.list()
    ftp.send("QUIT")
    ftp.recv(221)