Example #1
def transfer(job, pyinput):
    '''
    :inputs - job is an instance of the Job model
    :       - pyinput is a dictionary that contains the creds under its 'ftp' key.
    :       - Creds - {"url":"ftp.marketosolutionsconsulting.com", "username":"******",
    :                  "password":"******", "path":"rightstack.csv", "csvtype": "standardtable"}
    :This function takes a job and a dictionary and
    :executes the transfer of data to Marketo.
    :Todo - Set different job statuses based on outcomes for debugging in the UI and to
    :improve the overall customer UX.
    '''
    # Pull the FTP credentials out of the input dictionary
    creds = pyinput['ftp']
    # Open a CSV reader over the source file on the FTP server
    reader = Csvreader(creds)
    # Push the CSV data to Marketo using the supplied Marketo creds and field map
    ftptomkto = FtpToMktoTransfer(pyinput['mkto'], pyinput['map'], reader)
    ftptomkto.startTransfer()
    # Close the FTP connection and remove the temporary file
    reader.endconnection()
    reader.delete_file()
    # Mark the job as finished and persist the status change
    job.setStatus('Job Complete')
    db.session.add(job)
    db.session.commit()
    return None
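
A minimal invocation sketch follows; only the 'ftp' creds structure is documented above, so the 'mkto' and 'map' payloads and the way the Job instance is obtained are hypothetical placeholders.

# Hypothetical usage sketch for transfer(); only the 'ftp' creds shape comes from the docstring above.
mkto_creds = {}   # Marketo API credentials (contents not shown in this example)
field_map = {}    # CSV-column-to-Marketo-field mapping (contents not shown in this example)
pyinput = {
    "ftp": {"url": "ftp.marketosolutionsconsulting.com", "username": "******",
            "password": "******", "path": "rightstack.csv", "csvtype": "standardtable"},
    "mkto": mkto_creds,
    "map": field_map,
}
job = Job()               # assumes a SQLAlchemy-style Job model defined elsewhere in the app
transfer(job, pyinput)    # sets the job status to 'Job Complete' and commits it via db.session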
Example #2
'''
This is a test of the following:
1. open an FTP connection
2. grab the file and store it locally (ephemeral Heroku storage)
3. open an S3 connection
4. take the local file and add it to the S3 bucket
5. delete the local file
'''
if __name__ == '__main__':
    from ftpconnector import Csvreader
    from s3connector import S3manipulator

    creds = {"url": "ftp.marketosolutionsconsulting.com", "username": "******",
             "password": "******", "path": "rightstack.csv", "account": "generalmotors"}
    # Create a new Csvreader
    reader = Csvreader(creds)
    # Use the reader's FTP-to-tmp-folder transfer method to copy the CSV locally
    localfilepath = reader.ftpcsv2tmpcsv()
    # Close the reader's connection to FTP
    reader.endconnection()
    # Create a new S3manipulator for this account
    s3 = S3manipulator(creds['account'])
    # Create the bucket
    s3.create_bucket()
    # Store the local file's data in the bucket
    s3.store_data(pathtofile=localfilepath)
    # Delete the temp file
    reader.delete_file()
    # Delete the bucket (left commented out in this test)
    #s3.delete_bucket()