Example #1
0
    def find_keys(path):
        """Return the contents of every ``key.txt`` found under *path*.

        Recursively searches *path* (made absolute first) for files named
        ``key.txt`` via ``find_files``, reads each one, and returns the
        stripped file contents as a list of strings.
        """
        root = os.path.abspath(path)
        keys = []
        # `open` replaces the Python-2-only `file` builtin; a distinct
        # loop variable avoids shadowing the `path` parameter.
        for key_path in find_files(root, file_name='key.txt'):
            with open(key_path, 'r') as fh:
                keys.append(fh.read().strip())

        return keys
Example #2
0
    def add_upload(self, path):
        """Queue a file or directory for upload.

        The given path is ~-expanded and made absolute. If it names a
        directory, every file beneath it (recursively) is uploaded;
        otherwise the single file is uploaded. One ``UploadThread`` is
        created and started per file and tracked in ``self.uploads``.
        """

        # normalize: expand ~ and make absolute
        target = os.path.abspath(os.path.expanduser(path))

        log.debug('adding upload: %s' % target)

        # a directory is searched recursively; a file becomes a
        # one-element list so the upload loop below is uniform
        if os.path.isdir(target):
            log.debug('path is dir')
            to_send = find_files(target)
        else:
            log.debug('path is file')
            to_send = [target]

        log.debug('files: %s' % to_send)

        # spin up one upload thread per file and remember it
        for file_path in to_send:
            log.debug('creating upload for: %s' % file_path)
            worker = UploadThread(file_path, self.host, self.port)
            log.debug('starting upload')
            worker.start()
            self.uploads.append(worker)
Example #3
0
    def add_upload(self, path):
        """Queue a file or directory for upload.

        The path is ~-expanded and made absolute. If it is a directory,
        each file in it is uploaded (recursively); otherwise the single
        file is uploaded. One ``UploadThread`` is created and started per
        file and appended to ``self.uploads`` for tracking.

        :param path: file or directory path to upload
        """

        # get our abs path
        path = os.path.abspath(os.path.expanduser(path))

        log.debug('adding upload: %s' % path)

        # if it's a directory then we want to search it recursively
        if os.path.isdir(path):

            log.debug('path is dir')

            # find all the files recursively
            files = find_files(path)

        else:

            log.debug('path is file')

            # it's a single file, still want a list so the loop below
            # handles both cases uniformly
            files = [path]

        log.debug('files: %s' % files)

        # go through all our files, starting uploads
        for file_path in files:

            log.debug('creating upload for: %s' % file_path)

            # create our upload thread
            upload = UploadThread(file_path, self.host, self.port)

            log.debug('starting upload')

            # start it going (uploads run concurrently)
            upload.start()

            # add it to our tracking list
            self.uploads.append(upload)
Example #4
0
    def queue_mail(self, to, subject, body, attachment_paths=None):
        """
        Queue an email message for sending.

        :param to: recipient address
        :param subject: message subject line
        :param body: message body text
        :param attachment_paths: optional iterable of file or directory
            paths. A directory is zipped (recursively) in memory and
            attached as ``<name>.zip`` under the ``'data'`` key; a plain
            file is zlib-compressed and attached under the ``'gzip_data'``
            key. Both payloads are base64 encoded.

        The assembled message dict is pushed onto the ``'email'`` queue.
        """

        # compile our data to send to the queue
        msg_data = {
            'to': to,
            'subject': subject,
            'body': body
        }

        log.debug('base message: %s' % msg_data)
        log.debug('adding attachments')

        # create entries for our attachments; guard against the default
        # None (the previous code crashed with a TypeError when no
        # attachment paths were supplied)
        attachments = []
        for path in attachment_paths or []:

            log.debug('attachment path: %s' % path)

            # get its name (strip a trailing slash so basename works
            # for directories too)
            if path.endswith('/'):
                path = path[:-1]  # it's a dir
            name = os.path.basename(path)

            log.debug('attachment name: %s' % name)

            # get the full path
            path = os.path.abspath(os.path.expanduser(path))

            log.debug('attachment full path: %s' % path)

            # if it's a dir then we zip its contents in memory
            if os.path.isdir(path):

                log.debug('attachment is dir')

                # in-memory buffer for the zip
                # NOTE(review): on Python 3 this must be io.BytesIO —
                # confirm which interpreter this codebase targets
                zip_fh = StringIO()

                log.debug('creating our zip')

                # create our new zip
                _zip = zipfile.ZipFile(zip_fh, 'w')

                log.debug('going through files')

                # go through all the files in the dir, recursively
                for file_path in find_files(path):

                    log.debug('file path: %s' % file_path)

                    # use its path relative to the attachment dir as its
                    # name inside the zip
                    rel_file_path = file_path[len(path):]

                    # make sure it starts w/ a slash
                    if not rel_file_path.startswith('/'):
                        rel_file_path = '/%s' % rel_file_path

                    log.debug('rel file path: %s' % rel_file_path)
                    log.debug('adding file data to zip')

                    # write the data to the zipfile (`open` replaces the
                    # Python-2-only `file` builtin)
                    with open(file_path, 'rb') as fh:
                        _zip.writestr(rel_file_path, fh.read())

                # the attachment name needs to end in .zip
                name = '%s.zip' % name

                log.debug('updating zipped files created flag')

                # Mark the files as having been created on Windows so that
                # Unix permissions are not inferred as 0000
                for zfile in _zip.filelist:
                    zfile.create_system = 0

                log.debug('reading zip from memory')

                # close the zip first (this flushes the central directory)
                # then pull the finished bytes out of the buffer
                _zip.close()
                data = zip_fh.getvalue()
                zip_fh.close()

                # (removed: leftover DEBUG dump that wrote ./out.zip into
                # the current working directory on every call)

                # encode our data
                data = b64encode(data)

                log.debug('adding to attachments list')

                # add it to our attachment list
                attachments.append({
                    'name': name,
                    'data': data
                })

            # it's not a dir, just a single file
            else:

                log.debug('reading attachment file')

                # read in binary mode so binary attachments aren't
                # mangled (the previous code opened in text mode)
                with open(path, 'rb') as fh:

                    log.debug('reading data')

                    # read the data
                    data = fh.read()

                    # compress it
                    # NOTE(review): zlib output is NOT gzip format despite
                    # the 'gzip_data' key — verify what the consumer expects
                    data = zlib.compress(data)

                    # encode our data
                    data = b64encode(data)

                    log.debug('adding to attachment list')

                    # add the data to our list
                    attachments.append({
                        'name': name,
                        'gzip_data': data
                    })

        # if we have any attachments, add them to the message
        if attachments:
            log.debug('adding attachments to message')
            msg_data['attachments'] = attachments

        log.debug('sending message to queue')

        # add it to the queue
        self.queue.send_message('email', msg_data)
Example #5
0
    print 'reading old snapshot'

    # read in the old snapshot of the remote bucket's state; its filename
    # is derived from the bucket name with slashes made filesystem-safe
    snapshot_path = 'snapshot.%s.json' % remote_bucket.replace('/', '_')
    if os.path.exists(snapshot_path):
        # NOTE(review): `file` is the Python-2-only builtin; switch to
        # `open` if this ever moves to Python 3
        with file(snapshot_path, 'r') as fh:
            old_snapshot = json.loads(fh.read())
    else:
        print 'old snapshot not found'
        old_snapshot = {}

    print

    # go through all the files recursively
    known_folders = []
    for file_path in find_files(local_bucket):

        # get some info on our to-upload file
        # (presumably size/mtime are compared further down — TODO confirm,
        # the rest of the loop body is not shown here)
        stats = os.stat(file_path)

        # where are we uploading it to? path relative to the local bucket
        # root, with the leading separator dropped (+1)
        rel_path = file_path[len(local_bucket) + 1:]

        print 'attempting upload uploading: %s' % rel_path

        # glean some info about our remote file; sftp.stat raising IOError
        # is how we learn the file is missing remotely
        try:
            remote_stats = sftp.stat(rel_path)
        except IOError:

            print 'does not exist remotely'
Example #6
0
    print 'reading old snapshot'

    # read in the old snapshot of the remote bucket's state; its filename
    # is derived from the bucket name with slashes made filesystem-safe
    snapshot_path = 'snapshot.%s.json' % remote_bucket.replace('/','_')
    if os.path.exists(snapshot_path):
        # NOTE(review): `file` is the Python-2-only builtin; switch to
        # `open` if this ever moves to Python 3
        with file(snapshot_path,'r') as fh:
            old_snapshot = json.loads(fh.read())
    else:
        print 'old snapshot not found'
        old_snapshot = {}

    print

    # go through all the files recursively
    known_folders = []
    for file_path in find_files(local_bucket):

        # get some info on our to-upload file
        # (presumably size/mtime are compared further down — TODO confirm,
        # the rest of the loop body is not shown here)
        stats = os.stat(file_path)

        # where are we uploading it to? path relative to the local bucket
        # root, with the leading separator dropped (+1)
        rel_path = file_path[len(local_bucket)+1:]
        
        print 'attempting upload uploading: %s' % rel_path

        # glean some info about our remote file; sftp.stat raising IOError
        # is how we learn the file is missing remotely
        try:
            remote_stats = sftp.stat(rel_path)
        except IOError:

            print 'does not exist remotely'