Example #1
    def MoveToS3(self, localFolderName, folderName, subFolder):
        '''
        Move gzip files to S3 and clean up the local instance.
        localFolderName --> local folder name
        folderName --> folder name on S3
        subFolder --> date
        '''
        try:
            self.logger.debug(self.moduleName + " -- " + "MoveToS3 " +
                              localFolderName + " starting ")
            ###
            #  move any gzip files to the s3 server
            ###
            s3folder = ("s3://" + self.job["bucketName"] +
                        self.job["s3GzipFolderBase"] +
                        "/" + folderName + '/' + subFolder)
            localFolder = self.fileUtilities.gzipFolder + localFolderName
            S3Utilities.SyncFolderAWSCli(localFolder,
                                         s3folder,
                                         args='''--quiet --include "*.gz"''',
                                         dbug="Y")
            # Clean up the local files once the sync has completed
            FileUtilities.EmptyFolderContents(localFolder)
            self.logger.debug(self.moduleName + " -- " + "MoveToS3 " +
                              localFolderName + " finished ")
        except Exception:
            self.logger.exception(self.moduleName +
                                  " - we had an error in MoveToS3")
            raise
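
Every example in this section delegates the actual transfer to S3Utilities.SyncFolderAWSCli, a project helper whose implementation is not part of the excerpt. Below is a minimal sketch of what a helper with this signature might do, assuming it simply shells out to the aws s3 sync command; the function name and behavior are illustrative, not the project's real code.

import subprocess

def sync_folder_aws_cli(source, destination, args='', dbug='N'):
    '''Hypothetical stand-in for S3Utilities.SyncFolderAWSCli: mirror a
    local folder to an S3 prefix (or vice versa) via the AWS CLI.'''
    cmd = "aws s3 sync " + source + " " + destination
    if args:
        cmd = cmd + " " + args
    if dbug == "Y":
        print(cmd)  # echo the command when debugging is requested
    subprocess.check_call(cmd, shell=True)

One caveat on the filter arguments used in these examples: in the real AWS CLI, --include only re-includes files that an earlier --exclude filtered out, so '--quiet --include "*.gz"' on its own still syncs the whole folder; restricting the sync to gzip files would need something like '--exclude "*" --include "*.gz"'.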
Example #2
    def SynchronizeSourceFolder(self):
        '''
        Synchronize the source folder from the AirMarkets bucket in S3.
        '''
        self.logger.info("Synchronizing ZIP files from s3 folder...")

        # Note: the third argument is passed positionally here instead of
        # by keyword (args=..., dbug=...) as in the other examples.
        S3Utilities.SyncFolderAWSCli(
            "s3://" + self.job["bucketName"] + self.job["s3SrcDirectory"],
            self.rawDataFolder, True)
Example #3
    def BulkUploadToS3(self, s3subFolder):
        '''
        Upload all of the GZIP files that were created to S3 so they can
        be loaded later.
        '''
        self.logger.info(self.moduleName +
                         " - Uploading GZIP files to s3 folder...")
        # Default to an empty string so the concatenation below cannot
        # fail with a TypeError when no subfolder is supplied
        s3Sub = ''
        if s3subFolder is not None:
            s3Sub = '/' + s3subFolder
        S3Utilities.SyncFolderAWSCli(self.fileUtilities.gzipFolder,
                                     "s3://" + self.job["bucketName"] +
                                     self.job["s3GzipFolderBase"] + s3Sub,
                                     args='''--quiet --include "*.gz"''',
                                     dbug="Y")
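
For illustration only (the class instance and its job configuration are assumed, not shown in the excerpt), the s3subFolder branch above means the method can be called with or without a date partition:

# 'loader' is a hypothetical instance of the class defining BulkUploadToS3
loader.BulkUploadToS3("20170601")   # syncs to .../s3GzipFolderBase/20170601
loader.BulkUploadToS3(None)         # syncs to .../s3GzipFolderBase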
Example #4
File: IEA.py, Project: eulertech/backup
    def PushFilesToS3(self):
        '''
        Push files to the S3 server.
        '''
        try:
            self.logger.debug(self.moduleName + " -- " + "PushFilesToS3" +
                              " starting ")
            S3Utilities.SyncFolderAWSCli(self.localTempDirectory + '/zips',
                                         "s3://" + self.job["bucketName"] +
                                         '/' + self.job["s3SrcDirectory"] +
                                         '/zips',
                                         args='''--quiet --include "*.zip"''',
                                         dbug="Y")
            self.logger.debug(self.moduleName + " -- " + "PushFilesToS3" +
                              " finished ")
        except Exception:
            self.logger.exception(self.moduleName +
                                  " - we had an error in PushFilesToS3")
            raise
Example #5
File: Totem.py, Project: eulertech/backup
    def MoveToS3(self):
        '''
        Move gzip files to S3 and clean up the local instance.
        '''
        try:
            self.logger.debug(self.moduleName + " -- " + "MoveToS3" +
                              " starting ")
            ###
            #  move any gzip files to the s3 server
            ###
            s3folder = ("s3://" + self.job["bucketName"] +
                        self.job["s3GzipFolderBase"])
            S3Utilities.SyncFolderAWSCli(self.fileUtilities.gzipFolder,
                                         s3folder,
                                         args='''--quiet --include "*.gz"''',
                                         dbug="N")
            # Clean up the local files once the sync has completed
            FileUtilities.EmptyFolderContents(self.fileUtilities.gzipFolder)
            self.logger.debug(self.moduleName + " -- " + "MoveToS3" +
                              " finished ")
        except Exception:
            self.logger.exception(self.moduleName +
                                  " - we had an error in MoveToS3")
            raise
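
Examples #1 and #5 also call FileUtilities.EmptyFolderContents to reclaim local disk space once the gzip files have been synced. That helper is not part of the excerpt either; a minimal sketch, assuming it simply deletes everything under a directory while keeping the directory itself, could look like this:

import os
import shutil

def empty_folder_contents(folder_path):
    '''Hypothetical stand-in for FileUtilities.EmptyFolderContents:
    remove every file and subdirectory under folder_path, but keep
    the folder itself so later runs can reuse it.'''
    for entry in os.listdir(folder_path):
        full_path = os.path.join(folder_path, entry)
        if os.path.isdir(full_path):
            shutil.rmtree(full_path)  # remove subdirectory trees
        else:
            os.remove(full_path)      # remove individual files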