def get_file(self, path):
    fullpath = utility.path_join(self.path, path)
    r = RemoteFile(self.region, self.container_name, self.shalist,
                   self.remote_files, self.path, fullpath)
    if fullpath in self.shalist:
        # Prime the checksum cache from the directory's shalist so it does
        # not need to be recomputed later.
        r.cache['checksum'] = self.shalist[fullpath]
    return r
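The shalist lookup above pre-populates the RemoteFile's checksum cache. A minimal usage sketch, assuming a RemoteDirectory instance named remote_dir whose shalist contains the requested path (the variable and file name are illustrative; only get_file and the cache attribute come from the example above):

f = remote_dir.get_file('image.qcow2')
# Already filled in from the shalist, so the checksum does not have to be
# recomputed from the remote object.
print f.cache['checksum']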
Example #2
def transfer_directory(source_container, destination_container, path,
                       refilter):
    global uploaded
    global destination_total

    print '%s Syncing %s' % (datetime.datetime.now(), path)
    source_dir = source_container.get_directory(path)
    destination_dir = destination_container.get_directory(path)

    # Checksum writes are batched in this dict (checksum -> destination
    # file) and flushed to the destination shalist in groups below.
    queued_shas = {}
    for ent in source_dir.listdir():
        # NOTE(mikal): this is a workaround to handle the historical way
        # in which the directory name appears in both the container name and
        # path inside the container for remote stores. It was easier than
        # rewriting the contents of the remote stores.
        if source_dir.region != 'local':
            ent = '/'.join(os.path.split(ent)[1:])

        fullpath = utility.path_join(path, ent)
        source_file = source_dir.get_file(ent)

        if source_file.isdir():
            transfer_directory(source_container, destination_container,
                               fullpath, refilter)

        elif source_file.islink():
            pass

        elif source_file.get_path().endswith('.sha512'):
            pass

        elif source_file.get_path().endswith('.shalist'):
            pass

        elif source_file.get_path().endswith('~'):
            pass

        else:
            destination_file = destination_dir.get_file(ent)
            print '%s Consider  %s' % (datetime.datetime.now(),
                                       source_file.get_path())
            m = refilter.match(source_file.get_path())
            if not m:
                print '%s ... skipping due to filter' % datetime.datetime.now()
                continue

            if destination_file.exists():
                if int(os.environ.get('PUSH_NO_CHECKSUM', 0)) == 1:
                    print '%s ... skipping checksum' % datetime.datetime.now()
                    if ARGS.delete_local:
                        print('%s ... cleaning up file' %
                              datetime.datetime.now())
                        os.remove(source_file.get_path())
                    continue

                if destination_file.checksum() != source_file.checksum():
                    print(
                        '%s Checksum for %s does not match! (%s vs %s)' %
                        (datetime.datetime.now(), source_file.get_path(),
                         source_file.checksum(), destination_file.checksum()))
                else:
                    if ARGS.delete_local:
                        print('%s ... cleaning up file' %
                              datetime.datetime.now())
                        os.remove(source_file.get_path())
                    continue

            # Retry the transfer up to three times before giving up on this
            # file.
            done = False
            attempts = 0
            while not done and attempts < 3:
                try:
                    local_file = source_file.get_path()
                    local_cleanup = False
                    if source_file.region != 'local':
                        print('%s Fetching the file from remote location' %
                              datetime.datetime.now())
                        local_cleanup = True
                        local_file = source_file.fetch()

                    source_size = source_file.size()
                    print('%s Transferring %s (%s)' %
                          (datetime.datetime.now(), source_file.get_path(),
                           utility.DisplayFriendlySize(source_size)))
                    start_time = time.time()
                    destination_file.store(local_file)

                    queued_shas[source_file.checksum()] = destination_file
                    print('%s There are %d queued checksum writes' %
                          (datetime.datetime.now(), len(queued_shas)))

                    if ARGS.delete_local:
                        print('%s ... cleaning up file' %
                              datetime.datetime.now())
                        os.remove(source_file.get_path())

                    # Flush the queued checksum writes once more than 20 are
                    # pending, or immediately after any file larger than 1MB.
                    if len(queued_shas) > 20 or source_size > 1024 * 1024:
                        print('%s Clearing queued checksum writes' %
                              datetime.datetime.now())
                        for sha in queued_shas:
                            destination_dir.update_shalist(
                                queued_shas[sha].path, sha)
                        destination_dir.write_shalist()
                        queued_shas = {}

                    if local_cleanup:
                        os.remove(local_file)

                    print('%s Uploaded  %s (%s)' %
                          (datetime.datetime.now(), source_file.get_path(),
                           utility.DisplayFriendlySize(source_file.size())))
                    uploaded += source_size
                    destination_total += source_size
                    elapsed = time.time() - start_time
                    print('%s Total     %s' %
                          (datetime.datetime.now(),
                           utility.DisplayFriendlySize(uploaded)))
                    print('%s           %s per second' %
                          (datetime.datetime.now(),
                           utility.DisplayFriendlySize(
                               int(source_size / elapsed))))
                    print('%s Stored    %s' %
                          (datetime.datetime.now(),
                           utility.DisplayFriendlySize(destination_total)))
                    done = True

                except Exception as e:
                    sys.stderr.write('%s Sync failed for %s (attempt %d): %s\n' %
                                     (datetime.datetime.now(),
                                      source_file.get_path(), attempts, e))
                    attempts += 1
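A rough sketch of how transfer_directory might be invoked. Only the signature and the refilter.match() call are taken from the code above; the container objects, the ARGS namespace and the uploaded/destination_total globals live in the surrounding (unshown) script, so the names below are assumptions:

import re

# refilter must be a compiled regular expression: the loop above calls
# refilter.match() on every source path and skips non-matches.
refilter = re.compile(r'.*\.qcow2$')

# Counters updated via the 'global' statements in transfer_directory.
uploaded = 0
destination_total = 0

# source_container and destination_container are assumed to be objects that
# expose get_directory(), as in the other examples on this page.
transfer_directory(source_container, destination_container, '', refilter)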
Example #4
def get_directory(self, path):
    return RemoteDirectory(self.region, self.container_name,
                           utility.path_join(self.basename, path))
Example #6
def get_directory(self, path):
    return RemoteDirectory(self, utility.path_join(self.basename, path))
Example #7
def get_file(self, path):
    fullpath = utility.path_join(self.path, path)
    r = RemoteFile(self.parent_container, self, fullpath)
    if fullpath in self.shalist:
        r.cache['checksum'] = self.shalist[fullpath]
    return r

def get_directory(self, path):
    return RemoteDirectory(self, utility.path_join(self.basename, path))
Example #10
            responsePattern = get_response_pattern()

        responsePattern.update(**responsePatternIn)
        TheLogger.debug("Incoming response pattern: \n" +
                        json_dumps(responsePattern))

        pyData_to_json_file(responsePattern, pathRespPatternFile)
    except Exception as e:
        TheLogger.error(str(e))


if __name__ == "__main__":
    """
    Main entry point.
    """
    pathTempDataDir = path_join(PATH_ROOT, "temp")
    pathRespPatternFile = path_join(pathTempDataDir, "response_pattern.json")
    make_dir(pathTempDataDir)
    TheLogger.init(pathTempDataDir, "server.log")

    mode = sys.argv[1]
    if mode == "start":
        serverHost = "127.0.0.1"
        try:
            serverPort = int(sys.argv[2])
        except Exception:
            print("Error: specify port correctly.")
            sys.exit(1)
        start(serverHost, serverPort)
    elif mode == "set_response_pattern":
        pathRespPatternFileIn = sys.argv[2]
        flagAddData = bool(int(sys.argv[3]))
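Judging from the argument handling above, the script is driven from the command line in one of two modes (only the mode names and argument positions come from the code; the rest is a guess): start <port> runs the server on the given port, and set_response_pattern <path-to-json> <0|1> loads a response pattern from a JSON file, where the trailing 0/1 flag presumably controls whether the new data is merged into the existing pattern.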
Example #11
def get_directory(self, path):
    return RemoteDirectory(self.region, self.container_name,
                           utility.path_join(self.basename, path))