def upload_files(self, arg, dirname, names):
    """Upload or skip items."""
    for item in names:
        if item in self.FILTER_LIST:
            continue  # Skip files we don't want to sync
        file_path = os.path.join(dirname, item)
        if os.path.isdir(file_path):
            continue  # Don't try to upload directories
        object_name = self.STATIC_URL + file_path.split(self.DIRECTORY)[1]
        self.local_object_names.append(object_name)

        # One round-trip per file: fetch the object, or create it if missing
        try:
            cloud_obj = self.container.get_object(object_name)
        except cloudfiles.errors.NoSuchObject:
            cloud_obj = self.container.create_object(object_name)
            self.create_count += 1

        if self.use_md5:
            # Compare the remote checksum against a locally computed md5
            cloud_md5 = cloud_obj.objsum
            m = hashlib.md5()
            with open(file_path, 'rb') as local_file:
                while True:
                    data = local_file.read(10240)
                    if len(data) == 0:
                        break
                    m.update(data)
            local_md5 = m.hexdigest()
            if cloud_md5 == local_md5:
                self.skip_count += 1
                if self.verbosity > 1:
                    print "Skipped %s: md5 not modified." % object_name
                continue
        else:
            # Fall back to comparing modification timestamps
            cloud_datetime = (cloud_obj.last_modified and
                              datetime.datetime.strptime(
                                  cloud_obj.last_modified,
                                  "%a, %d %b %Y %H:%M:%S %Z"
                              ) or None)
            local_datetime = datetime.datetime.utcfromtimestamp(
                os.stat(file_path).st_mtime)
            if cloud_datetime and local_datetime < cloud_datetime:
                self.skip_count += 1
                if self.verbosity > 1:
                    print "Skipped %s: datetime not modified." % object_name
                continue

        if not self.test_run:
            cloud_obj.load_from_filename(file_path)
            sync_headers(cloud_obj)
        self.upload_count += 1
        if self.verbosity > 1:
            print "Uploaded", cloud_obj.name
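The (arg, dirname, names) signature is exactly the visit callback that Python 2's os.path.walk() expects, so the command presumably drives it along these lines. The handle() wiring and counter initialization below are an assumption for illustration, not code from the original:

import os

def handle(self):
    # Assumed driver: walk the static directory and invoke
    # upload_files() once per subdirectory, os.path.walk-style.
    self.local_object_names = []
    self.create_count = self.upload_count = self.skip_count = 0
    os.path.walk(self.DIRECTORY, self.upload_files, None)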
def upload_files(self, arg, dirname, names):
    """Upload or skip items."""
    for item in names:
        if item in self.FILTER_LIST:
            continue  # Skip files we don't want to sync
        file_path = os.path.join(dirname, item)
        if os.path.isdir(file_path):
            continue  # Don't try to upload directories
        object_name = self.STATIC_URL + file_path.split(self.DIRECTORY)[1]
        self.local_object_names.append(object_name)

        # Check if the metadata for our object is on the server
        obj_info = next((o for o in self.cloudfile_info_list
                         if o['name'] == object_name), None)
        if not obj_info:
            # It is not on the server yet; we will create the object
            if not self.test_run:
                cloud_obj = self.container.create_object(object_name)
            self.create_count += 1
        else:
            if self.add_only:
                continue
            # Check if it needs to be re-uploaded
            cloud_datetime = (obj_info['last_modified'] and
                              datetime.datetime.strptime(
                                  obj_info['last_modified'],
                                  "%Y-%m-%dT%H:%M:%S.%f",
                              ) or None)
            local_datetime = datetime.datetime.utcfromtimestamp(
                os.stat(file_path).st_mtime)
            if cloud_datetime and local_datetime < cloud_datetime:
                self.skip_count += 1
                if self.verbosity > 1:
                    print "Skipped %s: not modified." % object_name
                continue
            # Check if the hash matches
            with open(file_path, 'rb') as source:
                if obj_info['hash'] == cloudfiles.Object.compute_md5sum(source):
                    self.skip_count += 1
                    if self.verbosity > 1:
                        print "Skipped %s: not modified (checksum)." % object_name
                    continue
            # We will have to update it; grab the object
            cloud_obj = self.container.get_object(object_name)

        # Upload the file content
        if not self.test_run:
            cloud_obj.load_from_filename(file_path)
            sync_headers(cloud_obj)
        self.upload_count += 1
        if self.verbosity > 1:
            print "Uploaded", object_name
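This version replaces the per-file get_object() round-trip with a listing fetched up front into self.cloudfile_info_list. python-cloudfiles exposes Container.list_objects_info(), which returns dicts carrying the 'name', 'hash', and 'last_modified' keys the loop above reads, so a plausible setup step (an assumption; the original's prefetch code is not shown) is:

# Assumed setup: one listing request replaces a GET per file.
# list_objects_info() yields dicts with 'name', 'hash', 'bytes',
# 'content_type', and 'last_modified' keys.
self.cloudfile_info_list = self.container.list_objects_info()

Indexing the listing into a dict keyed by object name would turn the linear next(...) scan into an O(1) lookup, though the flat list matches what the snippet above iterates.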
def load_from_filename(self, cloud_obj, file_path):
    """Retry an upload that dies mid-stream with an SSL error."""
    try:
        cloud_obj.load_from_filename(file_path)
        sync_headers(cloud_obj)
    except ssl.SSLError:
        # Retry by recursing; a persistently failing connection will
        # recurse without bound.
        return self.load_from_filename(cloud_obj, file_path)
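If the SSL failure is persistent rather than transient, the recursive retry above never terminates. A bounded variant is sketched below; the three-attempt cap is an arbitrary assumption, not a value from the original:

import ssl

def load_from_filename(self, cloud_obj, file_path, retries=3):
    # Retry the upload a fixed number of times, re-raising the
    # final SSL error instead of recursing forever.
    for attempt in range(retries):
        try:
            cloud_obj.load_from_filename(file_path)
            sync_headers(cloud_obj)
            return
        except ssl.SSLError:
            if attempt == retries - 1:
                raise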