def upload_development_files(self):
    """Helper function for file uploads to S3"""
    # Prepare storage bucket for development.
    # We skip this in production, where the bucket needs to be created manually.
    uploads_bucket = getattr(settings, "AWS_S3_BUCKET", None)
    log.info(f"Making sure the {uploads_bucket} bucket exists")
    storage.create_bucket(uploads_bucket)

    assets_folder = os.path.join(settings.BASE_DIR, "../assets")
    user = User.objects.get(pk=1)
    for file in os.listdir(assets_folder):
        log.info(f"Uploading {file} file to bucket")
        file_path = os.path.join(assets_folder, file)
        storage.upload_file(uploads_bucket, file, file_path)
        # Register the uploaded asset as a ready, public image owned by the first user.
        File.objects.get_or_create(
            pk=file,
            defaults={
                "state": "ready",
                "file_type": "image",
                "token": "token",
                "user": user,
                "public": True,
            },
        )
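# A minimal sketch, assuming the project's `storage` helper wraps boto3, of what
# create_bucket/upload_file could look like. The underscore-prefixed function names
# are hypothetical and only mirror the calls above; this is illustrative, not the
# project's actual storage module.
import boto3
from botocore.exceptions import ClientError


def _create_bucket(bucket_name):
    # Create the bucket if it does not already exist; ignore "already owned" errors.
    client = boto3.client("s3")
    try:
        client.create_bucket(Bucket=bucket_name)
    except ClientError as error:
        if error.response["Error"]["Code"] not in (
            "BucketAlreadyOwnedByYou",
            "BucketAlreadyExists",
        ):
            raise


def _upload_file(bucket_name, key, file_path):
    # Upload the local file and store it under `key` in the bucket.
    client = boto3.client("s3")
    client.upload_file(file_path, bucket_name, key)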
def run(self, *args, **options):
    if not os.path.exists(IMPORT_DIRECTORY):
        log.error(f'Missing import directory: "{IMPORT_DIRECTORY}"')
        exit(1)

    self.load_initial_fixtures()

    # Upload files
    if os.path.exists(f"{IMPORT_DIRECTORY}/files"):
        # Prepare storage bucket for development.
        # We skip this in production, where the bucket needs to be created manually.
        uploads_bucket = getattr(settings, "AWS_S3_BUCKET", None)
        log.info(f"Making sure the {uploads_bucket} bucket exists")
        storage.create_bucket(uploads_bucket)

        if not options["yes"]:
            # Also accept the Norwegian "nei" as a negative answer.
            choice = input("Do you wish to upload/import all files? [Y/n]").lower()
            if choice in ("n", "no", "nei"):
                print("[IGNORE] Ignoring upload of files\n----------------")
            else:
                self.upload_files(uploads_bucket, f"{IMPORT_DIRECTORY}/files")
        else:
            self.upload_files(uploads_bucket, f"{IMPORT_DIRECTORY}/files")
    # End upload files

    file_names = os.listdir(IMPORT_DIRECTORY)
    file_names.sort()

    print("Found the following files/fixtures to import:")
    for file_name in file_names:
        if not os.path.isfile(f"{IMPORT_DIRECTORY}/{file_name}"):
            continue
        print(f"\t{file_name}")

    if os.path.isfile(f"{IMPORT_DIRECTORY}/1_nerd_export_group_files.yaml"):
        self.handle_fixture_import("1_nerd_export_group_files.yaml", options["yes"])
        file_names.remove("1_nerd_export_group_files.yaml")

    if os.path.isfile(f"{IMPORT_DIRECTORY}/1_nerd_export_group_objects.yaml"):
        # We need to update the group MPTT mapping
        group_tree = initial_tree
        nerd_groups = self.load_yaml(
            f"{IMPORT_DIRECTORY}/1_nerd_export_group_objects.yaml"
        )
        for group_model in nerd_groups:
            group_fields = group_model["fields"]
            group_fields["id"] = group_model["pk"]
            if group_fields["logo"]:
                group_fields["logo"] = File.objects.get(key=group_fields["logo"])

            group_name = group_fields["name"]
            lego_group_ids[group_model["pk"]] = group_name
            group_fields.pop("name", None)

            if group_fields["parent"] is None:
                # Groups without a parent become roots of the import tree.
                group_fields.pop("parent", None)
                group_tree[group_name] = [group_fields, {}]
                continue

            parent = lego_group_ids[group_fields["parent"]]
            group_fields.pop("parent", None)

            print(f"Attempting to find path for group: {group_name}")
            results = list(self.find_group_path(parent, group_tree))
            path = " -> ".join(results[0][1] + [group_name])
            print(f"Found group path: {path}")

            if not options["yes"]:
                choice = input("Do you wish to import this group? [Y/n]").lower()
                if choice in ("n", "no", "nei"):
                    print(f"[IGNORE] Ignoring {group_name}\n----------------")
                    continue

            results[0][0][1][group_name] = [group_fields, {}]
            print(f"[SUCCESS] Added {group_name} to the import tree\n------------------")

        insert_abakus_groups(group_tree)
        AbakusGroup.objects.rebuild()
        file_names.remove("1_nerd_export_group_objects.yaml")
    # End MPTT mapping

    print("Starting import of fixtures")
    for file_name in file_names:
        self.handle_fixture_import(file_name, options["yes"])

    # Make sure user profile pictures are owned by the users
    print("Setting ownership of user profile pictures")
    for user in User.all_objects.exclude(picture__isnull=True):
        if user.picture.token == "token":
            continue
        user.picture.user = user
        user.picture.save()

    # Loop through all the models and generate slugs for them if they do not exist
    for article in Article.all_objects.all():
        article.save()
    for event in Event.all_objects.all():
        event.save()
    for page in Page.all_objects.all():
        page.save()
    for joblisting in Joblisting.all_objects.all():
        joblisting.save()
    for meeting in Meeting.all_objects.all():
        meeting.save()
    for quote in Quote.all_objects.all():
        quote.save()
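# A minimal, hypothetical sketch of how the "--yes" flag consumed above via
# options["yes"] might be registered, assuming a Django-style management command.
# The class name, help text, and wiring are assumptions for illustration only;
# the project's actual command definition may differ.
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Import nerd export fixtures and files (hypothetical wrapper)"

    def add_arguments(self, parser):
        # Skip all interactive [Y/n] prompts when --yes is passed.
        parser.add_argument(
            "--yes",
            action="store_true",
            default=False,
            help="Answer yes to all prompts",
        )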