def reset_sqlite_database(username=None, email=None, password=None, router=None, verbosity="1"):
    """
    Resets the currently used sqlite database.  Creates the superuser if
    username, email and password are all passed.

    :param username: If present, creates a superuser with this username.
    :param email: If present, creates a superuser with this email.
    :param password: If present, creates a superuser with this password.
    :param router: The database router to use.
    :param verbosity: Verbosity (string) forwarded to the management commands.
    :return: Returns the superuser created or None if no arguments are provided.
    """
    if not router:
        router = getattr(settings, 'SCREENSHOTS_ROUTER', 'default')
    db_engine = settings.DATABASES[router]['ENGINE']
    if db_engine == settings.SQLITE3_ENGINE:
        # make sure database path exists
        ensure_dir(settings.SCREENSHOTS_OUTPUT_PATH)
        new_io = StringIO()
        call_command("setup", interactive=False, stdout=new_io, verbosity=verbosity)
        call_command("generaterealdata", scenario_1=True, interactive=False,
                     stdout=new_io, router=router, verbosity=verbosity)
        # For coachreports pages
        if username and email and password:
            # Idiom fix: the original used a conditional expression
            # (`log.info(...) if int(verbosity) > 0 else None`) purely for its
            # side effect; a plain `if` statement is the Pythonic form.
            if int(verbosity) > 0:
                log.info('==> Creating superuser username==%s; email==%s ...' % (username, email,))
            call_command("createsuperuser", username=username, email=email, interactive=False,
                         stdout=new_io, router=router, verbosity=verbosity)
            admin_user = User.objects.get(username=username)
            admin_user.set_password(password)
            admin_user.save()
            return admin_user
    return None
def create_node(self, path):
    """
    Create one random content node under *path* and return its metadata dict.

    With probability ~0.5 the node is a leaf file (random extension drawn from
    self.extensions, plus a sidecar .json metadata file); otherwise it is a
    "Topic" directory with a .json metadata file next to it.

    :param path: path relative to the parent of self.tempdir.
    :return: metadata dict including "slug", "path" and "kind".
    """
    # string.ascii_lowercase replaces the Python-2-only, locale-dependent
    # string.lowercase; under the default C locale they are identical.
    filename = "".join(random.sample(string.ascii_lowercase, 16))
    data = {}
    for metadatum in self.metadata:
        data[metadatum] = "".join(random.sample(string.ascii_lowercase, 16))
    # Hoisted: the base directory was recomputed for every open() call.
    base_dir = os.path.join(os.path.dirname(self.tempdir), path)
    if random.random() > 0.5:
        filetype = random.choice(self.extensions)
        extension = filetype[0]
        filekind = filetype[1]
        with open(os.path.join(base_dir, filename + ".%s" % extension), 'w') as f:
            f.write(str(random.random()))
        with open(os.path.join(base_dir, filename + ".%s" % extension + ".json"), 'w') as f:
            json.dump(data, f)
        # NOTE: kind/format are added AFTER the json dump, so the sidecar
        # .json intentionally does not contain them (matches original order).
        data["kind"] = filekind
        data["format"] = extension
    else:
        ensure_dir(os.path.join(base_dir, filename))
        with open(os.path.join(base_dir, filename + ".json"), 'w') as f:
            json.dump(data, f)
        data["kind"] = "Topic"
        data["children"] = []
    data["slug"] = filename
    data["path"] = os.path.join(path, data["slug"])
    return data
def move_srts(lang_code):
    """
    Srts live in the locale directory, but that's not exposed at any URL.  So
    instead, we have to move the srts out to /static/subtitles/[lang_code]/

    :param lang_code: language code whose subtitle files are moved.
    """
    # NOTE(review): the original also computed `lang_code_ietf` and
    # `subtitles_static_dir` but never used either; both were side-effect-free
    # conversions, so the dead locals were removed.
    lang_code_django = lcode_to_django_dir(lang_code)

    src_dir = os.path.join(LOCALE_ROOT, lang_code_django, "subtitles")
    dest_dir = get_srt_path(lang_code_django)
    ensure_dir(dest_dir)

    lang_subtitles = glob.glob(os.path.join(src_dir, "*.srt"))
    logging.info("Moving %d subtitles from %s to %s" % (len(lang_subtitles), src_dir, dest_dir))

    for fil in lang_subtitles:
        srt_dest_path = os.path.join(dest_dir, os.path.basename(fil))
        if os.path.exists(srt_dest_path):
            os.remove(srt_dest_path)  # we're going to replace any srt with a newer version
        shutil.move(fil, srt_dest_path)

    if not os.path.exists(src_dir):
        logging.info("No subtitles for language pack %s" % lang_code)
    elif os.listdir(src_dir):
        logging.warn("%s is not empty; will not remove. Please check that all subtitles were moved." % src_dir)
    else:
        logging.info("Removing empty source directory (%s)." % src_dir)
        shutil.rmtree(src_dir)
def reset_sqlite_database(username=None, email=None, password=None, router=None, verbosity="1"):
    """
    Reset the sqlite database currently in use; optionally create a superuser.

    :param username: If present, creates a superuser with this username.
    :param email: If present, creates a superuser with this email.
    :param password: If present, creates a superuser with this password.
    :param router: The database router to use.
    :return: Returns the superuser created or None if no arguments are provided.
    """
    router = router or getattr(settings, 'SCREENSHOTS_ROUTER', 'default')

    # Only sqlite databases are reset; any other engine is left untouched.
    if settings.DATABASES[router]['ENGINE'] != settings.SQLITE3_ENGINE:
        return None

    # The database path must exist before the management commands run.
    ensure_dir(settings.SCREENSHOTS_OUTPUT_PATH)
    command_output = StringIO()
    call_command("setup", interactive=False, stdout=command_output, verbosity=verbosity)
    call_command("generaterealdata", scenario_1=True, interactive=False,
                 stdout=command_output, router=router, verbosity=verbosity)

    # A superuser is needed for the coachreports pages.
    if not (username and email and password):
        return None

    if int(verbosity) > 0:
        log.info('==> Creating superuser username==%s; email==%s ...' % (username, email,))
    call_command("createsuperuser", username=username, email=email, interactive=False,
                 stdout=command_output, router=router, verbosity=verbosity)
    superuser = User.objects.get(username=username)
    superuser.set_password(password)
    superuser.save()
    return superuser
def handle(self, *args, **options):
    """
    Dispatch a content-pack operation: "download", "local" or "empty".

    :raises CommandError: on a missing/unknown operation, or when --force
        and --template are combined.
    """
    self.setup(options)

    # Robustness: fail with a management-command error rather than an
    # IndexError when no operation was given.
    if not args:
        raise CommandError("No operation given (expected one of: download, local, empty).")
    operation = args[0]

    self.foreground = options.get('foreground', False)
    self.is_template = options.get('template', False)
    self.force = options.get('force', False)

    # Validate flag combinations BEFORE any side effects (the original
    # created the template directory first and only raised afterwards).
    if self.is_template and self.force:
        raise CommandError("Cannot combine --force and --template.")

    if self.is_template:
        # This is sort of undefined, because templates are always assumed fine
        # to overwrite
        ensure_dir(django_settings.DB_CONTENT_ITEM_TEMPLATE_DIR)

    if operation == "download":
        self.start(_("Downloading content pack."))
        self.download(*args, **options)
    elif operation == "local":
        self.start(_("Installing a local content pack."))
        self.local(*args, **options)
    elif operation == "empty":
        self.empty(*args, **options)
    else:
        raise CommandError("Unknown operation: %s" % operation)
def run_makemessages(ignore_patterns_py=None, ignore_patterns_js=None, verbosity=0):
    """Run makemessages command for english po files.

    :param ignore_patterns_py: extra glob ignore patterns for the .py/.html pass.
    :param ignore_patterns_js: extra glob ignore patterns for the .js pass.
    :param verbosity: verbosity forwarded to the makemessages command.
    """
    # Fixed: mutable default arguments ([]) replaced with None sentinels.
    if ignore_patterns_py is None:
        ignore_patterns_py = []
    if ignore_patterns_js is None:
        ignore_patterns_js = []

    # Do some packages only
    python_package_dirs = glob.glob(os.path.join(PROJECT_ROOT, 'python-packages', '*'))
    ignored_packages = [os.path.join('*/python-packages/', os.path.basename(pp))
                        for pp in python_package_dirs
                        if os.path.basename(pp) not in ['securesync', 'fle_utils']]

    # Besides externally requested ignores, add on a few standard ones.
    ignore_shared = ignored_packages + ['*/data/*', '*/.git/*', '*/migrations/*',
                                        '*/node_modules/*', '*/fle_utils/chronograph/*']
    ignore_patterns_py = ignore_patterns_py + ignore_shared + ['*/static/*']
    ignore_patterns_js = ignore_patterns_js + ignore_shared + \
        ['*/kalite/static/*', '*/static/admin/*', '*/static/js/i18n/*',
         '*/kalite/distributed/static/khan-exercises/*'] + ['*jquery*', '*bootstrap*']

    logging.debug("Creating / validating locale root folder")
    ensure_dir(LOCALE_ROOT)

    # Command must be run from project root
    logging.debug("Moving to project root directory")
    os.chdir(PROJECT_ROOT)

    call_command('clean_pyc', path=PROJECT_ROOT)

    logging.info("Executing makemessages command")
    # Generate english po file
    sys.stdout.write("\n\nCompiling .py / .html files... ")
    call_command('makemessages', extensions=['html', 'py'], verbosity=verbosity,
                 locale='en', ignore_patterns=ignore_patterns_py, no_obsolete=True)

    # Generate english po file for javascript
    sys.stdout.write("\n\nCompiling .js files... ")
    call_command('makemessages', extensions=['js'], domain='djangojs', verbosity=verbosity,
                 locale='en', ignore_patterns=ignore_patterns_js, no_obsolete=True)
def move_files(self):
    """If necessary (determined previously), move video files on disk.
    Otherwise, write into local_settings."""
    # Move over videos
    if self.move_videos == "y":
        if os.path.exists(settings.CONTENT_ROOT):
            video_files = set(glob.glob(settings.CONTENT_ROOT + '*')) - set((settings.CONTENT_ROOT + "note.txt",))
        else:
            video_files = set()
        sys.stdout.write("* Moving over %d files (videos and thumbnails)\n" % len(video_files))
        if not os.path.exists(self.working_dir + "/content/"):
            os.mkdir(self.working_dir + "/content/")
        for video_file in video_files:
            shutil.move(video_file, self.working_dir + "/content/" + os.path.split(video_file)[1])
    else:
        # write (append) CONTENT_ROOT into local_settings.py.  Use a context
        # manager so the handle is closed even if the write raises
        # (original used bare open/close).
        with open(self.working_dir + "/kalite/local_settings.py", "a") as fh:
            fh.write("\nCONTENT_ROOT = '%s'\n" % settings.CONTENT_ROOT)

    # Move inner zip file
    if not os.path.exists(self.inner_zip_file) or not os.path.exists(self.signature_file):
        sys.stderr.write("\tCould not find inner zip file / signature file for storage. Continuing...\n")
    else:
        try:
            zip_dir = os.path.join(self.working_dir, "kalite", "static", "zip")
            ensure_dir(zip_dir)
            shutil.move(self.inner_zip_file, os.path.join(zip_dir, os.path.basename(self.inner_zip_file)))
            shutil.move(self.signature_file, os.path.join(zip_dir, os.path.basename(self.signature_file)))
        except Exception as e:
            sys.stderr.write("\tCould not keep inner zip file / signature for future re-packaging (%s). Continuing...\n" % e)
def unpack_zipfile_to_content_folder(zf):
    """Extract an assessment-item zipfile and move its special files into
    their configured locations.

    :param zf: an open zipfile whose optional "channel.name" entry selects
        a per-channel subfolder under ASSESSMENT_ITEM_ROOT.
    """
    try:
        channel = zf.read("channel.name")
    except KeyError:
        channel = ""

    # A channel name routes content into a subfolder; otherwise extract
    # straight into the assessment-item root.
    folder = os.path.join(settings.ASSESSMENT_ITEM_ROOT, channel) if channel else settings.ASSESSMENT_ITEM_ROOT

    ensure_dir(folder)
    zf.extractall(folder)
    ensure_dir(settings.KHAN_ASSESSMENT_ITEM_ROOT)

    # Ensure that special files are in their configured locations
    os.rename(os.path.join(folder, 'assessmentitems.version'),
              settings.KHAN_ASSESSMENT_ITEM_VERSION_PATH)
    os.rename(os.path.join(folder, 'assessmentitems.sqlite'),
              settings.KHAN_ASSESSMENT_ITEM_DATABASE_PATH)

    # JSON file is apparrently not required (not in the test at least)
    json_source = os.path.join(folder, 'assessmentitems.json')
    if os.path.isfile(json_source):
        os.rename(json_source, settings.KHAN_ASSESSMENT_ITEM_JSON_PATH)
def update_jsi18n_file(code="en"):
    """
    For efficieny's sake, we want to cache Django's js18n file.  So, generate
    that file here, then save to disk--it won't change until the next language
    pack update!

    :param code: language code whose javascript catalog is generated.
    """
    translation.activate(code)  # we switch the language of the whole thread
    output_dir = os.path.join(settings.CONTENT_ROOT, 'locale', 'js', 'i18n')
    ensure_dir(output_dir)
    output_file = os.path.join(output_dir, "%s.js" % code)

    request = HttpRequest()
    request.path = output_file
    request.session = {settings.LANGUAGE_COOKIE_NAME: code}

    response = javascript_catalog(request, packages=('ka-lite.locale', ), domain="djangojs")

    icu_js = ""
    for path in settings.LOCALE_PATHS:
        try:
            # Fixed resource leak: the original `open(...).read()` never
            # closed the handle.
            with open(os.path.join(path, code, "%s_icu.js" % code), "r") as icu_file:
                icu_js = icu_file.read()
        except IOError:
            logging.warn(
                "No {code}_icu.js file found in locale_path {path}".format(
                    code=code, path=path))

    output_js = response.content + "\n" + icu_js
    logging.info("Writing i18nized js file to {0}".format(output_file))
    with open(output_file, "w") as fp:
        fp.write(output_js)

    translation.deactivate()
def update_jsi18n_file(code="en"):
    """
    For efficieny's sake, we want to cache Django's js18n file.  So, generate
    that file here, then save to disk--it won't change until the next language
    pack update!

    :param code: language code whose javascript catalog is generated.
    """
    translation.activate(code)  # we switch the language of the whole thread
    output_dir = os.path.join(settings.CONTENT_ROOT, 'locale', 'js', 'i18n')
    ensure_dir(output_dir)
    output_file = os.path.join(output_dir, "%s.js" % code)

    request = HttpRequest()
    request.path = output_file
    request.session = {settings.LANGUAGE_COOKIE_NAME: code}

    response = javascript_catalog(request, packages=('ka-lite.locale',), domain="djangojs")

    icu_js = ""
    for path in settings.LOCALE_PATHS:
        try:
            # Fixed resource leak: read the ICU overrides via a context
            # manager instead of an unclosed open(...).read().
            with open(os.path.join(path, code, "%s_icu.js" % code), "r") as icu_file:
                icu_js = icu_file.read()
        except IOError:
            logging.warn("No {code}_icu.js file found in locale_path {path}".format(code=code, path=path))

    output_js = response.content + "\n" + icu_js
    logging.info("Writing i18nized js file to {0}".format(output_file))
    with open(output_file, "w") as fp:
        fp.write(output_js)

    translation.deactivate()
def __init__(self, *args, **kwargs):
    """
    Force setting up live server test. Adding to kwargs doesn't work, need to
    go to env. Dependent on how Django works here.
    """
    self.failfast = kwargs.get("failfast", False)  # overload
    # verbosity level, default 1.  Fixed: the original called
    # int(kwargs.get("verbosity")), which raises TypeError when the key is
    # absent despite the comment promising a default.
    self.verbosity = int(kwargs.get("verbosity", 1))

    # If no liveserver specified, set some default.
    # port range is the set of open ports that Django can use to
    # start the server. They may have multiple servers open at once.
    if not os.environ.get('DJANGO_LIVE_TEST_SERVER_ADDRESS', ""):
        os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = "localhost:9000-9999"

    # Extra options from our custom test management command are passed into
    # the constructor, but not the build_suite function where we need them.
    self._bdd_only = kwargs["bdd_only"]
    self._no_bdd = kwargs['no_bdd']

    # Django < 1.7 serves static files using the staticfiles app, not from static root.
    # This causes django_js_reverse not to get served to the client, so we manually copy it into distributed.
    call_command("collectstatic_js_reverse", interactive=False)
    js_reverse_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                  "distributed", "static", "django_js_reverse", "js")
    ensure_dir(js_reverse_dir)
    shutil.copy2(os.path.join(settings.STATIC_ROOT, "django_js_reverse", "js", "reverse.js"),
                 os.path.join(js_reverse_dir, "reverse.js"))

    if os.environ.get("TRAVIS"):
        settings.DO_NOT_RELOAD_CONTENT_CACHE_AT_STARTUP = True

    return super(KALiteTestRunner, self).__init__(*args, **kwargs)
def update_metadata(package_metadata, version=VERSION):
    """
    We've zipped the packages, and now have unzipped & zipped sizes.
    Update this info in the local metadata (but not inside the zip).
    """
    master_filepath = get_language_pack_availability_filepath(version=version)
    master_metadata = softload_json(master_filepath, logger=logging.warn,
                                    errmsg="Error opening master language pack metadata")

    for lang_code, new_meta in package_metadata.iteritems():
        lang_code_ietf = lcode_to_ietf(lang_code)

        # Merge the freshly computed values into any existing metadata.
        metadata_filepath = get_language_pack_metadata_filepath(lang_code_ietf, version=version)
        merged_meta = softload_json(metadata_filepath, logger=logging.warn,
                                    errmsg="Error opening %s language pack metadata" % lang_code)
        merged_meta.update(new_meta)

        # Write locally (this is used on download by distributed server to update it's database)
        with open(metadata_filepath, 'w') as fh:
            json.dump(merged_meta, fh)

        # Update master (this is used for central server to handle API requests for data)
        master_metadata[lang_code_ietf] = merged_meta

    # Save updated master
    ensure_dir(os.path.dirname(master_filepath))
    with open(master_filepath, 'w') as fh:
        json.dump(master_metadata, fh)
    logging.info("Local record of translations updated")
def handle(self, *args, **options):
    """
    Build the dubbed-video mapping, reusing a local CSV cache when it is
    recent (<= 14 days) and --force is not given, then save the map to
    DUBBED_VIDEOS_MAPPING_FILEPATH.
    """
    # Get the CSV data, either from a recent cache_file or from the internet
    cache_dir = settings.MEDIA_ROOT
    cache_file = os.path.join(cache_dir, "dubbed_videos.csv")
    if not options["force"] and os.path.exists(cache_file) and \
            datediff(datetime.datetime.now(),
                     datetime.datetime.fromtimestamp(os.path.getctime(cache_file)),
                     units="days") <= 14.0:
        # Use cached data to generate the video map
        # (fixed resource leak: the original open(...).read() never closed the handle)
        with open(cache_file, "r") as f:
            csv_data = f.read()
        (video_map, _) = generate_dubbed_video_mappings(csv_data=csv_data)
    else:
        # Download fresh data to generate the video map, then cache it locally
        # (the original comment wrongly said "Use cached data" here too).
        (video_map, csv_data) = generate_dubbed_video_mappings()
        try:
            ensure_dir(cache_dir)
            with open(cache_file, "w") as fp:
                fp.write(csv_data)
        except Exception as e:
            logging.error("Failed to make a local cache of the CSV data: %s" % e)

    # Now we've built the map.  Save it.
    out_file = DUBBED_VIDEOS_MAPPING_FILEPATH
    ensure_dir(os.path.dirname(out_file))
    logging.info("Saving data to %s" % out_file)
    with open(out_file, "w") as fp:
        json.dump(video_map, fp)

    logging.info("Done.")
def unpack_zipfile_to_content_folder(zf):
    """Extract an assessment-item zipfile, then copy assessmentitems.version
    up to ASSESSMENT_ITEM_ROOT where other code (and a test) expects it.

    :param zf: open zipfile; a "channel.name" entry selects a per-channel
        subfolder, otherwise the legacy KHAN_ASSESSMENT_ITEM_ROOT is used.
    """
    try:
        folder = os.path.join(settings.ASSESSMENT_ITEM_ROOT, zf.read("channel.name"))
    except KeyError:
        # 0.16 legacy assessment zip no longer comes with a channel.name file
        folder = settings.KHAN_ASSESSMENT_ITEM_ROOT

    logging.info("Unpacking to folder {}...".format(folder))
    ensure_dir(folder)
    zf.extractall(folder)

    # If assessmentitems.version exists, copy it to another location outside
    # of the channel folder because for some reason a test expects it to be
    # there.
    version_src = os.path.join(folder, 'assessmentitems.version')
    version_dst = os.path.join(
        settings.ASSESSMENT_ITEM_ROOT,
        'assessmentitems.version'
    )

    if version_dst == version_src:
        return

    if os.path.isfile(version_dst):
        os.unlink(version_dst)

    # Test that file exists because there's a test that mocks unzipping and
    # then this would fail because a file that should exist doesn't (doh)
    if os.path.isfile(version_src):
        # Ensure that special files are in their configured locations
        shutil.copy(version_src, version_dst)
def move_exercises(lang_code):
    """Move downloaded localized exercises into the static exercise directory
    for *lang_code*, then remove the (now empty) download directory."""
    lang_pack_location = os.path.join(LOCALE_ROOT, lang_code)
    src_exercise_dir = os.path.join(lang_pack_location, "exercises")
    dest_exercise_dir = get_localized_exercise_dirpath(lang_code)

    if not os.path.exists(src_exercise_dir):
        logging.warn("Could not find downloaded exercises; skipping: %s" % src_exercise_dir)
        return

    # Move over one at a time, to combine with any other resources that were there before.
    ensure_dir(dest_exercise_dir)
    all_exercise_files = glob.glob(os.path.join(src_exercise_dir, "*.html"))
    logging.info("Moving %d downloaded exercises to %s" % (len(all_exercise_files), dest_exercise_dir))

    for exercise_file in all_exercise_files:
        shutil.move(exercise_file, os.path.join(dest_exercise_dir, os.path.basename(exercise_file)))

    logging.debug("Removing empty directory")  # typo fixed ("emtpy")
    try:
        shutil.rmtree(src_exercise_dir)
    except Exception as e:
        # Fixed copy-paste bug: message previously said "dubbed video directory".
        logging.error("Error removing exercise directory (%s): %s" % (src_exercise_dir, e))
def handle(self, *args, **options):
    """Package the central-server install into a zip archive for the
    requested platform."""
    if not settings.CENTRAL_SERVER:
        raise CommandError("Disabled for distributed servers, until we can figure out what to do with ")

    options['platform'] = options['platform'].lower()  # normalize
    if options['platform'] not in ["all", "linux", "macos", "darwin", "windows"]:
        raise CommandError("Unrecognized platform: %s; will include ALL files." % options['platform'])

    # Step 0: refresh all resources
    get_dubbed_video_map(force=True)  # force a remote download

    # Step 1: recursively add all static files
    kalite_base = os.path.realpath(settings.PROJECT_PATH + "/../")
    files_dict = recursively_add_files(dirpath=kalite_base, **options)

    # Step 2: Add a local_settings.py file.
    #   For distributed servers, this is a copy of the local local_settings.py,
    #   with a few properties (specified as command-line options) overridden
    ls_file = create_local_settings_file(
        location=os.path.realpath(kalite_base + "/kalite/local_settings.py"),
        server_type=options['server_type'],
        locale=options['locale'],
        central_server=options["central_server"])
    files_dict[ls_file] = {"dest_path": "kalite/local_settings.py"}

    # Step 3: select output file.
    if not options['file']:
        options['file'] = create_default_archive_filename(options)

    # Step 4: package into a zip file; realpath allows relative output paths.
    ensure_dir(os.path.realpath(os.path.dirname(options["file"])))
    system_specific_zipping(
        files_dict=dict([(v["dest_path"], src_path) for src_path, v in files_dict.iteritems()]),
        zip_file=options["file"],
        compression=ZIP_DEFLATED if options['compress'] else ZIP_STORED,
        callback=_default_callback_zip if options["verbosity"] else None,
    )
def update_metadata(package_metadata, version=SHORTVERSION):
    """
    We've zipped the packages, and now have unzipped & zipped sizes.
    Update this info in the local metadata (but not inside the zip).
    """
    master_filepath = get_language_pack_availability_filepath(version=version)
    master_metadata = softload_json(master_filepath, logger=logging.warn,
                                    errmsg="Error opening master language pack metadata")

    for lang_code, new_meta in package_metadata.iteritems():
        lang_code_ietf = lcode_to_ietf(lang_code)

        # Merge the freshly computed values into any stored metadata.
        metadata_filepath = get_language_pack_metadata_filepath(lang_code_ietf, version=version)
        merged_meta = softload_json(metadata_filepath, logger=logging.warn,
                                    errmsg="Error opening %s language pack metadata" % lang_code)
        merged_meta.update(new_meta)

        # Write locally (this is used on download by distributed server to update it's database)
        with open(metadata_filepath, 'w') as fh:
            json.dump(merged_meta, fh)

        # Update master (this is used for central server to handle API requests for data)
        master_metadata[lang_code_ietf] = merged_meta

    # Save updated master
    ensure_dir(os.path.dirname(master_filepath))
    with open(master_filepath, 'w') as fh:
        json.dump(master_metadata, fh)
    logging.info("Local record of translations updated")
def unpack_zipfile_to_content_folder(zf):
    """Extract an assessment-item zipfile and mirror assessmentitems.version
    into ASSESSMENT_ITEM_ROOT.

    :param zf: open zipfile; an embedded "channel.name" entry picks the
        per-channel target folder, otherwise the legacy root is used.
    """
    try:
        folder = os.path.join(settings.ASSESSMENT_ITEM_ROOT, zf.read("channel.name"))
    except KeyError:
        # 0.16 legacy assessment zip no longer comes with a channel.name file
        folder = settings.KHAN_ASSESSMENT_ITEM_ROOT

    logging.info("Unpacking to folder {}...".format(folder))
    ensure_dir(folder)
    zf.extractall(folder)

    # If assessmentitems.version exists, copy it to another location outside
    # of the channel folder because for some reason a test expects it to be
    # there.
    version_src = os.path.join(folder, 'assessmentitems.version')
    version_dst = os.path.join(settings.ASSESSMENT_ITEM_ROOT, 'assessmentitems.version')

    if version_dst == version_src:
        return

    if os.path.isfile(version_dst):
        os.unlink(version_dst)

    # Test that file exists because there's a test that mocks unzipping and
    # then this would fail because a file that should exist doesn't (doh)
    if os.path.isfile(version_src):
        # Ensure that special files are in their configured locations
        shutil.copy(version_src, version_dst)
def handle(self, *args, **options):
    """
    Rebuild the dubbed-video map (from a cached CSV when fresh enough and
    not --force), save it, then log a per-language diff against the
    previously stored map.
    """
    # for comparison purposes
    old_map = os.path.exists(DUBBED_VIDEOS_MAPPING_FILEPATH) and copy.deepcopy(get_dubbed_video_map()) or {}

    cache_filepath = options["cache_filepath"] or os.path.join(settings.MEDIA_ROOT, 'khan_dubbed_videos.csv')
    max_cache_age = (not options["force"] and options["max_cache_age"]) or 0.0

    if os.path.exists(cache_filepath) and datediff(
            datetime.datetime.now(),
            datetime.datetime.fromtimestamp(os.path.getctime(cache_filepath)),
            units="days") <= max_cache_age:
        # Use cached data to generate the video map
        # (fixed resource leak: context manager instead of unclosed open().read())
        with open(cache_filepath, "r") as f:
            csv_data = f.read()
    else:
        csv_data = download_ka_dubbed_video_mappings(cache_filepath=cache_filepath)

    # Use the CSV data to generate the video map
    raw_map = generate_dubbed_video_mappings(csv_data=csv_data)

    # Now we've built the map. Save it.
    ensure_dir(os.path.dirname(DUBBED_VIDEOS_MAPPING_FILEPATH))
    logging.info("Saving data to %s" % DUBBED_VIDEOS_MAPPING_FILEPATH)
    with open(DUBBED_VIDEOS_MAPPING_FILEPATH, "w") as fp:
        json.dump(raw_map, fp)

    new_map = get_dubbed_video_map(reload=True)

    # Now tell the user about what changed.
    added_languages = set(new_map.keys()) - set(old_map.keys())
    removed_languages = set(old_map.keys()) - set(new_map.keys())
    if added_languages or removed_languages:
        logging.info("*** Added support for %2d languages; removed support for %2d languages. ***" %
                     (len(added_languages), len(removed_languages)))

    for lang_code in sorted(list(set(new_map.keys()).union(set(old_map.keys())))):
        # BUG FIX: the loop iterates the UNION of language codes, so a code
        # present in only one map raised KeyError before; default to {}.
        old_videos = old_map.get(lang_code, {})
        new_videos = new_map.get(lang_code, {})
        added_videos = set(new_videos.keys()) - set(old_videos.keys())
        removed_videos = set(old_videos.keys()) - set(new_videos.keys())
        shared_keys = set(new_videos.keys()).intersection(set(old_videos.keys()))
        changed_videos = [vid for vid in shared_keys if old_videos[vid] != new_videos[vid]]
        logging.info("\t%5s: Added %d videos, removed %3d videos, changed %3d videos." %
                     (lang_code, len(added_videos), len(removed_videos), len(changed_videos)))

    logging.info("Done.")
def run_makemessages(ignore_patterns_py=None, ignore_patterns_js=None, verbosity=0):
    """Run makemessages command for english po files.

    :param ignore_patterns_py: extra glob ignore patterns for the .py/.html pass.
    :param ignore_patterns_js: extra glob ignore patterns for the .js pass.
    :param verbosity: verbosity forwarded to the makemessages command.
    """
    # Fixed: mutable default arguments ([]) replaced with None sentinels.
    if ignore_patterns_py is None:
        ignore_patterns_py = []
    if ignore_patterns_js is None:
        ignore_patterns_js = []

    # Do some packages only
    python_package_dirs = glob.glob(os.path.join(PROJECT_ROOT, 'python-packages', '*'))
    ignored_packages = [os.path.join('*/python-packages/', os.path.basename(pp))
                        for pp in python_package_dirs
                        if os.path.basename(pp) not in ['securesync', 'fle_utils']]

    # Besides externally requested ignores, add on a few standard ones.
    ignore_shared = ignored_packages + ['*/data/*', '*/.git/*', '*/migrations/*',
                                        '*/node_modules/*', '*/fle_utils/chronograph/*']
    ignore_patterns_py = ignore_patterns_py + ignore_shared + ['*/static-libraries/*']
    ignore_patterns_js = ignore_patterns_js + ignore_shared + \
        ['*/kalite/static/*', '*/static-libraries/admin/*', '*/static-libraries/js/i18n/*',
         '*/kalite/distributed/static/khan-exercises/*'] + ['*jquery*', '*bootstrap*']

    logging.debug("Creating / validating locale root folder")
    ensure_dir(LOCALE_ROOT)

    # Command must be run from project root
    logging.debug("Moving to project root directory")
    os.chdir(PROJECT_ROOT)

    call_command('clean_pyc', path=PROJECT_ROOT)

    logging.info("Executing makemessages command")
    # Generate english po file
    sys.stdout.write("\n\nCompiling .py / .html files... ")
    call_command('makemessages', extensions=['html', 'py'], verbosity=verbosity,
                 locale='en', ignore_patterns=ignore_patterns_py, no_obsolete=True)

    # Generate english po file for javascript
    sys.stdout.write("\n\nCompiling .js files... ")
    call_command('makemessages', extensions=['js'], domain='djangojs', verbosity=verbosity,
                 locale='en', ignore_patterns=ignore_patterns_js, no_obsolete=True)
def move_srts(lang_code):
    """
    Srts live in the locale directory, but that's not exposed at any URL.  So
    instead, we have to move the srts out to /static/subtitles/[lang_code]/

    :param lang_code: language code whose subtitle files are moved.
    """
    # NOTE(review): the original computed `lang_code_ietf` and
    # `subtitles_static_dir` but never used them; both were side-effect-free
    # conversions, so the dead locals were removed.
    lang_code_django = lcode_to_django_dir(lang_code)

    src_dir = os.path.join(settings.USER_WRITABLE_LOCALE_DIR, lang_code_django, "subtitles")
    dest_dir = get_srt_path(lang_code_django)
    ensure_dir(dest_dir)

    lang_subtitles = glob.glob(os.path.join(src_dir, "*.srt"))
    logging.info("Moving %d subtitles from %s to %s" % (len(lang_subtitles), src_dir, dest_dir))

    for fil in lang_subtitles:
        srt_dest_path = os.path.join(dest_dir, os.path.basename(fil))
        if os.path.exists(srt_dest_path):
            os.remove(srt_dest_path)  # we're going to replace any srt with a newer version
        shutil.move(fil, srt_dest_path)

    if not os.path.exists(src_dir):
        logging.info("No subtitles for language pack %s" % lang_code)
    elif os.listdir(src_dir):
        logging.warn("%s is not empty; will not remove. Please check that all subtitles were moved." % src_dir)
    else:
        logging.info("Removing empty source directory (%s)." % src_dir)
        shutil.rmtree(src_dir)
def generate_srt_availability_file(lang_code):
    '''
    For compatibility with versions less than 0.10.3, we need to generate
    this json file that contains the srts for the videos.
    '''
    # this path is a direct copy of the path found in the old function that generated this file
    srts_file_dest_path = os.path.join(settings.STATIC_ROOT, 'data', 'subtitles',
                                       'languages', "%s_available_srts.json") % lang_code
    ensure_dir(os.path.dirname(srts_file_dest_path))

    srts_path = get_srt_path(lang_code)  # not sure yet about this; change once command is complete
    try:
        files = os.listdir(srts_path)
    except OSError:  # directory doesnt exist or we cant read it
        files = []

    # BUG FIX: str.rstrip(".srt") strips any trailing '.', 's', 'r' or 't'
    # CHARACTERS (it takes a character set, not a suffix), mangling youtube
    # ids like "abct.srt" -> "abc".  Strip the literal ".srt" suffix instead.
    yt_ids = [f[:-len(".srt")] if f.endswith(".srt") else f for f in files]
    srts_dict = {'srt_files': yt_ids}

    with open(srts_file_dest_path, 'wb') as fp:
        logging.debug('Creating %s', srts_file_dest_path)
        json.dump(srts_dict, fp)

    return yt_ids
def update_templates():
    """Update template po files"""
    # Copy the english po files out to POT_PATH, where they are exposed
    # as .pot templates.
    logging.info("Copying english po files to %s" % POT_PATH)
    ensure_dir(POT_PATH)
    for po_name, pot_name in (("django.po", "kalite.pot"), ("djangojs.po", "kalitejs.pot")):
        shutil.copy(get_po_filepath(lang_code="en", filename=po_name),
                    os.path.join(POT_PATH, pot_name))
def setUp(self):
    """Patch tempfile.gettempdir so temporary images are written under a
    directory we control ("tmp/" relative to the cwd)."""
    self.tempdir_patch = patch.object(tempfile, "gettempdir")
    self.addCleanup(self.tempdir_patch.stop)
    self.gettempdir_method = self.tempdir_patch.start()
    # make sure we control the temp dir where temporary images are written
    controlled_tmp = os.path.abspath("tmp/")
    self.gettempdir_method.return_value = controlled_tmp
    self.fake_temp_dir = controlled_tmp
    ensure_dir(self.fake_temp_dir)
def __init__(self, *args, **kwargs):
    """Set up database, users, browser and output directory for taking
    screenshots."""
    # It's not good to override __init__ for classes that inherit from TestCase
    # Since we're hackily inheriting here, we have to hackily invoke __init__
    # Perhaps better would be to decouple this class from the testing framework
    # by ditching the various mixins (they invoke TestCase methods) and just calling
    # selenium methods directly, as the mixins are a thin wrapper for that.
    # -- M.C. Gallaspy, 1/21/2015
    KALiteBrowserTestCase.__init__(self, "_fake_test")
    self.verbosity = kwargs['verbosity']

    # make sure output path exists and is empty
    if kwargs['output_dir']:
        self.output_path = os.path.join(os.path.realpath(os.getcwd()), kwargs['output_dir'])
    else:
        self.output_path = settings.SCREENSHOTS_OUTPUT_PATH
    ensure_dir(self.output_path)

    # make sure directory is empty from screenshot files
    png_path = os.path.join(self.output_path, "*%s" % settings.SCREENSHOTS_EXTENSION)
    pngs = glob.glob(png_path)
    if pngs and not kwargs['no_del']:
        self.logwarn("==> Deleting existing screenshots: %s ..." % png_path)
        for filename in pngs:
            os.remove(filename)

    # setup database to use and auto-create admin user
    self.loginfo("==> Setting-up database ...")
    self.admin_user = reset_sqlite_database(self.admin_username, self.admin_email,
                                            self.default_password, verbosity=self.verbosity)
    self.admin_pass = self.default_password
    if not self.admin_user:
        raise Exception("==> Did not successfully setup database!")

    # Default facility required to avoid pernicious facility selection page
    Facility.initialize_default_facility("Facility Dos")
    facility = self.facility = Facility.objects.get(name="Facility Dos")
    self.create_student(username=self.student_username, password=self.default_password, facility=facility)
    self.create_teacher(username=self.coach_username, password=self.default_password, facility=facility)

    self.persistent_browser = True
    self.max_wait_time = kwargs.get('max_wait_time', 30)

    self.setUpClass()
    self.loginfo("==> Setting-up browser ...")
    super(Screenshot, self).setUp()

    # Selenium won't scroll to an element, so we have to make the window size is large enough so that everything is visible
    self.browser.set_window_size(1024, 768)

    # After initializing the server (with setUp) and a browser, set the language
    self.set_session_language(kwargs['language'])

    self.loginfo("==> Browser %s successfully setup with live_server_url %s." %
                 (self.browser.name, self.live_server_url,))
    # BUG FIX: report the directory actually used (self.output_path) -- the
    # original always logged settings.SCREENSHOTS_OUTPUT_PATH even when
    # kwargs['output_dir'] overrode it.
    self.loginfo("==> Saving screenshots to %s ..." % (self.output_path,))
def setUp(self):
    """Redirect tempfile.gettempdir to a known local directory so that
    temporary images land somewhere we can inspect and clean up."""
    self.tempdir_patch = patch.object(tempfile, "gettempdir")
    self.addCleanup(self.tempdir_patch.stop)
    self.gettempdir_method = self.tempdir_patch.start()
    # make sure we control the temp dir where temporary images are written
    fake_dir = os.path.abspath("tmp/")
    self.gettempdir_method.return_value = fake_dir
    self.fake_temp_dir = fake_dir
    ensure_dir(self.fake_temp_dir)
def update_templates():
    """Update template po files"""
    pot_path = os.path.join(settings.DATA_PATH, "i18n", "pot")
    # Copy the english po files out so they are exposed as .pot templates.
    logging.info("Copying english po files to %s" % pot_path)
    ensure_dir(pot_path)
    english_messages_dir = os.path.join(settings.LOCALE_PATHS[0], "en/LC_MESSAGES")
    shutil.copy(os.path.join(english_messages_dir, "django.po"),
                os.path.join(pot_path, "kalite.pot"))
    shutil.copy(os.path.join(english_messages_dir, "djangojs.po"),
                os.path.join(pot_path, "kalitejs.pot"))
def __init__(self, *args, **kwargs):
    """Bootstrap a full screenshot run: output dir, database, users, browser.

    NOTE: heavy side effects for an __init__ -- see the inherited-TestCase
    caveat below.
    """
    # It's not good to override __init__ for classes that inherit from TestCase
    # Since we're hackily inheriting here, we have to hackily invoke __init__
    # Perhaps better would be to decouple this class from the testing framework
    # by ditching the various mixins (they invoke TestCase methods) and just calling
    # selenium methods directly, as the mixins are a thin wrapper for that.
    # -- M.C. Gallaspy, 1/21/2015
    KALiteBrowserTestCase.__init__(self, "_fake_test")
    self.verbosity = kwargs['verbosity']
    # make sure output path exists and is empty
    if kwargs['output_dir']:
        # A user-supplied dir is resolved relative to the project's parent dir.
        self.output_path = os.path.join(
            os.path.realpath(os.path.join(settings.PROJECT_PATH, '..')),
            kwargs['output_dir'])
    else:
        self.output_path = settings.SCREENSHOTS_OUTPUT_PATH
    ensure_dir(self.output_path)
    # make sure directory is empty from screenshot files
    png_path = os.path.join(self.output_path,
                            "*%s" % settings.SCREENSHOTS_EXTENSION)
    pngs = glob.glob(png_path)
    if pngs and not kwargs['no_del']:
        self.logwarn("==> Deleting existing screenshots: %s ..." % png_path)
        for filename in pngs:
            os.remove(filename)
    # setup database to use and auto-create admin user
    self.loginfo("==> Setting-up database ...")
    self.admin_user = reset_sqlite_database(self.admin_username, self.admin_email, self.default_password, verbosity=self.verbosity)
    self.admin_pass = self.default_password
    if not self.admin_user:
        raise Exception("==> Did not successfully setup database!")
    # Default facility required to avoid pernicious facility selection page
    Facility.initialize_default_facility("Facility Dos")
    facility = self.facility = Facility.objects.get(name="Facility Dos")
    self.create_student(username=self.student_username, password=self.default_password, facility=facility)
    self.create_teacher(username=self.coach_username, password=self.default_password, facility=facility)
    self.persistent_browser = True
    self.max_wait_time = kwargs.get('max_wait_time', 30)
    self.setUpClass()
    self.loginfo("==> Setting-up browser ...")
    super(Screenshot, self).setUp()
    # Selenium won't scroll to an element, so we have to make the window size is large enough so that everything is visible
    self.browser.set_window_size(1024, 768)
    # self.browser.implicitly_wait(3)
    # After initializing the server (with setUp) and a browser, set the language
    self.set_session_language(kwargs['language'])
    self.loginfo("==> Browser %s successfully setup with live_server_url %s." % (self.browser.name, self.live_server_url,))
    self.loginfo("==> Saving screenshots to %s ..." % (settings.SCREENSHOTS_OUTPUT_PATH,))
def update_templates(po_filepaths):
    """Copy each given .po file into POT_DIRPATH with a .pot suffix.

    The template name is the source basename plus a trailing 't'
    (e.g. django.po -> django.pot).
    """
    logging.info("Copying english po files to %s" % POT_DIRPATH)
    # post them to exposed URL
    ensure_dir(POT_DIRPATH)
    for source_path in po_filepaths:
        template_name = os.path.basename(source_path) + 't'
        shutil.copy(source_path, os.path.join(POT_DIRPATH, template_name))
def unpack_language(lang_code, zip_filepath=None, zip_fp=None, zip_data=None):
    """Unpack zipped language pack into locale directory"""
    lang_code = lcode_to_django_dir(lang_code)
    logging.info("Unpacking new translations")
    ensure_dir(get_po_filepath(lang_code=lang_code))
    ## Unpack into temp dir
    # Source priority: open file object, then raw bytes, then a path on disk.
    if zip_fp:
        source = zip_fp
    elif zip_data:
        source = StringIO(zip_data)
    else:
        source = open(zip_filepath, "rb")
    archive = zipfile.ZipFile(source)
    archive.extractall(os.path.join(LOCALE_ROOT, lang_code))
def produce_outputs(src_po_files, dest_path, lang_code):
    """Build django.po/django.mo catalogs in dest_path from the given .po files.

    kalitejs* files are compiled on their own into djangojs.po/.mo; every other
    file is concatenated into a single build catalog, but only if its path
    matches the current version or the language's "KA Lite UI" po file.
    NOTE(review): `version` is not defined in this function -- presumably a
    module-level global; confirm.
    Returns the path of the final django.po file.
    """
    # ensure directory exists in locale folder, and then overwrite local po files with new ones
    ensure_dir(dest_path)
    dest_file = os.path.join(dest_path, 'django.po')
    dest_mo_file = os.path.join(dest_path, 'django.mo')
    build_file = os.path.join(
        dest_path,
        'djangobuild.po')  # so we dont clobber previous django.po that we build
    logging.info('Concatenating all po files found...')
    try:
        build_po = polib.pofile(build_file)
    except IOError as e:
        # build_file doesn't exist yet
        build_po = polib.POFile(fpath=build_file)
    for src_file in src_po_files:
        if os.path.basename(src_file).startswith('kalitejs'):
            # JS catalogs must stay separate so Django serves them as djangojs.
            logging.debug('Compiling %s on its own...' % src_file)
            js_po_file = polib.pofile(src_file)
            js_mo_file = os.path.join(dest_path, 'djangojs.mo')
            js_po_file.save(os.path.join(dest_path, 'djangojs.po'))
            js_po_file.save_as_mofile(js_mo_file)
        else:
            # Make sure we only concatenate .po files of the same version that we need.
            versioned_po_filename = os.path.join("versioned",
                                                 "%s-django") % (version, )
            kalite_po_filename = os.path.join(
                "KA Lite UI", "kalite-%s.po") % (lang_code, )
            if versioned_po_filename in src_file or kalite_po_filename in src_file:
                logging.debug('Concatenating %s with %s...' %
                              (src_file, build_file))
                src_po = polib.pofile(src_file)
                build_po.merge(src_po)
            else:
                logging.debug(
                    "Ignoring %s because it's NOT for version %s." % (
                        src_file,
                        version,
                    ))
    # de-obsolete messages
    for poentry in build_po:
        # ok build_po appears to be a list, but not actually one. Hence just doing
        # a list comprehension over it won't work. So we unobsolete entries so that
        # they can be detected and turned into a mo file
        poentry.obsolete = False
    build_po.save()
    build_po.save_as_mofile(dest_mo_file)
    shutil.move(build_file, dest_file)
    return dest_file
def _file_handler(filename=None, loggername=None):
    """Return a file handler with a filename specific to the logging type"""
    if not filename:
        filename = (loggername + ".log") if loggername else "stats.log"
    # Make sure that the path exists for logging to GO!
    ensure_dir(STATS_LOG_DIRPATH)
    handler = logging.FileHandler(
        os.path.join(STATS_LOG_DIRPATH, filename),
        encoding='utf-8',
        delay=True)
    handler.setFormatter(logging.Formatter('%(asctime)s - %(message)s'))
    return handler
def extract_catalog_files(zf, lang):
    """Extract the frontend/backend .mo catalogs from a language-pack zip into
    the language's django locale directory (as djangojs.mo / django.mo)."""
    lang = lcode_to_django_lang(lang)
    target_dir = get_po_filepath(lang)
    ensure_dir(target_dir)
    name_map = {"frontend.mo": "djangojs.mo", "backend.mo": "django.mo"}
    for zip_name, django_name in name_map.items():
        destination = os.path.join(target_dir, django_name)
        logging.debug("writing to %s" % destination)
        source_file = zf.open(zip_name)
        with open(destination, "wb") as destination_file:
            shutil.copyfileobj(source_file, destination_file)
def unpack_language(lang_code, zip_filepath=None, zip_fp=None, zip_data=None):
    """Unpack zipped language pack into locale directory"""
    lang_code = lcode_to_django_dir(lang_code)
    logging.info("Unpacking new translations")
    ensure_dir(get_po_filepath(lang_code=lang_code))
    # # Unpack into temp dir
    # Source priority: open file object, then raw zip bytes, then a path on disk.
    try:
        z = zipfile.ZipFile(zip_fp or (zip_data and StringIO(zip_data)) or open(zip_filepath, "rb"))
    except zipfile.BadZipfile as e:
        # Need to add more information on the errror message.
        # See http://stackoverflow.com/questions/6062576/adding-information-to-a-python-exception
        # NOTE: Python 2-only three-argument raise syntax; re-raises the same
        # exception type with a user-facing hint appended.
        raise type(e), type(e)(e.message + _("Language pack corrupted. Please try downloading the language pack again in a few minutes."))
    z.extractall(os.path.join(settings.USER_WRITABLE_LOCALE_DIR, lang_code))
def move_dubbed_video_map(lang_code):
    """Move a downloaded dubbed-video mapping file from the language pack dir
    to its configured location, then remove the now-empty dubbed_videos dir."""
    lang_pack_location = os.path.join(settings.USER_WRITABLE_LOCALE_DIR, lang_code)
    dubbed_video_dir = os.path.join(lang_pack_location, "dubbed_videos")
    dvm_filepath = os.path.join(dubbed_video_dir, os.path.basename(settings.DUBBED_VIDEOS_MAPPING_FILEPATH))
    if not os.path.exists(dvm_filepath):
        logging.error("Could not find downloaded dubbed video filepath: %s" % dvm_filepath)
    else:
        logging.debug("Moving dubbed video map to %s" % settings.DUBBED_VIDEOS_MAPPING_FILEPATH)
        ensure_dir(os.path.dirname(settings.DUBBED_VIDEOS_MAPPING_FILEPATH))
        shutil.move(dvm_filepath, settings.DUBBED_VIDEOS_MAPPING_FILEPATH)
        logging.debug("Removing empty directory")
        try:
            shutil.rmtree(dubbed_video_dir)
        except Exception as e:
            # Best-effort cleanup; failing to remove the dir is non-fatal.
            logging.error("Error removing dubbed video directory (%s): %s" % (dubbed_video_dir, e))
def move_dubbed_video_map(lang_code):
    """Move a downloaded dubbed-video mapping file from the language pack dir
    to its configured location, then remove the now-empty dubbed_videos dir.

    :param lang_code: language directory name under LOCALE_ROOT.
    """
    lang_pack_location = os.path.join(LOCALE_ROOT, lang_code)
    dubbed_video_dir = os.path.join(lang_pack_location, "dubbed_videos")
    dvm_filepath = os.path.join(dubbed_video_dir, os.path.basename(DUBBED_VIDEOS_MAPPING_FILEPATH))
    if not os.path.exists(dvm_filepath):
        logging.error("Could not find downloaded dubbed video filepath: %s" % dvm_filepath)
    else:
        logging.debug("Moving dubbed video map to %s" % DUBBED_VIDEOS_MAPPING_FILEPATH)
        ensure_dir(os.path.dirname(DUBBED_VIDEOS_MAPPING_FILEPATH))
        shutil.move(dvm_filepath, DUBBED_VIDEOS_MAPPING_FILEPATH)
        # Fixed log-message typo ("emtpy" -> "empty"), matching the sibling
        # variant of this function elsewhere in the file.
        logging.debug("Removing empty directory")
        try:
            shutil.rmtree(dubbed_video_dir)
        except Exception as e:
            # Best-effort cleanup; failing to remove the dir is non-fatal.
            logging.error("Error removing dubbed video directory (%s): %s" % (dubbed_video_dir, e))
def handle(self, *args, **options):
    """Download dubbed videos for a language: by explicit ids, by topic, or
    (by default) every video in the dubbed-video map."""
    if settings.CENTRAL_SERVER:
        raise CommandError(
            "This must only be run on the distributed server.")
    if not options["lang_code"]:
        raise CommandError("You must specify a language code.")
    # ensure_dir(settings.CONTENT_ROOT)
    # Get list of videos
    lang_code = lcode_to_ietf(options["lang_code"])
    video_map = get_dubbed_video_map(lang_code) or {}
    # Selection cascade: explicit --video_ids, else the topic's videos,
    # else everything in the map.
    video_ids = options["video_ids"].split(
        ",") if options["video_ids"] else None
    video_ids = video_ids or ([
        vid["id"] for vid in get_topic_videos(topic_id=options["topic_id"])
    ] if options["topic_id"] else None)
    video_ids = video_ids or video_map.keys()
    # Download the videos
    for video_id in video_ids:
        if video_id in video_map:
            youtube_id = video_map[video_id]
        elif video_id in video_map.values():
            # Perhaps they sent in a youtube ID? We can handle that!
            youtube_id = video_id
        else:
            logging.error("No mapping for video_id=%s; skipping" % video_id)
            continue
        try:
            scrape_video(youtube_id=youtube_id, format=options["format"], force=options["force"])
            #scrape_thumbnail(youtube_id=youtube_id)
            logging.info(
                "Access video %s at %s" %
                (youtube_id, get_node_cache("Video")[video_id][0]["path"]))
        except Exception as e:
            # Keep going; one failed download shouldn't abort the batch.
            logging.error("Failed to download video %s: %s" % (youtube_id, e))
    logging.info("Process complete.")
def handle(self, *args, **options):
    """Rebuild the dubbed-video mapping from the KA CSV and per-language API,
    save it to disk, and log a per-language diff against the previous map."""
    # for comparison purposes
    old_map = os.path.exists(DUBBED_VIDEOS_MAPPING_FILEPATH) and copy.deepcopy(get_dubbed_video_map()) or {}
    cache_filepath = options["cache_filepath"] or os.path.join(settings.MEDIA_ROOT, 'khan_dubbed_videos.csv')
    # NOTE(review): max_cache_age is computed but never used below -- confirm
    # whether it should be passed to download_ka_dubbed_video_csv.
    max_cache_age = (not options["force"] and options["max_cache_age"]) or 0.0
    csv_data = download_ka_dubbed_video_csv(cache_filepath=cache_filepath)
    # Use cached data to generate the video map
    raw_map = generate_dubbed_video_mappings_from_csv(csv_data=csv_data)
    # Remove any dummy (empty) entries, as this breaks everything on the client
    if "" in raw_map:
        del raw_map[""]
    # Overlay fresher per-language data pulled straight from the API.
    for lang_code in settings.DUBBED_LANGUAGES_FETCHED_IN_API:
        logging.info("Updating {} from the API".format(lang_code))
        map_from_api = dubbed_video_data_from_api(lang_code)
        lang_metadata = get_code2lang_map(lang_code)
        lang_ka_name = lang_metadata["ka_name"]
        raw_map[lang_ka_name].update(map_from_api)
    # Now we've built the map. Save it.
    ensure_dir(os.path.dirname(DUBBED_VIDEOS_MAPPING_FILEPATH))
    logging.info("Saving data to %s" % DUBBED_VIDEOS_MAPPING_FILEPATH)
    with open(DUBBED_VIDEOS_MAPPING_FILEPATH, "w") as fp:
        json.dump(raw_map, fp)
    new_map = get_dubbed_video_map(reload=True)
    # Now tell the user about what changed.
    added_languages = set(new_map.keys()) - set(old_map.keys())
    removed_languages = set(old_map.keys()) - set(new_map.keys())
    if added_languages or removed_languages:
        logging.info("*** Added support for %2d languages; removed support for %2d languages. ***" % (len(added_languages), len(removed_languages)))
    for lang_code in sorted(list(set(new_map.keys()).union(set(old_map.keys())))):
        added_videos = set(new_map.get(lang_code, {}).keys()) - set(old_map.get(lang_code, {}).keys())
        removed_videos = set(old_map.get(lang_code, {}).keys()) - set(new_map.get(lang_code, {}).keys())
        shared_keys = set(new_map.get(lang_code, {}).keys()).intersection(set(old_map.get(lang_code, {}).keys()))
        changed_videos = [vid for vid in shared_keys if old_map.get(lang_code, {})[vid] != new_map.get(lang_code, {})[vid]]
        logging.info("\t%5s: Added %d videos, removed %3d videos, changed %3d videos." % (lang_code, len(added_videos), len(removed_videos), len(changed_videos)))
    logging.info("Done.")
def produce_outputs(src_po_files, dest_path, lang_code):
    """Build django.po/django.mo catalogs in dest_path from the given .po files.

    kalitejs* files are compiled on their own into djangojs.po/.mo; every other
    file is concatenated into one build catalog, but only if its path matches
    the current version or the language's "KA Lite UI" po file.
    NOTE(review): `version` is not defined in this function -- presumably a
    module-level global; confirm.
    Returns the path of the final django.po file.
    """
    # ensure directory exists in locale folder, and then overwrite local po files with new ones
    ensure_dir(dest_path)
    dest_file = os.path.join(dest_path, 'django.po')
    dest_mo_file = os.path.join(dest_path, 'django.mo')
    build_file = os.path.join(dest_path, 'djangobuild.po')  # so we dont clobber previous django.po that we build
    logging.info('Concatenating all po files found...')
    try:
        build_po = polib.pofile(build_file)
    except IOError as e:
        # build_file doesn't exist yet
        build_po = polib.POFile(fpath=build_file)
    for src_file in src_po_files:
        if os.path.basename(src_file).startswith('kalitejs'):
            # JS catalogs must stay separate so Django serves them as djangojs.
            logging.debug('Compiling %s on its own...' % src_file)
            js_po_file = polib.pofile(src_file)
            js_mo_file = os.path.join(dest_path, 'djangojs.mo')
            js_po_file.save(os.path.join(dest_path, 'djangojs.po'))
            js_po_file.save_as_mofile(js_mo_file)
        else:
            # Make sure we only concatenate .po files of the same version that we need.
            versioned_po_filename = os.path.join("versioned", "%s-django") % (version,)
            kalite_po_filename = os.path.join("KA Lite UI", "kalite-%s.po") % (lang_code,)
            if versioned_po_filename in src_file or kalite_po_filename in src_file:
                logging.debug('Concatenating %s with %s...' % (src_file, build_file))
                src_po = polib.pofile(src_file)
                build_po.merge(src_po)
            else:
                logging.debug("Ignoring %s because it's NOT for version %s." % (src_file, version,))
    # de-obsolete messages
    for poentry in build_po:
        # ok build_po appears to be a list, but not actually one. Hence just doing
        # a list comprehension over it won't work. So we unobsolete entries so that
        # they can be detected and turned into a mo file
        poentry.obsolete = False
    build_po.save()
    build_po.save_as_mofile(dest_mo_file)
    shutil.move(build_file, dest_file)
    return dest_file
def handle_noargs(self, **kwargs):
    """Download the pseudo-language catalogs from CrowdIn, overwrite the
    English po files with them, and recompile messages."""
    if not settings.IN_CONTEXT_LOCALIZED:
        msg = "You must have settings.IN_CONTEXT_LOCALIZED = True if you want to run this command."
        raise ImproperlyConfigured(msg)
    # CrowdIn credentials come from the environment, never from source/settings.
    project_id = os.environ.get("CROWDIN_PROJECT_ID")
    project_key = os.environ.get("CROWDIN_PROJECT_KEY")
    if not project_id or not project_key:
        msg = "You must have environment variables CROWDIN_PROJECT_ID and CROWDIN_PROJECT_KEY defined"
        raise ImproperlyConfigured(msg)
    url = CROWDIN_URL_TEMPLATE.format(
        project_identifier=project_id,
        project_key=project_key,
        lang=PSEUDOLANGUAGE,
    )
    resp = requests.get(url)
    resp.raise_for_status()
    # The response body is a zip archive of versioned po files.
    f = StringIO.StringIO(resp.content)
    zf = zipfile.ZipFile(f, "r")
    zf_po_file_name = "versioned/{version}-django.po".format(
        version=SHORTVERSION)
    zf_js_po_file_name = "versioned/{version}-djangojs.po".format(
        version=SHORTVERSION)
    po_file_dir = os.path.join(settings.USER_WRITABLE_LOCALE_DIR, "en", "LC_MESSAGES")
    ensure_dir(po_file_dir)
    zf_po_file = zf.read(zf_po_file_name)
    zf_js_po_file = zf.read(zf_js_po_file_name)
    # Overwrite the English catalogs with the pseudo-localized versions.
    with open(os.path.join(po_file_dir, "django.po"), "w") as f:
        f.write(zf_po_file)
    with open(os.path.join(po_file_dir, "djangojs.po"), "w") as f:
        f.write(zf_js_po_file)
    call_command("compilemessages")
def unpack_language(lang_code, zip_filepath=None, zip_fp=None, zip_data=None):
    """Unpack zipped language pack into locale directory"""
    lang_code = lcode_to_django_dir(lang_code)
    logging.info("Unpacking new translations")
    ensure_dir(get_po_filepath(lang_code=lang_code))
    # # Unpack into temp dir
    # Source priority: open file object, then raw zip bytes, then a path on disk.
    try:
        z = zipfile.ZipFile(zip_fp or (zip_data and StringIO(zip_data)) or open(zip_filepath, "rb"))
    except zipfile.BadZipfile as e:
        # Need to add more information on the errror message.
        # See http://stackoverflow.com/questions/6062576/adding-information-to-a-python-exception
        # NOTE: Python 2-only three-argument raise syntax; re-raises the same
        # exception type with a user-facing hint appended.
        raise type(e), type(e)(e.message + _(
            "Language pack corrupted. Please try downloading the language pack again in a few minutes."
        ))
    z.extractall(os.path.join(settings.USER_WRITABLE_LOCALE_DIR, lang_code))
def update_jsi18n_file(code="en"):
    """
    For efficiency's sake, we cache Django's js i18n catalog: generate it here
    and save it to disk -- it won't change until the next language pack update!
    """
    # This switches the language of the whole thread.
    translation.activate(code)
    output_dir = os.path.join(os.path.dirname(__file__), 'static', 'js', 'i18n')
    ensure_dir(output_dir)
    output_file = os.path.join(output_dir, "%s.js" % code)
    # Build a minimal fake request so the js catalog view can be rendered directly.
    fake_request = HttpRequest()
    fake_request.path = output_file
    fake_request.session = {settings.LANGUAGE_COOKIE_NAME: code}
    catalog_response = javascript_catalog(fake_request, packages=('ka-lite.locale',))
    with open(output_file, "w") as fp:
        fp.write(catalog_response.content)
def update_jsi18n_file(code="en"):
    """
    For efficiency's sake, we want to cache Django's js18n file.
    So, generate that file here, then save to disk--it won't change until the next language pack update!
    """
    translation.activate(code)  # we switch the language of the whole thread
    output_dir = os.path.join(settings.STATIC_ROOT, "js", "i18n")
    ensure_dir(output_dir)
    output_file = os.path.join(output_dir, "%s.js" % code)
    # Build a minimal fake request so Django's js catalog view can be rendered
    # directly, without going through the URL dispatcher.
    request = HttpRequest()
    request.path = output_file
    request.session = {settings.LANGUAGE_COOKIE_NAME: code}
    response = javascript_catalog(request, packages=('ka-lite.locale',))
    with open(output_file, "w") as fp:
        fp.write(response.content)
def handle(self, *args, **options): options['platform'] = options['platform'].lower() # normalize if options['platform'] not in [ "all", "linux", "macos", "darwin", "windows" ]: raise CommandError( "Unrecognized platform: %s; will include ALL files." % options['platform']) # Step 0: refresh all resources if not settings.DEBUG: get_dubbed_video_map(force=True) # force a remote download # Step 1: recursively add all static files kalite_base = os.path.realpath(settings.KALITE_PATH) files_dict = recursively_add_files(dirpath=kalite_base, **options) # Step 2: Add a local_settings.py file. # For distributed servers, this is a copy of the local local_settings.py, # with a few properties (specified as command-line options) overridden ls_file = create_local_settings_file( location=os.path.realpath(kalite_base + "/kalite/local_settings.py"), server_type=options['server_type'], locale=options['locale'], central_server=options["central_server"]) files_dict[ls_file] = {"dest_path": "kalite/local_settings.py"} # Step 3: select output file. if not options['file']: options['file'] = create_default_archive_filename(options) # Step 4: package into a zip file ensure_dir(os.path.realpath(os.path.dirname( options["file"]))) # allows relative paths to be passed.=== system_specific_zipping( files_dict=dict([(v["dest_path"], src_path) for src_path, v in files_dict.iteritems()]), zip_file=options["file"], compression=ZIP_DEFLATED if options['compress'] else ZIP_STORED, callback=_default_callback_zip if options["verbosity"] else None, )
def unpack_zipfile_to_khan_content(zf):
    """Extract an assessment-items zip into KHAN_ASSESSMENT_ITEM_ROOT, then
    move the special metadata files to their configured paths."""
    folder = settings.KHAN_ASSESSMENT_ITEM_ROOT
    ensure_dir(folder)
    zf.extractall(folder)
    # Ensure that special files are in their configured locations
    os.rename(
        os.path.join(settings.KHAN_ASSESSMENT_ITEM_ROOT, 'assessmentitems.version'),
        settings.KHAN_ASSESSMENT_ITEM_VERSION_PATH
    )
    os.rename(
        os.path.join(settings.KHAN_ASSESSMENT_ITEM_ROOT, 'assessmentitems.sqlite'),
        settings.KHAN_ASSESSMENT_ITEM_DATABASE_PATH
    )
    # JSON file is apparently not required (not in the test at least)
    if os.path.isfile(os.path.join(settings.KHAN_ASSESSMENT_ITEM_ROOT, 'assessmentitems.json')):
        os.rename(
            os.path.join(settings.KHAN_ASSESSMENT_ITEM_ROOT, 'assessmentitems.json'),
            settings.KHAN_ASSESSMENT_ITEM_JSON_PATH
        )
def download_ka_dubbed_video_csv(download_url=None, cache_filepath=None):
    """
    Function to do the heavy lifting in getting the dubbed videos map.

    Could be moved into utils

    :param download_url: explicit CSV URL; when None, resolved by following
        Khan Academy's /r/translationmapping redirect.
    :param cache_filepath: local path to (best-effort) cache the CSV data.
    :return: the raw CSV content as a string.
    :raises CommandError: when the CSV download returns a non-200 status.
    """
    # Get the redirect url
    if not download_url:
        logging.info("Getting spreadsheet location from Khan Academy")
        conn = httplib.HTTPConnection("www.khanacademy.org")
        conn.request("GET", "/r/translationmapping")
        r1 = conn.getresponse()
        if not r1.status == 302:
            # TODO: have django email admins when we hit this exception
            raise Exception(
                "Expected redirect response from Khan Academy redirect url.")
        download_url = r1.getheader('Location')
        if "docs.google.com" not in download_url:
            logging.warn("Redirect location no longer in Google docs (%s)" %
                         download_url)
        else:
            # Rewrite the Google Docs edit URL to its CSV-export equivalent.
            download_url = download_url.replace("/edit", "/export?format=csv")
    logging.info("Downloading dubbed video data from %s" % download_url)
    response = requests.get(download_url)
    if response.status_code != 200:
        # BUGFIX: requests.Response has no `.status` attribute (that's
        # httplib); using it here raised AttributeError instead of the
        # intended CommandError. Use `.status_code`.
        raise CommandError(
            "Failed to download dubbed video CSV data: status=%s" %
            response.status_code)
    csv_data = response.content
    # Dump the data to a local cache file
    try:
        ensure_dir(os.path.dirname(cache_filepath))
        with open(cache_filepath, "w") as fp:
            fp.write(csv_data)
    except Exception as e:
        # Caching is best-effort: log and fall through with the in-memory data.
        logging.error(
            "Failed to make a local cache of the CSV data: %s; parsing local data"
            % e)
    return csv_data
def handle_noargs(self, **kwargs):
    """Download the pseudo-language catalogs from CrowdIn, overwrite the
    English po files with them, and recompile messages."""
    if not settings.IN_CONTEXT_LOCALIZED:
        msg = "You must have settings.IN_CONTEXT_LOCALIZED = True if you want to run this command."
        raise ImproperlyConfigured(msg)
    # CrowdIn credentials come from the environment, never from source/settings.
    project_id = os.environ.get("CROWDIN_PROJECT_ID")
    project_key = os.environ.get("CROWDIN_PROJECT_KEY")
    if not project_id or not project_key:
        msg = "You must have environment variables CROWDIN_PROJECT_ID and CROWDIN_PROJECT_KEY defined"
        raise ImproperlyConfigured(msg)
    url = CROWDIN_URL_TEMPLATE.format(
        project_identifier=project_id,
        project_key=project_key,
        lang=PSEUDOLANGUAGE,
    )
    resp = requests.get(url)
    resp.raise_for_status()
    # The response body is a zip archive of versioned po files.
    f = StringIO.StringIO(resp.content)
    zf = zipfile.ZipFile(f, "r")
    zf_po_file_name = "versioned/{version}-django.po".format(version=SHORTVERSION)
    zf_js_po_file_name = "versioned/{version}-djangojs.po".format(version=SHORTVERSION)
    po_file_dir = os.path.join(LOCALE_ROOT, "en", "LC_MESSAGES")
    ensure_dir(po_file_dir)
    zf_po_file = zf.read(zf_po_file_name)
    zf_js_po_file = zf.read(zf_js_po_file_name)
    # Overwrite the English catalogs with the pseudo-localized versions.
    with open(os.path.join(po_file_dir, "django.po"), "w") as f:
        f.write(zf_po_file)
    with open(os.path.join(po_file_dir, "djangojs.po"), "w") as f:
        f.write(zf_js_po_file)
    call_command("compilemessages")
def extract_subtitles(zf, lang):
    """Copy every subtitle entry from a language-pack zip into the language's
    subtitle directory, flattening any leading zip paths."""
    dest_dir = get_subtitle_path(lang_code=lang)
    zip_subdir = "subtitles/"
    ensure_dir(dest_dir)
    for member in zf.namelist():
        if zip_subdir not in member:
            continue
        # files inside zipfiles may come with leading directories in their
        # names, like subtitles/hotdog.vtt. We'll only want the actual filename
        # (hotdog.vtt) when extracting as that's what KA Lite expects.
        dest_path = os.path.join(dest_dir, os.path.basename(member))
        member_fileobj = zf.open(member)
        with open(dest_path, "w") as dest_fileobj:
            shutil.copyfileobj(member_fileobj, dest_fileobj)
def handle(self, *args, **options):
    """Dispatch a content-pack operation: 'download' or 'local'."""
    self.setup(options)
    operation = args[0]
    self.minimal = options.get('minimal', False)
    self.foreground = options.get('foreground', False)
    self.is_template = options.get('template', False)
    if self.is_template:
        # Template mode writes content-item templates to a dedicated dir.
        ensure_dir(django_settings.DB_CONTENT_ITEM_TEMPLATE_DIR)
    if operation == "download":
        self.start(_("Downloading content pack."))
        self.download(*args, **options)
    elif operation == "local":
        self.start(_("Installing a local content pack."))
        self.local(*args, **options)
    else:
        raise CommandError("Unknown operation: %s" % operation)
def unpack_zipfile_to_content_folder(zf):
    """Extract a content pack zip into ASSESSMENT_ITEM_ROOT, namespaced by the
    channel name stored in the zip's channel.name entry (when present)."""
    try:
        channel = zf.read("channel.name")
    except KeyError:
        # Zip has no channel.name entry; fall back to the unnamespaced root.
        channel = ""
    if channel:
        folder = os.path.join(settings.ASSESSMENT_ITEM_ROOT, channel)
    else:
        folder = settings.ASSESSMENT_ITEM_ROOT
    ensure_dir(folder)
    zf.extractall(folder)
    ensure_dir(settings.KHAN_ASSESSMENT_ITEM_ROOT)
    # Ensure that special files are in their configured locations
    shutil.move(os.path.join(folder, 'assessmentitems.version'),
                settings.KHAN_ASSESSMENT_ITEM_VERSION_PATH)
def scrape_exercise(exercise_id, lang_code, force=False):
    """Download a localized exercise HTML file from Khan Academy, skipping the
    download when a local copy exists (unless force). Failures are logged,
    never raised."""
    ietf_lang_code = lcode_to_ietf(lang_code)
    exercise_dest_filepath = get_exercise_filepath(exercise_id, lang_code=lang_code)
    exercise_localized_root = os.path.dirname(exercise_dest_filepath)
    if os.path.exists(exercise_dest_filepath) and not force:
        return
    # NOTE(review): host is hard-coded to the Spanish subdomain
    # (es.khanacademy.org) while the language comes from the ?lang= query
    # param -- confirm the host is intentional.
    exercise_url = "https://es.khanacademy.org/khan-exercises/exercises/%s.html?lang=%s" % (exercise_id, ietf_lang_code)
    logging.info("Retrieving exercise %s from %s" % (exercise_id, exercise_url))
    try:
        ensure_dir(exercise_localized_root)
        resp = requests.get(exercise_url)
        resp.raise_for_status()
        with open(exercise_dest_filepath, "wb") as fp:
            fp.write(resp.content)
    except Exception as e:
        # Best-effort scrape: log and continue without the exercise file.
        logging.error("Failed to download %s: %s" % (exercise_url, e))