Code Example #1
    def serialize(self, queryset, **options):
        """
        Serialize a queryset.
        """
        self.options = options

        self.stream = options.pop("stream", StringIO())
        self.selected_fields = options.pop("fields", None)
        self.use_natural_keys = options.pop("use_natural_keys", False)

        dest_version = options.pop(
            "dest_version", kalite.VERSION
        )  # We're serializing to send to a machine of this version.

        self.start_serialization()
        for obj in queryset:
            # See logic below.  We selectively skip serializing
            #   objects that have a (starting) version greater than the
            #   version we're serializing for.
            v_diff = version_diff(dest_version,
                                  getattr(obj, "minversion", None))
            if v_diff is not None and v_diff < 0:
                continue

            self.start_object(obj)
            # Use the concrete parent class' _meta instead of the object's _meta
            # This is to avoid local_fields problems for proxy models. Refs #17717.
            concrete_model = obj._meta.concrete_model
            for field in concrete_model._meta.local_fields:

                # "and" condition added by KA Lite.
                #
                # Serialize the field UNLESS all of the following are true:
                #   * we've passed in a specific dest_version
                #   * the field is marked with a version
                #   * that version is later than the dest_version
                v_diff = version_diff(dest_version,
                                      getattr(field, "minversion", None))
                if field.serialize and (v_diff is None or v_diff >= 0):
                    if field.rel is None:
                        if self.selected_fields is None or field.attname in self.selected_fields:
                            self.handle_field(obj, field)
                    else:
                        if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
                            self.handle_fk_field(obj, field)
            for field in concrete_model._meta.many_to_many:
                # "and" condition added by KA Lite.  Logic the same as above.
                v_diff = version_diff(dest_version,
                                      getattr(field, "minversion", None))
                if field.serialize and (v_diff is None or v_diff >= 0):
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_m2m_field(obj, field)
            self.end_object(obj)
        self.end_serialization()
        return self.getvalue()
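Because this serializer implements Django's standard Serializer interface, it can be registered as a custom serialization format. A minimal usage sketch, assuming the module above is importable as versioned_json and that myapp.MyModel exists (both names are illustrative, not from the source):

from django.core import serializers

from myapp.models import MyModel  # hypothetical model for illustration

# Register the module shown above under a custom format name.
serializers.register_serializer("versioned-json", "versioned_json")

# Serialize for a peer running 0.10.0: objects and fields whose
# minversion is later than 0.10.0 are silently skipped.
payload = serializers.serialize("versioned-json", MyModel.objects.all(),
                                dest_version="0.10.0")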
Code Example #2
File: utils_tests.py Project: Eleonore9/ka-lite
    def test_sign(self):
        """
        Test the diff in both directions; validate that the results are equal in magnitude and opposite in sign.
        """
        v1 = "0.1"
        v2 = "0.2"

        self.assertTrue(version_diff(v1, v2) < 0, "First version earlier than the second returns negative.")
        self.assertTrue(version_diff(v2, v1) > 0, "Second version earlier than the first returns positive.")
        self.assertTrue(version_diff(v2, v2) == 0, "First version equals the second returns 0.")
Code Example #3
File: versioned_json.py Project: AbhiUnni/ka-lite
    def serialize(self, queryset, **options):
        """
        Serialize a queryset.
        """
        self.options = options

        self.stream = options.pop("stream", StringIO())
        self.selected_fields = options.pop("fields", None)
        self.use_natural_keys = options.pop("use_natural_keys", False)

        dest_version = options.pop("dest_version")  # We're serializing to send to a machine of this version.

        self.start_serialization()
        for obj in queryset:
            # See logic below.  We selectively skip serializing
            #   objects that have a (starting) version greater than the
            #   version we're serializing for.
            v_diff = version_diff(dest_version, getattr(obj, "minversion", None))
            if v_diff is not None and v_diff < 0:
                continue

            self.start_object(obj)
            # Use the concrete parent class' _meta instead of the object's _meta
            # This is to avoid local_fields problems for proxy models. Refs #17717.
            concrete_model = obj._meta.concrete_model
            for field in concrete_model._meta.local_fields:

                # "and" condition added by KA Lite.
                #
                # Serialize the field UNLESS all of the following are true:
                #   * we've passed in a specific dest_version
                #   * the field is marked with a version
                #   * that version is later than the dest_version
                v_diff = version_diff(dest_version, getattr(field, "minversion", None))
                if field.serialize and (v_diff is None or v_diff >= 0):
                    if field.rel is None:
                        if self.selected_fields is None or field.attname in self.selected_fields:
                            self.handle_field(obj, field)
                    else:
                        if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
                            self.handle_fk_field(obj, field)
            for field in concrete_model._meta.many_to_many:
                # "and" condition added by KA Lite.  Logic the same as above.
                v_diff = version_diff(dest_version, getattr(field, "minversion", None))
                if field.serialize and (v_diff is None or v_diff >= 0):
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_m2m_field(obj, field)
            self.end_object(obj)
        self.end_serialization()
        return self.getvalue()
Code Example #4
    def handle(self, *args, **options):

        # Check that we can run
        if not settings.CENTRAL_SERVER:
            raise CommandError("This must only be run on the central server.")
        supported_langs = get_supported_languages()
        if not options["lang_codes"]:
            lang_codes = supported_langs
        else:
            requested_codes = set(options["lang_codes"].split(","))
            lang_codes = [lcode_to_ietf(lc) for lc in requested_codes if lc in supported_langs]
            unsupported_codes = requested_codes - set(lang_codes)
            if unsupported_codes:
                raise CommandError("Requested unsupported languages: %s" % sorted(list(unsupported_codes)))

        # Scrub options
        for key in options:
            # If no_update is set, then disable all update options.
            if key.startswith("update_"):
                options[key] = options[key] and not options["no_update"]

        if version_diff(options["version"], "0.10.3") < 0:
            raise CommandError("This command cannot be used for versions before 0.10.3")

        if options['low_mem']:
            logging.info('Making the GC more aggressive...')
            gc.set_threshold(36, 2, 2)

        # For dealing with central server changes across versions
        upgrade_old_schema()

        # Now, we're going to build the language packs, collecting metadata along the way.
        package_metadata = update_language_packs(lang_codes, options)
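The option-scrubbing loop above disables every update_* flag in a single pass whenever no_update is set. A quick illustration with an assumed options dict:

options = {"no_update": True, "update_srts": True, "update_dubbed": True}
for key in options:
    if key.startswith("update_"):
        options[key] = options[key] and not options["no_update"]
# options is now {"no_update": True, "update_srts": False, "update_dubbed": False}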
Code Example #5
def increment_language_pack_version(local_meta, updated_meta):
    """Increment language pack version if translations have been updated (start over if software version has incremented)"""
    if not local_meta or version_diff(local_meta.get("software_version"), version.VERSION) < 0:
        # set to one for the first time, or if this is the first build of a new software version
        language_pack_version = 1
    elif local_meta.get("total_translated") == updated_meta.get("approved") and local_meta.get("subtitle_count") == updated_meta.get("subtitle_count"):
        language_pack_version = local_meta.get("language_pack_version") or 1
    else:
        language_pack_version = local_meta.get("language_pack_version") + 1
    return language_pack_version
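A usage sketch for the function above; the dict keys come from the function body, but the concrete values (and the assumption that version.VERSION equals the stored software_version) are illustrative:

local_meta = {
    "software_version": "0.12.0",  # assumed equal to version.VERSION here
    "language_pack_version": 2,
    "total_translated": 500,
    "subtitle_count": 120,
}
updated_meta = {"approved": 510, "subtitle_count": 120}

# 500 != 510, so translations changed: the pack version is bumped to 3.
new_version = increment_language_pack_version(local_meta, updated_meta)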
Code Example #6
File: utils_tests.py Project: Eleonore9/ka-lite
    def test_levels(self):
        """
        Test major, minor, and patch-level differences.
        """

        self.assertEqual(version_diff("1", "2"), -1, "Major version diff (no minor)")
        self.assertEqual(version_diff("1.0", "2.0"), -1, "Major version diff (matching minor)")

        self.assertEqual(version_diff("0.1", "0.2"), -1, "Minor version diff (no patch)")
        self.assertEqual(version_diff("0.1.0", "0.2.0"), -1, "Minor version diff (matching patch)")

        self.assertEqual(version_diff("0.0.1", "0.0.2"), -1, "Patch version diff (no sub-patch)")
        self.assertEqual(version_diff("0.0.1.0", "0.0.2.0"), -1, "Patch version diff (matching sub-patch)")
Code Example #7
File: utils_tests.py Project: Eleonore9/ka-lite
    def test_values(self):
        """
        Test a few different values for the difference.
        """
        self.assertEqual(version_diff("0.1", "0.20"), -19, "abs(diff) > 10")
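Taken together, the tests pin down version_diff's contract: versions are compared level by level, the result is the first nonzero per-level difference (hence -19 for "0.1" vs "0.20"), and, judging by the None checks in the serializer examples, an unknown version yields None. A sketch consistent with that contract; this is a reconstruction for illustration, not the project's actual implementation:

def version_diff(v1, v2):
    """Signed diff of dotted version strings; None if either is unknown."""
    if v1 is None or v2 is None:
        return None
    parts1 = [int(p) for p in str(v1).split(".")]
    parts2 = [int(p) for p in str(v2).split(".")]
    # Pad with zeros so "1" compares like "1.0".
    length = max(len(parts1), len(parts2))
    parts1 += [0] * (length - len(parts1))
    parts2 += [0] * (length - len(parts2))
    for p1, p2 in zip(parts1, parts2):
        if p1 != p2:
            return p1 - p2
    return 0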
Code Example #8
def update_language_packs(lang_codes, options):

    package_metadata = {}

    since_date = datetime.datetime.now() - datetime.timedelta(int(options["days"]))

    if options['update_dubbed']:
        # Get the latest dubbed video map; it's shared across language packs
        force_dubbed_download = not os.path.exists(DUBBED_VIDEOS_MAPPING_FILEPATH) \
            or 0 < datediff(since_date, datetime.datetime.fromtimestamp(os.path.getctime(DUBBED_VIDEOS_MAPPING_FILEPATH)))
        get_dubbed_video_map(force=force_dubbed_download)

    for lang_code in lang_codes:
        lang_code_map = get_supported_language_map(lang_code)
        lang_metadata = {}

        # Step 1: Update / collect srts.  No version needed, we want to share latest always.
        if options['update_srts']:
            update_srts(since_date=since_date, lang_codes=[lang_code_map["amara"]])
        lang_metadata["subtitle_count"] = get_subtitle_count(lang_code_map["amara"])

        # Step 2: Update the dubbed video mappings. No version needed, we want to share latest always.
        dv_map = get_dubbed_video_map(lang_code_map["dubbed_videos"])
        lang_metadata["num_dubbed_videos"] = len(dv_map) if dv_map and version_diff(options["version"], "0.10.3") > 0 else 0

        # Step 3: Update the exercises.  No version needed, we want to share latest always.
        #  TODO(bcipolli): make sure that each language pack only grabs exercises that are included in its topic tree.
        if options['update_exercises'] and version_diff(options["version"], "0.10.3") > 0:
            call_command("scrape_exercises", lang_code=lang_code_map["exercises"])
        lang_metadata["num_exercises"] = get_localized_exercise_count(lang_code_map["exercises"]) if version_diff(options["version"], "0.10.3") > 0 else 0

        # Step 4: Update the crowdin translations.  Version needed!
        #   TODO(bcipolli): skip this when we're going backwards in version.
        if options["no_update"] or version_diff(options["version"], "0.10.3") == 0:
            trans_metadata = {lang_code: get_po_metadata(get_po_build_path(lang_code))}
        else:
            try:
                trans_metadata = update_translations(
                    lang_codes=[lang_code],  # will be converted, as needed
                    zip_file=options['zip_file'],
                    ka_zip_file=options['ka_zip_file'],
                    download_ka_translations=options['update_ka_trans'],
                    download_kalite_translations=options['update_kalite_trans'],
                    use_local=options["use_local"],
                    version=options["version"],
                )
            except SkipTranslations:
                trans_metadata = {lang_code: get_po_metadata(get_po_build_path(lang_code))}
        lang_metadata.update(trans_metadata.get(lang_code, {}))

        # Now create/update unified metadata

        generate_metadata(package_metadata={lang_code: lang_metadata}, version=options["version"], force_version_update=options["force_update"])

        # Zip into language packs
        package_sizes = zip_language_packs(lang_codes=[lang_code], version=options["version"])
        logging.debug("%s sizes: %s" % (lang_code, package_sizes.get(lang_code, {})))

        lang_metadata.update(package_sizes.get(lang_code, {}))

        # Update the metadata with the package size information
        update_metadata({lang_code: lang_metadata}, version=options["version"])

        # Update package metadata
        package_metadata[lang_code] = lang_metadata

    return package_metadata
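The repeated version_diff(options["version"], "0.10.3") > 0 checks gate features (localized exercises, dubbed-video counts) that only ship in language packs built for versions newer than 0.10.3. With illustrative values:

assert version_diff("0.11.0", "0.10.3") > 0       # newer target: exercises are scraped and counted
assert not version_diff("0.10.3", "0.10.3") > 0   # same version: num_exercises stays 0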
Code Example #9
File: versioned_python.py Project: AbhiUnni/ka-lite
def Deserializer(object_list, **options):
    """
    Deserialize simple Python objects back into Django ORM instances.

    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor
    """
    db = options.pop('using', DEFAULT_DB_ALIAS)

    #
    src_version = options.pop("src_version")  # version that was serialized
    dest_version = options.pop("dest_version")  # version that we're deserializing to
    assert dest_version, "For KA Lite, we should always set the dest version to the current device."

    models.get_apps()
    for d in object_list:
        # Look up the model and start building a dict of data for it.
        Model = _get_model(d["model"])

        # See comment below for versioned fields; same logic
        #   applies here as well.
        if hasattr(Model, "version"):
            v_diff = version_diff(Model.minversion, dest_version)
            if v_diff is None or v_diff > 0:
                continue

        data = {Model._meta.pk.attname : Model._meta.pk.to_python(d["pk"])}
        m2m_data = {}

        # Handle each field
        for (field_name, field_value) in d["fields"].iteritems():
            if isinstance(field_value, str):
                field_value = smart_unicode(field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True)

            try:
                field = Model._meta.get_field(field_name)
            except models.FieldDoesNotExist as fdne:
                # If src version is newer than dest version,
                #   or if it's unknown, then assume that the field
                #   is a new one and skip it.
                # We can't know for sure, because
                #   we don't have that field (we are the dest!),
                #   so we don't know what version it came in on.
                v_diff = version_diff(src_version, dest_version)
                if v_diff is None or v_diff > 0:
                    continue

                # Something else must be going on, so re-raise.
                else:
                    raise fdne

            # Handle M2M relations
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
                    def m2m_convert(value):
                        if hasattr(value, '__iter__'):
                            return field.rel.to._default_manager.db_manager(db).get_by_natural_key(*value).pk
                        else:
                            return smart_unicode(field.rel.to._meta.pk.to_python(value))
                else:
                    m2m_convert = lambda v: smart_unicode(field.rel.to._meta.pk.to_python(v))
                m2m_data[field.name] = [m2m_convert(pk) for pk in field_value]

            # Handle FK fields
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                if field_value is not None:
                    if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
                        if hasattr(field_value, '__iter__'):
                            obj = field.rel.to._default_manager.db_manager(db).get_by_natural_key(*field_value)
                            value = getattr(obj, field.rel.field_name)
                            # If this is a natural foreign key to an object that
                            # has a FK/O2O as the foreign key, use the FK value
                            if field.rel.to._meta.pk.rel:
                                value = value.pk
                        else:
                            value = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                        data[field.attname] = value
                    else:
                        data[field.attname] = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                else:
                    data[field.attname] = None

            # Handle all other fields
            else:
                data[field.name] = field.to_python(field_value)

        yield base.DeserializedObject(Model(**data), m2m_data)
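A usage sketch for the deserializer above, assuming the payload is the list-of-dicts structure Django's Python serializer produces (the model label, pk, and field values are illustrative):

payload = [
    {"model": "myapp.mymodel", "pk": 1, "fields": {"name": "example"}},
]

# Deserialize data sent by a 0.11.0 peer onto a 0.10.3 machine; fields
# the local model lacks are assumed to be newer and are skipped.
for deserialized in Deserializer(payload,
                                 src_version="0.11.0",
                                 dest_version="0.10.3"):
    deserialized.save()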
Code Example #10
def Deserializer(object_list, **options):
    """
    Deserialize simple Python objects back into Django ORM instances.

    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor
    """
    db = options.pop('using', DEFAULT_DB_ALIAS)

    #
    src_version = options.pop("src_version",
                              None)  # version that was serialized
    dest_version = options.pop("dest_version",
                               None)  # version that we're deserializing to
    assert dest_version, "For KA Lite, we should always set the dest version to the current device."

    models.get_apps()
    for d in object_list:
        # Look up the model and start building a dict of data for it.
        Model = _get_model(d["model"])

        # See comment below for versioned fields; same logic
        #   applies here as well.
        if hasattr(Model, "version"):
            v_diff = version_diff(Model.minversion, dest_version)
            if v_diff is None or v_diff > 0:
                continue

        data = {Model._meta.pk.attname: Model._meta.pk.to_python(d["pk"])}
        m2m_data = {}

        # Handle each field
        for (field_name, field_value) in d["fields"].iteritems():
            if isinstance(field_value, str):
                field_value = smart_unicode(field_value,
                                            options.get(
                                                "encoding",
                                                settings.DEFAULT_CHARSET),
                                            strings_only=True)

            try:
                field = Model._meta.get_field(field_name)
            except models.FieldDoesNotExist as fdne:
                # If src version is newer than dest version,
                #   or if it's unknown, then assume that the field
                #   is a new one and skip it.
                # We can't know for sure, because
                #   we don't have that field (we are the dest!),
                #   so we don't know what version it came in on.
                v_diff = version_diff(src_version, dest_version)
                if v_diff is None or v_diff > 0:
                    continue

                # Something else must be going on, so re-raise.
                else:
                    raise fdne

            # Handle M2M relations
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                if hasattr(field.rel.to._default_manager,
                           'get_by_natural_key'):

                    def m2m_convert(value):
                        if hasattr(value, '__iter__'):
                            return field.rel.to._default_manager.db_manager(
                                db).get_by_natural_key(*value).pk
                        else:
                            return smart_unicode(
                                field.rel.to._meta.pk.to_python(value))
                else:
                    m2m_convert = lambda v: smart_unicode(
                        field.rel.to._meta.pk.to_python(v))
                m2m_data[field.name] = [m2m_convert(pk) for pk in field_value]

            # Handle FK fields
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                if field_value is not None:
                    if hasattr(field.rel.to._default_manager,
                               'get_by_natural_key'):
                        if hasattr(field_value, '__iter__'):
                            obj = field.rel.to._default_manager.db_manager(
                                db).get_by_natural_key(*field_value)
                            value = getattr(obj, field.rel.field_name)
                            # If this is a natural foreign key to an object that
                            # has a FK/O2O as the foreign key, use the FK value
                            if field.rel.to._meta.pk.rel:
                                value = value.pk
                        else:
                            value = field.rel.to._meta.get_field(
                                field.rel.field_name).to_python(field_value)
                        data[field.attname] = value
                    else:
                        data[field.attname] = field.rel.to._meta.get_field(
                            field.rel.field_name).to_python(field_value)
                else:
                    data[field.attname] = None

            # Handle all other fields
            else:
                data[field.name] = field.to_python(field_value)

        yield base.DeserializedObject(Model(**data), m2m_data)
Code Example #11
def zip_language_packs(lang_codes=None, version=VERSION):
    """Zip up and expose all language packs

    converts all into ietf
    """
    sizes = {}
    lang_codes = lang_codes or os.listdir(LANGUAGE_PACK_BUILD_DIR)
    lang_codes = [lcode_to_ietf(lc) for lc in lang_codes]
    logging.info("Zipping up %d language pack(s)" % len(lang_codes))

    for lang_code_ietf in lang_codes:
        lang_code_map = get_supported_language_map(lang_code_ietf)

        # Initialize values
        sizes[lang_code_ietf] = {"package_size": 0, "zip_size": 0}

        #
        lang_locale_path = get_lp_build_dir(lang_code_ietf, version=version)
        if not os.path.exists(lang_locale_path):
            logging.warn("Unexpectedly skipping missing directory: %s" % lang_code_ietf)
            continue  # actually skip, as the log message states
        elif not os.path.isdir(lang_locale_path):
            logging.error("Skipping language where a file exists where a directory was expected: %s" % lang_code_ietf)
            continue

        # Create a zipfile for this language
        zip_filepath = get_language_pack_filepath(lang_code_ietf, version=version)
        ensure_dir(os.path.dirname(zip_filepath))
        logging.info("Creating zip file in %s" % zip_filepath)
        z = zipfile.ZipFile(zip_filepath, 'w', zipfile.ZIP_DEFLATED)

        # Get metadata from the versioned directory
        for metadata_file in glob.glob('%s/*.json' % get_lp_build_dir(lang_code_ietf, version=version)):
            # Get every single file in the directory and zip it up
            filepath = os.path.join(lang_locale_path, metadata_file)
            z.write(filepath, arcname=os.path.basename(metadata_file))
            sizes[lang_code_ietf]["package_size"] += os.path.getsize(filepath)

        # Get mo files from the directory
        lang_code_crowdin = lang_code_map["crowdin"]
        mo_files = glob.glob('%s/*.mo' % get_lp_build_dir(lcode_to_ietf(lang_code_crowdin), version=version)) if lang_code_crowdin else []
        for mo_file in mo_files:
            # Get every single compiled language file
            filepath = os.path.join(lang_locale_path, mo_file)
            z.write(filepath, arcname=os.path.join("LC_MESSAGES", os.path.basename(mo_file)))
            sizes[lang_code_ietf]["package_size"] += os.path.getsize(filepath)

        # include video file sizes
        remote_video_size_list = get_all_remote_video_sizes()
        z.writestr('video_file_sizes.json', str(remote_video_size_list))

        srt_dirpath = get_srt_path(lcode_to_django_dir(lang_code_map["amara"]))
        for srt_file in glob.glob(os.path.join(srt_dirpath, "*.srt")):
            z.write(srt_file, arcname=os.path.join("subtitles", os.path.basename(srt_file)))
            sizes[lang_code_ietf]["package_size"] += os.path.getsize(srt_file)

        if version_diff(version, "0.10.3") > 0:  # since these are globally available, need to check version.
            exercises_dirpath = get_localized_exercise_dirpath(lang_code_map["exercises"])
            for exercise_file in glob.glob(os.path.join(exercises_dirpath, "*.html")):
                # Get every single compiled language file
                filepath = os.path.join(exercises_dirpath, exercise_file)
                z.write(filepath, arcname=os.path.join("exercises", os.path.basename(exercise_file)))
                sizes[lang_code_ietf]["package_size"] += os.path.getsize(filepath)

        # Add dubbed video map
        z.write(DUBBED_VIDEOS_MAPPING_FILEPATH, arcname=os.path.join("dubbed_videos", os.path.basename(DUBBED_VIDEOS_MAPPING_FILEPATH)))
        sizes[lang_code_ietf]["package_size"] += os.path.getsize(DUBBED_VIDEOS_MAPPING_FILEPATH)

        z.close()
        sizes[lang_code_ietf]["zip_size"] = os.path.getsize(zip_filepath)

    logging.info("Done.")
    return sizes
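A usage sketch for zip_language_packs; the language code and version are illustrative:

sizes = zip_language_packs(lang_codes=["pt-BR"], version="0.11.0")
# e.g. {"pt-BR": {"package_size": 1234567, "zip_size": 345678}}
print(sizes["pt-BR"]["zip_size"])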
Code Example #12
File: trust_tests.py Project: tunapanda/packages
class TestChainOfTrust(KALiteTestCase):
    def setUp(self):
        Device.own_device = None  # clear the cache, which isn't cleared across tests otherwise.
        super(TestChainOfTrust, self).setUp()

    def tearDown(self):
        super(TestChainOfTrust, self).tearDown()
        Device.own_device = None  # clear the cache, which isn't cleared across tests otherwise.

    @unittest.skipIf(
        version_diff("0.12", version.VERSION) > 0,
        "generate_zone not available before v0.12.")
    @distributed_server_test
    def test_valid_own_device(self):
        """
        Chain of trust:
        1. Zone created by this device
        2. Another device joins (no central server) through an invitation
        """
        own_device = Device.get_own_device()

        call_command("generate_zone")  # put own_device on a zone
        self.assertEqual(
            DeviceZone.objects.filter(device=own_device).count(), 1,
            "Own device should be on a zone after calling generate_zone.")
        zone = Zone.objects.all()[0]

        new_device = Device(name="new_device")  # make a new device
        new_device.set_key(Key())
        new_device.save()  # get an ID
        new_device.get_metadata().save()

        # Now create an invitation, and claim that invitation for the new device.
        invitation = ZoneInvitation.generate(zone=zone, invited_by=own_device)
        invitation.claim(used_by=new_device)
        self.assertEqual(
            invitation.used_by, new_device,
            "Invitation should now be used by device %s" % new_device)
        self.assertEqual(
            DeviceZone.objects.filter(device=new_device).count(), 1,
            "There should be a DeviceZone for device %s" % new_device)
        self.assertEqual(
            DeviceZone.objects.get(device=new_device).zone, zone,
            "DeviceZone for device %s should be zone %s" % (new_device, zone))

        # Now get a chain of trust establishing the new device on the zone
        chain = ChainOfTrust(zone=zone, device=new_device)
        self.assertTrue(chain.verify(), "Chain of trust should verify.")

    @central_server_test
    def test_valid_trusted(self):
        """
        Chain of trust:
        1. Zone created by this device
        2. Another device joins through an invitation (central server)
        """
        own_device = Device.get_own_device()
        zone = Zone(name="test_zone")
        zone.save()

        new_device = Device(name="new_device")  # make a new device
        new_device.set_key(Key())
        new_device.save()  # get an ID
        new_device.get_metadata().save()

        # Now create an invitation, and claim that invitation for the new device.
        invitation = ZoneInvitation.generate(zone=zone, invited_by=own_device)
        invitation.claim(used_by=new_device)
        self.assertEqual(
            invitation.used_by, new_device,
            "Invitation should now be used by device %s" % new_device)
        self.assertEqual(
            DeviceZone.objects.filter(device=new_device).count(), 1,
            "There should be a DeviceZone for device %s" % new_device)
        self.assertEqual(
            DeviceZone.objects.get(device=new_device).zone, zone,
            "DeviceZone for device %s should be zone %s" % (new_device, zone))

        # Now get a chain of trust establishing the new device on the zone
        chain = ChainOfTrust(zone=zone, device=new_device)
        self.assertTrue(chain.verify(), "Chain of trust should verify.")

    @distributed_server_test
    def test_invalid_invitation(self):
        """
        Chain of trust:
        1. Zone created by this device
        2. Another device joins (no central server) without an invitation--assert!
        """
        own_device = Device.get_own_device()

        call_command("generate_zone")  # put own_device on a zone
        zone = Zone.objects.all()[0]

        new_device = Device(name="new_device")  # make a new device
        new_device.set_key(Key())
        new_device.save()  # get an ID
        new_device.get_metadata().save()

        # Now create an illegal invitation--one that's not signed by the zone creator
        with self.assertRaises(ValidationError):
            ZoneInvitation.generate(zone=zone, invited_by=new_device)

        #
        invitation = ZoneInvitation(zone=zone, invited_by=new_device)
        with self.assertRaises(ValidationError):
            invitation.set_key(Key())
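The skipIf guard on test_valid_own_device inverts the usual comparison: version_diff("0.12", version.VERSION) > 0 is true exactly when the running software is older than 0.12. A minimal sketch of the same gating pattern, assuming kalite's version module and version_diff are importable as shown:

import unittest

from kalite import version  # assumed import path
# version_diff is assumed importable from the project's version utilities.

class VersionGatedTests(unittest.TestCase):

    @unittest.skipIf(version_diff("0.12", version.VERSION) > 0,
                     "Feature under test was introduced in v0.12.")
    def test_new_feature(self):
        self.assertTrue(True)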