Example #1
File: prim_hair.py  Project: Tugcga/S-USD
def set_curves_data(usd_curves,
                    usd_curves_prim,
                    data_points,
                    data_vertex_count,
                    data_width,
                    frame=None):
    # prepare usd attributes
    usd_curves.CreateTypeAttr(UsdGeom.Tokens.cubic)
    usd_curves.CreateBasisAttr(UsdGeom.Tokens.bspline)

    usd_points = usd_curves.CreatePointsAttr()
    usd_vertex_count = usd_curves.CreateCurveVertexCountsAttr()
    usd_width = usd_curves.CreateWidthsAttr()

    # set values
    if frame is None:
        usd_points.Set(data_points)
        usd_vertex_count.Set(data_vertex_count)
        usd_width.Set(data_width)
    else:
        usd_points.Set(data_points, Usd.TimeCode(frame))
        usd_vertex_count.Set(data_vertex_count, Usd.TimeCode(frame))
        usd_width.Set(data_width, Usd.TimeCode(frame))

    # set bounding box
    usd_extent = usd_curves_prim.CreateAttribute(
        "extent", Sdf.ValueTypeNames.Float3Array)
    if frame is None:
        usd_extent.Set(utils.get_bounding_box(data_points))
    else:
        usd_extent.Set(utils.get_bounding_box(data_points),
                       Usd.TimeCode(frame))
Example #2
def set_pointcloud_at_frame(pointcloud_geometry,
                            usd_pointcloud,
                            usd_points_prim,
                            frame=None):
    xsi_pp = pointcloud_geometry.GetICEAttributeFromName("PointPosition")
    xsi_size = pointcloud_geometry.GetICEAttributeFromName("Size")

    xsi_pp_data = xsi_pp.DataArray
    xsi_size_data = xsi_size.DataArray

    usd_points = usd_pointcloud.CreatePointsAttr()
    usd_width = usd_pointcloud.CreateWidthsAttr()

    data_points = []
    data_width = []

    for index in range(len(xsi_pp_data)):
        data_points.append(utils.vector_to_tuple(xsi_pp_data[index]))
        data_width.append(xsi_size_data[index])

    if frame is None:
        usd_points.Set(data_points)
        usd_width.Set(data_width)
    else:
        usd_points.Set(data_points, Usd.TimeCode(frame))
        usd_width.Set(data_width, Usd.TimeCode(frame))

    # set bounding box
    usd_extent = usd_points_prim.CreateAttribute(
        "extent", Sdf.ValueTypeNames.Float3Array)
    if frame is None:
        usd_extent.Set(utils.get_bounding_box(data_points))
    else:
        usd_extent.Set(utils.get_bounding_box(data_points),
                       Usd.TimeCode(frame))
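utils.vector_to_tuple is likewise not shown here. A plausible one-liner, assuming the ICE PointPosition data array yields vector objects exposing X, Y and Z components:

# hypothetical sketch of utils.vector_to_tuple under the assumption above
def vector_to_tuple(vector):
    return (vector.X, vector.Y, vector.Z)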
Example #3
def process_scan(scan_path, predictions_dir, output_directory):
    p1, p2 = (256, 256), (256, 256)
    scan_id = os.path.basename(scan_path)
    # first pass: grow a common bounding box over every slice's predicted mask
    for slice_image_path in glob.glob(scan_path + '/pngs/*.png'):
        slice_id = os.path.basename(slice_image_path).split(".")[0]

        with open(
                os.path.join(predictions_dir,
                             '{}.{}'.format(scan_id, slice_id)), 'rb') as f:
            mask = pickle.load(f)
            cp1, cp2 = utils.get_bounding_box(mask)
            p1 = min(p1[0], cp1[0]), min(p1[1], cp1[1])
            p2 = max(p2[0], cp2[0]), max(p2[1], cp2[1])
    p1, p2 = utils.get_expanded_bounding_box(p1, p2, (256, 256))

    scan_output_dir = os.path.join(output_directory, scan_id)
    os.makedirs(scan_output_dir + '/pngs')

    with open(os.path.join(output_directory, scan_id, "coordinates.txt"),
              'w') as f:
        f.write("{},{},{},{}".format(p1[0], p1[1], p2[0], p2[1]))

    # second pass: mask each slice, crop it to the common box and save it
    for slice_image_path in glob.glob(scan_path + '/pngs/*.png'):
        image = utils.read_image(slice_image_path)
        slice_id = os.path.basename(slice_image_path).split(".")[0]
        with open(
                os.path.join(predictions_dir,
                             '{}.{}'.format(scan_id, slice_id)), 'rb') as f:
            mask = pickle.load(f)
            new_image = (image * mask)[p1[0]:p2[0], p1[1]:p2[1]]
            output_image_path = os.path.join(
                output_directory, scan_id, 'pngs',
                os.path.basename(slice_image_path))
            imsave(output_image_path, new_image)
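In this example utils.get_bounding_box operates on a 2D mask instead of a point list and returns two (row, col) corners that are then merged across slices. A minimal sketch under that assumption:

import numpy as np

# hypothetical sketch: corners of the smallest box enclosing the non-zero
# pixels of a 2D mask, as ((row_min, col_min), (row_max, col_max))
def get_bounding_box(mask):
    coords = np.argwhere(mask)
    if coords.size == 0:
        return (0, 0), (0, 0)
    row_min, col_min = coords.min(axis=0)
    row_max, col_max = coords.max(axis=0)
    return (row_min, col_min), (row_max, col_max)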
Example #4
    def preprocess(self, state):
        """Transform the point cloud to 4 images.

        Parameters
        ----------
        state : np.ndarray
            Observation of the environment.

        Returns
        -------
        np.ndarray
            Array with 4 images.

        """
        P = np.array(state[0][:, :3])
        C = np.array(state[0][:, 3:])
        # transform color values in the range of 0 to 1
        C /= 255
        P_idxs = state[1]
        neighbour_idxs = state[2]
        if P_idxs.shape[0] == 0 and neighbour_idxs.shape[0] == 0:
            camera_translation = np.array([0, 0, 0])
        else:
            # color the main superpoint in red
            C[P_idxs, 2] = 1
            # lower other color values of the main superpoint
            C[P_idxs, :2] /= 4

            # color the neighbour superpoint in blue
            C[neighbour_idxs, 0] = 1
            # lower other color values of the neighbour superpoint
            C[neighbour_idxs, 1:] /= 4

            # calculate camera_translation of segments
            main_idxs = np.vstack(
                (P_idxs[:, None], neighbour_idxs[:, None])).reshape(-1)
            P_main = P[main_idxs]
            bb_main = get_bounding_box(P_main)
            l1 = bb_main[1] - bb_main[0]
            l2 = bb_main[3] - bb_main[2]

            l = max(l1, l2)
            h = self.dist_factor * l + self.distance

            # x direction and up
            ct1 = np.array([0, -l2 / 2, h])
            # y direction and up
            ct2 = np.array([-l1 / 2, 0, h])
            ct3 = np.array([0, l2 / 2, h])
            ct4 = np.array([l1 / 2, 0, h])
            v1 = np.array([-bb_main[0], -bb_main[2], bb_main[5]])
            v2 = np.array([-bb_main[1], -bb_main[3], bb_main[5]])
            p1 = (v1 + ct1)[:, None]
            p2 = (v1 + ct2)[:, None]
            p3 = (v2 + ct3)[:, None]
            p4 = (v2 + ct4)[:, None]
            camera_translation = np.hstack((p1, p2, p3, p4))

        # transform color values in the range of -1 to 1
        C -= 0.5
        C *= 2

        P = np.transpose(P)
        P = np.vstack((P, np.ones((1, P.shape[1]))))

        Rt = self.reset_for_img(camera_translation)
        self.imgs[:] = 0
        res, mask, idxs = PtoImg2(P, self.M_proj, Rt, self.width)
        idx_start = 0
        for i in range(self.imgs.shape[0]):
            idx = idxs[i]
            idx_split = idx_start + idx
            x = res[idx_start:idx_split]
            idx_start = idx_split + idx
            y = res[idx_split:idx_start]
            m = mask[i]
            c = C[m, :]
            self.imgs[i, x, y] = c
        return self.imgs
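Here get_bounding_box receives an (N, 3) point array and, judging by the bb_main[0..5] indexing above, returns the six axis extremes in a flat sequence. A sketch under that assumption:

import numpy as np

# hypothetical sketch matching the indexing used above:
# (x_min, x_max, y_min, y_max, z_min, z_max) of an (N, 3) array
def get_bounding_box(points):
    mins = points.min(axis=0)
    maxs = points.max(axis=0)
    return (mins[0], maxs[0], mins[1], maxs[1], mins[2], maxs[2])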
Example #5
def main(video_source):
    # load yolov2
    net = cv2.dnn.readNet("yolov2.weights", "yolov2.cfg")
    layer_names = net.getLayerNames()
    # note: the i[0] indexing assumes an older OpenCV build in which
    # getUnconnectedOutLayers() returns nested arrays
    output_layers = [layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()]

    # input capture object
    cap = cv2.VideoCapture(video_source)

    if WRITE_VIDEO:
        # output video writer
        fourcc = cv2.VideoWriter_fourcc(*'XVID')
        out = cv2.VideoWriter('output.avi', fourcc, 20.0, (1280, 720))

    frame_count = 0
    while cap.isOpened():
        ret, frame = cap.read()
        if not ret:
            break
        frame_count += 1
        # process every third frame; a dedicated counter is used because the
        # detection loops below reuse the name i
        if frame_count % 3 != 0:
            continue
        # img = cv2.resize(img, None, fx=0.4, fy=0.4)
        height, width, channels = frame.shape

        blob = cv2.dnn.blobFromImage(frame, 0.00392, (608, 608), (0, 0, 0), True, crop=False)
        net.setInput(blob)
        outs = net.forward(output_layers)

        boxes, confidences, class_id = get_bounding_box(outs, height, width)
        indexes = cv2.dnn.NMSBoxes(boxes, confidences, 0.5, 0.4)
        # print(class_id)
        #
        circles = []
        for i in range(len(boxes)):
            if i in indexes:
                x, y, w, h = boxes[i]

                # draw circle
                cx, cy = x + w // 2, y + h
                frame = cv2.line(frame, (cx, cy), (cx, cy - h // 2), (0, 255, 0), 2)
                frame = cv2.circle(frame, (cx, cy - h // 2), 5, (255, 20, 200), -1)
                circles.append([cx, cy - h // 2, h])

        int_circles_list = []
        indexes = []
        for i in range(len(circles)):
            x1, y1, r1 = circles[i]
            for j in range(i + 1, len(circles)):
                x2, y2, r2 = circles[j]
                if int_circle(x1, y1, x2, y2, r1 // 2, r2 // 2) >= 0 and abs(y1 - y2) < r1 // 4:
                    indexes.append(i)
                    indexes.append(j)

                    int_circles_list.append([x1, y1, r1])
                    int_circles_list.append([x2, y2, r2])
                    cv2.line(frame, (x1, y1), (x2, y2), (0, 0, 255), 2)

        # draw the overlays and counters once per processed frame
        rows, cols, _ = frame.shape

        for i in range(len(circles)):
            x, y, r = circles[i]

            if i in indexes:
                color = (0, 0, 255)
            else:
                color = (0, 200, 20)
            scale = (r) / 100
            transparentOverlay1(frame, dst_circle, (x, y - 5), alphaVal=110, color=color, scale=scale)

        cv2.rectangle(frame, (0, rows - 80), (cols, rows), (0, 0, 0), -1)
        cv2.putText(frame,
                    "Total Persons : " + str(len(boxes)),
                    (20, rows - 40),
                    fontFace=cv2.QT_FONT_NORMAL,
                    fontScale=1,
                    color=(215, 220, 245))

        cv2.putText(frame,
                    "Defaulters : " + str(len(set(indexes))),
                    (cols - 300, rows - 40),
                    fontFace=cv2.QT_FONT_NORMAL,
                    fontScale=1,
                    color=(0, 220, 245))
        if WRITE_VIDEO:
            out.write(frame)

        if SHOW_OUTPUT:
            cv2.imshow("frame", frame)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break

    if WRITE_VIDEO:
        out.release()

    cv2.destroyAllWindows()
    cap.release()
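get_bounding_box in this example decodes the raw YOLO output layers into pixel-space boxes. A minimal sketch of the usual OpenCV DNN decoding, assuming each detection row holds (cx, cy, w, h, objectness, class scores...) normalised to the frame size and a 0.5 confidence threshold; the actual helper in the source project may also filter for the person class only:

import numpy as np

# hypothetical sketch of get_bounding_box for YOLO-style outputs
def get_bounding_box(outs, height, width, conf_threshold=0.5):
    boxes, confidences, class_ids = [], [], []
    for out in outs:
        for detection in out:
            scores = detection[5:]
            class_id = int(np.argmax(scores))
            confidence = float(scores[class_id])
            if confidence > conf_threshold:
                cx, cy = int(detection[0] * width), int(detection[1] * height)
                w, h = int(detection[2] * width), int(detection[3] * height)
                boxes.append([cx - w // 2, cy - h // 2, w, h])
                confidences.append(confidence)
                class_ids.append(class_id)
    return boxes, confidences, class_ids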
Example #6
def fontbakery_check_ttf(config):
    '''Main sequence of checkers & fixers'''
    fb = FontBakeryCheckLogger(config)

    # set up a basic logging config
    handler = logging.StreamHandler()
    formatter = logging.Formatter('%(levelname)-8s %(message)s  ')
    handler.setFormatter(formatter)

    logger = logging.getLogger()
    logger.addHandler(handler)

    if config['verbose'] == 1:
        logger.setLevel(logging.INFO)
    elif config['verbose'] >= 2:
        logger.setLevel(logging.DEBUG)
    else:
        fb.progressbar = True
        logger.setLevel(logging.CRITICAL)

    if config['error']:
        fb.progressbar = False
        logger.setLevel(logging.ERROR)

    # ------------------------------------------------------
    logging.debug("Checking each file is a ttf")
    fonts_to_check = []
    for target in config['files']:
        if type(target) is TargetFont:
            fonts_to_check.append(target)
        else:
            # use glob.glob to accept *.ttf
            for fullpath in glob.glob(target):
                file_path, file_name = os.path.split(fullpath)
                if file_name.endswith(".ttf"):
                    a_target = TargetFont()
                    a_target.fullpath = fullpath
                    fonts_to_check.append(a_target)
                else:
                    logging.warning(
                        "Skipping '{}' as it does not seem "
                        "to be valid TrueType font file.".format(file_name))

    # FIX-ME: Why do we attempt to sort the fonts here?
    #         Do we expect to remove duplicates? It does not seem very important.
    #         Anyway... this probably needs some extra work to get the
    #         font objects sorted by the filename field...
    fonts_to_check.sort()

    if fonts_to_check == []:
        logging.error("None of the fonts are valid TrueType files!")

    checks.check_files_are_named_canonically(fb, fonts_to_check)

    if fb.config['webapp'] is True:
        # At the moment we won't perform
        # DESCRIPTION checks on the webapp
        # In particular, one of the checks depends on the magic python module
        # which is not supported on Google App Engine.
        pass
    else:
        # Perform a few checks on DESCRIPTION files

        # This expects all fonts to be in the same folder:
        a_font = fonts_to_check[0]

        # FIX-ME: This will not work if we have more than
        #         a single '/' char in the filename:
        folder_name = os.path.split(a_font.fullpath)[0]
        descfilepath = os.path.join(folder_name, "DESCRIPTION.en_us.html")
        if os.path.exists(descfilepath):
            fb.default_target = descfilepath
            contents = open(descfilepath).read()
            checks.check_DESCRIPTION_file_contains_no_broken_links(
                fb, contents)
            checks.check_DESCRIPTION_is_propper_HTML_snippet(fb, descfilepath)
            checks.check_DESCRIPTION_max_length(fb, descfilepath)
            checks.check_DESCRIPTION_min_length(fb, descfilepath)

    if not fb.config['inmem']:
        # this check does not make sense for in-memory file-like objects:
        checks.check_all_files_in_a_single_directory(fb, fonts_to_check)

    registered_vendor_ids = fetch_vendorID_list(logging)

    # DC This is definitely not step 1, cross-family comes after individual
    # in order that individual hotfixes can enable cross-family checks to pass
    ###########################################################################
    ## Step 1: Cross-family tests
    ##         * Validates consistency of data throughout all TTF files
    ##           in a given family
    ##         * The list of TTF files is inferred from the METADATA.pb file
    ##         * We avoid testing the same family twice by deduplicating the
    ##           list of METADATA.pb files first
    ###########################################################################

    if fb.config['inmem'] or fb.config['webapp']:
        # TODO: Not sure why these are disabled. I need to review this.
        pass
    else:
        metadata_to_check = []
        for target in fonts_to_check:
            fontdir = os.path.dirname(target.fullpath)
            metadata = os.path.join(fontdir, "METADATA.pb")
            if not os.path.exists(metadata):
                logging.error("'{}' is missing"
                              " a METADATA.pb file!".format(target.fullpath))
            else:
                family = get_FamilyProto_Message(metadata)
                if family is None:
                    logging.warning("Could not load data from METADATA.pb.")
                elif family not in metadata_to_check:
                    metadata_to_check.append([fontdir, family])

        for dirname, family in metadata_to_check:
            ttf = {}
            for f in family.fonts:
                if font_key(f) in ttf.keys():
                    # I think this will likely never happen. But just in case...
                    logging.error(
                        "This is a fontbakery bug."
                        " We need to figure out a better hash-function"
                        " for the font ProtocolBuffer message."
                        " Please file an issue on"
                        " https://github.com/googlefonts"
                        "/fontbakery/issues/new")
                else:
                    ttf[font_key(f)] = ttLib.TTFont(
                        os.path.join(dirname, f.filename))

            if dirname == "":
                fb.default_target = "Current Folder"
            else:
                fb.default_target = dirname
            # -----------------------------------------------------
            checks.check_font_designer_field_is_not_unknown(fb, family)
            checks.check_fonts_have_consistent_underline_thickness(
                fb, family, ttf)
            checks.check_fonts_have_consistent_PANOSE_proportion(
                fb, family, ttf)
            checks.check_fonts_have_consistent_PANOSE_family_type(
                fb, family, ttf)
            checks.check_fonts_have_equal_numbers_of_glyphs(fb, family, ttf)
            checks.check_fonts_have_equal_glyph_names(fb, family, ttf)
            checks.check_fonts_have_equal_unicode_encodings(fb, family, ttf)

    # ------------------------------------------------------
    vmetrics_ymin = 0
    vmetrics_ymax = 0
    for target in fonts_to_check:
        # this will both accept BytesIO or a filepath
        font = target.get_ttfont()

        font_ymin, font_ymax = get_bounding_box(font)
        vmetrics_ymin = min(font_ymin, vmetrics_ymin)
        vmetrics_ymax = max(font_ymax, vmetrics_ymax)

    checks.check_all_fontfiles_have_same_version(fb, fonts_to_check)
    # FSanches: I don't like the following few lines.
    #           They look very hacky even though they actually work... :-P
    a_font = fonts_to_check[0]
    family_dir = os.path.split(a_font.fullpath)[0]
    cross_family = os.path.join(family_dir, "CrossFamilyChecks")
    fb.output_report(TargetFont(desc={"filename": cross_family}))
    fb.reset_report()

    ##########################################################################
    # Step 2: Single TTF tests
    #         * Tests that only check data of each TTF file, but not cross-
    #           referencing with other fonts in the family
    ##########################################################################

    # ------------------------------------------------------
    for target in fonts_to_check:
        font = target.get_ttfont()
        fb.default_target = target.fullpath
        fb.set_font(font)
        logging.info("OK: {} opened with fontTools".format(target.fullpath))

        local_styles = {}
        # Determine weight from canonical filename
        file_path, filename = os.path.split(target.fullpath)
        family, style = os.path.splitext(filename)[0].split('-')
        local_styles[style] = font

        checks.check_font_has_post_table_version_2(fb, font)
        checks.check_OS2_fsType(fb)
        checks.check_main_entries_in_the_name_table(fb, font, target.fullpath)
        checks.check_OS2_achVendID(fb, font, registered_vendor_ids)
        checks.check_name_entries_symbol_substitutions(fb, font)
        checks.check_OS2_usWeightClass(fb, font, style)
        checks.check_fsSelection_REGULAR_bit(fb, font, style)
        checks.check_italicAngle_value_is_negative(fb, font)
        checks.check_italicAngle_value_is_less_than_20_degrees(fb, font)
        checks.check_italicAngle_matches_font_style(fb, font, style)
        checks.check_fsSelection_ITALIC_bit(fb, font, style)
        checks.check_macStyle_ITALIC_bit(fb, font, style)
        checks.check_fsSelection_BOLD_bit(fb, font, style)
        checks.check_macStyle_BOLD_bit(fb, font, style)

        found = checks.check_font_has_a_license(fb, file_path)
        checks.check_copyright_entries_match_license(fb, found, file_path,
                                                     font)
        checks.check_font_has_a_valid_license_url(fb, found, font)
        checks.check_description_strings_in_name_table(fb, font)
        checks.check_description_strings_do_not_exceed_100_chars(fb, font)

        monospace_detected = checks.check_font_is_truly_monospaced(fb, font)
        checks.check_if_xAvgCharWidth_is_correct(fb, font)
        checks.check_with_ftxvalidator(fb, target.fullpath)
        checks.check_with_msfontvalidator(fb, target.fullpath)
        checks.check_with_otsanitise(fb, target.fullpath)

        validation_state = checks.check_fforge_outputs_error_msgs(
            fb, target.fullpath)
        if validation_state is not None:
            checks.perform_all_fontforge_checks(fb, validation_state)

        checks.check_OS2_usWinAscent_and_Descent(fb, vmetrics_ymin,
                                                 vmetrics_ymax)
        checks.check_Vertical_Metric_Linegaps(fb, font)
        checks.check_OS2_Metrics_match_hhea_Metrics(fb, font)
        checks.check_unitsPerEm_value_is_reasonable(fb, font)
        checks.check_font_version_fields(fb, font)
        checks.check_Digital_Signature_exists(fb, font, target.fullpath)
        checks.check_font_contains_the_first_few_mandatory_glyphs(fb, font)

        missing = checks.check_font_contains_glyphs_for_whitespace_chars(
            fb, font)
        checks.check_font_has_proper_whitespace_glyph_names(fb, font, missing)
        checks.check_whitespace_glyphs_have_ink(fb, font, missing)
        checks.check_whitespace_glyphs_have_coherent_widths(fb, font, missing)
        checks.check_with_pyfontaine(fb, target.fullpath)
        checks.check_no_problematic_formats(fb, font)
        checks.check_for_unwanted_tables(fb, font)

        ttfautohint_missing = checks.check_hinting_filesize_impact(
            fb, target.fullpath, filename)
        checks.check_version_format_is_correct_in_NAME_table(fb, font)
        checks.check_font_has_latest_ttfautohint_applied(
            fb, font, ttfautohint_missing)
        checks.check_name_table_entries_do_not_contain_linebreaks(fb, font)
        checks.check_glyph_names_are_all_valid(fb, font)
        checks.check_font_has_unique_glyph_names(fb, font)
        checks.check_no_glyph_is_incorrectly_named(fb, font)
        checks.check_EPAR_table_is_present(fb, font)
        checks.check_GASP_table_is_correctly_set(fb, font)

        has_kerning_info = checks.check_GPOS_table_has_kerning_info(fb, font)
        checks.check_nonligated_sequences_kerning_info(fb, font,
                                                       has_kerning_info)
        checks.check_all_ligatures_have_corresponding_caret_positions(fb, font)
        checks.check_there_is_no_KERN_table_in_the_font(fb, font)
        checks.check_familyname_does_not_begin_with_a_digit(fb, font)
        checks.check_fullfontname_begins_with_the_font_familyname(fb, font)
        checks.check_unused_data_at_the_end_of_glyf_table(fb, font)
        checks.check_font_has_EURO_SIGN_character(fb, font)
        checks.check_font_follows_the_family_naming_recommendations(fb, font)
        checks.check_font_enables_smart_dropout_control(fb, font)
        checks.check_MaxAdvanceWidth_is_consistent_with_Hmtx_and_Hhea_tables(
            fb, font)
        checks.check_non_ASCII_chars_in_ASCII_only_NAME_table_entries(fb, font)

        checks.check_for_points_out_of_bounds(fb, font)
        checks.check_glyphs_have_unique_unicode_codepoints(fb, font)
        checks.check_all_glyphs_have_codepoints_assigned(fb, font)
        checks.check_that_glyph_names_do_not_exceed_max_length(fb, font)
        checks.check_hhea_table_and_advanceWidth_values(
            fb, font, monospace_detected)

        ##########################################################
        ## Metadata related checks:
        ##########################################################
        skip_gfonts = False
        is_listed_in_GFD = False
        if not fb.config['webapp']:
            fontdir = os.path.dirname(target.fullpath)
            metadata = os.path.join(fontdir, "METADATA.pb")
            if not os.path.exists(metadata):
                logging.warning(
                    ("{} is missing a METADATA.pb file!"
                     " This will disable all Google-Fonts-specific checks."
                     " Please considering adding a METADATA.pb file to the"
                     " same folder as the font files.").format(filename))
                skip_gfonts = True
            else:
                family = get_FamilyProto_Message(metadata)
                if family is None:
                    logging.warning("Could not load data from METADATA.pb.")
                    skip_gfonts = True
                    break

                fb.default_target = metadata

                checks.check_METADATA_Ensure_designer_simple_short_name(
                    fb, family)
                is_listed_in_GFD = checks.check_family_is_listed_in_GFDirectory(
                    fb, family)
                checks.check_METADATA_Designer_exists_in_GWF_profiles_csv(
                    fb, family)
                checks.check_METADATA_has_unique_full_name_values(fb, family)
                checks.check_METADATA_check_style_weight_pairs_are_unique(
                    fb, family)
                checks.check_METADATA_license_is_APACHE2_UFL_or_OFL(fb, family)
                checks.check_METADATA_contains_at_least_menu_and_latin_subsets(
                    fb, family)
                checks.check_METADATA_subsets_alphabetically_ordered(
                    fb, metadata, family)
                checks.check_Copyright_notice_is_the_same_in_all_fonts(
                    fb, family)
                checks.check_METADATA_family_values_are_all_the_same(
                    fb, family)

                found_regular = checks.check_font_has_regular_style(fb, family)
                checks.check_regular_is_400(fb, family, found_regular)

                for f in family.fonts:  # pylint: disable=no-member
                    # (I know this is good, but pylint
                    #  seems confused here)
                    if filename == f.filename:
                        ###### Here go single-TTF metadata tests #######
                        # ----------------------------------------------

                        checks.check_font_on_disk_and_METADATA_have_same_family_name(
                            fb, font, f)
                        checks.check_METADATA_postScriptName_matches_name_table_value(
                            fb, font, f)
                        checks.check_METADATA_fullname_matches_name_table_value(
                            fb, font, f)
                        checks.check_METADATA_fonts_name_matches_font_familyname(
                            fb, font, f)
                        checks.check_METADATA_fullName_matches_postScriptName(
                            fb, f)
                        checks.check_METADATA_filename_matches_postScriptName(
                            fb, f)

                        ffname = checks.check_METADATA_name_contains_good_font_name(
                            fb, font, f)
                        if ffname is not None:
                            checks.check_METADATA_fullname_contains_good_fname(
                                fb, f, ffname)
                            checks.check_METADATA_filename_contains_good_fname(
                                fb, f, ffname)
                            checks.check_METADATA_postScriptName_contains_good_fname(
                                fb, f, ffname)

                        checks.check_Copyright_notice_matches_canonical_pattern(
                            fb, f)
                        checks.check_Copyright_notice_does_not_contain_Reserved_Name(
                            fb, f)
                        checks.check_Copyright_notice_does_not_exceed_500_chars(
                            fb, f)
                        checks.check_Filename_is_set_canonically(fb, f)
                        checks.check_METADATA_font_italic_matches_font_internals(
                            fb, font, f)

                        if checks.check_METADATA_fontstyle_normal_matches_internals(
                                fb, font, f):
                            checks.check_Metadata_keyvalue_match_to_table_name_fields(
                                fb, font, f)

                        checks.check_fontname_is_not_camel_cased(fb, f)
                        checks.check_font_name_is_the_same_as_family_name(
                            fb, family, f)
                        checks.check_font_weight_has_a_canonical_value(fb, f)
                        checks.check_METADATA_weigth_matches_OS2_usWeightClass_value(
                            fb, f)
                        checks.check_Metadata_weight_matches_postScriptName(
                            fb, f)
                        checks.check_METADATA_lists_fonts_named_canonicaly(
                            fb, font, f)
                        checks.check_Font_styles_are_named_canonically(
                            fb, font, f)

        # Google-Fonts specific check:
        checks.check_font_em_size_is_ideally_equal_to_1000(
            fb, font, skip_gfonts)

        ##########################################################
        ## Step 3: Regression related checks:
        # if family already exists on fonts.google.com
        ##########################################################

        # ------------------------------------------------------
        if is_listed_in_GFD:
            remote_fonts_zip = download_family_from_GoogleFontDirectory(
                family.name)  # pylint: disable=no-member
            remote_fonts_to_check = fonts_from_zip(remote_fonts_zip)

            remote_styles = {}
            for target in remote_fonts_to_check:
                fb.default_target = target.fullpath
                remote_font = target.get_ttfont()
                remote_style = target.fullpath[:-4].split('-')[1]
                remote_styles[remote_style] = remote_font

                # Only perform tests if local fonts have the same styles
                if remote_style in local_styles:
                    checks.check_regression_v_number_increased(
                        fb, local_styles[style], remote_styles[style], f)
                    checks.check_regression_glyphs_structure(
                        fb, local_styles[style], remote_styles[style], f)
                    checks.check_regression_ttfauto_xheight_increase(
                        fb, local_styles[style], remote_styles[style], f)

        fb.output_report(target)
        fb.reset_report()

        if not fb.config['webapp']:
            # ----------------------------------------------------
            # https://github.com/googlefonts/fontbakery/issues/971
            # DC: Each fix line should set a fix flag, and
            # if that flag is True by this point, only then write the file
            # and then say any further output regards fixed files, and
            # re-run the script on each fixed file with logging level = error
            # so no info-level log items are shown
            font_file_output = os.path.splitext(filename)[0] + ".fix"
            if config['autofix']:
                font.save(font_file_output)
                logging.info("{} saved\n".format(font_file_output))
            font.close()

            # -------------------------------------------------------
            if not config['verbose'] and \
               not config['json'] and \
               not config['ghm'] and \
               not config['error']:
                # in this specific case, the user would have no way to see
                # the actual check results. So here we inform the user
                # that at least one of these command line parameters
                # needs to be used in order to see the details.
                print(
                    "In order to see the actual check result messages,\n"
                    "use one of the following command-line parameters:\n"
                    "  --verbose\tOutput results to stdout.\n"
                    "  --json \tSave results to a file in JSON format.\n"
                    "  --ghm  \tSave results to a file in GitHub Markdown format.\n"
                    "  --error\tPrint only the error messages "
                    "(outputs to stderr).\n")

    if fb.config['webapp']:
        return fb.json_report_files
    else:
        if len(fb.json_report_files) > 0:
            print(("Saved check results in "
                   "JSON format to:\n\t{}"
                   "").format('\n\t'.join(fb.json_report_files)))
        if len(fb.ghm_report_files) > 0:
            print(("Saved check results in "
                   "GitHub Markdown format to:\n\t{}"
                   "").format('\n\t'.join(fb.ghm_report_files)))