Example #1
    def __revert_installation(self):
        output = True
        if len(self.urls_backup) > 0:
            urls_data = ""
            for line in self.urls_backup:
                urls_data += line

            output = write_to_file(self.urls_file, urls_data)

        if len(self.settings_backup) > 0:
            settings_data = ""
            for line in self.settings_backup:
                settings_data += line

            output = write_to_file(self.settings_file, settings_data)

        for app_path in self.delete_app_dir:
            output = delete_dir(app_path)

        if output:
            for plugin_path in self.delete_plugins_dir:
                if output:
                    output = delete_dir(plugin_path)

        return output
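Example #1 rebuilds each backed-up file by concatenating its lines in a loop before handing the result to write_to_file. A minimal standalone sketch of the same restore idea using str.join is shown below; restore_from_backup is a hypothetical name, and write_to_file here is only a stand-in for the example's helper of the same name.

# Hypothetical helpers illustrating the restore pattern from Example #1 with
# str.join instead of manual concatenation; not the project's actual code.
def write_to_file(path, data):
    # Stand-in for the example's write_to_file(path, data) helper.
    with open(path, "w") as f:
        f.write(data)
    return True


def restore_from_backup(path, backup_lines):
    """Rewrite path from its backed-up lines; no-op when there is no backup."""
    if not backup_lines:
        return True
    return write_to_file(path, "".join(backup_lines))


# Usage mirroring Example #1's two restore steps:
# restore_from_backup(urls_file, urls_backup)
# restore_from_backup(settings_file, settings_backup)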
Example #2
def get_ssh_credentials(nels_id):
    credential = storage.get_ssh_credential(nels_id)
    if not credential:
        return None
    (host, username) = (credential[0], credential[1])
    feed_utils.info("fetching keys. host: %s, username: %s" % (host, username))
    key_file = path.join(config.TEMP_DIR, "%s.nels" % nels_id)
    feed_utils.info("writing key file: %s" % key_file)
    file_utils.write_to_file(key_file, credential[2])
    run_utils.launch_cmd("chmod 600 %s" % key_file)
    return [host, username, key_file]
Example #3
    def __recover_app(self):
        if len(self.settings_backup) > 0:
            settings_data = ""
            for line in self.settings_backup:
                settings_data += line
            write_to_file(self.settings_file, settings_data)

        if len(self.urls_backup) > 0:
            urls_data = ""
            for line in self.urls_backup:
                urls_data += line
            write_to_file(self.urls_file, urls_data)
Example #4
    def calculate_accuracy_and_loss_and_write_report(self):
        avg_acc = self.calculate_mean(self.accuracies)
        avg_loss = self.calculate_mean(self.losses)

        file_utils.write_to_file("\n" + str(self.losses),
                                 "\n" + str(self.accuracies),
                                 str(avg_loss),
                                 str(avg_acc),
                                 path="test/")

        print("Accuracy ", avg_acc)
        print("Loss", avg_loss)
Example #5
    def __edit_urls_py(self):
        checker = "RedirectView.as_view(url='/katana/')"
        data = read_json_data(self.wf_config_file)

        if data["app"]["url"].startswith("/"):
            app_url = data["app"]["url"][1:]
        else:
            app_url = data["app"]["url"]
        self.urls_inclusions.append("url(r'^" + app_url + "', include('" +
                                    data["app"]["include"] + "')),")

        data = readlines_from_file(self.urls_file)
        self.urls_backup = data
        index = -1
        for i in range(0, len(data)):
            if checker in data[i]:
                index = i + 1
                break
        white_space = data[index].split("url")
        for i in range(0, len(self.urls_inclusions)):
            self.urls_inclusions[i] = (white_space[0] +
                                       self.urls_inclusions[i] + "\n")

        u_data = data[:index]
        u_data.extend(self.urls_inclusions)
        u_data.extend(data[index:])

        urls_data = ""
        for line in u_data:
            urls_data += line
        output = write_to_file(self.urls_file, urls_data)
        return output
Example #6
    def __remove_app_from_urls(self):
        print(self.include_urls)
        data = readlines_from_file(self.urls_file)

        urls_data = ""
        for url in self.include_urls:
            urls_data = ""
            for line in data:
                if url not in line:
                    urls_data += line
            data = urls_data
        output = write_to_file(self.urls_file, urls_data)
        return output
Example #7
    def __remove_app_from_settings(self):
        data = readlines_from_file(self.settings_file)
        sf_data = []
        for line in data:
            if line.strip() != "'{0}',".format(self.pkg_in_settings):
                sf_data.append(line)

        settings_data = ""
        for line in sf_data:
            settings_data += line

        output = write_to_file(self.settings_file, settings_data)
        return output
Example #8
    def __edit_settings_py(self):
        data = readlines_from_file(self.settings_file)
        self.settings_backup = data
        index = -1
        for i in range(0, len(data)):
            if "wui.core" in data[i]:
                index = i
                break

        white_space = data[index].split("'")

        self.pkg_in_settings = (white_space[0] + "'" +
                                self.pkg_in_settings + "',\n")
        sf_data = data[:index]
        sf_data.append(self.pkg_in_settings)
        sf_data.extend(data[index:])

        settings_data = ""
        for line in sf_data:
            settings_data += line

        output = write_to_file(self.settings_file, settings_data)
        return output
Example #9
    def test_add_symlink(self):
        test_link = self.new_temp_file('test_link')
        src_file = self.new_temp_file('linktest.txt')
        fu.write_to_file(src_file, "link test")
        fu.add_symlink(test_link, src_file)
        self.assertTrue(os.path.lexists(test_link))
Example #10
    def test_write_to_file(self):
        filename = self.new_temp_file('write_test.txt')
        random_str = ut.rand_str(100)
        fu.write_to_file(filename, random_str)
        self.assertEqual(random_str, fu.read_file(filename))
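Examples #9 and #10 exercise fu.write_to_file against helpers (new_temp_file, fu.read_file, fu.add_symlink) that are not shown on this page. A self-contained sketch of the round-trip test in Example #10, with tempfile and stand-in write_to_file/read_file implementations replacing the project's file_utils, might look like this:

# Self-contained variant of the write/read round-trip test above. The
# write_to_file and read_file functions are stand-ins for the project's
# file_utils helpers, whose real signatures may differ.
import os
import tempfile
import unittest


def write_to_file(path, data):
    with open(path, "w") as f:
        f.write(data)


def read_file(path):
    with open(path) as f:
        return f.read()


class WriteToFileTest(unittest.TestCase):
    def test_round_trip(self):
        fd, filename = tempfile.mkstemp(suffix=".txt")
        os.close(fd)
        try:
            write_to_file(filename, "some random text")
            self.assertEqual("some random text", read_file(filename))
        finally:
            os.remove(filename)


if __name__ == "__main__":
    unittest.main()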
Example #11
def gen_crawl_report(db_file,
                     db_pass2=None,
                     db_other_profs=None,
                     prof_dir=None):
    """ visits_cnt, cookies, localstorage, flash cookies, cache, indexeddb,
    http reqs/resps
    canvas: list distinct FPers, linked to the sites that include this FPer
    evercookie: list potential evercookies by searching ID-like common strings
    among different vectors"""
    out_dir = os.path.dirname(db_file)
    crawl_name = os.path.basename(os.path.dirname(db_file))
    figs = []  # figures to be plotted, removed for now.
    respawned = []

    if db_pass2 and db_other_profs and prof_dir:
        respawned = ev.get_flash_evercookies(db_file, db_pass2, db_other_profs,
                                             prof_dir)

    start, end = dbu.get_db_entry(db_file, dbu.DBCmd.GET_VISIT_DATES, False)
    visits_cnt = dbu.get_db_entry(db_file, dbu.DBCmd.COUNT_VISITS, False)[0]
    completed_visits_cnt = dbu.get_db_entry(db_file, dbu.DBCmd.COUNT_VISITS,
                                            True)[0]
    cookies = dbu.get_db_entry(db_file, dbu.DBCmd.COUNT_COOKIES, 0)
    localstorage = dbu.get_db_entry(db_file, dbu.DBCmd.COUNT_LOCALSTORAGE, 0)
    print "genreport len(localstorage)", len(localstorage)
    xsite_flash_cookies = get_xsite_flash_cookies(db_file)
    xsite_local_storage = get_xsite_local_storage(db_file)

    try:
        flash_cookie_count = dbu.get_db_entry(db_file, dbu.DBCmd.COUNT_LSO, 0)
    except:
        flash_cookie_count = [""]

    canvas_meta_rows = dbu.get_db_entry(db_file, dbu.DBCmd.GET_CANVAS_META, 0)
    canvas_scr_domains = {}
    canvas_events_per_script = {}
    canvas_url_counts = {}
    canvas_domain_counts = {}
    canvas_script_urls = dbu.get_db_entry(db_file,
                                          dbu.DBCmd.GET_CANVAS_SCRIPTS, 0)
    false_positives = []
    for canvas_script_url_tup in canvas_script_urls:
        canvas_script_url = canvas_script_url_tup[0]
        canvas_events = dbu.get_db_entry(db_file,
                                         dbu.DBCmd.GET_CANVAS_EVENTS_BY_SCRIPT,
                                         canvas_script_url)
        if not ca.is_canvas_false_positive(canvas_events):
            scr_evs = dbu.get_db_entry(db_file,
                                       dbu.DBCmd.GET_CANVAS_EVENTS_BY_SCRIPT,
                                       canvas_script_url)
            canvas_events_per_script[canvas_script_url] = scr_evs
            url_cnts = dbu.get_db_entry(db_file,
                                        dbu.DBCmd.COUNT_SITES_BY_CANVAS_SCRIPT,
                                        canvas_script_url)
            canvas_url_counts[canvas_script_url] = url_cnts
            domain = cu.extract_domain(canvas_script_url)
            if domain in canvas_scr_domains:
                canvas_scr_domains[domain].append(canvas_script_url)
            else:
                canvas_scr_domains[domain] = [canvas_script_url]
        else:
            false_positives.append(canvas_script_url_tup)
            # print canvas_script_url_tup

    # Remove false positives
    for false_positive in false_positives:
        canvas_script_urls.remove(false_positive)
    # total_canvas_fp_count = sum()
    all_canvasfp_ranks = {}
    all_canvasfp_ranks_urls = {}
    for canvas_scr_domain, canvas_scr_urls in canvas_scr_domains.iteritems():
        script_ranks_and_urls =\
            dbu.get_db_entry(db_file,
                             dbu.DBCmd.GET_RANK_AND_URLS_BY_CANVAS_SCRIPTS,
                             canvas_scr_urls)
        canvas_domain_counts[canvas_scr_domain] = len(script_ranks_and_urls)
        all_canvasfp_ranks[canvas_scr_domain] = map(lambda x: x[0],
                                                    script_ranks_and_urls)
        all_canvasfp_ranks_urls[canvas_scr_domain] = script_ranks_and_urls

    # print all_canvasfp_ranks
    # fu.write_to_file(j(out_dir, "%s-canvas.json" % crawl_name),
    #                 json.dumps(all_canvasfp_ranks))

    total_canvas_fp_count = sum(canvas_domain_counts.itervalues())

    # print "Total canvas FP count", total_canvas_fp_count
    rank_set = set()
    for _, v in all_canvasfp_ranks.iteritems():
        for rank in v:
            rank_set.add(rank)

    # print "Total canvas FP count - uniq", len(rank_set)

    nameSpace = {
        'title': "Crawl Report",
        'visits_cnt': visits_cnt,
        'completed_visits_cnt': completed_visits_cnt,
        'cookies': cookies[0],
        'localstorage': localstorage[0],
        'flash_cookie_count': flash_cookie_count[0],
        'canvas_meta_rows': canvas_meta_rows,
        'start': start,
        'end': end,
        'canvas_domain_counts': canvas_domain_counts,
        'canvas_url_counts': canvas_url_counts,
        'canvas_events_per_script': canvas_events_per_script,
        'canvas_scr_domains': canvas_scr_domains,
        'total_canvas_fp_count': total_canvas_fp_count,
        'canvas_script_urls': canvas_script_urls,
        'get_tld': cu.extract_domain,
        'xsite_flash_cookies': xsite_flash_cookies,
        'xsite_local_storages': xsite_local_storage,
        'respawned': respawned,
        'figs': figs,
        'canvasfp_ranks_urls': all_canvasfp_ranks_urls,
        # '3rdp_cookies': 3rdp_cookies,
    }
    report_template = Template(template_str, searchList=[nameSpace])
    fu.write_to_file(j(out_dir, "%s-report.html" % crawl_name),
                     str(report_template))
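Example #11 ends by rendering a Cheetah Template from the nameSpace dict and writing the resulting HTML with fu.write_to_file. A minimal sketch of just that final step is shown below; the template string, output path, and write_report name are placeholders, and a plain open().write() stands in for fu.write_to_file.

# Sketch of the report-writing step from Example #11: render a Cheetah
# template against a namespace dict and write the HTML to disk. Template
# string and paths are illustrative placeholders, not the example's own.
import os
from Cheetah.Template import Template

template_str = "<html><body><h1>$title</h1><p>Visits: $visits_cnt</p></body></html>"


def write_report(out_dir, crawl_name, namespace):
    report = Template(template_str, searchList=[namespace])
    out_path = os.path.join(out_dir, "%s-report.html" % crawl_name)
    with open(out_path, "w") as f:
        f.write(str(report))
    return out_path


# Example usage:
# write_report("/tmp", "crawl1", {"title": "Crawl Report", "visits_cnt": 42})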