def paginate_table(table_rows, rows_per_page, title, output_dir):
    """Split *table_rows* across one or more HTML pages in *output_dir*.

    table_rows: sequence of HTML row strings (the leading ``<tr>`` is
        prepended here).
    rows_per_page: maximum rows per page; a value <= 0 means "unlimited"
        (everything goes on one page).
    title: value for each page's <title> element.
    output_dir: directory that receives index.html, page_2.html, ...

    Returns the list of paths to the files written (index.html first).
    """
    if rows_per_page <= 0:
        # Effectively unlimited -- forces a single page.
        rows_per_page = int(1e100)
    split_table = list(ut.split_n(table_rows, rows_per_page))
    # First page is index.html so a bare directory URL shows page 1;
    # subsequent pages are numbered starting from 2.
    page_names = ['index.html'] + ['page_%d.html' % i
                                   for i in range(2, 1 + len(split_table))]
    page_paths = [os.path.join(output_dir, fname) for fname in page_names]
    for i in range(len(split_table)):
        table_html = ('<table border = 1><tr>'
                      + '\n<tr>'.join(split_table[i]) + '</table>')
        if len(split_table) == 1:
            # Single page: no navigation needed.
            footer = ''
        else:
            # Back / Next links plus one numbered link per page; the
            # current page number is rendered in bold instead of linked.
            footer = ''
            footer += ("Back " if i == 0
                       else "<a href = '%s'>Back</a> " % page_names[i - 1])
            footer += ("Next " if i == len(page_names) - 1
                       else "<a href = '%s'>Next</a> " % page_names[i + 1])
            for j in range(len(split_table)):
                s = '<b>%d</b>' % (1 + j) if (i == j) else str(1 + j)
                footer += ("<a href = '%s'>%s</a> " % (page_names[j], s))
            footer += '<br><br><br><br>'
        # BUG FIX: the original template opened <body> but never closed it.
        ut.make_file(
            page_paths[i],
            "<html><head>%s<title>%s</title></head><body>%s<br>%s</body></html>"
            % (table_js(), title, table_html, footer))
    return page_paths
def test_push_then_pull_overwrites(clients):
    """After a push, pulling removes local files absent from the repo."""
    pusher, _ = clients
    pusher.push()
    stray = pusher.path / 'foo.txt'
    util.make_file(stray)
    pusher.pull()
    assert not stray.exists()
    assert (pusher.path / 'foo.json').exists()
def test_fetch_creates_refs(filesystem):
    """fetch creates the remote-tracking ref without touching the worktree."""
    publisher = filesystem / 'client_1'
    fetcher = filesystem / 'client_2'
    util.make_file(publisher / 'foo.txt')
    _git.add(publisher, 'foo.txt')
    _git.commit(publisher, 'Committing...')
    _git.push(publisher, 'origin', 'master')
    _git.fetch(fetcher, 'origin')
    assert _git.has_ref(fetcher, 'refs/remotes/origin/master')
    # Fetch must not check the file out into the second client's worktree.
    assert not (fetcher / 'foo.txt').exists()
def test_make_dirty_repository(clients):
    """Pulling discards a locally committed file that was never synced."""
    syncer, _ = clients
    util.make_file(syncer.path / 'foo.json')
    util.make_file(syncer.path / 'test.json')
    workdir = str(syncer.path)
    subprocess.run(['git', 'add', 'test.json'], cwd=workdir)
    subprocess.run(['git', 'commit', '-m', 'new'], cwd=workdir)
    assert (syncer.path / 'test.json').exists()
    syncer.pull()
    assert not (syncer.path / 'test.json').exists()
def clients(tmpdir):
    """Fixture: two GitSyncer clients cloned from one shared bare origin.

    The first client starts with an unsynced 'foo.json' in its worktree.
    """
    workdir_a = util.temporary_directory(tmpdir)
    workdir_b = util.temporary_directory(tmpdir)
    server = util.temporary_directory(tmpdir)
    util.initialize_bare_repository(server)
    for workdir in (workdir_a, workdir_b):
        util.clone_repository(server, workdir / '.')
    util.make_file(workdir_a / 'foo.json')
    return GitSyncer(workdir_a), GitSyncer(workdir_b)
def test_add_commit_push_then_pull(filesystem):
    """Only files matching the added pattern (*.txt) reach the pulling client."""
    source = filesystem / 'client_1'
    target = filesystem / 'client_2'
    filenames = ['test.txt', 'other.txt', 'nope.json']
    for filename in filenames:
        util.make_file(source / filename)
    _git.add(source, '*.txt')
    _git.commit(source, 'pushing...')
    _git.push(source, 'origin', 'master')
    _git.pull(target, 'origin', 'master')
    # The .json file was never staged, so it must not arrive.
    for filename in filenames:
        assert (target / filename).exists() == filename.endswith('.txt')
def test_ref_exists(filesystem):
    """has_ref flips only on commit (local ref) and push/fetch (remote ref)."""
    repo = filesystem / 'client_1'
    local_ref = 'refs/heads/master'
    remote_ref = 'refs/remotes/origin/master'
    assert not _git.has_ref(repo, local_ref)
    assert not _git.has_ref(repo, remote_ref)
    # Staging alone does not create the branch ref.
    util.make_file(repo / 'foo.txt')
    _git.add(repo, 'foo.txt')
    assert not _git.has_ref(repo, local_ref)
    _git.commit(repo, 'Committing...')
    assert _git.has_ref(repo, local_ref)
    _git.push(repo, 'origin', 'master')
    assert _git.has_ref(repo, remote_ref)
    # Fetching again leaves the existing remote-tracking ref in place.
    _git.fetch(repo, 'origin')
    assert _git.has_ref(repo, remote_ref)
def paginate_table(table_rows, rows_per_page, title, output_dir):
    """Split *table_rows* across one or more HTML pages in *output_dir*.

    NOTE(review): this is a duplicate of an earlier `paginate_table`
    definition in this file; the later definition wins at import time.
    Consider deleting one copy.

    table_rows: sequence of HTML row strings (the leading ``<tr>`` is
        prepended here).
    rows_per_page: maximum rows per page; a value <= 0 means "unlimited"
        (everything goes on one page).
    title: value for each page's <title> element.
    output_dir: directory that receives index.html, page_2.html, ...

    Returns the list of paths to the files written (index.html first).
    """
    if rows_per_page <= 0:
        # Effectively unlimited -- forces a single page.
        rows_per_page = int(1e100)
    split_table = list(ut.split_n(table_rows, rows_per_page))
    # First page is index.html so a bare directory URL shows page 1;
    # subsequent pages are numbered starting from 2.
    page_names = ['index.html'] + ['page_%d.html' % i
                                   for i in range(2, 1 + len(split_table))]
    page_paths = [os.path.join(output_dir, fname) for fname in page_names]
    for i in range(len(split_table)):
        table_html = ('<table border = 1><tr>'
                      + '\n<tr>'.join(split_table[i]) + '</table>')
        if len(split_table) == 1:
            # Single page: no navigation needed.
            footer = ''
        else:
            # Back / Next links plus one numbered link per page; the
            # current page number is rendered in bold instead of linked.
            footer = ''
            footer += ("Back " if i == 0
                       else "<a href = '%s'>Back</a> " % page_names[i - 1])
            footer += ("Next " if i == len(page_names) - 1
                       else "<a href = '%s'>Next</a> " % page_names[i + 1])
            for j in range(len(split_table)):
                s = '<b>%d</b>' % (1 + j) if (i == j) else str(1 + j)
                footer += ("<a href = '%s'>%s</a> " % (page_names[j], s))
            footer += '<br><br><br><br>'
        # BUG FIX: the original template opened <body> but never closed it.
        ut.make_file(
            page_paths[i],
            "<html><head>%s<title>%s</title></head><body>%s<br>%s</body></html>"
            % (table_js(), title, table_html, footer))
    return page_paths
def show_html(html): page = ut.make_temp('.html') ut.make_file(page, html) print 'opening', page webbrowser.open(page)
# Accumulates total analysis time across rounds -- presumably updated further
# down in the (truncated) loop body.  TODO confirm.
total_seconds_of_analyzing = 0
if __name__ == '__main__':
    # Token to raise GitHub rate limit constraint.
    # NOTE(review): token, data_dir, result_dir, result_file, starting_date,
    # ending_date, util and collect_repos are defined elsewhere in this file
    # (not visible in this chunk).
    if not token:
        print 'Forgot to export your token'
    # Create folder data if it does not exist already
    if not os.path.exists(data_dir):
        util.make_folder(data_dir)
    # Create result folder and file
    if not os.path.exists(result_dir):
        util.make_folder(result_dir)
    result_file_dir = result_dir + '/' + result_file
    if not os.path.isfile(result_file_dir):
        util.make_file(result_file_dir)
    # Collect url list.
    # ps/pe: previous window bounds; cs/ce: current window bounds -- TODO
    # confirm against the rest of the loop.
    ps = None
    pe = None
    # Use this set to download prior as well
    cs = None
    ce = starting_date
    # Use this set to download inclusive:
    #cs = starting_date
    #ce = cs + initial_delta
    print 'Starting date is ', starting_date, ' and ending date is ', ending_date
    # Loop one collection round per date window until the end date is reached.
    # NOTE(review): this chunk is truncated mid-loop -- the remainder of the
    # while body (including whatever advances pe) is not visible here.
    while (not pe or pe < ending_date):
        print "starting round at: ", datetime.datetime.now()
        if not cs:
            meta_list_with_count = collect_repos.collect_repo_urls(token, ce)
def test_push_a_then_push_b_raises_push_error(clients):
    """A second client pushing conflicting content raises SyncerPushError."""
    first, second = clients
    first.push()
    util.make_file(second.path / 'foo.json', 'foo bar')
    with raises(SyncerPushError):
        second.push()