def async_processing(post_data):
    """Run the dataset -> Weibull-graph pipeline for one GitHub repository.

    Intended to execute as an RQ background task: progress and errors are
    reported through the current job's ``meta`` dict (``'progress'`` set to
    100 on success or ``'ERROR'`` on failure) so the enqueuing process can
    poll the outcome.

    Args:
        post_data: dict carrying the request payload; keys read here are
            'github_url', 'github_token', 'must_have' and
            'blocklist_labels' (issue-label filter rules).

    Returns:
        None. All results are communicated via side effects
        (``save_error``/``save_image`` and the job meta).
    """
    job = get_current_job()

    url = clean_url(post_data['github_url'])

    filters_rules = {
        'labels': {
            'must_have': post_data['must_have'],
            'blocklist_labels': post_data['blocklist_labels'],
        },
    }

    dg = DatasetGenerator(
        token=post_data['github_token'],
        repository=Repository(url, filters_rules),
        loadFromFile=False,
    )

    # Too few issues to fit a meaningful distribution: persist the error,
    # flag the job meta so the poller can surface it, and stop early.
    # Single message variable keeps the DB record and the job meta in sync.
    if len(dg.filtered_issues) < 200:
        error_message = ("This repository has less than 200 issues "
                         "after applying the bug filters")
        save_error(dg.repository, error_message)
        job.meta['progress'] = 'ERROR'
        job.meta['error'] = error_message
        job.save_meta()
        return

    gg = GraphGenerator(dg)
    gg.weibull()

    save_image(dg.repository)
    job.meta['progress'] = 100
    job.save_meta()
def main():
    """Build the dataset and Weibull graph from a locally saved dataset file.

    NOTE(review): relies on module-level ``GITHUB_TOKEN`` and a repository
    object ``r`` defined elsewhere in this file — confirm both exist before
    calling. Unlike ``async_processing``, this path loads the dataset from
    disk (``loadFromFile=True``) instead of fetching from GitHub.
    """

    # change loadFromFile to True if you have dataset file
    dg = DatasetGenerator(token=GITHUB_TOKEN, repository=r, loadFromFile=True)

    gg = GraphGenerator(dg)
    gg.weibull()