# `parser` and `output` are project-local helper modules.
def Card(content):
    # extract anything that looks like a credit-card number
    cc = parser.Parser(content).getcc()
    if len(cc) > 1:
        output.Output().plus('Found Credit Cards: %s' % ', '.join(cc))
    elif len(cc) == 1:
        output.Output().plus('Found Credit Card: %s' % cc[0])


def IP(content):
    # extract private (RFC 1918) IP addresses
    list_ip = parser.Parser(content).getip()
    if len(list_ip) > 1:
        output.Output().plus('Found Private IPs: %s' % ', '.join(list_ip))
    elif len(list_ip) == 1:
        output.Output().plus('Found Private IP: %s' % list_ip[0])


def Email(content):
    # extract e-mail addresses
    list_email = parser.Parser(content).getmail()
    if len(list_email) > 1:
        output.Output().plus('Found Emails: %s' % ', '.join(list_email))
    elif len(list_email) == 1:
        output.Output().plus('Found Email: %s' % list_email[0])
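# A minimal sketch of the parser.Parser interface the three helpers above
# assume. This is a hypothetical reconstruction: the real module is
# project-local and its exact regex patterns are not shown here.
import re

class Parser:
    def __init__(self, content):
        self.content = content

    def getcc(self):
        # rough pattern: 13-16 digits, optionally separated by spaces/dashes
        return re.findall(r'\b(?:\d[ -]?){13,16}\b', self.content)

    def getip(self):
        # RFC 1918 private IPv4 ranges: 10/8, 172.16/12, 192.168/16
        return re.findall(
            r'\b(?:10\.\d{1,3}|172\.(?:1[6-9]|2\d|3[01])|192\.168)'
            r'(?:\.\d{1,3}){2}\b',
            self.content)

    def getmail(self):
        return re.findall(r'[\w.+-]+@[\w-]+\.[\w.-]+', self.content)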
def get_provider(url: str) -> BaseProvider:
    try:
        parsed: NamedTuple = parser.Parser(url).parse()
    except parser.ParserError:
        raise NoProviderFound()
    provider_class = get_provider_class(parsed.resource)  # type: ignore
    return provider_class(parsed.repo)  # type: ignore
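# get_provider_class is defined elsewhere in the project. A plausible sketch
# (hypothetical; registry contents and error behavior are assumptions) is a
# mapping from the parsed resource, e.g. a host name, to a BaseProvider
# subclass:
PROVIDER_REGISTRY: dict = {
    # 'github.com': GitHubProvider,  # hypothetical entry
}

def get_provider_class(resource: str) -> type:
    try:
        return PROVIDER_REGISTRY[resource]
    except KeyError:
        # assumed behavior: unknown resources surface as NoProviderFound
        raise NoProviderFound()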
                        # (continuation of a loss-logging call whose opening
                        # is truncated in this excerpt)
                        g2=loss_change_g2, fm1=loss_change_fm1, fm2=loss_change_fm2,
                        p=loss_change_p, path=opt.loss_file, e=e)
        utils.save_model(disc, gen, e, opt.checkpoints_file)
    utils.show_loss(opt.checkpoints_file)
    print("Done!")


if __name__ == '__main__':
    args = parser.Parser(__doc__)
    opt = args()
    print(f"Working directory: {os.getcwd()}")
    if not os.path.isdir(opt.checkpoints_file):
        os.mkdir(opt.checkpoints_file)
        print("checkpoints directory was created")
    if not os.path.isdir(opt.results_file):
        os.mkdir(opt.results_file)
        print("example directory was created")
    if not os.path.isdir(opt.loss_file):
        os.mkdir(opt.loss_file)
        print("tracked_losses directory was created")
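# parser.Parser(__doc__) above acts as a CLI-option factory built from the
# module docstring and returns a callable that yields the options object.
# A minimal argparse-based stand-in (hypothetical; option names taken from
# the __main__ block above, defaults made up) could look like:
import argparse

class Parser:
    def __init__(self, doc):
        self._p = argparse.ArgumentParser(description=doc)
        # only the options the __main__ block actually touches
        self._p.add_argument('--checkpoints_file', default='checkpoints')
        self._p.add_argument('--results_file', default='example')
        self._p.add_argument('--loss_file', default='tracked_losses')

    def __call__(self):
        return self._p.parse_args()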
import glob
import json
import os
from collections import Counter
from datetime import datetime

from tqdm import tqdm

import parser  # project-local commit-log parser module

f = open('data/features.csv', 'w')
f.write("name,commit_count,star_count,watcher_count,forks_count,"
        "contributers_count,open_issues_count,subscribers_count,period\n")
c = open('data/commits.csv', 'w')
c.write("name,time,value\n")

for coin in tqdm(glob.glob("data/*.json")):
    basename = os.path.splitext(coin)[0].split("/")[-1]
    log = parser.Parser("../data/" + basename + "-commits.logs")
    commit_count, dates = log.count_commits()  # number of commits per day
    formed_dates = []
    for date in dates:
        formed_date = datetime.strptime(date, "%a %b %d %H:%M:%S %Y %z").date()
        formed_dates.append(formed_date)
    commits_per_day = dict(Counter(formed_dates))
    # parse the json file
    json_file = open(coin)
    record = json.load(json_file)
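# log.count_commits() is not shown above. A plausible sketch (hypothetical):
# read a plain `git log` dump and collect each "Date:" line, whose default
# format ("%a %b %d %H:%M:%S %Y %z") matches the strptime call in the loop.
class Parser:
    def __init__(self, path):
        self.path = path

    def count_commits(self):
        dates = []
        with open(self.path) as fh:
            for line in fh:
                if line.startswith('Date:'):
                    dates.append(line[len('Date:'):].strip())
        # total number of commits, plus one raw date string per commit
        return len(dates), dates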