Example #1
def downloadTransactionBetweenTime(wallet_id,
                                   end_time,
                                   start_time,
                                   store_path=""):  # [start_time,end_time]
    # for easily update, accuracy to date end = 2017-10-26 start = 2017-02-21. The path should be a directory instead of a file
    total_page = __getPageNum(wallet_id)
    page = range(1, total_page + 1)
    find_end = False

    if (store_path == ""):
        store_path = wallet_id + start_time + "To" + end_time
    if not os.path.exists(store_path):
        os.makedirs(store_path)

    for i in page:
        url = 'https://www.walletexplorer.com/wallet/' + wallet_id + '?page=' + str(
            i) + '&format=csv'

        local_file = store_path + "/" + wallet_id + str(i) + '.csv'

        data = NetIO.readDataFrom(url)

        # __findTime returns the newest (end) and oldest (start) date string on this page;
        # "YYYY-MM-DD" strings compare correctly as plain strings.
        end, start = __findTime(data)
        if start_time <= end <= end_time or start_time <= start <= end_time:
            CsvIO.writeToFile(local_file, data)
        elif end < start_time:
            # Pages run from newest to oldest, so every remaining page is older than start_time
            break
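
A minimal usage sketch for downloadTransactionBetweenTime; the wallet id, dates and directory below are placeholders, and the module is assumed to provide __getPageNum, __findTime, NetIO and CsvIO as in the original project:

# Hypothetical call: fetch every CSV page whose transactions fall between
# 2017-02-21 and 2017-10-26 for one WalletExplorer wallet.
downloadTransactionBetweenTime("SomeExchange.com",
                               end_time="2017-10-26",
                               start_time="2017-02-21",
                               store_path="SomeExchange_txs")
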
def extractTxid(wallet_tx_path, txid_store_file):
    print("Extracting")
    f = open(txid_store_file, 'a', encoding="utf-8")
    for filename in os.listdir(wallet_tx_path):
        try:
            content = csv.readFile(os.path.join(wallet_tx_path, filename))
            content = content.split("\n")
            all_txid = []
            # Skip the two header lines and the trailing empty line; the txid is the last CSV column
            for ii in range(2, len(content) - 1):
                all_txid.append(content[ii].split(",")[-1].replace('"', ""))
            for txid in all_txid:
                f.write(txid + '\n')
        except Exception:
            # Skip files that cannot be read or parsed
            continue
    f.close()

    print("Deleting duplication")
    content = csv.readFile(txid_store_file)
    content = content.split("\n")

    clear_content = set(content)
    try:
        clear_content.remove("")
    except:
        pass

    csv.writeToFile(txid_store_file, "")  # delete original
    f = open(txid_store_file, 'a', encoding="utf-8")
    for i in clear_content:
        f.write(i + "\n")
    f.close()

    print("Finish")
Example #3
def downloadAllPublicAddressOf(wallet_id,
                               local_file="",
                               start_page=1,
                               show_time=False,
                               clear_file=False):
    if local_file == "":
        local_file = wallet_id + ".csv"  # no output file given: derive one from the wallet id

    if clear_file:
        CsvIO.writeToFile(local_file, "")  # clear the file first

    total_page = __getKeyPageNum(wallet_id)
    #total_time = 0;

    # One regex per column of an address row: the <a href> gives the address,
    # the "amount" span the balance, and the two <td> patterns the incoming-tx
    # count and the first block the address appeared in.
    pattern2 = re.compile(r'<tr><td><a href=[^>]+')
    pattern_balance = re.compile(r'amount">[^&<]+')
    pattern_incomTx = re.compile(r"<td>[0-9]+")
    pattern_incomBlock = re.compile(r"<td>[0-9]+")

    for i in range(start_page, total_page + 1):
        pk_progress_moniter[wallet_id] = (i - 1) / total_page
        #start = time.time()
        url = 'https://www.walletexplorer.com/wallet/' + wallet_id + '/addresses?page=' + str(
            i)
        data = NetIO.readDataFrom(url)

        # `flag` is the current search offset: each pattern resumes where the
        # previous match ended, so the four fields of one row stay together.
        flag = 0

        match = True
        while match:
            match = pattern2.search(data, flag)

            if match:
                flag = int(match.span()[-1])
                sub = match.group()[26:-1]

                match_balance = pattern_balance.search(data, flag)
                flag = int(match_balance.span()[-1])
                balance = match_balance.group()[8:]

                match_incomTx = pattern_incomTx.search(data, flag)
                flag = int(match_incomTx.span()[-1])
                incomTx = match_incomTx.group()[4:]

                match_block = pattern_incomBlock.search(data, flag)
                flag = int(match_block.span()[-1])
                blockID = match_block.group()[4:]

                #print(sub+","+balance+","+incomTx+","+blockID)
                CsvIO.appendToFile(
                    local_file,
                    sub + "," + balance + "," + incomTx + "," + blockID)
            else:
                break
        pk_progress_moniter[wallet_id] = i / total_page
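
A usage sketch for downloadAllPublicAddressOf; the wallet id and output file are placeholders, and pk_progress_moniter and __getKeyPageNum are assumed to be defined at module level as in the original project:

# Hypothetical call: dump every address of the wallet, with its balance,
# incoming-transaction count and first block, into one CSV file.
downloadAllPublicAddressOf("SomeExchange.com",
                           local_file="SomeExchange_addresses.csv",
                           clear_file=True)
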
Example #4
def downloadTransactionBetweenTime(wallet_id,
                                   end_time,
                                   start_time,
                                   store_path="",
                                   download_transaction_detail=True,
                                   show_time=False):  # inclusive range [start_time, end_time]
    # Dates are day-accurate (e.g. end = "2017-10-26", start = "2017-02-21"), which makes
    # incremental updates easy. store_path should be a directory, not a file.
    total_page = __getPageNum(wallet_id)
    page = range(1, total_page + 1)
    #find_end = False

    if (store_path == ""):
        store_path = wallet_id + "_" + start_time + "To" + end_time
    if not os.path.exists(store_path):
        os.makedirs(store_path)
    if (download_transaction_detail):
        append_file = store_path + "/Txdetail"
        if not os.path.exists(append_file):
            os.makedirs(append_file)

    #total_time = 0;
    for i in page:
        tx_progress_moniter[wallet_id] = (i - 1) / total_page
        if download_transaction_detail:
            # append_file only exists when detail download is enabled
            detail_file = append_file + "/transactionDetail_" + str(i) + ".json"
        #start = time.time()
        url = 'https://www.walletexplorer.com/wallet/' + wallet_id + '?page=' + str(
            i) + '&format=csv'
        local_file = store_path + "/" + wallet_id + "_" + str(i) + '.csv'
        data = NetIO.readDataFrom(url)
        end, start = __findTime(data)  # newest and oldest date string on this page
        if start_time <= end <= end_time or start_time <= start <= end_time:
            CsvIO.writeToFile(local_file, data)
        elif end < start_time:
            # Pages run from newest to oldest, so every remaining page is older than start_time
            break
        else:
            # Page lies entirely after end_time: nothing was written, so skip the
            # detail download below and continue with the next (older) page
            tx_progress_moniter[wallet_id] = i / total_page
            continue
        if download_transaction_detail:
            # Also walk every transaction id on this page and append its JSON detail to a file
            d_data = CsvIO.readFile(local_file)
            dd = d_data.split("\n")
            all_txid = []
            # Skip the two header lines and the trailing empty line; the txid is the last CSV column
            for ii in range(2, len(dd) - 1):
                all_txid.append(dd[ii].split(",")[-1].replace('"', ""))

            for txid in all_txid:
                # Bottleneck: details are fetched one transaction at a time
                try:
                    CsvIO.appendToFile(detail_file, str(tq(txid)))
                except Exception:
                    pass
            # The threaded version below speeds this up considerably:
            # thrs = [threading.Thread(target=__appendDetailFile, args=[detail_file, txid]) for txid in all_txid]
            # [thr.start() for thr in thrs]
            # [thr.join() for thr in thrs]
        tx_progress_moniter[wallet_id] = i / total_page
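
The CsvIO and NetIO helpers (aliased as csv in some of the snippets) are not shown on this page. A minimal sketch of what they might look like, purely as an assumption so the examples can be run, is:

# Assumed stand-ins for the project's I/O helpers; the real implementations
# may differ (retries, rate limiting, encoding handling, ...).
import urllib.request


class NetIO:
    @staticmethod
    def readDataFrom(url):
        # Fetch a URL and return the response body as text
        with urllib.request.urlopen(url) as resp:
            return resp.read().decode("utf-8")


class CsvIO:
    @staticmethod
    def readFile(path):
        with open(path, "r", encoding="utf-8") as f:
            return f.read()

    @staticmethod
    def writeToFile(path, content):
        with open(path, "w", encoding="utf-8") as f:
            f.write(content)

    @staticmethod
    def appendToFile(path, content):
        # One record per call, so terminate it with a newline
        with open(path, "a", encoding="utf-8") as f:
            f.write(content + "\n")
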
def downloadOneCSV(url):
    global buffer, thread, error_log, error_url, log_file, store_path, url_file, all_wallet
    if (url == ""):
        return

    try:
        filename = url.replace("https://www.walletexplorer.com/wallet/",
                               "").replace("?",
                                           "_").replace("format=csv", ".csv")
        content = NetIO.readDataFrom(url)
        csv.writeToFile(store_path + filename, content)

    except Exception as e:
        csv.appendToFile(error_log, str(e))
        csv.appendToFile(error_url, url)


#Call main to start
#main()
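
A usage sketch for downloadOneCSV; the URL is a placeholder, and the module-level globals (store_path, error_log, error_url) are assumed to have been initialized by init()/main() first:

# Hypothetical call: download a single per-page CSV export for one wallet.
downloadOneCSV("https://www.walletexplorer.com/wallet/SomeExchange.com?page=1&format=csv")
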
Example #6
def main():
    global buffer, thread, error_log, error_url, log_file, store_path, url_file, all_wallet, result_file

    init()
    if not os.path.exists(store_path):
        os.makedirs(store_path)
    if not os.path.exists(log_path):
        os.makedirs(log_path)
    error_log = log_path + error_log
    error_url = log_path + error_url
    log_file = log_path + log_file

    url_file = log_path + url_file

    result_file = store_path + result_file

    csv.writeToFile(url_file, "")  # clear the file

    downladPkFor(all_wallet)
def downloadTxOnly(txidpath, deduplicate=True, hint=""):
    if deduplicate:
        content = csv.readFile(txidpath)
        content = content.split("\n")
        clear_content = set(content)
        clear_content.discard("")  # drop the empty entry left by the trailing newline
        csv.writeToFile(txidpath, "")  # clear the original file
        f = open(txidpath, 'a', encoding="utf-8")
        for i in clear_content:
            f.write(i + "\n")
        f.close()
    DetailedTransactionDownloader.init()
    DetailedTransactionDownloader.url_file = txidpath

    # Run the downloader in one thread and a progress monitor in another
    main_thread = threading.Thread(target=DetailedTransactionDownloader.main)
    monitor = threading.Thread(target=addMonitorFor,
                               args=[DetailedTransactionDownloader, 30])
    main_thread.start()
    monitor.start()
    main_thread.join()
    monitor.join()
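
A usage sketch for downloadTxOnly; the txid file is a placeholder, and DetailedTransactionDownloader and addMonitorFor are assumed to be importable as in the original project:

# Hypothetical call: de-duplicate the txid list in place, then download the
# detailed transaction records in a worker thread with a progress monitor.
downloadTxOnly("SomeExchange_txids.txt", deduplicate=True)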