Example #1
def generate_car_files(input_dir, config_path):
    config = read_config(config_path)
    output_dir = config['sender']['output_dir']
    generate_md5 = config['sender']['generate_md5']

    file_paths = read_file_path_in_dir(input_dir)
    Path(output_dir).mkdir(parents=True, exist_ok=True)

    deal_list: List[OfflineDeal] = []

    for file_path in file_paths:
        source_file_name = os.path.basename(file_path)

        offline_deal = OfflineDeal()
        offline_deal.source_file_name = source_file_name
        offline_deal.source_file_path = file_path
        offline_deal.source_file_size = os.path.getsize(file_path)
        if generate_md5:
            # car_file_md5 is used as a flag here, requesting an md5 checksum for the car file
            offline_deal.car_file_md5 = True
        deal_list.append(offline_deal)

    generate_car(deal_list, output_dir)
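
A minimal usage sketch for the function above; the directory and config paths are placeholders, and the assumption that generate_car_files is importable at this point is illustrative only.

# Hypothetical invocation: paths below are placeholders, not part of the original example.
# The config file is expected to provide [sender] output_dir and [sender] generate_md5,
# which generate_car_files reads before packing every file in input_dir into car files.
generate_car_files(
    input_dir="/data/source-files",
    config_path="./config.toml",
)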
Example #2
def send_deals_to_miner(deal_conf: DealConfig,
                        output_dir,
                        skip_confirmation: bool,
                        task_name=None,
                        csv_file_path=None,
                        deal_list=None,
                        task_uuid=None):

    Path(output_dir).mkdir(parents=True, exist_ok=True)
    attributes = [
        i for i in OfflineDeal.__dict__.keys() if not i.startswith("__")
    ]

    file_name_suffix = "-deals"

    if csv_file_path:
        csv_file_name = os.path.basename(csv_file_path)
        filename, file_ext = os.path.splitext(csv_file_name)
        output_csv_path = os.path.join(output_dir,
                                       filename + file_name_suffix + file_ext)
    else:
        output_csv_path = os.path.join(output_dir,
                                       task_name + file_name_suffix + ".csv")

    # Rebuild the deal list from csv_file_path when it was not passed in directly
    if not deal_list:
        deal_list = []
        with open(csv_file_path, "r") as csv_file:
            fieldnames = attributes

            reader = csv.DictReader(csv_file,
                                    delimiter=',',
                                    fieldnames=fieldnames)
            next(reader, None)  # skip the header row; fieldnames are supplied explicitly
            for row in reader:
                deal = OfflineDeal()
                for attr, value in row.items():
                    setattr(deal, attr, value)
                deal_list.append(deal)

    for _deal in deal_list:

        data_cid = _deal.data_cid
        piece_cid = _deal.piece_cid
        source_file_url = _deal.car_file_url
        md5 = _deal.car_file_md5
        file_size = _deal.source_file_size
        prices = get_miner_price(deal_conf.miner_id)

        if prices:
            if deal_conf.verified_deal:
                price = prices['verified_price']
            else:
                price = prices['price']
        else:
            continue

        if Decimal(price).compare(Decimal(deal_conf.max_price)) > 0:
            logging.warning("miner %s price %s higher than max price %s" %
                            (deal_conf.miner_id, price, deal_conf.max_price))
            continue
        if int(file_size) > 0:
            piece_size, sector_size = calculate_piece_size_from_file_size(
                file_size)
        else:
            logging.error("file %s is too small" % _deal.source_file_name)
            continue

        cost = f'{calculate_real_cost(sector_size, price):.18f}'

        _deal_cid, _start_epoch = propose_offline_deal(price, str(cost),
                                                       str(piece_size),
                                                       data_cid, piece_cid,
                                                       deal_conf,
                                                       skip_confirmation)

        _deal.miner_id = deal_conf.miner_id
        _deal.start_epoch = _start_epoch
        _deal.deal_cid = _deal_cid

    logging.info("Swan deal final CSV Generated: %s" % output_csv_path)

    with open(output_csv_path, "w", newline="") as output_csv_file:  # newline="" avoids blank rows on Windows
        output_fieldnames = [
            'uuid', 'miner_id', 'file_source_url', 'md5', 'start_epoch',
            'deal_cid'
        ]
        csv_writer = csv.DictWriter(output_csv_file,
                                    delimiter=',',
                                    fieldnames=output_fieldnames)
        csv_writer.writeheader()

        for deal in deal_list:
            csv_data = {
                'uuid': task_uuid,
                'miner_id': deal_conf.miner_id,
                'file_source_url': deal.car_file_url,
                'md5': deal.car_file_md5,
                'start_epoch': deal.start_epoch,
                'deal_cid': deal.deal_cid
            }
            csv_writer.writerow(csv_data)

    return output_csv_path
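
A hedged usage sketch for send_deals_to_miner; the DealConfig value, paths, and UUID are assumptions for illustration. The function only relies on the miner_id, verified_deal, and max_price fields of deal_conf, as read in the loop above.

import uuid

# Hypothetical call: deal_conf is assumed to be a DealConfig already populated with
# miner_id, verified_deal and max_price (the only fields the function reads from it).
# Because csv_file_path is given, the deal list is rebuilt from that CSV and the output
# file is named after it with a "-deals" suffix.
output_csv = send_deals_to_miner(
    deal_conf,
    output_dir="/data/output",
    skip_confirmation=True,
    csv_file_path="/data/output/my-task-metadata.csv",
    task_uuid=str(uuid.uuid4()),
)
print("deals written to", output_csv)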
Example #3
def create_new_task(input_dir, out_dir, config_path, task_name, miner_id=None):
    # todo move config reading to cli level
    config = read_config(config_path)
    output_dir = out_dir
    if not output_dir:
        output_dir = config['sender']['output_dir']
    public_deal = config['sender']['public_deal']
    verified_deal = config['sender']['verified_deal']
    generate_md5 = config['sender']['generate_md5']
    offline_mode = config['sender']['offline_mode']

    api_url = config['main']['api_url']
    api_key = config['main']['api_key']
    access_token = config['main']['access_token']

    host = config['web-server']['host']
    port = config['web-server']['port']
    path = config['web-server']['path']

    download_url_prefix = str(host).rstrip("/")
    download_url_prefix = download_url_prefix + ":" + str(port)

    task_uuid = str(uuid.uuid4())
    final_csv_path = ""

    path = str(path).strip("/")
    logging.info(
        "Swan Client Settings: Public Task: %s  Verified Deals: %s  Connected to Swan: %s CSV/car File output dir: %s"
        % (public_deal, verified_deal, not offline_mode, output_dir))
    if path:
        download_url_prefix = os.path.join(download_url_prefix, path)
    # TODO: Need to support 2 stage
    if not public_deal:
        if not miner_id:
            print('Please provide --miner for non public deal.')
            exit(1)

    file_paths = read_file_path_in_dir(input_dir)
    Path(output_dir).mkdir(parents=True, exist_ok=True)

    deal_list: List[OfflineDeal] = []

    for file_path in file_paths:
        source_file_name = os.path.basename(file_path)

        offline_deal = OfflineDeal()
        offline_deal.source_file_name = source_file_name
        offline_deal.source_file_path = file_path
        offline_deal.source_file_size = os.path.getsize(file_path)
        if generate_md5:
            offline_deal.car_file_md5 = True
        deal_list.append(offline_deal)

    # NOTE: the deal list built above is discarded; deals are re-read from car.csv below
    deal_list: List[OfflineDeal] = []
    csv_file_path = os.path.join(input_dir, "car.csv")
    with open(csv_file_path, "r") as csv_file:
        fieldnames = ['car_file_name', 'car_file_path', 'piece_cid', 'data_cid', 'car_file_size', 'car_file_md5',
                      'source_file_name', 'source_file_path', 'source_file_size', 'source_file_md5']
        reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
        next(reader, None)
        for row in reader:
            deal = OfflineDeal()
            for attr, value in row.items():
                setattr(deal, attr, value)
            deal_list.append(deal)

    # generate_car(deal_list, output_dir)

    for deal in deal_list:
        deal.car_file_url = os.path.join(download_url_prefix, deal.car_file_name)

    if not public_deal:
        final_csv_path = send_deals(config_path, miner_id, task_name, deal_list=deal_list, task_uuid=task_uuid)

    if offline_mode:
        client = None
        logging.info("Working in Offline Mode. You need to manually send out task on filwan.com. ")
    else:
        client = SwanClient(api_url, api_key, access_token)
        logging.info("Working in Online Mode. A swan task will be created on the filwan.com after process done. ")

    task = SwanTask(
        task_name=task_name,
        is_public=public_deal,
        is_verified=verified_deal
    )

    if miner_id:
        task.miner_id = miner_id

    generate_metadata_csv(deal_list, task, output_dir, task_uuid)
    generate_csv_and_send(task, deal_list, output_dir, client, task_uuid)
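
A usage sketch for create_new_task; all paths and identifiers below are placeholders. Note that input_dir must already contain a car.csv with the fieldnames listed in the function, since the deal list is read from that file.

# Hypothetical call: paths, task name and miner id are placeholders.
# input_dir must contain car.csv (car_file_name, car_file_path, piece_cid, data_cid, ...),
# and miner_id is mandatory when [sender] public_deal is false in the config.
create_new_task(
    input_dir="/data/car-output",
    out_dir="/data/task-output",
    config_path="./config.toml",
    task_name="my-first-task",
    miner_id="f01234",
)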