Example #1
def update_task_by_uuid(config_path, task_uuid, miner_fid, csv):
    config = read_config(config_path)
    api_url = config['main']['api_url']
    api_key = config['main']['api_key']
    access_token = config['main']['access_token']
    client = SwanClient(api_url, api_key, access_token)
    client.update_task_by_uuid(task_uuid, miner_fid, csv)
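A minimal usage sketch for this helper. Every value below is hypothetical, and the csv argument is assumed to be the path of the task CSV to upload:

# Hypothetical values for illustration only.
update_task_by_uuid(
    config_path="./config.toml",
    task_uuid="ce193e3f-e424-4b33-87fa-bb8c4e7e2d5f",
    miner_fid="f012345",
    csv="./my-task.csv",  # assumed: path to the task CSV
)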
Example #2
def generate_csv_and_send(_task: SwanTask, deal_list: List[OfflineDeal],
                          _output_dir: str, _client: SwanClient, _uuid: str):
    _csv_name = _task.task_name + ".csv"
    _csv_path = os.path.join(_output_dir, _csv_name)

    with open(_csv_path, "w", newline="") as csv_file:
        fieldnames = [
            'uuid', 'miner_id', 'deal_cid', 'file_source_url', 'md5',
            'start_epoch'
        ]
        csv_writer = csv.DictWriter(csv_file,
                                    delimiter=',',
                                    fieldnames=fieldnames)
        csv_writer.writeheader()
        for _deal in deal_list:
            csv_data = {
                'uuid': _uuid,
                'miner_id': _deal.miner_id,
                'deal_cid': _deal.deal_cid,
                'file_source_url': _deal.car_file_url,
                'md5': _deal.car_file_md5 if _deal.car_file_md5 else "",
                'start_epoch': _deal.start_epoch
            }
            csv_writer.writerow(csv_data)

    logging.info('Swan task CSV generated: %s' % _csv_path)

    if _client:
        with open(_csv_path, "r") as csv_file:
            _client.post_task(_task, csv_file)
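For reference, the CSV written above holds one row per deal. A sketch of its shape with illustrative values (md5 may be empty, per the fallback above):

uuid,miner_id,deal_cid,file_source_url,md5,start_epoch
ce193e3f-e424-4b33-87fa-bb8c4e7e2d5f,f012345,bafyrei...,http://host:port/path/file.car,,123456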
Example #3
def upload_car_files(input_dir, config_path):
    class CarFile:
        car_file_name = None
        car_file_path = None
        piece_cid = None
        data_cid = None
        car_file_size = None
        car_file_md5 = None
        source_file_name = None
        source_file_path = None
        source_file_size = None
        source_file_md5 = None
        car_file_address = None

    # CarFile attribute names, in definition order (dunder entries excluded)
    attributes = [i for i in CarFile.__dict__.keys() if not i.startswith("__")]

    config = read_config(config_path)
    storage_server_type = config['main']['storage_server_type']
    if storage_server_type == "web server":
        logging.info("Please upload car files to web server manually.")
    else:
        gateway_address = config['ipfs-server']['gateway_address']
        gateway_ip, gateway_port = SwanClient.parseMultiAddr(gateway_address)
        car_files_list: List[CarFile] = []
        car_csv_path = os.path.join(input_dir, "car.csv")
        with open(car_csv_path, "r") as csv_file:
            reader = csv.DictReader(csv_file,
                                    delimiter=',',
                                    fieldnames=attributes)
            next(reader, None)  # skip the header row
            for row in reader:
                car_file = CarFile()
                for attr in row.keys():
                    setattr(car_file, attr, row.get(attr))
                car_files_list.append(car_file)
        for car_file in car_files_list:
            logging.info("Uploading car file %s" % car_file.car_file_name)
            car_file_hash = SwanClient.upload_car_to_ipfs(
                car_file.car_file_path)
            car_file.car_file_address = "http://%s:%s/ipfs/%s" % (
                gateway_ip, gateway_port, car_file_hash)
            logging.info("Car file %s uploaded: %s" %
                         (car_file.car_file_name, car_file.car_file_address))

        with open(car_csv_path, "w") as csv_file:
            csv_writer = csv.DictWriter(csv_file,
                                        delimiter=',',
                                        fieldnames=attributes)
            csv_writer.writeheader()
            for car_file in car_files_list:
                csv_writer.writerow(car_file.__dict__)
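Given the CarFile class above, the header row written back to car.csv lists the attribute names in definition order:

car_file_name,car_file_path,piece_cid,data_cid,car_file_size,car_file_md5,source_file_name,source_file_path,source_file_size,source_file_md5,car_file_address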
Example #4
def scanner():
    config = read_config()
    api_url = config['main']['api_url']
    api_key = config['main']['api_key']
    access_token = config['main']['access_token']
    scan_interval = config['main']['scan_interval']
    miner_fid = config['main']['miner_fid']

    while True:
        client = SwanClient(api_url, api_key, access_token)
        deals = client.get_offline_deals(miner_fid, DEAL_STATUS_FILE_IMPORTED, SCAN_NUMBER)

        if deals is None or isinstance(deals, Exception):
            if isinstance(deals, Exception):
                logger.error(str(deals))
            logger.error("Failed to get offline deals.")
            logger.info("Sleeping...")
            time.sleep(scan_interval)
            continue

        if len(deals) == 0:
            logger.info("No ongoing offline deals found.")
            logger.info("Sleeping...")
            time.sleep(scan_interval)
            continue

        for deal in deals:
            deal_id = deal.get("id")
            deal_cid = deal.get("deal_cid")
            logger.info("ID: %s. Deal CID: %s. Deal Status: %s.", deal.get("id"), deal_cid, deal.get("status"))
            command = "lotus-miner storage-deals list -v | grep " + deal_cid
            try:
                pipe = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                stdout, stderr = pipe.communicate()
                if stderr != b'':
                    raise Exception(stderr)
                if stdout == b'':
                    note = "Failed to find deal on chain."
                    update_offline_deal_status(DEAL_STATUS_FAILED, note, deal_id)
                    raise Exception("Failed to find deal on chain. Deal ID: " + deal_id)
                stdout = stdout.decode("utf-8")
                logger.info("Deal details: %s", stdout)
                on_chain_message = ""
                # Deal status starts with StorageDeal, such as StorageDealError, StorageDealSealing etc.
                deal_status_index = stdout.find("StorageDeal", 0)
                on_chain_status = stdout[deal_status_index:stdout.find(' ', deal_status_index)]
                # Only an ERROR deal carries a message
                if on_chain_status == ONCHAIN_DEAL_STATUS_ERROR:
                    # The error message usually starts around character offset 355
                    on_chain_message = stdout[355:].strip()
                    note = "Deal on chain status is StorageDealError."
                    update_offline_deal_status(DEAL_STATUS_FAILED, note, deal_id)
                    logger.info("Setting deal %s status as ImportFailed", deal_cid)
                if on_chain_status == ONCHAIN_DEAL_STATUS_ACTIVE:
                    deal_complete_note = "Deal has been completed"
                    update_offline_deal_status(DEAL_STATUS_ACTIVE, deal_complete_note, deal_id)
                    logger.info("Setting deal %s status as Active", deal_cid)
                if on_chain_status == ONCHAIN_DEAL_STATUS_AWAITING:
                    current_epoch = get_current_epoch()
                    if current_epoch != -1 and current_epoch > deal.get("start_epoch"):
                        note = "Sector is proved and active, while deal on chain status is " \
                               "StorageDealAwaitingPreCommit. Set deal status as ImportFailed."
                        update_offline_deal_status(DEAL_STATUS_FAILED, note, deal_id)
                        logger.info("Setting deal %s status as ImportFailed due to on chain status bug.", deal_cid)
                message = {
                    "on_chain_status": on_chain_status,
                    "on_chain_message": on_chain_message
                }
                offline_deal_message = OfflineDealMessage(message_type=MESSAGE_TYPE_ON_CHAIN,
                                                          message_body=json.dumps(message),
                                                          offline_deals_cid=deal_cid)
                # TODO: Update offline deal message to Swan
                logger.info("On chain offline_deal message created. Message Body: %s.", json.dumps(message))
                continue
            except Exception as e:
                message = {
                    "message": str(e)
                }
                offline_deal_message = OfflineDealMessage(message_type=MESSAGE_TYPE_SWAN,
                                                          message_body=json.dumps(message),
                                                          offline_deals_cid=deal_cid)
                # TODO: Update offline deal message to Swan
                logger.info("On chain offline_deal message created. Message Body: %s.", json.dumps(message))
                logger.error(str(e))
                continue
        logger.info("Sleeping...")
        time.sleep(scan_interval)
Example #5
DEAL_STATUS_FILE_IMPORTED = "FileImported"
DEAL_STATUS_ACTIVE = 'DealActive'
MESSAGE_TYPE_ON_CHAIN = "ON CHAIN"
MESSAGE_TYPE_SWAN = "SWAN"
ONCHAIN_DEAL_STATUS_ERROR = "StorageDealError"
ONCHAIN_DEAL_STATUS_ACTIVE = "StorageDealActive"
ONCHAIN_DEAL_STATUS_AWAITING = "StorageDealAwaitingPreCommit"

# Max number of deals to be scanned at a time
SCAN_NUMBER = "100"

config = read_config()
api_url = config['main']['api_url']
api_key = config['main']['api_key']
access_token = config['main']['access_token']
client = SwanClient(api_url, api_key, access_token)

class OfflineDealMessage:

    def __init__(self, message_type, message_body, offline_deals_cid):
        self.message_type = message_type
        self.message_body = message_body
        self.offline_deals_cid = offline_deals_cid

    def toJSON(self):
        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)


def get_current_epoch():
    try:
        info_proving = subprocess.run(['lotus-miner', 'proving', 'info'],
                                      stdout=subprocess.PIPE).stdout.decode('utf-8')
        current_epoch = int(re.search("(?<=Current Epoch: {11})[0-9]+", info_proving).group(0))
        return current_epoch
    except Exception as e:
        logger.error("Failed to get current epoch. Please check if lotus-miner is running properly.")
        logger.error(str(e))
        # -1 signals failure to callers such as scanner()
        return -1
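The lookbehind in the regex above assumes the lotus-miner output contains a line shaped like the one below, with exactly eleven spaces after the colon (spacing can differ across lotus versions, in which case the pattern needs adjusting):

Current Epoch:           123456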
Example #6
def importer():
    logger = logging.getLogger('swan_miner_deal_importer')
    config = read_config()
    api_url = config['main']['api_url']
    api_key = config['main']['api_key']
    access_token = config['main']['access_token']
    import_interval = config['main']['import_interval']
    expected_sealing_time = config['main']['expected_sealing_time']
    miner_fid = config['main']['miner_fid']

    while True:
        client = SwanClient(api_url, api_key, access_token)
        deals = client.get_offline_deals(miner_fid, DEAL_STATUS_READY,
                                         IMPORT_NUMBER)

        if deals is None or isinstance(deals, Exception):
            if isinstance(deals, Exception):
                logger.error(str(deals))
            logger.error("Failed to get offline deals.")
            logger.info("Sleeping...")
            time.sleep(import_interval)
            continue

        if len(deals) == 0:
            logger.info("No pending offline deals found.")
            logger.info("Sleeping...")
            time.sleep(import_interval)
            continue

        for deal in deals:
            logger.info('')
            logger.info(
                "Deal CID: %s. File Path: %s",
                deal["deal_cid"],
                deal["file_path"],
            )

            on_chain_status = get_deal_on_chain_status(deal["deal_cid"])
            if not on_chain_status.startswith("StorageDeal"):
                logger.error(on_chain_status)
                logger.error(
                    "Failed to get deal on chain status, please check if lotus-miner is running properly."
                )
                logger.info("Sleeping...")
                time.sleep(import_interval)
                break

            logger.info("Deal on chain status: %s.", on_chain_status)

            if on_chain_status == ONCHAIN_DEAL_STATUS_ERROR:
                logger.info("Deal on chain status is error before importing.")
                note = "Deal error before importing."
                update_offline_deal_status(DEAL_STATUS_FAILED, note,
                                           str(deal["id"]))
                continue

            if on_chain_status == ONCHAIN_DEAL_STATUS_ACTIVE:
                logger.info("Deal on chain status is active before importing.")
                note = "Deal active before importing."
                update_offline_deal_status(DEAL_STATUS_ACTIVE, note,
                                           str(deal["id"]))
                continue

            if on_chain_status == ONCHAIN_DEAL_STATUS_ACCEPT:
                logger.info(
                    "Deal on chain status is StorageDealAcceptWait. Deal will be ready shortly."
                )
                continue

            if on_chain_status == ONCHAIN_DEAL_STATUS_NOTFOUND:
                logger.info("Deal on chain status not found.")
                note = "Deal not found."
                update_offline_deal_status(DEAL_STATUS_FAILED, note,
                                           str(deal["id"]))
                continue

            if on_chain_status != ONCHAIN_DEAL_STATUS_WAITING:
                logger.info("Deal is already imported, please check.")
                note = on_chain_status
                update_offline_deal_status(DEAL_STATUS_FILE_IMPORTED, note,
                                           str(deal["id"]))
                continue

            try:
                info_proving = subprocess.run(
                    ['lotus-miner', 'proving', 'info'],
                    stdout=subprocess.PIPE).stdout.decode('utf-8')
                current_epoch = int(
                    re.search("(?<=Current Epoch: {11})[0-9]+",
                              info_proving).group(0))
            except Exception as e:
                logger.error(
                    "Failed to get current epoch. Please check if miner is running properly."
                )
                logger.error(str(e))
                logger.info("Sleeping...")
                time.sleep(import_interval)
                break

            logger.info("Current epoch: %s. Deal starting epoch: %s",
                        current_epoch, deal.get("start_epoch"))
            try:
                if deal.get("start_epoch") - current_epoch < expected_sealing_time:
                    logger.info(
                        "Deal will start too soon. Do not import this deal.")
                    note = "Deal expired."
                    update_offline_deal_status(DEAL_STATUS_FAILED, note,
                                               str(deal["id"]))
                    continue

                command = "lotus-miner storage-deals import-data " + deal.get(
                    "deal_cid") + " " + deal.get("file_path")
                logger.info('Command: %s' % command)

                note = ""
                update_offline_deal_status(DEAL_STATUS_FILE_IMPORTING, note,
                                           str(deal["id"]))

                pipe = subprocess.Popen(command,
                                        shell=True,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                out, err = pipe.communicate()

                # There should be no output if everything goes well
                if out != b'':
                    update_offline_deal_status(DEAL_STATUS_FAILED, str(out),
                                               str(deal["id"]))
                    logger.error(
                        "Import deal failed. CID: %s. Error message: %s",
                        deal["deal_cid"], str(out))
                    continue

                update_offline_deal_status(DEAL_STATUS_FILE_IMPORTED, "",
                                           str(deal["id"]))

                logger.info("Deal CID %s imported.", deal["deal_cid"])
                logger.info("Sleeping...")
                time.sleep(import_interval)

            except Exception as e:
                logger.error("Import deal failed. CID: %s. Error message: %s",
                             deal["deal_cid"], str(e))
                note = str(e)
                update_offline_deal_status(DEAL_STATUS_FAILED, note,
                                           str(deal["id"]))
                continue

        logger.info("Sleeping...")
        time.sleep(import_interval)
Example #7
def create_new_task(input_dir, out_dir, config_path, task_name, miner_id=None):
    # todo move config reading to cli level
    config = read_config(config_path)
    output_dir = out_dir
    if not output_dir:
        output_dir = config['sender']['output_dir']
    public_deal = config['sender']['public_deal']
    verified_deal = config['sender']['verified_deal']
    generate_md5 = config['sender']['generate_md5']
    offline_mode = config['sender']['offline_mode']

    api_url = config['main']['api_url']
    api_key = config['main']['api_key']
    access_token = config['main']['access_token']

    host = config['web-server']['host']
    port = config['web-server']['port']
    path = config['web-server']['path']

    download_url_prefix = str(host).rstrip("/")
    download_url_prefix = download_url_prefix + ":" + str(port)

    task_uuid = str(uuid.uuid4())
    final_csv_path = ""

    path = str(path).strip("/")
    logging.info(
        "Swan Client Settings: Public Task: %s, Verified Deals: %s, Connected to Swan: %s, CSV/car file output dir: %s"
        % (public_deal, verified_deal, not offline_mode, output_dir))
    if path:
        download_url_prefix = os.path.join(download_url_prefix, path)
    # TODO: Need to support 2 stage
    if not public_deal:
        if not miner_id:
            print('Please provide --miner for a non-public deal.')
            exit(1)

    file_paths = read_file_path_in_dir(input_dir)
    Path(output_dir).mkdir(parents=True, exist_ok=True)

    deal_list: List[OfflineDeal] = []

    for file_path in file_paths:
        source_file_name = os.path.basename(file_path)

        offline_deal = OfflineDeal()
        offline_deal.source_file_name = source_file_name
        offline_deal.source_file_path = file_path
        offline_deal.source_file_size = os.path.getsize(file_path)
        if generate_md5:
            offline_deal.car_file_md5 = True
        deal_list.append(offline_deal)

    # Note: the list built above only feeds the commented-out generate_car
    # step below; the deal list actually used is rebuilt from car.csv.
    deal_list: List[OfflineDeal] = []
    csv_file_path = os.path.join(input_dir, "car.csv")
    with open(csv_file_path, "r") as csv_file:
        fieldnames = ['car_file_name', 'car_file_path', 'piece_cid', 'data_cid', 'car_file_size', 'car_file_md5',
                      'source_file_name', 'source_file_path', 'source_file_size', 'source_file_md5']
        reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
        next(reader, None)  # skip the header row
        for row in reader:
            deal = OfflineDeal()
            for attr in row.keys():
                setattr(deal, attr, row.get(attr))
            deal_list.append(deal)

    # generate_car(deal_list, output_dir)

    for deal in deal_list:
        deal.car_file_url = os.path.join(download_url_prefix, deal.car_file_name)

    if not public_deal:
        final_csv_path = send_deals(config_path, miner_id, task_name, deal_list=deal_list, task_uuid=task_uuid)

    if offline_mode:
        client = None
        logging.info("Working in Offline Mode. You need to manually send out the task on filwan.com.")
    else:
        client = SwanClient(api_url, api_key, access_token)
        logging.info("Working in Online Mode. A Swan task will be created on filwan.com after the process is done.")

    task = SwanTask(
        task_name=task_name,
        is_public=public_deal,
        is_verified=verified_deal
    )

    if miner_id:
        task.miner_id = miner_id

    generate_metadata_csv(deal_list, task, output_dir, task_uuid)
    generate_csv_and_send(task, deal_list, output_dir, client, task_uuid)
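A minimal invocation sketch; all paths and names are hypothetical, and the config file must provide the [main], [sender], and [web-server] sections read above:

# Hypothetical values for illustration only.
create_new_task(
    input_dir="/data/car-files",   # must contain car.csv and the car files
    out_dir="/data/tasks",         # falls back to [sender].output_dir when empty
    config_path="./config.toml",
    task_name="my-first-task",
    miner_id="f012345",            # required when public_deal is false
)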