Example #1
def main(index_file_cid: str) -> None:
    with ipfshttpclient.connect() as client:
        index_content = client.cat(index_file_cid).decode('utf-8')

    index = web_archive_index.WebArchiveIndex.from_string(index_content)

    record = index.find_by_original_uri(HOMEPAGE)

    assert record is not None

    html_page_cid = record.data['locator'].split('/')[-1]

    with ipfshttpclient.connect() as client:
        html_content = client.cat(html_page_cid).decode('utf-8')

    country_data, regions_data = parse.parse_html(
        html_content,
        retrieved_time=record.retrieved_time,
        source_url=f'ipfs://{html_page_cid}',
    )

    json_report = country_data_to_jsonld(country_data)

    json_report = pyld.jsonld.expand(json_report)

    print(json.dumps(json_report, indent=2, ensure_ascii=False))
Example #2
async def api_ipfs_hash(request, ipfs_hash: str):
    """Get PQL contents in `ipfs_hash`, return error if not valid PQL.

    Args:
        ipfs_hash: IPFS hash pointing to a PQL JSON definition
    """
    if ipfs_hash == "new":
        return response.json({
            "pql": request.app.config["TEMPLATE_PQL_DEFINITION"],
            "hash": "New PQL definition",
        })

    ipfs = ipfshttpclient.connect(
        request.app.config["IPFS_API_SERVER_ADDRESS"])

    try:
        js = ipfs.get_json(ipfs_hash,
                           timeout=int(request.app.config["--timeout"]))

        return response.json({"pql": js, "hash": ipfs_hash})
    except DecodingError:
        return response.json({"error": "Not a JSON file."}, status=400)
    except Exception:
        return response.json({"error": "Not a file."}, status=400)
Example #3
def publish_by_hash() -> Any:
    """ publish an existing IPFS object using IPNS """

    # get ipfs hash
    try:
        payload = json.loads(request.data.decode("utf8"))
        ipfs_hash = payload["hashToPublish"]
    except KeyError as e:
        return {"error": str(e)}, 500

    # find in database
    try:
        _ = db.session.query(Ipfs).filter(Ipfs.pin_hash == ipfs_hash).one()
    except NoResultFound as e:
        return {"error": str(e)}, 500

    # proxy
    try:
        with ipfshttpclient.connect() as client:
            ipnshash = client.name.publish(ipfs_hash)["Name"]
    except KeyError as e:
        return {"error": str(e)}, 500

    # success
    return {"IpnsHash": ipnshash}
Example #4
def pin_by_hash() -> Any:
    """ pin an existing upload to IPFS """

    # get ipfs hash
    try:
        payload = json.loads(request.data.decode("utf8"))
        ipfs_hash = payload["hashToPin"]
    except KeyError as e:
        return {"error": str(e)}, 500

    # find in database
    ipfs = db.session.query(Ipfs).filter(Ipfs.pin_hash == ipfs_hash).first()
    if ipfs:
        return {"error": "Already pinned"}, 400

    # proxy
    try:
        with ipfshttpclient.connect() as client:
            client.pin.add(ipfs_hash)
    except KeyError as e:
        return {"error": str(e)}, 500

    # add to database, then re-fetch the stored row so it can be returned
    md = payload.get("pinataMetadata")
    _add_to_db(ipfs_hash, md)
    ipfs = db.session.query(Ipfs).filter(Ipfs.pin_hash == ipfs_hash).one()

    # success -- yes: this is a different case to pinFileToIPFS...
    return {
        "id": ipfs.ipfs_id,
        "ipfsHash": ipfs_hash,
        "status": "searching",
        "name": ipfs.name,
    }
Example #5
def start_node():
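    # Keep retrying until the IPFS daemon at /dns/ipfs/tcp/5001/http accepts connections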
    try:
        return ipfshttpclient.connect('/dns/ipfs/tcp/5001/http', session=True)
    except ipfshttpclient.exceptions.ConnectionError:
        logger.info(f"{Log.WARNING}Waiting for node active{Log.ENDC}")
        time.sleep(RECURSIVE_SLEEP_REQUEST)
        return start_node()
Example #6
def encrypt(general_config, policy_encrypting_key, message, file, ipfs):
    """Encrypt a message under a given policy public key."""

    # Setup
    emitter = setup_emitter(general_config=general_config,
                            banner=policy_encrypting_key)
    ENRICO = _create_enrico(emitter, policy_encrypting_key)
    if not (bool(message) ^ bool(file)):
        emitter.error(
            f'Pass either --message or --file. Got {message}, {file}.')
        raise click.Abort

    # Encryption Request
    encryption_request = {
        'policy_encrypting_key': policy_encrypting_key,
        'message': message,
        'file': file
    }
    response = ENRICO.controller.encrypt_message(request=encryption_request)

    # Handle Ciphertext
    # TODO: This might be crossing the bridge of being application code
    if ipfs:
        emitter.message(f"Connecting to IPFS Gateway {ipfs}")
        ipfs_client = ipfshttpclient.connect(ipfs)
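        # add_str() uploads the message kit string to IPFS and returns its hash (CID)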
        cid = ipfs_client.add_str(response['message_kit'])
        emitter.message(f"Uploaded message kit to IPFS (CID {cid})",
                        color='green')
    return response
Example #7
    def fetch_allow_list(self) -> None:
        """fetch allow list from ipfs"""

        logging.info("fetching valid allow list")
        ipfs_client = ipfshttpclient.connect()
        ipfs_client.get(self.robonomics_allow_list_hash)
        ipfs_client.close()
Example #8
def make_commit(repo: RepositoryContractWrapper, commit_message: str):
    ipfs = ipfshttpclient.connect()

    filepaths = []
    ipfs_hashes = []

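    # Walk the working tree, skip the local repo metadata file, and add each remaining file to IPFS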
    for root, dirs, files in os.walk("./"):
        for name in files:
            filepath = os.path.join(root, name)

            if filepath == "./.repodata.json":
                continue

            print(filepath)

            filepaths.append(filepath)
            ipfs_hashes.append(ipfs.add(filepath)["Hash"])

    print(f"filepaths = {filepaths}")
    print(f"ipfs_hashes = {ipfs_hashes}")
    print(f"current_branch = {current_branch}")

    repo.make_commit(filepaths, ipfs_hashes, current_branch, current_commit, comment=commit_message)
    commit_id = repo.most_recent_commit(current_branch)

    with open("./.repodata.json", "r") as infile:
        repodata = json.load(infile)

    repodata["current_commit_id"] = commit_id

    with open("./.repodata.json", "w") as outfile:
        json.dump(repodata, outfile)
Example #9
def get_ipfs(host=None, port=settings.IPFS_API_PORT):
    """Establish a connection to IPFS.

    Args:
        host (str): The IPFS host to connect to.
            Defaults to settings.IPFS_HOST. The host name should be of the
            form 'ipfs.infura.io' and must not include 'https://'.
        port (int): The IPFS port to connect to.
            Defaults to settings.IPFS_API_PORT.

    Raises:
        CommunicationError: The exception is raised when there is a
            communication error with IPFS.

    Returns:
        ipfshttpclient.client.Client: The IPFS connection client.

    """
    if host is None:
        clientConnectString = f'/dns/{settings.IPFS_HOST}/tcp/{settings.IPFS_API_PORT}/{settings.IPFS_API_SCHEME}'
    else:
        clientConnectString = f'/dns/{host}/tcp/{port}/https'
    try:
        return ipfshttpclient.connect(clientConnectString)
    except CommunicationError as e:
        logger.exception(e)
        raise IPFSCantConnectException(
            'Failed while attempting to connect to IPFS')
Example #10
def upload(req):
    myfile = req.FILES.get("upload", None)
    if not myfile:
        return HttpResponse("no file selected")
    if myfile.name != "Dockerfile":
        return HttpResponse("仅支持Dockerfile文件")
    newfile = open(myfile.name, 'wb+')
    for chunk in myfile.chunks():
        newfile.write(chunk)
    newfile.close()
    print("filename:", myfile.name)
    os.system("docker cp {} ipfs_host:/".format(myfile.name))
    ipfs_api = ipfshttpclient.connect('/ip4/127.0.0.1/tcp/5001/http')
    res = ipfs_api.add(myfile.name)
    print("res:", res)
    ipfs_api.close()
    os.remove(myfile.name)
    name = req.POST.get('name')
    price = req.POST.get('price')
    args = [name.encode('utf8'), int(price), res['Hash']]
    receipt = client.sendRawTransactionGetReceipt(contract_addr, contract_abi,
                                                  "publish", args)
    txhash = receipt['transactionHash']
    if receipt['status'] != '0x0':
        tx_list.append(tx(txhash, "developer", "", 0, False))
        return render(req, "response.html", {"response": "软件发布失败"})
    tx_list.append(tx(txhash, "developer", "", 0, True))
    return render(
        req, "response.html", {
            "response": "upload success!",
            "info1": "ipfs hash is {}".format(res['Hash']),
            "info2": "tx hash on fisco is {}".format(txhash)
        })
Example #11
def ipfs():
    client = ipfshttpclient.connect('/ip4/127.0.0.1/tcp/5001/http')
    res = client.add('y.pdf')
    print("https://ipfs.io/ipfs/" + res['Hash'])
    with open('ipfs_files/y.pdf', 'wb') as file:
        file.write(client.cat(res['Hash']))
    client.close()
    return "IPFS"
Example #12
    def ipfs(self):  # type: () -> ipfshttpclient.Client
        if self._ipfs is None:
            if self['ipfs'] is not None:
                host = os.environ.get(ENV_NAME_IPFS_HOST) or self['ipfs'].get('host')
                port = os.environ.get(ENV_NAME_IPFS_PORT) or self['ipfs'].get('port')
                multiaddr = os.environ.get(ENV_NAME_IPFS_MULTIADDR) or self['ipfs'].get('multiaddr')
            else:
                multiaddr = os.environ.get(ENV_NAME_IPFS_MULTIADDR)
                host = os.environ.get(ENV_NAME_IPFS_HOST)
                port = os.environ.get(ENV_NAME_IPFS_PORT)

            # Hack to allow cross-platform Docker to reference the Docker host's machine with $HOST_ADDR
            if host and host.startswith('$'):
                logger.info(f'Resolving host name from environment variable {host}')
                host = os.environ[host[1:]]

            if host == 'localhost':
                host = '127.0.0.1'

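            # Build a multiaddr from host/port when no explicit multiaddr is configured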
            if not multiaddr:
                multiaddr = f'/ip4/{host}/tcp/{port}/http'

            logger.info(f'Connecting to IPFS host \'{multiaddr}\' and caching the client')
            self._ipfs = ipfshttpclient.connect(multiaddr)

        return self._ipfs
Example #13
def send_create_task_TX(max_iteration=10, file="config.ini"):
    proc = subprocess.Popen(
        'docker run --rm -it -v {}:/root/:z tony92151/py-abci python3 /root/py-app/utils.py -config /root/py-app/config/{} > FIRSTMOD.txt'
        .format(os.path.abspath("./script"), file),
        shell=True)
    proc.wait()

    time.sleep(1)
    client = ipfshttpclient.connect("/ip4/0.0.0.0/tcp/5001/http")

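    # Publish the contents of FIRSTMOD.txt to IPFS; add_str() returns the hash of the added string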
    ipfs_model = client.add_str(
        open(os.path.abspath("./FIRSTMOD.txt"), "r").read())
    print(ipfs_model)

    param = json.loads(
        '{"type": "create_task","max_iteration": "","aggtimeout": 20,"weight":""}'
    )
    param["max_iteration"] = max_iteration
    param["weight"] = ipfs_model

    b64payload = base64.b64encode(
        json.dumps(param).encode('UTF-8')).decode('UTF-8')
    print(b64payload)

    url = "http://localhost:26657"
    payload = json.loads(
        '{"jsonrpc":"2.0", "method": "broadcast_tx_sync", "params": "", "id": 1}'
    )
    payload["params"] = [b64payload]

    # Parameters are sent in the JSON payload; only the content type is set in the headers
    headers = {"Content-Type": "application/json"}
    r = requests.post(url, data=json.dumps(payload), headers=headers)
    print(json.dumps(json.loads(r.content), indent=4, sort_keys=True))
    return proc
Example #14
def pin_DB(mydb):
    """
    Pin all IPFS hashes referenced in the DB
    :param mydb: DB object
    :return: list of newly pinned ipfs hashes
    """
    # get all ipfs pins
    with ipfshttpclient.connect(addr=ipfsaddress) as client:
        pins = client.pin.ls(type="all")
        pinlist = [i for i in pins["Keys"].keys()]
        # pin the ones that are not pinned already
        new_pins = list()
        for i in mydb:
            if i['ipfs'] not in pinlist:
                if VERBOSE:
                    logging.warning("pinning: %s" % i['ipfs'])
                _ = client.pin.add(i['ipfs'])
                new_pins.append(i['ipfs'])
            if i['ipfsmeta'] not in pinlist:
                if VERBOSE:
                    logging.warning("pinning: %s" % i['ipfsmeta'])
                _ = client.pin.add(i['ipfsmeta'])
                new_pins.append(i['ipfsmeta'])

    if len(new_pins) == 0:
        if VERBOSE:
            logging.warning("no hashes needed pinning")
    return new_pins
Example #15
    def __init__(self, ipfs_peer_host, ipfs_peer_port, gateway_addresses):
        self._api = ipfshttpclient.connect(
            f"/dns/{ipfs_peer_host}/tcp/{ipfs_peer_port}/http")
        self._gateway_addresses = gateway_addresses
        self._cnode_endpoints = None
        self._ipfsid = self._api.id()
        self._multiaddr = get_valid_multiaddr_from_id_json(self._ipfsid)
Example #16
def upload():
    client = ipfshttpclient.connect(os.environ.get('IPFS_CONNECT_URL'))
    user = User.get_by_email(session['email'])
    try:
        urlProof = request.form['proof']
    except:
        urlProof = ""
    file = request.files['filename']
    res = client.add(file)
    client.close()
    cReward = Accion.getActionById(session['accionId'])
    kpi = request.form['kpi']
    strRecompensa = str(cReward.recompensa).replace(",", ".")
    cReward.recompensa = float(strRecompensa) * float(kpi)
    sendCoins(session['email'], cReward.recompensa, res['Hash'], urlProof)
    try:
        cReward.nombre = translator.translate(cReward.nombre,
                                              dest=session['lang']).text
    except:
        pass
    del session['accionId']
    return render_template("recompensa.html",
                           name=session['name'],
                           accion=cReward,
                           email=session['email'],
                           user=user)
Example #17
    def client(self):
        if self._client is None:
            url = parse_url(self.base_uri)
            self._client = ipfshttpclient.connect(
                f'/dns/{url.host}/tcp/{url.port}/{url.scheme}', session=True)

        return self._client
Example #18
def fetch(repo: RepositoryContractWrapper, commit_id: int):
    """Replace the current local files, their contents and the directory
    structure with the ones specified at the given commit"""

    for root, dirs, files in os.walk("./"):
        for name in files:
            filepath = os.path.join(root, name)

            if filepath == "./.repodata.json":
                # Don't delete the repodata file - that is going to end badly
                continue

            os.remove(filepath)
        for directory in dirs:
            fulldir = os.path.join(root, directory)
            os.rmdir(fulldir)

    file_ids = repo.get_files_from_commit(commit_id)

    ipfs = ipfshttpclient.connect()

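    # Recreate each file recorded in the commit from its IPFS hash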
    for fid in file_ids:
        filedata = repo.get_file(fid)

        filepath = filedata[0]
        ipfshash = filedata[1]

        os.makedirs(os.path.dirname(filepath), exist_ok=True)
        with open(filepath, "wb") as outfile:
            outfile.write(ipfs.cat(ipfshash))
Example #19
    async def execute_ipfs(ipfs_address: str, ipfs_hash: str) -> str:
        """Execute PQL definition located in IPFS.

        Args:
            ipfs_address (str): IPFS API address
            ipfs_hash (str): IPFS hash to execute

        Returns:
            str: result of the executed PQL JSON

        Raises:
            PqlDecodingError: file was not a JSON or IPFS was unreachable.
        """
        logger.info(f"Execute IPFS PQL {ipfs_address}/{ipfs_hash} request.")

        try:
            # Connect to the IPFS API server
            ipfs = ipfshttpclient.connect(ipfs_address)

            # Fetch the JSON from the hash
            req = ipfs.get_json(ipfs_hash, timeout=int(app.config["--timeout"]))
        except DecodingError:
            raise PqlDecodingError("object decoding error, expecting JSON format.")
        except ReadTimeout:
            raise PqlDecodingError("IPFS timed out before retrieving the file.")

        res = await parse_and_execute(req)
        logger.info(f"Obtained result {res}")

        return res
Example #20
    def add_dir(self, dir, encrypted=True):
        if not os.path.exists(dir.path):
            raise FileNotFoundError(dir.path)

        original_dir_path = dir.path
        if encrypted:
            encrypted_dir_path = dir.encrypt_content(self._cipher,
                                                     self._working_dir)
            dir.path = encrypted_dir_path.replace(os.sep, '/')

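        # Upload the (possibly encrypted) directory to IPFS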
        with ipfshttpclient.connect() as client:
            resp = client.add(dir.path)

        self._logger.info("Added %s to IPFS" % dir.path)
        self._logger.debug(json.dumps(resp, indent=4))

        if encrypted:
            dir.remove()
            dir.path = original_dir_path

        if isinstance(resp, list):
            return [(os.path.join(os.path.dirname(dir.path),
                                  d['Name']).replace(os.sep, '/'), d['Hash'])
                    for d in resp]
        elif isinstance(resp, dict):
            return [(os.path.join(os.path.dirname(dir.path),
                                  resp['Name']).replace(os.sep, '/'),
                     resp['Hash'])]
        else:
            raise Exception("Unhandled response instance %s!" % type(resp))
Example #21
def test_ipfs(app):
    json_file = "tests/res/test_ipfs.json"
    ipfs_peer_host = app.config["ipfs"]["host"]
    ipfs_peer_port = app.config["ipfs"]["port"]

    # Instantiate IPFS client from src lib
    ipfsclient = IPFSClient(ipfs_peer_host, ipfs_peer_port, [])

    remove_test_file(json_file)
    api = ipfshttpclient.connect(
        f"/dns/{ipfs_peer_host}/tcp/{ipfs_peer_port}/http")

    # Create generic metadata object w/above IPFS multihash
    test_metadata_object = dict(track_metadata_format)
    test_metadata_object["title"] = chance.name()
    test_metadata_object["release_date"] = str(chance.date())
    test_metadata_object["file_type"] = "mp3"
    test_metadata_object["license"] = "HN"
    with open(json_file, "w") as f:
        json.dump(test_metadata_object, f)

    # Add metadata object to ipfs node
    json_res = api.add(json_file)
    metadata_hash = json_res["Hash"]

    # Invoke audius-ipfs
    ipfs_metadata = ipfsclient.get_metadata(metadata_hash,
                                            track_metadata_format)
    remove_test_file(json_file)

    # Confirm retrieved metadata object state
    for key in test_metadata_object:
        assert key in ipfs_metadata
        assert test_metadata_object[key] == ipfs_metadata[key]
Example #22
def _get_multihash(buf: set, endpoint: str = "/ip4/127.0.0.1/tcp/5001/http") -> (str, str):
    payload = {}

    for m in buf:
        if m.public in payload:
            payload[m.public]["measurements"].append(_create_row(m))
        else:
            payload[m.public] = {
                "model": m.model,
                "measurements": [
                    _create_row(m)
                ]
            }

    rospy.logdebug(f"Payload before sorting: {payload}")
    payload = _sort_payload(payload)
    rospy.logdebug(f"Payload sorted: {payload}")

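    # Write the sorted payload to a temporary JSON file so it can be added to IPFS by path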
    temp = NamedTemporaryFile(mode="w", delete=False)
    rospy.logdebug(f"Created temp file: {temp.name}")
    temp.write(json.dumps(payload))
    temp.close()

    with ipfshttpclient.connect(endpoint) as client:
        response = client.add(temp.name)
        return (response["Hash"], temp.name)
Example #23
    def __init__(self, eth_api, ipfs_api):
        self.w3 = Web3(HTTPProvider(eth_api))  # Ethereum geth client interface
        # print(self.w3)
        if not self.w3.isConnected():
            raise ETHConnectionError("Blockchain is not connected")
        self.client = ipfshttpclient.connect(ipfs_api)  # IPFS interface
        self.port = 7001  # port for receiving data frames

        self.record = {}
        # load previously saved records (data.pkl)
        if os.path.exists("data.pkl"):
            param = params.UmbralParameters(self.curve)
            with open("data.pkl", "rb") as f:
                records = pickle.load(f)
                for record in records:
                    # the record may need some additional processing
                    for key in record.keys():
                        capsule = pre.Capsule.from_bytes(record[key], param)
                        record[key] = capsule
                        self.record.update(record)

        print("Already get record:",self.record)
        self.tempo_cfrags={}#暂时接收来自其他节点的cfrag帧
        sock_ip_get = socket(AF_INET, SOCK_DGRAM)
        sock_ip_get.connect(('8.8.8.8', 80))
        ip = sock_ip_get.getsockname()[0]
        self.IP=ip
        print("Host Information:",self.IP,":",self.port)
Example #24
def _get_multihash(
    buf: set, db: object, endpoint: str = "/ip4/127.0.0.1/tcp/5001/http"
) -> tp.Tuple[str, str]:
    payload = {}
    for m in buf:
        if m.public in payload:
            payload[m.public]["measurements"].append(m.measurement_check())
        else:
            payload[m.public] = {
                "model": m.model,
                "geo": "{},{}".format(m.geo_lat, m.geo_lon),
                "measurements": [m.measurement_check()],
            }

    logger.debug(f"Payload before sorting: {payload}")
    payload = _sort_payload(payload)
    logger.debug(f"Payload sorted: {payload}")

    temp = NamedTemporaryFile(mode="w", delete=False)
    logger.debug(f"Created temp file: {temp.name}")
    temp.write(json.dumps(payload))
    temp.close()

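    # Add the JSON file to IPFS and record its hash locally as "not sent"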
    with ipfshttpclient.connect(endpoint) as client:
        response = client.add(temp.name)
        db.add_data("not sent", response["Hash"], time.time(), json.dumps(payload))
        return (response["Hash"], temp.name)
Example #25
def hash_user_file(user_file, file_key):
    encrypt_file(user_file, file_key)
    encrypted_file_path = user_file + ".aes"
    client = ipfshttpclient.connect('/dns/ipfs.infura.io/tcp/5001/https')
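    # add() uploads the encrypted file and returns a mapping that includes its hash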
    response = client.add(encrypted_file_path)
    file_hash = response['Hash']
    return file_hash
Example #26
def addFile(request):
    file_obj = request.data['file']
    print(request.META)
    ro = UploadedResearchObject.objects.create( 
        oricid=request.META['HTTP_ORICID'],
        uploadedfile=file_obj
    )

    try:
        client = ipfshttpclient.connect(IPFS_ADDRESS)
        filepath = ro.uploadedfile.path
        res = client.add(filepath)
        print("The hash of file is : ",res['Hash'])
        researcher = "resource:org.jro.Researcher#"+str(ro.oricid)
        rojid="resource:org.jro.ROJ#"+ str(res['Hash'])
        print("\n Adding the research object to the blockchain")
        r = requests.post(COMPOSER_REST_URL+'/api/Add', data = { "$class": "org.jro.Add", "rojId": rojid, "node": res['Hash'], "creator": researcher })
        print(r.content)
        if r.status_code==200:
            print("\n Success")
            data={"hash": res['Hash']}
        else:
            print(r.status_code)
            data={"hash":"error"}
    except ConnectionRefusedError:
        print("Connection error, please ensure the ipfs daemon is running.")
        data = {"hash": "error"}
    return Response(data, status=200)
Example #27
def compare_images(id):
    ipfs = ipfshttpclient.connect("/dns/ipfs.infura.io/tcp/5001/https")
    instance = Image.objects.get(ipfsHash=id)
    logger.info(instance.ipfsHash)
    cat_resp = ipfs.cat(instance.ipfsHash)
    i1 = pillow_image.open(io.BytesIO(cat_resp))
    i1 = i1.resize(tuple(int(x / IMAGE_REDUCTION_FACTOR) for x in i1.size),
                   pillow_image.ANTIALIAS)
    images = np.array(Image.objects.exclude(ipfsHash=id))
    chunks = np.array_split(images, NUMBER_OF_PROCESSES)

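    # Compare the reference image against the remaining images in parallel worker processes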
    with mp.Manager() as manager:
        jobs = []
        comp_list = manager.list()
        for (i, s) in enumerate(chunks):
            j = mp.Process(target=comparison_thread,
                           args=(comp_list, i, ipfs, s, i1))
            j.start()
            jobs.append(j)

        for j in jobs:
            j.join()

        logger.info(comp_list)
        sim_flag = create_similar(comp_list, instance)

    logger.info("Comparison process finished")
Example #28
def ipfs(filepath):
    client = ipfshttpclient.connect('/ip4/127.0.0.1/tcp/5001/http')

    imagepath = os.path.join(app.config['TEMP_FOLDER'], filepath)
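    # Add the file from the temp folder to the local IPFS node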
    res = client.add(imagepath)
    lst = list(res.values())
    client.close()
    return lst[1]
Example #29
def sendFile(filename):
    try:
        with ipfs.connect() as client:
            res = client.add(filename)
            return res['Hash']
    except:
        print("Error while sending file: " + filename)
Example #30
def base():
    client = ipfshttpclient.connect('/ip4/127.0.0.1/tcp/5001/http')
    print("Klient ", client)

    res = client.add('test.txt')
    print("res ", res)

    print(client.cat(res['Hash']))
Example #31
# Spawn IPFS daemon in data directory
DAEMON = subprocess.Popen(["ipfs", "daemon", "--enable-pubsub-experiment"])
os.environ["PY_IPFS_HTTP_CLIENT_TEST_DAEMON_PID"] = str(DAEMON.pid)

# Collect the exit code of `DAEMON` when `SIGCHLD` is received
# (otherwise the shutdown test fails to recognize that the daemon process is dead)
if os.name == "posix":
	import signal
	signal.signal(signal.SIGCHLD, lambda *a: DAEMON.poll())

# Wait for daemon to start up
import ipfshttpclient
while True:
	try:
		ipfshttpclient.connect(HOST, PORT)
	except ipfshttpclient.exceptions.ConnectionError:
		time.sleep(0.05)
	else:
		break


##################
# Run test suite #
##################

PYTEST_CODE = 1
try:
	# Run tests in CI-mode (will stop the daemon at the end through the API)
	os.environ["CI"] = "true"