Example #1
    def __init__(self):
        '''
            Robonomics information channel initialisation.
        '''
        rospy.init_node('ipfs_channel')
        self.lighthouse = rospy.get_param('~lighthouse_contract')
        ipfs_api_parts = urlparse(
            rospy.get_param('~ipfs_http_provider')).netloc.split(':')
        self.ipfs_client = ipfsapi.Client(host=ipfs_api_parts[0],
                                          port=ipfs_api_parts[1])

        self.incoming_offer = rospy.Publisher('incoming/offer',
                                              Offer,
                                              queue_size=10)
        self.incoming_demand = rospy.Publisher('incoming/demand',
                                               Demand,
                                               queue_size=10)
        self.incoming_result = rospy.Publisher('incoming/result',
                                               Result,
                                               queue_size=10)

        rospy.Subscriber(
            'eth/sending/offer', Offer,
            lambda m: publish(self.ipfs_client, self.lighthouse, bid2dict(m)))
        rospy.Subscriber(
            'eth/sending/demand', Demand,
            lambda m: publish(self.ipfs_client, self.lighthouse, ask2dict(m)))
        rospy.Subscriber(
            'eth/sending/result', Result,
            lambda m: publish(self.ipfs_client, self.lighthouse, res2dict(m)))
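The publish, bid2dict, ask2dict, and res2dict helpers above are defined elsewhere in the project and are not shown here. A minimal, hypothetical sketch of such a publish helper, assuming it serializes the message dict to JSON and broadcasts it on the lighthouse pubsub topic (an illustration, not the project's actual code):

import json

def publish(ipfs_client, lighthouse, msg_dict):
    # Hypothetical helper: serialize the message and broadcast it on the
    # lighthouse pubsub topic via the IPFS API client created above.
    ipfs_client.pubsub_pub(lighthouse, json.dumps(msg_dict))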
Example #2
File: ipfs.py Project: chainify/engine
def create_ipfs_file(data):
    ipfs = ipfsapi.Client(config['ipfs']['host'], config['ipfs']['post_port'])
    if not ipfs:
        return bad_request('IPFS server can not be reached')

    if ipfs_init.enc_mothod == 'AES':
        enc_data = crypto_aes.encrypt(data).decode("utf-8")
    else:
        enc_data = data

    dir_id = str(uuid.uuid4())
    file_id = str(uuid.uuid4())
    dir_path = './files/' + dir_id
    os.mkdir(dir_path)
    file_name = file_id + '.txt'
    file_path = dir_path + '/' + file_name

    with open(file_path, 'w') as f:
        f.write(enc_data)

    ipfs_data = ipfs.add(file_path)

    os.remove(file_path)
    os.rmdir(dir_path)

    return ipfs_data
Example #3
    def getNodeID(self):
        # Depends on IPFS Daemon instance
        identity = ''
        if self.initialized != 'false':
            client = node.Client(host='localhost', port=5001, base='api/v0', chunk_size=4096)
            identity = client.id()

        return identity
Example #4
    def run(self, *args):
        ipfs_api = ipfsapi.Client('127.0.0.1', 5001)
        method = getattr(ipfs_api, self._method)
        kwargs = {}
        for idx, arg in enumerate(args):
            kwargs[ipfs_tasks[self._method]['args'][idx]] = arg

        return method(**kwargs)
Example #5
    def __init__(self, host='127.0.0.1', port=10006):
        super(IPFS, self).__init__()
        self.port = port
        self.host = host
        try:
            self.api = ipfsapi.Client(self.host, self.port)
            logging.debug('Created IPFS Client')
        except Exception:
            logging.debug('Network does not exist: %s', self.host)
Example #6
def check_ipfs_file(hash):
    print("Syncing ipfs hash {}".format(hash))
    client = ipfsapi.Client("ipfs", 5001)
    client.cat(hash)
    requests.get("https://ipfs.infura.io/ipfs/{}/".format(hash))
    requests.get(
        "https://ipfs.infura.io:5001/api/v0/pin/add?arg=/ipfs/{}".format(hash))
    requests.get("http://ipfs.io/ipfs/{}/".format(hash))
    return
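The requests above ask public gateways to fetch the object and ask Infura to pin it. As a hedged alternative sketch (assuming the same "ipfs" host reachable on port 5001), the pin could also be requested directly through the API client:

def pin_ipfs_file(hash):
    # Sketch only: pin the object on the API node itself so it is kept
    # locally, instead of relying on the gateway requests above.
    client = ipfsapi.Client("ipfs", 5001)
    return client.pin_add(hash)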
Example #7
	def upload(self, fileName):
		try:
			# Get the client instance of the IPFS local node
			api = ipfsapi.Client('127.0.0.1', 5001)
			# upload the file onto local node and store the hash returned
			result = api.add(fileName)
		# If the local daemon is not alive then catch the error 
		except urllib3.exceptions.NewConnectionError:
			raise Exception('Is the local IPFS Daemon running?')
		return result
Example #8
def add_file_contents(contents):
    # Empty the file before writing to it
    open('.__temp', 'w').close()

    with open(".__temp", "wb") as fp:
        fp.write(contents)
        fp.close()

    api_ip = socket.gethostbyname('127.0.0.1')
    ipfs_api_obj = ipfsapi.Client(host=api_ip, port=5001)
    return ipfs_api_obj.add(".__temp")["Hash"]
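A hypothetical round trip with the helper above, assuming a local daemon on the default API port:

client = ipfsapi.Client(host='127.0.0.1', port=5001)
stored_hash = add_file_contents(b"hello ipfs")
# cat returns the raw bytes that were written to the temporary file
assert client.cat(stored_hash) == b"hello ipfs"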
Example #9
    def retrieve(self, fileHash):
        try:
            # Access the entire IPFS network
            api = ipfsapi.Client('127.0.0.1', 8080)
            # query for the file on the entire network
            fileContents = api.cat(fileHash)
        # If the local daemon is not alive then catch the error
        except urllib3.exceptions.NewConnectionError:
            raise Exception('Is the local IPFS Daemon running?')

        return fileContents
Example #10
    def setUp(self):
        self.api = ipfsapi.Client()

        # Add resources to be pinned.
        self.resource = self.api.add_str('Mary had a little lamb')
        resp_add = self.api.add('test/functional/fake_dir', recursive=True)
        self.fake_dir_hashes = [el['Hash'] for el in resp_add if 'Hash' in el]
        for resp in resp_add:
            if resp["Name"] == "fake_dir":
                self.fake_dir_hash = resp["Hash"]
            elif resp["Name"] == "fake_dir/test2":
                self.fake_dir_test2_hash = resp["Hash"]
Example #11
def bootstrap():
    # new search engine db
    if VERBOSE:
        print("creating new database")
    app.se.load_db()
    uploads = app.se.db["uploads"]
    app.se = SearchEngine()
    app.se.db["uploads"] = uploads
    if VERBOSE:
        print("Get all references from Blockchain")
    # get all metadata from blockchain
    bc_data = blockchaincom.retrieve_from_dogecoin(blockchaincom.op_return_dogecoin.OP_RETURN_DOGECOIN_ADDRESS, VERBOSE=VERBOSE)
    #print(bc_data)
    if VERBOSE:
        print("Get all data from IPFS")
    con = ipfsapi.Client('localhost', 5001)
    added_keys="<ul>"
    for message in bc_data:
        if VERBOSE:
            print("working on message %s" % message[0].decode('latin1'))
        try:
            dtype, dhash = message[0].decode('latin1').split(':')
        except:
            continue
        if dtype != 'meta':
            continue
        # get metadata
        try:
            if VERBOSE:
                print("Try to get hash %s from IPFS" % dhash)
            metadata = con.get_pyobj(dhash)
        except:
            print('Could not access pyobject %s' %dhash)
            continue
        #print(metadata)
        # add it to local search engine
        newkeys = metadata["category"] + ','
        newkeys += metadata["author"] + ','
        newkeys += metadata["keywords"] + ','
        newkeys += metadata["title"] + ','
        newkeys += os.path.splitext(metadata["filename"])[-1][1:] + ','
        newkeys = app.se.str2keys(newkeys) # extract meta data keys
        app.se.add_keys(keylist=newkeys, documentref=metadata["documentref"],metadata=metadata, docid=metadata["docid"])
        # save metadata ipfs
        app.se.db["metadata"][metadata["documentref"]] = dhash
        #pin it
        con.pin_add(dhash)
        #con.pin_add(metadata["documentref"])
        added_keys += "<li>" + "<a href='document?id=%s'>%s</a>" %(metadata["documentref"], dhash) + '</li>\n'
    #save db again
    added_keys += "</ul>"
    app.se.save_db()
    return bottle.template('templates/bootstrap.tpl', data=added_keys)
Example #12
    def __init__(self):
        rospy.init_node('aira_graph')
        ipfs_api_parts = urlparse(rospy.get_param('~ipfs_http_provider')).netloc.split(':')
        __keyfile_helper = KeyfileHelper(rospy.get_param('~keyfile'),
                                         keyfile_password_file=rospy.get_param('~keyfile_password_file'))
        self.__account = __keyfile_helper.get_local_account_from_keyfile()

        self.ipfs_client = ipfsapi.Client(host=ipfs_api_parts[0], port=ipfs_api_parts[1])

        self.graph_topic = rospy.get_param('~graph_topic')
        self.lighthouse_topic = rospy.get_param('~lighthouse_topic')
        self.greeting = rospy.Publisher('greeting', String, queue_size=10)
Example #13
def upload_to_ipfs():
    ipfs = ipfsapi.Client(g.ipfs, 5001)
    if not ipfs:
        return bad_request('IPFS server can not be reached')

    if len(request.files) == 0:
        return bad_request('No files to upload')

    ipfs_data = None
    file_type = None
    for file_name in request.files:
        dir_id = str(uuid.uuid4())
        file_id = str(uuid.uuid4())
        dir_path = './files/' + dir_id
        os.mkdir(dir_path)
        file_path = dir_path + '/' + file_id
        request.files[file_name].save(file_path)
        file_type = magic.from_file(file_path, mime=True)
        ipfs_data = ipfs.add(file_path, only_hash=True)

        file_size = int(ipfs_data['Size'])
        if file_size > g.file_size_limit:
            os.remove(file_path)
            os.rmdir(dir_path)
            return bad_request(
                'File size limit exceeded ({0} bytes over {1} bytes limit)'.
                format(file_size, g.file_size_limit))

        conn = g.mysql.connection
        cursor = conn.cursor()
        try:
            cursor.execute("""
				SELECT
					f.id, 
					f.name,
					f.size, 
					f.ipfs_hash, 
					f.tx_id, 
					f.created
				FROM files f
				WHERE ipfs_hash='{ipfs_hash}'
			""".format(ipfs_hash=ipfs_data['Hash']))
            db_file = cursor.fetchone()
        except Exception as error:
            print('ERROR: ', error)
            return bad_request(error)
        finally:
Example #14
    def init_api(self):
        if self.ipfs_gateway is not None:
            if ':' in self.ipfs_gateway:
                host, port = self.ipfs_gateway.split(':')
            else:
                host, port = self.ipfs_gateway, 5001

            port = int(port)
        else:
            host = 'localhost'
            port = 5001

        self.api = ipfsapi.Client(host=host, port=port)

        # fail quickly if we're not able to contact ipfs
        self.id = self.api.id()
        LOG.debug('initialized ipfs api')
Example #15
File: util.py Project: TomBaxter/ipwb
def isDaemonAlive(hostAndPort="{0}:{1}".format(IPFSAPI_HOST, IPFSAPI_PORT)):
    """Ensure that the IPFS daemon is running via HTTP before proceeding"""
    client = ipfsapi.Client(IPFSAPI_HOST, IPFSAPI_PORT)

    try:
        # ConnectionError/AttributeError if IPFS daemon not running
        client.id()
        return True
    except (ConnectionError):  # exceptions.AttributeError):
        logError("Daemon is not running at http://" + hostAndPort)
        return False
    except OSError:
        logError("IPFS is likely not installed. "
                 "See https://ipfs.io/docs/install/")
        sys.exit()
    except Exception as e:
        logError('Unknown error in retrieving daemon status')
        logError(sys.exc_info()[0])
Example #16
class worker():
    __daemon = node.Client(host='localhost', port=5001, base='api/v0', chunk_size=4096)

    def __init__(self, path):
        self.object = path

    def addFile(self):
        try:

            if os.path.exists(self.object):
                meta = self.__daemon.add(self.object, chunker='size-2048')

                return meta
            else:
                raise _NULLPATH(self.object)

        except _NULLPATH as exception:
            print(exception.error)
            exit(exception.code)
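A hypothetical invocation of the worker class above (the path is illustrative):

# Sketch only: add an existing file in 2048-byte chunks and print its hash.
meta = worker('/tmp/example.txt').addFile()
print(meta['Hash'])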
Example #17
def isDaemonAlive(hostAndPort="{0}:{1}".format(IPFSAPI_IP, IPFSAPI_PORT)):
    """Ensure that the IPFS daemon is running via HTTP before proceeding"""
    client = ipfsapi.Client(IPFSAPI_IP, IPFSAPI_PORT)

    try:
        # OSError if ipfs not installed
        subprocess.call(['ipfs', '--version'], stdout=open(devnull, 'wb'))

        # ConnectionError if IPFS daemon not running
        client.id()
        return True
    except ConnectionError:
        print "Daemon is not running at http://" + hostAndPort
        return False
    except OSError:
        print "IPFS is likely not installed. See https://ipfs.io/docs/install/"
        sys.exit()
    except:
        print 'Unknown error in retrieving daemon status'
        print sys.exc_info()[0]
Example #18
File: util.py Project: openthings/ipwb
def isDaemonAlive(hostAndPort="{0}:{1}".format(IPFSAPI_IP, IPFSAPI_PORT)):
    """Ensure that the IPFS daemon is running via HTTP before proceeding"""
    client = ipfsapi.Client(IPFSAPI_IP, IPFSAPI_PORT)

    try:
        # OSError if ipfs not installed, redundant of below
        # subprocess.call(['ipfs', '--version'], stdout=open(devnull, 'wb'))

        # ConnectionError/AttributeError if IPFS daemon not running
        client.id()
        return True
    except (ConnectionError, exceptions.AttributeError):
        logError("Daemon is not running at http://" + hostAndPort)
        return False
    except OSError:
        logError("IPFS is likely not installed. "
                 "See https://ipfs.io/docs/install/")
        sys.exit()
    except Exception as e:
        logError('Unknown error in retrieving daemon status')
        logError(sys.exc_info()[0])
Example #19
def ipfsgateway(docid):
    """
    A simple IPFS gateway, that will only serve locally uploaded IPFS files (i.e. not access external files)
    :return: file
    """
    # first of all, clean directory
    clean_temp_files()
    # docid = bottle.request.query.id
    if docid:
        # get all local pinned ID
        # con = ipfsApi.Client('localhost', 5001)
        # all = list(con.refs_local()) # not yet implemented, so we will use the refs from the search engine instead
        #
        app.se.load_db()  # if our db is empty, try to load it from disk
        allrefs = list(app.se.db["documents"].keys())
        if docid in allrefs:
            # serve doc
            con = ipfsapi.Client('localhost', 5001)
            oldpath = os.getcwd()
            os.chdir(os.path.join(oldpath, "temp"))
            file_to_serve = []
            try:
                result = con.get(docid)
                print(result)
                documentfilename = app.se.db["documents"][docid]["filename"]
                docext = os.path.splitext(documentfilename)[-1]
                os.rename(docid, docid+docext)
                os.chdir(oldpath)
                file_to_serve = bottle.static_file(docid+docext, root='temp')
            except:
                pass
            finally:
                os.chdir(oldpath)
            return file_to_serve
        else:
            raise bottle.HTTPError(status=404, body="Not found", traceback=None)
    else:
        raise bottle.HTTPError(status=404, body="Not found", traceback=None)
Example #20
    def setUp(self):
        self.api = ipfsapi.Client()
Example #21
import json
import requests
import uuid
import hashlib
import ipfsapi

# Address of cgtd at search.cancergenetrust.org
TEST_CGTD_ADDRESS = "QmWPSzKERs6KAjb8QfSXViFqyEUn3VZYYnXjgG6hJwXWYK"

ipfs = ipfsapi.Client("ipfs", 5001)


def url_for(server, *args):
    """ Generate versions REST url """
    return "{}/v0/{}".format(server, "/".join(args))


def get_latest_index(server):
    """
    Return the latest steward's index forcing local resolution
    so we don't get a cached entry.
    """
    # multihash = ipfs.name_resolve(ipfs.id()["ID"], opts={'local': True})["Path"].rsplit('/')[-1]
    # return json.loads(ipfs.cat(multihash))
    r = requests.get(url_for(server, ""))
    assert (r.status_code == requests.codes.ok)
    return r.json()


def test_root(server):
    r = requests.get(server)
Example #22
    def initialize(self, context):
        super(IPFSProvider, self).initialize(context)
        self.ipfs_api = ipfsapi.Client('127.0.0.1', 5001)
Example #23
######################
# Boring setup stuff #
######################

r_input = input("Enter IPFS Hash: ")

try:
    SEEDNODE_URI = sys.argv[1]
except IndexError:
    SEEDNODE_URI = "localhost:10151"

##############
# IPFS Setup #
##############

api = ipfsapi.Client(host='https://ipfs.infura.io', port=5001)
ipfs_file = api.object_get(r_input)
ipfs_bytes = json.dumps(ipfs_file).encode('utf-8')

# pprint(ipfs_file)

##############################################
# Ursula, the Untrusted Re-Encryption Proxy  #
##############################################

print("Summoning Ursula")
ursula = Ursula.from_seed_and_stake_info(seed_uri=SEEDNODE_URI,
                                         federated_only=True,
                                         minimum_stake=0)
##########
# POLICY #
Example #24
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import ipfsapi
import sys
import time
from threading import Thread

data = [
    "Hello", "IPFS", "World!",
    "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nam lobortis sapien eros, eget tempus augue auctor sed. Fusce euismod, magna vitae congue pulvinar, odio tellus volutpat orci, nec lacinia nisl lacus quis sem. Ut ligula turpis, vehicula a condimentum auctor, dapibus sit amet ipsum. Aenean iaculis rhoncus metus ut rutrum. Fusce ut turpis eget velit viverra tempor. Phasellus vulputate vestibulum dolor consectetur tincidunt. Sed aliquet dui ut libero placerat, lacinia facilisis sapien dictum. Aenean vel mi eget nisl ornare auctor imperdiet ut ex. Ut et erat ac dolor tempus porttitor. Nulla tincidunt accumsan lacus, sit amet bibendum dolor viverra eu. Praesent id ultrices justo, vitae porta libero. Sed eget aliquam ante, eu bibendum erat. Donec volutpat fermentum lectus vitae dapibus. Vestibulum porttitor ipsum ipsum, ut tempus nisi iaculis eget. Nulla ullamcorper nibh in turpis dignissim, id elementum libero commodo. Fusce condimentum libero quis eros aliquam, hendrerit malesuada augue maximus. Aliquam erat volutpat. Aenean nisi nunc, aliquet vitae eros vestibulum, condimentum volutpat ligula. Phasellus semper porttitor nibh, ut euismod ligula vehicula rhoncus. Phasellus venenatis augue a quam viverra, quis aliquet lacus malesuada. Phasellus pellentesque nisi ut augue ornare placerat. Aliquam ornare vel velit quis rhoncus. Vivamus ac risus mi. Nunc condimentum odio in ex dapibus, non vulputate velit tempor. Nam accumsan vehicula ipsum. Aenean laoreet lorem ut mi pulvinar porttitor. Phasellus non ante sed libero volutpat iaculis. Proin eu feugiat elit, ut vehicula nisi. Aliquam pulvinar tellus at nulla vehicula blandit. Mauris tincidunt dolor sed lorem aliquet tristique. Quisque iaculis erat fringilla sodales pretium. Aenean eget sapien et velit volutpat hendrerit et nec massa. Nulla facilisi. Etiam vitae orci in orci sodales semper in vitae orci. Proin venenatis purus hendrerit venenatis dignissim. Donec tempor augue sit amet enim sodales vestibulum. In non ex lacinia sem consequat sagittis. Maecenas fermentum ultricies vulputate. Sed nec leo eget massa condimentum imperdiet. Nulla facilisi. Fusce auctor elit mi, sit amet rutrum justo vulputate non. Maecenas eu sollicitudin mi. Praesent mattis ex neque, quis tempor dui faucibus eget. Sed tempor, libero ut ornare aliquam, erat eros dictum dolor, vitae fringilla dui metus et mauris. Etiam vehicula nisl libero, et tincidunt dolor bibendum a. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Nam quis auctor metus. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Vivamus eget hendrerit purus. Duis ullamcorper mi at metus sollicitudin pulvinar. Integer et eleifend turpis. Suspendisse placerat turpis ante, vel cursus neque sodales et. Vivamus at egestas nibh, nec euismod magna. Praesent ut laoreet arcu. Pellentesque dictum et turpis et fermentum. Nulla sollicitudin nulla at posuere molestie. Aenean luctus ante a convallis vulputate. Vivamus vitae ligula volutpat, fermentum ante nec, tempus lectus. Curabitur vitae est eu mi ultricies maximus. Duis elementum magna quis lacus auctor congue. Sed a purus tempus, accumsan mauris sed, mollis enim. Nulla commodo finibus ante ut rhoncus.",
    "Bacon ipsum dolor amet jerky turkey porchetta, boudin ham strip steak salami ribeye picanha. Strip steak short ribs buffalo porchetta, spare ribs alcatra ham hock tail tongue burgdoggen prosciutto beef tri-tip kevin. Shoulder bresaola pork loin beef pork chop chicken. Pancetta shankle capicola pork loin flank pork belly tail turkey. Turducken pork bacon buffalo. Pastrami frankfurter capicola porchetta ground round. Shankle bacon jerky filet mignon swine. Bresaola shankle rump shoulder bacon meatloaf, flank pancetta sirloin ground round tenderloin pastrami beef t-bone alcatra. Ribeye pork loin kevin kielbasa cupim. Cupim flank sausage strip steak, turkey ham venison. Doner fatback picanha, bacon buffalo t-bone kielbasa cow. Filet mignon shoulder jowl ribeye, t-bone corned beef pastrami rump cupim pig. Pork loin pastrami bresaola tongue, ball tip shankle picanha filet mignon leberkas strip steak pork belly tenderloin tri-tip spare ribs swine. Porchetta bacon pastrami, chicken brisket short ribs frankfurter tri-tip tenderloin pig beef ribs. Rump picanha meatloaf ball tip brisket, salami pork loin turducken cupim burgdoggen chicken short ribs ground round pork. Porchetta buffalo spare ribs pig beef ribs meatloaf corned beef shank tongue salami. Pork chop kevin cow ham hock corned beef. Biltong pork belly landjaeger picanha tri-tip swine prosciutto sirloin chuck andouille strip steak shank burgdoggen filet mignon. Kevin boudin spare ribs, sirloin flank pork chop ground round porchetta ball tip alcatra cupim. Turkey cow filet mignon, hamburger buffalo andouille strip steak porchetta pork chop. Tenderloin turkey rump, shoulder turducken sausage cupim pancetta bresaola. Pig sirloin corned beef, pancetta prosciutto andouille beef ribs burgdoggen hamburger ground round capicola tongue filet mignon. Frankfurter picanha boudin bacon fatback ground round rump sausage spare ribs strip steak. Drumstick shank frankfurter pork loin bacon buffalo sirloin burgdoggen. Tenderloin frankfurter biltong landjaeger, shank pork venison pancetta. Turducken kevin salami tail, shankle pork shank pastrami sirloin pork loin boudin short loin doner swine. Ham hock ham shank, rump tri-tip shankle salami venison short ribs. Rump cupim sirloin swine tenderloin shoulder pork pork loin porchetta. Tenderloin burgdoggen beef pig cupim corned beef jowl kevin salami."
]

IPFSAPI = ipfsapi.Client()
BEGINTIME = time.time()

digests = []
results = []


def reset():
    global BEGINTIME, results
    BEGINTIME = time.time()
    results = [None] * len(data)


def load_into_ipfs():
    for idx in range(len(data)):
        start = time.time() - BEGINTIME
        digests.append(IPFSAPI.add_bytes(bytearray(data[idx], "utf-8")))
Example #25
import ipfsapi
import random

TOPIC = "airalab.measure.ipfs.bandwidth"

if __name__ == '__main__':
    ipfs = ipfsapi.Client()
    numbers = []

    for n in range(0, 1000):
        data = random.randint(1, 2**32)
        # print(data)
        ipfs_hash = ipfs.add_str(str(data))
        numbers.append(data)

        ipfs.pubsub_pub(TOPIC, ipfs_hash)

    with open('spammed.txt', 'w') as f:
        f.write('\n'.join(map(str, numbers)))
        f.close()

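A minimal, hypothetical consumer for the topic used above; it subscribes to the same pubsub topic and prints each message as delivered by the API (no assumptions about the payload format):

import ipfsapi

TOPIC = "airalab.measure.ipfs.bandwidth"

if __name__ == '__main__':
    ipfs = ipfsapi.Client()
    # Sketch only: iterate over incoming pubsub messages and print each one.
    for message in ipfs.pubsub_sub(TOPIC):
        print(message)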
Example #26
    def setUp(self):
        self.api = ipfsapi.Client()
        self.multihash = 'QmYA2fn8cMbVWo4v95RwcwJVyQsNtnEwHerfWR8UNtEwoE'
        self.content_size = 248
Example #27
from util import IPFSAPI_IP, IPFSAPI_PORT, IPWBREPLAY_IP, IPWBREPLAY_PORT

# from warcio.archiveiterator import ArchiveIterator

import ipfsapi
import requests
import datetime

from Crypto.Cipher import XOR
import base64

from __init__ import __version__ as ipwbVersion

DEBUG = False

IPFS_API = ipfsapi.Client(IPFSAPI_IP, IPFSAPI_PORT)


# TODO: put this method definition below indexFileAt()
def pushToIPFS(hstr, payload):
    ipfsRetryCount = 5  # WARC->IPFS attempts before giving up
    retryCount = 0
    while retryCount < ipfsRetryCount:
        try:
            httpHeaderIPFSHash = pushBytesToIPFS(bytes(hstr))
            payloadIPFSHash = pushBytesToIPFS(bytes(payload))
            if retryCount > 0:
                m = 'Retrying succeeded after {0} attempts'.format(retryCount)
                print(m)
            return [httpHeaderIPFSHash, payloadIPFSHash]
        except NewConnectionError as e:
Example #28
    def setUp(self):
        self.api = ipfsapi.Client()
        self._olddir = os.getcwd()
        os.chdir(HERE)
        # Add a resource to get the stats for.
        self.resource = self.api.add_str('Mary had a little lamb')
Example #29
File: replay.py Project: TomBaxter/ipwb
from .__init__ import __version__ as ipwbVersion


import ipfsapi
from flask import Flask
from flask import flash
from werkzeug.utils import secure_filename
from flask import send_from_directory
from flask import make_response

UPLOAD_FOLDER = '/tmp'
ALLOWED_EXTENSIONS = ('.warc', '.warc.gz')

app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.debug = False

IPFS_API = ipfsapi.Client(IPFSAPI_HOST, IPFSAPI_PORT)


@app.context_processor
def formatters():
    return {'pluralize': lambda x, s, p: "{} {}".format(x, s if x == 1 else p)}


@app.after_request
def setServerHeader(response):
    response.headers['Server'] = 'InterPlanetary Wayback Replay/' + ipwbVersion
    return response


def allowed_file(filename):
    return filename.lower().endswith(ALLOWED_EXTENSIONS)
Example #30
class IpfsApiTest(unittest.TestCase):

    api = ipfsapi.Client()

    fake = [{
        'Hash': u'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX',
        'Name': 'fake_dir/fsdfgh'
    }, {
        'Hash': u'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv',
        'Name': 'fake_dir/popoiopiu'
    }, {
        'Hash': u'QmeMbJSHNCesAh7EeopackUdjutTJznum1Fn7knPm873Fe',
        'Name': 'fake_dir/test3/ppppoooooooooo'
    }, {
        'Hash': u'QmRphRr6ULDEj7YnXpLdnxhnPiVjv5RDtGX3er94Ec6v4Q',
        'Name': 'fake_dir/test3'
    }, {
        'Hash': u'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ',
        'Name': 'fake_dir/test2/llllg'
    }, {
        'Hash': u'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD',
        'Name': 'fake_dir/test2/fssdf'
    }, {
        'Hash': u'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ',
        'Name': 'fake_dir/test2'
    }, {
        'Hash': u'QmRphRr6ULDEj7YnXpLdnxhnPiVjv5RDtGX3er94Ec6v4Q',
        'Name': 'fake_dir/test3'
    }, {
        'Hash': u'QmYqqgRahxbZvudnzDu2ZzUS1vFSNEuCrxghM8hgT8uBFY',
        'Name': 'fake_dir'
    }]

    fake_lookup = dict((i['Name'], i['Hash']) for i in fake)

    ## test_add_multiple_from_list
    fake_file = 'fake_dir/fsdfgh'
    fake_file_only_res = {
        'Name': 'fsdfgh',
        'Hash': 'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX'
    }
    fake_file2 = 'fake_dir/popoiopiu'
    fake_files_res = [{
        'Name': 'fsdfgh',
        'Hash': 'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX'
    }, {
        'Name': 'popoiopiu',
        'Hash': 'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv'
    }]

    ## test_add_multiple_from_dirname
    fake_dir_test2 = 'fake_dir/test2'
    fake_dir_res = [{
        'Name': 'test2/fssdf',
        'Hash': 'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD'
    }, {
        'Name': 'test2/llllg',
        'Hash': 'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ'
    }, {
        'Name': 'test2',
        'Hash': 'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ'
    }]

    ## test_add_filepattern_from_dirname
    fake_dir_test2 = 'fake_dir/test2'
    fnpattern = 'fss*'
    # The folder hash is not the same as above because only the files matching
    # the pattern are added, so the folder's contents differ.
    fake_dir_fnpattern_res = [{
        'Name': 'test2/fssdf',
        'Hash': 'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD'
    }, {
        'Name': 'test2',
        'Hash': 'QmT5rV6EsKNSW619SntLrkCxbUXXQh4BrKm3JazF2zEgEe'
    }]

    ## test_add_recursive
    fake_dir = 'fake_dir'
    fake_dir_recursive_res = [{
        'Hash': 'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX',
        'Name': 'fake_dir/fsdfgh'
    }, {
        'Hash': 'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv',
        'Name': 'fake_dir/popoiopiu'
    }, {
        'Hash': 'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD',
        'Name': 'fake_dir/test2/fssdf'
    }, {
        'Hash': 'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ',
        'Name': 'fake_dir/test2/llllg'
    }, {
        'Hash': 'QmeMbJSHNCesAh7EeopackUdjutTJznum1Fn7knPm873Fe',
        'Name': 'fake_dir/test3/ppppoooooooooo'
    }, {
        'Hash': 'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ',
        'Name': 'fake_dir/test2'
    }, {
        'Hash': 'QmRphRr6ULDEj7YnXpLdnxhnPiVjv5RDtGX3er94Ec6v4Q',
        'Name': 'fake_dir/test3'
    }, {
        'Hash': 'QmYqqgRahxbZvudnzDu2ZzUS1vFSNEuCrxghM8hgT8uBFY',
        'Name': 'fake_dir'
    }]

    ## test_refs
    refs_res = [{
        'Err': '',
        'Ref': 'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX'
    }, {
        'Err': '',
        'Ref': 'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv'
    }, {
        'Err': '',
        'Ref': 'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ'
    }, {
        'Err': '',
        'Ref': 'QmRphRr6ULDEj7YnXpLdnxhnPiVjv5RDtGX3er94Ec6v4Q'
    }]

    def setUp(self):
        self._olddir = os.getcwd()
        os.chdir(HERE)

    def tearDown(self):
        os.chdir(self._olddir)

    #########
    # TESTS #
    #########

    def test_version(self):
        expected = ['Repo', 'Commit', 'Version']
        resp_version = self.api.version()
        for key in expected:
            self.assertTrue(key in resp_version)

    def test_id(self):
        expected = [
            'PublicKey', 'ProtocolVersion', 'ID', 'AgentVersion', 'Addresses'
        ]
        resp_id = self.api.id()
        for key in expected:
            self.assertTrue(key in resp_id)

    def test_add_single_from_str(self):
        res = self.api.add(self.fake_file)
        self.assertEqual(res, self.fake_file_only_res)

    def test_add_single_from_fp(self):
        with open(self.fake_file, 'rb') as fp:
            res = self.api.add(fp)
            self.assertEqual(res, self.fake_file_only_res)

    def test_add_multiple_from_list(self):
        res = self.api.add([self.fake_file, self.fake_file2])
        self.assertEqual(res, self.fake_files_res)

    def test_add_multiple_from_dirname(self):
        res = self.api.add(self.fake_dir_test2)
        self.assertEqual(sorted(res, key=lambda x: x['Name']),
                         sorted(self.fake_dir_res, key=lambda x: x['Name']))

    def test_add_filepattern_from_dirname(self):
        res = self.api.add(self.fake_dir_test2, fnpattern=self.fnpattern)
        self.assertEqual(
            sorted(res, key=lambda x: x['Name']),
            sorted(self.fake_dir_fnpattern_res, key=lambda x: x['Name']))

    def test_add_recursive(self):
        res = self.api.add(self.fake_dir, recursive=True)
        self.assertEqual(
            sorted(res, key=lambda x: x['Name']),
            sorted(self.fake_dir_recursive_res, key=lambda x: x['Name']))

    def test_add_get_pyobject(self):
        data = [-1, 3.14, u'Hän€', b'23']
        res = self.api.add_pyobj(data)
        self.assertEqual(data, self.api.get_pyobj(res))

    def test_get_file(self):
        self.api.add(self.fake_file)

        test_hash = self.fake[0]['Hash']

        self.api.get(test_hash)
        self.assertIn(test_hash, os.listdir(os.getcwd()))

        os.remove(test_hash)
        self.assertNotIn(test_hash, os.listdir(os.getcwd()))

    def test_get_dir(self):
        self.api.add(self.fake_dir, recursive=True)

        test_hash = self.fake[8]['Hash']

        self.api.get(test_hash)
        self.assertIn(test_hash, os.listdir(os.getcwd()))

        shutil.rmtree(test_hash)
        self.assertNotIn(test_hash, os.listdir(os.getcwd()))

    def test_get_path(self):
        self.api.add(self.fake_file)

        test_hash = self.fake[8]['Hash'] + '/fsdfgh'

        self.api.get(test_hash)
        self.assertIn('fsdfgh', os.listdir(os.getcwd()))

        os.remove('fsdfgh')
        self.assertNotIn('fsdfgh', os.listdir(os.getcwd()))

    def test_refs(self):
        self.api.add(self.fake_dir, recursive=True)

        refs = self.api.refs(self.fake[8]['Hash'])

        self.assertEqual(sorted(refs, key=lambda x: x['Ref']),
                         sorted(self.refs_res, key=lambda x: x['Ref']))

    def test_refs_local(self):
        refs = self.api.refs_local()

        self.assertEqual(sorted(refs[0].keys()), ['Err', 'Ref'])

    def test_cat_single_file_str(self):
        self.api.add(self.fake_file)
        res = self.api.cat('QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX')
        self.assertEqual(b"dsadsad\n", res)