def get_ipfs_client(options=None):
    if options is None:
        options = load_prefs().get('ipfs', {})
    return ipfsApi.Client(
        options.get('host', '127.0.0.1'),
        options.get('port', 5001))
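# Usage sketch for get_ipfs_client() above. The explicit options dict mirrors the
# assumed prefs layout ({'ipfs': {'host': ..., 'port': ...}}); passing it skips the
# load_prefs() lookup entirely.
client = get_ipfs_client({'host': '127.0.0.1', 'port': 5001})
print(client.id())  # quick connectivity check against the local daemon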
def ipfs_add(self):
    """
    Adds this upload to ipfs. Raises exceptions on failures.
    """
    if os.path.exists(self.full_path()):
        api = ipfsApi.Client('127.0.0.1', 5001)
        # chdir so that we only pass the base filename to Client.add();
        # if you pass in a full path, it loses the filename when it wraps it
        # in a directory
        origdir = os.getcwd()
        os.chdir(os.path.dirname(self.full_path()))
        error = None
        try:
            # encode to utf8 or urllib will raise an error inside Client.add()
            filename = self.file_name.encode('utf8')
            # The "-w" option wraps the file in a directory so we can generate a nicer url.
            # There doesn't seem to be a way to tell ipfs to use a different filename
            # than the on-disk one (it'd be better to use structured_file_name).
            response = api.add(filename, opts={'w': True})
        except Exception as e:
            error = e
        finally:
            # restore the original working directory before handling any error
            os.chdir(origdir)
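# Illustrative helper (not part of the original upload class): with opts={'w': True}
# the add() response is a list whose last entry is the wrapping directory, so a
# gateway URL that keeps the human-readable filename can be built from it.
def wrapped_gateway_url(add_response, filename, gateway='https://ipfs.io'):
    # assumption: add_response is the list returned by Client.add(..., opts={'w': True})
    wrapper_hash = add_response[-1]['Hash']
    return '%s/ipfs/%s/%s' % (gateway, wrapper_hash, filename)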
def episode_to_ipfs(episode_id):
    if Episode.objects.filter(pk=episode_id).exists():
        episode_obj = Episode.objects.get(pk=episode_id)
        print(episode_obj.member.MemberID)
        member_obj = Member.objects.get(pk=episode_obj.member.MemberID)
        try:
            client = ipfsApi.Client('127.0.0.1', 5001)
        except Exception as e:
            print(e)
            return False
        try:
            episode_obj.HistoryHash = member_obj.LastHash
            episode_obj.Active = False
            episode_json = EpisodeSerializer(episode_obj).data
            episode_hash = client.add_json(episode_json)
            print(episode_hash)
            member_obj.LastHash = episode_hash
            member_obj.save()
            episode_obj.delete()
        except Exception as e:
            print(e)
            # rollback
            episode_obj.HistoryHash = None
            episode_obj.Active = True
            return False
        return True
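# Hedged sketch of reading back the chain written by episode_to_ipfs(): each stored
# episode JSON is assumed to carry the previous entry's hash in its 'HistoryHash'
# field, so the member's LastHash can be walked backwards. walk_history() is an
# illustrative helper, not part of the original module.
def walk_history(client, last_hash):
    history = []
    current = last_hash
    while current:
        episode_json = client.get_json(current)  # retrieve the stored episode record
        history.append(episode_json)
        current = episode_json.get('HistoryHash')
    return history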
def publish_sample_data(cypher_key, ipfs_server, ipfs_port, filename, sample_size=100):
    # create sample data
    parts = filename.split('.')
    result_file_name = "/tmp/%s-sample.%s.gz" % (parts[0], "json")
    out_file = gzip.open(result_file_name, "wt")
    sample_data = []
    with gzip.open(filename, "rt") as in_file:
        csv_reader = csv.DictReader(in_file)
        cursor = 0
        for row in csv_reader:
            if cursor < sample_size:
                sample_data.append(row)
            else:
                break
            cursor += 1
    out_file.write(
        json.dumps(sample_data, indent=4, sort_keys=False, default=str))
    out_file.close()

    md5_hash = create_file_hash(result_file_name)
    enc_file_name = result_file_name + '.enc'
    encrypt_file(cypher_key, result_file_name, enc_file_name)

    api = ipfsApi.Client(ipfs_server, ipfs_port)
    res = api.add(enc_file_name)
    return {'ipfs_hash': res['Hash'], 'md5_file_hash': md5_hash}
def decrypt_file(self, round):
    """
    Decrypt a given message if there are at least threshold+1 shares available.

    Args:
        round (int): the round of the encrypted message
    Returns:
        (bytes) the decrypted message
    """
    shares = self.select_t_valid_share(round)
    c1_power_s = cru.point_from_eth(cru.point_to_eth(cru.Cipher.recover_c1(shares)))

    db = get_db()
    file_info = db.execute('SELECT * FROM encrypted_file WHERE round=?', (round,)).fetchone()
    c2 = (int(file_info['c2x']), int(file_info['c2y']))
    c2 = cru.point_from_eth(c2)
    cid_ipfs = str(file_info['hash'])[2:-1]

    ipfs_api = ipfsApi.Client('127.0.0.1', 8080)
    ipfs_api.get(cid_ipfs)
    with open(cid_ipfs, 'rb') as f:
        cipher_file = f.read()
    os.remove(cid_ipfs)

    encrypt_file = {
        'cipher_file': cipher_file,
        'c1': c1_power_s,
        'c2': c2
    }
    return cru.Cipher.decrypt(encrypt_file)
def __init__(self):
    self.api = ipfsapi.Client('127.0.0.1', 5001)
    try:
        ipfsapi.assert_version(self.api.version()['Version'],
                               minimum='0.4.3', maximum='0.5.0')
    except ipfsapi.exceptions.VersionMismatch:
        # returning a value from __init__ is not allowed, so signal the problem instead
        raise RuntimeError("Please update the IPFS daemon.")
def publish_all_data(self, table_name, cypher_key, ipfs_server, ipfs_port, cols=None):
    cursor = self.connection.cursor(
        cursor_factory=psycopg2.extras.RealDictCursor, name='large_dataset')
    total_records = 0
    first_row = None
    if cols is None:
        # get all columns
        query = "select * from cherre_sample_data.%s " % table_name
        print(query)
        cursor.execute(query)
        first_row = cursor.fetchone()
        total_records += 1
        cols = [desc[0] for desc in cursor.description]
    else:
        select_query = "select {} from cherre_sample_data.%s " % table_name
        limit_query = sql.SQL(select_query).format(
            sql.SQL(', ').join(map(sql.Identifier, cols)))
        print(limit_query.as_string(self.connection))
        cursor.execute(limit_query)  # execute the composed query, not the raw template

    result_file_name = "/tmp/%s.%s.gz" % (table_name, "csv")
    with gzip.open(result_file_name, "wt") as out_file:
        csv_writer = csv.DictWriter(out_file, fieldnames=cols)
        csv_writer.writeheader()
        if first_row is not None:
            csv_writer.writerow(first_row)
        while True:
            rows = cursor.fetchmany(size=5000)
            if not rows:
                break
            total_records += len(rows)
            for row in rows:
                csv_writer.writerow(row)
        cursor.close()

    md5_hash = create_file_hash(result_file_name)
    enc_file_name = result_file_name + '.enc'
    encrypt_file(cypher_key, result_file_name, enc_file_name)

    api = ipfsApi.Client(ipfs_server, ipfs_port)
    res = api.add(enc_file_name)
    return {
        'ipfs_hash': res['Hash'],
        'md5_file_hash': md5_hash,
        'num_of_rows': total_records
    }
def decrpt_has(key, file_hash):
    server_config = config(section='ipfs')
    api = ipfsApi.Client(server_config['endpoint'], server_config['port'])
    api.get(file_hash)

    outfile_name = 'plaintext.gz'
    decrypt_file(key.encode('utf-8'), file_hash, outfile_name)
    with gzip.open(outfile_name, "r") as file:
        data = file.read()
    return Response(data, mimetype='application/json')
def publish_all_data(cypher_key, ipfs_server, ipfs_port, filename):
    md5_hash = create_file_hash(filename)
    enc_file_name = "/tmp/" + filename + '.enc'
    encrypt_file(cypher_key, filename, enc_file_name)

    api = ipfsApi.Client(ipfs_server, ipfs_port)
    res = api.add(enc_file_name)
    return {'ipfs_hash': res['Hash'], 'md5_file_hash': md5_hash}
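# Usage sketch for publish_all_data() above; the key, host, port, and filename are
# placeholders, and the md5 hash refers to the plaintext file, not the encrypted copy.
result = publish_all_data('my-secret-key', '127.0.0.1', 5001, 'dataset.csv.gz')
print(result['ipfs_hash'], result['md5_file_hash'])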
def setUp(self):
    self.app = create_app('testing')
    self.app_context = self.app.app_context()
    self.app_context.push()
    self.client = ipfsApi.Client()
    db.create_all()
    down.basePath = "/home/sebastian/testing-stw/"
    log_handler = logging.FileHandler(
        '/home/sebastian/testing-stw/STW.log')
    log_handler.setLevel(logging.INFO)
    self.app.logger.setLevel(logging.INFO)
    proxy_util.default_event_loop = asyncio.new_event_loop()
def setUp(self):
    self.app = create_app('testing')
    self.app_context = self.app.app_context()
    self.app_context.push()
    self.client = ipfsApi.Client()
    db.create_all()
    p.static_path = os.path.abspath(
        os.path.expanduser("~/") + "PycharmProjects/STW/static")
    log_handler = logging.FileHandler(
        '/home/sebastian/testing-stw/STW.log')
    log_handler.setLevel(logging.INFO)
    self.app.logger.setLevel(logging.INFO)
def setUp(self):
    self.app = create_app('testing')
    self.app_context = self.app.app_context()
    self.app_context.push()
    self.client = ipfsApi.Client()
    db.create_all()
    down.basePath = "/home/sebastian/testing-stw/"
    download_thread.base_path = "/home/sebastian/testing-stw/"
    proxy_util.static_path = "/home/sebastian/PycharmProjects/STW/static/"
    log_handler = logging.FileHandler(
        '/home/sebastian/testing-stw/STW.log')
    log_handler.setLevel(logging.INFO)
    self.app.logger.setLevel(logging.INFO)
def publish_sample_data(self, table_name, cypher_key, ipfs_server, ipfs_port,
                        sample_size=100, cols=None, output='json'):
    cursor = self.connection.cursor(
        cursor_factory=psycopg2.extras.RealDictCursor)
    # create sample data
    if cols is None:
        # get all columns
        query = "select * from cherre_sample_data.%s " % table_name + "limit %s" % sample_size
        print(query)
        cursor.execute(query)
        # capture column names so the csv writer has fieldnames when cols is not given
        cols = [desc[0] for desc in cursor.description]
    else:
        select_query = "select {} from cherre_sample_data.%s " % table_name + "limit %s" % sample_size
        limit_query = sql.SQL(select_query).format(
            sql.SQL(', ').join(map(sql.Identifier, cols)))
        print(limit_query.as_string(self.connection))
        cursor.execute(limit_query)

    rows = cursor.fetchall()
    json_str = json.dumps(rows, indent=4, sort_keys=False, default=str)

    result_file_name = "/tmp/%s-sample.%s.gz" % (table_name, output)
    out_file = gzip.open(result_file_name, "wt")
    if output == 'csv':
        csv_writer = csv.DictWriter(out_file, fieldnames=cols)
        csv_writer.writeheader()
        for row in rows:
            csv_writer.writerow(row)
    else:
        out_file.write(json_str)
    out_file.close()

    md5_hash = create_file_hash(result_file_name)
    enc_file_name = result_file_name + '.enc'
    encrypt_file(cypher_key, result_file_name, enc_file_name)

    api = ipfsApi.Client(ipfs_server, ipfs_port)
    res = api.add(enc_file_name)
    return {'ipfs_hash': res['Hash'], 'md5_file_hash': md5_hash}
def init_api(self):
    if self.ipfs_gateway is not None:
        if ':' in self.ipfs_gateway:
            host, port = self.ipfs_gateway.split(':')
        else:
            host, port = self.ipfs_gateway, 5001
        port = int(port)
    else:
        host = 'localhost'
        port = 5001
    self.api = ipfsApi.Client(host=host, port=port)
    # fail quickly if we're not able to contact ipfs
    self.id = self.api.id()
    LOG.debug('initialized ipfs api')
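# Standalone sketch of the same host:port parsing rule used by init_api() above;
# parse_gateway() is an illustrative helper, not part of the original class.
def parse_gateway(gateway, default_port=5001):
    if gateway is None:
        return 'localhost', default_port
    if ':' in gateway:
        host, port = gateway.split(':')
        return host, int(port)
    return gateway, default_port

assert parse_gateway(None) == ('localhost', 5001)
assert parse_gateway('ipfs.example.org:5002') == ('ipfs.example.org', 5002)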
def setUp(self):
    self.app = create_app('testing')
    if os.path.exists('/home/sebastian/testing-stw/stw.log'):
        os.remove('/home/sebastian/testing-stw/stw.log')
    log_handler = logging.FileHandler(
        '/home/sebastian/testing-stw/stw.log')
    log_handler.setLevel(logging.INFO)
    self.app.logger.addHandler(log_handler)
    self.app.logger.setLevel(logging.INFO)
    self.app.logger.info("Start logging tests:\n")
    self.app_context = self.app.app_context()
    self.app_context.push()
    self.client = ipfs.Client()
    db.create_all()
    down.basePath = "/home/sebastian/testing-stw/"
    self.client = self.app.test_client()
def ipfs_generate(name, birthdate, filename):
    """Creates/updates a file with the user's information, adds it to IPFS,
    and returns the ipfs link and its signature."""
    filename = f'{name}_{filename[:5]}.txt'
    user_info_file = open(f'{filename}', "w")
    # write the user data to the file
    user_info_file.write(f'Name: {name}\n')
    user_info_file.write(f'Birthdate: {birthdate}')
    user_info_file.close()

    api = ipfsApi.Client('127.0.0.1', 5001)  # requires a running ipfs daemon
    res = api.add(f'{filename}')  # add the file to ipfs
    ipfs_link = res["Hash"]
    ipfs_link_sign = vasya_pr_key.sign(
        ipfs_link.encode("utf-8"))  # generate the signature
    return ipfs_link, ipfs_link_sign.hex()
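# Usage sketch for ipfs_generate() above (assumes a running local ipfs daemon and the
# vasya_pr_key signing key set up by the surrounding module; the arguments are placeholders).
link, signature = ipfs_generate('Alice', '2000-01-01', 'passport.txt')
print(f'ipfs link: {link}')
print(f'signature: {signature}')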
def publish_all_data(self, cypher_key, ipfs_server, ipfs_port):
    result_file_name = "/tmp/%s.%s.gz" % (self.id, "csv")
    out_file = gzip.open(result_file_name, "wt")
    csv_writer = csv.writer(out_file)
    total_records = 1
    # get the first record and header
    dataset = self.client.get(self.id, content_type='csv', limit=1, offset=0)
    for row in dataset:
        csv_writer.writerow(row)
    out_file.flush()

    set_limit = 10000
    while True:
        dataset = self.client.get(self.id, content_type='csv',
                                  limit=set_limit, offset=total_records)
        # minus the header row
        total_records += len(dataset) - 1
        # no data, only the header
        # if len(dataset) == 1:
        #     break
        # for testing purposes fetch only 300000 records
        if len(dataset) == 1 or total_records > 300001:
            break
        for row in dataset[1:]:
            csv_writer.writerow(row)
        out_file.flush()
    out_file.close()

    md5_hash = create_file_hash(result_file_name)
    enc_file_name = result_file_name + '.enc'
    encrypt_file(cypher_key, result_file_name, enc_file_name)

    api = ipfsApi.Client(ipfs_server, ipfs_port)
    res = api.add(enc_file_name)
    return {
        'ipfs_hash': res['Hash'],
        'md5_file_hash': md5_hash,
        'num_of_rows': total_records
    }
def send_file(self, file_to_encrypt):
    """
    Encrypt a file with a random symmetric key and put it on ipfs.
    Then encrypt the symmetric key with the ElGamal cryptosystem and put it on the blockchain.

    Warning:
        Calls the send_msg function on CipherETHDKG, and so creates and sends a transaction.

    Args:
        file_to_encrypt (bytes): the file to encrypt
    """
    # encrypt the file
    db = get_db()
    mpk_sql = db.execute("SELECT * FROM mpk").fetchone()
    mpk = cru.point_from_eth((int(mpk_sql['x']), int(mpk_sql['y'])))
    db.close()
    cipher = cru.Cipher(mpk)
    ct = cipher.encrypt(file_to_encrypt)

    # put the encrypted file on ipfs
    ipfs_api = ipfsApi.Client('127.0.0.1', 5001)
    with open(os.path.join(current_app.config['UPLOAD_FOLDER'], 'temp_file'), 'wb') as f:
        f.write(ct['cipher_file'])
    res = ipfs_api.add(os.path.join(current_app.config['UPLOAD_FOLDER'], 'temp_file'))[0]
    os.remove(os.path.join(current_app.config['UPLOAD_FOLDER'], 'temp_file'))

    # send the encryption of the symmetric key used in AES to the blockchain
    transaction = self.contract.functions.send_msg(
        res['Hash'].encode(),
        cru.point_to_eth(ct['c1']),
        cru.point_to_eth(ct['c2'])
    ).buildTransaction(
        {
            'chainId': 1,
            'gas': 200000,
            'nonce': self.w3.eth.getTransactionCount(self.account)
        }
    )
    signed_tx = self.w3.eth.account.signTransaction(transaction, self.private_key)
    txn_hash = self.w3.eth.sendRawTransaction(signed_tx.rawTransaction)
    return txn_hash
def publish_sample_data(self, cypher_key, ipfs_server, ipfs_port, sample_size=100, output='json'):
    # create sample data
    dataset = self.client.get(self.id, content_type=output, limit=sample_size)
    result_file_name = "/tmp/%s-sample.%s.gz" % (self.id, output)
    out_file = gzip.open(result_file_name, "wt")
    if output == 'csv':
        csv_writer = csv.writer(out_file)
        for row in dataset:
            csv_writer.writerow(row)
    else:
        out_file.write(json.dumps(dataset, indent=4, sort_keys=False, default=str))
    out_file.close()

    md5_hash = create_file_hash(result_file_name)
    enc_file_name = result_file_name + '.enc'
    encrypt_file(cypher_key, result_file_name, enc_file_name)

    api = ipfsApi.Client(ipfs_server, ipfs_port)
    res = api.add(enc_file_name)
    return {'ipfs_hash': res['Hash'], 'md5_file_hash': md5_hash}
import ipfsApi

api = ipfsApi.Client('https://ipfs.infura.io', 5001)


def addFile(file):
    res = api.add(file)
    print(res)
    return res
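# Usage sketch for addFile() above; 'example.txt' is a placeholder path, and the
# 'Hash' key assumes the single-file response shape returned by ipfsApi.Client.add().
result = addFile('example.txt')
print('stored as', result['Hash'])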
import ipfsApi
from os import path
import os
import shutil
from pathlib import Path
from data_secure_app.main.web3_functions import get_key_for_user, get_key_for_user_by_doctor, get_list_of_user_files
from data_secure_app.encryption.encryption_engine import Encryptor
import time

ipfs_client = ipfsApi.Client(host="127.0.0.1", port=5001)
home = str(Path.home())


def add(filepath):
    if path.exists(filepath):
        try:
            start = time.time()
            response = ipfs_client.add(filepath)
            print('IPFS Storage Time: ', round((time.time() - start), 2))
            print(response)
            for r in response:
                if r['Name'] == filepath:
                    print('returning', r, True)
                    return r, True
            print('add: returning empty in for')
            return {}, False
        except Exception as e:
            print('add: returning empty in except', e)
            return {}, False
    else:
        print('add: returning empty in else')
import sys

import ipfsApi
import argparse

from io import BytesIO

from pywb.warc.archiveiterator import DefaultRecordParser
from surt import surt
from requests.packages.urllib3.exceptions import NewConnectionError
from requests.exceptions import ConnectionError
import requests

from __init__ import __version__ as ipwbVersion

IP = '127.0.0.1'
PORT = '5001'

IPFS_API = ipfsApi.Client(IP, PORT)


def main():
    args = checkArgs(sys.argv)
    # Verify that a WARC file has been passed in
    verifyDaemonIsAlive(args.daemon_address)
    verifyFileExists(args.warcPath)  # verifyFileExists()

    textRecordParserOptions = {
        'cdxj': True,
        'include_all': False,
        'surt_ordered': False}
    cdxLines = ''
    ipfsRetryCount = 5  # WARC->IPFS attempts before giving up
    ipfsTempPath = '/tmp/ipfs/'
import os

import ipfsApi
from flask import Flask
from flask import Response
from requests.exceptions import ConnectionError
import requests

import util as ipwbConfig
from util import IPFSAPI_IP, IPFSAPI_PORT, IPWBREPLAY_IP, IPWBREPLAY_PORT
from util import INDEX_FILE

app = Flask(__name__)
app.debug = True

# @app.route("/")
# def hello():
#     return "Hello World!"

IPFS_API = ipfsApi.Client(IPFSAPI_IP, IPFSAPI_PORT)


@app.route('/webui/<path:path>')
def showWebUI(path):
    path = 'ipwb/webui/' + path
    with open(path, 'r') as webuiFile:
        content = webuiFile.read()

    if 'index.html' in path:
        content = content.replace('MEMCOUNT', str(retrieveMemCount()))
        content = content.replace(
            'var uris = []', 'var uris = ' + getURIsInCDXJ())
        content = content.replace('INDEXSRC', os.path.abspath(INDEX_FILE))
    return Response(content)
class IpfsApiTest(unittest.TestCase):

    api = ipfsApi.Client()

    fake = [{'Hash': u'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX',
             'Name': 'fake_dir/fsdfgh'},
            {'Hash': u'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv',
             'Name': 'fake_dir/popoiopiu'},
            {'Hash': u'QmeMbJSHNCesAh7EeopackUdjutTJznum1Fn7knPm873Fe',
             'Name': 'fake_dir/test3/ppppoooooooooo'},
            {'Hash': u'QmRphRr6ULDEj7YnXpLdnxhnPiVjv5RDtGX3er94Ec6v4Q',
             'Name': 'fake_dir/test3'},
            {'Hash': u'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ',
             'Name': 'fake_dir/test2/llllg'},
            {'Hash': u'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD',
             'Name': 'fake_dir/test2/fssdf'},
            {'Hash': u'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ',
             'Name': 'fake_dir/test2'},
            {'Hash': u'QmbZuss6aAizLEAt2Jt2BD29oq4XfMieGezi6mN4vz9g9A',
             'Name': 'fake_dir'}]

    fake_lookup = dict((i['Name'], i['Hash']) for i in fake)

    ## test_add_multiple_from_list
    fake_file = 'fake_dir/fsdfgh'
    fake_file2 = 'fake_dir/popoiopiu'
    fake_files_res = [{u'Hash': u'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX',
                       u'Name': u'fsdfgh'},
                      {u'Hash': u'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv',
                       u'Name': u'popoiopiu'},
                      {u'Hash': u'QmVkNdzCBukBRdpyFiKPyL2R15qPExMr9rV9RFV2kf9eeV',
                       u'Name': u''}]

    ## test_add_multiple_from_dirname
    fake_dir_test2 = 'fake_dir/test2'
    fake_dir_res = [{u'Hash': u'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ',
                     u'Name': u'llllg'},
                    {u'Hash': u'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD',
                     u'Name': u'fssdf'},
                    {u'Hash': u'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ',
                     u'Name': u''}]

    ## test_add_recursive
    fake_dir = 'fake_dir'
    fake_dir_recursive_res = [{u'Hash': u'QmQcCtMgLVwvMQGu6mvsRYLjwqrZJcYtH4mboM9urWW9vX',
                               u'Name': u'fake_dir/fsdfgh'},
                              {u'Hash': u'QmYAhvKYu46rh5NcHzeu6Bhc7NG9SqkF9wySj2jvB74Rkv',
                               u'Name': u'fake_dir/popoiopiu'},
                              {u'Hash': u'QmeMbJSHNCesAh7EeopackUdjutTJznum1Fn7knPm873Fe',
                               u'Name': u'fake_dir/test3/ppppoooooooooo'},
                              {u'Hash': u'QmRphRr6ULDEj7YnXpLdnxhnPiVjv5RDtGX3er94Ec6v4Q',
                               u'Name': u'fake_dir/test3'},
                              {u'Hash': u'QmNuvmuFeeWWpxjCQwLkHshr8iqhGLWXFzSGzafBeawTTZ',
                               u'Name': u'fake_dir/test2/llllg'},
                              {u'Hash': u'Qmb1NPqPzdHCMvHRfCkk6TWLcnpGJ71KnafacCMm6TKLcD',
                               u'Name': u'fake_dir/test2/fssdf'},
                              {u'Hash': u'QmX1dd5DtkgoiYRKaPQPTCtXArUu4jEZ62rJBUcd5WhxAZ',
                               u'Name': u'fake_dir/test2'},
                              {u'Hash': u'QmYqqgRahxbZvudnzDu2ZzUS1vFSNEuCrxghM8hgT8uBFY',
                               u'Name': u'fake_dir'}]

    def setUp(self):
        self._olddir = os.getcwd()
        os.chdir(HERE)

    def tearDown(self):
        os.chdir(self._olddir)

    #########
    # TESTS #
    #########

    def test_add_single_from_str(self):
        res = self.api.add(self.fake_file)
        self.assertEqual(res[u"Hash"], self.fake_lookup[self.fake_file])

    def test_add_single_from_fp(self):
        with open(self.fake_file, 'rb') as fp:
            res = self.api.add(fp)
            self.assertEqual(res[u"Hash"], self.fake_lookup[self.fake_file])

    def test_add_multiple_from_list(self):
        res = self.api.add([self.fake_file, self.fake_file2])
        self.assertEqual(res, self.fake_files_res)

    def test_add_multiple_from_dirname(self):
        res = self.api.add(self.fake_dir_test2)
        self.assertEqual(sorted(res, key=lambda x: x['Name']),
                         sorted(self.fake_dir_res, key=lambda x: x['Name']))

    def test_add_recursive(self):
        res = self.api.add(self.fake_dir, recursive=True)
        self.assertEqual(
            sorted(res, key=lambda x: x['Name']),
            sorted(self.fake_dir_recursive_res, key=lambda x: x['Name']))

    def test_add_get_pyobject(self):
        data = [-1, 3.14, u'Hän€', b'23']
        res = self.api.add_pyobj(data)
        self.assertEqual(data, self.api.get_pyobj(res))
def uploadIpfs(src):
    api = ipfsApi.Client('https://ipfs.infura.io', 5001)
    res = api.add(src)
    return res
def upload():
    form = ActUploadForm()
    if request.method == 'POST' and form.validate_on_submit():
        image = form.image.data
        image_file = image.read()
        # sha256_hash = hashlib.sha256(form.patientHash.data.encode())
        # patientHash = sha256_hash.hexdigest()
        patientHash = hashlib.sha256(form.patientHash.data.encode()).hexdigest()
        encoded_file = base64.b64encode(image_file)

        ### File upload to IPFS ###
        tmp_file_name = str(g.user.doctorNumber) + ".jpg"
        with open(tmp_file_name, "wb") as tmp_file:
            tmp_file.write(base64.b64decode(encoded_file))
        # Unable for awhile

        ### Produce a new image from the deep learning model
        new_file_name = "new" + tmp_file_name
        img = Image.open(tmp_file_name)
        pixel_img = np.array(img)
        predicted_img = predict.predict(pixel_img)
        new_image = Image.fromarray(predicted_img)
        new_image = new_image.convert("RGB")
        new_image.save(new_file_name)
        # current_path = os.getcwd()
        # new_file_name = "new_file.tif"

        ipfs_api = ipfsApi.Client("127.0.0.1", 5001)
        ipfs_response = ipfs_api.add(tmp_file_name)
        ipfs_new_response = ipfs_api.add(new_file_name)
        img_cid = ipfs_response["Hash"]
        new_img_cid = ipfs_new_response["Hash"]
        # print("img_cid :", img_cid)
        os.remove(tmp_file_name)

        ####### TEST #########
        # os.remove(new_file_name)
        # print("ipfs response :", ipfs_response)
        # print("ipfs new response :", ipfs_new_response)
        ###########################

        blockchain_url = blockchain_api_url()
        apiURL = blockchain_url + 'doctor/uploadPatientData'
        # print("apiURL :", apiURL)
        headers = {"Content-Type": "application/json"}
        json_data = {
            "doctorNumber": g.user.doctorNumber,
            "patientHash": patientHash,
            "rawImgCID": img_cid,
            "resultImgCID": new_img_cid
        }
        response = requests.post(url=apiURL, headers=headers, json=json_data)
        # print("Response: ", response)
        # print("Response Headers: ", response.headers)
        # print("Response Text: ", response.text)

        #### Search after upload ####
        apiURL = blockchain_url + 'patient/getMyData/' + patientHash
        response = requests.get(apiURL)
        # print("Response Type: ", type(response))
        # print("Response: ", response)
        # print("Response Headers: ", response.headers)
        # print("Response Text: ", response.text)

        data_list = []
        res = sorted(json.loads(response.text),
                     key=lambda x: -(x['Timestamp']['seconds']['low']))
        for each_data in res:
            tm = time.localtime(each_data['Timestamp']['seconds']['low'])
            time_data = str(tm.tm_year) + '.' + str(tm.tm_mon) + '.' + str(tm.tm_mday) + ' / ' + \
                str(tm.tm_hour) + ':' + str(tm.tm_min)
            # rawImg_location = "https://ipfs.io/ipfs/" + each_data['Value']['rawImgCID'] + "/"
            rawImg_location = "http://127.0.0.1:8080/ipfs/" + each_data['Value']['rawImgCID'] + "/"
            # resultImg_location = "https://ipfs.io/ipfs/" + each_data['Value']['resultImgCID'] + "/"
            resultImg_location = "http://127.0.0.1:8080/ipfs/" + each_data['Value']['resultImgCID'] + "/"
            data = {'time': time_data, 'rawImg': rawImg_location, 'resultImg': resultImg_location}
            data_list.append(data)
        return render_template('act/showResult.html', data_list=data_list)
    return render_template('act/upload.html', form=form)
traceback.print_tb(sys.exc_info()[2])


connected = False


class State:
    pass


state = State()

print("Connecting to IPFS")
while not connected:
    try:
        state.ipfs_client = ipfsApi.Client('127.0.0.1', 5001)
        connected = True
    except:
        handle_exception()
        print("Waiting for IPFS daemon")
        time.sleep(5)

print("Creating IRC client")
bot = bottom.Client(host=host, port=port, ssl=ssl)


def sysmsg(s):
    bot.send("PRIVMSG", target=CHANNEL, message=s)


def usage(message):
try:
    import ipfsApi
except ImportError:
    print("[!] Module ipfs-api not installed")
    exit()

try:
    import skyhookfilecrypt
except ImportError:
    print("[!] Module skyhookfilecrypt not installed")
    exit()

from datetime import datetime
import os, db, aux, config

peer = ipfsApi.Client(config.host, config.port)


def uploadFile(fileName):
    if fileName not in [f for f in os.listdir(".") if os.path.isfile(f)]:
        return (1)
    else:
        password = aux.getRandomString(32)
        aesName = "{}.sky".format(fileName)
        tmpPath = "{}/{}".format(config.tmpDir, aesName)
        print("[+] Encrypting {}".format(fileName))
        try:
import ipfsApi
import json

api = ipfsApi.Client(host='https://ipfs.infura.io', port=5001)

# with open('ipfs.txt') as json_file:
#     abi = json.load(json_file)

new_file = api.add('pe.jpg')

# p = api.cat('QmZqiuTUnSXXM3SUFdZ3mYkHHXh2MyNVTci1aaksv3fsPS')
# p = p.decode("utf-8")
# p = json.loads(p)
# print(p['employee']['name'])

print(new_file)
from sqlalchemy import and_
import ipfsApi as ipfs

"""
:author: Waqar
This module sends an email in case a scheduled article is found to be changed or
blocked in a given country. Some code from downloader.py is duplicated here because
this code runs outside the application context, so reusing the code from
downloader.py was not possible.
"""

basePath = 'app/pdf/'
apiPostUrl = 'http://www.originstamp.org/api/stamps'
apiKey = '7be3aa0c7f9c2ae0061c9ad4ac680f5c '
blockSize = 65536
options = {'quiet': ''}
errorCaught = ""

ipfs_Client = ipfs.Client('127.0.0.1', 5001)


class OriginstampError(Exception):
    """
    Error class for problems happening while requesting a URL.
    """

    def __init__(self, message, req):
        super(OriginstampError, self).__init__(message)
        self.request = req


def get_pages_send_email(post, task):
    url = post.urlSite
    if task.china:
        proxy = app.config['STW_CHINA_PROXY']