def getUserInfo(request):
    """Return profile info for a phone number, gated on a valid security token.

    Expects POST fields 'number' and 'securityToken'; answers 401 when the
    token does not match the one stored for that phone number.
    """
    phone = request.POST.get("number")
    token = request.POST.get("securityToken")
    # TODO: Make sure the securityToken is not expired.
    user = UserPhone.objects.get(phone_number=phone)
    if int(user.token) != int(token):
        # Token mismatch: 401, still carrying the permissive CORS header.
        denied = HttpResponse()
        denied.status_code = 401
        denied["Access-Control-Allow-Origin"] = "*"
        return denied
    payload = {
        "firstName": user.name,
        "groupsWithStatus": list(UserinGroup.objects.filter(user=phone)),
        "location": user.region
    }
    granted = JsonResponse(payload)
    granted["Access-Control-Allow-Origin"] = "*"
    return granted
def info(request):
    """Run `multichain-cli food2 getinfo` and relay its JSON output.

    The CLI prints a banner, a blank-line gap, then the JSON body; only the
    segment after the first double newline is parsed.
    """
    proc = subprocess.Popen(
        ['multichain-cli', 'food2', 'getinfo'],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    stdout_text, _stderr_text = proc.communicate()
    json_text = ''.join(stdout_text.split('\n\n')[1]).strip()
    parsed = json.loads(json_text)
    response = JsonResponse(parsed)
    response["Access-Control-Allow-Origin"] = "*"
    return response
def getJsondata(request):
    """Run the ML pipeline and return its result as JSON with CORS headers."""
    jsonData = MachineLearning.dothething()
    response = JsonResponse(jsonData)
    response["Access-Control-Allow-Origin"] = "*"
    # Bug fix: the allowed request header is X-Requested-With; the previous
    # value "x-requested-width" was a typo, so browsers sending
    # X-Requested-With would fail the CORS preflight.
    response["Access-Control-Allow-Headers"] = "x-requested-with"
    return response
def giveMeRegions(request):
    """Expose the region choices as a JSON list of {code, name} objects."""
    regions = []
    for choice in regionChoices:
        regions.append({"code": choice[0], "name": choice[1]})
    resp = JsonResponse(regions, safe=False)
    resp["Access-Control-Allow-Origin"] = "*"
    return resp
def send_user(user):
    """Authenticate *user* against the submitted password and open a session.

    On success, persists a Session keyed by a fresh token and returns the
    user's dict representation with the token in a 'jwt' header.

    Robustness fix: on a bad password the original fell through and returned
    None (Django then raises "view didn't return an HttpResponse"); now an
    explicit 401 JSON response is returned.

    NOTE(review): `request_body` is read from enclosing/module scope — it is
    not defined in this function; confirm it is set by the caller.
    """
    if not verify_password(request_body['password'], user.password):
        return JsonResponse({'error': 'invalid credentials'}, status=401)
    token = create_token()
    session = Session(
        content_type=ContentType.objects.get_for_model(user.__class__),
        object_id=user.id,
        key=token)
    session.save()
    response = JsonResponse(user.to_dict(), status=200)
    response['jwt'] = token
    return response
def searchresulthandling(request):
    """Handle a subreddit-search POST: validate the form, run the ML job,
    and return its output as JSON with permissive CORS headers.
    """
    if request.method == "POST":
        form = AddSubreddit(request.POST)
        if form.is_valid():
            # Bug fix: cleaned_data is a dict, not a callable —
            # form.cleaned_data('subReddit') raised TypeError.
            value = form.cleaned_data['subReddit']
            print(value)
            jsonData = MachineLearning.dothething(value)
            response = JsonResponse(jsonData)
            response["Access-Control-Allow-Origin"] = "*"
            # Bug fix: header name typo — "x-requested-width" should be
            # "x-requested-with" (X-Requested-With) for CORS preflight.
            response["Access-Control-Allow-Headers"] = "x-requested-with"
            return response
    # Robustness fix: non-POST requests and invalid forms previously fell
    # through and returned None, which makes Django raise; answer 400 instead.
    response = HttpResponse(status=400)
    response["Access-Control-Allow-Origin"] = "*"
    return response
def widget_list(request):
    """GET: list all widgets as a JSON array (with a CORS header).
    POST: create a widget from the parsed JSON body — 201 on success,
    400 with serializer errors otherwise.
    """
    if request.method == 'GET':
        serializer = WidgetSerializer(Widget.objects.all(), many=True)
        listing = JsonResponse(serializer.data, safe=False)
        listing['Access-Control-Allow-Origin'] = '*'
        return listing
    if request.method == 'POST':
        payload = JSONParser().parse(request)
        serializer = WidgetSerializer(data=payload)
        if not serializer.is_valid():
            return JsonResponse(serializer.errors, status=400)
        serializer.save()
        return JsonResponse(serializer.data, status=201)
def upload(request):
    """Save an uploaded image under media/upload/imgs and report its URL.

    Returns {'error': 1} for anything other than a POST carrying an
    'imgFile'; {'error': 0, 'url': ...} on success.
    """
    res = {'error': 1}
    if request.method == 'POST':
        imgFile = request.FILES.get('imgFile')
        # Robustness fix: a POST without 'imgFile' previously crashed with
        # AttributeError on None; now it just reports error=1.
        if imgFile is not None:
            upload_img_path = os.path.join(BASE_DIR, 'media', 'upload', 'imgs')
            if not os.path.exists(upload_img_path):
                os.makedirs(upload_img_path)
            # Security fix: the filename is client-controlled; strip any
            # directory components so a crafted name (e.g. "../../x") cannot
            # escape the upload directory.
            safe_name = os.path.basename(imgFile.name)
            with open(os.path.join(upload_img_path, safe_name), 'wb') as f:
                # chunks() streams the upload and, unlike line iteration,
                # is correct for arbitrary binary data.
                for chunk in imgFile.chunks():
                    f.write(chunk)
            res['error'] = 0
            res['url'] = '/bbs/media/upload/imgs/%s' % safe_name
    response = JsonResponse(res)
    response['X-Frame-Options'] = 'SAMEORIGIN'
    return response
def handle_batch_requests(request, *args, **kwargs):
    """Split a batch request into individual WSGI requests, execute them
    all, and return the collected responses as one JSON array.

    Optionally reports the wall-clock duration in a response header when
    _settings.ADD_DURATION_HEADER is set.
    """
    started_at = datetime.now()
    try:
        individual_requests = get_wsgi_requests(request)
    except BadBatchRequest as brx:
        # Malformed batch payload: surface the parser's message as a 400.
        return HttpResponseBadRequest(content=str(brx))
    results = execute_requests(individual_requests)
    batch_response = JsonResponse(results, safe=False)
    if _settings.ADD_DURATION_HEADER:
        elapsed = datetime.now() - started_at
        batch_response[_settings.DURATION_HEADER_NAME] = str(elapsed.seconds)
    return batch_response
def checkWhetherSmsVerificationCodeIsValidAndReturnAToken(request):
    """Validate the SMS verification code posted for a phone number.

    On success, rotates the user's auth token, persists it, and returns it
    as JSON with a permissive CORS header; on failure, returns 401.
    """
    userPhoneNumberToVerify = request.POST.get("number")
    verificationCode = request.POST.get("verificationCode")
    # Dropped the debug print of the raw phone number (PII in server logs).
    user = UserPhone.objects.get(phone_number=userPhoneNumberToVerify)
    isValidCode = int(user.verificationNumber) == int(verificationCode)
    if isValidCode:
        # TODO: This token should have an expiration time.
        # Bug fix: the previous format string had no placeholder, so every
        # user received the same constant token.
        # NOTE(review): randint() is not cryptographically secure; consider
        # the `secrets` module for auth tokens.
        newMagicTokenForThisUser = "{}".format(randint(0, 999999999))
        # Bug fix: only rotate the stored token on a *successful*
        # verification, so a failed guess cannot invalidate a previously
        # issued token.
        user.token = newMagicTokenForThisUser
        user.save()
        response = JsonResponse({"authToken": newMagicTokenForThisUser})
        response["Access-Control-Allow-Origin"] = "*"
        return response
    else:
        response = HttpResponse()
        response.status_code = 401
        return response
def get_balance(request):
    """Demo endpoint: read a MetaCoin balance, send 200 coins, and report
    the before/after balances as JSON ({'meta': ..., 'data': ...}).

    NOTE(review): `metacoin_abi` is not defined in this function (its
    definition exists only in commented-out code in the original), so it
    must be a module-level name or this view raises NameError.  All
    addresses, the amount (200) and the gas limit are hard-coded demo
    values — confirm before reuse.
    """
    body = {}
    data = {}
    meta = {}
    body["meta"] = meta
    body["data"] = data
    w3 = Web3(HTTPProvider("http://localhost:8545"))
    metacoin_instance = w3.eth.contract(
        metacoin_abi,
        "0x016ed8e1ece5a5962584143e87ccf20f5a0d048a",
        ContractFactoryClass=ConciseContract)
    data['old_balance'] = metacoin_instance.getBalance(
        "0xc5af40009043617a2042634aa0e88eae350334ff",
        call={'from': "0xc5af40009043617a2042634aa0e88eae350334ff"})
    tx = metacoin_instance.sendCoin(
        "0x2483144f5c99e7185aa47ceb3ecad347dfc3f668",
        200,
        transact={'from': "0xc5af40009043617a2042634aa0e88eae350334ff",
                  'gas': 410000})
    data['tx'] = tx
    transaction_filter = eth.TransactionFilter(tx)
    data['new_balance'] = metacoin_instance.getBalance(
        "0x2483144f5c99e7185aa47ceb3ecad347dfc3f668",
        call={'from': "0x2483144f5c99e7185aa47ceb3ecad347dfc3f668"})
    # The original set info = "Success" unconditionally right before testing
    # it, so its 400/HttpResponseBadRequest branch was unreachable; only the
    # success path is kept.
    meta['code'] = "200"
    meta['message'] = "ok"
    res = JsonResponse(body)
    res['Access-Control-Allow-Origin'] = '*'
    return res
def read_smart(request):
    """Report the most recent temperature reading as JSON.

    Reads the module-level `temperature` (written elsewhere) and attaches a
    permissive CORS header for browser clients.
    """
    global temperature
    print(f"Returned {temperature}")
    resp = JsonResponse({"temp": temperature})
    resp['Access-Control-Allow-Origin'] = '*'
    return resp
def producer(request, fromAddress, toAddress, assetNameHex, assetQtyHex, metaDataHex): """ qty, assetName should be dict assetIds should be dicts unhex them but for now just do single assets """ # assetNameList = assetNameHex.decode('hex').split(', ') # assetQtyList = assetQtyHex.decode('hex').split(', ') # # should be able to many many assets. # # that's too hard at this hour # asset_dict = {} # for i in range(len(assetNameList)): # asset_dict[assetNameList[i]] = assetQtyList[i] assetName = str(assetNameHex.decode('hex')) assetQty = str(assetQtyHex.decode('hex')) # should be able to many many assets. # that's too hard at this hour asset_dict = {} asset_dict[assetName] = assetQty metaData = metaDataHex.decode('hex') fromAddress = '1MXuLZpXkSCrmKxV8tLwQkFCrGdcKoAz9c6ZAu' print 'fromAddress', fromAddress print 'toAddress length is ', len(toAddress) print 'assetNameHex as str ', str(assetNameHex) print 'assetQtyHex', assetQtyHex print 'assetQty', assetQty print 'metaDataHex', metaDataHex print 'asset_dict', str(asset_dict) print 'metaData', metaData # step 1 - get the hex hex_p = subprocess.Popen([ 'multichain-cli', 'food2', 'sendwithmetadatafrom', fromAddress, toAddress, '"' + str(asset_dict) + '"', metaData ], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = hex_p.communicate() print out big_string = ''.join(out.split('\n\n')[1]).strip() resp = json.loads(big_string) # step 3 - index into json to get either prev txid or coinbase new_txid = resp['vin'][0]['txid'] sender_address = resp['vout'][1]['scriptPubKey']['addresses'][0] print "Prev txid is ", new_txid print "Sender address is ", sender_address prev_owner = User.objects.get(address=sender_address).name print "sender name is ", prev_owner # step 4 - look for coinbase response = JsonResponse(resp) response.__setitem__("Access-Control-Allow-Origin", "*") return response
# NOTE(review): fragment — this is the tail of a view whose `def` line is not
# visible here.  Names big_json2, json_p, prev_owner and txid must be bound
# earlier in that function (or at module level) for this to run.
resp = json.loads(big_json2)
del json_p
# step 3 - index into json to get either prev txid or coinbase
new_txid = resp['vin'][0]['txid']
try:
    sender_address = resp['vout'][1]['scriptPubKey']['addresses'][0]
except Exception, e:  # Python 2 except syntax
    print "No more transactions"
finally:
    # NOTE(review): if the lookup above raised, sender_address is unbound
    # here and this print itself raises NameError.
    print "Sender address is ", sender_address
# prev_owner = User.objects.get(address=sender_address).name
# print "sender name is ", prev_owner
print 'time of transcation is'
owner_data = {}
# NOTE(review): prev_owner is only assigned in the commented-out line above;
# unless it is defined upstream this raises NameError.
owner_data['owner'] = prev_owner
# NOTE(review): this key deliberately(?) contains a trailing space — confirm.
owner_data['sender_address '] = sender_address
owner_data['timestamp'] = '1341345'
# Walk the chain: remember the current txid and advance to the previous one.
old_txid = txid
txid = new_txid
print '\n\n\n\n\n\n'
response = JsonResponse(resp)
response.__setitem__("Access-Control-Allow-Origin", "*")
return response
# NOTE(review): fragment — tail of a view whose `def` line is not visible.
# Names big_json, json_p, prev_owner and txid must be bound earlier in that
# function (or at module level) for this to run.
# Keep only the JSON segment after the first double newline of the CLI output.
big_json2 = ''.join(big_json.split('\n\n')[1])
resp = json.loads(big_json2)
del json_p
# step 3 - index into json to get either prev txid or coinbase
new_txid = resp['vin'][0]['txid']
try:
    sender_address = resp['vout'][1]['scriptPubKey']['addresses'][0]
except Exception, e:  # Python 2 except syntax
    print "No more transactions"
finally:
    # NOTE(review): if the lookup above raised, sender_address is unbound
    # here and this print itself raises NameError.
    print "Sender address is ", sender_address
# prev_owner = User.objects.get(address=sender_address).name
# print "sender name is ", prev_owner
print 'time of transcation is'
owner_data = {}
# NOTE(review): prev_owner is only assigned in the commented-out line above;
# unless it is defined upstream this raises NameError.
owner_data['owner'] = prev_owner
# NOTE(review): this key deliberately(?) contains a trailing space — confirm.
owner_data['sender_address '] = sender_address
owner_data['timestamp'] = '1341345'
# Walk the chain: remember the current txid and advance to the previous one.
old_txid = txid
txid = new_txid
print '\n\n\n\n\n\n'
response = JsonResponse(resp)
response.__setitem__("Access-Control-Allow-Origin", "*")
return response
def profile(request):
    """Render the user's profile page.

    Behaviour is driven by query parameters:
      * ?repo=<url> (and no 'name'): return a JsonResponse listing the
        ontologies found in that repo, each annotated with publish state.
      * ?delete-name=<name>: delete a reserved publish name owned by the
        current user and remove its published folder from disk.
      * otherwise: fall through and render profile.html.

    Side effects: normalises/deletes malformed Repo records, clobbers
    request.GET at the end, and flushes stdout/stderr.  Python 2 only
    (print statements).
    """
    print '************* profile ************'
    print str(datetime.today())
    error_msg = ''
    user = request.user
    if 'repo' in request.GET and 'name' not in request.GET:
        # asking for ontologies in a repo
        repo = request.GET['repo']
        print 'repo :<%s>' % (repo)
        print 'got the repo'
        try:
            print 'trying to validate repo'
            # Reject repos that are not registered to the logged-in user.
            hackatt = True
            for repooo in user.repos:
                if repooo.url == repo:
                    hackatt = False
                    break
            if hackatt:
                # trying to access a repo that does not belong to the user
                # currently logged in
                return render(request, 'msg.html', {
                    'msg':
                    'This repo is not added, please do so in the main page'
                })
            print 'try to get abs folder'
            # Lazily (re)initialise the GitHub client from the session token.
            if type(autoncore.g) == type(None):
                print 'access token is: ' + request.session['access_token']
                update_g(request.session['access_token'])
            try:
                ontologies = parse_online_repo_for_ontologies(repo)
                print 'ontologies: ' + str(len(ontologies))
                arepo = Repo.objects.get(url=repo)
                pnames = PublishName.objects.filter(user=user, repo=arepo)
                # Annotate each ontology with its publish state and name.
                for o in ontologies:
                    print '--------\n%s\n' % o
                    o['published'] = False
                    o['pname'] = ''
                    for pn in pnames:
                        if pn.ontology == o[
                                'ontology']:  # to compare without the leading /
                            o['published'] = True
                            o['pname'] = pn.name
                            break
                    for d in o:
                        print ' ' + d + ': ' + str(o[d])
                print 'testing redirect'
                print 'will return the Json'
                jresponse = JsonResponse({'ontologies': ontologies})
                jresponse.__setitem__('Content-Length', len(jresponse.content))
                sys.stdout.flush()
                sys.stderr.flush()
                return jresponse
            except Exception as e:
                # Could not parse the repo: mark it invalid and answer with
                # an empty ontology list instead of failing the request.
                print "exception in getting the ontologies for the repo: " + str(
                    repo)
                print "exception: " + str(e)
                arepo = Repo.objects.get(url=repo)
                arepo.state = 'Invalid repository'
                arepo.save()
                ontologies = []
                jresponse = JsonResponse({'ontologies': ontologies})
                jresponse.__setitem__('Content-Length', len(jresponse.content))
                sys.stdout.flush()
                sys.stderr.flush()
                return jresponse
        except Exception as e:
            print 'exception: ' + str(e)
    # NOTE(review): a large commented-out `elif 'name' in request.GET:`
    # branch lived here.  It reserved a publish name for an ontology:
    # validated repo ownership and name uniqueness, cloned the repo, patched
    # the documentation .htaccess RewriteBase, moved the generated docs under
    # publish_dir/<name>, and recorded a PublishName object.  The disabled
    # dead code is removed; this summary is kept in its place.
    elif 'delete-name' in request.GET:
        name = request.GET['delete-name']
        p = PublishName.objects.filter(name=name)
        if len(p) == 0:
            error_msg += 'This name is not reserved'
        elif p[0].user.id == user.id:
            # NOTE(review): save() after delete() on the same document looks
            # suspicious — confirm against the ODM's intended usage.
            pp = p[0]
            pp.delete()
            pp.save()
            # Remove the published documentation folder from disk.
            comm = 'rm -Rf ' + os.path.join(publish_dir, name)
            call(comm, shell=True)
        else:
            error_msg += 'You are trying to delete a name that does not belong to you'
    print 'testing redirect'
    # Normalise repo records into user/rrepo parts; drop malformed ones.
    repos = user.repos
    for r in repos:
        try:
            if len(r.url.split('/')) != 2:
                user.update(pull__repos=r)
                r.delete()
                user.save()
                continue
            r.user = r.url.split('/')[0]
            r.rrepo = r.url.split('/')[1]
        except:
            user.update(pull__repos=r)
            user.save()
    # NOTE(review): clobbering request.GET before rendering looks deliberate
    # but is unusual — confirm it is still needed.
    request.GET = []
    sys.stdout.flush()
    sys.stderr.flush()
    return render(
        request, 'profile.html', {
            'repos': repos,
            'pnames': PublishName.objects.filter(user=user),
            'error': error_msg,
            'manager': request.user.email in get_managers()
        })
def profile(request):
    """Render the user's profile page (variant with Themis results and
    RabbitMQ queue statistics for managers).

    Behaviour is driven by query parameters:
      * ?repo=<url> (and no 'name'): return a JsonResponse listing the
        ontologies found in that repo, annotated with publish state and
        Themis results.
      * ?delete-name=<name>: delete a reserved publish name owned by the
        current user and remove its published folder from disk.
      * otherwise: fall through and render profile.html.

    Side effects: normalises/deletes malformed Repo records, clobbers
    request.GET at the end, and flushes stdout/stderr.  Python 2 only
    (print statements).
    """
    print '************* profile ************'
    print str(datetime.today())
    error_msg = ''
    user = request.user
    if 'repo' in request.GET and 'name' not in request.GET:
        # asking for ontologies in a repo
        repo = request.GET['repo']
        print 'repo :<%s>' % (repo)
        print 'got the repo'
        try:
            print 'trying to validate repo'
            # Reject repos that are not registered to the logged-in user.
            hackatt = True
            for repooo in user.repos:
                if repooo.url == repo:
                    hackatt = False
                    break
            if hackatt:
                # trying to access a repo that does not belong to the user
                # currently logged in
                return render(request, 'msg.html', {
                    'msg':
                    'This repo is not added, please do so in the main page'
                })
            print 'try to get abs folder'
            # Lazily (re)initialise the GitHub client from the session token.
            if type(autoncore.g) == type(None):
                print 'access token is: ' + request.session['access_token']
                update_g(request.session['access_token'])
            try:
                ontologies = parse_online_repo_for_ontologies(repo)
                # Merge in Themis evaluation results for each ontology.
                ontologies = autoncore.add_themis_results(repo, ontologies)
                print 'ontologies: ' + str(len(ontologies))
                arepo = Repo.objects.get(url=repo)
                pnames = PublishName.objects.filter(user=user, repo=arepo)
                # Annotate each ontology with its publish state and name.
                for o in ontologies:
                    print '--------\n%s\n' % o
                    o['published'] = False
                    o['pname'] = ''
                    for pn in pnames:
                        if pn.ontology == o[
                                'ontology']:  # to compare without the leading /
                            o['published'] = True
                            o['pname'] = pn.name
                            break
                    for d in o:
                        print ' ' + d + ': ' + str(o[d])
                print 'testing redirect'
                print 'will return the Json'
                jresponse = JsonResponse({'ontologies': ontologies})
                jresponse.__setitem__('Content-Length', len(jresponse.content))
                sys.stdout.flush()
                sys.stderr.flush()
                return jresponse
            except Exception as e:
                # Could not parse the repo: mark it invalid and answer with
                # an empty ontology list instead of failing the request.
                print "exception in getting the ontologies for the repo: " + str(
                    repo)
                print "exception: " + str(e)
                arepo = Repo.objects.get(url=repo)
                arepo.state = 'Invalid repository'
                arepo.save()
                ontologies = []
                jresponse = JsonResponse({'ontologies': ontologies})
                jresponse.__setitem__('Content-Length', len(jresponse.content))
                sys.stdout.flush()
                sys.stderr.flush()
                return jresponse
        except Exception as e:
            print 'exception: ' + str(e)
    elif 'delete-name' in request.GET:
        name = request.GET['delete-name']
        p = PublishName.objects.filter(name=name)
        if len(p) == 0:
            error_msg += 'This name is not reserved'
        elif p[0].user.id == user.id:
            # NOTE(review): save() after delete() on the same document looks
            # suspicious — confirm against the ODM's intended usage.
            pp = p[0]
            pp.delete()
            pp.save()
            # Remove the published documentation folder from disk.
            comm = 'rm -Rf ' + os.path.join(publish_dir, name)
            call(comm, shell=True)
        else:
            error_msg += 'You are trying to delete a name that does not belong to you'
    print 'testing redirect'
    # Normalise repo records into user/rrepo parts; drop malformed ones.
    repos = user.repos
    for r in repos:
        try:
            if len(r.url.split('/')) != 2:
                user.update(pull__repos=r)
                r.delete()
                user.save()
                continue
            r.user = r.url.split('/')[0]
            r.rrepo = r.url.split('/')[1]
        except:
            user.update(pull__repos=r)
            user.save()
    # NOTE(review): clobbering request.GET before rendering looks deliberate
    # but is unusual — confirm it is still needed.
    request.GET = []
    sys.stdout.flush()
    sys.stderr.flush()
    # Managers additionally see queue/process statistics; -2 marks "hidden".
    if request.user.email in get_managers():
        num_pending_msgs = rabbit.get_pending_messages()
        num_of_rabbit_processes = get_num_of_processes_of_rabbit()
    else:
        num_pending_msgs = -2
        num_of_rabbit_processes = -2
    return render(
        request, 'profile.html', {
            'repos': repos,
            'pnames': PublishName.objects.filter(user=user),
            'num_pending_msgs': num_pending_msgs,
            'num_of_rabbit_processes': num_of_rabbit_processes,
            'error': error_msg,
            'manager': request.user.email in get_managers()
        })
def producer(request, fromAddress, toAddress, assetNameHex, assetQtyHex, metaDataHex): """ qty, assetName should be dict assetIds should be dicts unhex them but for now just do single assets """ # assetNameList = assetNameHex.decode('hex').split(', ') # assetQtyList = assetQtyHex.decode('hex').split(', ') # # should be able to many many assets. # # that's too hard at this hour # asset_dict = {} # for i in range(len(assetNameList)): # asset_dict[assetNameList[i]] = assetQtyList[i] assetName = str(assetNameHex.decode('hex')) assetQty = str(assetQtyHex.decode('hex')) # should be able to many many assets. # that's too hard at this hour asset_dict = {} asset_dict[assetName] = assetQty metaData = metaDataHex.decode('hex') fromAddress = '1MXuLZpXkSCrmKxV8tLwQkFCrGdcKoAz9c6ZAu' print 'fromAddress', fromAddress print 'toAddress length is ', len(toAddress) print 'assetNameHex as str ', str(assetNameHex) print 'assetQtyHex', assetQtyHex print 'assetQty', assetQty print 'metaDataHex', metaDataHex print 'asset_dict', str(asset_dict) print 'metaData', metaData # step 1 - get the hex hex_p = subprocess.Popen(['multichain-cli', 'food2', 'sendwithmetadatafrom', fromAddress, toAddress, '"' + str(asset_dict) + '"', metaData], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = hex_p.communicate() print out big_string = ''.join(out.split('\n\n')[1]).strip() resp = json.loads(big_string) # step 3 - index into json to get either prev txid or coinbase new_txid = resp['vin'][0]['txid'] sender_address = resp['vout'][1]['scriptPubKey']['addresses'][0] print "Prev txid is ", new_txid print "Sender address is ", sender_address prev_owner = User.objects.get(address=sender_address).name print "sender name is ", prev_owner # step 4 - look for coinbase response = JsonResponse(resp) response.__setitem__("Access-Control-Allow-Origin", "*") return response