Example No. 1
def replace_books(isbn):
    request_data = request.get_json()
    if (not valid_put_request_data(request_data)):
        invalidbookObject = {
            "error":
            "valid book request must be passed in the request",
            "helpString":
            "Data passed id similar to this{'name':tfgh,'price':7.88}"
        }
        response = Response(json.dumps(invalidbookObject),
                            status=400,
                            mimetype='application/json')
        return response
    # new_book={
    #     'name':request_data['name'],
    #     'price':request_data['price'],
    #     'isbn':isbn
    # }
    # i = 0
    # for book in books:
    #     currentisbn = ["isbn"]
    #     # print(currentisbn)
    #     if currentisbn == isbn:
    #         # print("foundbook")
    #         books[i] = new_book
    #     i+=1
    Book.replace_book(isbn, request_data['name'], request_data['price'])
    response = Response("", status=204)
    return response
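The handler above delegates validation to valid_put_request_data, which is not included in the snippet. A minimal sketch of what such a validator might look like, assuming a PUT payload must carry both 'name' and 'price' (the original project's exact checks are not shown):

# Hypothetical reconstruction; the original validator is not part of the snippet.
def valid_put_request_data(request_data):
    # A PUT replaces the whole resource, so both fields must be present.
    if request_data is None:
        return False
    return "name" in request_data and "price" in request_data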
Example No. 2
def add_task(list_id):
	"""Add a task to a list."""
	try:
		DB.lists[list_id].tasks.append(Task(text=request.form.get('text')))
	except IndexError:
		return json.dumps({'status': 'FAIL'})
	return json.dumps({'status': 'OK'})
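add_task relies on a Flask request object and a DB store that the snippet does not show. A minimal, self-contained sketch of how such a route could be wired up and exercised with Flask's test client (the app, URL rule, and in-memory classes below are assumptions for illustration, not the original project's code):

# Hypothetical wiring around the add_task example above.
import json
from flask import Flask, request

app = Flask(__name__)

class Task:
    def __init__(self, text):
        self.text = text

class TaskList:
    def __init__(self):
        self.tasks = []

class DB:
    lists = [TaskList()]  # one in-memory list with id 0

@app.route('/lists/<int:list_id>/tasks', methods=['POST'])
def add_task(list_id):
    """Add a task to a list."""
    try:
        DB.lists[list_id].tasks.append(Task(text=request.form.get('text')))
    except IndexError:
        return json.dumps({'status': 'FAIL'})
    return json.dumps({'status': 'OK'})

if __name__ == '__main__':
    with app.test_client() as client:
        print(client.post('/lists/0/tasks', data={'text': 'buy milk'}).get_data(as_text=True))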
Example No. 3
    def data_received(self, data):
        message = data.decode()
        print('Data received: {!r}'.format(message))
        JSON_data = json.loads(message)
        global trigger_bool

        if (trigger_bool):
            #self.client.settimeout(20)
            ack_packet = json.dumps({"trigger_call": True})
            print('\n\n\nsending data back to the client\n\n' + ack_packet)
            self.transport.write(ack_packet.encode("utf-8"))
            trigger_bool = False

        if ('BLANK' not in JSON_data):
            if 'map_name' in JSON_data:
                pub_MAPPING_STATUS.publish(True)
                print(JSON_data["map_name"])
                temp_variable = JSON()
                temp_variable.MAP_NAME = str(JSON_data['map_name'])
                temp_variable.MAP_CREATOR = str(JSON_data['map_created_by'])
                temp_variable.GPS_LAT = float(
                    JSON_data['last_location']['system_latitude'])
                temp_variable.GPS_LONG = float(
                    JSON_data['last_location']['system_longitude'])
                temp_variable.calling_number = str(JSON_data['calling_number'])
                temp_variable.call_duration = float(JSON_data['call_duration'])
                temp_variable.TIME_BASED_TRIGGER = (
                    JSON_data['trigger_time_based'])
                temp_variable.DISTANCE_BASED_TRIGGER = (
                    JSON_data['trigger_distance_based'])
                print(temp_variable)
                pub_JSON.publish(temp_variable)
                ack_packet = json.dumps(
                    {"Data_recieved_by_raspberry_pi": True})
                self.transport.write(ack_packet.encode())

                #print ("x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x")

            elif 'stop_mapping' in JSON_data:
                print("stop mapping ")
                ack_packet = json.dumps({"stopped_mapping": True})
                print('\n\n\nsending data back to the client\n\n' + ack_packet)
                self.transport.write(ack_packet.encode())
                pub_MAPPING_STATUS.publish(False)

        print('Close the client socket')
        self.transport.close()
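Judging only from the keys this handler reads, the packet a client sends would need roughly the following shape (the field values here are invented for illustration; the real client is not shown):

# Illustrative payload only; the values are made up.
example_packet = {
    "map_name": "warehouse_floor_2",
    "map_created_by": "operator_1",
    "last_location": {
        "system_latitude": 52.5200,
        "system_longitude": 13.4050,
    },
    "calling_number": "+10000000000",
    "call_duration": 42.0,
    "trigger_time_based": False,
    "trigger_distance_based": True,
}
# The client would send json.dumps(example_packet).encode() over the socket.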
Example No. 4
	def copy_and_send(self):
		if self.ackcount <= -10 or self.paused:
			return

		# copy buffer
		self.protocol.factory.copy_rpi_buffers(self.associated_rpi, self.streaming_buffer_read, self.streaming_buffer_write)


		if len(self.streaming_buffer_read) > 0 or len(self.streaming_buffer_write) > 0: # if there's any input?
			msg = {'cmd': common_protocol.ServerCommands.WRITE_DATA}
			msg['read'] = self.streaming_buffer_read
			msg['write'] = self.streaming_buffer_write
			self.ackcount -= 1
			self.protocol.sendMessage(json.dumps(msg))

			# let's keep dumping until we run out of data
			reactor.callLater(0, self.copy_and_send)

		else:
			self.pause_streaming()
Example No. 5
    def _collect(self):
        idx = 0
        dashboards = self.dashboards()
        for board in dashboards:
            projects = getattr(board, 'projects')
            if len(projects):
                for p in projects:
                    report = self._create_report()
                    setattr(report, 'projectId', p['id'])
                    metrics = {
                        'dashboard': api.content.get_uuid(obj=board),
                        'projectId': p['id'],
                        'created': '',
                        'updated': '',
                        'metrics': list()
                    }

                    setattr(report, 'report', json.dumps(metrics))
                modified(board)
                board.reindexObject(idxs='modified')
        return idx
Example No. 6
def jprint(obj):
    texto = json.dumps(obj)
    print(texto)
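jprint simply serializes its argument and prints it. When the goal is human-readable output, json.dumps also accepts indent and ensure_ascii; a small variation (not part of the original example):

import json

def jprint_pretty(obj):
    # Same idea as jprint, but formatted for reading.
    print(json.dumps(obj, indent=2, ensure_ascii=False))

jprint_pretty({"nombre": "Ana", "edad": 30})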
Example No. 7
def fail(msg="fail"):
    return json.dumbs({"code": 500, "msg": msg})
Example No. 8
def fail(msg="fail"):
    return json.dumbs({"code":500,"msg":msg})
Example No. 9
def unexpected():
    input.croak("Unexpected token: " + json.dumps(input.peek()))
Example No. 10
    def execute(trial=False):
        '''Retrieve some data sets (not using the API here for the sake of simplicity).'''
        startTime = datetime.datetime.now()

        # Set up the database connection.
        client = dml.pymongo.MongoClient()
        repo = client.repo
        repo.authenticate('crussack', 'crussack')

        #sidewalk data
        url = 'http://bostonopendata-boston.opendata.arcgis.com/datasets/6aa3bdc3ff5443a98d506812825c250a_0.geojson'
        response = urllib.request.urlopen(url).read().decode("utf-8")
        r = json.loads(response)
        s = json.dumps(r, sort_keys=True, indent=2)
        repo.dropCollection("getSidewalkInventory")
        repo.createCollection("getSidewalkInventory")
        repo['crussack.getSidewalkInventory'].insert_many(r['features'])
        #repo['crussack.getSidewalkInventory'].metadata({'complete':True})
        #print(repo['crussack.getSidewalkInventory'].metadata())

        #university data
        url = 'http://bostonopendata-boston.opendata.arcgis.com/datasets/cbf14bb032ef4bd38e20429f71acb61a_2.geojson'
        response = urllib.request.urlopen(url).read().decode("utf-8")
        r = json.loads(response)
        s = json.dumps(r, sort_keys=True, indent=2)
        repo.dropCollection("getUniversities")
        repo.createCollection("getUniversities")
        repo['crussack.getUniversities'].insert_many(r['result']['records'])
        #repo['crussack.getUniversities'].metadata({'complete':True})
        #print(repo['crussack.getUniversities'].metadata())

        #traffic data
        url = 'http://bostonopendata-boston.opendata.arcgis.com/datasets/de08c6fe69c942509089e6db98c716a3_0.geojson'
        response = urllib.request.urlopen(url).read().decode("utf-8")
        r = json.loads(response)
        s = json.dumps(r, sort_keys=True, indent=2)
        repo.dropCollection("getTrafficData")
        repo.createCollection("getTrafficData")
        repo['crussack.getTrafficData'].insert_many(r['features'])
        #repo['crussack.getTrafficData'].metadata({'complete':True})
        #print(repo['crussack.getTrafficData'].metadata())

        #mbta bus data
        url = 'http://realtime.mbta.com/developer/api/v2/routes?api_key=' + api_key + '&format=json'
        response = urllib.request.urlopen(url).read().decode("utf-8")
        r = json.loads(response)
        s = json.dumps(r, sort_keys=True, indent=2)
        repo.dropCollection("getBusData")
        repo.createCollection("getBusData")
        repo['crussack.getBusData'].insert_many(r)

        #street light data
        url = 'https://data.boston.gov/datastore/odata3.0/c2fcc1e3-c38f-44ad-a0cf-e5ea2a6585b5?$top=100&$format=json'
        response = urllib.request.urlopen(url).read().decode("utf-8")
        r = json.loads(response)
        s = json.dumps(r, sort_keys=True, indent=2)
        repo.dropCollection("getLightData")
        repo.createCollection("getLightData")
        repo['crussack.getLightData'].insert_many(r['value'])

        repo.logout()
        endTime = datetime.datetime.now()

        return {"start": startTime, "end": endTime}
Example No. 11
def save_raffle_data(data="{}"):
    with open("raffle-data.json", "w") as f:
        f.write(json.dumps(data))
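A matching loader is not shown in the example. A minimal counterpart using json.load, with the file name taken from the snippet and the missing-file fallback added as an assumption:

import json

def load_raffle_data():
    # Hypothetical counterpart to save_raffle_data().
    try:
        with open("raffle-data.json") as f:
            return json.load(f)
    except FileNotFoundError:
        return {}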
Example No. 12
def createJSON(dictt):
    return json.dumps(dictt)
Example No. 13
  parms = dict()
  parms['address'] = address
  if api_key is not False: parms['key'] = api_key
  url = serviceurl + urllib.parse.urlencode(parms)

  print('Retrieving', url)
  uh = urllib.request.urlopen(url)
  data = uh.read().decode()
  print('Retrieved', len(data), 'characters')

  try:
    js = json.loads(data)
  except:
    js = None

  if not js or 'status' not in js or js['status'] != 'OK' :
    print('=== Failure To Retrieve ===')
    print(data)
    continue

  print(json.dumps(js, indent=4))

  lat = js["results"][0]["geometry"]["location"]["lat"]
  lng = js["results"][0]["geometry"]["location"]["lng"]
  print('lat', lat, 'lng', lng)
  location = js['results'][0]['formatted_address']
  print(location)

# API key = AIzaSyDDbFOq7Q7AhUJ37raPAOzGqakP1r7eESg
Example No. 14
            error[commandIndexer] = msg

# Index xenforo post user
commandIndexer = command + " " + xenforoUser + " --rotate"
proc = subprocess.Popen(commandIndexer, shell=True, stdout=subprocess.PIPE)
proc.wait()
if proc.returncode == 0:
    success.append(True)
else:
    for msg in proc.stdout:
        error[commandIndexer] = msg

if (len(error) > 0):
    print ">>> Error: %r" % error
    mail(gmail_rev, "(FAILED) - SphinxSearch",
         "Error messages: " + json.dumbs(error))
else:
    mail(gmail_rev, "(COMPLETED) - SphinxSearch", "Index COMPLETED.")

# Check maximum limit size of index (max 4GB)
for part in range(0, shardPart):
    index = format(part, '02')

    if index == "00":
        commandIndexTool = executePath + "indextool --config " + sphinxConfigPath + " --dumpheader " + sphinxLocatePath + xenforoThread + ".sph"
    else:
        commandIndexTool = executePath + "indextool --config " + sphinxConfigPath + " --dumpheader " + sphinxLocatePath + xenforoThread + "_" + index + ".sph"

    proc = subprocess.Popen(commandIndexTool,
                            shell=True,
                            stdout=subprocess.PIPE)
Example No. 15
        for msg in proc.stdout:
            error[commandIndexer] = msg

# Index xenforo post user
commandIndexer = command + " " + xenforoUser + " --rotate"
proc = subprocess.Popen(commandIndexer, shell=True, stdout=subprocess.PIPE)
proc.wait()
if proc.returncode == 0:
    success.append(True)
else:
    for msg in proc.stdout:
        error[commandIndexer] = msg

if (len(error) > 0):
    print ">>> Error: %r" % error
    mail(gmail_rev, "(FAILED) - SphinxSearch", "Error messages: " + json.dumbs(error))
else:
    mail(gmail_rev, "(COMPLETED) - SphinxSearch", "Index COMPLETED.")

# Check maximum limit size of index (max 4GB)
for part in range(0, shardPart):
    index = format(part, '02')

    if index == "00":
        commandIndexTool = executePath + "indextool --config " + sphinxConfigPath + " --dumpheader " + sphinxLocatePath + xenforoThread + ".sph"
    else:
        commandIndexTool = executePath + "indextool --config " + sphinxConfigPath + " --dumpheader " + sphinxLocatePath + xenforoThread + "_" + index + ".sph"

    proc = subprocess.Popen(commandIndexTool, shell=True, stdout=subprocess.PIPE)
    outputString = proc.communicate()[0]
    proc.wait()
Example No. 16
            pos_com = 0
            neg_com = 0

            for num in decision_list:
                if (num < -0.15):
                    neg_com += 1
                if (num > 0.15):
                    pos_com += 1
            results.append({
                "title": article['title'],
                "url": article['url'],
                "all": all_com,
                "pos": pos_com,
                "neg": neg_com
            })

    print(json.dumps(results, indent=2, ensure_ascii=False), flush=True)


if __name__ == '__main__':
    if (len(sys.argv) < 2):
        print(json.dumps({"error": "no query"}))
    else:
        with open('python/model.bin', 'rb') as f:
            loaded_model = pickle.load(f)
            articles = getCommentsLenta(sys.argv[1], sys.argv[2], sys.argv[3])
            if (len(articles) != 0):
                toneComments(articles, loaded_model)
            else:
                print(json.dumps({}, indent=2, ensure_ascii=False), flush=True)
Example No. 17
	def wrap(request, *args, **kwargs):
		if request.user.is_authenticated():
			return view_function(request, *args, **kwargs)
		output = json.dumps({'not_authenticated': True})
		return HttpResponse(output, content_type = 'application/json')
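Since wrap closes over view_function, it is presumably the inner function of a decorator. A hedged sketch of the full decorator and its use on a view (the decorator name and the example view are illustrative, not from the original project):

import json
from django.http import HttpResponse

def ajax_login_required(view_function):
    # Hypothetical outer decorator around the wrap() shown above.
    def wrap(request, *args, **kwargs):
        # is_authenticated is a property on modern Django; the original snippet calls it as a method.
        if request.user.is_authenticated:
            return view_function(request, *args, **kwargs)
        output = json.dumps({'not_authenticated': True})
        return HttpResponse(output, content_type='application/json')
    return wrap

@ajax_login_required
def profile_json(request):
    return HttpResponse(json.dumps({'user': request.user.username}),
                        content_type='application/json')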