def data_download(self):
    """Save data from the disk queue into the PdfData table."""
    print("Starting data download")
    print("-" * 79)
    with self._app.app_context():
        while True:
            # Reopen the queue each iteration so producers can push in between.
            da_queue = FifoDiskQueue(
                os.path.dirname(__file__) + '/../queuedata')
            data = da_queue.pop()
            da_queue.close()
            if not data:
                print('sleep for 10')
                sleep(10)
            else:
                raw_data = json.loads(data.decode('utf-8'))
                info_log(self.logger.info, "Step1 Save into db Start",
                         raw_data['reqd_data'])
                self._save_pdf_data(raw_data['reqd_data'], raw_data['tags'],
                                    raw_data['instance_id'])
                info_log(self.logger.info, "Step1 Save into db End",
                         raw_data['reqd_data'])
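The loop above expects each queue entry to be a UTF-8 JSON document with reqd_data, tags, and instance_id keys. A minimal sketch of the matching producer side, assuming the same queuedata path (the function name and payload values here are illustrative, not from the original):

import json
import os

from queuelib import FifoDiskQueue

def enqueue_pdf_data(reqd_data, tags, instance_id):
    # Serialize to UTF-8 JSON, matching what data_download() decodes.
    payload = {'reqd_data': reqd_data, 'tags': tags, 'instance_id': instance_id}
    q = FifoDiskQueue(os.path.dirname(__file__) + '/../queuedata')
    q.push(json.dumps(payload).encode('utf-8'))
    q.close()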
def run(self):
    global global_l
    counter = 0
    while not self._stop_t:
        counter += 1
        time.sleep(round(self.client.delay - (self.client.delay / 4.), 2))
        # Every batch_size ticks, send one batch from the in-memory buffer.
        if counter % self.client.batch_size == 0 and self.client.connection_error is False:
            print("sending data from thread {}:{}".format(
                self.client.cls_name(), id(self.client)))
            self.client.send_data(global_l[:self.client.batch_size])
            global_l = global_l[self.client.batch_size:]
            counter = 0
        # Spill overflow to disk when the buffer grows past four batches.
        if len(global_l) > 4 * self.client.batch_size:
            fq = FifoDiskQueue("{}.fifo.sql".format(self.client.name))
            for obj in global_l[:self.client.batch_size * 2]:
                print("SAVED: {}".format(obj))
                fq.push(obj)
            global_l = global_l[self.client.batch_size * 2:]
            fq.close()
        # Periodically pull spilled items back into the buffer.
        if (counter + 2) % self.client.batch_size == 0:
            fq = FifoDiskQueue("{}.fifo.sql".format(self.client.name))
            # FifoDiskQueue has no pull(); pop() one element at a time.
            for _ in range(2):
                elem = fq.pop()
                if elem is None:
                    break
                print("PULL ELEM", elem)
                global_l.append(elem)
            print("DB SIZE", len(fq))
            fq.close()
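The same overflow pattern in isolation: a bounded in-memory buffer with the disk queue used only as a spill area. A minimal sketch with hypothetical names (buffer, spill_path, batch_size are assumptions, not from the original thread class):

from queuelib import FifoDiskQueue

def spill(buffer, spill_path, batch_size):
    """Move the oldest items to disk when the buffer exceeds four batches."""
    if len(buffer) > 4 * batch_size:
        fq = FifoDiskQueue(spill_path)
        for obj in buffer[:2 * batch_size]:
            fq.push(obj)  # items must already be bytes
        del buffer[:2 * batch_size]
        fq.close()

def refill(buffer, spill_path, count):
    """Move up to `count` spilled items back into the buffer."""
    fq = FifoDiskQueue(spill_path)
    for _ in range(count):
        elem = fq.pop()
        if elem is None:
            break
        buffer.append(elem)
    fq.close()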
def pop(queue):
    queue = FifoDiskQueue(queue)
    async_spec_bytes = queue.pop()
    queue.close()
    if async_spec_bytes:
        return json.loads(async_spec_bytes.decode())
    return None
def run(self):
    queue = FifoDiskQueue(self.queue)
    while True:
        if self.query_cancel():
            break
        qe = queue.pop()
        if qe is None:
            break
        task = qe.decode('latin1')
        os.system(task)  # each queue entry is one shell command line
    queue.close()
def run(self):
    while True:
        queue = FifoDiskQueue("{}.fifo.sql".format(self.name))
        if len(queue) > 0 and self.is_network_up():
            # FifoDiskQueue has no pull(); drain it with repeated pop() calls.
            messages = []
            msg = queue.pop()
            while msg is not None:
                messages.append(msg)
                msg = queue.pop()
            response = self.send_blocks_msg(messages)
            if response is not True:
                self.sync_failed(response, messages)
        queue.close()
        time.sleep(self.DELAY)
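One consequence of draining before sending is that a failed send has already removed the messages from disk. A sync_failed handler along these lines (a sketch, not from the original project) can push them back so the next pass retries them:

def sync_failed(self, response, messages):
    # Re-enqueue the undelivered messages so the next pass retries them.
    # Note: push() appends, so retried messages lose their original ordering
    # relative to anything queued in the meantime.
    queue = FifoDiskQueue("{}.fifo.sql".format(self.name))
    for msg in messages:
        queue.push(msg)
    queue.close()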
def addSettings():
    settings = request.form['settings']
    if settings != "null":
        setting_queue = FifoDiskQueue("settings_file")
        setting_queue.push(settings.encode(encoding='UTF-8'))
        setting_queue.close()
        return "success"
    return "No settings to update"
def getTab():
    # Drain the whole tab queue; entries are removed as they are read.
    tab = FifoDiskQueue("tab_file")
    content = []
    t = tab.pop()
    while t is not None:
        content.append(t.decode(encoding='UTF-8'))
        t = tab.pop()
    tab.close()
    return json.dumps(content)
def fetch_data(self):
    error = None
    tags = None
    try:
        if self.config['RANGE']:
            RANGE_NAME = self.config['SHEETNAME'] + "!" + self.config['RANGE']
        else:
            RANGE_NAME = self.config['SHEETNAME']
        # Returns (values, error); error is set when permission is missing.
        getValueMapping = self.get_sheetvalues(self.config['SHEETID'], RANGE_NAME)
        mappingError = getValueMapping[1]   # error in fetching mapping
        mappingValues = getValueMapping[0]  # mapping values list
        if not mappingError:
            raw_data = mappingValues
            # Build a dict per data row, keyed by the header row.
            column_names = raw_data[0]
            for data in raw_data[2:]:
                single_data = {col: data[i] for i, col in enumerate(column_names)}
            tags = self.get_tags()
            all_data = dict()
            all_data['req_data'] = single_data  # note: only the last row survives the loop
            all_data.update(self.config)        # merge config into the row data
            raw_data = dict()
            raw_data['reqd_data'] = all_data
            raw_data['tags'] = tags
            q = FifoDiskQueue(
                os.path.dirname(__file__) + '/../../queuedata')
            q.push(json.dumps(raw_data).encode('utf-8'))
            q.close()
        else:
            error = "No Mapping details found"
    except Exception:
        error = "Failed to fetch mapping details"
        mappingValues = None
    return error
def dataDownload(self):
    print("Starting data download")
    print("-" * 79)
    with self._app.app_context():
        while True:
            q = FifoDiskQueue(os.path.dirname(__file__) + '/../queuedata')
            data = q.pop()
            q.close()
            if not data:
                print('sleep for 10')
                sleep(10)
            else:
                raw_data = json.loads(data.decode('utf-8'))
                self.save_pdf_data(raw_data['reqd_data'], raw_data['tags'])
def addTab():
    drink_name = request.form['drink_name']
    if drink_name != "null":
        conn = getdbConn()
        tab = FifoDiskQueue("tab_file")
        # Parameterized query avoids SQL injection from user input.
        drink_id = conn.execute(
            'SELECT id FROM Drink WHERE name LIKE ? LIMIT 1;',
            (drink_name,)).fetchone()
        if drink_id is None:
            tab.close()
            conn.close()
            return json.dumps({'success': False, 'error': "NoDrinkInDB"}), 400, {'ContentType': 'application/json'}
        tab.push(str(drink_id[0]).encode(encoding='UTF-8'))
        tab.close()
        conn.close()
        return json.dumps({'success': True}), 200, {'ContentType': 'application/json'}
    return json.dumps({'success': False, 'error': "DrinkNameNull"}), 400, {'ContentType': 'application/json'}
def tap():
    cabinet_id = request.args.get("cabinet_id")
    if cabinet_id is not None:
        tab = FifoDiskQueue("tab_file")
        current_drink_id = tab.pop()
        tab.close()
        if current_drink_id is not None:
            conn = getdbConn()
            drink_id = int(current_drink_id.decode(encoding='UTF-8'))
            cursor = conn.execute(getSerachString(drink_id))
            drink_name = None
            ingredients = []
            for row in cursor:
                drink_name = row[0]
                ingredients.append({"name": row[1], "ratio": row[2]})
            conn.close()
            drink = {"drink_name": drink_name, "ingredients": ingredients}
            response = {"settings": "null", "drink": drink}
            return json.dumps(response)
        return json.dumps({"settings": "null", "drink": "null"})
    return json.dumps({'success': False, 'error': "Must have cabinet_id to access tap"}), 400, {'ContentType': 'application/json'}
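Exercising the tab endpoints end to end, assuming addTab and tap are routed at /addTab and /tap on a local dev server (the routes and port are assumptions; the @app.route decorators are not shown above):

import requests

base = "http://localhost:5000"  # assumed dev server address
# Queue a drink, then pop it back as the next pour.
requests.post(base + "/addTab", data={"drink_name": "Margarita"})
print(requests.get(base + "/tap", params={"cabinet_id": 1}).json())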
def push(queue, async_spec):
    queue = FifoDiskQueue(queue)
    queue.push(json.dumps(async_spec).encode())
    queue.close()
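Round-tripping a spec through this push helper and the pop helper shown earlier, assuming a JSON-serializable dict and a writable queue path (/tmp/asyncq is just an example):

spec = {"job": "resize", "args": [800, 600]}
push("/tmp/asyncq", spec)
assert pop("/tmp/asyncq") == spec   # JSON round-trip preserves the dict
assert pop("/tmp/asyncq") is None   # queue is empty again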
def fetch_data(self):
    error = None
    tags = None
    try:
        # Round-trip through JSON to turn unicode payloads into plain dicts.
        reqData = json.loads(json.dumps(request.json))
        formId = reqData['formId']  # distinguishes template documents and mapping sheets
        newReqData = reqData['data'][0]  # payload shape: [{values}]
        instanceId = newReqData['instanceID']  # instance id for searching routes
        newReqData = json.loads(json.dumps(newReqData))
        userName = newReqData['username']
        # Strip the time component from the submission date.
        formSubmissionDate = newReqData['*meta-submission-date*']
        endIndex = formSubmissionDate.find('T')
        formSubmissionDate = formSubmissionDate[:endIndex]
        newReqData['*meta-submission-date*'] = formSubmissionDate
        myDict = {}
        for k, v in newReqData.items():
            if type(v) is dict:
                for k1, v1 in v.items():
                    if k1 == "url":
                        # Rewrite the host portion of attachment URLs.
                        baseUrl = 'http://aggregate.cttsamagra.xyz:8080/'
                        indexEnd = v1.find(":8080/") + 6
                        myDict[k] = v1.replace(v1[:indexEnd], baseUrl)
            elif type(v) is float or type(v) is int:
                myDict[k] = str(v)
            elif type(v) is list:
                myDict[k] = str(v[0])  # keep the first element as a string
            else:
                myDict[k] = v if v is not None else "NO_TEXT_FOUND"
        # Calculate UDISE from its database, then the distance from UDISE.
        calculated_distance = 'Not available'
        myDict['calculated_distance'] = calculated_distance
        all_data = dict()
        all_data['req_data'] = myDict
        all_data['FORMID'] = formId
        all_data['INSTANCEID'] = instanceId
        all_data['USERNAME'] = userName
        all_data['FORMSUBMISSIONDATE'] = formSubmissionDate
        self.raw_data = all_data
        tags = self.get_tags()
        all_data.update(self.config)
        raw_data = dict()
        raw_data['reqd_data'] = all_data
        raw_data['tags'] = tags
        q = FifoDiskQueue(os.path.dirname(__file__) + '/../../queuedata')
        q.push(json.dumps(raw_data).encode('utf-8'))
        q.close()
    except Exception:
        error = "Failed to fetch mapping details"
    return error
import os
import sys

import networkx as nx
from queuelib import FifoDiskQueue

# Seed the disk queue with every file under the input directory.
# Note: assumes sys.argv[1] ends with a path separator.
q = FifoDiskQueue("diskFile3")
for root, dirs, files in os.walk(sys.argv[1]):
    for file in files:
        print(file)
        q.push((sys.argv[1] + file).encode('utf-8'))
q.close()

print("-----------------")

# Pop files two at a time and merge each pair into a new graph;
# stop once fewer than two files remain.
while True:
    q = FifoDiskQueue("diskFile3")
    file1 = q.pop()
    file2 = q.pop()
    q.close()
    if not (file1 and file2):
        break
    file1 = file1.decode('utf-8')
    file2 = file2.decode('utf-8')
    print(file1)
    print(file2)
    fileId1 = "_".join(file1.split("_")[1:5]).split(".")[0]
    fileId2 = "_".join(file2.split("_")[1:5]).split(".")[0]
    newFileId = sys.argv[2] + "_" + fileId1 + "_" + fileId2 + ".gpickle"
    print(newFileId)
    G1 = nx.read_gpickle(file1)
    G2 = nx.read_gpickle(file2)
    G1.add_nodes_from(G2.nodes(data=True))
    G1.add_edges_from(G2.edges(data=True))
    nx.write_gpickle(G1, newFileId)
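As written, one pass merges the graphs pairwise and the merged results never re-enter the queue, so the script only halves the number of graphs. Re-enqueueing each merged file, sketched below, would keep reducing until a single graph remains (a variant, not part of the original script):

# Inside the merge loop, after nx.write_gpickle(G1, newFileId):
q = FifoDiskQueue("diskFile3")
q.push(newFileId.encode('utf-8'))  # merged graph re-joins the queue for later pairs
q.close()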
def add_job(self, s):
    queue = FifoDiskQueue(self.queue)
    queue.push(s.encode('latin1'))
    queue.close()
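Putting add_job and the run loop shown earlier together: jobs are shell command strings, latin1-encoded on the way in and decoded before os.system. A usage sketch, assuming a worker class shaped like those two methods (Worker and its constructor are hypothetical):

w = Worker()              # hypothetical class holding self.queue, query_cancel(), etc.
w.add_job("echo hello")   # each job is one shell command line
w.add_job("ls -l /tmp")
w.run()                   # pops and executes jobs until the queue is empty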
APP_KEY = api_keys.APP_KEY
APP_SECRET = api_keys.APP_SECRET
OAUTH_TOKEN = api_keys.OAUTH_TOKEN
OAUTH_TOKEN_SECRET = api_keys.OAUTH_TOKEN_SECRET

twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)

selfies = ['1.png', '2.png', '3.png']

# Refill the disk queue when it runs empty, then take the next image name.
q = FifoDiskQueue("queuefile")
if len(q) == 0:
    for selfie in selfies:
        q.push(selfie.encode('utf-8'))  # FifoDiskQueue stores bytes
selfie = q.pop().decode('utf-8')
q.close()

@sched.scheduled_job('interval', minutes=10)
def revolve_avatar():
    avatar = open("selfies/" + selfie, 'rb')
    twitter.update_profile_image(image=avatar)

@sched.scheduled_job('cron', day_of_week='mon-sun', hour=5)
def evolve_banner():
    twitter.update_profile_banner_image(banner=image)  # `image` comes from elsewhere in the module

sched.start()
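One caveat: selfie is popped once at import time, so the interval job re-uploads the same image forever. A sketch of popping inside the job instead, assuming the same queuefile layout as above:

@sched.scheduled_job('interval', minutes=10)
def revolve_avatar():
    # Take the next image each run, refilling the queue once it empties.
    q = FifoDiskQueue("queuefile")
    if len(q) == 0:
        for s in selfies:
            q.push(s.encode('utf-8'))
    next_selfie = q.pop().decode('utf-8')
    q.close()
    with open("selfies/" + next_selfie, 'rb') as avatar:
        twitter.update_profile_image(image=avatar)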
def save(self, messages):
    # Persist a batch of messages (bytes) to the disk-backed queue.
    queue = FifoDiskQueue(self.database_name)
    for message in messages:
        queue.push(message)
    queue.close()
def run(self, fn, sleep=1):
    while True:
        # Reopen per batch so consumers see the data as soon as close() flushes it.
        queue = FifoDiskQueue(self.database_name)
        for message in self.generate_data(fn, sleep=sleep):
            queue.push(message)
        queue.close()
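These two producers pair naturally with a consumer that drains the same database_name queue. A minimal sketch (the load name and the assumption that messages are stored as bytes are mine, not from the original):

def load(self):
    # Drain everything currently on disk and return it as a list of bytes.
    queue = FifoDiskQueue(self.database_name)
    messages = []
    msg = queue.pop()
    while msg is not None:
        messages.append(msg)
        msg = queue.pop()
    queue.close()
    return messages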