def check_device_status(self):
    """Check whether the device responds to a status query.

    Sends CHECK_STATUS and looks for OK in the reply.  On success the
    tracklog size reported in the last four bytes of the response is
    decoded and stored in ``self.logsize``.

    Returns:
        bool: True if the device answered with OK, False otherwise.
    """
    verbose('checking device status.. ', newline=False)
    buf = self._communicate(CHECK_STATUS)
    # Membership test already yields a bool; the original
    # `True if OK in buf else False` was redundant.
    success = OK in buf
    if success:
        verbose('ok')
        self.logsize = convert_logsize_value(buf[-4:])
    else:
        fprint('error: is the device turned on?')
    return success
def purge_log_on_device(self):
    """Delete all tracklogs from the device."""
    self.check_if_device_is_empty()
    fprint('purge log on device.. ', newline=False)
    response = self._communicate(PURGE_LOG)
    if OK not in response:
        die('error while trying to purge the log')
    else:
        # Poll the serial port once a second until the device reports
        # that the purge has finished.
        while True:
            if FINISH in response:
                break
            time.sleep(1)
            response += self.ser.read(self.ser.inWaiting())
        fprint('ok')
def main():
    """Collect tweets for every configured hashtag and persist them.

    For each tag the previously saved tweets are loaded, the lowest
    saved since_id is determined, fresh tweets are fetched from Twitter,
    processed and merged back into the per-hashtag JSON file.  Progress
    is logged to collect.txt.
    """
    twitter = get_twitter_API()
    # Mode 'w' truncates the log directly; the original opened the file
    # once in 'w' (handle leaked) and again in 'a'.  The context manager
    # also guarantees the handle is closed on error.
    with open('collect.txt', 'w') as f:
        fprint('Collecting tweets........................', f)
        fprint('Started collecting tweets From Twitter Based on hashtags', f)
        # NOTE(review): min_id carries over to the next tag when a tag
        # has no saved tweets — preserved from the original; confirm
        # this is intended.
        min_id = 0
        for tags in hashtags:
            tweets, since_id, users = [], [], []
            count = 0
            fprint("Collecting data for : " + tags, f)
            tweets_from_file = json_loader([], tags)
            if tweets_from_file:
                for saved in tweets_from_file:
                    since_id.append(saved['since_id'])
                    if saved['screen_name'] not in users:
                        users.append(saved['screen_name'])
                        count += 1
                min_id = min(since_id)
            tweets = get_tweets(twitter, tags, tweets, min_id)
            tweets = process_data(tweets)
            # Count users seen in the freshly fetched tweets as well.
            for tweet in tweets:
                if tweet['screen_name'] not in users:
                    users.append(tweet['screen_name'])
                    count += 1
            fprint(
                "Total No. of Users who tweets for this hashtag : " +
                str(count), f)
            tweets_from_file.extend(tweets)
            json_writer(tweets_from_file, tags, f)
            print('Tweets saved to each hashtags file \n')
def download_data(self):
    """Download all tracklogs from the device and return them as one buffer."""
    self.check_if_device_is_empty()
    verbose('switching device to download mode.. ', newline=False)
    total_chunks = int(math.ceil(self.logsize / 128.0))
    expected_size = total_chunks * 128
    # The second init command is only sent when the first one fails
    # (short-circuit `or`), exactly as before.
    known_device = (OK in self._communicate(INIT_DOWNLOAD_MAINNAV_MG_950D) or
                    OK in self._communicate(INIT_DOWNLOAD_QSTART_BT_Q2000))
    if not known_device:
        fprint('error, unknown device')
        return
    verbose('ok')
    # First chunk uses its own command; the remaining ones are fetched
    # with a counted loop instead of a manual while-counter.
    buf = self._communicate(DOWNLOAD_CHUNK_FIRST, bytes=132)[3:-1]
    for _ in range(total_chunks - 1):
        buf += self._communicate(DOWNLOAD_CHUNK_NEXT, bytes=132)[3:-1]
        percent = int((len(buf) / float(expected_size)) * 100)
        fprint('\rdownloading: %s%%' % percent, newline=False)
    fprint('')
    # Only unnecessary data is left at this point, so abort:
    self._communicate(ABORT_TRANSMISSION, answer=False)
    verbose('switching device back to standard mode.. ', newline=False)
    self._communicate(INIT_STANDARD, answer=False)
    verbose('ok')
    return buf
def main():
    """Concatenate the logs of the three pipeline stages into summary.txt."""
    print(
        "--------------------------------- Started Summary -----------------------------------"
    )
    # Context managers close every handle deterministically; the original
    # leaked the three read handles and opened summary.txt twice (once
    # just to truncate it).
    with open("collect.txt", 'r') as fh:
        collector_details = fh.read()
    with open("cluster.txt", 'r') as fh:
        cluster_details = fh.read()
    with open("classify.txt", 'r', encoding='utf-8') as fh:
        classify_details = fh.read()
    with open("summary.txt", 'w', encoding='utf-8') as f:
        fprint("collect.py : \n" + collector_details, f)
        fprint("cluster.py : \n" + cluster_details, f)
        fprint("classify.py : \n" + classify_details, f)
    print("Details saved to : summary.txt")
    print(
        "---------------------------------- Finished summary -----------------------------------"
    )
def main():
    """Cluster the tweeters of each hashtag and log community statistics.

    Writes pairwise follower overlap and cluster sizes to cluster.txt
    and saves the network plots (clusters.png, cluster_<i>.png).
    """
    print(
        "--------------------------------- Started Clustering -----------------------------------"
    )
    tweets = []
    names = {}
    # Single 'w' open with a context manager replaces the original
    # truncate-then-append pair and guarantees the log is closed even
    # if plotting raises.
    with open('cluster.txt', 'w') as f:
        for tags in hashtags:
            # json_loader accumulates into the same list across tags
            # (preserved from the original).
            tweets = json_loader(tweets, tags)
            tweeter_names, _ = return_unique_user(tweets)
            names[tags] = tweeter_names
        teams = list(names)
        # Pairwise intersection of each two teams' user sets, without
        # double index arithmetic.
        for i, team_a in enumerate(teams):
            for team_b in teams[i + 1:]:
                common = set(names[team_a]) & set(names[team_b])
                fprint(
                    "Common number of followers for the team " + team_a +
                    " and " + team_b + " : " + str(len(common)), f)
        graph = plot_graph(names)
        g = draw_network_graph(graph, names, "clusters.png")
        fprint(
            'Graph has %d nodes and %d edges' %
            (graph.order(), graph.number_of_edges()), f)
        result = girvan_newman(g, 5, 20)
        # NOTE(review): result[0] is skipped here, as in the original —
        # confirm that the first partition is intentionally ignored.
        for i in range(1, len(result)):
            fprint(
                'Cluster %d Number of nodes or followers %d' %
                (i, len(result[i])), f)
            draw_network_graph(graph.subgraph(result[i]), result[i],
                               "cluster_" + str(i) + ".png")
            print("Cluster %d nodes" % i, result[i], file=f)
    print(
        "---------------------------------- Finished Clustering -----------------------------------"
    )
def print_data(tag, test_data, predicted_labels):
    """Append per-sentiment counts and one example tweet per class to classify.txt.

    Args:
        tag: hashtag the statistics belong to.
        test_data: the classified tweets, parallel to predicted_labels.
        predicted_labels: sentiment per tweet
            ("positive" / "negative" / "neutral").
    """
    pos_count = neg_count = neutral_count = 0
    # Index of the LAST tweet seen for each class, used as the example.
    # NOTE(review): when a class never occurs, index 0 is shown as its
    # "example" — behavior preserved from the original.
    pos_idx = neg_idx = neutral_idx = 0
    for i, label in enumerate(predicted_labels):
        if label == "negative":
            neg_idx = i
            neg_count += 1
        elif label == "positive":
            pos_idx = i
            pos_count += 1
        elif label == "neutral":
            neutral_idx = i
            neutral_count += 1
    # The with-block closes the file; the original's explicit fp.close()
    # inside the block was redundant and has been removed.
    with open("classify.txt", 'a', encoding="utf8") as fp:
        fprint("*********************************" + tag +
               "**************************************", fp)
        fprint("Positive Number of Tweets for " + tag + " : " +
               str(pos_count), fp)
        fprint("Negative Number of Tweets for " + tag + " : " +
               str(neg_count), fp)
        fprint("Neutral Number of Tweets for " + tag + " : " +
               str(neutral_count), fp)
        fprint("Positive Tweets Example for " + tag + " : " +
               str(test_data[pos_idx]).replace("\n", ""), fp)
        fprint("Negative Tweets Example for " + tag + " : " +
               str(test_data[neg_idx]).replace("\n", ""), fp)
        fprint("Neutral Tweets Example for " + tag + " : " +
               str(test_data[neutral_idx]).replace("\n", ""), fp)