def __init__(self):
    """Initialize the OAuth-flow variant of the Slack bot.

    Reads app credentials from the environment, connects an unauthenticated
    SlackClient (real token arrives later via the OAuth exchange), and loads
    the pickled APT lookup tables from ../data/.
    """
    # super(Bot, self).__init__()
    self.name = "aptbot"
    # self.emoji = ":robot_face:"
    # app credentials from local env
    self.oauth = {
        "client_id": os.environ.get("CLIENT_ID"),
        "client_secret": os.environ.get("CLIENT_SECRET"),
        # scope for limiting permissions for app
        "scope": "bot"
    }
    self.verification = os.environ.get("VERIFICATION_TOKEN")
    # NOTE: Python-slack requires a client connection to generate
    # an oauth token. We can connect to the client without authenticating
    # by passing an empty string as a token and then reinstantiating the
    # client with a valid OAuth token once we have one.
    self.client = SlackClient("")
    self.bot_id = ''  # to be filled later
    self.at_bot = ''  # "<@bot_id>" mention string, filled with bot_id
    self.serializer = Serializer()
    # user-visible help text per supported command
    self.commands = {
        'group': 'information about the APT group(s) containing the given name',
        'tool': 'list of APT groups that use the given tool',
        'target': 'list of APT groups that target the given asset or organization',
        'ops': 'list of APT group that executed the given operation'
    }
    # pickled lookup tables produced by the data pipeline; path is relative
    # to the process working directory — TODO confirm callers run from src/
    path = '../data/'
    with open(path + 'groups.pkl', 'rb') as f:
        self.gid_to_group = pickle.load(f)  # dict of groups
    with open(path + 'command_to_gid.pkl', 'rb') as f:
        self.command_to_gid = pickle.load(f)
def save_to_file(self):
    """Serialize the spin-box values and the sales table to a user-chosen file.

    Shows an error box when the user cancels the dialog or when
    serialization fails.
    """
    try:
        # BUG FIX: QFileDialog.getSaveFileName (Qt5) returns a
        # (path, selected_filter) tuple, so len(...) == 0 never detected a
        # cancelled dialog; an empty path string is the cancel signal.
        # Also retitled the dialog — it is a *save* dialog.
        sfd_res = QtWidgets.QFileDialog.getSaveFileName(self, 'Save file...')
        if not sfd_res[0]:
            QtWidgets.QMessageBox.critical(self, 'Error', 'Файлы не выбраны')
            return
        path = sfd_res[0]
        # Snapshot the sales table cell-by-cell; missing items become ''.
        sales_data = []
        for i in range(self.ui.saleTableWidget.rowCount()):
            sales_row = []
            for j in range(self.ui.saleTableWidget.columnCount()):
                item = self.ui.saleTableWidget.item(i, j)
                sales_row.append(item.text() if item else '')
            sales_data.append(sales_row)
        data = [
            self.ui.WSpinBox.value(),
            self.ui.TSpinBox.value(),
            self.ui.CpSpinBox.value(),
            self.ui.CxSpinBox.value(),
            self.ui.CdSpinBox.value(),
            self.ui.CySpinBox.value(),
            self.ui.DtSpinBox.value(),
            sales_data
        ]
        Serializer.serialize(path, data)
    except Exception:
        QtWidgets.QMessageBox.critical(self, 'Error', 'Ошибка сохранения')
def create_topic_models(collection, n_topics=30):
    """Fit LDA topic models for *collection* and group its documents by topic.

    Loads the precomputed relative-frequency corpus (csr_matrix) and its
    feature names from disk, fits a TopicModeller, and returns a list of
    dicts of the form {"desc": <topic description>, "docs": [Document, ...]}.

    Parameters
    ----------
    collection : Collection model instance
    n_topics : int, number of topics to fit (default 30)
    """
    coll_id = str(collection.id)  # renamed: previously shadowed builtin id()
    corpus_path = corpora_path + coll_id + "/" + coll_id + "_rf_stopwords-included_cs.corpus"
    feature_path = corpus_path + ".features"
    serializer = Serializer()
    corpus_rf = serializer.load(corpus_path, type="csr_matrix")
    feature_names = serializer.load(feature_path, type="list")
    docs = list(collection.documents.values_list("id", flat=True))
    lda = TopicModeller(n_topics=n_topics)
    # NOTE: random state is handled by TopicModeller (currently 0).
    lda.create_topic_models(corpus_rf, feature_names)
    # list of dicts {"desc", "doc_ids"}
    topic_data = lda.documents_per_topic(corpus_rf, docs, feature_names)
    topics = []
    for topic in topic_data:
        entry = {"desc": topic["desc"], "docs": []}
        for doc_id in topic["doc_ids"]:
            entry["docs"].append(collection.documents.get(pk=doc_id))
        topics.append(entry)
    return topics
def get_highest_freq_words(id, n=500, calc_m=True):
    """Return a dict of the *n* most frequent words for collection *id*.

    Loads the tokenized, stopword-excluded corpus from disk and delegates
    counting to Analyzer.get_frequencies.

    NOTE: `calc_m` is currently unused — the halving logic it controlled is
    commented out below.
    """
    serializer = Serializer()
    name = str(id) + "_tokens_stopwords-excluded_cs.corpus"
    corpus_tokenized = serializer.load(corpora_path + str(id) + "/" + name)
    analyzer = Analyzer()
    # freqs is presumably a sequence of (word, count) pairs — confirm
    # against Analyzer.get_frequencies before relying on the dict shape.
    freqs = analyzer.get_frequencies(corpus_tokenized, n)
    # if calc_m is True:
    #     m = int(len(freqs)/2)
    # elif n is None:
    #     m = len(freqs)
    # return dict(freqs[:m])
    return dict(freqs)
def start(self):
    """Connect to the host process and bring up the kernel's messaging stack.

    sys.argv[1] is the localhost port to connect to; sys.argv[2] is the path
    of the serialization-library module to load.  No-op while already
    running.

    Raises
    ------
    RuntimeError
        If the kernel has been closed; a closed kernel cannot be restarted.
    """
    if not self._is_running:
        if self._is_closed:
            raise RuntimeError(
                'Python kernel is closed and cannot be restarted.')
        self._is_running = True
        debug_msg("Connect.")
        self._connection = self._connect(('localhost', int(sys.argv[1])))
        debug_msg("Create executors.")
        self._execute_thread_executor = self._create_execute_thread_executor()
        self._executor = self._create_executor()
        debug_msg("Create Python commands.")
        # The commands object wraps the messaging layer over the connection.
        self._commands = PythonCommands(
            self._create_messaging(self._connection), self)
        self._setup_builtin_request_handlers()
        debug_msg("Load serialization library.")
        self._serialization_library = self._load_serialization_library(
            sys.argv[2])
        debug_msg("Create type extension manager.")
        self._type_extension_manager = TypeExtensionManager(self._commands)
        debug_msg("Create serialization helper.")
        self._serializer = Serializer(self._serialization_library,
                                      self._type_extension_manager)
        # Start commands/messaging system once everything is set up.
        debug_msg("Start Python commands.")
        self._commands.start()
def retrieve_version_candidates(doc):
    """Return version candidates for *doc* from its precomputed similarities.

    Reads the on-disk similarity corpus (a mapping of candidate id -> score)
    and keeps candidates whose score lies strictly between 0.5 and 0.9.
    Returns {candidate_id: (Document, score)}; empty when no similarity
    corpus exists for this document yet.
    """
    key = str(doc.id)
    corpus_path = static_path + "similarities/" + key + "/" + key + "_similarities.corpus"
    if not os.path.isfile(corpus_path):
        return {}
    scores = Serializer().load(corpus_path)
    return {
        c_id: (Document.objects.get(pk=c_id), score)
        for c_id, score in scores.items()
        if 0.5 < score < 0.9
    }
def get_clusters(collection_id, corpus=None, doc2vec=False, tf_idf=False, k=30): collection = Collection.objects.get(pk=collection_id) #features = None vectors = None features = list(collection.documents.values_list("id", flat=True)) if doc2vec is True: #features = list(collection.documents.values_list("id", flat=True)) vectors = corpus.wv.vectors elif tf_idf is True: serializer = Serializer() c_id = str(collection_id) v_path = corpora_path + c_id + "/" + c_id + "_tf-idf_stopwords-included_cs.corpus" vectors = serializer.load(v_path, type="csr_matrix") #features = serializer.load(v_path + ".features") clusterer = Clusterer(k=k) clusters_kmeans = clusterer.cluster_kmeans(vectors, file_output=False, console_output=True, reduced_vectors=None, feature_names=features) print("Sorting docs by cluster") clusters = {} limit = 5000 for key, doc_ids in clusters_kmeans.items(): clusters[key] = [] random.shuffle(doc_ids) for id in doc_ids[:limit]: clusters[key].append(collection.documents.get(pk=id)) # if collection.documents.count() > 15000: # print("Creating excerpts from clusters...") # for i, cluster in clusters: # if len(cluster) > limit: # random.shuffle(cluster) # clusters[i] = cluster[:limit] return clusters
def run_pipeline(text):
    """Run the compiler front-end over *text* and return a textual trace.

    Stages: tokenize -> LR(1) parse (parser unpickled from ./parser) ->
    AST construction -> type collection -> type building -> type checking
    (iterated to a fixed point).  The returned string accumulates a banner
    plus output for every stage; on parse failure it stops early with a
    "Parsing Error at ..." line.
    """
    ret_text = '\n' + '=================== TEXT ======================'
    ret_text += '\n' + text
    ret_text += '\n' + '================== TOKENS ====================='
    tokens = tokenize_text(text)
    ret_text += '\n' + pprint_tokens(tokens)
    ret_text += '\n' + '=================== PARSE ====================='
    ret_text += '\n'
    # The pre-built parser is deserialized from the working directory.
    parser = Serializer.load(os.getcwd() + '/parser')
    #parser = LR1Parser(G)
    ret_parser = parser([t.token_type for t in tokens])
    parse, operations = ret_parser
    if parse is None:
        # On failure `operations` carries the error location/description.
        return ret_text + "\nParsing Error at " + operations
    ret_text += '\n'.join(repr(x) for x in parse)
    ret_text += '\n' + '==================== AST ======================'
    ast = evaluate_reverse_parse(parse, operations, tokens)
    formatter = FormatVisitor()
    tree = formatter.visit(ast)
    ret_text += '\n' + tree
    ret_text += '\n' + '============== COLLECTING TYPES ==============='
    errors = []
    collector = TypeCollector(errors)
    collector.visit(ast)
    context = collector.context
    ret_text += '\n' + 'Errors:'
    for error in errors:
        ret_text += '\n' + error
    ret_text += '\n' + 'Context:'
    ret_text += '\n' + str(context)
    ret_text += '\n' + '=============== BUILDING TYPES ================'
    builder = TypeBuilder(context, errors)
    builder.visit(ast)
    ret_text += '\n' + 'Errors: ['
    for error in errors:
        ret_text += '\n' + '\t'
        ret_text += '\n' + error
    ret_text += '\n' + ']'
    ret_text += '\n' + 'Context:'
    ret_text += '\n' + str(context)
    ret_text += '\n' + '=============== CHECKING TYPES ================'
    checker = TypeChecker(context, errors)
    try:
        # Re-run the checker until it stops reporting changes (fixed point).
        scope, errors = checker.visit(ast)
        while (checker.changed):
            scope, errors = checker.visit(ast)
    except SemanticError as e:
        errors = [e.text]
    ret_text += '\n' + 'Errors: ['
    for error in errors:
        ret_text += '\n' + '\t'
        ret_text += '\n' + error
    ret_text += '\n' + ']'
    if len(errors) == 0:
        checker.printer(ast)
    return ret_text
def find_versions(id):
    """Kick off asynchronous version detection for collection *id*.

    Loads the tokenized (stopwords included) corpus from disk and hands it
    to the `find_versions_async` task; results are written under the static
    "similarities/" directory.
    """
    key = str(id)
    corpus_file = corpora_path + key + "/" + key + "_tokens_stopwords-included_cs.corpus"
    tokens = Serializer().load(corpus_file)
    find_versions_async.delay(tokens, static_path + "similarities/")
def __init__(self):
    """Set up a fresh tournament: serializer, scores, flags, notifications."""
    #Serializer components
    self.serializer = Serializer()
    self.restoreFilePath = None  # file to restore a saved tournament from
    #Tournament details
    self.humanScore = 0
    self.botScore = 0
    self.nextPlayer = None  # set to "Computer"/"Human" when serializing
    self.gameResult = None  # game outcome code ('h', 'c', 'S' or 's')
    #Booleans for decision making
    self.quit = False
    self.restoringGame = False
    #Notifications purposes
    self.notifications = Notifications()
def getClassificationModel(self, dataset: Dataset, backgroundFilePath: str):
    """Deserialize and return the background classification model stored at
    *backgroundFilePath*; on any failure, log the error and return None."""
    try:
        return Serializer.Deserialize(backgroundFilePath)
    except Exception as err:
        Logger.Error(
            "getBackgroundClassificationModel -> Error reading classifier for dataset "
            + dataset.name + " from file: " + backgroundFilePath + " : " + str(err),
            err)
        return None
def __init__(self):
    """Initialize the token-based Slack bot: connect the client, resolve the
    bot's user id, and load the pickled APT lookup tables from ../data/."""
    self.name = "aptbot"
    self.emoji = ':robot_face:'
    self.token = os.environ.get('BOT_TOKEN')
    self.client = SlackClient(self.token)
    # NOTE(review): get_bot_id() may return None, which would make the
    # concatenation below raise TypeError — confirm BOT_TOKEN is always
    # valid when this runs.
    self.bot_id = self.get_bot_id()
    self.at_bot = '<@' + self.bot_id + '>'
    self.serializer = Serializer()
    # user-visible help text per supported command
    self.commands = {
        'group': 'information about the APT group(s) containing given name',
        'tool': 'list of APT groups that use given tool',
        'target': 'list of APT groups that target given asset or organization',
        'ops': 'list of APT group that executed given operation'
    }
    # pickled lookup tables; path is relative to the working directory
    path = '../data/'
    with open(path + 'groups.pkl', 'rb') as f:
        self.gid_to_group = pickle.load(f)  # dict of groups
    with open(path + 'command_to_gid.pkl', 'rb') as f:
        self.command_to_gid = pickle.load(f)
def get_similar_docs(doc, n=50):
    """Find up to *n* Word2Vec-similar documents to *doc* and persist them
    as Version candidates.

    NOTE(review): `results` is never appended to, so this always returns []
    even though Version rows are saved as a side effect — confirm intended.
    """
    #otherwise create them
    collection_id = str(doc.collection.id)
    model_file = model_path + collection_id + ".model"
    corpus_path = corpora_path + collection_id + "/" + collection_id + "_tokens_stopwords-included_cs.corpus"
    try:
        serializer = Serializer()
        corpus = serializer.load(corpus_path)
    except FileNotFoundError as e:
        # "Corpus 3 (tokenized, with stopwords) not available."
        print("Korpus 3 (tokenisiert, mit Stoppwörtern) nicht vorhanden.")
        return []
    embedder = DocEmbedder()
    model = embedder.run(path=model_file)
    # Ask for extra neighbours so that already-known duplicates and
    # version candidates can be skipped without dropping below n.
    vs = doc.version_candidates.count()
    dups = doc.duplicates.count()
    n2 = n + dups + vs
    sim_docs = embedder.show_similar_docs(corpus=corpus,
                                          doc_id=doc.id,
                                          topn=n2)
    results = []
    for id, score in sim_docs:
        d = Document.objects.get(pk=id)
        if d not in doc.duplicates.all(
        ) and d not in doc.version_candidates.all():
            version = Version(candidate=d,
                              version_of=doc,
                              similarity_measure="Word2Vec",
                              similarity_score=score)
            version.save()
    return results
def get_tf_idf(doc_id, n=30):
    """Return the *n* words with the highest tf-idf weight for a document.

    Loads the collection's precomputed tf-idf matrix, its feature names and
    the doc-id -> row-index mapping from disk.

    Parameters
    ----------
    doc_id : primary key of the Document
    n : int, number of top words to return (default 30; previously a
        hard-coded local, now a backward-compatible parameter)

    Returns
    -------
    numpy array of the top-n feature names, or a user-facing (German) hint
    string when the tf-idf representation has not been generated yet.
    """
    doc = Document.objects.get(pk=doc_id)
    c_id = str(doc.collection.id)
    filename = c_id + "_tf-idf_stopwords-included_cs.corpus"
    filepath = corpora_path + c_id + "/" + filename
    try:
        serializer = Serializer()
        vectors = serializer.load(filepath)
        features = serializer.load(filepath + ".features")
        idx = serializer.load(filepath + ".idx")
    except FileNotFoundError:
        return "Bitte auf der Hauptseite der Sammlung die Tf-idf-Repräsentation erzeugen. Dann werden an dieser Stelle die spezifischsten Worte angezeigt."
    pos = idx[doc_id]
    docvec = vectors[pos]
    feature_array = np.array(features)
    # Feature indices sorted by descending tf-idf weight for this document.
    tfidf_sorting = np.argsort(docvec.toarray()).flatten()[::-1]
    return feature_array[tfidf_sorting][:n]
def command_register(self, arguments, user):
    """Register a new event by name, or report that the name is taken.

    arguments[0].Value is the requested event name (case-insensitive match).
    Returns the response string to send back to the user ("" on success).

    BUG FIX: the already-exists branch previously used the loop variable
    `event`, i.e. the *last* iterated event, not the matched one.
    """
    requested_name = arguments[0].Value
    match = None
    commandResponse = ""
    for event in GameEventManager.ScheduledEvents:
        if event.EventName.lower() == requested_name.lower():
            match = event
    if match is None:
        newEvent = GameEvent()
        newEvent.EventName = requested_name
        GameEventManager.AddEvent(newEvent)
        print("Creating new event: " + newEvent.EventName)
        Serializer.SaveToFile()
    else:
        commandResponse = "An event by the name of " + match.EventName + " already exists. Try joining that event using $join or choose a different name for your event."
        print("An event by the name of " + match.EventName + " already exists.")
    return commandResponse
def command_join(self, arguments, user):
    """Sign *user* up for the event named in arguments[0].Value.

    Returns the response string.

    BUG FIX: the not-found branch referenced the loop variable `event`,
    which is unbound when ScheduledEvents is empty (NameError) and is the
    last iterated event otherwise; feedback branches also used `event`
    instead of the matched event.
    """
    requested_name = arguments[0].Value
    match = None
    commandResponse = ""
    for event in GameEventManager.ScheduledEvents:
        if event.EventName.lower() == requested_name.lower():
            match = event
    if match is not None:
        if user not in match.Users:
            match.AddUser(user)
            commandResponse = "You've signed up for the " + match.EventName + " event! Have fun!"
            print("Adding " + user + " to " + match.EventName + " roster.")
            Serializer.SaveToFile()
        else:
            commandResponse = "Good news! You're already signed up for the " + match.EventName + " event!"
            print("User named " + user + " already exists in " + match.EventName + " roster.")
    else:
        commandResponse = "That's odd...No event by the name of " + requested_name + " could be found. Perhaps it could be under a different name?"
        print("An event by the name of " + requested_name + " could not be found.")
    return commandResponse
def command_leave(self, arguments, user):
    """Remove *user* from the roster of the event named in arguments[0].Value.

    Returns the response string.

    BUG FIX: like command_join, the not-found branch referenced the loop
    variable `event` (unbound when ScheduledEvents is empty, wrong event
    otherwise); feedback branches now use the matched event.
    """
    requested_name = arguments[0].Value
    match = None
    commandResponse = ""
    for event in GameEventManager.ScheduledEvents:
        if event.EventName.lower() == requested_name.lower():
            match = event
    if match is not None:
        if user in match.Users:
            match.RemoveUser(user)
            commandResponse = "You have left the " + match.EventName + " roster!"
            print("Removing " + user + " from " + match.EventName + " roster.")
            Serializer.SaveToFile()
        else:
            commandResponse = "You don't appear to be currently signed up for the " + match.EventName + " event."
            print("User named " + user + " is not currently present in " + match.EventName + " roster.")
    else:
        commandResponse = "That's odd...No event by the name of " + requested_name + " could be found. Perhaps it could be under a different name?"
        print("An event by the name of " + requested_name + " could not be found.")
    return commandResponse
def reset_timer():
    """Hourly housekeeping: reset rosters of events whose auto-reset time
    has arrived, persist if anything changed, then re-arm the timer.

    BUG FIX: `anyReset = true` raised NameError — Python's literal is `True`.
    """
    print('Checking reset timers.')
    now = datetime.utcnow()
    anyReset = False
    for gameEvent in GameEventManager.ScheduledEvents:
        if gameEvent.AutoResetRoster:
            # NOTE(review): exact datetime equality will almost never hold;
            # this probably should compare at minute/hour granularity —
            # confirm how AutoResetTime is populated.
            if (now == gameEvent.AutoResetTime):
                gameEvent.ResetRoster()
                anyReset = True
    if anyReset:
        Serializer.SaveToFile()
    # get number of seconds until the next tick (minute :02 of the next hour)
    delta = timedelta(hours=1)
    next_hour = (now + delta).replace(microsecond=0, second=0, minute=2)
    wait_seconds = (next_hour - now).seconds
    # recall this method on the next hour marker
    threading.Timer(wait_seconds, reset_timer).start()
def load_from_file(self):
    """Restore the spin-box values and the sales table from a chosen file.

    Shows an error box when the user cancels the dialog or when
    deserialization fails.
    """
    try:
        # BUG FIX: QFileDialog.getOpenFileName (Qt5) returns a
        # (path, selected_filter) tuple, so len(...) == 0 never detected a
        # cancelled dialog; an empty path string is the cancel signal.
        ofd_res = QtWidgets.QFileDialog.getOpenFileName(self, 'Open file...')
        if not ofd_res[0]:
            QtWidgets.QMessageBox.critical(self, 'Error', 'Файлы не выбраны')
            return
        w_value, t_value, cp_value, cx_value, cd_value, cy_value, dt_value, sales_data = \
            Serializer.deserialize(ofd_res[0])
        self.ui.WSpinBox.setValue(float(w_value))
        self.ui.TSpinBox.setValue(float(t_value))
        self.ui.CpSpinBox.setValue(float(cp_value))
        self.ui.CxSpinBox.setValue(float(cx_value))
        self.ui.CdSpinBox.setValue(float(cd_value))
        self.ui.CySpinBox.setValue(float(cy_value))
        self.ui.DtSpinBox.setValue(float(dt_value))
        # Rebuild the sales table row by row.
        self.ui.saleTableWidget.clear()
        self.ui.saleTableWidget.setRowCount(0)
        for row in sales_data:
            self.ui.saleTableWidget.setRowCount(
                self.ui.saleTableWidget.rowCount() + 1)
            self.ui.saleTableWidget.setColumnCount(len(row))
            last_row = self.ui.saleTableWidget.rowCount() - 1
            for i in range(len(row)):
                if not self.ui.saleTableWidget.item(last_row, i):
                    new_cell = QtWidgets.QTableWidgetItem()
                    new_cell.setText(row[i])
                    self.ui.saleTableWidget.setItem(last_row, i, new_cell)
    except Exception:
        # BUG FIX: error text previously said "Ошибка сохранения"
        # (saving error) in the *load* handler.
        QtWidgets.QMessageBox.critical(self, 'Error', 'Ошибка загрузки')
def closeEvent(self, event):
    """Qt close handler.

    If msgSender isn't initiated (still the "" placeholder, i.e. the sender
    thread never started), accept the close immediately; otherwise send the
    disconnection message to the stream/server first.

    NOTE(review): the else branch never calls event.accept()/ignore(); Qt
    accepts close events by default, but an explicit accept() after the
    send would make the intent clear — confirm the window actually closes.
    """
    if self.msgSender=="":
        event.accept()
    else :
        self.msgSender.sendToStream(Serializer.serializeDisconnection(self.pseudo))
def writeClassifierTobackgroundFile(self, backgroundFilePath: str, classifier):
    """Persist *classifier* to *backgroundFilePath* so later runs can reuse
    it instead of retraining."""
    # now we write the classifier to file prior to returning the object
    Serializer.Serialize(backgroundFilePath, classifier)
def updateClientsConnectedPseudos(self):
    """Broadcast the current list of connected pseudos to every client
    socket."""
    self.sendMsgToAllSockets(
        Serializer.serializePseudoList(self.connectedDict.values()))
def set_serialization_library(self, path_to_serialization_library_module):
    """(Re)load the serialization library from the given module path and
    rebuild the Serializer helper around it.

    Mirrors the corresponding steps of start(); assumes
    self._type_extension_manager has already been created.
    """
    debug_msg("Load serialization library.")
    self._serialization_library = self._load_serialization_library(path_to_serialization_library_module)
    debug_msg("Create serialization helper.")
    self._serializer = Serializer(self._serialization_library, self._type_extension_manager)
class Bot:
    """ Instantiates a Bot object to handle Slack onboarding interactions."""

    def __init__(self):
        """Set up the OAuth-flow bot: credentials from env, an
        unauthenticated client, and pickled APT lookup tables."""
        # super(Bot, self).__init__()
        self.name = "aptbot"
        # self.emoji = ":robot_face:"
        # app credentials from local env
        self.oauth = {
            "client_id": os.environ.get("CLIENT_ID"),
            "client_secret": os.environ.get("CLIENT_SECRET"),
            # scope for limiting permissions for app
            "scope": "bot"
        }
        self.verification = os.environ.get("VERIFICATION_TOKEN")
        # NOTE: Python-slack requires a client connection to generate
        # an oauth token. We can connect to the client without authenticating
        # by passing an empty string as a token and then reinstantiating the
        # client with a valid OAuth token once we have one.
        self.client = SlackClient("")
        self.bot_id = ''  # to be filled later
        self.at_bot = ''
        self.serializer = Serializer()
        # user-visible help text per supported command
        self.commands = {
            'group': 'information about the APT group(s) containing the given name',
            'tool': 'list of APT groups that use the given tool',
            'target': 'list of APT groups that target the given asset or organization',
            'ops': 'list of APT group that executed the given operation'
        }
        # pickled lookup tables produced by the data pipeline
        path = '../data/'
        with open(path + 'groups.pkl', 'rb') as f:
            self.gid_to_group = pickle.load(f)  # dict of groups
        with open(path + 'command_to_gid.pkl', 'rb') as f:
            self.command_to_gid = pickle.load(f)

    def auth(self, code):
        """ Authenticate with OAuth and assign correct scopes.
        Save a dictionary of authed team information in memory on the bot
        object.

        Parameters
        ----------
        code : str
            temporary authorization code sent by Slack to be exchanged for
            an OAuth token
        """
        # After the user has authorized this app for use in their Slack team,
        # Slack returns a temporary authorization code that we'll exchange for
        # an OAuth token using the oauth.access endpoint
        auth_response = self.client.api_call(
            "oauth.access",
            client_id=self.oauth["client_id"],
            client_secret=self.oauth["client_secret"],
            code=code)
        # keep track of authorized teams and their associated OAuth tokens
        team_id = auth_response["team_id"]
        authed_teams[team_id] = {
            "bot_token": auth_response["bot"]["bot_access_token"]
        }
        # reconnect to the Slack Client with the correct team's bot token
        self.client = SlackClient(authed_teams[team_id]["bot_token"])

    def save_bot_id(self):
        """Look up this bot's user id via users.list and cache both the id
        and the "<@id>" mention string on the instance."""
        # retrieve bot id
        api_call = self.client.api_call('users.list')
        pprint(api_call)
        if api_call.get('ok'):
            # retrieve all users so we can find our bot
            users = api_call.get('members')
            for user in users:
                if 'name' in user and user.get('name') == self.name:
                    self.bot_id = user.get('id')
                    self.at_bot = "<@" + self.bot_id + ">"
                    # check
        else:
            print("could not find bot user with the name " + self.name)

    def handle_message_evt(self, event_type, slack_event):
        """ checks if a valid command is directed to bot, calls command
        handler for response, and posts response on slack

        NOTE(review): both branches of the first if/else return, so
        everything from `parsed = ...` onward is unreachable. Other issues
        to confirm before relying on this handler: `slack_event['text']` /
        `slack_event['channel']` look like they should be `evt['text']` /
        `evt['channel']`; the posted text is the literal string 'response';
        `self.emoji` is commented out in __init__; `event_type` is unused.
        """
        print('authed teams are', authed_teams)
        print('ids are ', self.bot_id, self.at_bot)
        # if not self.at_bot:
        #     self.auth()
        if not self.at_bot or not self.bot_id:
            self.save_bot_id()
        evt = slack_event['event']
        # if directed at bot, set text after the @ mention, whitespace removed
        print(evt.keys(), self.at_bot)
        if 'text' in evt.keys() and self.at_bot in evt['text']:
            text, channel = slack_event['text'], slack_event['channel']
            text = text.split(self.at_bot)[1].strip()
            # pprint(text, channel)
            return
        else:
            # if not text or message not directed at bot, skip
            return
        parsed = text.split(' ', 1)  # command and args (or None)
        # help command takes no arguments
        if len(parsed) == 1 and parsed[0] == 'help':
            response = self.handle_command('help')
        elif len(parsed) >= 2 and parsed[
                0] in self.commands:  # any command with at least one arg
            response = self.handle_command(parsed[0], parsed[1])
        else:  # invalid command
            response = self.default_response()
        # post result as attachment
        post_message = self.client.api_call("chat.postMessage",
                                            channel=channel,
                                            username=self.name,
                                            icon_emoji=self.emoji,
                                            text='response',
                                            attachments=response)
        print('message attachment posted!')

    def handle_command(self, command, args=None):
        """ finds each arg in appropriate APT dictionary and posts serialized
        result on Slack

        NOTE(review): when `args` is a string, `for arg in args` iterates
        its characters, and `gids` is never used — confirm the intended
        input type for `args`.
        """
        # as of now, help just returns default response
        if command == 'help':
            return self.serializer.default_attachment(self.commands)
        # other commands with no args could be added here later
        gids, groups = [], []
        for arg in args:
            gid = self.command_to_gid[command][arg]
            groups.append(self.gid_to_group[gid])
        return self.serializer.groups_attachment(groups)

    def default_response(self):
        """Return the serialized default help attachment."""
        return self.serializer.default_attachment(self.commands)
def simulate(fieldIterable, scriptIterable, ship, sendMessage):
    """ Run a simulation using the following parameters:
    - fieldIterable: an iterable containing strings defining our initial
      minefield, e.g. a fields file.
    - scriptIterable: an iterable containing command-set strings, e.g.
      'gamma north'
    - ship: a Ship instance that defines what commands are legal and what
      command sets are legal in a single turn.
    - sendMessage: a function we will call with strings that should be
      displayed to the user

    We run the simulation until one of these conditions is met:
    - No mines remain
    - We missed a mine
    - We have no more command-sets to execute

    We return a statistics dictionary with the following keys:
    - initialNumMines: how many mines were in the MineField when we started?
    - finalNumMines: how many mines were in the MineField when we stopped
      running the simulation?
    - hasMoreCommandSets: boolean; were there command-sets left in
      scriptIterable when we stopped running?
    - numCommandsByType: dictionary mapping command types (e.g. 'move',
      'torpedo') to the number of each type we executed.
    """
    # Build the mineField and the command we'll use to make the ship drop
    # through it.
    mineField = Serializer.mineFieldFromStringIterable(MineField, fieldIterable)

    def drop(mineField):
        # Move the ship one unit down the z-axis; returns the minefield for
        # chaining.
        return mineField.setShipPosition(
            MineField.addCoordinates(mineField.getShipPosition(), (0, 0, -1)))

    # Initialize the return object.
    numMinePositions = len(mineField.getMinePositions())
    statistics = {
        'initialNumMines': numMinePositions,
        'finalNumMines': numMinePositions,
        'hasMoreCommandSets': True,
        'numCommandsByType': {'move': 0, 'torpedo': 0}
    }
    # Read command sets from the file-like object, keeping track of what
    # "step" we're on.
    enumeratedCommandSets = enumerate(CommandSetIterable(scriptIterable))
    for commandSetIndex, commandNames in enumeratedCommandSets:
        sendMessage('Step %d' % (commandSetIndex + 1,))
        sendMessage('\n'.join(Serializer.mineFieldToStringIterable(mineField)))
        sendMessage(' '.join(commandNames))
        # Execute each command in the set, updating our command type
        # statistics as we go.
        for command in ship.commandNamesToCommandSet(commandNames):
            commandType, execute = command['type'], command['execute']
            execute(mineField)
            statistics['numCommandsByType'][commandType] += 1
        # The ship drops after completing the set of commands, then we
        # recenter and broadcast our state.
        drop(mineField).recenter()
        sendMessage('\n'.join(Serializer.mineFieldToStringIterable(mineField)))
        # If all mines are cleared, we're done.
        positions = mineField.getMinePositions()
        if not len(positions):
            break
        # If any mines are present at or above our z-position, we're done.
        missedMines = [p for p in positions if p[2] >= 0]
        if len(missedMines):
            break
    # Finish off the statistics object and return it.
    statistics['finalNumMines'] = len(mineField.getMinePositions())
    try:
        # NOTE(review): .next() is the Python 2 iterator protocol; under
        # Python 3 this raises AttributeError — use next(...). Probing this
        # way also consumes one command set.
        enumeratedCommandSets.next()
        statistics['hasMoreCommandSets'] = True
    except StopIteration:
        statistics['hasMoreCommandSets'] = False
    return statistics
def main():
    """ROS node entry point: subscribe to commanded velocities and pose,
    convert world-frame velocities to per-wheel motor speeds at 100 Hz, and
    push them to the Serializer motor board until shutdown."""
    # initialize the node
    rospy.init_node('serializer', anonymous=False)
    # subscribe to the velocities message
    rospy.Subscriber('vel_cmds', Twist, _handle_twist)
    rospy.Subscriber('me', Pose2D, _handle_me)
    # create a serializer object
    ser = Serializer()
    # PID gains for motor 0; the other motors' setPID calls are disabled —
    # presumably hand-tuned during bring-up, TODO confirm.
    ser.setPID(0, 1.5, 1.5, 70000)
    #ser.setPID(1, 1.6, 3, 100000)
    #ser.setPID(2, 1.5, 3, 100000)
    #ser.setPID(3, 1.4, 3, 100000)
    # loop until shutdown
    rate = rospy.Rate(100)  # 100 Hz
    cntr = 0
    while not rospy.is_shutdown():
        # get body frame speeds
        vx_w, vy_w = _twist.linear.x, _twist.linear.y
        curAngle = _me.theta
        # vx_b, vy_b = FrameConverter._convert_world_to_body_velocities(
        #     vx_w, vy_w, curAngle)
        # get wheel speeds
        wz = _twist.angular.z
        # w1, w2, w3 = FrameConverter._convert_world_to_motor_velocities(
        #     vx_b, vy_b, wz)
        # w1, w2, w3 = FrameConverter._convert_world_to_motor_velocities(
        #     vx_w, vy_w, curAngle)
        # Try going straight forward at 0.5 m/s
        # vx_w, vy_w, wz, curAngle = 0.5, 0, 0, 0
        w1, w2, w3 = FrameConverter._convert_world_to_motor_velocities(
            vx_w, vy_w, wz, curAngle)
        ser_factor = 1
        w1, w2, w3 = w1 * ser_factor, w2 * ser_factor, w3 * ser_factor
        # Parker Lusk's suggestion to reduce effects from stiction:
        # commands inside the dead band are either bumped up to the smallest
        # command that produces motion or zeroed out entirely.
        smallestCommandForMotion = 0.99
        negligableCommandThresh = 0.1
        if np.abs(w1) < smallestCommandForMotion and np.abs(
                w1) > negligableCommandThresh:
            w1 = np.sign(w1) * smallestCommandForMotion
        elif np.abs(w1) < negligableCommandThresh:
            w1 = 0
        if np.abs(w2) < smallestCommandForMotion and np.abs(
                w2) > negligableCommandThresh:
            w2 = np.sign(w2) * smallestCommandForMotion
        elif np.abs(w2) < negligableCommandThresh:
            w2 = 0
        if np.abs(w3) < smallestCommandForMotion and np.abs(
                w3) > negligableCommandThresh:
            w3 = np.sign(w3) * smallestCommandForMotion
        elif np.abs(w3) < negligableCommandThresh:
            w3 = 0
        # cntr throttles the (currently disabled) debug print to every 20th
        # iteration.
        if cntr == 20:
            # print 'Serializer: w1: {:.3f}\tw2: {:.3f}\tw3: {:.3f}'.format(
            #     w1, w2, w3)
            cntr = 0
        cntr += 1
        # update the speeds
        ser.set_speed(w1, w2, w3)
        # Wait however long it takes to make this tick at 100Hz
        rate.sleep()
        # time.sleep(1.0/sampleRate)
        # speed = getSpeed()
        # speedsM1.append(speed[0]/pulsePerRotation)
        # speedsM2.append(speed[1]/pulsePerRotation)
        # speedsM3.append(speed[2]/pulsePerRotation)
    # shutdown: stop driving the motors
    ser.disengage()
from Parameters import Parameters
from MakeSeq import make_seq

if __name__ == u'__main__':
    # command-line args (--gpu/-g selects a GPU device id; default CPU)
    parser = argparse.ArgumentParser()
    parser.add_argument('--gpu', '-g', default=None, type=int)
    args = parser.parse_args()
    # calculation parameters
    parameters = Parameters()
    # get data: build the vocabulary, a serializer over it, and training
    # sequences of length parameters.time_steps
    vocabs = Vocabs()
    serializer = Serializer(vocabs)
    train_data = make_train_data(vocabs, parameters.time_steps,
                                 parameters.data_num)
    print('--- first 10 data ---')
    for data in train_data[0: 10]:
        print(''.join(data))
    train_data, result_data = serialize_data(serializer, train_data,
                                             parameters.time_steps)
    # set models: a seq2seq network wrapped in a Classifier objective;
    # accuracy computation is disabled (loss-only training)
    seq2seq = Seq2Seq(serializer.num_char, parameters.hidden_num)
    objective = L.Classifier(seq2seq)
    objective.compute_accuracy = False
class Bot:
    """ Instantiates a Bot object to handle Slack onboarding interactions."""

    def __init__(self):
        """Set up the token-based bot: connect the client, resolve the bot's
        user id, and load the pickled APT lookup tables from ../data/."""
        self.name = "aptbot"
        self.emoji = ':robot_face:'
        self.token = os.environ.get('BOT_TOKEN')
        self.client = SlackClient(self.token)
        # NOTE(review): get_bot_id() may return None, which makes the
        # concatenation below raise TypeError — confirm BOT_TOKEN is always
        # valid at construction time.
        self.bot_id = self.get_bot_id()
        self.at_bot = '<@' + self.bot_id + '>'
        self.serializer = Serializer()
        # user-visible help text per supported command
        self.commands = {
            'group': 'information about the APT group(s) containing given name',
            'tool': 'list of APT groups that use given tool',
            'target': 'list of APT groups that target given asset or organization',
            'ops': 'list of APT group that executed given operation'
        }
        # pickled lookup tables produced by the data pipeline
        path = '../data/'
        with open(path + 'groups.pkl', 'rb') as f:
            self.gid_to_group = pickle.load(f)  # dict of groups
        with open(path + 'command_to_gid.pkl', 'rb') as f:
            self.command_to_gid = pickle.load(f)

    def get_bot_id(self):
        """ gets bot id using token; returns None when not found """
        # retrieve bot id
        api_call = self.client.api_call('users.list')
        if api_call.get('ok'):
            # retrieve all users so we can find our bot
            users = api_call.get('members')
            for user in users:
                if 'name' in user and user.get('name') == self.name:
                    return user.get('id')
        else:
            print("could not find bot user with the name " + self.name)
        return None

    def handle_command(self, text, channel=''):
        """ parses text and handles if valid command

        With an empty `channel` (testing mode), returns the number of
        matched groups instead of posting to Slack.
        """
        parsed = text.split(' ', 1)  # command and args (can be None)
        groups, response = {}, ''
        if len(parsed) == 1 and parsed[
                0] == 'help':  # help command takes no arguments
            response = self.default_response()
        elif len(parsed
                 ) == 2 and parsed[0] in self.commands:  # any valid command
            cmmd, arg = parsed
            dct = self.command_to_gid[cmmd]  # get target map
            # case-insensitive substring match, de-duplicated by group id
            for key, gid in dct.items():
                if arg.lower() in key.lower() and gid not in groups.keys():
                    groups[gid] = self.gid_to_group[gid]
            response = self.serializer.groups_response(groups)
        else:  # invalid command
            response = self.default_response()
        if not channel:  # for testing
            return len(groups)
        # post result as attachment
        self.client.api_call("chat.postMessage",
                             channel=channel,
                             username=self.name,
                             icon_emoji=self.emoji,
                             text=response)
        print('message attachment posted!')

    def default_response(self):
        """ returns default response """
        return self.serializer.default_response(self.commands)

    def parse_slack_output(self, slack_rtm_output):
        """ The Slack Real Time Messaging API is an events firehose.
        this parsing function returns None unless a message is
        directed at the Bot, based on its ID.
        """
        output_list = slack_rtm_output
        if output_list and len(output_list) > 0:
            for output in output_list:
                if output and 'text' in output and self.at_bot in output[
                        'text']:
                    # return text after the @ mention, whitespace removed
                    return output['text'].split(self.at_bot)[1].strip(), \
                        output['channel']
        return None, None

    def run(self):
        """ runs and processes slack output in a loop """
        READ_WEBSOCKET_DELAY = 1  # 1 second delay between reading
        if self.client.rtm_connect():
            print("APT bot connected and running!")
            while True:
                command, channel = self.parse_slack_output(
                    self.client.rtm_read())
                if command and channel:
                    self.handle_command(command, channel)
                time.sleep(READ_WEBSOCKET_DELAY)
        else:
            print("Connection failed. Invalid Slack token or bot ID?")
class Tournament:
    """Runs a series of games, keeping the human/bot score, until the user
    quits or serializes the tournament state to a file.

    NOTE(review): uses raw_input and getche — Python 2 / Windows (msvcrt)
    specific; confirm the target runtime.
    """

    #Default Constructor
    def __init__(self):
        #Serializer components
        self.serializer = Serializer()
        self.restoreFilePath = None
        #Tournament details
        self.humanScore = 0
        self.botScore = 0
        self.nextPlayer = None
        self.gameResult = None
        #Booleans for decision making
        self.quit = False
        self.restoringGame = False
        #Notifications purposes
        self.notifications = Notifications()

    # Runs a Tournament
    def play_tournament(self):
        """Run games and maintain score until the user serializes or quits.

        Optionally restores an earlier tournament from a file path entered
        by the user.  Returns True when play ends via serialization; returns
        None when restoring fails or the user quits normally.
        """
        #Ask user if they want to restore the tournament from existing file
        self.notifications.msg_restore_from_file()
        if self.wants_to_continue():
            self.restoringGame = True
            self.notifications.msg_enter_file_path()
            self.restoreFilePath = raw_input()
        #Start the tournament and keep going until user chooses to quit or
        #serialize
        while True:
            #Implement a fresh game
            game = Game()
            #Modify the board and other tournament, game objects from
            #serialization file here if one is provided
            if self.restoringGame:
                pkg = {}
                pkg['board'] = Board()
                pkg['botWins'] = None
                pkg['humanWins'] = None
                pkg['nextPlayer'] = None
                #Exit the game if restore failed
                if not self.serializer.read_from_file(self.restoreFilePath, pkg):
                    self.notifications.msg_serialized("FAILED")
                    return
                game.board = deepcopy(pkg['board'])
                self.botScore = pkg['botWins']
                self.humanScore = pkg['humanWins']
                self.nextPlayer = pkg['nextPlayer']
                self.gameResult = game.implement_game(self.restoringGame,
                                                      self.nextPlayer)
                self.restoringGame = False
            else:
                self.gameResult = game.implement_game(self.restoringGame)
            #If a player has won the game
            if self.gameResult == 'h':
                self.humanScore += 1
            if self.gameResult == 'c':
                self.botScore += 1
            # 'S' refers to serialize during computer's turn and 's' refers
            # to serialize during human's turn
            if (self.gameResult == 'S' or self.gameResult == 's'):
                self.serialize_game(game)
                return True
            #Ask if user wants to continue to next round
            self.notifications.msg_want_to_play_again()
            if not self.wants_to_continue():
                self.quit = True
            self.notifications.draw_divider()
            #If user chooses to quit, stop the tournament
            if self.quit:
                break
        #Displaying the tournament results
        self.notifications.msg_display_results(self.botScore, self.humanScore)

    # Serializes a Tournament state
    def serialize_game(self, game):
        """Process the serialization request for *game*: record whose turn
        it was ('S' = computer, otherwise human) and write the state out."""
        #Store the next player in a string
        if self.gameResult == 'S':
            self.nextPlayer = "Computer"
        else:
            self.nextPlayer = "Human"
        #Write the serialized output to a file and exit
        if (self.serializer.write_to_file(game.board, self.botScore,
                                          self.humanScore, self.nextPlayer)):
            self.notifications.msg_serialized("SUCCESSFUL")
        else:
            self.notifications.msg_serialized("FAILED")

    # Gets user's choice on whether to continue to another round
    def wants_to_continue(self):
        """Block until the user presses y/Y (returns True) or n/N (False).

        NOTE(review): the local name `input` shadows the builtin.
        """
        #Continue asking user for input until they press 'y' or 'n'
        while True:
            input = getche()
            if (input == 'y' or input == 'Y'):
                return True
            if (input == 'n' or input == 'N'):
                return False
def initConnection(self):
    """Start the sender, announce our pseudo to the server, and wire the
    sender's signals to the GUI reaction slots."""
    sender = Sender()
    self.msgSender = sender
    sender.sendToStream(Serializer.serializePseudo(self.pseudo))
    # TODO: revisit this signal wiring once it has proven reliable
    sender.c.msgReceivedSignal.connect(self.reactToReceivedMsg)
    sender.c.pseudoChangedSignal.connect(self.reactToPseudosConnected)
def sendMsgToServer(self):
    """Serialize the text-entry contents (prefixed with our pseudo) and send
    it to the server, then clear the input field."""
    message = self.pseudo + ": " + self.lineEdit_2.text()
    self.msgSender.sendToStream(Serializer.serializeTextEntry(message))
    self.clearField()
raw_frequencies=False, tokens=False, sents=False, tf_idf=False, cs=True, remove_stopwords=False, async=False): name = get_filename(collection.id, tokens, sents, tf_idf, cs, remove_stopwords, raw_frequencies) folder_path = corpora_path + str(collection.id) if not os.path.exists(folder_path): os.mkdir(folder_path) if os.path.exists(folder_path + "/" + name) and async is False: serializer = Serializer() corpus = serializer.load(folder_path + "/" + name) else: corpus = None path = folder_path + "/" + name features_path = None #docs = list(collection.documents.values_list("id", "content")) if raw_frequencies or tf_idf: features_path = folder_path + "/" + name + ".features" process_collection_async.delay(collection.id, path, tokens, sents,