def search_video_by_video(self, video_lib, source, notification, description=None, config=None):
    """search video by video.

    :param video_lib: video lib
    :type video_lib: string
    :param source: video source
    :type source: string
    :param notification: notification name
    :type notification: string
    :param description: description for this request
    :type description: string
    :return: **BceResponse**
    """
    payload = dict(source=source, notification=notification)
    if description is not None:
        payload['description'] = description
    path = str.encode('/v2/videolib/' + video_lib)
    return self._send_request(
        http_methods.POST,
        path,
        headers={b'Content-Type': b'application/json'},
        params={b'searchByVideo': b''},
        body=json.dumps(payload),
        config=config)
def get_signature(self, prepared_request):
    """Compute the HMAC-SHA256 signature for a prepared HTTP request.

    The request method, URL and body are joined with NUL (``\\x00``)
    separators and signed with ``self.api_secret``.

    :param prepared_request: object exposing ``method``, ``url`` and
        ``body`` attributes (e.g. a ``requests.PreparedRequest``).
    :return: hex-encoded signature string (64 characters).
    """
    # A request without a body signs identically to an empty-string body.
    # (Fixed: original compared with `== None` instead of `is None`.)
    if prepared_request.body is None:
        body = ''
    else:
        body = prepared_request.body
    signing_bytestring = b"\x00".join([
        str.encode(prepared_request.method),
        str.encode(prepared_request.url),
        str.encode(body),
    ])
    signature = hmac.new(str.encode(self.api_secret),
                         signing_bytestring,
                         digestmod=sha256)
    return signature.hexdigest()
def begin_test(line):
    """Telnet to the device, run ``netstat`` and check for the expected banner.

    :param line: unused; kept for the caller's callback signature.
    :return: True when "Banana Terminal" appears in the device reply.
    """
    conn = socket.socket()
    conn.connect(("10.0.0.63", 23))
    conn.send(str.encode("netstat\r\n"))
    reply = conn.recv(1024).decode()
    print(reply)
    conn.close()
    return "Banana Terminal" in reply
def _sendReq(self, req):
    """Actually send the request.

    Opens a telnet session to ``self.host``, writes *req*, reads up to the
    next ``:`` and returns only the first whitespace-separated token of the
    decoded reply.
    """
    session = telnetlib.Telnet(self.host)
    self.logger.info('sent: %s', req)
    session.write(str.encode(req))
    raw = session.read_until(b':', TIME_OUT)
    first_token = raw.decode('latin-1').split()[0]
    session.close()
    return first_token
def setConnectionState(self, enable):
    """
    Connects or disconnects from the GP3 eye tracking hardware.

    By default, when ioHub is started, a connection is automatically made,
    and when the experiment completes and ioHub is closed, so is the GP3
    connection.

    Args:
        enable (bool): True = enable the connection, False = disable the
            connection.

    Return:
        bool: indicates the current connection state to the eye tracking
        hardware.
    """
    if enable is True and self._gp3 is None:
        try:
            self._rx_buffer=''
            # Gazepoint control software listens on localhost:4242.
            self._gp3 = socket.socket()
            address = ('127.0.0.1',4242)
            self._gp3.connect(address)
            # Enable every data channel except the main data stream
            # (ENABLE_SEND_DATA stays 0 until recording is started).
            init_connection_str='<SET ID="ENABLE_SEND_CURSOR" STATE="1" />\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_POG_LEFT" STATE="1" />\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_POG_RIGHT" STATE="1" />\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_USER_DATA" STATE="1"/>\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_PUPIL_LEFT" STATE="1" />\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_PUPIL_RIGHT" STATE="1" />\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_POG_FIX" STATE="1" />\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_POG_BEST" STATE="1" />\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_DATA" STATE="0" />\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_COUNTER" STATE="1" />\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_TIME" STATE="1" />\r\n'
            init_connection_str+='<SET ID="ENABLE_SEND_TIME_TICK" STATE="1" />\r\n'
            self._gp3.sendall(str.encode(init_connection_str))
            # block for up to 1 second to get reply txt.
            strStatus = self._checkForNetData(1.0)
            if strStatus:
                self._rx_buffer = ''
                return True
            else:
                return False
        except socket.error as e:
            # WSAECONNREFUSED (10061): nothing listening on the port.
            if e.args[0]==10061:
                print2err('***** Socket Error: Check Gazepoint control software is running *****')
            print2err('Error connecting to GP3 ', e)
    elif enable is False and self._gp3:
        try:
            if self._gp3:
                # Stop streaming before tearing the socket down.
                self.setRecordingState(False)
                self._gp3.close()
                self._gp3 = None
                self._rx_buffer=''
        except Exception:
            print2err('Problem disconnecting from device - GP3')
            self._rx_buffer=''
    return self.isConnected()
def _sendReq(self, req):
    """Actually send the request.

    Opens a telnet session to ``self.host``, writes *req*, logs and returns
    the full decoded reply (read up to the next ``:``).
    """
    conn = telnetlib.Telnet(self.host)
    self.logger.info('sent: %s', req)
    conn.write(str.encode(req))
    reply = conn.read_until(b':', TIME_OUT).decode('latin-1')
    conn.close()
    self.logger.info('get: %s', reply)
    return reply
def UDP_send_much(trigger_line):
    """Send five numbered UDP datagrams to (HOST, PORT).

    :param trigger_line: unused; kept for the test-harness callback signature.
    """
    MESSAGE = "Message #"
    # Fixed: removed the dead `sock = socket.socket` assignment (the class
    # object itself) that was immediately overwritten on the next line.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.connect((HOST, PORT))
    for i in range(0, 5):
        msg = str.encode(MESSAGE + repr(i))
        sock.send(msg)
        print("Sending {}".format(msg))
def setRecordingState(self,recording):
    """
    setRecordingState is used to start or stop the recording of data from
    the eye tracking device.

    If the eye tracker is already recording and setRecordingState(True) is
    called, the eye tracker will simply continue recording and the method
    call is a no-op. Likewise if the system has already stopped recording
    and setRecordingState(False) is called again.

    Args:
        recording (bool): if True, the eye tracker will start recording
            data; False = stop recording data.

    Return:
        bool: the current recording state of the eye tracking device.
    """
    current_state = self.isRecordingEnabled()
    if self._gp3 and recording is True and current_state is False:
        # Start: clear the receive buffer, enable the data stream, then
        # block for up to 1 second waiting for an acknowledgement.
        self._rx_buffer=''
        self._gp3.sendall(str.encode('<SET ID="ENABLE_SEND_DATA" STATE="1" />\r\n'))
        rxdat = self._checkForNetData(1.0)
        if rxdat is None:
            # No reply from the device: treat the start as failed.
            EyeTracker._recording=False
            return EyeTrackerDevice.enableEventReporting(self, False)
        EyeTracker._recording=True
    elif self._gp3 and recording is False and current_state is True:
        # Stop: disable the data stream and drop any cached sample state.
        self._rx_buffer=''
        self._gp3.sendall(str.encode('<SET ID="ENABLE_SEND_DATA" STATE="0" />\r\n'))
        rxdat = self._checkForNetData(1.0)
        EyeTracker._recording=False
        self._latest_sample=None
        self._latest_gaze_position=None
    return EyeTrackerDevice.enableEventReporting(self, recording)
def write(self, query_string):
    """Write a string to the serial port.

    The outgoing payload is ``initial_character + str(query_string) +
    termination_character``.  Any bytes still waiting in the output buffer
    are flushed first.
    """
    with self.communications_lock:
        assert self.ser.isOpen(
        ), "Warning: attempted to write to the serial port before it was opened. Perhaps you need to call the 'open' method first?"
        try:
            # NOTE(review): `outWaiting()` vs `out_waiting` — presumably a
            # pyserial version difference (method in older releases,
            # property in newer ones); confirm against the pinned version.
            if self.ser.outWaiting() > 0:
                self.ser.flushOutput()  #ensure there's nothing waiting
        except AttributeError:
            if self.ser.out_waiting > 0:
                self.ser.flushOutput()  #ensure there's nothing waiting
        self.ser.write(
            str.encode(self.initial_character + str(query_string) +
                       self.termination_character))
def readline(self, timeout=None):
    """Read single bytes from the serial port until the termination
    character arrives (or a read returns nothing) and return the decoded
    text, terminator included.

    :param timeout: accepted for interface compatibility; not used here.
    """
    with self.communications_lock:
        terminator = str.encode(self.termination_character)
        tail = len(terminator)
        buf = bytearray()
        while True:
            chunk = self.ser.read(1)
            if not chunk:
                break
            buf.extend(chunk)
            if buf[-tail:] == terminator:
                break
        return buf.decode()
def execute_application(executable, params, process, raw_input_line):
    """Run ``./<executable>`` in a shell, feed *raw_input_line* on stdin and
    parse its stdout into a solution.

    The Popen handle is stored into ``process[0]`` so the caller can inspect
    or terminate the child.
    """
    cmd = [" ".join(["./" + executable])]
    debug("Command: " + str(cmd))
    process[0] = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, shell=True)
    started = time.time()
    debug("Waiting for termination")
    out, err = process[0].communicate(str.encode(raw_input_line))
    finished = time.time()
    debug("Execution time: %s seconds" % (finished - started))
    raw_policy = bytes.decode(out).split("\n")
    return read_solution(raw_policy)
def insert_image(self, image_lib, source, description=None, config=None):
    """insert an image.

    :param image_lib: image lib
    :type image_lib: string
    :param source: video source
    :type source: string
    :param description: description for this request
    :type description: string
    :return: **BceResponse**
    """
    payload = {'source': source}
    if description is not None:
        payload['description'] = description
    path = str.encode('/v2/imagelib/' + image_lib)
    return self._send_request(
        http_methods.PUT,
        path,
        headers={b'Content-Type': b'application/json'},
        body=json.dumps(payload),
        config=config)
def b64d_enc_dec(str, encode="ascii", decode="utf-8"):
    """URL-safe base64-decode a string.

    Defaults added for consistency with the sibling overload that already
    uses ``encode="ascii"``/``decode="utf-8"`` — backward compatible.

    :param str: base64 text to decode (parameter name shadows the builtin
        ``str``; kept unchanged for existing callers).
    :param encode: codec used to turn the input text into bytes.
    :param decode: codec used to turn the decoded bytes back into text.
    :return: the decoded string.
    """
    return base64.urlsafe_b64decode(str.encode(encode)).decode(decode)
def b64d_enc_dec(str, encode="ascii", decode="utf-8"):
    """Base64-decode a string via ``b64d``, converting text to bytes with
    *encode* first and decoding the result back to text with *decode*."""
    raw = str.encode(encode)
    return b64d(raw).decode(decode)
def stream(self, events):
    """Splunk streaming-command entry point for fuzzylookup.

    Parses the command arguments (lookup name, lookup field, optional
    ``AS`` alias, OUTPUT/OUTPUTNEW and output field list), caches the
    lookup contents via a oneshot search job, then fuzzy-matches each
    incoming event against the cached rows using a thread pool, yielding
    the (possibly enriched) events back to Splunk.
    """
    logger = setup_logging('fuzzylookup')
    # Positional arguments only; key=value pairs are handled elsewhere.
    args = [
        val for val in self._metadata.searchinfo.args[2:] if '=' not in val
    ]
    logger.debug("Arguments: " + str(self._metadata.searchinfo.args[2:]))
    arg_count = len(args)
    arg_index = 0
    # Parse the arguments to the command
    if arg_count >= 3:
        while arg_index < arg_count:
            # Process the lookup name, lookup field, search field
            if self.lookup == '':
                self.lookup = args[arg_index]
                arg_index += 1
            if self.lookupfield == '':
                self.lookupfield = args[arg_index]
                # Optional "AS <searchfield>" alias after the lookup field.
                if len(args) >= arg_index + 2:
                    if args[arg_index + 1].upper() == 'AS':
                        self.searchfield = args[arg_index + 2]
                        arg_index += 3
                    else:
                        self.searchfield = self.lookupfield
                        arg_index += 1
                else:
                    self.searchfield = self.lookupfield
                    arg_index += 1
            # Remaining tokens: OUTPUT / OUTPUTNEW flags or output fields.
            if arg_index < len(args) and None not in [
                    self.lookup, self.lookupfield, self.searchfield
            ]:
                if args[arg_index].upper() == 'OUTPUT':
                    self.output_overwrite = True
                elif args[arg_index].upper() == 'OUTPUTNEW':
                    self.output_overwrite = False
                else:
                    # Add field to output fields list
                    output_field_name = args[arg_index].strip(',')
                    # Optional "AS <alias>" after the output field name.
                    if len(args) >= arg_index + 2:
                        if args[arg_index + 1].upper() == 'AS':
                            self.output_aliases[output_field_name] = args[
                                arg_index + 2]
                            arg_index += 2
                        else:
                            self.output_aliases[
                                output_field_name] = output_field_name
                    else:
                        self.output_aliases[
                            output_field_name] = output_field_name
                arg_index += 1
    else:
        logger.critical(
            "Not enough parameters specified to execute fuzzylookup.")
        print("Not enough parameters specified to execute fuzzylookup.")
        exit(1957)
    if None in [self.lookup, self.lookupfield, self.searchfield]:
        logger.critical("Could not parse all arguments for fuzzylookup")
        print("Could not parse all arguments for fuzzylookup")
        exit(1173)
    logger.debug("lookup: " + self.lookup)
    logger.debug("lookupfield: " + self.lookupfield)
    logger.debug("searchfield: " + self.searchfield)
    logger.debug("output_overwrite: " + str(self.output_overwrite))
    logger.debug("output_aliases: " + str(self.output_aliases))
    if self.prefix is None:
        self.prefix = ''
    if self.addmetrics is None:
        self.addmetrics = False
    logger.debug("prefix = %s", self.prefix)
    logger.debug("addmetrics = %s", self.addmetrics)
    #log beginning of comparison
    logger.info('Comparing %s to %s in %s lookup for fuzzy matches',
                self.searchfield, self.lookupfield, self.lookup)
    start_time = time.time()
    lookupfilter_str = ''
    # See if we have a lookup filter we can use in the root search
    if self.lookupfilter is not None and len(self.lookupfilter) > 0:
        # Split the filter into multiple key/value filters
        # Break the data into multiple fields, if needed
        # Replace the space delimiter with |, then split by |
        filter_list = re.sub(r'\s+(\w+=)', '|\g<1>', self.lookupfilter).split('|')  # pylint: disable=anomalous-backslash-in-string
        for f in filter_list:
            logger.debug("filter = " + f)
            filter_re = re.compile(r'^(.*?)([<>=]+)(.*)$')
            m = filter_re.match(f)
            if m is not None:
                filter_obj = {
                    'field': m.group(1),
                    'op': m.group(2),
                    'value': m.group(3).strip('"')
                }
                # No $fieldname$ reference -> static filter; otherwise it is
                # a dynamic, per-event filter.  (The original comments had
                # these two labels swapped.)
                if re.search(r'\$\w+\$', f) is None:
                    self.lookup_filters_static.append(filter_obj)
                else:
                    self.lookup_filters_dynamic.append(filter_obj)
            else:
                # Only handle field/value pair filters. Ignore all others.
                logger.info("Ignored filter: %s", f)
    # Build the static filter string to go into the SPL search
    for f in self.lookup_filters_static:
        lookupfilter_str += '{0}{1}"{2}" '.format(
            f['field'].replace('|', ""), f['op'], f['value'].replace('|', ""))
    logger.debug("Static lookup filter: %s", lookupfilter_str)
    if len(lookupfilter_str) > 0:
        lookup_search = '|inputlookup {0} where {1}="*" | search {2} | eval {1}=lower({1}) | dedup {1}'.format(
            self.lookup, self.lookupfield, lookupfilter_str)
    else:
        lookup_search = '|inputlookup {0} where {1}="*" | eval {1}=lower({1}) | dedup {1}'.format(
            self.lookup, self.lookupfield)
    logger.info('Lookup query is: %s' % (lookup_search))
    # Connect via existing session key
    self.session_key = self._metadata.searchinfo.session_key
    self.splunkd_uri = self._metadata.searchinfo.splunkd_uri
    namespace = self._metadata.searchinfo.app
    try:
        self.service = client.connect(token=self.session_key)
        logger.info('Successfully connected to %s', str(self.splunkd_uri))
    except BaseException as e:
        logger.error('Error connecting: %s', repr(e))
    # bind incoming search results for reading and extraction of search field
    # execute lookup command and bind results
    logger.info('Attempting to cache lookup of %s', self.lookup)
    # Set the URL of the Splunk endpoint
    search_url = '%s/servicesNS/nobody/%s/search/jobs' % (self.splunkd_uri,
                                                          namespace)
    # Set the headers for HTTP requests
    headers = {
        'Authorization': 'Splunk %s' % self.session_key,
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    try:
        request_data = {
            "search": lookup_search,
            "exec_mode": 'oneshot',
            "count": '0',
            "rf": self.lookupfield,  # Required fields list
            "namespace": namespace,
            "output_mode": 'json'
        }
        #logger.debug('Request data: %s', str(request_data))
        logger.debug('Search URL: %s', str(search_url))
        #logger.debug('Headers: %s', str(headers))
        payload = str.encode(urllib.parse.urlencode(request_data))
        json_data, result_code = request('POST', search_url, payload, headers)
        # Write the values from the lookup to lookup_list
        self.lookup_list = json.loads(json_data)['results']
        logger.info('Retrieved %d records from lookup %s',
                    len(self.lookup_list), self.lookup)
        logger.debug('Response code: %s', result_code)
        #logger.debug('Response contents: %s', json_data)
    except BaseException as e:
        logger.error('Could not cache lookup %s: %s', self.lookup, repr(e))
    # Make a Pool of workers
    pool = ThreadPool(5)
    try:
        count = 0
        if len(self.lookup_list) > 0:
            logger.debug("Running ThreadPool")
            results = pool.map(self.get_distances, events)
            for result in results:
                yield result
                count += 1
        else:
            # Nothing cached: pass events through unmodified.
            for event in events:
                yield event
                count += 1
    except BaseException as e:
        logger.error("Error: %s" % repr(e))
        results = {}
    duration_secs = round(time.time() - start_time)
    logger.info(
        "Completed fuzzylookup search command for %s results in %s seconds.",
        str(count), str(duration_secs))
def UDP_send(trigger_line):
    """Send one fixed UDP datagram from (S_HOST, S_PORT + 1) to (HOST, PORT).

    :param trigger_line: unused; kept for the test-harness callback signature.
    """
    MESSAGE = str.encode("POSIX is for hipsters")
    # Fixed: removed the dead `sock = socket.socket` assignment (the class
    # object itself) that was immediately overwritten on the next line.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind((S_HOST, S_PORT + 1))
    sock.sendto(MESSAGE, (HOST, PORT))
def _get_animixplay_link(self, url):
    """Scrape episode/stream data for an animixplay.com URL.

    Handles three URL families: ``/v2/``, ``/v4/`` and everything else.
    Returns a dict with ``title`` and ``episodes`` keys (or ``[]`` when the
    requested episode is out of range, or ``{'episodes': None}`` when the
    listing is too large to generate).

    NOTE(review): this method references a global/free name ``episode`` that
    is not defined in this function — verify it is set elsewhere before the
    call.
    """
    result = requests.get(url).text
    soup = bs.BeautifulSoup(result, 'html.parser')
    if '/v2/' in url or '/v4/' in url:
        # Build the obfuscated post id expected by the /raw/ endpoint:
        # base64(NaN + base64(url path segment) + salt).
        url_id = str.encode(url.split("/")[4])
        url_id = base64.b64encode(url_id).decode()
        post_id = ('NaN{}N4CP9Eb6laO9N'.format(url_id)).encode()
        post_id = base64.b64encode(post_id).decode()
        title = soup.find('span', {'class':'animetitle'}).get_text()
        data_id = 'id2' if '/v4/' in url else 'id'
        try:
            data = (requests.post('https://animixplay.com/raw/2ENCwGVubdvzrQ2eu4hBH',
                                  data={data_id:post_id}).json())
        except:
            # Fallback endpoints per URL family when the /raw/ call fails.
            if '/v4/' in url:
                data = (requests.post('https://animixplay.com/e4/5SkyXQULLrn9OhR',
                                      data={'id':url.split('/')[-1]}).json())['epstream']
            if '/v2' in url:
                data = (requests.post('https://animixplay.com/e2/T23nBBj3NfRzTQx',
                                      data={'id':url.split('/')[-1]}).json())['epstream']
        if '/v4/' in url:
            if int(episode) > len(data):
                return []
            # Has a list of mp4 links.
            ## return data
        elif '/v2/' in url:
            # Has elaborate list for all metadata on episodes.
            # NOTE(review): `data` is reset to [] and then iterated, so this
            # loop never executes — presumably the response saved above was
            # meant to be iterated instead; confirm against upstream.
            data = []
            for i in data:
                info_dict = i.get('src', None)
                # Looks like mp4 is always first in the list
                # Sometimes it returns None
                if info_dict:
                    srcs = []
                    for k in info_dict:
                        if k['type'] == 'mp4':
                            srcs.append({'file': k.get('file', ''),
                                         'flavor': k.get('lang', ''),
                                         'res': k.get('resolution', '')})
                    data.append(srcs)
                ## else:
                ##     episodes.append('')
            if int(episode) > len(data):
                return []
        return {'title': title, 'episodes': data}
    else:
        try:
            # Episode list is embedded as JSON in the page.
            ep_list = soup.find('div', {'id':'epslistplace'}).get_text()
            title = soup.find('span', {'class':'animetitle'}).get_text()
            jdata = json.loads(ep_list)
            ## keyList = list(jdata.keys())
            ## del keyList[0]
            ep_total = jdata['eptotal']
            episodes = jdata['stape']
            episodes_total = len(episodes)
            if ep_total == episodes_total:
                return {'title': title, 'episodes': episodes}
            if ep_total > 30:
                return {'episodes': None}
            episodes = self._get_animixplay_link_gen(url, ep_total, episodes_total)
            return {'title': title, 'episodes': episodes}
        except:
            # Link generation
            jdata = (requests.post('https://animixplay.com/e5/dZ40LAuJHZjuiWX',
                                   data={'id':url.split('/')[-1]}).json())
            title = jdata['details']['title']
            ep_total = jdata['epstream']['eptotal']
            episodes = jdata['epstream']['stape']
            episodes_total = len(episodes)
            if ep_total == episodes_total:
                return {'title': title, 'episodes': episodes}
            if ep_total > 30:
                return {'episodes': None}
            episodes = self._get_animixplay_link_gen(url, ep_total, episodes_total)
            return {'title': title, 'episodes': episodes}
def str_to_raw(str):
    """Convert string received from commandline to raw (unescaping the string).

    E.g. the two-character sequence ``\\n`` becomes a real newline.  The
    parameter name shadows the builtin ``str``; it is kept unchanged so
    existing callers are unaffected.
    """
    try:
        # Python 2: byte strings support .decode('string_escape')
        return str.decode('string_escape')
    # Fixed: narrowed the bare `except:` — on Python 3 `str` has no
    # .decode attribute, which raises AttributeError specifically.
    except AttributeError:
        # Python 3: round-trip through bytes and unescape
        return str.encode().decode('unicode_escape')