def stringReceived(self, string):
    """Handle one length-prefixed JSON payload from a phone/computer.

    Expected keys: user, directory, filename, raw_data, has_raw_data.
    If has_raw_data == "1" the base64-encoded file is written under
    received_files/<directory>/<filename>, then the image is handed to the
    app for processing; on success an ack containing the filename is sent
    back to the sender on port 7895.

    NOTE(review): raw_data.decode('base64') is Python-2-only; on Python 3
    this raises (use base64.b64decode) — confirm the target interpreter.
    """
    phone_data = json.loads(string)
    user = phone_data["user"]
    directory = phone_data["directory"]
    filename = phone_data["filename"]
    raw_data = phone_data["raw_data"]
    has_raw_data = phone_data["has_raw_data"]
    Logger.debug("FilenameReceiver Server: received: {0}, {1}, {2}, {3}".format(user, directory, filename, has_raw_data))
    # Save the file to disk (if we have the file data)
    if has_raw_data == "1":
        image_data = raw_data.decode('base64')
        folder_path = os.getcwd() + os.sep + "received_files" + os.sep + directory
        if not os.path.exists(folder_path):
            os.makedirs(folder_path)
        f = open(folder_path + os.sep + filename, 'wb')
        f.write(image_data)
        f.close()
        Logger.debug("FilenameReceiver Server: wrote image file to, received_files/{0}/{1}".format(directory, filename))
    # Do something here, in terms of logic (assuming received the file).
    # NOTE(review): folder_path is only bound inside the has_raw_data branch;
    # if has_raw_data != "1" this line raises NameError — confirm the protocol
    # always sends raw data, or guard this call.
    result = self.app_root.root.process_image_from_phone(int(user), folder_path + os.sep + filename, filename)
    if result is True:
        # Send an ack back to the computer (or phone), with the filename
        source = self.transport.getPeer()  # retrieve the ip of the computer that sent us the payload
        output = {"filename": filename}
        line = json.dumps(output)
        ClientCreator(reactor, TCPSender).connectTCP(source.host, 7895).addCallback(sendMessage, "{0}".format(line)).addErrback(printError)
def update_webpage_image(self, coreImage, t):
    """Push a freshly rendered screenshot texture into both webpage widgets.

    Runs on the main thread (scheduled via Clock); t is the capture start
    time used only for duration logging.
    """
    fresh_texture = coreImage.texture
    for widget_name in ("WebpageImage", "WebpageImageLarge"):
        self.ids[widget_name].texture = fresh_texture
    Logger.debug("WebpageImage: Finished image update in " + str(time.time() - t))
def webpage_image_updater(self):
    """Background-thread loop: periodically screenshot the browser and hand
    the image to the GUI.

    Never returns; intended to run on a daemon thread. When updates are
    paused it just sleeps for one interval and re-checks.
    """
    while True:
        if self.do_webpage_image_update:
            t = time.time()
            Logger.debug("WebpageImage: Starting image update")
            data = io.BytesIO(self.driver.get_screenshot_as_png())
            coreImage = CoreImage(data, ext="png")
            # Texture upload must happen on the GL/main thread, hence Clock.
            Clock.schedule_once(
                lambda _: self.Gui.update_webpage_image(coreImage, t), 0)
            try:
                # Sleep only for the remainder of the interval.
                time.sleep(self.webpage_image_update_interval - (time.time() - t))
            except ValueError:
                # Negative sleep argument: capture/convert exceeded the interval.
                Logger.warning(
                    "WebpageImage: Convert took to long, took " + str(time.time() - t) + " and it should've took " + str(self.webpage_image_update_interval))
        else:
            time.sleep(self.webpage_image_update_interval)
def on_image_filepath(self, _, image_filepath):
    """Load a new image, replacing any previous canvas image, and size self
    and the mesh/image widgets to the texture.

    self has default size at this point, sizing must be done in on_size.
    """
    try:
        self.canvas.remove(self._core_image)
        Logger.debug('%s: removed old _core_image', self.__class__.__name__)
    except Exception:
        # Narrowed from a bare 'except:' (which also swallowed SystemExit/
        # KeyboardInterrupt). First load: nothing to remove yet.
        pass
    with self.canvas:
        # mipmap=True changes tex_coords and screws up calculations
        # TODO Research mipmap more
        self._core_image = cimage = CoreImage(image_filepath, mipmap=False)
    texture = cimage.texture
    self.image.texture = texture
    self.mesh.texture = texture
    self.texture = texture
    # TODO Position Image center/zoomed by default
    # Nexus 10 Image is smaller, test density independent
    # TODO Is there any benifit to Image vs Rectangle w/ Texture ?
    # No need for Image if we're not taking advantage of ratio maintaining code
    # img = Image(texture=texture, keep_ratio=True, allow_stretch=True)
    # img.bind(norm_image_size=self.on_norm_image_size)
    # self.add_widget(img)
    # Just set Scatter and Rectangle to texture size
    self.image.size = texture.size
    self.size = texture.size
def on_parent_size(self, widget, size):
    """Re-center self in the parent and scale to fit whenever the parent resizes.

    Skipped entirely when autosize is off (external code manages pos/scale)
    and while the parent still reports a default/placeholder height (0 or 1).

    Removed: unused local 'img' and a block of dead commented-out sizing code.
    FIXME (inherited): updating Image.size messes up ControlPoint references,
    so self.image.size is intentionally not touched here.
    """
    if not self.autosize:
        # Other code will set pos/scale
        return
    Logger.debug(self.__class__.__name__ + '.on_parent_size %s', size)
    p_width, p_height = size
    # Ignore default/zero sizes
    if p_height == 0.0 or p_height == 1:
        Logger.debug('ignoring size %s', size)
        return
    # self size is always set to Image size, instead just re-center and re-scale
    # Idea: Maybe avoid this if user has moved-resized?
    self.center = self.parent.center
    # scale up to fit, whichever dimension is smaller
    h_scale = p_height / float(self.height)
    w_scale = p_width / float(self.width)
    self.scale = min(h_scale, w_scale)
def add_picture_to_scroller(self, instance, widget):
    """Detach a picture widget from its parent, normalize its size/transform,
    and append it to the vertical scroller.

    Best-effort: if the user re-parented the widget by touching it in the
    meantime, the add is skipped (see the except branch).
    """
    if self.parent is not None:
        self.parent.remove_widget(widget)
    item = widget
    # adjust the size of the image to allow for the borderimage background to be shown
    # fudge the borderimage into the equation (hard because it is drawn outside of the image,
    # and thus does not come into the calculations for the scrollview, which means
    # we need to manually fix up the left/right/top/bottom cases, for this to show
    # properly on the screen
    # 36 happens to the be border image size numbers we are using in the .kv file
    item.image.size = item.image.image_ratio * (self.scrollview_height - 36), self.scrollview_height - 36
    item.size = item.image.size[0] + 36, item.image.size[1] + 18
    item.size_hint_x = None
    # the scroller, effects size, and not the scale of the container, so we must adjust for this,
    # else the object will be in the container with its current transforms, which would look weird
    item.scale = 1
    item.rotation = 0
    try:
        self.items.add_widget(widget)
    except Exception:
        # Narrowed from a bare 'except:'; the swallow itself is deliberate (see log).
        Logger.debug("Vertical Scroller: (picture) timing issue, means user touched this object, so now it has a parent, when it shouldn't, so don't add to the scroller afterall")
def verifyCallback(self, connection, x509, errno, depth, preverifyOK):
    """OpenSSL verification callback: run post-verification checks on top of
    the library's pre-verification result.

    Returns True only when both pre- and post-verification succeed; fires
    postverify_hook on full success.
    """
    postverifyOK = False
    if preverifyOK:
        # Pre-verification passed; run our own subject-field checks on top.
        subject = x509.get_subject()
        Logger.debug("SSLCONTEXT: [Pre-verification] Certificate [{}] Verfied.".format(subject))
        postverifyOK = self.postverifyCallback(subject, preverifyOK)
    else:
        Logger.debug(
            "SSLCONTEXT: [Pre-verification] Certificate verification failed, {}".format(x509.get_subject()))
    # Post verification tasks
    if postverifyOK:
        self.postverify_hook(connection, x509)
    return preverifyOK and postverifyOK
def postverifyCallback(self, subject, preverifyOK):
    """Post-verification: check configured certificate subject fields.

    Args:
        subject: X509Name taken from the peer certificate.
        preverifyOK: OpenSSL pre-verification result; passed through
            unchanged (falsy) when pre-verification already failed.

    Returns:
        True when every field listed in SSL_POST_VERIF_VALUES matches the
        certificate, otherwise False.
    """
    if not preverifyOK:
        return preverifyOK
    # variables for post-verify callback check on cert fields
    _cert_fields = constants.SSL_CERT_FIELDS
    _values_dict = constants.SSL_POST_VERIF_VALUES
    # Passed checks
    checklist_count = 0
    # Get certificate components
    certificate_components = dict(subject.get_components())
    # Check fields
    for i in _values_dict.keys():
        if certificate_components[_cert_fields[i]] in _values_dict[i]:
            checklist_count += 1
        else:
            # Fixed: were Python 2 'print' statements (SyntaxError on Python 3).
            print(certificate_components[_cert_fields[i]])
            print(_values_dict[i])
    # Checklist roundoff
    if checklist_count == len(_values_dict.keys()):
        Logger.debug("SSLCONTEXT: [Post-verification] certificate verfication passed.")
        return True
    else:
        Logger.debug(
            "SSLCONTEXT: [Post-verification] Certificate verification failed. ({}/{} checks passed)".format(checklist_count, len(_values_dict.keys())))
        return False
def load_user_data(self, callback: Optional[Callable] = None):
    """Fetch users from the backend once and cache a name->email mapping,
    sorted by name; then build the user-select dialog.

    A second call is a no-op while the cache is non-empty. Exits the
    process hard if the server cannot be reached.
    """
    if len(App.get_running_app().user_mapping) > 0:
        Logger.debug("StellaPayUI: Not loading user data again")
        return
    user_data = App.get_running_app().session_manager.do_get_request(url=Connections.get_users())
    Logger.debug("StellaPayUI: Loaded user data")
    # Reset the cache before (re)filling it.
    App.get_running_app().user_mapping = {}
    if user_data and user_data.ok:
        user_json = user_data.json()
        print(f"StellaPayUI: Loading user mapping on thread {threading.current_thread().name}")
        # Build the mapping in one pass and sort it by user name.
        App.get_running_app().user_mapping = OrderedDict(
            sorted(((user["name"], user["email"]) for user in user_json), key=lambda x: x[0]))
        # Create dialog and its items on the main thread
        self.create_user_select_dialog(callback=callback)
    else:
        Logger.critical("StellaPayUI: Error: addresses could not be fetched from server")
        os._exit(1)
def nfc_card_presented(self, uid: str):
    """Handle an NFC card scan: look up the card owner and route screens.

    Known card -> set the owner as active user and go to the product screen.
    Unknown card -> pass the uid to the register-UID screen.
    Ignored entirely while a transaction is already in progress.

    NOTE(review): the spinner is activated here but never deactivated in
    this method — presumably the next screen hides it; confirm.
    """
    # Fixed log message: was "with uid" + uid (missing separator space).
    Logger.debug("StellaPayUI: Read NFC card with uid " + uid)
    # If we are currently making a transaction, ignore the card reading.
    if App.get_running_app().active_user is not None:
        Logger.debug("StellaPayUI: Ignoring NFC card as we are currently making a transaction.")
        return
    # Show the spinner
    self.ids.spinner.active = True
    # Request user info for the specific UID to validate person
    response = App.get_running_app().session_manager.do_get_request(url=Connections.request_user_info() + uid)
    # Check response code to validate whether this user existed already. If so, proceed
    # to the productScreen, else proceed to the registerUID screen
    if response and response.ok:
        # store result in JSON
        query_json = response.json()
        # Move to WelcomeScreen
        self.manager.transition = SlideTransition(direction='left')
        # Removed unused local 'user_mail' (email was read but never used).
        App.get_running_app().active_user = query_json["owner"]["name"]
        # Go to the product screen
        self.manager.current = Screens.PRODUCT_SCREEN.value
    else:
        # User was not found, proceed to registerUID file
        self.manager.get_screen(Screens.REGISTER_UID_SCREEN.value).nfc_id = uid
        self.manager.current = Screens.REGISTER_UID_SCREEN.value
def __init__(self, path):
    """Build a face detector backed by OpenCV's frontal-face Haar cascade.

    path is the base directory containing the ./data cascade file.
    """
    self.path = path
    Logger.debug("FaceDetector.__init__()")
    # Down-scale factor applied to frames before detection.
    self.scale_factor = 2
    cascade_path = os.path.join(self.path, "./data/haarcascade_frontalface_default.xml")
    self.detector = cv2.CascadeClassifier(cascade_path)
def dataReceived(self, data):
    """Parse a one-character mode state (0..3) from the wire and apply it.

    Payloads of any other length are silently ignored.
    """
    # print data
    if len(data) == 1:
        state = int(data)
        assert 0 <= state
        assert state <= 3
        # Fixed: concatenating the int 'state' to a str raised TypeError;
        # wrap in str() before logging.
        Logger.debug(self.__class__.__name__ + ': in ' + whoAmI() + '. ' + 'State is: ' + str(state))
        self.mindCupolaArduinoController.setModeAutomatically(state)
def privmsg(self, user, channel, msg):
    """This will get called when the bot receives a message."""
    # Keep only the nick from the full "nick!user@host" mask.
    user = user.partition('!')[0]
    Logger.debug('{}->{}: {}'.format(user, channel, msg))
    #self.factory.app.message_callback("<%s> %s" % (user, msg))
    try:
        self._handle_privmsg(user, channel, msg)
    except Exception as e:
        # A handler failure must not kill the IRC connection; log and move on.
        Logger.error('{}->{}: {} | {}'.format(user, channel, msg, e))
def __getitem__(self, item):
    """Return the texture for zip entry *item*, loading and caching lazily.

    Lookup order: per-instance _loaded_textures list, then the global
    'kv.texture' Cache, then decode the entry from the zip via the first
    ImageLoader that accepts the extension and can load from memory.
    """
    if not self._loaded_textures[item]:
        # first, check if a texture with the same name already exist in the
        # cache
        # pylint: disable-msg=redefined-builtin
        # 'chr' is deliberately rebound to the filename's string type so the
        # uid/source strings match it (py2 str vs unicode compatibility trick).
        chr = type(self._filename)
        uid = chr(u'%s|%d|%d') % (self._filename, self._mipmap, item)
        texture = Cache.get('kv.texture', uid)
        # if not create it and append to the cache
        if texture is None:
            zfilename = self._index_list[item]
            # read file and store it in mem with fileIO struct around it
            tmpfile = BytesIO(self._zip_file.read(zfilename))
            ext = zfilename.split('.')[-1].lower()
            image = None
            for loader in ImageLoader.loaders:
                if (ext not in loader.extensions() or
                        not loader.can_load_memory()):
                    continue
                Logger.debug('Image%s: Load <%s> from <%s>',
                             loader.__name__[11:], zfilename, self._filename)
                try:
                    image = loader(zfilename, ext=ext, rawdata=tmpfile,
                                   inline=True)
                except:  # pylint: disable-msg=bare-except # noqa
                    # Loader failed, continue trying.
                    continue
                break
            if image is None:
                raise AssertionError("Could not load image {} (index {}) "
                                     "from zip {}".format(
                                         zfilename, item, self._filename))
            self.width = image.width
            self.height = image.height
            imagedata = image._data[0]  # pylint: disable-msg=protected-access
            source = '{}{}|'.format(
                'zip|' if self._filename.endswith('.zip') else '',
                self._no_cache)
            imagedata.source = chr(source) + uid
            texture = Texture.create_from_data(imagedata, mipmap=self._mipmap)
            if not self._no_cache:
                Cache.append('kv.texture', uid, texture)
            if imagedata.flip_vertical:
                texture.flip_vertical()
        self._loaded_textures[item] = texture
    return self._loaded_textures[item]
def collect_modules(self):
    """Discover and import every '*app' extension directory in MODULE_BASE_DIR.

    Stores the successfully imported module objects on self.modules and
    returns the list of their names.
    """
    self.modules = []
    modules_names = [
        x for x in os.listdir(MODULE_BASE_DIR) if x.endswith('app')
    ]
    Logger.debug('Extensions directories found: {}'.format(modules_names))
    load_result = list(map(self.import_extension_module, modules_names))
    # Fixed: PEP 8 — compare against None with 'is not', not '!='.
    self.modules = [mod for mod, _name in load_result if mod is not None]
    ret = [name for mod, name in load_result if mod is not None]
    return ret
def import_extension_module(self, module_name):
    """Import one extension module by name.

    Returns (module, module_name); module is None on failure, in which
    case the name is also recorded in self.errors.
    """
    try:
        module = importlib.import_module(module_name)
        Logger.debug('Loaded extension {}'.format(module_name))
        return module, module_name
    except Exception as e:
        Logger.error('Error loading extension {}: {}'.format(
            module_name, e))
        self.errors.add(module_name)
        return None, module_name
def generate_uuid(host=None):
    """ Generate capsule UID for particular host. """
    # Deterministic: uuid5 is a name-based (SHA-1) UUID, truncated to 8 chars.
    uuid_str = str(uuid5(NAMESPACE_URL, host))[:8]
    Logger.debug("UTILITIES: UUID({}) = {}".format(host, uuid_str))
    return uuid_str
def load_products(self):
    """Populate each category tab with its product list items.

    Idempotent: bails out if the first tab already has children. Each
    product row shows a random fun-fact as its secondary text; a trailing
    empty row is appended per category for readability.
    """
    print(
        f"Loading product data on thread {threading.current_thread().name}"
    )
    start_time = time.time()
    if len(self.tabs[0].ids.container.children) > 0:
        Logger.debug(
            "StellaPayUI: Don't load products view again as it's already there.."
        )
        print(
            f"Loaded products (after skipping) in {time.time() - start_time} seconds"
        )
        return
    Logger.debug(f"StellaPayUI: Setting up product view")
    for tab in self.tabs:
        for product in App.get_running_app().products_per_category[
                tab.text]:
            # Get fun fact description of database
            product_description = App.get_running_app(
            ).database_manager.get_random_fun_fact(product.get_name())
            # Add item to the tab
            tab.ids.container.add_widget(
                ItemListUX(text=product.get_name(),
                           secondary_text=product_description,
                           secondary_theme_text_color="Custom",
                           secondary_text_color=[0.509, 0.509, 0.509, 1],
                           price="€" + product.get_price(),
                           shopping_cart=self.shopping_cart))
        # Add last item to the products (for each category) that is empty. This improves readability.
        tab.ids.container.add_widget(
            ItemListUX(text="",
                       secondary_text="",
                       secondary_theme_text_color="Custom",
                       secondary_text_color=[0.509, 0.509, 0.509, 1],
                       price=None,
                       shopping_cart=None))
        print(
            f"Loaded products of category {tab.text} (no skipping) in {time.time() - start_time} seconds"
        )
    print(
        f"Loaded all products (no skipping) in {time.time() - start_time} seconds"
    )
    return
def get_nat_ip():
    """Get IP of NAT.

    Opens a throwaway TCP connection toward a public host purely to learn
    which local interface address the OS picks; falls back to 127.0.0.1
    when there is no route.
    """
    s = socket(AF_INET, SOCK_STREAM)
    host = "127.0.0.1"
    try:
        s.connect(("www.google.com", 80))
    except error:
        Logger.debug("UTILITIES: No active NAT connection.")
    else:
        host = s.getsockname()[0]
    finally:
        # Fixed: the socket was leaked on the failure path (early return
        # before close); always release it.
        s.close()
    return host
def load_categories_and_products(self):
    """Fetch all product categories, then the (shown) products of each,
    filling self.products_per_category; notify the startup screen on success.

    Hard-exits the process on any server error.
    """
    # Get all categories names
    response = App.get_running_app().session_manager.do_get_request(
        url=Connections.get_categories())
    Logger.debug("StellaPayUI: Loading product categories")
    # Check status response
    if response and response.ok:
        categories = response.json()
        Logger.debug(
            f"StellaPayUI: Retrieved {len(categories)} categories")
        # Load tab for each category
        for cat in categories:
            # Request products from category tab_text
            request = Connections.get_products() + cat['name']
            # NOTE: 'response' is deliberately rebound per category here.
            response = App.get_running_app(
            ).session_manager.do_get_request(request)
            Logger.debug(
                f"StellaPayUI: Loading products for category '{cat['name']}'"
            )
            # Evaluate server response
            if response and response.ok:
                # convert response to json
                products_json = response.json()
                self.products_per_category[cat['name']] = []
                Logger.debug(
                    f"StellaPayUI: Retrieved {len(products_json)} products for category '{cat['name']}'"
                )
                # Create a product object for all
                for product in products_json:
                    # Only add the product to the list if the product must be shown
                    if product['shown']:
                        p = Product().create_from_json(product)
                        self.products_per_category[cat['name']].append(p)
            else:
                # Error in retrieving products from server
                Logger.critical(
                    "StellaPayUI: Products could not be retrieved: " +
                    response.text)
                os._exit(1)
        # If we loaded everything correctly, we can tell the startup screen we loaded correctly.
        screen_manager.get_screen(
            Screens.STARTUP_SCREEN.value).on_products_loaded()
    else:
        # Error
        Logger.critical(
            "StellaPayUI: Categories could not be retrieved: " +
            response.text)
        os._exit(1)
def on_animation_step(self, widget, step):
    """animation_step changed. move control points to step with animation

    Set mesh_attached=False if on setup_step.

    NOTE(review): 'basestring' is Python-2-only; on Python 3 this raises
    NameError before the intended ValueError — confirm target interpreter.
    """
    Logger.debug('%s: on_animation_step %s', self.__class__.__name__, step)
    if not isinstance(step, basestring):
        raise ValueError('animation_step must be a string, given %s' % step, type(step))
    # Track all possible step keys
    if step != setup_step and step not in self.animation_steps_order:
        self.animation_steps_order.append(step)
    resume_animation = self.animating
    if self.animating:
        # Switched step while previewing animation
        self.preview_animation(False)
    if step == setup_step:
        self.image_opacity = self.faded_image_opacity
        # ControlPoints will detach mesh after animation in: move_control_points()
        self.mesh_attached = False
    else:
        # Not first animation step
        self.image_opacity = 0.0
        # NOTE(review): local 'mesh' is assigned but never used below.
        mesh = self.mesh
        if self._previous_step == setup_step:
            # Redo base vertices when moving from 0 to other
            Logger.debug('Recalculating vertices/indices during transition from step 0')
            self.calc_mesh_vertices(step=setup_step, preserve_uv=False)
        if not self.mesh_attached:
            # attach before moving to other animation steps
            for cp in self.control_points:
                cp.attach_mesh(True)
            self.mesh_attached = True
    self.move_control_points(step, detach_mesh_after=step == setup_step)
    self._previous_step = step
    if resume_animation:
        self.preview_animation()
def GetLocalIPV4_FallBack() -> str:
    """ gets the local IP by opening a socket to itself

    Starts a UDP listener thread on port 18888, broadcasts a marker packet,
    and reads back the source address the listener sees. Gives up after
    ~10 broadcast attempts (about 1 second).

    NOTE(review): all failure paths return u'127.0.0.0' — this looks like a
    typo for 127.0.0.1, but it is used consistently; confirm before changing.
    """

    def udp_listening_server() -> str:
        """ the server component """
        bMsg: bytes
        tAddress: Tuple
        try:
            oInSocket: socket.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            oInSocket.bind(("0.0.0.0", 18888))
            oInSocket.setblocking(False)
            while True:
                # select blocks until the socket is readable.
                tResult: Tuple = select.select([oInSocket], [], [])
                bMsg, tAddress = tResult[0][0].recvfrom(1024)
                if bMsg == b'ORCAIPREQUEST':
                    # Report the sender's address (ours) via the closed-over list.
                    aIP.append(tAddress[0])
                    break
            oInSocket.close()
        except Exception as exc:
            LogError(uMsg=u'GetLocalIp:udp_listening_server:', oException=exc)
            return u'127.0.0.0'

    try:
        Logger.debug("Using Fallback to detect V4 IP Address")
        aIP: List = []
        oThread: threading.Thread = threading.Thread(
            target=udp_listening_server)
        # Attribute stash; the closure actually communicates through aIP.
        oThread.aIP = aIP
        oThread.start()
        oOutSocket: socket.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        oOutSocket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        i: int = 0
        # Broadcast until the listener picks a packet up, max 10 tries.
        while len(aIP) == 0:
            oOutSocket.sendto(b'ORCAIPREQUEST', ("255.255.255.255", 18888))
            fSleep(fSeconds=0.1)
            i += 1
            if i == 10:
                break
        oOutSocket.close()
        if len(aIP) > 0:
            return aIP[0]
        else:
            return u'127.0.0.0'
    except Exception as e:
        LogError(uMsg='GetLocalIpV4:', oException=e)
        return u'127.0.0.0'
def create_static_database(self):
    """Open (creating if needed) the static fun-fact SQLite database and
    store the connection on self.connection.

    Exits the process hard on any SQLite error.
    """
    # Connect to the database and return database connection object
    conn = None
    Logger.debug(f"StellaPayUI: Creating static fun fact database")
    # Create all tables and add the the database file
    try:
        conn = sqlite3.connect('db/static_fun_fact_database.db')
        print(sqlite3.version)
        # # SQLite command to create table with two fields, namely product and fun_fact
        static_fun_facts_table = "CREATE TABLE IF NOT EXISTS static_fun_facts(" \
                                 "product text NOT NULL, " \
                                 "fun_fact text PRIMARY KEY " \
                                 ");"
        # Tables for time-windowed fun facts were sketched here but are not
        # created; kept out as dead code.
        # Create connection to the database and add the tables
        db_conn = conn.cursor()
        db_conn.execute(static_fun_facts_table)
    except sqlite3.Error as e:
        # Fixed: "StellaPayUI: " + e raised TypeError (str + Exception);
        # convert the exception to str before logging.
        Logger.critical("StellaPayUI: " + str(e))
        os._exit(1)
    self.connection = conn
def get_my_ip():
    """Get my public IP address or if offline get my NAT IP.

    Returns (local_ip, public_ip); public_ip is None when offline or when a
    captive portal returned something that is not a valid address.

    NOTE(review): on Python 3 urlopen(...).read() returns bytes, so
    ip_address_is_valid must accept bytes — confirm; curlmyip.com may also
    no longer be operational.
    """
    try:
        # Get IP from curlmyip.com which gives the raw ip address
        my_pub_ip = urlopen("http://curlmyip.com").read().strip()
        # Check for portal redirects if offline
        if not ip_address_is_valid(my_pub_ip):
            my_pub_ip = None
    except URLError:
        Logger.debug("UTILITIES: No active internet connection.")
        my_pub_ip = None
    # Get local IP
    my_loc_ip = get_local_ip()
    return (my_loc_ip, my_pub_ip)
def detect_face(self, dt):
    """Clock callback: grab the current camera texture, run face detection,
    and draw a red rectangle over each detected face.

    dt is the Kivy Clock delta, unused here.
    """
    height, width = self.ids.camera.texture.height, self.ids.camera.texture.width
    # Camera texture pixels are RGBA bytes; reshape to (h, w, 4) and flip
    # vertically because GL textures are bottom-up.
    img = np.frombuffer(self.ids.camera.texture.pixels, np.uint8)
    img = img.reshape(height, width, 4)
    img = np.flipud(img)
    # Scale the frame so it matches what is displayed in the window.
    factor = Window.height / self.ids.camera.resolution[1]
    img = cv2.resize(img, None, fx=factor, fy=factor)
    if self.faces_detection_active:
        # Logger.debug("avant detection visage")  # ("before face detection")
        detected_faces = App.get_running_app().face_detector.detect_faces(img)
        # Clear the rectangles drawn for the previous frame.
        for c in self.faces:
            self.canvas.remove(c)
        self.faces = []
        for (x, y, w, h) in detected_faces:
            # Log message is French for "face detected".
            Logger.debug("visage détécté %s, %s => %s %s" % (x, y, w, h))
            with self.canvas:
                Color(1, 0, 0, 0.8, mode="rgba")
                # TODO: remove this magic numbers
                # (200/75 look like hand-tuned offsets mapping detection
                # coordinates onto widget coordinates — confirm.)
                r = Rectangle(size=(h, w), pos=(x + 200, y + 75))
                self.faces.append(r)
def load_facts_database(self):
    """Read every (product, fun_fact) row from the static table into the
    in-memory self.loaded_facts mapping."""
    Logger.debug(f"StellaPayUI: Loading fun fact database")
    cursor = self.connection.cursor()
    cursor.execute("SELECT product, fun_fact FROM static_fun_facts")
    fun_facts = cursor.fetchall()
    # Tuple-unpack each row straight into the per-product fact list.
    for product, fun_fact in fun_facts:
        self.loaded_facts[product].append(fun_fact)
    Logger.debug(f"StellaPayUI: Loaded {len(fun_facts)} fun facts from the database")
def print_message(self, msg, peerid=None, intermediate=False): "Print a message in the output window." # Indicates multiline output required if intermediate: text = "{}{}".format(constants.GUI_LABEL_LEFT_PADDING, msg) else: # One line print if not peerid: peerid = self.comm_service.peerid # If local pid, substitute with peer name if peerid == self.comm_service.peerid: peerid = constants.PEER_NAME # Get peer message color rcc = self.comm_service.swarm_manager.get_peerid_color(peerid) # Single line output with peer id text = "{}{}[color={}]{}[/color] : {}".format( constants.GUI_LABEL_LEFT_PADDING, constants.GUI_LABEL_PROMPT_SYM, rcc, str(peerid), msg ) text = "\n{}".format(text) # Send text to console self.display_text("\n" + text) # Print in log if constants.ENABLE_CMD_LOG: # Get peer id for log if not peerid: logger_peerid = constants.PEER_NAME else: logger_peerid = peerid Logger.debug("TESTSERVER: [{}] => {}".format(logger_peerid, msg))
def load_category_data(self):
    """Create one tab per product category (once), then load the products.

    If tabs already exist, only the product reload is performed.
    """
    print(
        f"StellaPayUI: Loading category data on thread {threading.current_thread().name}"
    )
    start_time = time.time()
    if len(self.tabs) > 0:
        Logger.debug(
            "StellaPayUI: Don't load tabs as we already have that information."
        )
        print(
            f"StellaPayUI: Loaded category data and tabs (after skipping) in {time.time() - start_time} seconds"
        )
        # Load product items (because we still need to reload them)
        self.load_products()
        return
    Logger.debug("StellaPayUI: Loading category view")
    for category in App.get_running_app().products_per_category.keys():
        # Create tab display
        tab = TabDisplay(text=category)
        self.ids.android_tabs.add_widget(tab)
        self.tabs.append(tab)
    print(
        f"StellaPayUI: Loaded category data and tabs (no skipping) in {time.time() - start_time} seconds"
    )
    # Load product items
    self.load_products()
from typing import List from typing import Dict from typing import Tuple from kivy import Logger import select import socket import threading from ORCA.utils.LogError import LogError from ORCA.utils.Sleep import fSleep try: import netifaces Logger.debug("Loaded netifaces") except Exception as ex: Logger.error("Can't load netifaces:" + str(ex)) __all__ = ['GetIPAddressV4'] def GetIPAddressV4() -> str: uPreferredAdapter: str = u'eth0' uInet_Type: str = u'AF_INET' uRet: str = u'127.0.0.1' aFound: List[str] = [] iInet_num: int try:
def on_stop(self):
    """Kivy shutdown hook: log the shutdown and halt the asyncio loop."""
    Logger.debug("StellaPayUI: Stopping!")
    # Stop the background asyncio event loop so its thread can exit.
    self.loop.stop()
def on_control_points_disabled(self, _, disable):
    """Propagate the disabled flag to every control point widget."""
    Logger.debug("AnimationConstructor: disable_control_points. disable=%s", disable)
    for point in self.control_points:
        point.disabled = disable
def build(self):
    """Kivy App.build: configure theming/window, start the background
    asyncio loop and backend session, register all screens and the NFC
    reader, and return the root screen manager.
    """
    # Theme / palette configuration.
    self.theme_cls.theme_style = "Dark"
    self.theme_cls.primary_palette = "Amber"
    self.theme_cls.primary_hue = "600"
    self.theme_cls.accent_pallet = "Teal"
    self.theme_cls.accent_hue = "300"
    self.theme_cls.green_button = (0.262, 0.627, 0.278, 1)
    self.theme_cls.red_button = (0.898, 0.450, 0.450, 1)
    self.theme_cls.purple_button = (0.694, 0.612, 0.851, 1)
    self.theme_cls.complementary_color_1 = (0.623, 0.858, 0.180, 1)
    self.theme_cls.complementary_color_2 = (0, 0.525, 0.490, 1)
    # Set background image to match color of STE logo
    Window.clearcolor = (0.12549, 0.12549, 0.12549, 0)
    # Set size of the window
    Window.size = (int(self.config.get('device', 'width')), int(self.config.get('device', 'height')))
    Logger.info(
        f"StellaPayUI: Window height {self.config.get('device', 'height')} and width {self.config.get('device', 'width')}."
    )
    # Don't run in borderless mode when we're running on Linux (it doesn't seem to work so well).
    Window.borderless = False if sys.platform.startswith("linux") else True
    hostname = None
    try:
        hostname = self.config.get('server', 'hostname')
        Logger.info(f"StellaPayUI: Hostname for server: {hostname}")
        Connections.hostname = hostname
    except Exception:
        Logger.warning(
            "StellaPayUI: Using default hostname, since none was provided")
        pass
    if self.config.get('device', 'fullscreen') == 'True':
        Logger.info(f"StellaPayUI: Running in fullscreen mode!")
        Window.fullscreen = True
    else:
        Logger.info(f"StellaPayUI: Running in windowed mode!")
        Window.fullscreen = False
    if self.config.get('device', 'show_cursor') == 'True':
        Window.show_cursor = True
    else:
        Window.show_cursor = False
    # Load .kv file
    Builder.load_file('kvs/DefaultScreen.kv')
    Logger.debug("StellaPayUI: Starting event loop")
    # Background asyncio loop runs on a daemon thread; all backend calls go through it.
    self.loop: AbstractEventLoop = asyncio.new_event_loop()
    self.event_loop_thread = threading.Thread(target=self.run_event_loop,
                                              args=(self.loop, ),
                                              daemon=True)
    self.event_loop_thread.start()
    Logger.debug("StellaPayUI: Start authentication to backend")
    # Authenticate, then load categories/products as the continuation.
    self.loop.call_soon_threadsafe(self.session_manager.setup_session,
                                   self.load_categories_and_products)
    # Initialize defaultScreen (to create session cookies for API calls)
    ds_screen = DefaultScreen(name=Screens.DEFAULT_SCREEN.value)
    # Load screenloader and add screens
    screen_manager.add_widget(
        StartupScreen(name=Screens.STARTUP_SCREEN.value))
    screen_manager.add_widget(ds_screen)
    screen_manager.add_widget(
        WelcomeScreen(name=Screens.WELCOME_SCREEN.value))
    screen_manager.add_widget(
        RegisterUIDScreen(name=Screens.REGISTER_UID_SCREEN.value))
    screen_manager.add_widget(
        ConfirmedScreen(name=Screens.CONFIRMED_SCREEN.value))
    screen_manager.add_widget(
        CreditsScreen(name=Screens.CREDITS_SCREEN.value))
    screen_manager.add_widget(
        ProductScreen(name=Screens.PRODUCT_SCREEN.value))
    screen_manager.add_widget(
        ProfileScreen(name=Screens.PROFILE_SCREEN.value))
    Logger.debug(
        "StellaPayUI: Registering default screen as card listener")
    ds_screen.register_card_listener(self.card_connection_manager)
    Logger.debug("StellaPayUI: Starting NFC reader")
    self.card_connection_manager.start_nfc_reader()
    screen_manager.get_screen(
        Screens.CREDITS_SCREEN.value).ids.version_build.text = str(
            self.build_version)
    return screen_manager
def on_size(self, _, size):
    """Recompute the cached bounding-box diagonal when the widget resizes."""
    Logger.debug(self.__class__.__name__ + '.on_size %s', size)
    width, height = size
    self.bbox_diagonal = (width ** 2 + height ** 2) ** 0.5
def peripheral_state_changed(self, state):
    """React to a BLE peripheral state change by refreshing the ready flag."""
    Logger.debug("BLE: peripheral state changed: {}".format(state))
    # Readiness is derived from platform BLE support, not from 'state' itself.
    self.ble_peripheral_ready = ble_peripheral.has_ble
def switch_face_detection(self, *args):
    """Toggle face detection and recolor the triggering button accordingly."""
    self.faces_detection_active = not self.faces_detection_active
    Logger.debug("switch_face_detection %s" % self.faces_detection_active)
    # Yellow while active, blue while inactive.
    if self.faces_detection_active:
        args[0].text_color = (1, 1, 0, 1)
    else:
        args[0].text_color = (0, 0, 1, 1)
def removing_people_enable(self, instance):
    """Log activation of the remove-people control; no further action yet."""
    Logger.debug("removing_people_enable %s " % instance)
def on_start(self):
    """Kivy startup hook: just announce that the app is launching."""
    Logger.debug("StellaPayUI: Starting StellaPay!")
def switch_and_set_back(self, newcurrent):
    """Switch to *newcurrent*, remembering the current screen for returning."""
    Logger.debug("Asked to switch and set back")
    # Stash where we came from so a later call can navigate back.
    self.back_screen_name = self.current
    self.switch_to(newcurrent)
def callback(self):
    """Placeholder callback: only logs that it was invoked."""
    Logger.debug("callback")
def on_manualMode(self, instance, value):
    """Property observer for manualMode: validate the new value and log it.

    NOTE(review): logs self.manualMode rather than 'value' — presumably the
    property is already updated when this fires; confirm.
    """
    # Fixed: isinstance() is the idiomatic type check; 'type(value) in [bool]'
    # built a throwaway list on every call.
    assert isinstance(value, bool)
    Logger.debug(self.__class__.__name__ + ': in [' + whoAmI() + '] manualMode is ' + str(self.manualMode))
def picture_taken(self, obj, filename):
    """After the camera stores a shot, open it in the Editor screen."""
    Logger.debug("picture_taken %s => %s" % (obj, filename))
    App.get_running_app().manager.switch_to("Editor")
    # The saved image lives in the device's DCIM directory on external storage.
    saved_path = os.path.join("%s/DCIM/" % primary_external_storage_path(), filename)
    App.get_running_app().manager.current_screen.source = saved_path
def peripheral_service_added(self, service):
    """Log that a service was added to the BLE peripheral."""
    Logger.debug("BLE: connect: peripheral service added: {}".format(service))
def validate(self, buffer_stream, data_buffer):
    """Run the validation rules matching data_buffer.type.

    Types: 1=Product, 2=Module, 3=System Area, 4=Key Action,
    5=Input Parameter, 6=Client, 7=Project, 8=Test Script, 9=Workflow,
    10=Workflow Action, 11=Workflow Next Action, 12=Workflow Parameter,
    13=Flowchart.  Errors are accumulated via data_buffer.add_error().

    Returns True as soon as a required field is found missing (the buffer
    is then NOT advanced); otherwise advances the buffer status, marks the
    stream task done and returns None.
    """
    if data_buffer.type == 0:
        # The buffer data type is not assigned; record the problem only.
        Logger.debug('Validations: Buffer Data Type not assigned')
        data_buffer.add_error('Data Buffer Type not assigned')
    elif data_buffer.type == 1:
        Logger.debug('Product Validation Initialized')
        if len(data_buffer.data) < 2:
            data_buffer.add_error('Missing Fields in data buffer')
        else:
            if self._require(data_buffer, 0, 'Product ID'):
                return True
            self._check_integer(data_buffer, 0, 'Product ID')
            if self._require(data_buffer, 1, 'Product Name'):
                return True
            self._check_name(data_buffer, 1, 'Product Name')
    elif data_buffer.type == 2:
        Logger.debug('Module Validation Initialized')
        if len(data_buffer.data) < 3:
            data_buffer.add_error('Missing Fields in data buffer')
        else:
            if self._require(data_buffer, 0, 'Module ID'):
                return True
            if self._require(data_buffer, 1, 'Product ID'):
                return True
            self._check_integer(data_buffer, 0, 'Module ID')
            self._check_integer(data_buffer, 1, 'Product ID')
            if self._require(data_buffer, 2, 'Module Name'):
                return True
            self._check_name(data_buffer, 2, 'Module Name')
    elif data_buffer.type == 3:
        Logger.debug('System Area Validation Initialized')
        if len(data_buffer.data) < 3:
            data_buffer.add_error('Missing Fields in data buffer')
        else:
            if self._require(data_buffer, 0, 'System Area ID'):
                return True
            if self._require(data_buffer, 1, 'Module ID'):
                return True
            self._check_integer(data_buffer, 0, 'System Area ID')
            self._check_integer(data_buffer, 1, 'Module ID')
            if self._require(data_buffer, 2, 'System Area Name'):
                return True
            self._check_name(data_buffer, 2, 'System Area Name')
    elif data_buffer.type == 4:
        Logger.debug('Key Action Validation Initialized')
        if len(data_buffer.data) < 5:
            data_buffer.add_error('Missing Fields in data buffer')
        else:
            if self._require(data_buffer, 0, 'Key Action ID'):
                return True
            if self._require(data_buffer, 1, 'System Area ID'):
                return True
            self._check_integer(data_buffer, 0, 'Key Action ID')
            self._check_integer(data_buffer, 1, 'System Area ID')
            if self._require(data_buffer, 2, 'Key Action Name'):
                return True
            self._check_name(data_buffer, 2, 'Key Action Name')
    elif data_buffer.type == 5:
        if len(data_buffer.data) < 3:
            data_buffer.add_error('Missing Fields in data buffer')
        else:
            Logger.debug('Input Parameter Validation Initialized')
            if self._require(data_buffer, 0, 'Input Parameter ID'):
                return True
            if self._require(data_buffer, 1, 'Key Action ID'):
                return True
            # BUGFIX: the integer-check messages were swapped between
            # Input Parameter ID (field 0) and Key Action ID (field 1).
            self._check_integer(data_buffer, 0, 'Input Parameter ID')
            self._check_integer(data_buffer, 1, 'Key Action ID')
            if self._require(data_buffer, 2, 'Input Parameter Name'):
                return True
            self._check_name(data_buffer, 2, 'Input Parameter Name')
    elif data_buffer.type == 6:
        Logger.debug('Client Validation Initialized')
        self._require(data_buffer, 0, 'Client ID')
        if self._require(data_buffer, 1, 'Client Name'):
            return True
        self._check_name(data_buffer, 1, 'Client Name')
    elif data_buffer.type == 7:
        Logger.debug('Project Validation Initialized')
        self._require(data_buffer, 0, 'Project ID')
        self._require(data_buffer, 1, 'Client ID')
        if self._require(data_buffer, 2, 'Project Name'):
            return True
        self._check_name(data_buffer, 2, 'Project Name')
    elif data_buffer.type == 8:
        Logger.debug('Test Script Validation Initialized')
        self._require(data_buffer, 0, 'Test Script ID')
        self._require(data_buffer, 1, 'Project ID')
        if self._require(data_buffer, 2, 'Test Script Name'):
            return True
        self._check_name(data_buffer, 2, 'Test Script Name')
    elif data_buffer.type == 9:
        Logger.debug('Workflow Validation Initialized')
        self._require(data_buffer, 0, 'Workflow ID')
        self._require(data_buffer, 1, 'Test Script ID')
        if self._require(data_buffer, 2, 'Workflow Name'):
            return True
        self._check_name(data_buffer, 2, 'Workflow Name')
    elif data_buffer.type == 10:
        Logger.debug('Workflow Action Validation Initialized')
        self._require(data_buffer, 0, 'Workflow Action ID')
        self._require(data_buffer, 1, 'Workflow ID')
        self._require(data_buffer, 2, 'Key Action ID')
    elif data_buffer.type == 11:
        Logger.debug('Workflow Next Action Validation Initialized')
        self._require(data_buffer, 0, 'Workflow Next Action ID')
        self._require(data_buffer, 1, 'Workflow First Action ID')
        self._require(data_buffer, 2, 'Workflow Second Action ID')
    elif data_buffer.type == 12:
        Logger.debug('Workflow Parameter Validation Initialized')
        self._require(data_buffer, 0, 'Workflow Parameter ID')
        self._require(data_buffer, 1, 'Workflow Action ID')
        self._require(data_buffer, 2, 'Input Parameter ID')
        self._require(data_buffer, 3, 'Workflow Parameter Value')
    elif data_buffer.type == 13:
        Logger.debug('Flowchart Validation Initialized')
        self._require(data_buffer, 0, 'Next Action ID')
        self._require(data_buffer, 1, 'Key Action ID')
        # BUGFIX: Row/Column previously tested mismatched indices
        # (data[1] == '' vs data[2] is None, and data[2] == '' vs
        # data[3] is None); each field is now checked consistently.
        self._require(data_buffer, 2, 'Row')
        self._require(data_buffer, 3, 'Column')
    data_buffer.next_status()
    buffer_stream.task_done()

def _require(self, data_buffer, index, label):
    """Record a required-field error; True when data[index] is '' or None."""
    value = data_buffer.data[index]
    if value == '' or value is None:
        data_buffer.add_error('%s is a required field' % label)
        return True
    return False

def _check_integer(self, data_buffer, index, label):
    """Record an error when data[index] does not render as digits only."""
    if not ('%s' % data_buffer.data[index]).isdigit():
        data_buffer.add_error('%s should be an integer' % label)

def _check_name(self, data_buffer, index, label):
    """Record errors for names shorter than 2 chars or not capitalized."""
    name = '%s' % data_buffer.data[index]
    if len(name) < 2:
        data_buffer.add_error('Insufficient %s length' % label)
    if not name[0].isupper():
        data_buffer.add_error('First Letter of %s should be capitalized' % label)
def on_score_change(self, _game: Game, score: int):
    """Refresh the score label whenever the game reports a new score."""
    Logger.debug(f'New score: {score}')
    label_text = f'Score: {score:3}'
    self._score.text = label_text
def connectionMade(self):
    """Connection hook: log that the connection has been established."""
    message = self.__class__.__name__ + ': in [' + whoAmI() + '] Connected.'
    Logger.debug(message)
def central_state_changed(self, state):
    """Record whether BLE central support is available after a state change."""
    message = "BLE: central state changed: {}".format(state)
    Logger.debug(message)
    self.ble_central_ready = ble_central.has_ble
def switch_to(self, name, **kwargs):
    """Make *name* the current screen.

    Extra keyword arguments are accepted but unused -- presumably kept for
    ScreenManager API compatibility; verify against callers.
    """
    Logger.debug("Asked to switch to %s " % name)
    self.current = name
def unpack_stream(self, stream, shared_key=None):
    """Unpack serial data into stream.

    Optionally decompresses, struct-unpacks either an AUTH or MSG block,
    verifies the stored HMAC and -- for AUTH-flagged streams -- performs
    the token challenge and AES-CBC decryption.  Returns a 3-tuple
    (type, content, key) on success, or [None] * 3 on any failure.
    """
    # Decompress data stream
    if constants.ENABLE_COMPRESSION:
        Logger.info("STREAM: Decompressing Stream...")
        stream = decompress(stream)

    # Check if data is of expected chunk size; any other length is
    # rejected outright.
    if len(stream) != constants.STREAM_SIZE_AUTH_BLOCK and \
            len(stream) != constants.STREAM_SIZE_MSG_BLOCK:
        raise StreamOverflowError()

    if len(stream) == constants.STREAM_SIZE_AUTH_BLOCK:
        Logger.info("STREAM: Unpacking Authentication Stream...")
        # Unpack auth stream to variables (token field here carries the
        # peer public key, hence STREAM_PEER_KEY_LEN)
        (
            stream_flag,
            stream_type,
            stream_content,
            stream_token,
            stream_hmac
        ) = struct.unpack(
            "!?{}s{}s{}s{}s".format(
                constants.STREAM_TYPE_LEN,
                constants.STREAM_CONTENT_LEN,
                constants.STREAM_PEER_KEY_LEN,
                constants.STREAM_CHKSUM_LEN
            ),
            stream
        )
    elif len(stream) == constants.STREAM_SIZE_MSG_BLOCK:
        Logger.info("STREAM: Unpacking Message Stream...")
        # Unpack msg block stream to variables
        (
            stream_flag,
            stream_type,
            stream_content,
            stream_token,
            stream_hmac
        ) = struct.unpack(
            "!?{}s{}s{}s{}s".format(
                constants.STREAM_TYPE_LEN,
                constants.STREAM_CONTENT_LEN,
                constants.STREAM_TOKEN_LEN,
                constants.STREAM_CHKSUM_LEN
            ),
            stream
        )
    else:
        # NOTE(review): unreachable -- the size check above already raised
        # StreamOverflowError for every other length.
        Logger.error("STREAM: Invalid Stream Length received.")
        return [None] * 3

    # Remove all null characters if present (fields are zero-padded to
    # fixed width by struct.pack on the sending side)
    stream_content = stream_content.rstrip('\0')
    stream_token = stream_token.rstrip('\0')

    # Get uid
    stream_uid = generate_uuid(self.peer_host)

    # Get stream object
    stream_obj = Stream(
        stream_uid,
        stream_flag,
        stream_type,
        stream_content,
        stream_token,
    )

    # Add stream to store
    self.add_store(stream_uid, stream_obj.dict)

    # Verify stream integrity
    if not self.check_hmac(stream_uid, stream_hmac):
        Logger.error("STREAM: Stream Checksum mismatch.")
        return [None] * 3

    # Check stream signing mode
    if stream_flag == STREAM_TYPES.UNAUTH:
        # Stream key is peer public key
        pass
    elif stream_flag == STREAM_TYPES.AUTH:
        # Generate token at destination side
        # Perform key challenge
        if generate_token(stream_uid, shared_key) != stream_token:
            Logger.error("STREAM: Token challenge Fail!")
            Logger.error("STREAM: RCVD: {}".format(b64encode(stream_token)))
            Logger.error("STREAM: EXPD: {}".format(
                b64encode(generate_token(stream_uid, shared_key))))
            return [None] * 3
        else:
            Logger.info("STREAM: Token challenge Pass!")

        # AES Decryption (requires shared_key, so AUTH branch only --
        # NOTE(review): original formatting lost; confirm nesting)
        if constants.AES_AVAILABLE:
            Logger.info("STREAM: Decrypting content...")
            # Generate iv from stream token
            iv = md5hash(stream_token, hexdigest=False)
            # Create AES object
            AES_obj = AES.new(shared_key, AES.MODE_CBC, iv)
            # Decrypt content
            stream_content = AES_obj.decrypt(stream_content)
            # Upad decrypted content
            stream_content = self.unpad(stream_content)

    def pkey_action(val):
        # Shorten the public key to its md5 digest for readable debug output.
        val = md5hash(val)
        return val

    if stream_flag == STREAM_TYPES.UNAUTH:
        Logger.debug("""STREAM: Unpacking: \n{}""".format(
            self.storage_table(shorten_len=64,
                               action_dict={"STREAM_PKEY": pkey_action})
        ))
        Logger.debug("""DEBUG STREAM:
        FLAG: {}
        TYPE: {}
        CONTENT: {}
        KEY: {}
        CHECKSUM: {}
        """.format(
            self.get_store_item(stream_uid, 'STREAM_FLAG'),
            self.get_store_item(stream_uid, 'STREAM_TYPE'),
            stream_content,
            b64encode(self.get_store_item(stream_uid, 'STREAM_PKEY')),
            self.get_store_hmac(stream_uid)))

    # Unshuffle content
    if constants.ENABLE_SHUFFLE:
        Logger.info("STREAM: Unscrambling content...")
        stream_content = unshuffler(
            shuffled_string=stream_content,
            iterations=constants.STREAM_CONT_SHUFF_ITER
        )

    if stream_flag == STREAM_TYPES.UNAUTH:
        # NOTE(review): the log says "AUTH Stream" but this is the UNAUTH
        # (authentication-handshake) branch -- confirm intended wording.
        Logger.info("STREAM: Successfully unpacked AUTH Stream.")
        return (self.get_store_item(stream_uid, "STREAM_TYPE"),
                self.get_store_item(stream_uid, "STREAM_CONTENT"),
                bytes_to_num(
                    self.get_store_item(stream_uid, "STREAM_PKEY")
                ))
    elif stream_flag == STREAM_TYPES.AUTH:
        Logger.info("STREAM: Successfully unpacked MSG Stream.")
        return (self.get_store_item(stream_uid, "STREAM_TYPE"),
                stream_content,
                self.get_store_item(stream_uid, "STREAM_PKEY"))
    else:
        Logger.info("STREAM: Unpack of stream unsuccessfull.")
        return [None] * 3
def close_settings(self, *args):
    """Leave the settings screen if it is currently showing."""
    Logger.debug("Closing settings")
    on_settings_screen = self.manager.current == "Settings"
    if on_settings_screen:
        self.manager.go_back()
def on_presenceState(self, instance, value):
    """Kivy property observer: map presenceState to its display string.

    Idiom fix: `isinstance(value, (int, float))` replaces the unidiomatic
    `type(value) in [int, float]` check (slightly more permissive for
    numeric subclasses; asserts are stripped under -O, debug aid only).
    """
    assert isinstance(value, (int, float))
    # stateDictionary is keyed by the rounded integer presence state.
    self.presenceString = self.stateDictionary[int(round(self.presenceState))]
    Logger.debug(self.__class__.__name__ + ': in [' + whoAmI() +
                 '] presenceState is ' + str(self.presenceState) +
                 ' - ' + self.presenceString)
def pack_stream(self, stream_type, stream_content, stream_host, stream_flag=STREAM_TYPES.AUTH, shared_key=None):
    """Pack data into stream.

    Builds either an authentication (UNAUTH) or message (AUTH) block:
    optionally shuffles and AES-encrypts the content, stores the stream,
    struct-packs the stored fields plus HMAC and optionally compresses.
    Returns the packed bytes, or None for an unknown stream flag.
    """
    # Check length of content.
    if len(stream_content) > constants.STREAM_CONTENT_LEN:
        raise StreamOverflowError(constants.STREAM_CONTENT_LEN)

    # Check length of capsule type.
    if len(stream_type) > constants.STREAM_TYPE_LEN:
        raise StreamOverflowError(constants.STREAM_TYPE_LEN)

    # Generate uid
    stream_uid = generate_uuid(stream_host)

    # Stream type (normalized to upper case)
    stream_type = stream_type.upper()

    # Stream peer key
    stream_token = None

    # For testing: keep the plaintext around for the debug dump below.
    _debug_stream_content = stream_content

    # Shuffle content
    if constants.ENABLE_SHUFFLE:
        Logger.info("STREAM: Scrambling content...")
        stream_content = shuffler(
            string=stream_content,
            iterations=constants.STREAM_CONT_SHUFF_ITER
        )

    # Check stream signing mode
    if stream_flag == STREAM_TYPES.UNAUTH:
        # Stream key is peer key
        # NOTE peer public key is sent during
        # authentication.
        stream_token = num_to_bytes(self.public_key)
    elif stream_flag == STREAM_TYPES.AUTH:
        # Generate token at source side
        stream_token = generate_token(stream_uid, shared_key)

        # AES Encryption (requires shared_key, so AUTH branch only --
        # NOTE(review): original formatting lost; confirm nesting)
        if constants.AES_AVAILABLE:
            Logger.info("STREAM: Encrypting content...")
            # Generate iv from stream token
            iv = md5hash(stream_token, hexdigest=False)
            # Create AES object
            AES_obj = AES.new(shared_key, AES.MODE_CBC, iv)
            # Pad string
            stream_content = self.pad(stream_content)
            # Encrypt string
            stream_content = AES_obj.encrypt(stream_content)

    # Create stream object
    stream_obj = Stream(
        stream_uid,
        stream_flag,
        stream_type,
        stream_content,
        stream_token,
    )

    # Add stream to store
    self.add_store(
        stream_uid,
        stream_obj.dict
    )

    if stream_flag == STREAM_TYPES.UNAUTH:
        Logger.info("STREAM: Packing Authentication Stream...")
        # Pack store into authentication stream
        stream = struct.pack(
            "!?{}s{}s{}s{}s".format(
                constants.STREAM_TYPE_LEN,
                constants.STREAM_CONTENT_LEN,
                constants.STREAM_PEER_KEY_LEN,
                constants.STREAM_CHKSUM_LEN
            ),
            self.get_store_item(stream_uid, 'STREAM_FLAG'),
            self.get_store_item(stream_uid, 'STREAM_TYPE'),
            self.get_store_item(stream_uid, 'STREAM_CONTENT'),
            self.get_store_item(stream_uid, 'STREAM_PKEY'),
            self.get_store_hmac(stream_uid)
        )
    elif stream_flag == STREAM_TYPES.AUTH:
        Logger.info("STREAM: Packing Message Stream...")
        # Pack store into message block stream
        # NOTE(review): the token appears to be stored under the
        # 'STREAM_PKEY' slot for both flags -- confirm against Stream.dict.
        stream = struct.pack(
            "!?{}s{}s{}s{}s".format(
                constants.STREAM_TYPE_LEN,
                constants.STREAM_CONTENT_LEN,
                constants.STREAM_TOKEN_LEN,
                constants.STREAM_CHKSUM_LEN
            ),
            self.get_store_item(stream_uid, 'STREAM_FLAG'),
            self.get_store_item(stream_uid, 'STREAM_TYPE'),
            self.get_store_item(stream_uid, 'STREAM_CONTENT'),
            self.get_store_item(stream_uid, 'STREAM_PKEY'),
            self.get_store_hmac(stream_uid)
        )
    else:
        Logger.error("STREAM: Invalid Stream Flag received.")
        return None

    def pkey_action(val):
        # Shorten the public key to its md5 digest for readable debug output.
        val = md5hash(val)
        return val

    if stream_flag == STREAM_TYPES.UNAUTH:
        Logger.debug("""STREAM: Packing: \n{}""".format(
            self.storage_table(shorten_len=64,
                               action_dict={"STREAM_PKEY": pkey_action})
        ))
        Logger.debug("""DEBUG STREAM:
        FLAG: {}
        TYPE: {}
        CONTENT: {}
        KEY: {}
        CHECKSUM: {}
        """.format(
            self.get_store_item(stream_uid, 'STREAM_FLAG'),
            self.get_store_item(stream_uid, 'STREAM_TYPE'),
            _debug_stream_content,
            b64encode(self.get_store_item(stream_uid, 'STREAM_PKEY')),
            self.get_store_hmac(stream_uid)))

    # Compress stream
    if constants.ENABLE_COMPRESSION:
        Logger.info("STREAM: Compressing Stream...")
        stream = compress(stream)

    Logger.info("STREAM: Succesfully packed stream.")
    return stream
def peripheral_advertising_started(self):
    """Log that BLE advertising has begun."""
    Logger.debug("BLE: connect: advertisement started")
def on_char_write(self, char, error):
    """Log the outcome of a BLE characteristic write."""
    if not error:
        Logger.debug("BLE: write successful: {}".format(char))
    else:
        Logger.error("BLE: error writing data: {}: {}".format(char, error))
def validate(self, buffer_stream, data_buffer):
    """Run the validation rules matching data_buffer.type.

    Types: 1=Product, 2=Module, 3=System Area, 4=Key Action,
    5=Input Parameter, 6=Client, 7=Project, 8=Test Script, 9=Workflow,
    10=Workflow Action, 11=Workflow Next Action, 12=Workflow Parameter,
    13=Flowchart.  Errors are accumulated via data_buffer.add_error().

    Returns True as soon as a required field is found missing (the buffer
    is then NOT advanced); otherwise advances the buffer status, marks the
    stream task done and returns None.
    """
    if data_buffer.type == 0:
        # The buffer data type is not assigned; record the problem only.
        Logger.debug('Validations: Buffer Data Type not assigned')
        data_buffer.add_error('Data Buffer Type not assigned')
    elif data_buffer.type == 1:
        Logger.debug('Product Validation Initialized')
        if len(data_buffer.data) < 2:
            data_buffer.add_error('Missing Fields in data buffer')
        else:
            if self._require(data_buffer, 0, 'Product ID'):
                return True
            self._check_integer(data_buffer, 0, 'Product ID')
            if self._require(data_buffer, 1, 'Product Name'):
                return True
            self._check_name(data_buffer, 1, 'Product Name')
    elif data_buffer.type == 2:
        Logger.debug('Module Validation Initialized')
        if len(data_buffer.data) < 3:
            data_buffer.add_error('Missing Fields in data buffer')
        else:
            if self._require(data_buffer, 0, 'Module ID'):
                return True
            if self._require(data_buffer, 1, 'Product ID'):
                return True
            self._check_integer(data_buffer, 0, 'Module ID')
            self._check_integer(data_buffer, 1, 'Product ID')
            if self._require(data_buffer, 2, 'Module Name'):
                return True
            self._check_name(data_buffer, 2, 'Module Name')
    elif data_buffer.type == 3:
        Logger.debug('System Area Validation Initialized')
        if len(data_buffer.data) < 3:
            data_buffer.add_error('Missing Fields in data buffer')
        else:
            if self._require(data_buffer, 0, 'System Area ID'):
                return True
            if self._require(data_buffer, 1, 'Module ID'):
                return True
            self._check_integer(data_buffer, 0, 'System Area ID')
            self._check_integer(data_buffer, 1, 'Module ID')
            if self._require(data_buffer, 2, 'System Area Name'):
                return True
            self._check_name(data_buffer, 2, 'System Area Name')
    elif data_buffer.type == 4:
        Logger.debug('Key Action Validation Initialized')
        if len(data_buffer.data) < 5:
            data_buffer.add_error('Missing Fields in data buffer')
        else:
            if self._require(data_buffer, 0, 'Key Action ID'):
                return True
            if self._require(data_buffer, 1, 'System Area ID'):
                return True
            self._check_integer(data_buffer, 0, 'Key Action ID')
            self._check_integer(data_buffer, 1, 'System Area ID')
            if self._require(data_buffer, 2, 'Key Action Name'):
                return True
            self._check_name(data_buffer, 2, 'Key Action Name')
    elif data_buffer.type == 5:
        if len(data_buffer.data) < 3:
            data_buffer.add_error('Missing Fields in data buffer')
        else:
            Logger.debug('Input Parameter Validation Initialized')
            if self._require(data_buffer, 0, 'Input Parameter ID'):
                return True
            if self._require(data_buffer, 1, 'Key Action ID'):
                return True
            # BUGFIX: the integer-check messages were swapped between
            # Input Parameter ID (field 0) and Key Action ID (field 1).
            self._check_integer(data_buffer, 0, 'Input Parameter ID')
            self._check_integer(data_buffer, 1, 'Key Action ID')
            if self._require(data_buffer, 2, 'Input Parameter Name'):
                return True
            self._check_name(data_buffer, 2, 'Input Parameter Name')
    elif data_buffer.type == 6:
        Logger.debug('Client Validation Initialized')
        self._require(data_buffer, 0, 'Client ID')
        if self._require(data_buffer, 1, 'Client Name'):
            return True
        self._check_name(data_buffer, 1, 'Client Name')
    elif data_buffer.type == 7:
        Logger.debug('Project Validation Initialized')
        self._require(data_buffer, 0, 'Project ID')
        self._require(data_buffer, 1, 'Client ID')
        if self._require(data_buffer, 2, 'Project Name'):
            return True
        self._check_name(data_buffer, 2, 'Project Name')
    elif data_buffer.type == 8:
        Logger.debug('Test Script Validation Initialized')
        self._require(data_buffer, 0, 'Test Script ID')
        self._require(data_buffer, 1, 'Project ID')
        if self._require(data_buffer, 2, 'Test Script Name'):
            return True
        self._check_name(data_buffer, 2, 'Test Script Name')
    elif data_buffer.type == 9:
        Logger.debug('Workflow Validation Initialized')
        self._require(data_buffer, 0, 'Workflow ID')
        self._require(data_buffer, 1, 'Test Script ID')
        if self._require(data_buffer, 2, 'Workflow Name'):
            return True
        self._check_name(data_buffer, 2, 'Workflow Name')
    elif data_buffer.type == 10:
        Logger.debug('Workflow Action Validation Initialized')
        self._require(data_buffer, 0, 'Workflow Action ID')
        self._require(data_buffer, 1, 'Workflow ID')
        self._require(data_buffer, 2, 'Key Action ID')
    elif data_buffer.type == 11:
        Logger.debug('Workflow Next Action Validation Initialized')
        self._require(data_buffer, 0, 'Workflow Next Action ID')
        self._require(data_buffer, 1, 'Workflow First Action ID')
        self._require(data_buffer, 2, 'Workflow Second Action ID')
    elif data_buffer.type == 12:
        Logger.debug('Workflow Parameter Validation Initialized')
        self._require(data_buffer, 0, 'Workflow Parameter ID')
        self._require(data_buffer, 1, 'Workflow Action ID')
        self._require(data_buffer, 2, 'Input Parameter ID')
        self._require(data_buffer, 3, 'Workflow Parameter Value')
    elif data_buffer.type == 13:
        Logger.debug('Flowchart Validation Initialized')
        self._require(data_buffer, 0, 'Next Action ID')
        self._require(data_buffer, 1, 'Key Action ID')
        # BUGFIX: Row/Column previously tested mismatched indices
        # (data[1] == '' vs data[2] is None, and data[2] == '' vs
        # data[3] is None); each field is now checked consistently.
        self._require(data_buffer, 2, 'Row')
        self._require(data_buffer, 3, 'Column')
    data_buffer.next_status()
    buffer_stream.task_done()

def _require(self, data_buffer, index, label):
    """Record a required-field error; True when data[index] is '' or None."""
    value = data_buffer.data[index]
    if value == '' or value is None:
        data_buffer.add_error('%s is a required field' % label)
        return True
    return False

def _check_integer(self, data_buffer, index, label):
    """Record an error when data[index] does not render as digits only."""
    if not ('%s' % data_buffer.data[index]).isdigit():
        data_buffer.add_error('%s should be an integer' % label)

def _check_name(self, data_buffer, index, label):
    """Record errors for names shorter than 2 chars or not capitalized."""
    name = '%s' % data_buffer.data[index]
    if len(name) < 2:
        data_buffer.add_error('Insufficient %s length' % label)
    if not name[0].isupper():
        data_buffer.add_error('First Letter of %s should be capitalized' % label)