def on_status(self, status):
    try:
        text = status.extended_tweet["full_text"].encode('utf-8')
    except AttributeError:
        text = status.text.encode('utf-8')
    if text.startswith('RT'):
        return
    # data = {'tweet': text, 'labeled': False, 'words': [], 'users': ["Default"]}
    data = {
        'tweet': text,
        'labeled': False,
        'words': [],
        'users': [{
            'name': "Default",
            'labels': None
        }]
    }
    listOfData.append(data)
    print text
    print "\n"
    if len(listOfData) >= 100:
        sys.exitfunc()
def run(self):
    self.initialise_logging()
    try:
        try:
            indexer = Indexer(*self.indexer_args)
            while not self.kill_self.value:
                # we poll with a timeout to allow for stopping the
                # process, otherwise we can't check the kill_self
                # if we're blocking in .recv()
                if self.inpipe[1].poll(2):
                    collection, filter_settings = self.inpipe[1].recv()
                    self.outpipe[0].send(
                        indexer.do_indexing(collection, filter_settings))
        except KeyboardInterrupt:
            # This happens on normal process termination, just log it as
            # info. Don't re-raise because we don't want it to be printed
            # to stderr, and we're about to exit anyway.
            get_remote_log().info("Indexer process terminating")
        except:
            # This process is about to exit, and there's no layer above us
            # to handle exceptions. Therefore, we just log the error. We
            # don't re-raise it because this would cause output to stderr,
            # and stderr doesn't exist in some of the contexts we run in.
            import traceback
            tb = traceback.format_exc()
            get_remote_log().critical(
                'Unhandled exception in IndexerProcess.run(), traceback follows:\n %s' % tb)
    finally:
        get_remote_log().info("Cleaning up child processes of indexer")
        sys.exitfunc()
        get_remote_log().info("Child processes of indexer stopped")
        # safe to raise this - doesn't produce output on
        # stderr/stdout and needed for clean shutdown.
        raise SystemExit
def main():
    """
    Execute the nose test runner.

    Drop privileges and alter the system argument to remove the userid and
    group id arguments that are only required for the test.
    """
    if len(sys.argv) < 2:
        print (
            u'Run the test suite using drop privileges username as first '
            u'arguments. Use "-" if you do not want elevated mode.')
        sys.exit(1)

    # Delay import after coverage is started.
    from chevah.empirical.nose_memory_usage import MemoryUsage
    from chevah.empirical.nose_test_timer import TestTimer
    from chevah.empirical.nose_run_reporter import RunReporter
    from chevah.empirical import EmpiricalTestCase

    drop_user = sys.argv[1]
    EmpiricalTestCase.initialize(drop_user=drop_user)
    EmpiricalTestCase.dropPrivileges()

    new_argv = ['chevah-test-runner']
    new_argv.extend(sys.argv[2:])
    sys.argv = new_argv
    plugins = [
        TestTimer(),
        RunReporter(),
        MemoryUsage(),
        ]
    try:
        nose_main(addplugins=plugins)
    except SystemExit, error:
        if cov:
            cov.stop()
            cov.save()
        import threading
        print "Max RSS: %s" % EmpiricalTestCase.getPeakMemoryUsage()
        threads = threading.enumerate()
        if len(threads) < 2:
            # No running threads, other than main so we can exit as normal.
            sys.exit(error.code)
        else:
            print "There are still active threads: %s" % threads
            # We do a brute force exit here, since sys.exit will wait for
            # unjoined threads.
            # We have to do some manual work to compensate for skipping
            # sys.exit()
            sys.exitfunc()
            # Don't forget to flush the toilet.
            sys.stdout.flush()
            sys.stderr.flush()
            os._exit(error.code)
def __init__(self):
    currdir = os.path.split(os.path.realpath(__file__))[0]
    const_file = currdir + os.sep + "const.conf"
    if not os.path.exists(const_file):
        log("const config file does not exist.")
        sys.exitfunc()
    else:
        f = open(const_file, "r")
        self.const = json.load(f)
        f.close()
def _updateMinerFromPath(self, newMinerPath):
    self.createRecoveryPath()
    args = [sys.executable, os.path.join(newMinerPath, "miner_upgrade.py"),
            "-v", miner_version.version, "-b", str(miner_version.build),
            "-d", miner_globals.minerBaseDir, "-r", self.getRecoveryPath()]
    if loggers.installLogEnabled:
        args += ["--log", loggers.getMainLogFileName()]
    loggers.installLog.info("Executing miner upgrade via: %s", " ".join(args))
    # explicitly call the atexit functions before replacing the process
    sys.exitfunc()
    os.execv(sys.executable, args)
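# Illustrative sketch (not from the original project): the same restart idiom
# used above, generalised. The Python 2 atexit handlers are run by hand via
# sys.exitfunc(), because os.execv() replaces the process image and normal
# interpreter shutdown never happens.
import os
import sys

def reexec_self():
    if hasattr(sys, 'exitfunc'):
        sys.exitfunc()
    os.execv(sys.executable, [sys.executable] + sys.argv)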
def safeEpicsExit():
    '''Calls epicsExit() after ensuring Python exit handlers called.'''
    if hasattr(sys, 'exitfunc'):
        try:
            # Calling epicsExit() will bypass any atexit exit handlers, so
            # call them explicitly now.
            sys.exitfunc()
        finally:
            # Make sure we don't try the exit handlers more than once!
            del sys.exitfunc
    epicsExit()
def __run(self):
    try:
        self.run()
    except BaseException as err:
        # normally, there is no point to continue
        if not isinstance(err, (SystemExit, KeyboardInterrupt)):
            traceback.print_exc()
        if not os.environ.get('ERROR_NOEXIT'):
            sys.exitfunc()
            os._exit(1)
    self.__finish()
def back(allow_override=True, anim=True):
    if allow_override and screen.back() is not True:
        return
    elif not history:
        if hasattr(sys, 'exitfunc'):
            sys.exitfunc()
        os._exit(0)
    else:
        new_screen = history.pop()
        if anim:
            replace_anim(new_screen, -1)
        else:
            replace(new_screen)
def build_dir_list(project_dir, product_list, year_list):
    """Create a list of full directory paths for downloaded MODIS files."""
    dir_list = []
    try:
        if os.path.exists(project_dir):
            for product in product_list.itervalues():
                for year in year_list:
                    dir_list.append("{}\{}\{}".format(project_dir, year, product))
            return dir_list
    except OSError, e:
        if not os.path.exists(project_dir):
            print('Error creating directory list. Project directory folder not found.')
            sys.exitfunc()
    return dir_list
def back(allow_override=True, anim=True):
    while history and getattr(history[-1], '_no_stack', False):
        history.pop()
    if allow_override and _screen.back() is not True:
        return
    elif not history:
        if hasattr(sys, 'exitfunc'):
            sys.exitfunc()
        os._exit(0)
    else:
        new_screen = history.pop()
        if anim:
            replace_anim(new_screen, -1)
        else:
            replace(new_screen)
def SvcDoRun(self):
    # Write a 'started' event to the event log...
    LogInfoMsg('The Flax service has started.')

    # Redirect stdout and stderr to avoid buffer overflows and to allow
    # debugging while acting as a service
    sys.stderr = open(
        os.path.join(flaxpaths.paths.log_dir, 'flax_stderr.log'), 'w')
    sys.stdout = open(
        os.path.join(flaxpaths.paths.log_dir, 'flax_stdout.log'), 'w')

    try:
        # Start flax, non-blocking.
        self._flax_main.start(blocking=False)
        self.ReportServiceStatus(win32service.SERVICE_RUNNING)

        # Wait for message telling us that we're stopping.
        win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE)
        LogInfoMsg('The Flax service is stopping.')

        # Wait for the service to stop (and reassure windows that we're
        # still trying to stop).
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING, 5000)
        while not self._flax_main.join(4):
            self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING, 5000)

        # Perform cleanup.
        # This is needed because of a bug in PythonService.exe - it doesn't
        # call Py_Finalize(), so atexit handlers don't get called. We call
        # sys.exitfunc() directly as a workaround. When the bug is fixed,
        # we should stop doing this. See:
        # https://sourceforge.net/tracker/?func=detail&atid=551954&aid=1273738&group_id=78018
        # for details.
        sys.exitfunc()
        sys.stderr.close()
        sys.stdout.close()

        # Log that we've stopped.
        LogInfoMsg('The Flax service has stopped.')

        # The python service framework will tell windows that we've stopped
        # when we return from this function, so we don't need to do that
        # explicitly.
    except:
        import traceback
        tb = traceback.format_exc()
        LogErrorMsg('Exception during SvcDoRun, traceback follows:\n %s' % tb)
def check_exitfunc_availability():
    if hasattr(sys, 'exitfunc'):
        print 'it\'s true that there is already exitfunc registered'
        print 'outputs of %s is: %s' % (repr(sys.exitfunc), sys.exitfunc())
    else:
        print 'nothing to append'
def restart_as_needed():
    for (name, mod) in sys.modules.iteritems():
        if name != '__main__' and not name.startswith('gcode'):
            continue
        if mod is None:
            continue
        file = mod.__file__
        if file.endswith('.pyc'):
            file = file[:-1]
        if os.stat(file).st_mtime > program_start_time:
            print >>sys.stderr, '%s changed, restarting' % file
            argv = [sys.executable, '-m', 'gcode.shell'] + sys.argv[1:]
            sys.exitfunc()
            sys.exitfunc = None  # Don't call it again if execv fails.
            os.execv(argv[0], argv)
def make_dirs(dir_list):
    """Creates new directories to store downloaded MODIS files."""
    try:
        if dir_list:
            for dir in dir_list:
                if not os.path.exists(dir):
                    print ("Creating new directory " + dir)
                    os.makedirs(dir, 0777)
                else:
                    print ("Overwriting existing directory with " + dir)
                    shutil.rmtree(dir)
                    os.makedirs(dir, 0777)
    except IndexError as e:
        print("Error making directories. The directory list is empty.")
        sys.exitfunc()
    return
def check_exitfunc_availability():
    if hasattr(sys, 'exitfunc'):
        print 'it\'s true that there is already exitfunc registered'
        print 'outputs of %s is: %s' % (repr(sys.exitfunc), sys.exitfunc())
    else:
        print 'nothing to append'
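# Illustrative sketch (assumed context, not part of the snippet above): on
# Python 2, importing atexit installs its dispatcher as sys.exitfunc, which is
# what check_exitfunc_availability() then reports and invokes.
import atexit
import sys

def goodbye():
    print 'bye'   # hypothetical handler, Python 2 syntax to match the snippet

atexit.register(goodbye)
check_exitfunc_availability()   # prints the dispatcher repr and runs goodbye()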
def safeEpicsExit(code=0):
    '''Calls epicsExit() after ensuring Python exit handlers called.'''
    if hasattr(sys, 'exitfunc'):            # py 2.x
        try:
            # Calling epicsExit() will bypass any atexit exit handlers, so
            # call them explicitly now.
            sys.exitfunc()
        finally:
            # Make sure we don't try the exit handlers more than once!
            del sys.exitfunc
    elif hasattr(atexit, '_run_exitfuncs'):  # py 3.x
        atexit._run_exitfuncs()
    # calls epicsExitCallAtExits()
    # and then OS exit()
    imports.epicsExit(code)
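# Illustrative sketch (assumed names): how the exitfunc/atexit fallback in
# safeEpicsExit() can be exercised. cleanup() is a hypothetical handler; on
# Python 2 it is reached through sys.exitfunc, on Python 3 through the private
# atexit._run_exitfuncs helper.
import atexit
import sys

def cleanup():
    print("flushing state before the C runtime exits")

atexit.register(cleanup)

def run_exit_handlers():
    if hasattr(sys, 'exitfunc'):             # Python 2
        sys.exitfunc()
    elif hasattr(atexit, '_run_exitfuncs'):  # Python 3
        atexit._run_exitfuncs()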
def _run_child_command(self, command_argv):
    # This is the point where we would actually want to do something with
    # our life
    # TODO: We may want to consider special-casing the 'lp-serve' command.
    #       As that is the primary use-case for this service, it might be
    #       interesting to have an already-instantiated instance, where we
    #       can just pop on an extra argument and be ready to go. However,
    #       that would probably only really be measurable if we prefork.
    #       As it looks like ~200ms is 'fork()' time, but only 50ms is
    #       run-the-command time.
    retcode = commands.run_bzr_catch_errors(command_argv)
    self._close_child_file_descriptors()
    trace.mutter("%d finished %r" % (os.getpid(), command_argv))
    # TODO: Should we call sys.exitfunc() here? it allows atexit functions
    #       to fire, however, some of those may be still around from the
    #       parent process, which we don't really want.
    sys.exitfunc()  # See [Decision #6]
    return retcode
def _run_child_command(self, command_argv):
    # This is the point where we would actually want to do something with
    # our life
    # TODO: We may want to consider special-casing the 'lp-serve' command.
    #       As that is the primary use-case for this service, it might be
    #       interesting to have an already-instantiated instance, where we
    #       can just pop on an extra argument and be ready to go. However,
    #       that would probably only really be measurable if we prefork.
    #       As it looks like ~200ms is 'fork()' time, but only 50ms is
    #       run-the-command time.
    retcode = commands.run_bzr_catch_errors(command_argv)
    self._close_child_file_descriptors()
    trace.mutter('%d finished %r' % (os.getpid(), command_argv))
    # TODO: Should we call sys.exitfunc() here? it allows atexit functions
    #       to fire, however, some of those may be still around from the
    #       parent process, which we don't really want.
    sys.exitfunc()  # See [Decision #6]
    return retcode
def exe(cmd, cd=None, environ=None):
    """Execute the command replacing the current process."""
    debug("{}", cmd)
    if isinstance(cmd, str):
        cmd = shlex.split(cmd)
    if environ:
        overrides = environ
        environ = os.environ.copy()
        environ.update(overrides)
    if cd:
        os.chdir(cd)
    if hasattr(sys, 'exitfunc'):
        sys.exitfunc()
    try:
        if environ:
            os.execvpe(cmd[0], cmd, environ)
        else:
            os.execvp(cmd[0], cmd)
    except OSError as exc:
        raise fail(str(exc))
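# Illustrative usage (hypothetical command and paths): exe() runs any
# registered exit handlers first, then replaces the current process, so
# nothing after the call executes unless the exec itself fails.
exe("ls -la", cd="/tmp", environ={"LC_ALL": "C"})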
def runcode(self, code, temp_filename=None):
    global interruptable
    try:
        self.usr_exc_info = None
        interruptable = True
        try:
            exec(code, self.locals)
        finally:
            interruptable = False
    except:
        self.usr_exc_info = sys.exc_info()
        if quitting:
            exit()
        # even print a user code SystemExit exception, continue
        print_exception(temp_filename)
        jit = self.rpchandler.console.getvar("<<toggle-jit-stack-viewer>>")
        if jit:
            self.rpchandler.interp.open_remote_stack_viewer()
    else:
        if hasattr(sys, 'exitfunc') and sys.exitfunc:
            sys.exitfunc()
        flush_stdout()
def replace_app(cmd, *args):
    args = list(args)
    if platform == 'macos':
        from PyObjCTools import AppHelper
        AppHelper.stopEventLoop()
        aboot = args[0].replace('loader_darwin', '__boot__')
        if os.path.exists(aboot):
            args[0] = aboot
    elif platform == 'linux':
        os.chdir(settings.app_dir)
    try:
        if platform != "macos":
            loader.terminate()
    finally:
        if hasattr(sys, 'exitfunc'):
            sys.exitfunc()
        #os.chdir(settings.app_dir)
        if platform == 'win32':
            subprocess.Popen(cmd, creationflags=0x08000000)
        else:
            os.execl(cmd, cmd, *args)
        sys.exit(0)
def BIOS():
    del BIOS
    ##print "pyvm BIOS 1.0"
    try:
        _COMPAT()
    except:
        print "BIOS: (errors in _COMPAT)"

    # empty sys.path.
    # We cannot say `sys.path = []`, because it is referenced internally
    # and pyvm will use the old value.
    # We cannot say `del sys.path [:]`, because ATM this opcode is not
    # implemented. So.. clear the list the safe way.
    while sys.path:
        sys.path.pop()
    sys.path.append('memfs://Lib/')

    try:
        __import_compiled__('memfs://main.pyc', '__main__')
    except SystemExit:
        pass
    except:
        try:
            print "BIOS: Uncaught exception:", sys.exc_info(), traceback.format_exc()
        except:
            try:
                print "BIOS: exception while formatting exception!!", sys.exc_info()
            except:
                pass
    try:
        sys.exitfunc()
    except:
        pass
def BIOS ():
    del BIOS
    ##print "pyvm BIOS 1.0"
    try:
        _COMPAT ()
    except:
        print "BIOS: (errors in _COMPAT)"

    # empty sys.path.
    # We cannot say `sys.path = []`, because it is referenced internally
    # and pyvm will use the old value.
    # We cannot say `del sys.path [:]`, because ATM this opcode is not
    # implemented. So.. clear the list the safe way.
    while sys.path:
        sys.path.pop ()
    sys.path.append ('memfs://Lib/')

    try:
        __import_compiled__ ('memfs://main.pyc', '__main__')
    except SystemExit:
        pass
    except:
        try:
            print "BIOS: Uncaught exception:", sys.exc_info (), traceback.format_exc ()
        except:
            try:
                print "BIOS: exception while formatting exception!!", sys.exc_info ()
            except:
                pass
    try:
        sys.exitfunc ()
    except:
        pass
def write_data_to_excel(list_to_send_write_function, count, check_box_states):
    was_File_existing = False
    # make background fills
    blueFill = PatternFill(start_color='5AB7E8', end_color='5AB7E8', fill_type='solid')
    purpleFill = PatternFill(start_color='9D2C7D', end_color='9D2C7D', fill_type='solid')
    greyFill = PatternFill(start_color='939598', end_color='939598', fill_type='solid')
    evs_red_fill = PatternFill(start_color='EE2653', end_color='EE2653', fill_type='solid')
    greenFill = PatternFill(start_color='00B050', end_color='00B050', fill_type='solid')
    lightgreyFill = PatternFill(start_color='F1CFE8', end_color='F1CFE8', fill_type='solid')
    try:
        try:
            os.remove("{}/Desktop/Entity Data Validation-Output.xlsx".format(home))
            was_File_existing = True
        except:
            pass
        data_file = openpyxl.Workbook()
        sheet = data_file.worksheets[0]
        sheet.title = "United Kingdom"
        # data_worksheet=data_file.worksheets[0]
        country = "United Kingdom"

        # headers for fetched
        headers_format_list = ['REGISTRY NUMBER', 'OFFICIAL ENTITY NAME', 'LEGAL ADDRESS',
                               'LEGAL FORM', 'STATUS', 'DISSOLVE DATE',
                               'PREVIOUS NAMES', 'DATES OF NAME CHANGE']
        col = 1
        for header in headers_format_list:
            sheet.cell(row=1, column=col).value = header
            sheet.cell(row=1, column=col).font = sheet.cell(row=1, column=col).font.copy(bold=True)
            sheet.cell(row=1, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
            sheet.cell(row=1, column=col).fill = blueFill
            col = col + 1

        # headers for pre
        headers_format_list = ['REGISTRY NUMBER', 'OFFICIAL ENTITY NAME', 'LEGAL ADDRESS',
                               'LEGAL FORM', 'PREVIOUS NAMES']
        for header in headers_format_list:
            sheet.cell(row=1, column=col).value = header
            sheet.cell(row=1, column=col).font = sheet.cell(row=1, column=col).font.copy(bold=True)
            sheet.cell(row=1, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
            sheet.cell(row=1, column=col).fill = purpleFill
            col = col + 1

        # headers for matched
        headers = ['REGISTRY NUMBER', 'OFFICIAL ENTITY NAME', 'LEGAL ADDRESS',
                   'LEGAL FORM', 'PREVIOUS NAMES']
        for i in range(len(headers)):
            sheet.cell(row=1, column=col).value = headers[i]
            sheet.cell(row=1, column=col).font = sheet.cell(row=1, column=col).font.copy(bold=True)
            sheet.cell(row=1, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
            sheet.cell(row=1, column=col).fill = greyFill
            col = col + 1

        # fetched data lists
        registry_number_list = list_to_send_write_function[0]
        official_entity_name_list = list_to_send_write_function[1]
        legal_address_list = list_to_send_write_function[2]
        legal_form_list = list_to_send_write_function[3]
        status_list = list_to_send_write_function[4]
        dissolve_date_list = list_to_send_write_function[5]
        previous_names_list = list_to_send_write_function[6]
        date_of_change_list = list_to_send_write_function[7]

        statusVar.set("Reading Excel data...")
        top.update()

        # data lists from pre-file
        prefile_data = read_pre_file(file_path)
        prefile_country_list = prefile_data[0]
        prefile_registry_no_list_with_spaces = prefile_data[1]
        prefile_registry_no_list = []
        for no in prefile_registry_no_list_with_spaces:
            no = no.lstrip().rstrip()
            prefile_registry_no_list.append(no)
        prefile_entity_name_list = prefile_data[2]
        prefile_address_list = prefile_data[3]
        prefile_legal_form_list = prefile_data[4]
        prefile_prev_name_list = prefile_data[5]

        prefile_registry_no_to_be_written = []
        prefile_entity_name_to_be_written = []
        prefile_address_to_be_written = []
        prefile_legal_form_to_be_written = []
        prefile_prev_name_to_be_written = []
        for i in range(len(prefile_country_list)):
            if str(prefile_country_list[i]).lower() == country.lower():
                prefile_registry_no_to_be_written.append(prefile_registry_no_list[i])
                prefile_entity_name_to_be_written.append(prefile_entity_name_list[i])
                prefile_address_to_be_written.append(prefile_address_list[i])
                prefile_legal_form_to_be_written.append(prefile_legal_form_list[i])
                prefile_prev_name_to_be_written.append(prefile_prev_name_list[i])

        if len(prefile_registry_no_list) != count:
            prefile_registry_no_to_be_written = prefile_registry_no_to_be_written[0:count]
            prefile_entity_name_to_be_written = prefile_entity_name_to_be_written[0:count]
            prefile_address_to_be_written = prefile_address_to_be_written[0:count]
            prefile_legal_form_to_be_written = prefile_legal_form_to_be_written[0:count]
            prefile_prev_name_to_be_written = prefile_prev_name_to_be_written[0:count]

        prefile_list_to_sent_for_differences = [
            prefile_registry_no_to_be_written, prefile_entity_name_to_be_written,
            prefile_address_to_be_written, prefile_legal_form_to_be_written,
            prefile_prev_name_to_be_written]
        fetched_list_to_be_sent_for_differences = [
            registry_number_list, official_entity_name_list, legal_address_list,
            legal_form_list, previous_names_list]
        differences_list = get_differences_list(
            fetched_list_to_be_sent_for_differences, prefile_list_to_sent_for_differences)
        registry_numbers_differences_list = get_reg_no_diff_list(
            registry_number_list, prefile_registry_no_to_be_written)

        statusVar.set("Exporting data to Excel...")
        top.update()

        # write fetched data
        global_row = sheet.max_row + 1
        row = global_row
        for i in range(len(registry_number_list)):
            if check_box_states[0] is True:
                sheet.cell(row=row, column=1).value = registry_number_list[i]
                sheet.column_dimensions[convertToTitle(1)].width = 18
            if check_box_states[1] is True:
                sheet.cell(row=row, column=2).value = official_entity_name_list[i]
                sheet.column_dimensions[convertToTitle(2)].width = 22
            if check_box_states[2] is True:
                sheet.cell(row=row, column=3).value = legal_address_list[i]
                sheet.column_dimensions[convertToTitle(3)].width = 15.5
            if check_box_states[3] is True:
                sheet.cell(row=row, column=4).value = legal_form_list[i]
                sheet.column_dimensions[convertToTitle(4)].width = 13.5
            if check_box_states[4] is True:
                status, sep, deletion_date = status_list[i].partition(",")
                sheet.cell(row=row, column=5).value = status
                sheet.column_dimensions[convertToTitle(5)].width = 10
            if check_box_states[4] is True:
                sheet.cell(row=row, column=6).value = dissolve_date_list[i]
                sheet.column_dimensions[convertToTitle(6)].width = 15
            if check_box_states[5] is True:
                sheet.cell(row=row, column=7).value = previous_names_list[i]
                sheet.column_dimensions[convertToTitle(7)].width = 17
            if check_box_states[6] is True:
                sheet.cell(row=row, column=8).value = date_of_change_list[i]
                sheet.column_dimensions[convertToTitle(8)].width = 23.5
            row += 1

        # write pre-file data
        row = global_row
        for i in range(len(prefile_registry_no_to_be_written)):
            if check_box_states[0] is True:
                col = 9
                sheet.cell(row=row, column=col).value = prefile_registry_no_to_be_written[i]
                sheet.column_dimensions[convertToTitle(col)].width = 18
            if check_box_states[1] is True:
                col = 10
                sheet.cell(row=row, column=col).value = prefile_entity_name_to_be_written[i]
                sheet.column_dimensions[convertToTitle(col)].width = 22
            if check_box_states[2] is True:
                col = 11
                sheet.cell(row=row, column=col).value = prefile_address_to_be_written[i]
                sheet.column_dimensions[convertToTitle(col)].width = 16
            if check_box_states[3] is True:
                col = 12
                sheet.cell(row=row, column=col).value = prefile_legal_form_to_be_written[i]
                sheet.column_dimensions[convertToTitle(col)].width = 15
            if check_box_states[5] is True:
                sheet.cell(row=row, column=13).value = prefile_prev_name_to_be_written[i]
                sheet.column_dimensions[convertToTitle(13)].width = 17
            row += 1

        # write match data
        row = global_row
        for i in range(len(registry_numbers_differences_list)):
            if check_box_states[0] is True:
                col = 14
                sheet.cell(row=row, column=col).value = registry_numbers_differences_list[i]
                sheet.column_dimensions[convertToTitle(col)].width = 18
                if str(registry_numbers_differences_list[i]) == 'MATCH':
                    sheet.cell(row=row, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
                    sheet.cell(row=row, column=col).fill = greenFill
                elif str(registry_numbers_differences_list[i]) == 'NO MATCH':
                    sheet.cell(row=row, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
                    sheet.cell(row=row, column=col).fill = evs_red_fill
            row += 1

        row = global_row
        for i in range(len(prefile_registry_no_to_be_written)):
            if check_box_states[1] is True:
                col = 15
                sheet.cell(row=row, column=col).value = differences_list[0][i]
                sheet.column_dimensions[convertToTitle(col)].width = 22
                if str(differences_list[0][i]) == 'MATCH':
                    sheet.cell(row=row, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
                    sheet.cell(row=row, column=col).fill = greenFill
                elif str(differences_list[0][i]) == 'NO MATCH':
                    sheet.cell(row=row, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
                    sheet.cell(row=row, column=col).fill = evs_red_fill
            if check_box_states[2] is True:
                col = 16
                sheet.cell(row=row, column=col).value = differences_list[1][i]
                sheet.column_dimensions[convertToTitle(col)].width = 15.5
                if str(differences_list[1][i]) == 'MATCH':
                    sheet.cell(row=row, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
                    sheet.cell(row=row, column=col).fill = greenFill
                elif str(differences_list[1][i]) == 'NO MATCH':
                    sheet.cell(row=row, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
                    sheet.cell(row=row, column=col).fill = evs_red_fill
            if check_box_states[3] is True:
                col = 17
                sheet.cell(row=row, column=col).value = differences_list[2][i]
                sheet.column_dimensions[convertToTitle(col)].width = 13.5
                if str(differences_list[2][i]) == 'MATCH':
                    sheet.cell(row=row, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
                    sheet.cell(row=row, column=col).fill = greenFill
                elif str(differences_list[2][i]) == 'NO MATCH':
                    sheet.cell(row=row, column=col).style = Style(font=Font(color=Color(colors.WHITE)))
                    sheet.cell(row=row, column=col).fill = evs_red_fill
            if check_box_states[5] is True:
                sheet.cell(row=row, column=18).value = differences_list[3][i]
                sheet.column_dimensions[convertToTitle(18)].width = 22
                if str(differences_list[3][i]) == 'MATCH':
                    sheet.cell(row=row, column=18).style = Style(font=Font(color=Color(colors.WHITE)))
                    sheet.cell(row=row, column=18).fill = greenFill
                elif str(differences_list[3][i]) == 'NO MATCH':
                    sheet.cell(row=row, column=18).style = Style(font=Font(color=Color(colors.WHITE)))
                    sheet.cell(row=row, column=18).fill = evs_red_fill
            row += 1

        data_file.save("{}/Desktop/Entity Data Validation-Output.xlsx".format(home))
        return was_File_existing
    except IOError:
        tkMessageBox.showerror(
            "Output File Open",
            "Your Output File was open. Close it and run tool again!")
        sys.exitfunc()
def applicationWillTerminate_(self, notification):
    # The sys.exitfunc() won't get called unless we explicitly
    # call it here, because OS X is going to terminate our app,
    # not Python.
    sys.exitfunc()
    os.environ = self.oldenv

def RunCGI(self, path):
    global codecache
    if path not in codecache:
        try:
            codecache[path] = compile(open(path, 'r').read(), path, 'exec')
        except Exception, e:
            print>>sys.stderr, 'Could not compile script at '+path+': '+repr(e)
            import traceback
            traceback.print_exc()
            raise plugin.nss.HTTPError(500, longdesc='Script compile failed.')
    newns = {'nss': plugin.nss, '_server': self.server}
    sys.path.append(os.path.split(path)[0])
    exec codecache[path] in newns
    if hasattr(sys, 'exitfunc'):
        sys.exitfunc()
    del sys.path[-1]
    ret = sys.stdout.getvalue()  # This stagger may help just in case sys.stdout is no longer a StringIO
    return plugin.nss.HTTPRawResponse(ret)

def Hook(self, cgifs):
    cgifs.DoRun = self.DoRun

def UnHook(self, cgifs):
    del cgifs.DoRun  # Should del from __dict__, not type(cgifs).__dict__ (hopefully)

class HyperCGIPlugin(plugin.BasePlugin):
    def Load(self, server):
        self.server = server
        self.hooked = []
        self.cgienv = CGIEnvironment(self.server)

    def Unload(self):
        self.UnloadFrom(self.hooked)
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
A record written through DML stays in memory until the commit command is
executed; otherwise it is not persisted.
"""
import sys

try:
    import psycopg2
except:
    # note: sys.exit() is almost certainly what was intended here;
    # sys.exitfunc takes no arguments
    sys.exitfunc(
        "[!] Please install the psycopg2 library with: sudo apt-get install python-psycopg2"
    )

try:
    con = psycopg2.connect(
        "host=10.25.26.245 dbname=projeto user=admin password=123456")
    cur = con.cursor()
    cur.execute(
        "insert into cliente(id,nome,cpf) values(1, 'vitor','333.333.222.555')"
    )
    con.commit()
    print("Record created successfully")
except Exception as e:
    print("Error: %s" % e)
    print("Rolling back")
    con.rollback()
finally:
    print("Closing the database connection")
def _do_exit(code):
    if hasattr(sys, 'exitfunc'):
        sys.exitfunc()
    sys.stdout.flush()
    os._exit(code)
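# Illustrative sketch (save_state is hypothetical): os._exit() skips atexit
# handlers, which is exactly why _do_exit() calls sys.exitfunc() first on
# Python 2 before flushing stdout and exiting.
import atexit

def save_state():
    print("state saved")

atexit.register(save_state)
_do_exit(0)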
def transformation(self, coord_proc, method, direction):
    # initialize the Errorhandler
    errorhandler = ErrorHandler(self)
    profilnr_proc = listToList(self, coord_proc, 4)

    fehler_check = False
    ns_fehler_vorhanden = ns_error_determination(self, coord_proc)
    if ns_fehler_vorhanden:
        # rotate the profile by 45 degrees
        rotationresult = rotation(self, coord_proc, 45, False)
        fehler_check = True
        for i in range(len(coord_proc)):
            coord_proc[i][0] = rotationresult['x_trans'][i]
            coord_proc[i][1] = rotationresult['y_trans'][i]
            coord_proc[i][2] = rotationresult['z_trans'][i]

    # write the x and y values in the corresponding lists
    # instantiate an empty list for the transformed coordinates and other values
    # instantiate lists for the x and y values
    x_coord_proc = listToList(self, coord_proc, 0)
    y_coord_proc = listToList(self, coord_proc, 1)
    z_coord_proc = listToList(self, coord_proc, 2)
    selection_proc = listToList(self, coord_proc, 5)
    id_proc = listToList(self, coord_proc, 6)

    rangcheck_orginal = []
    for i in range(len(coord_proc)):
        tmplist = []
        for k in range(len(coord_proc[i])):
            tmplist.append(coord_proc[i][k])
        rangcheck_orginal.append(tmplist)
    for coords in range(len(rangcheck_orginal)):
        del rangcheck_orginal[coords][5]
        del rangcheck_orginal[coords][4]
        del rangcheck_orginal[coords][3]

    # distance between the two points above CHANGE
    # create the value lists that are used
    # insert when column = x is used
    xw = []
    yw = []
    # CHANGE
    xw_check = []
    yw_check = []
    for x in range(len(x_coord_proc)):
        # CHANGE only use the selection to calculate the slope
        if selection_proc[x] == 1:
            xw.append(x_coord_proc[x] - min(x_coord_proc))
            yw.append(y_coord_proc[x] - min(y_coord_proc))
        xw_check.append(x_coord_proc[x] - min(x_coord_proc))
        yw_check.append(y_coord_proc[x] - min(y_coord_proc))
        # QgsMessageLog.logMessage(str(xw), 'MyPlugin')

    # CHANGE
    # There is a problem with linregress if the points are nearly N-S oriented.
    # To solve this, it is necessary to swap the input values of the regression.
    # Calculate the regression for both directions
    linegress_x = scipy.stats.linregress(scipy.array(xw), scipy.array(yw))
    linegress_y = scipy.stats.linregress(scipy.array(yw), scipy.array(xw))

    # get the sum of residuals for both directions
    # We use the regression with the smaller sum of residuals
    res_x = self.calculateResidual(linegress_x, scipy.array(xw), scipy.array(yw), profilnr_proc[0])
    res_y = self.calculateResidual(linegress_y, scipy.array(yw), scipy.array(xw), profilnr_proc[0])

    if isnan(res_y) or res_x >= res_y:
        linegress = linegress_x
        slope = linegress[0]
    elif isnan(res_x) or res_x < res_y:
        linegress = linegress_y
        # if the linear regression with the swapped values was used,
        # the angle of the slope is rotated by 90 degrees
        slope = tan((-90 - (((atan(linegress[0]) * 180) / pi))) * pi / 180)
    else:
        criticalMessageToBar(self, ' Error', 'Calculation failed! Corrupt data!')
        sys.exitfunc()

    # CHANGE check the distance with all points
    distance = errorhandler.calculateError(linegress, xw_check, yw_check, coord_proc[0][4])

    # calculate the degree of the slope
    # Defining the starting point for the export of the section
    slope_deg = 0.0
    # Variable for determining the paint direction of the cutting line
    cutting_start = ''
    if slope < 0 and coord_proc[0][3] in ["N", "E"]:
        slope_deg = 180 - fabs((atan(slope) * 180) / pi) * -1
        cutting_start = 'E'
    elif slope < 0 and coord_proc[0][3] in ["S", "W"]:
        slope_deg = fabs((atan(slope) * 180) / pi)
        cutting_start = 'W'
    elif slope > 0 and coord_proc[0][3] in ["S", "E"]:
        slope_deg = ((atan(slope) * 180) / pi) * -1
        cutting_start = 'W'
    elif slope > 0 and coord_proc[0][3] in ["N", "W"]:
        slope_deg = 180 - ((atan(slope) * 180) / pi)
        cutting_start = 'E'
    elif slope == 0 and coord_proc[0][3] == "N":
        slope_deg = 180
        cutting_start = 'E'

    # instantiate lists for the transformed coordinates
    x_trans = []
    y_trans = []
    z_trans = []
    first_rotationresult = rotation(self, coord_proc, slope_deg, True)
    for i in range(len(coord_proc)):
        x_trans.append(first_rotationresult['x_trans'][i])
        y_trans.append(first_rotationresult['y_trans'][i])
        z_trans.append(first_rotationresult['z_trans'][i])

    if direction == "absolute height":
        # To get an export for the absolute height it is necessary to rotate the
        # profile like the horizontal way and move it on the y-axis
        x_coord_proc = listToList(self, coord_proc, 0)
        y_coord_proc = listToList(self, coord_proc, 1)
        z_coord_proc = listToList(self, coord_proc, 2)
        # calculate the means
        mean_x = mean(x_coord_proc)
        mean_y = mean(y_coord_proc)
        mean_z = mean(z_coord_proc)
        for i in range(len(x_trans)):
            x_trans[i] = x_trans[i] - mean_x
            z_trans[i] = z_trans[i] - mean_y + mean_z
            # printLogMessage(self, str(x_coord_proc[i]), 'ttt')
            # printLogMessage(self, str(x_trans[i]), 'ttt')
        # printLogMessage(self, str(min_x), 'ttt')
        new_min_x = min(x_trans)
        for i in range(len(x_trans)):
            x_trans[i] = x_trans[i] + abs(new_min_x)

    # instantiate a list for the transformed coordinates
    coord_trans = []
    # CHANGE
    rangcheck_trans = []
    # build the finished list
    for i in range(len(coord_proc)):
        coord_trans.append([
            x_trans[i], y_trans[i], z_trans[i], coord_proc[i][4],
            coord_proc[i][2], distance[i], selection_proc[i], id_proc[i]
        ])
        rangcheck_trans.append([x_trans[i], z_trans[i], y_trans[i]])

    # If the aim is to get the view of the surface, the x-axis has to be rotated as well
    if method == "surface":
        # calculating the slope, therefore preparing lists
        z_yw = []
        z_zw = []
        for i in range(len(coord_proc)):
            z_yw.append(y_trans[i] - min(y_trans + z_trans))
            z_zw.append(z_trans[i] - min(y_trans + z_trans))
        # actual calculation of the slope using the linear regression again
        z_slope = scipy.stats.linregress(scipy.array(z_yw), scipy.array(z_zw))[0]
        # transform the radians of the slope into degrees
        z_slope_deg = 0.0
        if z_slope < 0:
            z_slope_deg = -(90 - fabs(((atan(z_slope) * 180) / pi)))
        elif z_slope > 0:
            z_slope_deg = 90 - ((atan(z_slope) * 180) / pi)
        elif z_slope == 0:
            z_slope_deg = 0.0
        # calculate the centerpoint
        z_center_y = mean(y_trans)
        z_center_z = mean(z_trans)
        # rewrite the lists for the y and z values
        y_trans = []
        z_trans = []
        for i in range(len(coord_trans)):
            y_trans.append(z_center_y
                           + (coord_trans[i][1] - z_center_y) * cos(z_slope_deg / 180 * pi)
                           - (coord_trans[i][2] - z_center_z) * sin(z_slope_deg / 180 * pi))
            z_trans.append(z_center_z
                           + (coord_trans[i][1] - z_center_y) * sin(z_slope_deg / 180 * pi)
                           + (coord_trans[i][2] - z_center_z) * cos(z_slope_deg / 180 * pi))
        # empty and rewrite the output list
        coord_trans = []
        rangcheck_trans = []
        for i in range(len(coord_proc)):
            # CHANGE
            coord_trans.append([
                x_trans[i], y_trans[i], z_trans[i], coord_proc[i][4],
                coord_proc[i][2], distance[i], selection_proc[i], id_proc[i]
            ])
            rangcheck_trans.append([x_trans[i], z_trans[i], y_trans[i]])

    # If the direction is in the "original" setting, the points have to be
    # rotated back to their original orientation
    if direction == "original":
        # the rotation angle is the negative angle of the first rotation
        if fehler_check == True:
            y_slope_deg = -slope_deg - 45
        else:
            y_slope_deg = -slope_deg
        # get the centerpoint
        y_center_x = mean(x_trans)
        y_center_z = mean(z_trans)
        # rewrite the lists for the x and z values
        x_trans = []
        z_trans = []
        for i in range(len(coord_trans)):
            x_trans.append(y_center_x
                           + (coord_trans[i][0] - y_center_x) * cos(y_slope_deg / 180 * pi)
                           - (coord_trans[i][2] - y_center_z) * sin(y_slope_deg / 180 * pi))
            z_trans.append(y_center_z
                           + (coord_trans[i][0] - y_center_x) * sin(y_slope_deg / 180 * pi)
                           + (coord_trans[i][2] - y_center_z) * cos(y_slope_deg / 180 * pi))
        # empty and rewrite the output list
        coord_trans = []
        rangcheck_trans = []
        for i in range(len(coord_proc)):
            # CHANGE
            coord_trans.append([
                x_trans[i], y_trans[i], z_trans[i], coord_proc[i][4],
                coord_proc[i][2], distance[i], selection_proc[i], id_proc[i]
            ])
            rangcheck_trans.append([x_trans[i], z_trans[i], y_trans[i]])

    # CHANGE
    # check the distances of the outer points between the old points and the converted ones
    original_outer_points = self.outer_profile_points(coord_proc)
    original_distance = self.calculate_distance_from_outer_profile_points_orgiginal(
        original_outer_points)
    new_outer_points = []
    for point in coord_trans:
        if point[7] == original_outer_points[0][6] or point[7] == original_outer_points[1][6]:
            new_outer_points.append(point)
    new_distance = self.calculate_distance_from_outer_profile_points_proc(new_outer_points)

    printLogMessage(self, 'PR:' + str(coord_proc[0][4]), 'Distance')
    printLogMessage(self, 'Original Distance: ' + str(original_distance), 'Distance')
    printLogMessage(self, 'New Distance: ' + str(new_distance), 'Distance')
    printLogMessage(self, 'Diff. Distance: ' + str(abs(original_distance - new_distance)), 'Distance')
    if abs(original_distance - new_distance) > 0.01:
        criticalMessageToBar(
            self, 'Error',
            'Profile was calculated incorrect (1cm acc.) See Log-Window: ' + str(str(coord_proc[0][4])))
        printLogMessage(self, 'DISTANCE WARNING!', 'Distance')

    return {
        'coord_trans': coord_trans,
        'cutting_start': cutting_start,
        'linegress': linegress,
        'ns_error': ns_fehler_vorhanden
    }
def execute():
    # Get the pipes that were passed from the parent.
    request_pipe, response_pipe = [int(x) for x in sys.argv[1:3]]
    request_fh = os.fdopen(request_pipe, 'r')
    response_fh = os.fdopen(response_pipe, 'w')

    # Just in case someone calls `exit()` which won't be caught below.
    result_package = {'status': 'failed'}

    try:
        # Get the job/agenda package from the parent, but we cannot unpack it
        # yet since the preflight may be required in order to setup the
        # environment in which it can function.
        job = pickle.load(request_fh)
        package = pickle.load(request_fh)

        # Run any requested preflight functions.
        preflight = package.get('preflight')
        if preflight:
            log('running preflight %s' % utils.get_func_name(package['preflight']))
            sys.stdout.flush()
            preflight = utils.get_func(preflight)
            preflight(package)

        # Finally, unpack it.
        package = utils.unpack(package)

        # Assemble the command to execute
        func = utils.get_func(package['func'])
        func_str = utils.get_func_name(package['func'])
        args = package.get('args') or ()
        kwargs = package.get('kwargs') or {}

        # Print out what we are doing.
        arg_spec = ', '.join([repr(x) for x in args] +
                             ['%s=%r' % x for x in sorted(kwargs.iteritems())])
        log('calling %s(%s)' % (func_str, arg_spec))
        sys.stdout.flush()

        result_package = {
            'result': func(*args, **kwargs),
            'status': 'complete',
        }
    except Exception as e:
        traceback.print_exc()
        result_package = {
            'exception': e,
            'status': 'failed',
        }

    log('child sending result_package')

    # Send the results to the parent.
    pickle.dump(result_package, response_fh, -1)
    request_fh.close()
    response_fh.close()

    log('child shutting down')

    # We are going to make a best effort to clean up Python, but we can't
    # let it go through its normal process.

    # Run atexit.
    if hasattr(sys, "exitfunc"):
        sys.exitfunc()

    # Collect the highest generation that we can.
    gc.collect(2)

    os._exit(0)
def applicationWillTerminate_(self, notification):
    # The sys.exitfunc() won't get called unless we explicitly
    # call it here, because OS X is going to terminate our app,
    # not Python.
    sys.exitfunc()
                for s in select_list:
                    s(*args)
        finally:
            # XXX: We have a possible race condition if a signal is handled at
            # the beginning of this clause, just before the following line.
            exit.acquire(0)     # inhibit signals
            while cleanup:
                try:
                    cleanup.pop()()
                except:
                    pass
            exit.release()
    except ReexecException, e:
        logging.info(e)
    except Exception:
        utils.log_exception()
        sys.exit(1)
    try:
        sys.exitfunc()
    finally:
        os.execvp(sys.argv[0], sys.argv)


if __name__ == "__main__":
    try:
        main()
    except SystemExit, e:
        if type(e.code) is str:
            if hasattr(logging, 'trace'):   # utils.setupLog called
                logging.critical(e.code)
        raise
def killGame(self):
    sys.exitfunc()
    sys.exit()
def BIOS ():
    del BIOS
    ##print "pyvm BIOS 1.0"
    try:
        _COMPAT ()
    except:
        print "BIOS: (errors in _COMPAT)"

    if not sys.argv:
        print "BIOS: Hi!"
        return

    if sys.argv [0] == '-V':
        print __version__
        return

    if sys.argv [0].endswith ('.pyc'):
        pycfile = sys.argv [0]
    elif sys.argv [0].endswith ('.py') or sys.argv [0].endswith ('.pe'):
        try:
            import pyc
        except:
            print "BIOS: Cannot import the pyc compiler", sys.exc_info ()
            return
        try:
            pycfile = pyc.compileFile (sys.argv [0], pyvm=True, dynlocals=True, marshal_builtin=True)
            ##pycfile = pyc.compileFile (sys.argv [0], pyvm=True, dynlocals=True)
        except:
            print 'BIOS:', sys.argv [0], ":Syntax Error", traceback.format_exc ()
            return
    elif sys.argv [0] == '-cc':
        try:
            import pyc
            ##pyc.compileFile (sys.argv [1], dynlocals=True)
            pyc.compileFile (sys.argv [1], pyvm=True, dynlocals=True, marshal_builtin=True)
        except:
            print "Compilation Failed", sys.exc_info ()
        return
    else:
        print "BIOS: No script"
        return

    if '/' in pycfile:
        try:
            import os
            basedir = os.path.dirname (os.path.abspath (pycfile)) + '/'
            sys.path.insert (0, basedir)
        except:
            print "BIOS: cannot import os.path. Will not add basedir to sys.path"

    try:
        __import_compiled__ (pycfile, '__main__')
    except SystemExit:
        pass
    except:
        try:
            print "BIOS: Uncaught exception:", sys.exc_info (), traceback.format_exc ()
        except:
            try:
                print "BIOS: exception while formatting exception!!", sys.exc_info ()
            except:
                pass
    try:
        sys.exitfunc ()
    except:
        pass
def setUp():
    # Path to the Firefox profile
    profile = webdriver.FirefoxProfile(
        "C:\\Users\\alan.zacarias\\AppData\\Roaming\\Mozilla\\Firefox\\Profiles\\k4sorz7l.alan")
    driver = webdriver.Firefox(firefox_profile=profile)
    singFirst()
    driver.get(queue)  # Load page
    time.sleep(0.2)  # Let the page load
    # ssoLogin()
    driver.implicitly_wait(3)  # 3 seconds
    driver.switch_to_frame("mycase")
    select_dropdown(driver, "allComm", "322555")  # Selects the community
    select_dropdown(driver, "selIdProcessAjax", "1820234")  # Selects the process
    select_dropdown(driver, "selIdStepAjax", "1820246")  # Selects the step
    driver.find_element_by_xpath("//a[contains(@href, 'fnSubmit')]").click()  # click button

    Cases = []
    SSOs = []
    table = driver.find_element_by_xpath("//table[@bgColor = '#dddddd']")
    rowCount = table.find_elements_by_xpath("//tbody/tr[@bgColor = '#ffffff']")
    # length of table
    totalNumber = len(rowCount)

    # Open file for writing
    today = datetime.datetime.today().strftime('%m/%d/%Y')
    filename = "C:\\Users\\alan.zacarias\\Desktop\\AssigningLog.txt"
    target = open(filename, 'a')
    target.write("+++ %s +++" % today)
    target.write("\n")
    target.write("Total Cases to be assigned: %s" % totalNumber)
    target.write("\n")

    # this cycle gets all case numbers from the workflow
    if totalNumber != 0:
        numCase = driver.find_elements_by_xpath("//td[@class = 'reqId']")
        Cases = [td.text for td in numCase]
        numSSO = driver.find_elements_by_xpath("//tr[@bgColor = '#ffffff']/td[8]")
        SSOs = [td.text for td in numSSO]
    else:
        target.write("There's no cases on the queue")
        target.write("\n")
        target.close()
        sys.exitfunc()

    # this cycle goes through the whole list of cases
    count = 0
    for i in enumerate(SSOs):
        workflow(Cases[count])
        console, isDgInstalled = verify_DG(SSOs[count])
        close_tab()
        time.sleep(1)
        if console != 'Null':
            selectConsole(console, isDgInstalled)
            time.sleep(1)
            check_elements()
            target.write("SSO: %s Console: %s " % (SSOs[count], console))
            target.write("\n")
            submit()
            close_tab()
        else:
            target.write("SSO: %s Investigation Tool Error" % SSOs[count])
            target.write("\n")
        count = count + 1
    target.close()
def run(self):
    """Run method that performs all the real work"""
    # trigger help button
    # helpButton.clicked.connect(showPluginHelp())

    # Create the dialog (after translation) and keep reference
    self.dlg = profileAARDialog()

    # initialize the Errorhandler
    errorhandler = ErrorHandler(self.iface)
    magicbox = Magic_Box(self.iface)
    export = Export(self.iface)

    '''DEFINE OUTPUT PATH'''
    # Choose file if button is clicked
    self.dlg.outputPath.clear()
    self.dlg.outputButton.clicked.connect(self.select_output_file)

    '''SELECT INPUT IN GUI'''
    # CHOOSE INPUT LAYER
    # read layers from qgis layers and filter out the pointlayers to display
    # in the input combobox
    self.dlg.inputCombo.setFilters(QgsMapLayerProxyModel.PointLayer)

    # CHOOSE COLUMNS FOR Z-VALUE, VIEW AND PR-NUMBER
    # CALLS FUNCTION LAYER_FIELD (once on startup on activation, to enable
    # using when only one point fc is present)
    self.dlg.inputCombo.activated.connect(self.layer_field)
    self.dlg.inputCombo.currentIndexChanged.connect(self.layer_field)
    self.dlg.helpButton.clicked.connect(self.show_help)

    '''SHORT BLOCK OF PLUGIN CODE (runs the dialog and triggers the event after the OK button was pressed)'''
    # create/show the dialog
    self.dlg.show()
    # Run the dialog event loop
    result = self.dlg.exec_()
    # See if OK was pressed
    if result:
        inputCheck = False
        fieldCheck = False
        # Check if input fields are filled correctly and if the layer has correct properties
        inputCheck = errorhandler.input_check(self.dlg.outputPath.text())

        '''GET INPUT FROM GUI TO VARIABLES/PREPARE LIST OF DATA'''
        # GET TEXT FROM METHOD AND DIRECTION
        # Read the method that is selected
        method = unicode(self.dlg.methodCombo.currentText())
        # read the direction that is selected
        direction = unicode(self.dlg.directionCombo.currentText())
        # Get the selected layer
        selectedLayer = self.dlg.inputCombo.currentLayer()

        # PREPARE DATA LIST
        # Go through all data rows in the selected layer
        iter = selectedLayer.getFeatures()
        # list for the data
        coord = []
        # list for the different profile names
        profile_names = []

        # check if the z values have the correct type and if the crs is projected
        fieldCheck = errorhandler.field_check(selectedLayer, self.dlg.zCombo.currentText())

        height = False
        section = False
        if fieldCheck == True or inputCheck == True:
            sys.exitfunc()
        if self.dlg.hightBox.isChecked():
            height = True
        if self.dlg.sectionBox.isChecked():
            section = True

        point_id = 0
        for feature in iter:
            # retrieve every feature with its geometry and attributes
            # fetch geometry
            # TODO: 3 Nachkommastellen!! Bisher sind es nur 2.....
            geom = feature.geometry()
            # getting x and y coordinate
            x = round(geom.asPoint().x(), 3)
            y = round(geom.asPoint().y(), 3)
            # write coordinates and attributes (view, profile and z) in a list
            # TODO: Use dictionary or object
            # add an ID to each point
            point_id += 1
            coord.append([x, y, feature[self.dlg.zCombo.currentText()],
                          feature[self.dlg.viewCombo.currentText()],
                          feature[self.dlg.profileCombo.currentText()],
                          feature[self.dlg.useCombo.currentText()], point_id])
            # write a list of profilenames (unique entries)
            if feature[self.dlg.profileCombo.currentText()] not in profile_names:
                profile_names.append(feature[self.dlg.profileCombo.currentText()])

        '''WORK ON EVERY PROFILE IN LOOP'''
        # CREATE A LIST OF DATA FOR EVERY PROFILE
        # select every single profile in a loop
        coord_trans = []
        height_points = []
        cutting_line = []
        for i in range(len(profile_names)):
            # instantiate a temporary list for a single profile
            coord_proc = []
            # instantiate list for the view to check if all entries in one profile are the same
            view_check = []
            # CHANGE
            # instantiate list for the selection to check if all entries in one profile are the same
            selection_check = []
            # iterate through the features in coord; if the profilename matches,
            # store the feature's datalist in the templist
            for x in range(len(coord)):
                if coord[x][4] == profile_names[i]:
                    coord_proc.append(coord[x])
                    # write the unique view values in the checklist
                    if coord[x][3] not in view_check:
                        view_check.append(coord[x][3])
                    # CHANGE write the unique selection values in the checklist
                    if coord[x][4] not in selection_check:
                        selection_check.append(coord[x][5])

            # Handle errors depending on the attributes in the fields
            # Errorhandling: check the single profiles for inconsistency.
            # Therefore we need the data of the actual profile, the view_check with
            # the view values and the actual profile name; selection is 0 or 1.
            profileCheck = False
            if fieldCheck == False and inputCheck == False:
                profileCheck = errorhandler.singleprofile(
                    coord_proc, view_check, str(profile_names[i]), selection_check)
            if profileCheck == False and fieldCheck == False and inputCheck == False:
                # Calculating the profile and adding it to the list
                transform_return = magicbox.transformation(coord_proc, method, direction)
                coord_height_list = transform_return['coord_trans']
                coord_trans.append(coord_height_list)
                # CHANGE If checked, the upper right point has to be exported as a point
                if height == True:
                    height_points.append(magicbox.height_points(coord_height_list))
                if section == True:
                    cutting_line.append(sectionCalc(
                        self, coord_proc, transform_return['cutting_start'],
                        transform_return['linegress'], transform_return['ns_error']), )

        if profileCheck == False:
            '''Export the data'''
            # For exporting we need the data, the path and the crs of the input data
            export.export(coord_trans, self.dlg.outputPath.text(), selectedLayer.crs())
            # If points are checked, export them
            # CHANGE
            if height == True:
                export.export_height(height_points, self.dlg.outputPath.text(), selectedLayer.crs())
            if section == True:
                # if a profile is recommended, we have to export it. To make it easy
                # to display everything, export the left point first
                printLogMessage(self, str(coord_proc[0][4]), 'sec111t')
                export.export_section(cutting_line, coord_proc[0][4],
                                      self.dlg.outputPath.text(), selectedLayer.crs())

            # Load the file to qgis automatically
            layer = self.iface.addVectorLayer(self.dlg.outputPath.text(), "", "ogr")
            # CHANGE
            if height == True:
                filename = self.dlg.outputPath.text().split(".shp")[0]
                filename = filename + "_height.shp"
                layer = self.iface.addVectorLayer(filename, "", "ogr")
            if section == True:
                filename = self.dlg.outputPath.text().split(".shp")[0]
                filename = filename + "_section.shp"
                layer = self.iface.addVectorLayer(filename, "", "ogr")
            # if the loading of the layer fails, give a message
            if not layer:
                criticalMessageToBar(self, 'Error', 'Failed to open ' + self.dlg.outputPath.text())
            pass