def read_metadata():
    """Locate and load the stimuli metadata file.

    Returns:
        The parsed JSON content of ``stimuli_meta.json``.

    Raises:
        FileNotFoundError: if ``find_path`` cannot locate the file.
    """
    # Resolve once instead of calling find_path() three times as before.
    meta_path = find_path('stimuli_meta.json')
    if not meta_path:
        # Original raised a bare FileNotFoundError with no message.
        raise FileNotFoundError('stimuli_meta.json not found')
    # Original logged the wrong filename ('meta.json'); report the file we read.
    print('Found metadata at {}'.format(meta_path))
    with open(meta_path, 'r') as f:
        return json.load(f)
def check_proxy(i, proxy):
    """Validate one HTTP proxy with a headless PhantomJS browser.

    ``proxy`` is a 2-tuple of (host:port, auth-or-None); ``i`` is its index in
    the module-level ``proxies`` list (used only for log prefixes).  On full
    success the live browser is appended to the module-level
    ``faceless_browsers`` list; on any failure the function just returns.
    """
    proxy_auth = proxy[1]
    # PhantomJS takes proxy settings as CLI service arguments.
    service_args = [
        '--proxy={}'.format(proxy[0]),
        '--proxy-type=http',
        '--ignore-ssl-errors=true'
    ]
    if proxy_auth:
        service_args.append('--proxy-auth={}'.format(proxy_auth))
    print("")
    logging.info("[{}/{}] Testing proxy: {}".format(i + 1, len(proxies), proxy))
    # Randomized UA so each proxied browser looks distinct.
    user_agent = get_user_agent()
    desired_capabilities = get_desired_capabilities_phantom(user_agent)
    browser = webdriver.PhantomJS(executable_path=find_path('phantomjs'),
                                  service_args=service_args,
                                  desired_capabilities=desired_capabilities)
    browser.set_page_load_timeout(30)
    # Step 1: confirm the proxy works at all by fetching an IP-echo page.
    try:
        browser.get('http://tools.yzy.io/ip.php')
        element = WebDriverWait(browser, 30).until(EC.presence_of_element_located((By.TAG_NAME, "body")))
        if not element:
            logging.error('[{}/{}] Proxy could not load URL: {}'.format(i + 1, len(proxies), 'http://tools.yzy.io/ip.php'))
            return
        logging.info('[{}/{}] Proxy using IP: {}'.format(i + 1, len(proxies), element.text))
    except TimeoutException:
        logging.error('[{}/{}] Proxy Time-out: {}'.format(i + 1, len(proxies), proxy))
        return
    except Exception as e:
        logging.error('[{}/{}] {}'.format(i + 1, len(proxies), e))
        return
    # Step 2: confirm the proxy is not banned on the actual target page.
    try:
        browser.get(PRODUCT_URL)
        if ('you have been blocked' in browser.page_source.lower()) or ('a security issue was automatically identified' in browser.page_source.lower()):
            logging.error('[{}/{}] Proxy Banned on {}'.format(i + 1, len(proxies), PRODUCT_URL))
            return
        element = WebDriverWait(browser, 30).until(EC.presence_of_element_located((By.TAG_NAME, "div")))
        if not element:
            # NOTE(review): unlike the first check, there is no `return` here,
            # so a failed load still falls through to "Success" — confirm intent.
            logging.error('[{}/{}] Proxy could not load URL: {}'.format(i + 1, len(proxies), PRODUCT_URL))
        logging.info('[{}/{}] Proxy Test Success'.format(i + 1, len(proxies)))
        time.sleep(5)
        # Keep the warmed-up browser for later session transfer.
        faceless_browsers.append({
            'browser': browser,
            'user_agent': user_agent,
            'proxy': proxy
        })
    except TimeoutException:
        logging.error('[{}/{}] Proxy Time-out: {}'.format(i + 1, len(proxies), proxy))
        return
    except Exception as e:
        logging.error('[{}/{}] {}'.format(i + 1, len(proxies), e))
        return
def inner():
    """Generator: launch `bokeh serve` for the closure's `script`/`port` and
    stream its stderr as HTML lines; records the child PIDs in SERVING.
    """
    # Location of bokeh script
    exec_path = find_path(script + '.py')
    # Serve script on specified port
    cmd = "bokeh serve {} --port {} ".format(exec_path, port)
    # Whitelist cross site connections e.g. this flask application
    cmd += "--allow-websocket-origin {} ".format(SOCKET)
    cmd += "--log-level info "
    # Log memory usage, frequency in ms
    cmd += "--mem-log-frequency=30000"
    # Spawn.  NOTE(review): shell=True with a formatted string — fine for
    # trusted script/port values, unsafe if they ever come from user input.
    proc = subprocess.Popen(
        [cmd],
        shell=True,
        stderr=subprocess.PIPE,
        # stdout=open('out.txt', 'w'),
        universal_newlines=True
    )
    # Streaming stderr of bokeh serve to webpage
    for line in iter(proc.stderr.readline, ''):
        yield line.rstrip() + '<br/>\n'
        # Update globals if newer version is up and running: bokeh prints
        # "process id: NNN" once the server is ready.
        bokeh_pid = re.findall(r'process id: (\d+)', line)
        if bokeh_pid:
            SERVING[script] = dict(port=port,
                                   shell_pid=proc.pid,
                                   bokeh_pid=int(bokeh_pid.pop()))
def colonize(self, target_cell, starting_cell, breadth, min_breath=1):
    """Start relocating from starting_cell toward target_cell.

    Computes a path via find_path; if a usable path exists, stores it
    (minus the already-owned first cell), starts the relocation timer and
    returns True.  Returns False when no path (or a trivial one) exists.
    """
    path = find_path(starting_cell, target_cell, [], self.game)
    if path and len(path) > 1:
        # First cell is the one we already occupy, so drop it.
        self.reloc_path = path[1:]
        self.relocation_timer.start()
        return True
    return False
def transfer_session(browser):
    """Hand off a PhantomJS session (dict with 'browser', 'user_agent',
    'proxy') to a visible Chrome instance with the same UA, proxy and cookies.
    """
    driver = browser['browser']
    logging.info("[New Thread] Transferring session {}".format(
        driver.session_id))
    # Save cookies to disk before touching the session.
    write_cookies_to_file(driver.get_cookies())
    url = driver.current_url
    # Mirror the headless browser's identity in Chrome.
    chrome_options = Options()
    chrome_options.add_argument("user-agent={}".format(browser['user_agent']))
    chrome_options.add_argument("--proxy-server=http://{}".format(
        browser['proxy'][0]))
    chrome = webdriver.Chrome(executable_path=find_path('chromedriver'),
                              chrome_options=chrome_options)
    # Open URL and wait for the human to complete the proxy login prompt.
    chrome.get(url)
    logging.info("[CHROME/PROXY] Login with {}".format(browser['proxy'][1]))
    chrome.implicitly_wait(30)
    element = WebDriverWait(chrome, 1000).until(
        EC.presence_of_element_located((By.TAG_NAME, "div")))
    # Transfer cookies from the headless session into Chrome.
    chrome.delete_all_cookies()
    for cookie in driver.get_cookies():
        chrome.add_cookie(cookie)
    chrome.refresh()
    # Keep the thread (and thus the Chrome window) alive for manual use.
    time.sleep(10000)
def _render_image(self, src, x, y, image_part=None):
    """Blit the image found at *src* onto this widget's texture at (x, y).

    The image is resized to a square whose side is the largest extent of
    the 'W' and '_' glyphs, then copied raw into self.texture.
    """
    from utils import find_path
    from PIL import Image
    from img_xfos import img_modes
    resolved = find_path(src)
    pil_img = Image.open(resolved)
    mode = pil_img.mode
    # Square cell size: the max of both glyph extents in either dimension.
    w_a, h_a = self.get_extents("W")
    w_b, h_b = self.get_extents("_")
    side = max(w_a, h_a, w_b, h_b)
    pil_img = pil_img.resize((side, side))
    self.texture.blit_buffer(pil_img.tobytes(),
                             colorfmt=img_modes[mode],
                             size=(side, side),
                             pos=(x, y))
def ask_config_line(key_to_ask):
    """Prompt the user for a new value for the config key *key_to_ask*.

    The prompt style depends on ConfigHandler.config_layout[key_to_ask]:
    'yn' asks yes/no, 'path' runs the path picker, 'browser' the browser
    picker, 'text' takes free-form input; anything else yields ''.
    """
    cfg_type = ConfigHandler.config_layout[key_to_ask]
    if cfg_type == "yn":
        print(f"Would you like to {key_to_ask}")
        return ask_yes_no()
    print(f"Select your {key_to_ask}")
    if cfg_type == "path":
        return find_path()
    if cfg_type == "browser":
        return ask_preferred_browser()
    if cfg_type == "text":
        return input("> ")
    # Unknown config type: keep the original empty-string fallback.
    return ""
def get_img(stimulus):
    """Build a Cds holding the RGBA image for *stimulus* (a file name).

    The pixel data is packed as one uint32 per pixel and flipped vertically
    so the image plots with a lower-left origin.
    """
    rgba = Image.open(find_path(stimulus)).convert('RGBA')
    width, height = rgba.size
    # View the uint32 buffer as (H, W, 4) bytes so we can copy RGBA in.
    packed = np.empty((height, width), dtype=np.uint32)
    byte_view = packed.view(dtype=np.uint8).reshape((height, width, 4))
    byte_view[:, :, :] = np.flipud(np.asarray(rgba))
    return Cds({'image': [packed],
                'width': [width],
                'height': [height],
                })
def read_main_df():
    """Read the main fixation table ('up.csv') into a typed DataFrame.

    'FixationOOB' arrives as the strings 'True'/'False' and is mapped to
    booleans; the remaining columns are coerced via convert_type.
    """
    frame = read_sv(return_as=pd.DataFrame,
                    path=find_path('up.csv'),
                    encoding='utf-8',
                    delimiter='\t',
                    header=True)
    assert 'FixationOOB' in frame.columns
    # String flags -> booleans.
    frame['FixationOOB'] = frame['FixationOOB'].replace({'True': True, 'False': False})
    convert_type(frame,
                 Timestamp='int64',
                 FixationDuration='int64',
                 MappedFixationPointX='int64',
                 MappedFixationPointY='int64',
                 StimuliName='object')
    return frame
def load_file(self, filepath='myxlsfile.xlsx'):
    """Import an XLS(X) stack description: one StackPart widget per row.

    Known columns (qt/source/template/dual/layout_*) become widget
    attributes; every other non-empty cell goes into the widget's values
    dict.  Finishes by refreshing the stack view.
    """
    stack = self.ids['stack']
    from kivy.resources import resource_add_path
    from os.path import split
    # Let kivy resolve resources relative to the spreadsheet's folder.
    resource_add_path(split(filepath)[0])
    from utils import XLRDDictReader
    from utils import find_path
    self.record_last_file(filepath)
    boxes = list()
    with open(filepath, 'rb') as XLFile:
        # Columns with dedicated handling below; everything else is a value.
        stds_headers = {
            'qt', 'source', 'template', 'dual', 'layout_x', 'layout_y',
            'layout_w', 'layout_h', 'layout_angle', 'layout_page'
        }
        for obj in XLRDDictReader(XLFile):
            box = StackPart()
            box.qt = int(obj.get('qt', 0))
            box.dual = 'dual' in obj and obj['dual']
            values = dict()
            if "template" in obj and obj['template']:
                box.template = obj['template']
            if 'source' in obj and obj['source']:
                # Fall back to the raw cell text when find_path fails.
                _s = find_path(obj['source'])
                if _s is None:
                    _s = obj['source']
                box.source = _s
            # A layout is only set when ALL six layout_* cells are present.
            if 'layout_x' in obj and 'layout_y' in obj and 'layout_w' in obj and 'layout_h' in obj and 'layout_angle' in obj and 'layout_page' in obj:
                box.layout = obj['layout_x'], obj['layout_y'], obj[
                    'layout_w'], obj['layout_h'], obj['layout_angle'], obj[
                        'layout_page']
            for attr in obj:
                if attr in stds_headers:
                    continue
                if obj[attr]:
                    values[attr] = obj[attr]
            box.values = values
            stack.add_widget(box)
            boxes.append(box)
    # Reverse so refresh() sees the rows in spreadsheet order.
    boxes.reverse()
    self.refresh(boxes, msg='Import %s over' % split(filepath)[-1])
def find_template_path(filename):
    """Resolve a template reference of the form 'name@file' or 'file'.

    Returns (name, filepath): *name* is the part before '@' ('' if none),
    *filepath* an absolute path when resolution succeeds, otherwise the
    original filename.  NOTE(review): relies on a module-level
    ``path_reader`` not visible here — confirm it is in scope.
    """
    from os.path import abspath, isfile, join
    from conf import gamepath
    from utils import find_path
    if "@" in filename:
        # Only use the sub-template: split off the name before '@'.
        name, filename = filename.split('@')
    else:
        name = ""
    r = find_path(filename)
    from kivy.resources import resource_paths, resource_find
    #for p in resource_paths: print p, isfile(join(p,filename))
    if not r or not isfile(r):
        # Not found via resources: try relative to gamepath.
        if isfile(join(gamepath, path_reader(filename))):
            r = join(gamepath, path_reader(filename))
    # Convert to abspath/normpath: the filename indexes rules inside kivy,
    # and a canonical path avoids re-importing the same rules twice.
    if r:
        filepath = abspath(path_reader(r))
    else:
        filepath = filename
    return name, filepath
def get_mask_info() -> bool:
    """Query the shop API for the mask product and pick a pickup store.

    Fills the module-level globals shop_id / mask_id / mask_name /
    act_info_url / flag from the JSON responses.  Returns the global
    ``flag`` (True once a matching store was found).
    """
    global shop_id, mask_id, mask_name, url, act_info_url, flag
    response = session.get(url=url, headers=header)
    data: Dict = response.json()
    print(data)
    man = find_path(data)
    user_data = data['data']['userinfo']
    print(user_data)
    # Locate the JSON paths whose value contains '口罩' (face mask).
    mask_paths = man.in_value_path('口罩')
    # Strip the trailing "['link_name']" accessor to get the node path.
    mask_path = mask_paths[0][:-len('[link_name]') - 2]  # Nantong urban district
    # mask_path = mask_paths[1][:-len('[link_name]') - 2]  # Nantong Tongzhou
    # mask_path = mask_paths[2][:-len('[link_name]') - 2]  # Nantong Haimen
    # HACK/security: eval() on a path derived from the server response —
    # only safe as long as the API is trusted; consider a safe accessor.
    shiqu_mask = eval('data' + mask_path)
    mask_id = shiqu_mask['params']['id']
    mask_name = shiqu_mask['link_name']  # default value
    if '口罩' in mask_name:
        act_info_url = act_info_url + '?fid=' + str(
            mask_id) + '&share_uid=&coupon_uid='
        response = session.get(url=act_info_url, headers=header)
        data = response.json()
        for i, shop in enumerate(data['data']['get_all_price']['attr']):
            # Columns: stock, price, purchase limit.
            print('id:{}, {}, 库存:{}, 价格:{}, 限购:{}'.format(
                shop['id'], shop['name'], shop['kucun'], shop['show_price'],
                shop['xiangou_num']))
            # Adjust this: the (partial or full) name of the offline pickup
            # pharmacy; it is only used to select the matching shop_id.
            if '唐闸' in shop['name']:
                shop_id = shop['id']
                PARAMS_MASK['goods_attr_id'] = shop_id
                PARAMS_MASK['fid'] = mask_id
                flag = True
    print('shop id: {}, mask id: {}, mask name: {}'.format(
        shop_id, mask_id, str(mask_name)))
    return flag
def load_file(self, filepath='myxlsfile.xlsx'):
    """Import an XLS(X) stack description: one StackPart widget per row.

    Known columns (qt/source/template/dual/layout_*) become widget
    attributes; other non-empty cells are collected into the widget's
    values dict.  Ends by refreshing the stack view.
    """
    stack = self.ids['stack']
    from kivy.resources import resource_add_path
    from os.path import split
    # Resolve resources relative to the spreadsheet's folder.
    resource_add_path(split(filepath)[0])
    from utils import XLRDDictReader
    from utils import find_path
    self.record_last_file(filepath)
    boxes = list()
    with open(filepath, 'rb') as XLFile:
        # Columns handled explicitly below; the rest become values.
        stds_headers = {'qt', 'source', 'template', 'dual', 'layout_x',
                        'layout_y', 'layout_w', 'layout_h', 'layout_angle',
                        'layout_page'
                        }
        for obj in XLRDDictReader(XLFile):
            box = StackPart()
            box.qt = int(obj.get('qt', 0))
            box.dual = 'dual' in obj and obj['dual']
            values = dict()
            if "template" in obj and obj['template']:
                box.template = obj['template']
            if 'source' in obj and obj['source']:
                # Fall back to the raw cell text when find_path fails.
                _s = find_path(obj['source'])
                if _s is None:
                    _s = obj['source']
                box.source = _s
            # Layout only applies when ALL six layout_* cells are present.
            if 'layout_x' in obj and 'layout_y' in obj and 'layout_w' in obj and 'layout_h' in obj and 'layout_angle' in obj and 'layout_page' in obj:
                box.layout = obj['layout_x'], obj['layout_y'], obj['layout_w'], obj['layout_h'], obj['layout_angle'], obj['layout_page']
            for attr in obj:
                if attr in stds_headers:
                    continue
                if obj[attr]:
                    values[attr] = obj[attr]
            box.values = values
            stack.add_widget(box)
            boxes.append(box)
    # Reverse so refresh() receives rows in spreadsheet order.
    boxes.reverse()
    self.refresh(boxes, msg='Import %s over'%split(filepath)[-1])
def read_main_df():
    """Read the main fixation table ('up.csv') into a typed DataFrame.

    Returns:
        pd.DataFrame with 'FixationOOB' converted from the strings
        'True'/'False' to booleans and the other columns coerced via
        convert_type.
    """
    # Reading main data into memory
    df = read_sv(return_as=pd.DataFrame,
                 path=find_path('up.csv'),
                 encoding='utf-8',
                 delimiter='\t',
                 header=True)
    assert 'FixationOOB' in df.columns
    # Map the 'True'/'False' strings to booleans with a vectorized replace
    # instead of eval()-ing file contents row by row (slow and unsafe on
    # untrusted data); same approach as the sibling read_main_df above.
    df['FixationOOB'] = df['FixationOOB'].replace({'True': True, 'False': False})
    convert_type(df,
                 Timestamp='int64',
                 FixationDuration='int64',
                 MappedFixationPointX='int64',
                 MappedFixationPointY='int64',
                 StimuliName='str',
                 FixationOOB='?')
    print('Read df as pd.DataFrame')
    return df
service_args = [ '--proxy={}'.format(proxy[0]), '--proxy-type=http', '--ignore-ssl-errors=true' ] if proxy_auth: service_args.append('--proxy-auth={}'.format(proxy_auth)) print("") logging.info("[{}/{}] Testing proxy: {}".format( i + 1, len(proxies), proxy)) user_agent = get_user_agent() desired_capabilities = get_desired_capabilities_phantom(user_agent) browser = webdriver.PhantomJS( executable_path=find_path('phantomjs'), service_args=service_args, desired_capabilities=desired_capabilities) browser.set_page_load_timeout(30) # Proxy testing try: browser.get('http://tools.yzy.io/ip.php') element = WebDriverWait(browser, 10).until( EC.presence_of_element_located((By.TAG_NAME, "body"))) if not element: logging.error('[{}/{}] Proxy could not load URL: {}'.format( i + 1, len(proxies), 'http://ipecho.net/plain')) continue
def get_connectivity_metrics(gt_graph_file, osm_diff_graph_file, post_graph_file, verbose=False, num_pairs=1000):
    """Compare path connectivity of two candidate graphs against ground truth.

    Samples ``num_pairs`` start/end point pairs, routes each pair through the
    ground-truth, OSM-diff and post-processed graphs, and scores the two
    candidates against the ground-truth path length (-1 = no path found).

    Returns:
        (osm_diff_metric.get_all(), post_metric.get_all())
    """
    s_points, e_points = generate_pairs(n=num_pairs)
    gt_graph = GraphSuper(gt_graph_file)
    post_graph = GraphSuper(post_graph_file)
    osm_diff_graph = GraphSuper(osm_diff_graph_file)
    osm_diff_metric = Metric(num_pairs)
    post_metric = Metric(num_pairs)
    gt_metric = Metric(num_pairs)
    for start_points, end_points in zip(s_points, e_points):
        gt_val = find_path(gt_graph, start_points, end_points, gt_metric, length_key='weight')
        if gt_val == -1:
            # Pair unreachable even in ground truth.  The skip logic below is
            # deliberately disabled: candidates are still scored with
            # gt_val == -1 — confirm this is the intended accounting.
            # osm_diff_metric.reduce_total_paths()
            # post_metric.reduce_total_paths()
            if verbose:
                print('couldnt find path in gt', start_points, end_points)
            # continue
        osm_val = find_path(osm_diff_graph, start_points, end_points, osm_diff_metric, length_key='weight')
        if osm_val == -1:
            if verbose:
                print('couldnt find path in osm', start_points, end_points)
            # No path in candidate: count a false negative against gt.
            osm_diff_metric.update_fn(gt_val)
        else:
            osm_diff_metric.update_correct(gt_val, osm_val)
        post_val = find_path(post_graph, start_points, end_points, post_metric, length_key='weight')
        if post_val == -1:
            if verbose:
                print('couldnt find path in post', start_points, end_points)
            post_metric.update_fn(gt_val)
        else:
            post_metric.update_correct(gt_val, post_val)
    if verbose:
        print('\n osm diff')
        osm_diff_metric.print_all()
        print('\n post')
        post_metric.print_all()
        print('\n gt')
        gt_metric.print_all()
    return osm_diff_metric.get_all(), post_metric.get_all()
def load_file_csv(self, filepath='mycsvfile.csv'):
    """Import a CSV stack description: one StackPart widget per data row.

    The dialect (';' or ',') is sniffed from the first KB.  Known columns
    (qt/source/template/dual/layout) become widget attributes; every other
    non-empty cell goes into the widget's values dict.  Rows are then
    realised incrementally on the Clock so the UI stays responsive.
    """
    #Now also CSV export
    import csv
    from utils import find_path
    self.record_last_file(filepath)
    with open(filepath, 'rb') as csvfile:
        dialect = csv.Sniffer().sniff(csvfile.read(1024), delimiters=";,")
        csvfile.seek(0)
        reader = csv.DictReader(csvfile, dialect=dialect)
        header = reader.fieldnames
        stack = self.ids['stack']
        remaining_header = set(header) - {
            'qt', 'source', 'template', 'dual', 'layout'
        }
        boxes = list()
        for index, obj in enumerate(reader):
            #print index, obj
            if set([obj[_v] for _v in obj]) == set(
                    dialect.delimiter
            ):  #skipping empty row made of ;;;;;; or ,,,,,
                continue
            qt = 1
            if 'qt' in obj:
                qt = int(obj['qt'])
            verso = 'normal'
            if 'dual' in obj:
                if not obj['dual'] or obj['dual'].lower() == 'false':
                    verso = 'normal'
                else:
                    verso = 'down'
            box = StackPart()
            box.verso = verso
            box.qt = qt
            if 'template' in obj and obj['template']:
                box.template = obj['template']
            if 'source' in obj and obj['source']:
                box.source = find_path(obj['source'])
            if 'layout' in obj and obj['layout']:
                box.layout = obj['layout']
            values = dict()
            for attr in remaining_header:
                v = obj.get(attr, '')
                # Fix: compare with != instead of `is not` — identity checks
                # on string literals only worked by CPython interning accident.
                if v != '':  #'' means cell is empty
                    if isinstance(v, basestring):
                        v = unicode(v, 'latin-1')
                    values[attr] = v
            box.values = values
            stack.add_widget(box)
            boxes.append(box)
    boxes.reverse()
    progress = self.ids['load_progress']
    progress.value = 0
    progress.max = len(boxes)
    #ensure the stop button is reachable
    self.ids['stop_action'].width = 80
    self.ids['stop_action'].text = 'Stop'
    self.cancel_action = False

    def inner(*args):
        # Realise one box per tick; stop on cancel or when done.
        progress.value += 1
        b = boxes.pop()
        b.realise(withValue=True)
        if self.cancel_action or not boxes:
            self.cancel_action = False
            Clock.unschedule(inner)
            self.ids['stop_action'].width = 0
            self.ids['stop_action'].text = ''
            progress.value = 0
            from utils import alert
            from os.path import split
            alert('Import %s over' % split(filepath)[-1])
            return False
    Clock.schedule_interval(inner, .1)
def __init__(self, ad, loop, logger, access, **config):
    """Set up the dashboard web server: parse config kwargs, resolve
    dashboard/compile directories, build the aiohttp app and start serving
    on the host/port taken from ``dash_url``.
    """
    self.AD = ad
    self.logger = logger
    self.acc = access
    # Each _process_arg() overwrites the default below with the value from
    # **config when the key is present.
    self.dashboard_dir = None
    self._process_arg("dashboard_dir", config)
    self.dash_password = None
    self._process_arg("dash_password", config)
    self.dash_url = None
    self._process_arg("dash_url", config)
    self.config_dir = None
    self._process_arg("config_dir", config)
    self.dash_compile_on_start = False
    self._process_arg("dash_compile_on_start", config)
    self.dash_force_compile = False
    self._process_arg("dash_force_compile", config)
    self.work_factor = 8
    self._process_arg("work_factor", config)
    self.profile_dashboard = False
    self._process_arg("profile_dashboard", config)
    self.dash_ssl_certificate = None
    self._process_arg("dash_ssl_certificate", config)
    self.dash_ssl_key = None
    self._process_arg("dash_ssl_key", config)
    self.rss_feeds = None
    self._process_arg("rss_feeds", config)
    self.rss_update = None
    self._process_arg("rss_update", config)
    self.rss_last_update = None
    self.stopping = False
    # Derive host/port from the configured dashboard URL; port defaults to 80.
    url = urlparse(self.dash_url)
    dash_net = url.netloc.split(":")
    self.dash_host = dash_net[0]
    try:
        self.dash_port = dash_net[1]
    except IndexError:
        self.dash_port = 80
    if self.dash_host == "":
        raise ValueError("Invalid host for 'dash_url'")
    # find dashboard dir
    if self.dashboard_dir is None:
        if self.config_dir is None:
            self.dashboard_dir = utils.find_path("dashboards")
        else:
            self.dashboard_dir = os.path.join(self.config_dir, "dashboards")
    #
    # Setup compile directories
    #
    if self.config_dir is None:
        self.compile_dir = utils.find_path("compiled")
    else:
        self.compile_dir = os.path.join(self.config_dir, "compiled")
    # Setup WS handler
    self.app = web.Application()
    self.app['websockets'] = {}
    self.loop = loop
    self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=5)
    try:
        self.dashboard_obj = dashboard.Dashboard(self.config_dir, access,
                                                 dash_compile_on_start=self.dash_compile_on_start,
                                                 dash_force_compile=self.dash_force_compile,
                                                 profile_dashboard=self.profile_dashboard,
                                                 dashboard_dir=self.dashboard_dir,
                                                 )
        self.setup_routes()
        # Only serve HTTPS when both cert and key are configured.
        if self.dash_ssl_certificate is not None and self.dash_ssl_key is not None:
            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            context.load_cert_chain(self.dash_ssl_certificate, self.dash_ssl_key)
        else:
            context = None
        handler = self.app.make_handler()
        f = loop.create_server(handler, "0.0.0.0", int(self.dash_port), ssl=context)
        loop.create_task(f)
        loop.create_task(self.update_rss())
    # NOTE(review): bare except — swallows every startup failure (including
    # KeyboardInterrupt) and only logs it; consider `except Exception`.
    except:
        self.log("WARNING", '-' * 60)
        self.log("WARNING", "Unexpected error in dashboard thread")
        self.log("WARNING", '-' * 60)
        self.log("WARNING", traceback.format_exc())
        self.log("WARNING", '-' * 60)
def generation_step(self, with_realize=False):
    """Render one stack item (popped from self.index) onto the PDF.

    Called repeatedly so callers can show progress.  Adds a new page when
    the face or page number changes, computes the image placement for the
    current print mode, then draws the (possibly rotated) image.
    """
    i, row, col, face, item = self.index.pop()
    # Determine whether a new page should be appended.
    face_index = 0 if self.current_face == 'F' else 1
    if self.current_face != face:
        self.current_face = face
        self.current_page_num[1 - face_index] = i
        self.AddPage()
    elif self.current_page_num[face_index] != i:
        self.current_page_num[face_index] = i
        self.AddPage()
    # Determine image X/Y/W/H depending on print mode (units -> cm).
    if self.mode == 'LAYOUT':
        x, y, self.x, self.y, angle, pageindex = item.layout
        x /= cm(1)
        y /= cm(1)
        self.x /= cm(1)
        self.y /= cm(1)
    elif self.mode == 'BINCAN':
        # Hack: the tuple also carries the real item — replace it in place!
        x, y, self.x, self.y, angle, item = item
        x /= cm(1)
        y /= cm(1)
        self.x /= cm(1)
        self.y /= cm(1)
    else:
        # Grid mode: position derives from row/col; back face is mirrored.
        if face == 'F':
            x, y = col * self.x + left, height - (1 + row) * self.y - top
        else:
            x, y = width - (1 + col) * self.x - left - right, height - (
                1 + row) * self.y - top
        #x, y = width - (1+col)*self.x - right, height-(1+row)*self.y - top
        # self.x & self.y have already been set up by calculate_size.
        # Check if there is a layout that could be used, just for the angle.
        if getattr(item, 'layout', 0):
            angle = item.layout[4]
        else:
            angle = 0
    # item is the Stack Item: before rendering, inject special print variables.
    item.print_index = {
        'pagenum': i,
        'stackrow': row,
        'stackcol': col,
        'pageface': face
    }
    # Now define the source for the image: either take item.source directly
    # or render the template widget to an image.
    if item.image:  # special case for complex image manipulation
        src = ImageReader(item.image.rotate(angle))
    elif item.template:
        if with_realize:
            item.realise(True, True)
            tmplWidget = item.tmplWidget
        elif item.tmplWidget:  # it has been modified
            tmplWidget = item.tmplWidget
        else:
            from template import BGTemplate
            Logger.info('[Printer] Generation Step without tmplWidget')
            tmplWidget = BGTemplate.FromFile(item.template)
            if tmplWidget:
                # Only taking the last one.
                tmplWidget = tmplWidget[-1]
            else:
                raise NameError('No such template: ' + item.template)
        if item.values:
            tmplWidget.apply_values(item.values)
        from kivy.base import EventLoop
        # Let kivy process one frame so the widget is laid out before capture.
        EventLoop.idle()
        cim = tmplWidget.toImage(for_print=True)
        pim = frombuffer('RGBA', cim.size, cim._texture.pixels, 'raw',
                         'RGBA', 0, 1)
        src = ImageReader(pim.rotate(angle))
    else:
        src = item.source
        from utils import find_path
        src = find_path(src)
        if angle:
            src = ImageReader(PILOpen(src).rotate(angle))
    #print "Adding Image to pdf", i, row, col, face, item, src, x, y, self.x, self.y, angle
    self.pdf.drawImage(src, x * r_cm, y * r_cm, self.x * r_cm,
                       self.y * r_cm, mask='auto')
    from conf import CP
    if CP.getboolean('Print', 'draw_cut_rect'):
        # Add cut lines after the image so they render above it.
        self.AddLines(x, y, self.x, self.y)
def generation_step(self, with_realize = False):
    """Render one stack item (popped from self.index) onto the PDF.

    Called repeatedly so callers can show progress.  Adds a new page when
    the face or page number changes, computes the image placement for the
    current print mode, then draws the (possibly rotated) image.
    """
    i, row, col, face, item = self.index.pop()
    # Determine whether a new page should be appended.
    face_index = 0 if self.current_face == 'F' else 1
    if self.current_face != face:
        self.current_face = face
        self.current_page_num[1-face_index] = i
        self.AddPage()
    elif self.current_page_num[face_index] != i:
        self.current_page_num[face_index] = i
        self.AddPage()
    # Determine image X/Y/W/H depending on print mode (units -> cm).
    if self.mode == 'LAYOUT':
        x, y, self.x, self.y, angle, pageindex = item.layout
        x /= cm(1)
        y /= cm(1)
        self.x /= cm(1)
        self.y /= cm(1)
    elif self.mode == 'BINCAN':
        # Hack: the tuple also carries the real item — replace it in place!
        x, y, self.x, self.y, angle, item = item
        x /= cm(1)
        y /= cm(1)
        self.x /= cm(1)
        self.y /= cm(1)
    else:
        # Grid mode: position derives from row/col; back face is mirrored.
        if face == 'F':
            x, y = col * self.x + left, height-(1+row)*self.y - top
        else:
            x, y = width - (1+col)*self.x - left - right, height-(1+row)*self.y - top
        #x, y = width - (1+col)*self.x - right, height-(1+row)*self.y - top
        # self.x & self.y have already been set up by calculate_size.
        # Check if there is a layout that could be used, just for the angle.
        if getattr(item, 'layout',0):
            angle = item.layout[4]
        else:
            angle = 0
    # item is the Stack Item: before rendering, inject special print variables.
    item.print_index = {
        'pagenum' : i,
        'stackrow': row,
        'stackcol': col,
        'pageface': face
    }
    # Now define the source for the image: either take item.source directly
    # or render the template widget to an image.
    if item.image:  # special case for complex image manipulation
        src = ImageReader(item.image.rotate(angle))
    elif item.template:
        if with_realize:
            item.realise(True,True)
            tmplWidget = item.tmplWidget
        elif item.tmplWidget:  # it has been modified
            tmplWidget = item.tmplWidget
        else:
            from template import BGTemplate
            Logger.info('[Printer] Generation Step without tmplWidget')
            tmplWidget = BGTemplate.FromFile(item.template)
            if tmplWidget:
                # Only taking the last one.
                tmplWidget = tmplWidget[-1]
            else:
                raise NameError('No such template: '+ item.template)
        if item.values:
            tmplWidget.apply_values(item.values)
        from kivy.base import EventLoop
        # Let kivy process one frame so the widget is laid out before capture.
        EventLoop.idle()
        cim = tmplWidget.toImage(for_print=True)
        pim = frombuffer('RGBA', cim.size, cim._texture.pixels, 'raw', 'RGBA',0,1)
        src = ImageReader(pim.rotate(angle))
    else:
        src = item.source
        from utils import find_path
        src = find_path(src)
        if angle:
            src = ImageReader(PILOpen(src).rotate(angle))
    #print "Adding Image to pdf", i, row, col, face, item, src, x, y, self.x, self.y, angle
    self.pdf.drawImage(src, x*r_cm, y*r_cm, self.x*r_cm, self.y*r_cm, mask='auto')
    from conf import CP
    if CP.getboolean('Print','draw_cut_rect'):
        # Add cut lines after the image so they render above it.
        self.AddLines(x,y,self.x,self.y)
def read_metadata():
    """Return the parsed contents of 'stimuli_meta.json'."""
    with open(find_path('stimuli_meta.json'), 'r') as meta_file:
        return json.load(meta_file)
def load_file_csv(self, filepath='mycsvfile.csv'):
    """Import a CSV stack description: one StackPart widget per data row.

    The dialect (';' or ',') is sniffed from the first KB.  Known columns
    (qt/source/template/dual/layout) become widget attributes; every other
    non-empty cell goes into the widget's values dict.  Rows are then
    realised incrementally on the Clock so the UI stays responsive.
    """
    #Now also CSV export
    import csv
    from utils import find_path
    self.record_last_file(filepath)
    with open(filepath, 'rb') as csvfile:
        dialect = csv.Sniffer().sniff(csvfile.read(1024), delimiters=";,")
        csvfile.seek(0)
        reader = csv.DictReader(csvfile, dialect=dialect)
        header = reader.fieldnames
        stack = self.ids['stack']
        remaining_header = set(header) - {'qt', 'source', 'template', 'dual', 'layout'}
        boxes = list()
        for index, obj in enumerate(reader):
            #print index, obj
            if set([obj[_v] for _v in obj]) == set(dialect.delimiter):#skipping empty row made of ;;;;;; or ,,,,,
                continue
            qt = 1
            if 'qt' in obj:
                qt = int(obj['qt'])
            verso = 'normal'
            if 'dual' in obj:
                if not obj['dual'] or obj['dual'].lower() == 'false':
                    verso = 'normal'
                else:
                    verso = 'down'
            box = StackPart()
            box.verso = verso
            box.qt = qt
            if 'template' in obj and obj['template']:
                box.template = obj['template']
            if 'source' in obj and obj['source']:
                box.source = find_path(obj['source'])
            if 'layout' in obj and obj['layout']:
                box.layout = obj['layout']
            values = dict()
            for attr in remaining_header:
                v = obj.get(attr, '')
                # Fix: compare with != instead of `is not` — identity checks
                # on string literals only worked by CPython interning accident.
                if v != '':#'' means cell is empty
                    if isinstance(v, basestring):
                        v = unicode(v, 'latin-1')
                    values[attr] = v
            box.values = values
            stack.add_widget(box)
            boxes.append(box)
    boxes.reverse()
    progress = self.ids['load_progress']
    progress.value = 0
    progress.max = len(boxes)
    #ensure the stop button is reachable
    self.ids['stop_action'].width = 80
    self.ids['stop_action'].text = 'Stop'
    self.cancel_action = False

    def inner(*args):
        # Realise one box per tick; stop on cancel or when done.
        progress.value += 1
        b = boxes.pop()
        b.realise(withValue=True)
        if self.cancel_action or not boxes:
            self.cancel_action = False
            Clock.unschedule(inner)
            self.ids['stop_action'].width = 0
            self.ids['stop_action'].text = ''
            progress.value = 0
            from utils import alert
            from os.path import split
            alert('Import %s over'%split(filepath)[-1])
            return False
    Clock.schedule_interval(inner, .1)
def main(self):
    """AppDaemon entry point: parse CLI args, load appdaemon.yaml (with
    secrets), configure the appdaemon/hadashboard sections and logging,
    then run — either in the foreground or daemonized.
    """
    # import appdaemon.stacktracer
    # appdaemon.stacktracer.trace_start("/tmp/trace.html")
    # Windows does not support SIGUSR1 or SIGUSR2
    if platform.system() != "Windows":
        signal.signal(signal.SIGUSR1, self.handle_sig)
        signal.signal(signal.SIGINT, self.handle_sig)
        signal.signal(signal.SIGHUP, self.handle_sig)
    # Get command line args
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config", help="full path to config directory",
                        type=str, default=None)
    parser.add_argument("-p", "--pidfile", help="full path to PID File",
                        default="/tmp/hapush.pid")
    parser.add_argument(
        "-t", "--tick",
        help="time that a tick in the schedular lasts (seconds)",
        default=1, type=float)
    parser.add_argument(
        "-s", "--starttime",
        help="start time for scheduler <YYYY-MM-DD HH:MM:SS>",
        type=str)
    parser.add_argument(
        "-e", "--endtime",
        help="end time for scheduler <YYYY-MM-DD HH:MM:SS>",
        type=str, default=None)
    parser.add_argument("-i", "--interval",
                        help="multiplier for scheduler tick",
                        type=float, default=1)
    parser.add_argument(
        "-D", "--debug", help="debug level", default="INFO",
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"])
    parser.add_argument('-v', '--version', action='version',
                        version='%(prog)s ' + utils.__version__)
    parser.add_argument('--profiledash', help=argparse.SUPPRESS,
                        action='store_true')
    # Windows does not have Daemonize package so disallow
    if platform.system() != "Windows":
        parser.add_argument("-d", "--daemon",
                            help="run as a background process",
                            action="store_true")
    args = parser.parse_args()
    config_dir = args.config
    if platform.system() != "Windows":
        from daemonize import Daemonize
    if platform.system() != "Windows":
        isdaemon = args.daemon
    else:
        isdaemon = False
    # Locate appdaemon.yaml: explicit config dir, or search default paths.
    if config_dir is None:
        config_file_yaml = utils.find_path("appdaemon.yaml")
    else:
        config_file_yaml = os.path.join(config_dir, "appdaemon.yaml")
    if config_file_yaml is None:
        print(
            "FATAL: no configuration directory defined and defaults not present\n"
        )
        parser.print_help()
        sys.exit(1)
    config = None
    #
    # First locate secrets file
    #
    try:
        #
        # Initially load file to see if secret directive is present
        #
        yaml.add_constructor('!secret', utils._dummy_secret)
        with open(config_file_yaml, 'r') as yamlfd:
            config_file_contents = yamlfd.read()
        # NOTE(review): yaml.load without an explicit Loader — unsafe on
        # untrusted files; the config is operator-owned, but consider
        # yaml.safe_load plus the custom constructors.
        config = yaml.load(config_file_contents)
        if "secrets" in config:
            secrets_file = config["secrets"]
        else:
            secrets_file = os.path.join(os.path.dirname(config_file_yaml),
                                        "secrets.yaml")
        #
        # Read Secrets
        #
        if os.path.isfile(secrets_file):
            with open(secrets_file, 'r') as yamlfd:
                secrets_file_contents = yamlfd.read()
            utils.secrets = yaml.load(secrets_file_contents)
        else:
            # Only an error when the config explicitly named a secrets file.
            if "secrets" in config:
                print(
                    "ERROR",
                    "Error loading secrets file: {}".format(
                        config["secrets"]))
                sys.exit()
        #
        # Read config file again, this time with secrets
        #
        yaml.add_constructor('!secret', utils._secret_yaml)
        with open(config_file_yaml, 'r') as yamlfd:
            config_file_contents = yamlfd.read()
        config = yaml.load(config_file_contents)
    except yaml.YAMLError as exc:
        print("ERROR", "Error loading configuration")
        if hasattr(exc, 'problem_mark'):
            if exc.context is not None:
                print("ERROR", "parser says")
                print("ERROR", str(exc.problem_mark))
                print("ERROR", str(exc.problem) + " " + str(exc.context))
            else:
                print("ERROR", "parser says")
                print("ERROR", str(exc.problem_mark))
                print("ERROR", str(exc.problem))
        sys.exit()
    if "appdaemon" not in config:
        print("ERROR",
              "no 'appdaemon' section in {}".format(config_file_yaml))
        sys.exit()
    # Merge CLI args into the appdaemon config section.
    appdaemon = config["appdaemon"]
    if "disable_apps" not in appdaemon:
        appdaemon["disable_apps"] = False
    appdaemon["config_dir"] = config_dir
    appdaemon["config_file"] = config_file_yaml
    appdaemon["app_config_file"] = os.path.join(
        os.path.dirname(config_file_yaml), "apps.yaml")
    if args.starttime is not None:
        appdaemon["starttime"] = args.starttime
    if args.endtime is not None:
        appdaemon["endtime"] = args.endtime
    appdaemon["tick"] = args.tick
    appdaemon["interval"] = args.interval
    appdaemon["loglevel"] = args.debug
    appdaemon["config_dir"] = os.path.dirname(config_file_yaml)
    appdaemon["stop_function"] = self.stop
    # Dashboard section is optional; default to disabled.
    if "hadashboard" in config:
        hadashboard = config["hadashboard"]
        hadashboard["profile_dashboard"] = args.profiledash
        hadashboard["config_dir"] = config_dir
        hadashboard["config_file"] = config_file_yaml
        hadashboard["config_dir"] = os.path.dirname(config_file_yaml)
        if args.profiledash:
            hadashboard["profile_dashboard"] = True
        if "dashboard" not in hadashboard:
            hadashboard["dashboard"] = True
    else:
        hadashboard = {"dashboard": False}
    # Logging destinations and rotation settings.
    if "log" not in config:
        logfile = "STDOUT"
        errorfile = "STDERR"
        diagfile = "STDOUT"
        log_size = 1000000
        log_generations = 3
        accessfile = None
    else:
        logfile = config['log'].get("logfile", "STDOUT")
        errorfile = config['log'].get("errorfile", "STDERR")
        diagfile = config['log'].get("diagfile", "NONE")
        if diagfile == "NONE":
            diagfile = logfile
        log_size = config['log'].get("log_size", 1000000)
        log_generations = config['log'].get("log_generations", 3)
        accessfile = config['log'].get("accessfile")
    # A daemon has no console, so console logging is disallowed with -d.
    if isdaemon and (logfile == "STDOUT" or errorfile == "STDERR"
                     or logfile == "STDERR" or errorfile == "STDOUT"):
        print("ERROR", "STDOUT and STDERR not allowed with -d")
        sys.exit()
    # Setup Logging
    self.logger = logging.getLogger("log1")
    numeric_level = getattr(logging, args.debug, None)
    self.logger.setLevel(numeric_level)
    self.logger.propagate = False
    # formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    # Send to file if we are daemonizing, else send to console
    fh = None
    if logfile != "STDOUT":
        fh = RotatingFileHandler(logfile, maxBytes=log_size,
                                 backupCount=log_generations)
        fh.setLevel(numeric_level)
        # fh.setFormatter(formatter)
        self.logger.addHandler(fh)
    else:
        # Default for StreamHandler() is sys.stderr
        ch = logging.StreamHandler(stream=sys.stdout)
        ch.setLevel(numeric_level)
        # ch.setFormatter(formatter)
        self.logger.addHandler(ch)
    # Setup compile output
    self.error = logging.getLogger("log2")
    numeric_level = getattr(logging, args.debug, None)
    self.error.setLevel(numeric_level)
    self.error.propagate = False
    # formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    if errorfile != "STDERR":
        efh = RotatingFileHandler(errorfile, maxBytes=log_size,
                                  backupCount=log_generations)
    else:
        efh = logging.StreamHandler()
    efh.setLevel(numeric_level)
    # efh.setFormatter(formatter)
    self.error.addHandler(efh)
    # setup diag output
    self.diag = logging.getLogger("log3")
    numeric_level = getattr(logging, args.debug, None)
    self.diag.setLevel(numeric_level)
    self.diag.propagate = False
    # formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    if diagfile != "STDOUT":
        dfh = RotatingFileHandler(diagfile, maxBytes=log_size,
                                  backupCount=log_generations)
    else:
        dfh = logging.StreamHandler()
    dfh.setLevel(numeric_level)
    # dfh.setFormatter(formatter)
    self.diag.addHandler(dfh)
    # Setup dash output
    if accessfile is not None:
        self.access = logging.getLogger("log4")
        numeric_level = getattr(logging, args.debug, None)
        self.access.setLevel(numeric_level)
        self.access.propagate = False
        # formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        efh = RotatingFileHandler(config['log'].get("accessfile"),
                                  maxBytes=log_size,
                                  backupCount=log_generations)
        efh.setLevel(numeric_level)
        # efh.setFormatter(formatter)
        self.access.addHandler(efh)
    else:
        self.access = self.logger
    # Startup message
    self.log(self.logger, "INFO",
             "AppDaemon Version {} starting".format(utils.__version__))
    self.log(self.logger, "INFO",
             "Configuration read from: {}".format(config_file_yaml))
    self.log(self.logger, "DEBUG",
             "AppDaemon Section: {}".format(config.get("AppDaemon")))
    self.log(self.logger, "DEBUG",
             "HADashboard Section: {}".format(config.get("HADashboard")))
    if isdaemon:
        # Keep log file descriptors open across the daemonize fork.
        keep_fds = [fh.stream.fileno(), efh.stream.fileno()]
        pid = args.pidfile
        daemon = Daemonize(app="appdaemon", pid=pid, action=self.run,
                           keep_fds=keep_fds)
        daemon.start()
        while True:
            time.sleep(1)
    else:
        self.run(appdaemon, hadashboard)
def plan_path(self, start, goal):
    """Return optimal path from start to goal.

    Delegates to utils.find_path using this entity's navigation mesh,
    grid and the map tile size.
    """
    return utils.find_path(start, goal, self.nav_mesh, self.grid,
                           MAP_TILE_SIZE)