def start():
    """Open the global serial connection to the Arduino.

    Reads/writes the module globals ``dev`` (serial device path) and
    ``arduino`` (the open ``serial.Serial`` handle).  Does nothing when
    ``use_arduino`` is false.  Terminates the process when no port that
    looks like an Arduino can be found.
    """
    import sys  # local import: sys is not visibly imported at module level

    global dev
    global arduino
    if use_arduino:
        # if device is not specified, pick the first one in the list
        if dev == '':
            devlist = subprocess.check_output(
                ['python', '-m', 'serial.tools.list_ports'])
            # BUG FIX: check_output() returns bytes on Python 3; decode so
            # the startswith() comparisons below compare str with str.
            dev = devlist.split()[0].decode()
        # check or guess if Arduino is connected
        has_arduino = False
        for initial in DEV_INITIALS:
            if dev.startswith(initial):
                has_arduino = True
        # didn't find Arduino, so exit the program
        if not has_arduino:
            log.fail('Didn\'t find an Arduino port.')
            # BUG FIX: os.exit() does not exist (would raise
            # AttributeError); use sys.exit() for a clean non-zero exit.
            sys.exit(1)
        log.msg('Connecting to Arduino at %s' % dev)
        arduino = serial.Serial(dev, 115200, timeout=1)
        # throw away one read so the board finishes resetting before use
        arduino.read()
        log.ok('Arduino is connected')
def change_font(self, font: str):
    """Apply the font preset named *font* from the "fonts" resource file.

    A preset may be a bare string (one family for all styles) or a
    mapping with "normal", "bold" and "italic" entries.

    Raises:
        ConfigError: fonts file empty/missing sections, unknown preset,
            or malformed preset.
    """
    if 'font' not in self.config:
        log.warn('"font" prop was not present in alacritty.yml')
        self.config['font'] = {}

    fonts_file = self._resource_path('fonts')
    fonts = self._load(fonts_file)
    if fonts is None:
        raise ConfigError(f'File "{fonts_file}" is empty')
    if 'fonts' not in fonts:
        raise ConfigError(f'No font config found in "{fonts_file}"')

    fonts = fonts['fonts']
    if font not in fonts:
        raise ConfigError(f'Config for font "{font}" not found')

    font_types = ['normal', 'bold', 'italic']

    # A plain string means: use this one family for every style.
    if isinstance(fonts[font], str):
        fonts[font] = dict.fromkeys(font_types, fonts[font])

    if not isinstance(fonts[font], Mapping):
        raise ConfigError(f'Font "{font}" has wrong format')

    for t in font_types:
        if t not in fonts[font]:
            raise ConfigError(f'Font "{font}" does not have "{t}" property')
        if t not in self.config['font']:
            # placeholder dict so the family assignment below has a home
            self.config['font'][t] = {'family': 'tmp'}
        self.config['font'][t]['family'] = fonts[font][t]

    log.ok(f'Font {font} applied')
def change_font_size(self, size: float):
    """Set the terminal font size.

    Raises:
        ConfigError: when *size* is not strictly positive.
    """
    if size <= 0:
        raise ConfigError('Font size cannot be negative or zero')

    if 'font' not in self.config:
        log.warn('"font" prop config was not present in alacritty.yml')
        self.config['font'] = {}

    self.config['font']['size'] = size
    log.ok(f'Font size set to {size:.1f}')
def send_image_to_arduino(user): global arduino # write data to file pydata = distort_and_format_image(user) data = struct.pack('%sB' % len(pydata), *pydata) # tell arduino to return motor to initial position arduino.write(struct.pack('B', RETURN)) arduino.flush() # send some metadata to arduino metadata = [nthbyte(out_width, 1), nthbyte(out_width, 0), nthbyte(motor_begin, 1), nthbyte(motor_begin, 0), nthbyte(motor_end, 1), nthbyte(motor_end, 0)] arduino.write(struct.pack('%sB' % len(metadata), *metadata)) arduino.flush() # wait until motor is back time.sleep(MOTOR_RETURN_TIME) # start focusing and capturing photo cap_thread = Thread(target=subprocess.check_output, args=[['gphoto2', '--force-overwrite', '--capture-image-and-download']]) cap_thread.start() log.msg('Capturing..') # wait until the camera finish focusing time.sleep(CAMERA_FOCUS_TIME) # send light data to arduino idx = 0 size = line_length pos = motor_begin arduino.readlines() while pos < motor_end - 3: s = arduino.readline().strip() if s.isdigit() == True: pos = float(s) if pos >= motor_begin and pos <= motor_end: idx = int(plot(pos, motor_begin, motor_end, 0, out_width)) buf = data[idx*size : idx*size+size] arduino.write(struct.pack('B', LINE)) arduino.write(buf) arduino.flush() arduino.write(struct.pack('B', FLUSH)) arduino.flush() # mark end of data arduino.write(struct.pack('B', END)) arduino.flush() # stop capturing photo cap_thread.join() log.ok('Finished capturing image') save_image(user) create_image_preview(user)
def check_video_extensions (self):
    """Lower-case the configured video extensions, or install defaults.

    Persists the result via save() either way.
    """
    if not self.VIDEO_EXTENSIONS:
        log.info('No video extensions in parameters. Taking defaults.')
        self._params['video_extensions'] = ['avi', 'mov', 'mpg', 'mpeg',
                                            'mp4', 'mkv']
    else:
        # normalise to lower case before reporting what we found
        self._params['video_extensions'] = [ext.lower()
                                            for ext in self._params['video_extensions']]
        log.ok('Found video extensions in parameters: {0}.'.format(
            ', '.join(self.VIDEO_EXTENSIONS)))
    self.save()
def open_connection(self):
    """Open the pre-configured serial port, exiting the process on failure."""
    try:
        self.ser.open()
    except serial.SerialException:
        log.err('port {} opening is fail'.format(self.ser.port))
        exit(1)
    log.ok('port {} is open'.format(self.ser.port))
    #time.sleep(2)
    # drop anything the device sent before we were listening
    self.ser.reset_input_buffer()
def change (if_not_valid=False, **kwargs):
    """Resolve a thetvdb language code, prompting the user if needed.

    When *if_not_valid* is truthy it is used as the candidate language
    directly; otherwise the user is asked (default 'en').  Unsupported
    codes trigger a recursive re-prompt via tvdb.language.change().
    """
    if if_not_valid:
        lang = if_not_valid
    else:
        lang = cinput('<White>Enter language</White> [Default: en]',
                      default='en', **kwargs)

    if tvdb.language.is_supported (lang):
        log.ok('Language supported by thetvdb.')
        return lang

    log.warn('Language not supported: ' + lang)
    return tvdb.language.change()
def check_language (self):
    """Validate/obtain the configured language and persist it."""
    utils.tvdb.language.print_all()
    if not self.LANGUAGE:
        log.info('No language found in parameters. Asking user.')
    else:
        log.ok('Found language in parameters: {}.'.format(
            self.LANGUAGE))
    # change() falls back to prompting when if_not_valid is falsy/unsupported
    self._params['language'] = utils.tvdb.language.change(if_not_valid=self.LANGUAGE)
    self.save()
def insert_team(number, name, key):
    """Insert a team row into the teams table.

    Args:
        number: team number (UNIQUE in the schema, judging by the
            duplicate-insert failure path below).
        name: team display name.
        key: team public key.

    Returns:
        True when the row was inserted and committed, False on any
        database error (most likely a duplicate team number).
    """
    try:
        conn, db = get_db()
        db.execute(
            "INSERT INTO teams (team, name, public_key) VALUES (?, ?, ?)",
            (number, name, key))
        conn.commit()
        log.ok("Database", "Added team %s to database" % number)
        return True
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt.  Probably the Unique Constraint.
        return False
def create_image_preview(user):
    """Write a PREVIEW_SIZE thumbnail of user.imagepath into main.PREVIEW_DIR.

    Stores the resulting path on user.previewpath.
    """
    log.msg('Creating preview..')
    source = cv2.imread(user.imagepath)
    target = main.PREVIEW_DIR + os.path.basename(user.imagepath)
    cv2.imwrite(target, cv2.resize(source, PREVIEW_SIZE))
    user.previewpath = target
    log.ok('Created preview at ' + target)
def change_font_offset(self, offset: List[int]):
    """Set the font offset to [x, y].

    Raises:
        ConfigError: when *offset* is not a two-element sequence.
    """
    if len(offset) != 2:
        raise ConfigError('Wrong offset config, should be [x, y]')

    x, y = offset
    self.config.setdefault('font', {})
    if 'offset' not in self.config['font']:
        log.warn('"offset" prop was not set')
        self.config['font']['offset'] = {}

    self.config['font']['offset'].update({'x': x, 'y': y})
    log.ok(f'Offset set to x: {x}, y: {y}')
def change_padding(self, padding: List[int]):
    """Set the window padding to [x, y].

    Raises:
        ConfigError: when *padding* is not a two-element sequence.
    """
    if len(padding) != 2:
        raise ConfigError('Padding should only have an x and y value')

    x, y = padding
    if 'window' not in self.config:
        log.warn('"window" prop was not present in config file')
        self.config['window'] = {}
    if 'padding' not in self.config['window']:
        log.warn('"padding" prop was not present in config file')
        self.config['window']['padding'] = {}

    self.config['window']['padding'].update({'x': x, 'y': y})
    log.ok(f'Padding set to x: {x}, y: {y}')
def open_connection(self):
    """Configure the serial port (115200 baud, 8N1, 1.5 s timeout) and open it.

    Exits the process with status 1 when the port cannot be opened.
    """
    port_settings = {
        'port': self.port,
        'baudrate': 115200,
        'stopbits': serial.STOPBITS_ONE,
        'parity': serial.PARITY_NONE,
        'bytesize': serial.EIGHTBITS,
        'timeout': 1.5,  # in seconds
    }
    for attribute, value in port_settings.items():
        setattr(self.ser, attribute, value)

    try:
        self.ser.open()
    except serial.SerialException:
        log.err('port {} opening is fail'.format(self.port))
        exit(1)
    log.ok('port {} is open'.format(self.port))
def save_image(user):
    """Copy the camera capture (capt0000.jpg) to the output and duplicate dirs.

    Writes <OUTPUT_DIR>/<uid>.jpg and <DUP_DIR>/<uid>_<num_images>.jpg,
    then records the output path on user.imagepath and bumps
    user.num_images.
    """
    log.msg('Saving image..')
    filepath = main.OUTPUT_DIR + str(user.uid) + '.jpg'
    duppath = main.DUP_DIR + str(user.uid) + '_' + str(user.num_images) + '.jpg'

    # BUG FIX: the original opened three handles and only closed them on
    # the happy path; context managers guarantee closure even on error.
    with open('capt0000.jpg', 'rb') as infile:
        image_bytes = infile.read()
    with open(filepath, 'wb') as outfile:
        outfile.write(image_bytes)
    with open(duppath, 'wb') as dupfile:
        dupfile.write(image_bytes)

    user.imagepath = filepath
    user.num_images = user.num_images + 1
    log.ok('Saved the image at ' + filepath)
def server():
    """Accept-loop for the peer network.

    Binds to PORT on all interfaces, kicks off the periodic peer scan,
    then hands each accepted connection to handle_request on its own
    thread until the global ENABLED flag goes false.
    """
    ss = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ss.settimeout(SOCKET_TIMEOUT)
    # allow quick restarts without waiting for TIME_WAIT to expire
    ss.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    ss.bind(("0.0.0.0", PORT))
    ss.listen()
    thread.start_new_thread(peerscan, ())
    log.ok("Network", "Network started!")
    # the accept timeout bounds how long shutdown takes after ENABLED flips
    while ENABLED:
        try:
            sock, addr = ss.accept()
        except socket.timeout:
            # BUG FIX: was a bare `except:` around accept AND the thread
            # spawn, which hid real errors; only the timeout is expected.
            continue
        thread.start_new_thread(handle_request, (sock, ))
    scan_timer.cancel()
    log.ok("Network", "Network stopped!")
def distort_and_format_image(user):
    """Distort the user's image into a circular sweep and flatten it to bytes.

    Resizes to (out_width, out_height), remaps each column onto a chord
    of the inscribed ellipse, then emits one flat list of 2-bit-ish
    (value // 64, range 0..3) R,G,B components per pixel, columns
    right-to-left, for streaming to the Arduino.

    Returns:
        list[int]: len == out_width * out_height * 3, each value 0..3.
    """
    log.msg('Distorting image ' + user.imagepath + ' with size (%d, %d)..' % (out_width, out_height))
    img = cv2.imread(user.imagepath)
    img = cv2.resize(img, (out_width, out_height))
    h, w = len(img), len(img[0])
    hh, hw = h / 2, w / 2
    img_array = np.zeros(img.shape)
    # distort: column i sweeps an angle 0..pi; rows are sampled along the
    # chord between (y1,x1) and (y2,x2)
    for i in range(w):
        angle = float(i) / w * pi
        hy = sin(angle) * hh
        hx = cos(angle) * hw
        y1 = hh - hy
        x1 = hw - hx
        y2 = hh + hy
        x2 = hw + hx
        for j in range(h):
            f = float(j) / h
            y = hh + (y2 - y1) * (f - 0.5)
            x = hw + (x2 - x1) * (f - 0.5)
            if y >= h or x >= w:
                break
            # BUG FIX: y and x are floats; Python 3 requires integer
            # indices (the original was Python 2 int-division code).
            img_array[j][i] = img[int(y)][int(x)]
    # format: BGR -> R,G,B order, columns mirrored, scaled down to 0..3
    output = []
    for x in range(w):
        for y in range(h):
            # BUG FIX: use floor division and int() — plain / yields
            # floats, which struct.pack('B', ...) rejects downstream.
            r = int(img_array[y][w-x-1][2]) // 64
            g = int(img_array[y][w-x-1][1]) // 64
            b = int(img_array[y][w-x-1][0]) // 64
            output.append(r)
            output.append(g)
            output.append(b)
    cv2.imwrite(main.DISTORT_DIR + os.path.basename(user.imagepath), img_array)
    log.ok('Created distorted image')
    return output
def peerscan():
    """Scan for peers across PEERSCAN_THREADS worker threads and reschedule itself.

    Merges fed peers, known peers, LAN-discovered hosts and manual peers,
    scans them in parallel chunks, then arms scan_timer to run again in
    SCAN_INTERVAL seconds.
    """
    start = time.time()
    global peers
    global fed_peers
    global scan_timer
    # Discover peers
    # BUG FIX: `tmp_peers = fed_peers` aliased the global list, so the
    # `+=` below mutated it in place; copy instead.
    tmp_peers = list(fed_peers)
    tmp_peers += peers
    tmp_peers += expand_lan()
    for peer in man_peers:
        if not peer in peers:
            tmp_peers.append(peer)
    fed_peers = []
    # Connect to discovered nodes
    # BUG FIX: with zero candidates chunk_size is 0 and
    # range(0, 0, 0) raises ValueError — skip straight to rescheduling.
    if tmp_peers:
        chunk_size = ceil(len(tmp_peers) / PEERSCAN_THREADS)
        ranges = [
            tmp_peers[i:i + chunk_size]
            for i in range(0, len(tmp_peers), chunk_size)
        ]
        lthreads = []
        for i in range(0, len(ranges)):
            peer_range = [str(ip) for ip in ranges[i]]
            lthreads.append(
                threading.Thread(target=scan_range, args=(peer_range, )))
            lthreads[i].start()
        log.ok(
            "Network",
            "Scanning network with %d threads, waiting until finished..." %
            len(lthreads))
        for thread in lthreads:
            thread.join()
    # reschedule the next scan regardless of whether anything was found
    scan_timer = threading.Timer(SCAN_INTERVAL, peerscan)
    scan_timer.start()
    log.ok(
        "Network",
        "Finished LAN scan in %.2f seconds, rescaning in %d seconds." %
        (time.time() - start, SCAN_INTERVAL))
def change_theme(self, theme: str):
    """Apply the color scheme <theme>.yaml from the themes resource directory.

    Missing color groups/entries only produce warnings; the theme's
    whole "colors" mapping is installed regardless.

    Raises:
        ConfigError: theme file missing, empty, or lacking "colors".
    """
    theme_file = self._resource_path('themes') / f'{theme}.yaml'
    if not theme_file.is_file():
        raise ConfigError(f'Theme "{theme}" not found')

    theme_yaml = self._load(theme_file)
    if theme_yaml is None:
        raise ConfigError(f'File {theme_file.name} is empty')
    if 'colors' not in theme_yaml:
        raise ConfigError(f'{theme_file} does not contain color config')

    palette = [
        'black', 'red', 'green', 'yellow',
        'blue', 'magenta', 'cyan', 'white',
    ]
    expected_props = {
        'primary': ['background', 'foreground'],
        'normal': palette,
        'bright': palette,
    }

    # warn (but proceed) when the theme omits expected groups or entries
    for k, wanted in expected_props.items():
        if k not in theme_yaml['colors']:
            log.warn(f'Missing "colors:{k}" for theme "{theme}"')
            continue
        for v in wanted:
            if v not in theme_yaml['colors'][k]:
                log.warn(f'Missing "colors:{k}:{v}" for theme "{theme}"')

    self.config['colors'] = theme_yaml['colors']
    log.ok(f'Theme {theme} applied')
def check_action (self):
    """Print the available actions and resolve which one to run.

    Uses PARAMS['action'] when present, otherwise prompts until the
    user names a valid action; the choice is saved back to PARAMS.
    """
    available_actions = [a[:-6] for a in dir(action)
                         if re.match('[^_].*Action', a)]
    print()
    cprint('Actions available: ', 'white', attrs=['bold'])
    for act in available_actions:
        print(' - ' + act)
    print()

    if 'action' not in PARAMS:
        log.info('No action found in parameters. Asking user.')
        ACTION = cinput('Enter action')
        # keep asking until the name matches a known *Action class
        while ACTION not in available_actions:
            log.warn('Action not available: %s.' % ACTION)
            ACTION = cinput('Enter action')
        log.ok('Action available.')
    else:
        ACTION = PARAMS['action']
        log.ok('Found action in parameters: %s.' % ACTION)

    PARAMS['action'] = ACTION
    save_params()
def load (self):
    """ Loads parameters file into class' params argument.

    Opens the JSON config file, creating it (with an empty object) when
    absent.  On any failure self._params falls back to {}.  Returns
    self for chaining.
    """
    try:
        f = open(self.get_file(True))
        log.ok('Config file {0} found.'.format(
            self.get_file(expanded=False)))
    except FileNotFoundError:
        log.info('Config file not found in {0}. Creating it.'.format(
            self.get_path(expanded=False)))
        try:
            os.makedirs(self.get_path(expanded=True), exist_ok=True)
            # BUG FIX: the original 'a' handle was never closed; seed the
            # new file with an empty JSON object, then reopen for reading.
            with open(self.get_file(expanded=True), 'a') as seed:
                seed.write('{}')
            f = open(self.get_file(expanded=True))
            log.ok('Creation of file {0} successfull.'.format(
                self.get_file(expanded=False)))
        except OSError:
            # BUG FIX: previously `f` stayed unbound here, causing a
            # NameError below; bail out with empty parameters instead.
            log.warn('Creation of file {0} unsuccessfull.'.format(
                self.get_file(expanded=False)))
            self._params = {}
            log.warn('Failed to parse parameters. Using empty ones.')
            return self
    try:
        # `with` guarantees the handle is closed (it previously leaked)
        with f:
            self._params = json.loads(f.read())
        log.ok('Parameters parsed successfully.')
    except (OSError, ValueError):
        # ValueError covers json.JSONDecodeError
        self._params = {}
        log.warn('Failed to parse parameters. Using empty ones.')
    return self
# pprint('writing data in file') # out_file.write(json.dumps(fresh_data, sort_keys=False, indent=2)) # out_file.close() # req = do_call_api(did, json.dumps(item)) # try: # pprint('put data for ' + did) # r = urllib2.urlopen(req) # pprint(r.read()) # except urllib2.HTTPError as e: # print e.code # print e.read() # time.sleep(4) # finally: # time.sleep(1) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--file", help="file to scan for data sources") try: args = parser.parse_args() if args.file is None: args.file = 'all-dev-ip.json' log.ok('running... with options: %s ' % args) run(args.file) except: raise parser.print_help()
# NOTE(review): fragment of an OAI dlsim test case (Python 2 syntax:
# `except log.err, e`).  The `try:` matching the first `except` below
# starts before this chunk; indentation here is reconstructed.
    # run dlsim three times, each watching for a different fatal pattern
    conf = '-a -n 100'
    diag = 'dlsim is not running normally (Segmentation fault / Exiting / FATAL), debugging might be needed'
    trace = logdir + '/log_' + host + case + test + '_1.txt;'
    tee = ' 2>&1 | tee ' + trace
    oai.send_expect_false('./dlsim.rel8.'+ host + ' ' + conf + tee, 'Segmentation fault', 30)
    trace = logdir + '/log_' + host + case + test + '_2.txt;'
    tee = ' 2>&1 | tee ' + trace
    oai.send_expect_false('./dlsim.rel8.'+ host + ' ' + conf + tee, 'Exiting', 30)
    trace = logdir + '/log_' + host + case + test + '_3.txt;'
    tee = ' 2>&1 | tee ' + trace
    oai.send_expect_false('./dlsim.rel8.'+ host + ' ' + conf + tee, 'FATAL', 30)
except log.err, e:
    # any matched fatal pattern is reported as a test failure
    log.fail(case, test, name, conf, e.value, diag, logfile,trace)
else:
    log.ok(case, test, name, conf, '', logfile)
try:
    # test 210: performance sweep over PRB/CHANNEL/MCS/antenna/PDCCH/
    # TX-mode/SNR combinations (loop body continues past this chunk)
    test = 210
    name = 'Run oai.dlsim.perf.'+str(PERF)+'%'
    diag = 'no diagnostic is available, check the log file'
    for i in range(len(PRB)):
        for o in range(len(CHANNEL)):
            MIN_SNR = 0
            for j in range(len(MCS)):
                for k in range(1,ANT_TX):
                    for m in range (1,ANT_RX):
                        for n in range(1,PDCCH):
                            for p in range(1,TX_MODE):
                                for q in range(MIN_SNR,MAX_SNR):
# NOTE(review): fragment of an OAI build-check script (Python 2 syntax).
# The `try:` matching the first `except` below starts before this
# chunk; indentation here is reconstructed.
    # test 00: flag locally-modified Makefiles (possible un-added files)
    log.start()
    test = "00"
    name = "Check oai.svn.add"
    conf = "svn st -q | grep makefile"
    diag = "Makefile(s) changed. If you are adding a new file, make sure that it is added to the svn"
    rsp = oai.send_recv("svn st -q | grep -i makefile;")
    for item in rsp.split("\n"):
        if "Makefile" in item:
            # NOTE(review): rsp2 keeps only the LAST matching line and is
            # unbound when nothing matches — confirm that is intended.
            rsp2 = item.strip() + "\n"
    oai.find_false_re(rsp, "Makefile")
except log.err, e:
    diag = diag + "\n" + rsp2
    # log.skip(case, test, name, conf, e.value, logfile)
    log.skip(case, test, name, conf, "", diag, logfile)
else:
    log.ok(case, test, name, conf, "", logfile)

oai.send("cd SIMU/USER;")
oai.send("mkdir " + logdir + ";")

try:
    # test 01: clean rebuild of oai rel8 (body continues past this chunk)
    log.start()
    test = "01"
    name = "Compile oai.rel8.make"
    conf = "make"
    trace = logdir + "/log_" + case + test + ".txt;"
    tee = " 2>&1 | tee " + trace
    diag = "check the compilation errors for oai"
    oai.send("make cleanall;")
    oai.send("make cleanasn1;")
    oai.send("rm -f ./oaisim.rel8." + host)
# NOTE(review): fragment of a console-command dispatch chain; the
# opening `if command...` branch lies outside this chunk.
elif command.startswith("addpeer"):
    # addpeer <ip>: register a peer and trigger a sync
    peer = command.split(" ")[1]
    network.add_peer(peer)
elif command.startswith("mkevent"):
    # mkevent <competition> {json}: insert an event parsed from the
    # first {...} span of the command line
    competition = command.split(" ")[1]
    jstr = command[command.find("{"):command.rfind("}")+1]
    try:
        event = json.loads(jstr)
        database.create_event(event)
    except Exception as e:
        log.error("mkevent","Error while making event")
        traceback.print_exc()
    else:
        log.ok("mkevent","inserted into database")
elif command.startswith("ftevent"):
    # ftevent match <competition> <match> | ftevent competition <competition>
    # NOTE(review): this branch only parses its arguments — no action is
    # visible in this chunk; confirm whether the fetch logic was dropped.
    query = command.split(" ")[1]
    if query == "match":
        competition = command.split(" ")[2]
        match = command.split(" ")[3]
    elif query == "competition":
        competition = command.split(" ")[2]
elif command == "rescan":
    # rescan: cancel the pending timer and run a peer scan immediately
    network.scan_timer.cancel()
    network.peerscan()
elif command.startswith("competitions"):
    year = int(command.split(" ")[1])
def run(start, offset):
    """Migrate Drupal 'news' nodes (rows start..start+offset) into the new CMS.

    For each published node: look the slug up in the target API, and if
    absent upload its featured image, resolve/create its categories,
    then create the news item.  Already-present slugs are skipped.
    """
    log.ok('starting... with options: %s,%s ' % (start, offset))
    cnx = mysql.connector.connect(**config)
    cursor = cnx.cursor()
    catCursor = cnx.cursor()
    # one row per node, with every field_data_* table LEFT JOINed on
    # (entity_id, revision_id); paged by the limit placeholders
    query = (
        "SELECT n.nid, n.title, r.body_value, fdfi.field_intro_value, fdfb.field_byline_value, fdfsl.field_short_headline_value, fdfc.field_caption_value, fdfco.field_color_value, fdfcn.field_contact_no_value, fdfd.field_date_value, fdfdn.field_designation_value, fdfe.field_email_value, fdfel.field_external_link_value, fdffiat.field_file_image_alt_text_value, fdffitt.field_file_image_title_text_value, fdflnb.field_lead_news_behavior_value, fdfnb.field_news_body_value, fdfpd.field_publish_date_value, fdfst.field_section_title_value, fdfs.field_shoulder_value, fdfsh.field_sub_headline_value, fdffi.field_featured_image_fid, fm.uri, fm.filename, fdfa.field_author_target_id, fdfab.field_author_bio_value FROM node n LEFT JOIN field_data_body r ON n.nid = r.entity_id AND n.vid = r.revision_id LEFT JOIN field_data_field_intro fdfi ON n.nid = fdfi.entity_id AND n.vid = fdfi.revision_id LEFT JOIN field_data_field_byline fdfb ON n.nid = fdfb.entity_id AND n.vid = fdfb.revision_id LEFT JOIN field_data_field_short_headline fdfsl ON n.nid = fdfsl.entity_id AND n.vid = fdfsl.revision_id LEFT JOIN field_data_field_featured_image fdffi ON n.nid = fdffi.entity_id AND n.vid = fdffi.revision_id LEFT JOIN field_data_field_author fdfa ON n.nid = fdfa.entity_id AND n.vid = fdfa.revision_id LEFT JOIN field_data_field_author_bio fdfab ON n.nid = fdfab.entity_id AND n.vid = fdfab.revision_id LEFT JOIN file_managed fm ON fdffi.field_featured_image_fid = fm.fid LEFT JOIN field_data_field_caption fdfc ON n.nid = fdfc.entity_id AND n.vid = fdfc.revision_id LEFT JOIN field_data_field_color fdfco ON n.nid = fdfco.entity_id AND n.vid = fdfco.revision_id LEFT JOIN field_data_field_contact_no fdfcn ON n.nid = fdfcn.entity_id AND n.vid = fdfcn.revision_id LEFT JOIN field_data_field_date fdfd ON n.nid = fdfd.entity_id AND n.vid = fdfd.revision_id LEFT JOIN field_data_field_designation fdfdn ON n.nid = fdfdn.entity_id AND n.vid = fdfdn.revision_id LEFT JOIN field_data_field_email fdfe ON n.nid = fdfe.entity_id AND n.vid = fdfe.revision_id LEFT JOIN field_data_field_external_link fdfel ON n.nid = fdfel.entity_id AND n.vid = fdfel.revision_id LEFT JOIN field_data_field_file_image_alt_text fdffiat ON n.nid = fdffiat.entity_id AND n.vid = fdffiat.revision_id LEFT JOIN field_data_field_file_image_title_text fdffitt ON n.nid = fdffitt.entity_id AND n.vid = fdffitt.revision_id LEFT JOIN field_data_field_lead_news_behavior fdflnb ON n.nid = fdflnb.entity_id AND n.vid = fdflnb.revision_id LEFT JOIN field_data_field_news_body fdfnb ON n.nid = fdfnb.entity_id AND n.vid = fdfnb.revision_id LEFT JOIN field_data_field_publish_date fdfpd ON n.nid = fdfpd.entity_id AND n.vid = fdfpd.revision_id LEFT JOIN field_data_field_section_title fdfst ON n.nid = fdfst.entity_id AND n.vid = fdfst.revision_id LEFT JOIN field_data_field_shoulder fdfs ON n.nid = fdfs.entity_id AND n.vid = fdfs.revision_id LEFT JOIN field_data_field_sub_headline fdfsh ON n.nid = fdfsh.entity_id AND n.vid = fdfsh.revision_id where n.type = 'news' and n.status = 1 order by n.nid desc"
        " limit %s,%s")
    for result in cursor.execute(query, (start, offset), multi=True):
        if result.with_rows:
            # print("Rows produced by statement '{}':".format(result.statement))
            rows = result.fetchall()
            for row in rows:
                news = process_row(row)
                # check whether this slug already exists in the target API
                squery = 'slug:' + news['slug']
                print(squery)
                news_response = search_news(squery)
                print(news_response)
                if news_response.status_code == 200:
                    news_found = news_response.json()
                    if len(news_found) < 1:
                        # new item: move the featured image first
                        img_hash = upload_img(news['featuredImage'], news['id'])
                        news['featuredImage'] = img_hash
                        # up to 3 taxonomy categories per node
                        catQuery = (
                            "select fdfnc.entity_id, ttd.name, ttd.description from field_data_field_news_cat fdfnc LEFT JOIN taxonomy_term_data ttd ON ttd.tid = fdfnc.field_news_cat_tid where fdfnc.entity_id=%s"
                            " limit %s,%s")
                        for cat_result in catCursor.execute(
                                catQuery, (news.get('id'), 0, 3), multi=True):
                            if cat_result.with_rows:
                                # print("Rows produced by statement '{}':".format(cat_result.statement))
                                cat_rows = cat_result.fetchall()
                                for cat_row in cat_rows:
                                    (entity_id, cat_name, cat_description) = cat_row
                                    print('category: ', entity_id, cat_name, cat_description)
                                    cat_slug = cat_name.replace(' ', '-')
                                    # find-or-create the category by slug+title
                                    cat_response = search_categories(
                                        'slug:' + cat_slug + '+AND+title:' + cat_name)
                                    print(cat_response)
                                    if cat_response.status_code == 200:
                                        cats = cat_response.json()
                                        if len(cats) < 1:
                                            category = {
                                                "slug": cat_slug,
                                                "title": cat_name,
                                                "description": cat_description,
                                                "image": "",
                                                "language": "BN",
                                                "parentId": None,
                                                "parentSlug": "",
                                                "status": "DRAFT"
                                            }
                                            cat_create_result = create_category(
                                                category)
                                            if cat_create_result.status_code == 201:
                                                news.get('categories').append(
                                                    cat_create_result.json())
                                        else:
                                            news.get('categories').append(
                                                cats[0])
                        # else:
                        #     print("Number of rows affected by statement '{}': {}".format(result.statement, result.rowcount))
                        # the target API assigns its own id; fill fallbacks
                        del (news['id'])
                        if news.get('byline') == None:
                            news['byline'] = news['title']
                        if news.get('imageCaption') == None:
                            news['imageCaption'] = news['title']
                        print('Publishing news: %s' % news)
                        cnews_response = create_news(news)
                        print(cnews_response)
                        if cnews_response.status_code == 201:
                            print('news created', cnews_response.json().get('slug'))
                    else:
                        print('news with slug %s already exists' % news.get('slug'))
    cursor.close()
    catCursor.close()
    cnx.close()
def close_connection(self):
    """Close the serial connection and report it."""
    connection = self.ser
    connection.close()
    log.ok('port is close')
def change_opacity(self, opacity: float):
    """Set the terminal background opacity.

    Raises:
        ConfigError: when *opacity* lies outside [0.0, 1.0].
    """
    if opacity < 0.0 or opacity > 1.0:
        raise ConfigError('Opacity should be between 0.0 and 1.0')
    self.config['background_opacity'] = opacity
    log.ok(f'Opacity set to {opacity:.2f}')
def close_connection(self):
    """ end connection: release the serial port and log it """
    port_handle = self.ser
    port_handle.close()
    log.ok('port is close')
def remove_peer(peer, reason):
    """Drop *peer* from the global peer list, logging *reason*.

    No-op when the peer is not currently listed.
    """
    global peers
    if peer not in peers:
        return
    peers.remove(peer)
    log.ok("Network", "Removed peer '" + peer + "' reason: " + reason)
def sync(peer):
    """Exchange current-season data with *peer*.

    Pulls the season, pushes everything we have, then requests match
    data for each competition of the current year.
    """
    request_season(peer, datetime.now().year)
    push_all(peer, datetime.now().year)
    competitions = database.list_competitions(datetime.now().year)
    for entry in competitions:
        request_matches(peer, entry['competition'])
    log.ok("Network", "Finished syncing with '%s'" % peer)
def add_peer(peer):
    """Register a newly discovered *peer* and immediately sync with it.

    Skips peers we already know and our own local addresses.
    """
    global peers
    if peer in peers or peer in localips:
        return
    peers.append(peer)
    log.ok("Network", "Added peer '" + peer + "'. Syncing now...")
    sync(peer)