def index_column(self, column, index_config):
    column.prepare_data()
    doc_body = {
        'name': column.name,
        'semantic_type': column.semantic_type,
        'content_length': column.content_length,
        'data_size': len(column.value_list),
        'values': column.value_list,
        'histogram': column.histogram_list
    }
    if column.is_numeric():
        doc_body['numeric'] = column.numeric_list
        doc_body['sample_numeric'] = column.sample_list
    else:
        doc_body['textual'] = column.value_text
    self.es.index(index=Utils.get_index_name(index_config), doc_type=column.semantic_type, body=doc_body)
    # Register the semantic type itself (only once) so it can be listed later.
    doc_body = {'semantic_type': column.semantic_type}
    if not self.es.search_exists(index=Utils.get_index_name(index_config), doc_type='semantic', body=doc_body):
        self.es.index(index=Utils.get_index_name(index_config), doc_type='semantic', body=doc_body)
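# Sketch of the document body produced above for a numeric column (field names
# taken from index_column; the concrete values are invented for illustration):
example_doc_body = {
    'name': 'temperature',
    'semantic_type': 'Temperature',
    'content_length': 42,
    'data_size': 3,
    'values': ['21.5', '19.0', '23.1'],
    'histogram': [1, 1, 1],
    'numeric': [21.5, 19.0, 23.1],    # only present for numeric columns
    'sample_numeric': [21.5, 23.1],   # only present for numeric columns
}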
def op_remove_build():
    validate_input()
    if not Utils.confirm("You are about to remove Marathon build for {}.".format(
            MarathonConfig.marathon_version())):
        LOG.info("You have cancelled the action.")
        exit(0)
    Utils.cmd("rm -rf {}/{}".format(
        MarathonConfig.packages_dir(),
        MarathonConfig.marathon_version()))
def apply_mesos_patches(build_dir_mesos):
    ## Patch lookup order:
    #  - <mesos-version>-<os-family>-<os-version>
    #  - <mesos-version>-<os-family>
    #  - <mesos-version>
    patch_files = [
        "{}/{}-{}.patch".format(
            MesosConfig.mesos_patches_dir(),
            MesosConfig.mesos_version(),
            MesosConfig.operating_system().replace(":", "-")),
        "{}/{}-{}.patch".format(
            MesosConfig.mesos_patches_dir(),
            MesosConfig.mesos_version(),
            MesosConfig.operating_system().split(":")[0]),
        "{}/{}.patch".format(
            MesosConfig.mesos_patches_dir(),
            MesosConfig.mesos_version())
    ]
    patch_file_to_use = None
    for patch_file in patch_files:
        if os.path.isfile(patch_file):
            patch_file_to_use = patch_file
            break
    if patch_file_to_use is not None:
        LOG.info("Found a patch file {} for Mesos. Applying...".format(patch_file_to_use))
        result = Utils.cmd("cd {} && git apply {}".format(build_dir_mesos, patch_file_to_use))
        if result['ExitCode'] != 0:
            Utils.print_result_error(LOG, "Patch could not be applied to {}.".format(build_dir_mesos), result)
            exit(105)
        else:
            LOG.info("Patch applied.")
    else:
        LOG.info("No patches for Mesos {}.".format(MesosConfig.mesos_version()))
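# Example of the lookup order above with assumed values
# mesos_version() == "1.4.0" and operating_system() == "ubuntu:16.04":
version, os_name = "1.4.0", "ubuntu:16.04"
candidates = [
    "{}-{}.patch".format(version, os_name.replace(":", "-")),  # 1.4.0-ubuntu-16.04.patch
    "{}-{}.patch".format(version, os_name.split(":")[0]),      # 1.4.0-ubuntu.patch
    "{}.patch".format(version),                                # 1.4.0.patch
]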
def validate_input():
    marathon_version = MarathonConfig.marathon_version()
    if marathon_version == "":
        Utils.exit_with_cmd_error(
            __file__,
            "Marathon version not given. Run with show-releases to see what the available versions are.")
    if marathon_version not in list_releases():
        Utils.exit_with_cmd_error(
            __file__,
            "Marathon version ({}) is not supported. Run with show-releases to see what the available versions are.".format(marathon_version))
def validate_input():
    chronos_version = ChronosConfig.chronos_version()
    if chronos_version == "":
        Utils.exit_with_cmd_error(
            __file__,
            "Chronos version not given. Run with show-releases to see what the available versions are.")
    if chronos_version not in list_releases():
        Utils.exit_with_cmd_error(
            __file__,
            "Chronos version ({}) is not supported. Run with show-releases to see what the available versions are.".format(chronos_version))
def op_remove_build():
    validate_input()
    if not Utils.confirm("You are about to remove Mesos build for {} {}.".format(
            MesosConfig.mesos_version(), MesosConfig.operating_system())):
        exit(0)
    Utils.cmd("rm -rf {}/{}-{}".format(
        MesosConfig.packages_dir(),
        MesosConfig.mesos_version(),
        MesosConfig.operating_system().replace(":", "-")))
def list_releases():
    result = Utils.cmd("cd {} && git tag -l".format(MesosConfig.mesos_repository_dir()))
    if result['ExitCode'] == 0:
        releases = result['StdOut'].split("\n")
        releases.append(MesosConfig.mesos_master_branch())
        return releases
    else:
        Utils.print_result_error(LOG, "Failed listing releases.", result)
        return []
def test_basic_pypandoc_example(self):
    """
    This test is testing a basic pypandoc function call.
    """
    pypandoc_result = pypandoc.convert('- *foo* bar', 'html5', format='org')
    expected_html5_result = u'<ul>\n<li><strong>foo</strong> bar</li>\n</ul>\n'
    self.assertEqual(Utils.normalize_lineendings(pypandoc_result),
                     Utils.normalize_lineendings(expected_html5_result))
def prepare_data(self):
    if not self.is_prepared:
        self.histogram_list = Utils.get_distribution(self.value_list)
        self.numeric_list = Utils.clean_examples_numeric(self.value_list)
        if self.is_numeric():
            # Draw a sample of roughly 100 values without replacement.
            self.sample_list = sc.parallelize(self.numeric_list).sample(
                False, 100.0 / len(self.numeric_list)).collect()
        else:
            # Concatenate all values into one whitespace-separated text blob.
            self.value_text = sc.parallelize(self.value_list).map(
                lambda x: " %s " % x).reduce(lambda x, y: x + y)
        self.is_prepared = True
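# Note: the fraction 100.0 / len(...) above exceeds 1.0 for columns with fewer
# than 100 numeric values, which some Spark samplers reject when sampling
# without replacement. A minimal guard, assuming the same global sc as above:
numeric_list = [1.0, 2.0, 3.0]  # fewer than 100 values
fraction = min(1.0, 100.0 / max(1, len(numeric_list)))
sample_list = sc.parallelize(numeric_list).sample(False, fraction).collect()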
def __init__(self, column, train_examples_map, sc):
    self.train_examples_map = train_examples_map
    self.test_examples = column.value_list
    self.true_label = column.semantic_type
    self.name = column.name
    self.numeric_test_examples = Utils.clean_examples_numeric(self.test_examples)
    self.is_numeric = column.is_numeric()
    self.hist_examples = Utils.get_distribution(self.test_examples)
    self.sc = sc
def test_is_ipv6(self):
    self.assertFalse(Utils.is_ipv6(""))
    self.assertFalse(Utils.is_ipv6(None))
    self.assertFalse(Utils.is_ipv6(self))
    self.assertTrue(Utils.is_ipv6("1:2:3:4:5:6:7:8"))
    self.assertTrue(Utils.is_ipv6("2001:db8:a0b:12f0::1"))
    self.assertTrue(Utils.is_ipv6("FF02:0:0:0:0:0:0:2"))
    self.assertTrue(Utils.is_ipv6("F::1"))
    self.assertFalse(Utils.is_ipv6("G::1"))
    self.assertFalse(Utils.is_ipv6("AB:02:3008:8CFD:AB:02:3008:8CFD:02"))  # too long
    self.assertFalse(Utils.is_ipv6("AB:02:3008:8CFD::02::8CFD"))  # can't have two '::'
def test_pypandoc_with_umlauts(self):
    """
    This test is testing umlaut and charset handling with pypandoc.
    """
    pypandoc_result = pypandoc.convert('This is an umlaut test: öÄ߀', 'html5', format='org', encoding='utf-8')
    expected_html5_result = u'<p>This is an umlaut test: öÄ߀</p>\n'
    ## FIXXME: Umlaut conversion does have encoding issues.
    self.assertEqual(Utils.normalize_lineendings(pypandoc_result),
                     Utils.normalize_lineendings(expected_html5_result))
def check_parameters(self, args):
    """
    Checks the values of a dict of configured parameters.

    Returns a dict with all defined parameters and their values, a bool
    indicating if all parameters are ok (True) or if a mandatory parameter
    is not configured (False), and a dict of per-parameter hide flags.
    It returns default values for parameters that have not been configured.
    The resulting dict contains the values in the datatype of the parameter
    definition.

    :param args: Configured parameters with their values
    :type args: dict of parameter values (values as string)

    :return: All defined parameters with values, flag if all parameters are ok (no mandatory is missing), hide flags
    :rtype: dict, bool, dict
    """
    addon_params = collections.OrderedDict()
    hide_params = collections.OrderedDict()
    if self.meta is None:
        logger.info(self._log_premsg + "No metadata found")
        return (addon_params, True, hide_params)
    if self.parameters is None:
        logger.info(self._log_premsg + "No parameter definitions found in metadata")
        return (addon_params, True, hide_params)

    allparams_ok = True
    for param in self._paramlist:
        value = Utils.strip_quotes(args.get(param))
        if value is None:
            if (self.parameters[param] is not None) and self.parameters[param].get('mandatory'):
                logger.error(self._log_premsg + "'{}' is mandatory, but was not found in /etc/{}".format(param, self._addon_type + YAML_FILE))
                allparams_ok = False
            else:
                addon_params[param] = self.get_parameter_defaultvalue(param)
                hide_params[param] = Utils.to_bool(self.parameters[param].get('hide'), default=False)
                logger.info(self._log_premsg + "value not found in plugin configuration file for parameter '{}' -> using default value '{}' instead".format(param, addon_params[param]))
                # logger.warning(self._log_premsg+"'{}' not found in /etc/{}, using default value '{}'".format(param, self._addon_type+YAML_FILE, addon_params[param]))
        else:
            value = self._expand_listvalues(param, value)
            if self._test_value(param, value):
                addon_params[param] = self._convert_value(param, value)
                hide_params[param] = Utils.to_bool(self.parameters[param].get('hide'), default=False)
                logger.debug(self._log_premsg + "Found '{}' with value '{}' in /etc/{}".format(param, value, self._addon_type + YAML_FILE))
            else:
                if self.parameters[param].get('mandatory') == True:
                    logger.error(self._log_premsg + "'{}' is mandatory, but no valid value was found in /etc/{}".format(param, self._addon_type + YAML_FILE))
                    allparams_ok = False
                else:
                    addon_params[param] = self.get_parameter_defaultvalue(param)
                    hide_params[param] = Utils.to_bool(self.parameters[param].get('hide'), default=False)
                    logger.error(self._log_premsg + "Found invalid value '{}' for parameter '{}' (type {}) in /etc/{}, using default value '{}' instead".format(value, param, self.parameters[param]['type'], self._addon_type + YAML_FILE, str(addon_params[param])))
    return (addon_params, allparams_ok, hide_params)
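# Hedged usage sketch of check_parameters: the meta object and the parameter
# names 'host'/'port' are invented for illustration. The caller typically
# unpacks all three return values and aborts if a mandatory parameter is
# missing or invalid.
addon_params, allparams_ok, hide_params = meta.check_parameters({'host': '192.168.1.10', 'port': '8080'})
if not allparams_ok:
    raise RuntimeError('a mandatory plugin parameter is missing or invalid')
print(addon_params['port'])  # already converted, e.g. int 8080 if the declared type is 'int'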
def test_is_int(self):
    self.assertFalse(Utils.is_int(""))
    self.assertFalse(Utils.is_int(None))
    self.assertFalse(Utils.is_int(self))
    self.assertFalse(Utils.is_int("1.2.3.4"))
    self.assertFalse(Utils.is_int("xyzabcd"))
    self.assertFalse(Utils.is_int("1.0"))
    self.assertTrue(Utils.is_int("255"))
    self.assertTrue(Utils.is_int("0"))
    self.assertTrue(Utils.is_int("-1"))
def setup(program):
    from lib.config import Config
    Config.add_argument("command",
                        help="Command to execute.",
                        metavar="COMMAND",
                        default="",
                        choices=["build", "show-releases", "show-builds", "show-sources",
                                 "remove-build", "remove-sources", "check-this-system"])
    Config.add_argument("--chronos-version",
                        dest="chronos_version",
                        help="Chronos version to build.",
                        metavar="CHRONOS_VERSION",
                        default=Utils.env_with_default("CHRONOS_VERSION", ""))
    Config.add_argument("--chronos-master-branch",
                        dest="chronos_master_branch",
                        help="Chronos master branch name.",
                        metavar="CHRONOS_MASTER_BRANCH_NAME",
                        default=Utils.env_with_default("CHRONOS_MASTER_BRANCH_NAME", "master"))
    Config.add_argument("--chronos-git-repository",
                        dest="chronos_git_repository",
                        help="Chronos git repository to use.",
                        metavar="CHRONOS_GIT_REPOSITORY",
                        default=Utils.env_with_default("CHRONOS_GIT_REPOSITORY",
                                                       "https://github.com/mesos/chronos.git"))
    Config.add_argument("--docker-templates",
                        dest="docker_templates_dir",
                        help="Docker templates base directory.",
                        metavar="DOCKER_TEMPLATES_DIR",
                        default=Utils.env_with_default("DOCKER_TEMPLATES_DIR",
                                                       "{}/docker/chronos".format(
                                                           os.path.dirname(os.path.dirname(os.path.dirname(__file__))))))
    Config.add_argument("--source-dir",
                        dest="source_dir",
                        help="Directory in which the Chronos sources are stored.",
                        metavar="SOURCE_DIR",
                        default=Utils.env_with_default("SOURCE_DIR",
                                                       os.path.expanduser("~/.mesos-toolbox/chronos/sources")))
    Config.add_argument("--packages-dir",
                        dest="packages_dir",
                        help="Directory in which packaged versions of Chronos are stored.",
                        metavar="PACKAGES_DIR",
                        default=Utils.env_with_default("PACKAGES_DIR",
                                                       os.path.expanduser("~/.mesos-toolbox/chronos/packages")))
    Config.add_argument("--work-dir",
                        dest="work_dir",
                        help="Directory in which this program does the work.",
                        metavar="WORK_DIR",
                        default=Utils.env_with_default("WORK_DIR",
                                                       os.path.expanduser("~/.mesos-toolbox/chronos/temp")))
    Config.add_argument("--m2-dir",
                        dest="m2_dir",
                        help="Maven dependencies directory.",
                        metavar="MVN_DIR",
                        default=Utils.env_with_default("MVN_DIR",
                                                       os.path.expanduser("~/.mesos-toolbox/.m2/chronos")))
    Config.add_argument("--with-tests",
                        dest="with_tests",
                        help="Run unit tests when building Chronos.",
                        action="store_true")
    return Config.ready(program)
def get(self):
    spread = SpreadFactory().get_instance()
    week_req = self.request.get(d.GAME_WEEK, default_value=utils.default_week())
    week = (self._validate_params(d.GAME_WEEK, week_req) or utils.default_week())
    result = spread.fetch(week)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.headers['Access-Control-Allow-Origin'] = '*'
    self.response.set_status(http_code.OK)
    self.response.out.write(json.dumps(result, indent=4))
def post(self):
    data = {}
    result = {
        "data": 0,
        "Success": "Success",
        "status_code": 201
    }
    score = ScoreFactory().get_instance()
    week = utils.default_week()
    parameters = self.request.POST.items()
    for item in parameters:
        value = self._validate_params(item[0], item[1])
        if value is not None:
            data[item[0]] = value
    if d.NFL_GAME_ID in data:
        if d.GAME_WEEK in data:
            week = data[d.GAME_WEEK]
        result['data'] = score.save(week, [data])
    self.response.headers['Content-Type'] = 'application/json'
    self.response.headers['Access-Control-Allow-Origin'] = '*'
    self.response.set_status(result['status_code'])
    self.response.out.write(json.dumps(result, indent=4))
def _fetch_score(self, week):
    stale_timestamp = (
        datetime.datetime.utcnow() -
        datetime.timedelta(seconds=_ScoreDatastore.__THRESHOLD))
    result = []
    scores = self.__query_scores(week)
    for game in scores:
        # Only check for staleness when dealing with current games
        if week % 100 >= utils.default_week():
            # Reject data if any of it is stale
            if game.timestamp <= stale_timestamp:
                return []
        result.append({
            d.AWAY_NAME: game.away_name,
            d.AWAY_SCORE: game.away_score,
            d.GAME_CLOCK: game.game_clock,
            d.GAME_DAY: game.game_day,
            d.GAME_SEASON: game.year,
            d.GAME_STATUS: game.game_status,
            d.GAME_TAG: game.game_tag,
            d.GAME_TIME: game.game_time,
            d.GAME_WEEK: game.week,
            d.HOME_NAME: game.home_name,
            d.HOME_SCORE: game.home_score,
            d.NFL_GAME_ID: game.game_id,
            d.SPREAD_MARGIN: game.spread_margin,
            d.SPREAD_ODDS: game.spread_odds
        })
    return result
def generateXml(self):
    try:
        root = initial_report()
        property = Property()
        me_particular = ME_Particular()
        me_table = ME_Table()
        property.ship_name = self.ui.shipNameEdit.text()
        property.call_sign = self.ui.callSignEdit.text()
        property.imo_number = self.ui.imoNumberEdit.text()
        property.port_registry = self.ui.portRegistryEdit.text()
        property.company_name = self.ui.companyNameEdit.text()
        property.class_society = self.ui.classSocietyEdit.text()
        property.email = self.ui.emailEdit.text()
        property.telphone = self.ui.telEdit.text()
        property.fax = self.ui.faxEdit.text()
        property.ship_type = self.ui.shipTypeComboBox.currentText()
        property.international_gross_tonnage = self.ui.igtEdit.text()
        property.international_net_tonnage = self.ui.intEdit.text()
        property.loa = self.ui.loaEdit.text()
        property.lbp = self.ui.lbpEdit.text()
        property.breadth_moulded = self.ui.breadthMouldedEdit.text()
        property.depth_moulded = self.ui.depthMouldedEdit.text()
        property.ecdis_maker = self.ui.ecdisMakerEdit.text()
        # NOTE: ecdis_model is read from depthMouldedEdit here; a dedicated
        # ECDIS model field may be intended.
        property.ecdis_model = self.ui.depthMouldedEdit.text()
        property.captain_name = self.ui.captain_name_edit.text()
        property.captain_crew_id = self.ui.captain_crew_id_edit.text()
        property.maintain_paper_chart = self.ui.yesRadioBtn.isChecked()
        me_particular.ME_Table = me_table
        cells = []
        pattern = '[./\s]'
        for i in range(self.ui.meTableWidget.rowCount()):
            vertical_name = self.ui.meTableWidget.verticalHeaderItem(i).text()
            repl_h_name = re.sub(pattern, '_', str(vertical_name))
            for j in range(self.ui.meTableWidget.columnCount()):
                column_name = self.ui.meTableWidget.horizontalHeaderItem(j).text()
                repl_c_name = re.sub(pattern, '_', str(column_name))
                cells.append(cell(repl_c_name, repl_h_name, self.ui.meTableWidget.item(i, j).text()))
        me_table.cell = cells
        me_particular.ME_Maker = self.ui.meMakerEdit.text()
        me_particular.ME_Model = self.ui.meModelEdit.text()
        property.ME_Particular = me_particular
        root.Property = property
        data_dir = Utils.createDataDir()
        fn_path = abs_lambda(os.path.join(data_dir, 'initial.xml'))
        with open(fn_path, 'w') as f:
            f.write('''<?xml version="1.0" encoding="UTF-8"?>\n''')
            root.export(f, 1, namespacedef_='xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
        msg_box = QMessageBox(QMessageBox.Information, "Success",
                              "Initial Report config file generated successfully")
        msg_box.exec_()
    except Exception as ex:
        self.logger.error('Generating the initial report config file failed')
        self.logger.error(ex)
        msg_box = QMessageBox(QMessageBox.Warning, "Warning",
                              "Initial Report config file generation failed\n" + str(ex))
        msg_box.exec_()
def _convert_valuetotype(self, typ, value):
    """
    Returns the value converted to the parameter's type
    """
    if typ == 'bool':
        result = Utils.to_bool(value)
    elif typ in ['int', 'scene']:
        result = int(value)
    elif typ in ['float', 'num']:
        result = float(value)
    elif typ == 'str':
        result = str(value)
    elif typ == 'list':
        if isinstance(value, list):
            result = value
        else:
            result = [value]
    elif typ == 'dict':
        result = dict(value)
    elif typ in ['ip', 'ipv4', 'mac']:
        result = str(value)
    elif typ == FOO:
        result = value
    else:
        logger.error(self._log_premsg + "unhandled type {}".format(typ))
        # fall back to the raw value so the function never returns an unbound name
        result = value
    return result
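# A quick illustration of the conversions above; 'meta' is an invented
# instance name, and the behavior of Utils.to_bool for the usual string
# spellings is an assumption:
meta._convert_valuetotype('bool', 'true')      # -> True (via Utils.to_bool)
meta._convert_valuetotype('int', '42')         # -> 42
meta._convert_valuetotype('list', 'a')         # -> ['a'] (scalar wrapped in a list)
meta._convert_valuetotype('ipv4', '10.0.0.1')  # -> '10.0.0.1' (kept as str)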
def setup(program):
    from lib.config import Config
    Config.add_argument("command",
                        help="Command to execute.",
                        metavar="COMMAND",
                        default="",
                        choices=["build", "show-releases", "show-builds", "show-sources",
                                 "remove-build", "remove-sources"])
    Config.add_argument("--marathon-version",
                        dest="marathon_version",
                        help="Marathon version to build.",
                        metavar="MARATHON_VERSION",
                        default=Utils.env_with_default("MARATHON_VERSION", ""))
    Config.add_argument("--marathon-master-branch",
                        dest="marathon_master_branch",
                        help="Marathon master branch name.",
                        metavar="MARATHON_MASTER_BRANCH_NAME",
                        default=Utils.env_with_default("MARATHON_MASTER_BRANCH_NAME", "master"))
    Config.add_argument("--marathon-git-repository",
                        dest="marathon_git_repository",
                        help="Marathon git repository to use.",
                        metavar="MARATHON_GIT_REPOSITORY",
                        default=Utils.env_with_default("MARATHON_GIT_REPOSITORY",
                                                       "https://github.com/mesosphere/marathon.git"))
    Config.add_argument("--docker-templates",
                        dest="docker_templates_dir",
                        help="Docker templates base directory.",
                        metavar="DOCKER_TEMPLATES_DIR",
                        default=Utils.env_with_default("DOCKER_TEMPLATES_DIR",
                                                       "{}/docker/marathon".format(
                                                           os.path.dirname(os.path.dirname(os.path.dirname(__file__))))))
    Config.add_argument("--source-dir",
                        dest="source_dir",
                        help="Directory in which the Marathon sources are stored.",
                        metavar="SOURCE_DIR",
                        default=Utils.env_with_default("SOURCE_DIR",
                                                       os.path.expanduser("~/.mesos-toolbox/marathon/sources")))
    Config.add_argument("--packages-dir",
                        dest="packages_dir",
                        help="Directory in which packaged versions of Marathon are stored.",
                        metavar="PACKAGES_DIR",
                        default=Utils.env_with_default("PACKAGES_DIR",
                                                       os.path.expanduser("~/.mesos-toolbox/marathon/packages")))
    Config.add_argument("--work-dir",
                        dest="work_dir",
                        help="Directory in which this program does the work.",
                        metavar="WORK_DIR",
                        default=Utils.env_with_default("WORK_DIR",
                                                       os.path.expanduser("~/.mesos-toolbox/marathon/temp")))
    Config.add_argument("--ivy2-dir",
                        dest="ivy2_dir",
                        help="Ivy2 dependencies directory.",
                        metavar="IVY2_DIR",
                        default=Utils.env_with_default("IVY2_DIR",
                                                       os.path.expanduser("~/.mesos-toolbox/.ivy2/marathon")))
    Config.add_argument("--with-tests",
                        dest="with_tests",
                        help="Run unit tests when building Marathon.",
                        action="store_true")
    return Config.ready(program)
def test_is_timeframe(self):
    self.assertFalse(Utils.is_timeframe(""))
    self.assertFalse(Utils.is_timeframe("abc"))
    self.assertTrue(Utils.is_timeframe("1"))
    self.assertTrue(Utils.is_timeframe("1i"))
    self.assertTrue(Utils.is_timeframe("1h"))
    self.assertTrue(Utils.is_timeframe("1d"))
    self.assertTrue(Utils.is_timeframe("1m"))
    self.assertTrue(Utils.is_timeframe("1y"))
def test_move_and_rename_file(mocker):
    mocker.patch('os.makedirs')
    mocker.patch('os.path.dirname')
    mock_move = mocker.patch('shutil.move')
    from lib.utils import Utils
    source = '/tmp/1'
    dest = '/tmp/2'
    Utils.move_and_rename_file(source, dest)
    assert mock_move.call_count == 1
    mock_move.assert_called_once_with(source, dest)
    mock_move.reset_mock()
    # no shutil.move call when source and destination are the same.
    Utils.move_and_rename_file(source, source)
    assert mock_move.call_count == 0
def __init__(self, smarthome, host='127.0.0.1', port='1883'):
    self._sh = smarthome
    self.clients = []
    self.items = {}
    self.logics = {}
    if Utils.is_ip(host):
        self.broker_ip = host
    else:
        self.broker_ip = ''
        logger.error('MQTT: Invalid ip address for broker specified, plugin not starting')
        return
    if Utils.is_int(port):
        self.broker_port = int(port)
    else:
        self.broker_port = 1883
        logger.error('MQTT: Invalid port number for broker specified, plugin trying standard port 1883')
    self.publisher = self.create_client('main')
def generateXml(self):
    root = request_report()
    property = Property()
    root.Property = property
    # general data
    property.ship_name = self.ui.ship_name_edit.text()
    property.call_sign = self.ui.call_sign_edit.text()
    property.voyage_number = self.ui.voyage_number_edit.text()
    property.date = self.ui.date_edit.text()
    # voyage data: departure
    voyage_det = voyage_detail()
    depart = departure()
    arr = arrival()
    depart.departure_date = self.ui.depart_date_edit.text()
    depart.departure_time = self.ui.departure_time_edit.text()
    depart.country = self.ui.departure_country_combo.currentText()
    depart.port = self.ui.departure_port_combo.currentText()
    depart.unlo_code = self.ui.departure_unlo_edit.text()
    depart.terminal = self.ui.departure_terminal_edit.text()
    # voyage data: arrival
    arr.arrival_date = self.ui.arrival_date_edit.text()
    arr.arrival_time = self.ui.arrival_time_edit.text()
    arr.country = self.ui.arrival_country_combo.currentText()
    arr.port = self.ui.arrival_port_combo.currentText()
    arr.unlo_code = self.ui.arrival_unlo_edit.text()
    arr.terminal = self.ui.arrival_terminal_edit.text()
    voyage_det.departure = depart
    voyage_det.arrival = arr
    property.voyage_detail = voyage_det
    # add via places
    via_places = []
    vias = via()
    list_item_count = self.ui.via_listWidget.count()
    for index in range(list_item_count):
        via_places.append(self.ui.via_listWidget.item(index).text())
    vias.place_name = via_places
    property.via = vias
    property.use_dw_route = self.ui.use_dw_route_yes_radio.isChecked()
    property.prefered_route_type = self.ui.prefer_route_type_combo.currentText()
    property.maximum_draft = self.ui.maximum_draft_edit.text()
    property.load_condition = self.ui.load_condition_edit.text()
    property.speed_setting = self.ui.speed_setting_edit.text()
    property.etd = self.ui.etd_edit.text()
    data_dir = Utils.createDataDir()
    fn_path = abs_lambda(os.path.join(data_dir, 'request.xml'))
    with open(fn_path, 'w') as f:
        f.write('''<?xml version="1.0" encoding="UTF-8"?>\n''')
        root.export(f, 1, namespacedef_='xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
    msg_box = QMessageBox(QMessageBox.Information, "Success",
                          "Revert Route config file generated successfully")
    msg_box.exec_()
def loadData(self):
    self.data = self.sc.textFile(self.appConf.userProfileDir)
    self.cachedData = self.data.map(lambda line: Utils.transform_json(line)).cache()
    # Flatten each JSON record's values into a list of floats.
    # (Python lists have no .map() method, so a comprehension is used here.)
    parsedData = self.cachedData.map(
        lambda p: [float(x) for x in str(list(p.values())).replace('[', '').replace(']', '').split(',')])
    vector = parsedData.map(lambda x: Vectors.dense(x))
    model = KMeans.train(vector, self.numCenter, self.numIteration)
def setModelData(self, editor, model, index):
    value = editor.text()
    if not value.isEmpty():
        try:
            format_value = Utils.formatPoint(value, PointType.LONGTITUDE)
            model.setData(index, format_value)
        except Exception as ex:
            self.logger.error(ex, exc_info=1)
def create_item(self, label, thumbnail_image, action, url=None):
    list_item = xbmcgui.ListItem(label=label)
    list_item.setArt({'thumb': thumbnail_image})
    list_item.setInfo('Video', {'title': label})
    call = Utils.get_url(action=action, url=url if url is not None else self._url)
    return list_item, call
def convertPoint(self, text):
    formated = ''
    if not text.isEmpty() and not text.isNull():
        if Utils.checkState(text, Utils.FORMAT_PATTERN):
            orient = text[-1]
            formated = text.replace('-', '')[:-2]
            if orient == 'S' or orient == 'W':
                # Python strings have no push_front(); prepend the sign instead.
                formated = '-' + formated
    return formated
def vod_channel_day(self, channel_id, day):
    xbmcplugin.setPluginCategory(self._handle, 'Annatel')
    xbmcplugin.setContent(self._handle, 'episodes')
    try:
        uri = Utils.build_uri(self._handle, self._vod_uri, act="program", channel=channel_id, day=day)
        xml_data = Utils.download_binary(uri)
    except StandardError as e:
        xbmcgui.Dialog().ok('tric', str(e))
        return
    listing = []
    parsed_xml = ET.fromstring(xml_data)
    for program in parsed_xml.findall('program'):
        try:
            name = program.find("name").text.encode('utf-8').strip()
        except AttributeError:
            name = ''
        try:
            description = program.find("description").text.encode('utf-8').strip()
        except AttributeError:
            description = ''
        try:
            url = program.find("url").text.encode('utf-8').strip()
        except AttributeError:
            url = None
        if url is not None:
            logo = self.retrieve_channel_logo(channel_id)
            list_item = xbmcgui.ListItem(label=name)
            list_item.setProperty('IsPlayable', 'true')
            list_item.setArt({'thumb': logo})
            list_item.setInfo('video', {
                'title': name,
                'episodeguide': description
            })
            call = Utils.get_url(action='channel_select', url=url)
            listing.append((call, list_item, False))
    xbmcplugin.addDirectoryItems(self._handle, listing, len(listing))
    xbmcplugin.endOfDirectory(self._handle)
def save_logic_parameters(self, logicname):
    params = self.get_body()
    self.logger.info("LogicsController.save_logic_parameters: logic = {}, params = {}".format(logicname, params))
    config_filename = os.path.join(self.etc_dir, 'logic')
    logic_conf = shyaml.yaml_load_roundtrip(config_filename)
    sect = logic_conf.get(logicname)
    if sect is None:
        response = {
            'result': 'error',
            'description': "Configuration section '{}' does not exist".format(logicname)
        }
    else:
        self.logger.info("LogicsController.save_logic_parameters: logic = {}, old params = {}".format(logicname, dict(sect)))
        for param, value in params.items():
            if value is None:
                sect.pop(param, None)
            else:
                self.logger.info("- param = {}, value = {}, type(value) = {}".format(param, value, Utils.get_type(value)))
                # Empty values are removed from the section instead of being stored.
                if (Utils.get_type(value) == 'str') and (value == ''):
                    sect.pop(param, None)
                elif (Utils.get_type(value) == 'list') and (value == []):
                    sect.pop(param, None)
                elif (Utils.get_type(value) == 'dict') and (value == {}):
                    sect.pop(param, None)
                else:
                    sect[param] = value
        self.logger.info("LogicsController.save_logic_parameters: logic = {}, new params = {}".format(logicname, dict(sect)))
        shyaml.yaml_save_roundtrip(config_filename, logic_conf, False)
        response = {'result': 'ok'}
    return json.dumps(response)
def _prepare_data_for_tx_eep_A5_38_08_03(self, item, tx_eep):
    """
    ### --- Data for A5-38-08 command 3 --- ###

    Eltako Devices:
    FDG14, FDG71L, FKLD61, FLD61, FRGBW71L, FSG14/1-10V, FSG71/1-10V,
    FSUD-230V, FUD14, FUD14-800W, FUD61NP, FUD61NPN, FUD71

    This method prepares the packet data for dimming the light.
    In case of dim_value == 0 the dimmer is switched off.
    """
    self.logger.debug('enocean-PrepareData: prepare data for tx_eep {}'.format(tx_eep))
    rorg = 0xa5
    block = 0
    # check if item has attribute block_dim_value
    if self._plugin_instance.has_iattr(item.conf, 'block_dim_value'):
        block_value = self._plugin_instance.get_iattr_value(item.conf, 'block_dim_value')
        if Utils.to_bool(block_value):
            block = 4
    # check if item has attribute dim_speed
    if self._plugin_instance.has_iattr(item.conf, 'dim_speed'):
        dim_speed = self._plugin_instance.get_iattr_value(item.conf, 'dim_speed')
        # bound dim_speed values to [0 - 100] %
        dim_speed = max(0, min(100, int(dim_speed)))
        self.logger.debug('enocean-PrepareData: {} use dim_speed = {} %'.format(tx_eep, dim_speed))
        # calculate dim_speed from percent into hex
        # 0x01 --> fastest speed --> 100 %
        # 0xFF --> slowest speed --> 0 %
        dim_speed = (255 - (254 * dim_speed / 100))
    else:
        # use internal dim_speed of the dim device
        dim_speed = 0x00
        self.logger.debug('enocean-PrepareData: {} no attribute dim_speed --> use internal dim speed'.format(tx_eep))
    if item() == 0:
        # if value == 0 switch off the dimmer
        dim_value = 0x00
        payload = [0x02, int(dim_value), int(dim_speed), int(8 + block)]
        self.logger.debug('enocean-PrepareData: {} prepare data to switch off'.format(tx_eep))
    else:
        dim_value = item()
        # check range of dim_value [0 - 100] %
        dim_value = max(0, min(100, dim_value))
        self.logger.debug('enocean-PrepareData: {} dim_value set to {} %'.format(tx_eep, dim_value))
        payload = [0x02, int(dim_value), int(dim_speed), int(9 + block)]
    optional = []
    return rorg, payload, optional
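# Worked example of the resulting telegram, following the arithmetic above
# (item and attribute values are invented for illustration):
# item() == 50, no 'dim_speed' attribute, no 'block_dim_value' attribute:
payload = [0x02, 50, 0x00, 9 + 0]                     # -> [0x02, 0x32, 0x00, 0x09]
# with a dim_speed attribute of 100 %: 255 - 254 * 100 / 100 = 1 -> fastest ramp
payload = [0x02, 50, int(255 - 254 * 100 / 100), 9]   # -> [0x02, 0x32, 0x01, 0x09]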
def search_types_data(self, index_config, semantic_types):
    result = self.es.search(index=Utils.get_index_name(index_config),
                            doc_type=','.join(semantic_types),
                            body={"query": {"match_all": {}}})
    # Emit ((type, field), [values...]) pairs for every field of every hit,
    # merge the value lists per (type, field), and collect the result as a map.
    # (Nested RDD operations are not allowed in Spark, so the per-hit expansion
    # is done with a plain comprehension instead of a nested parallelize().)
    return sc.parallelize(result['hits']['hits']).flatMap(
        lambda hit: [((hit['_type'], field), value if isinstance(value, list) else [value])
                     for field, value in hit['_source'].items()]).reduceByKey(
        lambda x, y: x + y).map(
        lambda x: (x[0][0], {x[0][1]: x[1]})).collectAsMap()
def autoloadConfig(self):
    configs, msg = Utils.readGeneralConfigFromXml()
    if configs is not None and len(configs) != 0:
        self.ui.ship_name_edit.setText(configs['ship_name'])
        self.ui.call_sign_edit.setText(configs['call_sign'])
        self.ui.captain_name_edit.setText(configs['captain_name'])
        cur_time = time.strftime('%Y/%m/%d', time.localtime(time.time()))
        self.ui.date_edit.setText(cur_time)
    else:
        self.logger.error('[Revert Report]: ' + msg, exc_info=1)
def draw_corner_harris(self):
    img = Utils.fetch_image(ImageURL.CHESS)
    orig = img.copy()
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    dts = cv2.cornerHarris(src=gray, blockSize=2, ksize=3, k=0.0001)
    # Mark strong corner responses in red.
    img[dts > 0.01 * dts.max()] = [0, 0, 255]
    return img, orig
def validate_password(self, realm, username, password):
    if username != self._user or password is None or password == "":
        return False
    if self._hashed_password is not None:
        return Utils.check_hashed_password(password, self._hashed_password)
    elif self._password is not None:
        return password == self._password
    return False
def retrieve_channel_logo(self, channel_id):
    try:
        uri = Utils.build_uri(self._handle, self._vod_uri)
        xml_data = Utils.download_binary(uri)
    except StandardError as e:
        xbmcgui.Dialog().ok('tric', str(e))
        return
    parsed_xml = ET.fromstring(xml_data)
    for channel in parsed_xml.findall('channel'):
        cid = channel.find('stream').text.encode('utf-8').strip()
        name = channel.find('name').text.encode('utf-8').strip()
        if cid == channel_id:
            if name in self._channels_map:
                return self._channels_map[name]['logo']
    return None
def get_image_link(self):
    latest_date = Utils.http_request_json(self.dates_url)["timestamps_int"][0]
    image_url_template = self.images_url.format(date=str(latest_date)[0:8],
                                                datetime=latest_date,
                                                image_name="{image_name}")
    img_url_tab = []
    for s in ["000_000", "000_001", "001_000", "001_001"]:
        img_url_tab.append(image_url_template.format(image_name=s))
    return [latest_date, img_url_tab]
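# A sketch of the assumed data shapes (the timestamp layout is an assumption
# inferred from the [0:8] slice above):
latest_date = 20210314120000        # assumed timestamps_int entry: YYYYMMDDhhmmss as int
date_part = str(latest_date)[0:8]   # -> '20210314', the date= component of the URL
# The four image_name suffixes fetch a 2x2 grid of tiles:
#   000_000 | 000_001
#   001_000 | 001_001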
def __week_offset(self):
    current_week = utils.default_week()
    if current_week <= 0:
        # Preseason
        return nfl.WEEK_PREFIX['PRE']
    elif current_week > 17:
        # Postseason
        return nfl.WEEK_PREFIX['POS']
    else:
        # Regular season
        return nfl.WEEK_PREFIX['REG']
def blockly_update_config(self, code, name=''):
    """
    Fill configuration section in /etc/logic.yaml from header lines in generated code

    Method is called from blockly_save_logic()

    :param code: Python code of the logic
    :param name: name of configuration section; if omitted, the section name is read from the source code
    :type code: str
    :type name: str
    """
    section = ''
    active = False
    config_list = []
    for line in code.splitlines():
        if line.startswith('#comment#'):
            if config_list == []:
                sc, fn, ac, fnco = line[9:].split('#')
                fnk, fnv = fn.split(':')
                ack, acv = ac.split(':')
                active = Utils.to_bool(acv.strip(), False)
                if section == '':
                    section = sc
                    self.logger.info("blockly_update_config: #comment# section = '{}'".format(section))
                config_list.append([fnk.strip(), fnv.strip(), fnco])
        elif line.startswith('#trigger#'):
            sc, fn, tr, co = line[9:].split('#')
            trk, trv = tr.split(':')
            if config_list == []:
                fnk, fnv = fn.split(':')
                fnco = ''
                config_list.append([fnk.strip(), fnv.strip(), fnco])
            if section == '':
                section = sc
                self.logger.info("blockly_update_config: #trigger# section = '{}'".format(section))
            config_list.append([trk.strip(), trv.strip(), co])
        elif line.startswith('"""'):
            # initial .rst-comment reached, stop scanning
            break
        else:
            # non-metadata lines between beginning of code and initial .rst-comment
            pass
    if section == '':
        section = name
    if self._section_prefix != '':
        section = self._section_prefix + section
    self.logger.info("blockly_update_config: section = '{}'".format(section))
    self.logics.update_config_section(active, section, config_list)
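# For reference, a hedged sketch of the header lines this parser expects; the
# concrete key names ('filename', 'cycle') are assumptions inferred from the
# splits above, not confirmed by the source:
# '#comment#' lines: section # key:value # active-key:value # comment
line = "#comment#my_logic#filename: my_logic.py#active: True#generated by blockly"
sc, fn, ac, fnco = line[9:].split('#')  # -> 'my_logic', 'filename: my_logic.py', 'active: True', ...
# '#trigger#' lines: section # key:value # trigger-key:value # comment
line = "#trigger#my_logic#filename: my_logic.py#cycle: 60#run every minute"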
def _prepare_data_for_tx_eep_A5_38_08_02(self, item, tx_eep):
    """
    ### --- Data for A5-38-08 command 2 --- ###

    Eltako Devices:
    FDG14, FDG71L, FKLD61, FLD61, FRGBW71L, FSG14/1-10V, FSG71/1-10V,
    FSUD-230V, FUD14, FUD14-800W, FUD61NP, FUD61NPN, FUD71

    This method prepares the packet data for switching the dimmer device
    on or off, but also calculates the correct dim_speed and dim_value
    data for further use.
    """
    #self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}')
    rorg = 0xa5
    block = 0
    # check if item has attribute block_dim_value
    if self._plugin_instance.has_iattr(item.level.conf, 'block_dim_value'):
        block_value = self._plugin_instance.get_iattr_value(item.level.conf, 'block_dim_value')
        if Utils.to_bool(block_value):
            block = 4
    # check if item has attribute dim_speed
    if self._plugin_instance.has_iattr(item.level.conf, 'dim_speed'):
        dim_speed = self._plugin_instance.get_iattr_value(item.level.conf, 'dim_speed')
        # bound dim_speed values to [0 - 100] %
        dim_speed = max(0, min(100, int(dim_speed)))
        #self.logger.debug(f'enocean-PrepareData: {tx_eep} use dim_speed = {dim_speed} %')
        # calculate dim_speed from percent into integer
        # 0x01 --> fastest speed --> 100 %
        # 0xFF --> slowest speed --> 0 %
        dim_speed = (255 - (254 * dim_speed / 100))
    else:
        # use internal dim_speed of the dim device
        dim_speed = 0
        #self.logger.debug('enocean-PrepareData: no attribute dim_speed --> use internal dim speed')
    if not item():
        # if value is False --> switch off
        dim_value = 0
        payload = [0x02, int(dim_value), int(dim_speed), int(8 + block)]
        #self.logger.debug('enocean-PrepareData: prepare data to switch off for command A5_38_08_02')
    else:
        # check if a reference dim value exists
        if 'ref_level' in item.level.conf:
            dim_value = int(item.level.conf['ref_level'])
            # check range of dim_value [0 - 100] %
            dim_value = max(0, min(100, int(dim_value)))
            #self.logger.debug(f'enocean-PrepareData: {tx_eep} ref_level {dim_value} % found for A5_38_08_02')
        else:
            # set dim_value to 100 % == 0x64
            dim_value = 0x64
            self.logger.debug(
                f'enocean-PrepareData: {tx_eep} no ref_level found! Setting to default 100 %')
        payload = [0x02, int(dim_value), int(dim_speed), int(9 + block)]
    optional = []
    return rorg, payload, optional
def _test_definitions(self, definition_list, definition_dict):
    """
    Test parameter or item-attribute definitions for validity
    """
    definition_list = list(definition_dict.keys())
    # logger.warning(self._log_premsg+"Metadata definition_list = '{}'".format( definition_list ) )
    for definition in definition_list:
        if definition_dict[definition] is not None:
            typ = str(definition_dict[definition].get('type', FOO)).lower()
            # to be implemented: timeframe
            definition_dict[definition]['listtype'] = [FOO]
            definition_dict[definition]['listlen'] = 0
            if definition_dict[definition].get('type', FOO) == 'list':
                logger.debug(self._log_premsg + "definition = '{}' of type '{}'".format(definition, str(definition_dict[definition].get('type', FOO)).lower()))
            if not (typ in META_DATA_TYPES):
                # test for list with specified datatype
                if typ.startswith('list(') and typ.endswith(')'):
                    logger.debug(self._log_premsg + "definition = '{}' of type '{}'".format(definition, str(definition_dict[definition].get('type', FOO)).lower()))
                    definition_dict[definition]['type'] = 'list'
                    listparam = typ[5:]
                    listparam = listparam[:-1].strip().split(',')
                    if len(listparam) > 0:
                        if Utils.is_int(listparam[0]):
                            # a leading integer specifies the list length
                            l = int(listparam[0])
                            if l < 0:
                                l = 0
                            definition_dict[definition]['listlen'] = l
                            listparam.pop(0)
                            if len(listparam) == 0:
                                listparam = [FOO]
                        subtyp = ''
                        if len(listparam) > 0:
                            # validate the declared subtypes
                            listparam2 = []
                            for i in range(0, len(listparam)):
                                if listparam[i] in META_DATA_TYPES:
                                    listparam2.append(listparam[i])
                                else:
                                    listparam2.append(FOO)
                                    logger.error(self._log_premsg + "definition = '{}': Invalid subtype '{}' specified, using '{}' instead".format(definition, listparam[i], FOO))
                            listparam = listparam2
                        definition_dict[definition]['listtype'] = listparam
                else:
                    logger.error(self._log_premsg + "Invalid definition in metadata file '{}': type '{}' for parameter '{}' -> using type '{}' instead".format(self.relative_filename, typ, definition, FOO))
                    definition_dict[definition]['type'] = FOO
            if definition_dict[definition].get('type', FOO) == 'list':
                logger.debug(self._log_premsg + "definition = '{}' list of subtype_list = {}, listlen={}".format(definition, definition_dict[definition]['listtype'], definition_dict[definition]['listlen']))
            else:
                logger.debug(self._log_premsg + "definition = '{}' list of listparam = >{}<, listlen={}".format(definition, definition_dict[definition]['listtype'], definition_dict[definition]['listlen']))
        else:
            logger.info(self._log_premsg + "definition = '{}'".format(definition))
    return
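# Examples of the list type specifier this parser accepts, derived from the
# branches above:
#   'list(2,int)'  -> type='list', listlen=2, listtype=['int']
#   'list(str)'    -> type='list', listlen=0, listtype=['str']
#   'list(2,blub)' -> type='list', listlen=2, listtype=[FOO]  (invalid subtype logged)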
def channel_select(self, url):
    m3u8_data = io.StringIO(unicode(Utils.download_binary(url)))
    last_line = None
    for i in m3u8_data.readlines():
        last_line = i
    # The last playlist entry points at the stream to play.
    uri = url.rsplit('/', 1)[0] + '/' + last_line.strip()
    play_item = xbmcgui.ListItem()
    xbmc.Player().play(uri, play_item, False)
def _convert_to_spread_object(self, owner_name, spread_data):
    result = {
        'year': nfl.YEAR,
        'week': utils.default_week(),
        'owner': owner_name
    }
    for game_id, picks in spread_data.iteritems():
        result[game_id] = picks
    return result
def validate_password(self, realm, username, password):
    """
    Validate a given user/password combination

    :param realm: authentication realm
    :param username: name of the user to check
    :param password: clear-text password to check
    :return: True if the password matches the stored hash for the user
    """
    # self.logger.warning("realm: {}, username: {}, password: {}, self._password: {}, self._hashed_password: {}".format(realm, username, password, self._password, self._hashed_password))
    # self.logger.warning("pwd_hash: {}, self._user_dict: {}".format(pwd_hash, self._user_dict))
    user = self._user_dict.get(username, None)
    if user is None:
        return False
    user_pwd_hash = user.get('password_hash', '')
    pwd_hash = Utils.create_hash(password)
    return pwd_hash == user_pwd_hash
def submitJob(self):
    job_file = os.path.join(self._getOoziePath(self.PkgType.CONFIGS), 'job.properties')
    if not os.path.exists(job_file):
        self.logger.error(
            "Could not find the job.properties file before submitting the oozie job %s" % self.oozie_job.app)
        return  # without the properties file the submit command cannot succeed
    cmd = 'oozie job -config %s -submit' % job_file
    if os.system(cmd) != 0:
        self.logger.error("submit the job for %s failed" % self.oozie_job.app)
        Utils.send_notify_mail(
            'Oozie Submit: Failed',
            'Submit the job for %s failed' % self.oozie_job.app)
    else:
        self.logger.info("submit the job for %s successfully" % self.oozie_job.app)
        Utils.send_notify_mail(
            'Oozie Submit: Succeed',
            'Submit the job for %s successfully' % self.oozie_job.app)
def _prepare_data_for_tx_eep_A5_3F_7F(self, item, tx_eep):
    """
    ### --- Data for A5-3F-7F - Universal Actuator Command --- ###

    Eltako Devices: FSB14, FSB61, FSB71

    This method prepares the packet data for actuating a shutter device.
    The runtime is set in [0 - 255] s.
    """
    self.logger.debug(f'enocean-PrepareData: prepare data for tx_eep {tx_eep}')
    rorg = 0xa5
    block = 0
    # check if item has attribute block_switch
    if self._plugin_instance.has_iattr(item.conf, 'block_switch'):
        block_value = self._plugin_instance.get_iattr_value(item.conf, 'block_switch')
        if Utils.to_bool(block_value):
            block = 4
    # check if item has attribute enocean_rtime
    if self._plugin_instance.has_iattr(item.conf, 'enocean_rtime'):
        rtime = self._plugin_instance.get_iattr_value(item.conf, 'enocean_rtime')
        # rtime [0 - 255] s
        rtime = max(0, min(255, int(rtime)))
        self.logger.debug(f'enocean-PrepareData: {tx_eep} actuator runtime of {rtime} s specified.')
    else:
        # default rtime of 5 s
        rtime = 5
        self.logger.debug(f'enocean-PrepareData: {tx_eep} actuator runtime not specified, set to {rtime} s.')
    # check command (stop, up, or down)
    command = int(item())
    if command == 0:
        # stop moving
        command_hex_code = 0x00
    elif command == 1:
        # move up
        command_hex_code = 0x01
    elif command == 2:
        # move down
        command_hex_code = 0x02
    else:
        self.logger.error(f'enocean-PrepareData: {tx_eep} sending actuator command failed: invalid command {command}')
        return None
    # define payload
    payload = [0x00, rtime, command_hex_code, int(8 + block)]
    optional = []
    return rorg, payload, optional
def map_slides_to_videos(self, videos_metadata, slides_metadata):
    mapping = dict()
    # slides upload threshold: expect slides to be uploaded within N days of the video upload.
    threshold_duration = self.conf.get('slides_upload_window')
    for video_item in videos_metadata:
        video_upload_date = str.split(video_item['startTime'], ' ')[0]
        for slide_item in slides_metadata:
            slide_upload_date = slide_item['fileDate']
            diff_days = Utils.date_difference(slide_upload_date, video_upload_date)
            if 0 <= diff_days <= threshold_duration:
                mapping[video_item['ttid']] = slide_item['filePath']
    return mapping
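# A hedged illustration of the matching rule above (field values invented;
# assumes Utils.date_difference returns a whole-day difference and that
# slides_upload_window is expressed in days):
videos_metadata = [{'ttid': 101, 'startTime': '2021-03-01 10:00:00'}]
slides_metadata = [{'fileDate': '2021-03-03', 'filePath': '/slides/lec1.pdf'}]
# With slides_upload_window >= 2, map_slides_to_videos(...) -> {101: '/slides/lec1.pdf'}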
def get_nearest_cities(self):
    nearest_cities = Utils.extract_contents_of_nested_brackets(self.payload_location)
    if len(nearest_cities) < 1:
        tm_logger.warn("Couldn't extract nearest_cities from payload: {}".format(self.payload_location))
        nearest_cities = ['NULL']
    return nearest_cities
def run(self):
    self.logger.info("Start to exec hive job")
    #import time
    #ts = Utils.get_unix_timestamp(time.time())
    hive_result = self.exec_hive()
    if hive_result == 0:
        redis_result = self.import2redis()
        if redis_result:
            # msg = json.dumps(Utils.convert_object(TopNewsUpdate(ts, NewsUpdateStatus.SUCCESS)))
            # self.send_msg(msg)
            Utils.send_notify_mail('Top_News: All Jobs Succeeded',
                                   'All the jobs executed successfully')
        else:
            # msg = json.dumps(Utils.convert_object(TopNewsUpdate(ts, NewsUpdateStatus.FAILED)))
            # self.logger.info("Exception: failed to import the data to redis, send failed msg to api service")
            # self.send_msg(msg)
            Utils.send_notify_mail(
                'Top_News: Redis Import Failed',
                'Import the top news data to redis failed, please check the log')
    else:
        self.logger.info("Exception: failed to exec hive, skip import result to redis and notify kafka")
        # msg = json.dumps(Utils.convert_object(TopNewsUpdate(ts, NewsUpdateStatus.FAILED)))
        # self.send_msg(msg)
        Utils.send_notify_mail('Top_News: Hive Job Failed',
                               'Hive job exec failed, please check the job log')
def test_create_hash(self):
    with self.assertRaises(Exception):
        Utils.create_hash(None)
    self.assertEqual(Utils.create_hash(''),
                     'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e')
    self.assertEqual(Utils.create_hash('42'),
                     '39ca7ce9ecc69f696bf7d20bb23dd1521b641f806cc7a6b724aaa6cdbffb3a023ff98ae73225156b2c6c9ceddbfc16f5453e8fa49fc10e5d96a3885546a46ef4')
    self.assertEqual(Utils.create_hash('very_secure_password'),
                     '1245a9633edf47b7091f37c4d294b5be5a9936c81c5359b16d1c4833729965663f1943ef240959c53803fedef7ac19bd59c66ad7e7092d7dbf155ce45884607d')
    self.assertEqual(Utils.create_hash('1245a9633edf47b7091f37c4d294b5be5a9936c81c5359b16d1c4833729965663f1943ef240959c53803fedef7ac19bd59c66ad7e7092d7dbf155ce45884607d'),
                     '00faf4a142f087e55edf6e91ea333d9a4bcd9b2d6bba8fab42869c6e00e28a3acba6d5fe3495f037221d633e01b3c7baa6e915028407548f77b5b9710899bfbe')
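# The expected digest for '' above is exactly the SHA-512 of the empty string,
# which suggests (assumption, not confirmed by the source) that
# Utils.create_hash is a plain SHA-512 hexdigest:
import hashlib
assert hashlib.sha512(''.encode()).hexdigest() == \
    'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'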
def __init__(self, sh):
    """
    Initializes the plugin.

    If the sh object is needed at all, the method self.get_sh() should be
    used to get it. There should be almost no need for a reference to the
    sh object any more.

    Plugins have to use the new way of getting parameter values: use the
    SmartPlugin method get_parameter_value(parameter_name). Anywhere within
    the plugin you can get the configured (and checked) value for a
    parameter by calling self.get_parameter_value(parameter_name). It
    returns the value in the datatype that is defined in the metadata.
    """
    # Call init code of parent class (SmartPlugin)
    super().__init__()
    from bin.smarthome import VERSION
    if '.'.join(VERSION.split('.', 2)[:2]) <= '1.5':
        self.logger = logging.getLogger(__name__)
    self.items = Items.get_instance()
    self.updates_allowed = self.get_parameter_value('update')
    self.ip = self.get_parameter_value('ip')
    self.port = self.get_parameter_value('port')
    self.hashed_password = self.get_parameter_value('hashed_password')
    if self.hashed_password is None or self.hashed_password == '':
        self.logger.warning("CLI: You should set a password for this plugin.")
        self.hashed_password = None
    elif self.hashed_password.lower() == 'none':
        self.hashed_password = None
    elif not Utils.is_hash(self.hashed_password):
        self.logger.error("CLI: Value given for 'hashed_password' is not a valid hash value. Login will not be possible")
    name = 'plugins.' + self.get_fullname()
    self.server = Tcp_server(interface=self.ip, port=self.port, name=name, mode=Tcp_server.MODE_TEXT_LINE)
    self.server.set_callbacks(incoming_connection=self.handle_connection)
    self.commands = CLICommands(self.get_sh(), self.updates_allowed, self)
    self.alive = False
    # On initialization error use:
    #   self._init_complete = False
    #   return
    # if plugin should start even without web interface
    self.init_webinterface(WebInterface)
def __init__(self, smarthome, update='False', ip='127.0.0.1', port=2323, hashed_password=''):
    """
    Constructor

    :param smarthome: smarthomeNG instance
    :param update: Flag: updates allowed
    :param ip: IP to bind on
    :param port: port to bind on
    :param hashed_password: hashed password that is required to log on
    """
    self.logger = logging.getLogger(__name__)
    self.items = Items.get_instance()
    if hashed_password is None or hashed_password == '':
        self.logger.warning("CLI: You should set a password for this plugin.")
        hashed_password = None
    elif hashed_password.lower() == 'none':
        hashed_password = None
    elif not Utils.is_hash(hashed_password):
        self.logger.error("CLI: Value given for 'hashed_password' is not a valid hash value. Login will not be possible")
    self.server = Tcp_server(interface=ip, port=port, name='CLI', mode=Tcp_server.MODE_TEXT_LINE)
    self.server.set_callbacks(incoming_connection=self.handle_connection)
    self.sh = smarthome
    self.updates_allowed = Utils.to_bool(update)
    self.hashed_password = hashed_password
    self.commands = CLICommands(self.sh, self.updates_allowed, self)
    self.alive = False
def hough_circles_process(self):
    image = Utils.fetch_image(url=ImageURL.OPENCV_LOGO)
    orig = image.copy()
    circles = self.get_hough_circles(image)
    for [x, y, radius] in circles[0, :]:
        cv2.circle(img=image, center=(x, y), radius=radius, color=(0, 0, 255), thickness=2)
    return image, orig
def _expand_listvalues(self, param, value):
    """
    If a parameter is defined as a list, but the value is of a basic
    datatype, the value is expanded to a list. In all other cases, the
    value is returned unchanged.
    """
    result = value
    if param in self._paramlist:
        typ = self.get_parameter_type(param)
        if (typ == 'list') and (not isinstance(value, list)):
            result = Utils.string_to_list(value)
        # if (typ == 'list'):
        #     logger.warning(self._log_premsg+"_expand_listvalues: value = >{}<, type(value) = >{}<, result = >{}<, type(result) = >{}<".format(value, type(value), result, type(result)))
    return result
def on_reset_clicked(self, action, param):
    """
    Reset fields to default parameters
    """
    default = Utils.read_config("default.json")
    for k, v in zip(["lst", "min", "max", "wrd", "sep"],
                    [self.lst, self.min, self.max, self.wrd, self.sep]):
        v.set_text(str(default[k]))
    for k, v in zip(["cap", "num", "sym"], [self.cap, self.num, self.sym]):
        v.set_active(default[k])
    self.password.set_text("")
    self.password_length.set_text("")
    copyfile(join(Utils.CONFIG_DIR, "default.json"),
             join(Utils.CONFIG_DIR, "settings.json"))
def process():
    parser = argparse.ArgumentParser()
    parser.add_argument('--contours', '-co', action='store_true')
    parser.add_argument('--circle', '-ci', action='store_true')
    parser.add_argument('--lines', '-l', action='store_true', default=True)
    args = parser.parse_args()
    shapses_basic = ShapsesBasic()
    orig = None
    result = None
    if args.contours:
        result, orig = shapses_basic.contours_process()
    elif args.circle:
        result, orig = shapses_basic.hough_circles_process()
    elif args.lines:
        result, orig = shapses_basic.hough_lines_process()
    Utils.show_image_compare(orig, result)
def encode_headers(self, node: 'Node', bitarray: 'BitArray', char_size: int) -> None:
    if node.isLeaf():
        # Leaf: write a 1 bit followed by the symbol, zero-padded to char_size bits.
        bitarray.append('0b1')
        encoded_char = Utils.encode_char(node.symbol)
        encoded_char = "".join("0" for n in range(char_size - len(encoded_char))) + encoded_char
        bitarray.append(f"0b{encoded_char}")
    else:
        # Internal node: write a 0 bit, then both subtrees in pre-order.
        bitarray.append('0b0')
        self.encode_headers(node.left, bitarray, char_size)
        self.encode_headers(node.right, bitarray, char_size)
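# For context, a minimal sketch of the matching pre-order header decoder (the
# real implementation is decode.decode_headers used in get_decoded_text below;
# the Node constructor arguments and Utils.decode_char are assumptions):
from bitstring import BitStream

def decode_headers_sketch(stream: BitStream, char_size: int) -> 'Node':
    # Mirror of encode_headers: a 1 bit introduces a fixed-width leaf symbol,
    # a 0 bit introduces an internal node followed by its two subtrees.
    if stream.read('bool'):
        symbol_bits = stream.read(f'bin:{char_size}')
        return Node(symbol=Utils.decode_char(symbol_bits))  # assumed helpers
    left = decode_headers_sketch(stream, char_size)
    right = decode_headers_sketch(stream, char_size)
    return Node(left=left, right=right)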
def get_bool(self, key):
    """
    Return the value for a global key as a bool

    :param key: global key to look up (in section 'plugin' or 'module')
    :type key: str

    :return: value for the key
    :rtype: bool
    """
    if self.addon_metadata is None:
        return False
    return Utils.to_bool(self.addon_metadata.get(key, ''))
def get_decoded_text(file) -> str:
    decCont = BitArray()
    for line in file:
        decCont.append(line)
    x = BitStream(decCont)
    char_size = Utils.get_encoded_file_char_size(x)
    decoded_header_2 = decode.decode_headers(x, char_size)
    # Rebuild the code table from the tree and invert it for decoding.
    hash_table = {}
    decoded_header_2.generate_hashT(hash_table, "")
    inv_map = {v: k for k, v in hash_table.items()}
    return decode.decode_text(decCont.bin[x.bitpos:x.len], inv_map)