def main():
    """Entry point: fetch config data, then download and upload media.

    If the working folder check fails, report the error. Otherwise, when
    'red_media' already holds more than 2 entries, upload immediately;
    else build new media from each JSON source first, then upload.
    """
    if check_folder():
        config.get_data()
        # Enough media already present -- upload what we have and stop.
        if len(__os.listdir('red_media')) > 2:
            start_upload()
            return
        for j in JSONs:
            get_links(j)
            write_meme()
            dload(j)
        start_upload()
    else:
        # check_folder() could not create/verify the working folder.
        # Fixed typo in the user-facing message: "occured" -> "occurred".
        print("Error has occurred in creating file")
def get_data(id_, k=None):
    """Read the options file stored under *id_*; optionally extract one key.

    :param id_: directory containing ``config.OPTIONS_FILE``
    :param k: optional key to pull out of the loaded mapping
    :return: the loaded data (or the value under *k*), or None on failure
    """
    data = None
    try:
        data = config.get_data(os.path.join(id_, config.OPTIONS_FILE))
        # Idiom fix: compare to None with `is not`, never `!=`.
        if k is not None:
            data = data.get(k)
    except TestsuiteError:
        # Best-effort read: on failure return None. (The original built an
        # unused message string meaning "failed to read data"; dead code
        # removed, intent kept here.)
        pass
    return data
def fetch_grid_size():
    """Refresh the module-level ``grid`` dict from the '/grid' endpoint.

    On success, ``grid`` gets exactly the 'width' and 'height' fields of
    the response and None is returned; on a fetch failure the error's
    response object is returned instead.
    """
    global grid
    try:
        payload = webconfig.get_data(client, '/grid')
    except webconfig.GetException as err:
        return err.response
    grid = {'width': payload['width'], 'height': payload['height']}
    return None
def add_input(self, data_name: tuple, data_input: tuple):
    """Add a single input row to the database, updating it if it exists.

    :param data_name: tuple of column names to be inputted
    :param data_input: data to be inputted
    :raises ValueError: when ISO_Code or News_list is missing from data_name
    :return: None
    """
    # test this command
    if "ISO_Code" not in data_name or "News_list" not in data_name:
        # Message fixed to name the column actually checked ("News_list").
        raise ValueError(
            "Passed invalid inputs -- no ISO_Code or News_list found")
    # BUG FIX: tuple("iso_code") exploded the string into single characters
    # ('i', 's', 'o', '_', ...). Use a one-element tuple with the real
    # column name, matching _parse_commands.
    data = get_data(self.schema, self.name, ("ISO_Code",))
    if data_input in data:
        self._update_input(data_input)
    else:
        super().add_input(data_name, data_input)
def _parse_commands(self, data_input: tuple, pos_iso: int) -> (list, list):
    """Split input rows into ones to add and ones to update.

    Rows whose ISO code is already present in the database are queued for
    an update; all others are queued for insertion.

    :param data_input: tuple of all input-ed data rows
    :param pos_iso: position of the iso_code (the key for this database)
    :return: (rows to add, rows to update)
    """
    to_add = list()
    to_update = list()
    # Idiom fix: tuple(("ISO_Code",)) was a redundant wrapper around a
    # tuple literal.
    existing = get_data(self.schema, self.name, ("ISO_Code",))
    # NOTE(review): stored codes are coerced to int before the membership
    # test -- assumes item[pos_iso] is an int too; confirm with callers.
    existing = [int(row[0]) for row in existing]
    for item in data_input:
        if item[pos_iso] in existing:
            to_update.append(item)
        else:
            to_add.append(item)
    return to_add, to_update
cosB = ((b**2) - (a**2) - (c**2))/(-2*a*c) return math.degrees(math.acos(cosB)) def refine_raw_data(points): angles = {} [A, B, C, D, E, F] = points angles['lra'] = get_angle(B, A, C) angles['lla'] = get_angle(B, C, A) angles['ura'] = get_angle(D, A, C) angles['ula'] = get_angle(D, C, A) return angles if __name__ == '__main__': from config import get_data fuzzy_rules = readFuzzySetFile('smile_data_set.data') data = get_data() correct = 0 total = 0 for subject, states in data.iteritems(): for face, points in states.iteritems(): print subject, ': ', face angles = refine_raw_data(points) print 'angles: ', angles upper_max = max(angles['ura'], angles['ula']) lower_max = max(angles['lra'], angles['lla']) result = [] for state, fuzzy_rule in fuzzy_rules.iteritems():
def get_db_schema(self):
    """Path of the 'db-schema' directory under the project root."""
    return os.path.join(self.get_root(), 'db-schema')


def get_config_file(self):
    """Path of the 'config.ini' file under the project root."""
    return os.path.join(self.get_root(), 'config.ini')


if __name__ == "__main__":
    import config

    config = config.Config()
    cmd = sys.argv[1]
    # Map each CLI sub-command to the accessor that produces its path;
    # unknown commands fall through silently, as before.
    dispatch = {
        "root": config.get_root,
        "src": config.get_src,
        "data": config.get_data,
        "reports": config.get_reports,
        "profile_data": config.get_profile_data,
        "profile_reports": config.get_profile_reports,
        "db_files": config.get_db_files,
        "db_schema": config.get_db_schema,
    }
    if cmd in dispatch:
        print(dispatch[cmd]())
# Apply pruning to every tracked variable, keeping the non-zero index masks
# so retraining gradients can be masked the same way.
for v_name in variables_list:
    arr, nzidx[v_name], _ = prune_tf(variables_list[v_name], percent)
    sess.run(variables_list[v_name].assign(arr))

# Retrain networks
cross_entropy = tf.get_collection('cross_entropy')[0]
trainer = tf.train.AdamOptimizer(1e-4, name='retrain_trainer')
grads_and_vars = trainer.compute_gradients(cross_entropy)
# Zero the gradients of pruned weights so they stay pruned during retraining.
grads_and_vars = apply_prune_on_grads(grads_and_vars, nzidx)
retrain_step = trainer.apply_gradients(grads_and_vars)
accuracy = tf.get_collection('accuracy')[0]

# The new optimizer created slot variables; initialize only the ones that
# are not initialized yet (idiom fix: `not ...` instead of `== False`).
for var in tf.global_variables():
    if not tf.is_variable_initialized(var).eval():
        sess.run(tf.variables_initializer([var]))

for step in range(config.retrain_step):
    image, label = config.get_data()
    # Hoisted the duplicated feed dict; dropout disabled during retraining.
    feed = {x: image, y: label, "keep_prob:0": 1}
    sess.run(retrain_step, feed_dict=feed)
    if (step + 1) % 100 == 0:
        retrain_accuracy = accuracy.eval(feed_dict=feed)
        print("step %d, training accuracy %g" % (step + 1, retrain_accuracy))

# Prune
sparse_w = gen_sparse_dict(variables_list)
image, label = config.get_data()
retrain_accuracy = accuracy.eval(feed_dict={x: image, y: label, "keep_prob:0": 1})
print("After pruning, training accuracy %g" % (retrain_accuracy))

# Initialize new variables in a sparse form
for var in tf.global_variables():
    if not tf.is_variable_initialized(var).eval():
        sess.run(tf.variables_initializer([var]))