def sync(request):
    """Trigger a Flickr photo sync, then redirect.

    Redirects to the ``redirect`` query parameter when present,
    otherwise to the photo index page.
    """
    import sync
    try:
        sync.sync()
    except Exception:
        # BUG FIX: the original called logging.exception(msg=...,
        # exception=True); ``exception`` is not a valid keyword and would
        # raise TypeError inside the handler. logging.exception() already
        # records the active traceback. Bare ``except:`` narrowed to
        # Exception so SystemExit/KeyboardInterrupt still propagate.
        logging.exception('error syncing flickr photos')
    return HttpResponseRedirect(
        request.GET.get('redirect') if 'redirect' in request.GET
        else reverse('photos.index'))
def sync_wrapper():
    """Entry point: run a sync against a hostname taken from the CLI.

    Usage: ``prog <command> [hostname]`` — the hostname is ``sys.argv[2]``
    and defaults to "localhost" when not supplied.
    """
    import sync
    # BUG FIX: the original guard checked len(sys.argv) < 2 but then read
    # sys.argv[2], raising IndexError whenever exactly two argv entries
    # were present. The guard now matches the index actually used.
    if len(sys.argv) < 3:
        hostname = "localhost"
    else:
        hostname = sys.argv[2]
    sync.sync(hostname)
def module_factory(args, env, db, req):
    """Build and wire up the request-handler module for this request (Trac).

    Looks up ``(module_name, constructor_name, need_svn)`` in the
    module-level ``modules`` table keyed by the ``mode`` request argument
    (default 'wiki'), instantiates the handler, and attaches the
    per-request environment: args, env, log, req, db and a permission
    cache. Returns the fully initialised module object.
    """
    mode = args.get('mode', 'wiki')
    module_name, constructor_name, need_svn = modules[mode]
    module = __import__(module_name, globals(), locals())
    constructor = getattr(module, constructor_name)
    module = constructor()
    module.pool = None
    module.args = args
    module.env = env
    module.log = env.log
    module.req = req
    module._name = mode
    module.db = db
    module.perm = perm.PermissionCache(module.db, req.authname)
    module.perm.add_to_hdf(req.hdf)
    module.authzperm = None
    # Only open the subversion repository for the modules that really
    # need it. This saves us some precious time.
    if need_svn:
        import sync
        module.authzperm = authzperm.AuthzPermission(env, req.authname)
        repos_dir = env.get_config('trac', 'repository_dir')
        pool, rep, fs_ptr = open_svn_repos(repos_dir)
        module.repos = rep
        module.fs_ptr = fs_ptr
        # Bring the repository cache up to date before handling the request.
        sync.sync(module.db, rep, fs_ptr, pool)
        module.pool = pool
    return module
def run():
    """CLI dispatcher: courses / setup / download / sync / config.

    With no arguments (or an unknown command) prints the help text.
    ``download`` optionally takes an output directory as argv[2],
    defaulting to the current working directory. ``config`` runs an
    interactive prompt to change the download directory and/or the
    selected courses.
    """
    if len(sys.argv) <= 1:
        show_help()
    else:
        command = sys.argv[1].lower()
        if command == "courses":
            print_courses()
        elif command == "setup":
            setup()
        elif command == "download":
            course_ids = get_course_choices("Select courses to download")
            # Optional output directory as the second positional argument.
            if 3 <= len(sys.argv):
                output_dir = os.path.expanduser(sys.argv[2])
            else:
                output_dir = os.getcwd()
            for course_id in course_ids:
                download_files(course_id, output_dir)
        elif command == "sync":
            config = get_config()
            sync(config)
        elif command == "config":
            verify = [{
                "type": "rawlist",
                "message": "Select which variables you want to change",
                "name": "verification",
                "choices": ["Download directory", "Selected courses", "Both", "Exit"]
            }]
            answer = prompt(verify)["verification"]
            if answer == "Exit":
                exit()
            if answer == "Download directory" or answer == "Both":
                directory_question = [({
                    "type": "input",
                    "message": "Enter your preferred download directory. Leave empty to reset.",
                    "name": "directory"
                })]
                directory_answer = prompt(directory_question)
                output_directory = directory_answer["directory"]
                # Empty answer resets to the default location.
                if output_directory == "":
                    output_directory = "~/UforaFileFetcher"
                write_to_config("output_directory", output_directory)
            if answer == "Selected courses" or answer == "Both":
                course_ids = get_course_choices(
                    "Select courses to add to config")
                write_to_config("courses", course_ids)
            # NOTE(review): looks like leftover debug output — confirm
            # whether printing the menu answer is intentional.
            print(answer)
        else:
            show_help()
def importXPRSSubjects(school_id, start, end, increase, checkLevels=False, levels=["01", "02", "03", "04", "05", "06"]):
    """Scrape XPRS subjects over an id range and upsert them (Python 2).

    Upserts each notice by name into db.notices, collecting the db ids,
    then upserts the subject into db.xprs_subjects keyed on
    xprs_subject_id. Returns True on success, False on error (logged).

    NOTE(review): ``levels`` is a mutable default argument; it is only
    passed through here, but confirm the callee does not mutate it.
    """
    try:
        objectList = xprsSubjectsApi.xprs_subjects(start, end, increase, school_id, checkLevels, levels)
        for row in objectList:
            unique = {"xprs_subject_id": row["xprs_subject_id"]}
            # Upsert each notice by name and collect its database id.
            notice_ids = []
            for x in row["notices"]:
                status = sync.sync(db.notices, {"name": x}, {"name": x})
                notice_ids.append(status["_id"])
            level = row["level"]
            # Subjects whose code contains "-" get the sentinel level "-".
            if "-" in row["code"]:
                level = "-"
            element = {
                "xprs_subject_id": row["xprs_subject_id"],
                "context_card_id": row["context_card_id"],
                "name": row["name"],
                "code": row["code"].replace("-", ""),
                "subject_sub_type": row["subject_sub_type"],
                "level": level,
                "code_full": row["code_full"],
                "notices": row["notices"],
                "notice_ids": notice_ids,
                "code_full_name": row["code_full_name"]
            }
            status = sync.sync(db.xprs_subjects, unique, element)
        return True
    except Exception, e:
        error.log(__file__, False, str(e))
        return False

#importXPRSSubjects(517, 6043, 6048, 1, True, ["01"])
#importXPRSSubjects(517, 7000, 7019, 1, True, ["01", "02"])
#importXPRSSubjects(517, 1453150702, 1453150720, 1, False, False)
#importXPRSSubjects(517, 1453150720, 1453150750, 1, False, False)
#importXPRSSubjects(517, 1453150750, 1453150790, 1, False, False)
#importXPRSSubjects(517, 1453150790, 1453150830, 1, False, False)
#importXPRSSubjects(517, 1453150830, 1453150867, 1, False, False)
def test_elastic_sync_not_newer(self):
    """A change present only in elastic must be copied into cassandra."""
    source = DumbElastic(sync.ChangeList([
        sync.Change('1', datetime.datetime.now())
    ]))
    target = DumbCassandra(sync.ChangeList([]))
    count_before = target.total_elements()
    sync.sync(source, target, datetime.datetime.now())
    self.assertGreater(target.total_elements(), count_before)
def authorized():
    """OAuth2 redirect endpoint: exchange the auth code for a token.

    On a provider error, bounce straight back to the app. Otherwise POST
    the authorization code to the token endpoint, kick off a sync with
    the new access token, clear the cache and redirect to the app.
    """
    if 'error' in request.args:
        return redirect('/app')
    auth_code = request.args['code']
    # NOTE(review): the client secret travels in the query string —
    # confirm the provider requires this rather than a POST body.
    url = '%s/oauth/token?client_id=%s&client_secret=%s&code=%s&grant_type=authorization_code' \
        % (EXT_API, CLIENT_ID, CLIENT_SECRET, auth_code)
    response = requests.post(url).json()
    sync(response['access_token'])
    cache.clear()
    return redirect('/app')
def importXPRSSubject(school_id, xprs_subject_id):
    """Import one XPRS subject via its context card "XF<id>" (Python 2).

    Upserts the subject's notices into db.notices and the subject itself
    into db.xprs_subjects. Returns True on success, False on error.
    """
    try:
        objectList = context_card.xprs_subject({
            "school_id": school_id,
            "context_card_id": "XF" + str(xprs_subject_id)
        })
        if objectList["status"] == "ok":
            row = objectList["xprs_subject"]
            unique = {"xprs_subject_id": row["xprs_subject_id"]}
            # Upsert each notice by name and collect its database id.
            notice_ids = []
            for x in row["notices"]:
                status = sync.sync(db.notices, {"name": x}, {"name": x})
                notice_ids.append(status["_id"])
            level = row["level"]
            # Subjects whose code contains "-" get the sentinel level "-".
            if "-" in row["code"]:
                level = "-"
            element = {
                "xprs_subject_id": row["xprs_subject_id"],
                "context_card_id": row["context_card_id"],
                "name": row["name"],
                "code": row["code"].replace("-", ""),
                "subject_sub_type": row["subject_sub_type"],
                "level": level,
                "code_full": row["code_full"],
                "notices": row["notices"],
                "notice_ids": notice_ids,
                "code_full_name": row["code_full_name"]
            }
            status = sync.sync(db.xprs_subjects, unique, element)
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def importSubjects(school_id, branch_id):
    """Import a branch's subjects and school-subject links (Python 2).

    Upserts each subject (keyed on subject_id + term) into db.subjects
    and a (school, branch, term, subject, type) link row into
    db.school_subjects. Returns True on success, False on error.
    """
    try:
        objectList = subjectApi.subjects({
            "school_id": school_id,
            "branch_id": branch_id
        })
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            for row in objectList["subjects"]:
                unique = {
                    "subject_id": str(row["subject_id"]),
                    "term": objectList["term"]["value"]
                }
                element = {
                    "subject_id": str(row["subject_id"]),
                    "abbrevation": row["initial"],
                    "name": row["name"]
                }
                status = sync.sync(db.subjects, unique, element)
                # Link row: the unique key doubles as the stored element.
                unique = {
                    "school_id": str(row["school_id"]),
                    "branch_id": str(row["branch_id"]),
                    "term": objectList["term"]["value"],
                    "subject_id": str(row["subject_id"]),
                    "type": row["type"]
                }
                status = sync.sync(db.school_subjects, unique, unique)
                # Possible Connect with XPRS Subjects
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def main(requirements_path):
    """Sync every package named in the YAML requirements file.

    Conflicts reported by anaconda.org are logged and skipped rather
    than treated as fatal.
    """
    with open(requirements_path, 'rt') as handle:
        packages = yaml.safe_load(handle)
    token = os.getenv('BINSTAR_TOKEN')
    for entry in packages:
        name = entry['name']
        print(name)
        try:
            sync(FROM, TO, name, token, to_label='main')
        except Conflict:
            print('Skipping because of a conflict anaconda.org')
def sync_now():
    """Run a full sync using the saved configuration.

    Ensures the chunk and backup working directories exist, then hands
    off to sync.sync(). Returns a short status string for the caller.
    """
    conf = data.get_conf()
    if 'user' not in conf or 'dir' not in conf:
        return "Conf. incomplete"
    # def sync(user, secret, friends, user_dir, chunk_dir, backup_dir):
    # makedirs(exist_ok=True) replaces the racy exists()/mkdir() pairs
    # (another process creating the dir between the two calls would have
    # raised FileExistsError).
    chunk_dir = os.path.join(data.data_dir, "chunks")
    os.makedirs(chunk_dir, exist_ok=True)
    backup_dir = os.path.join(data.data_dir, "backups")
    os.makedirs(backup_dir, exist_ok=True)
    sync.sync(conf['user'], conf['secret'], conf['friends'], conf['dir'],
              chunk_dir, backup_dir)
    return "Okay!"
def test_cassandra_sync_having_a_newer(self):
    """A change that is newer in elastic must not be overwritten."""
    older = sync.Change('1', datetime.datetime(2015, 5, 8, 23, 9))
    newer = sync.Change('1', datetime.datetime(2015, 5, 8, 23, 10))
    cassandra = DumbCassandra(sync.ChangeList([older]))
    elastic = DumbElastic(sync.ChangeList([newer]))
    count_before = elastic.total_elements()
    sync.sync(cassandra, elastic, datetime.datetime.now())
    self.assertEqual(elastic.total_elements(), count_before)
def importXPRSSubjects(school_id, start, end, increase, checkLevels=False, levels=["01", "02", "03", "04", "05", "06"]):
    """Scrape XPRS subjects over an id range and upsert them (Python 2).

    Duplicate of the compact importXPRSSubjects variant in this file.
    Upserts notices into db.notices, then the subject into
    db.xprs_subjects. Returns True on success, False on error (logged).

    NOTE(review): ``levels`` is a mutable default argument; it is only
    passed through here, but confirm the callee does not mutate it.
    """
    try:
        objectList = xprsSubjectsApi.xprs_subjects(start, end, increase, school_id, checkLevels, levels)
        for row in objectList:
            unique = {"xprs_subject_id": row["xprs_subject_id"]}
            # Upsert each notice by name and collect its database id.
            notice_ids = []
            for x in row["notices"]:
                status = sync.sync(db.notices, {"name": x}, {"name": x})
                notice_ids.append(status["_id"])
            level = row["level"]
            # Subjects whose code contains "-" get the sentinel level "-".
            if "-" in row["code"]:
                level = "-"
            element = {
                "xprs_subject_id": row["xprs_subject_id"],
                "context_card_id": row["context_card_id"],
                "name": row["name"],
                "code": row["code"].replace("-", ""),
                "subject_sub_type": row["subject_sub_type"],
                "level": level,
                "code_full": row["code_full"],
                "notices": row["notices"],
                "notice_ids": notice_ids,
                "code_full_name": row["code_full_name"]
            }
            status = sync.sync(db.xprs_subjects, unique, element)
        return True
    except Exception, e:
        error.log(__file__, False, str(e))
        return False

#importXPRSSubjects(517, 6043, 6048, 1, True, ["01"])
#importXPRSSubjects(517, 7000, 7019, 1, True, ["01", "02"])
#importXPRSSubjects(517, 1453150702, 1453150720, 1, False, False)
#importXPRSSubjects(517, 1453150720, 1453150750, 1, False, False)
#importXPRSSubjects(517, 1453150750, 1453150790, 1, False, False)
#importXPRSSubjects(517, 1453150790, 1453150830, 1, False, False)
#importXPRSSubjects(517, 1453150830, 1453150867, 1, False, False)
def heal(work, n=100):
    """Wreck the cluster, wait for sync, then verify by quorum (Python 2).

    Runs ``trainwreck`` over the work items, triggers a sync, sleeps to
    let replication settle, then prints each node's state.
    """
    trainwreck(work, n)
    sync.sync()
    time.sleep(5)
    results = get_all(nodes)
    for node, r in results.items():
        format_remote(node, r, n)
    else:
        # NOTE(review): this for/else always runs (the loop has no
        # ``break``) and reuses the LAST loop value of ``r`` as if it
        # were a single response — this looks like an indentation bug;
        # confirm the intended scope.
        id = r.json()['rows'][0]['id']
        doc = get_one_by_quorum(nodes, id)
        if doc:
            print "Got %s by quorum!" % id
        else:
            print "Failed to get %s by quorum" % id
def importSubjectList(school_id, start, end):
    """Scrape subjects over an id range into db.subjects (Python 2).

    Returns True on success, False on error (logged).
    """
    try:
        objectList = subjectListApi.subject_list(start, end, school_id)
        for row in objectList:
            unique = {"subject_id": row["subject_id"]}
            # NOTE(review): notice_ids is never used in this function.
            notice_ids = []
            element = {
                "subject_id": row["subject_id"],
                "name": row["name"],
                "abbrevation": row["abbrevation"]
            }
            status = sync.sync(db.subjects, unique, element)
        return True
    except Exception, e:
        error.log(__file__, False, str(e))
        return False

#importSubjectList(517, 500, 540)
#importSubjectList(517, 540, 580)
#importSubjectList(517, 580, 615)
#importSubjectList(517, 1363322359, 1363322359)
#importSubjectList(517, 1364002147, 1364002147)
#importSubjectList(517, 1452164869, 1452164869)
def importSchoolState(school_id):
    """Fetch a school's state and upsert an active flag (Python 2).

    Upserts into db.schools keyed on school_id. Errors are logged;
    note the success path returns None (no explicit return).
    """
    try:
        info = schoolStateApi.school_state({"school_id": str(school_id)})
        if info is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in info:
            error.log(__file__, False, "Unknown Object")
            return False
        if info["status"] == "ok":
            unique = {"school_id": str(school_id)}
            element = {
                "school_id": str(school_id),
                "state": info["state"],
                # Stored as the strings "True"/"False", not booleans.
                "active": "True" if info["state"] == "ok" else "False"
            }
            status = sync.sync(db.schools, unique, element)
        else:
            if "error" in info:
                error.log(__file__, False, info["error"])
            else:
                error.log(__file__, False, "Unknown Error")
    except Exception, e:
        error.log(__file__, False, str(e))
def importSubjectList(school_id, start, end):
    """Scrape subjects over an id range into db.subjects (Python 2).

    Duplicate of the compact importSubjectList variant in this file.
    Returns True on success, False on error (logged).
    """
    try:
        objectList = subjectListApi.subject_list(start, end, school_id)
        for row in objectList:
            unique = {"subject_id": row["subject_id"]}
            # NOTE(review): notice_ids is never used in this function.
            notice_ids = []
            element = {
                "subject_id": row["subject_id"],
                "name": row["name"],
                "abbrevation": row["abbrevation"]
            }
            status = sync.sync(db.subjects, unique, element)
        return True
    except Exception, e:
        error.log(__file__, False, str(e))
        return False

#importSubjectList(517, 500, 540)
#importSubjectList(517, 540, 580)
#importSubjectList(517, 580, 615)
#importSubjectList(517, 1363322359, 1363322359)
#importSubjectList(517, 1364002147, 1364002147)
#importSubjectList(517, 1452164869, 1452164869)
def main():
    """Singer-tap entry point for the Business Central OData feed.

    Builds an authenticated OData service from the config, then either
    runs discovery (--discover dumps the catalog) or syncs from the
    provided/derived catalog and state.
    """
    args = utils.parse_args(REQUIRED_CONFIG_KEYS)
    url = "https://api.businesscentral.dynamics.com/v2.0/{}/{}/api/BCSItera/dashboards/v1.0/".format(args.config["tenantId"], args.config["environment"])
    service = ODataService(
        url,
        reflect_entities=True,
        auth=requests.auth.HTTPBasicAuth(args.config["username"], args.config["password"])
    )
    # NOTE(review): the fallback uses do_discover() here but discover()
    # below — confirm both helpers exist and which one is canonical.
    catalog = args.catalog or do_discover(service)
    if args.discover:
        catalog = discover(service)
        catalog.dump()
    else:
        sync(service, catalog, args.state, args.config["start_date"],)
def importSchoolState(school_id):
    """Fetch a school's state and upsert an active flag (Python 2).

    Duplicate of the compact importSchoolState variant in this file.
    Errors are logged; the success path returns None.
    """
    try:
        info = schoolStateApi.school_state({
            "school_id": str(school_id)
        })
        if info is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in info:
            error.log(__file__, False, "Unknown Object")
            return False
        if info["status"] == "ok":
            unique = {
                "school_id": str(school_id)
            }
            element = {
                "school_id": str(school_id),
                "state": info["state"],
                # Stored as the strings "True"/"False", not booleans.
                "active": "True" if info["state"] == "ok" else "False"
            }
            status = sync.sync(db.schools, unique, element)
        else:
            if "error" in info:
                error.log(__file__, False, info["error"])
            else:
                error.log(__file__, False, "Unknown Error")
    except Exception, e:
        error.log(__file__, False, str(e))
def blockchain():
    '''
    Shoots back the blockchain, which in our case, is a json list
    of hashes with the block information which is:
    index, timestamp, data, hash, prev_hash
    '''
    # Re-sync from peers first so we serve the freshest chain.
    node_blocks = sync.sync()
    # Each block serializes itself via __dict__(); the large
    # commented-out manual-serialization draft was removed as dead code.
    python_blocks = [block.__dict__() for block in node_blocks]
    json_blocks = json.dumps(python_blocks)
    return json_blocks
def getdrive(self):
    """Scan logical drives and start a sync on each acceptable one.

    GetLogicalDriveStrings() returns a NUL-separated list; each entry
    is vetted by self.is_good_drive() before a sync is started.
    """
    drives = win32api.GetLogicalDriveStrings()
    # str.split replaces the long-deprecated string.splitfields alias
    # (removed along with the string-module function forms in Python 3).
    drives = drives.split('\000')
    for drive in drives:
        if self.is_good_drive(drive):
            from sync import sync
            self.current_sync = sync(drive).start()
def main():
    """CLI entry point for the TiLDA Mk4 badge toolchain.

    Handles the "reset" and "sync" commands (sync copies files to the
    badge's mass storage), optionally followed by a soft reset and an
    app run via --run.
    """
    import argparse
    cmd_parser = argparse.ArgumentParser(
        description='Toolchain for working with the TiLDA Mk4')
    cmd_parser.add_argument('command', nargs=1, help='command')
    cmd_parser.add_argument('-d', '--device', help='the serial device of the badge')
    cmd_parser.add_argument('-s', '--storage', help='the usb mass storage path of the badge')
    cmd_parser.add_argument('-b', '--baudrate', default=115200, help='the baud rate of the serial device')
    cmd_parser.add_argument('--boot', help='defines which app to boot into after reboot')
    cmd_parser.add_argument('--run', help='like run, but after a sync')
    cmd_parser.add_argument(
        '-w', '--wait', default=0, type=int,
        help='seconds to wait for USB connected board to become available')
    cmd_parser.add_argument('paths', nargs='*', help='input files')
    args = cmd_parser.parse_args()
    command = args.command[0]

    # Stop any running app before touching the badge's storage.
    if command in ["reset", "sync"]:
        pyboard_util.stop_badge(args)
    if command == "sync":
        # No explicit paths means "sync everything".
        paths = args.paths if len(args.paths) else None
        sync.sync(get_storage(args), paths)
    if command in ["reset", "sync"]:
        sync.set_boot_app(get_storage(args), args.boot or "")
        pyboard_util.soft_reset(args)
    # --run turns the invocation into a run of the given file after sync.
    if args.run:
        command = "run"
        args.paths = [args.run]
    if command == "run":
        pyboard_util.run(args)
    pyboard_util.close_pyb()
def commit(request):
    """Run a sync and report each item's outcome as JSON.

    Response body: a JSON list of ``{"id": ..., "msg": ...}`` objects,
    one per entry in the sync result mapping.
    """
    #if request.method != 'POST':
    #    return
    syncState = sync.sync()
    rsp = [{'id': id, 'msg': msg} for id, msg in syncState.items()]
    return HttpResponse(json.dumps(rsp), content_type="application/json")
def test_when_a_file_exists_in_the_source_but_not_the_destination():
    """sync() must copy a source-only file to the destination."""
    # TemporaryDirectory replaces the manual mkdtemp/rmtree try/finally,
    # guaranteeing cleanup even when an assertion fails midway.
    with tempfile.TemporaryDirectory() as source, \
            tempfile.TemporaryDirectory() as dest:
        content = "I am a very useful file"
        (Path(source) / "my-file").write_text(content)

        sync(source, dest)

        expected_path = Path(dest) / "my-file"
        assert expected_path.exists()
        assert expected_path.read_text() == content
def importTeamElements(school_id, branch_id, subject_id):
    """Import all team elements for a subject (Python 2).

    Merges context cards with any existing record so repeated runs
    accumulate rather than overwrite. Returns True on success, False
    on error (logged).
    """
    try:
        objectList = teamElementsApi.team_elements({
            "school_id": school_id,
            "branch_id": branch_id,
            "subject_id": subject_id
        })
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            for row in objectList["team_elements"]:
                unique = {
                    "team_element_id": str(row["team_element_id"]),
                }
                # Merge context cards with those already stored.
                contextCards = []
                contextCards.append(row["context_card_id"])
                existsing = db.team_elements.find(unique).limit(1)
                if existsing.count() > 0:
                    existsing = existsing[0]
                    if "context_cards" in existsing:
                        for card in existsing["context_cards"]:
                            if not card in contextCards:
                                contextCards.append(card)
                element = {
                    "team_element_id": str(row["team_element_id"]),
                    "school_id": str(row["school_id"]),
                    "branch_id": str(row["branch_id"]),
                    "subject_id": str(subject_id),
                    "type": "team",
                    "context_cards": contextCards,
                    "name": unicode(str(row["name"]).decode("utf8")),
                    "term": str(objectList["term"]["value"])
                }
                status = sync.sync(db.team_elements, unique, element)
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def importTeamElements(school_id, branch_id, subject_id):
    """Import all team elements for a subject (Python 2).

    Duplicate of the spaced importTeamElements variant in this file.
    Merges context cards with any existing record. Returns True on
    success, False on error (logged).
    """
    try:
        objectList = teamElementsApi.team_elements({
            "school_id": school_id,
            "branch_id": branch_id,
            "subject_id": subject_id
        })
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            for row in objectList["team_elements"]:
                unique = {
                    "team_element_id": str(row["team_element_id"]),
                }
                # Merge context cards with those already stored.
                contextCards = []
                contextCards.append(row["context_card_id"])
                existsing = db.team_elements.find(unique).limit(1)
                if existsing.count() > 0:
                    existsing = existsing[0]
                    if "context_cards" in existsing:
                        for card in existsing["context_cards"]:
                            if not card in contextCards:
                                contextCards.append(card)
                element = {
                    "team_element_id": str(row["team_element_id"]),
                    "school_id": str(row["school_id"]),
                    "branch_id": str(row["branch_id"]),
                    "subject_id": str(subject_id),
                    "type": "team",
                    "context_cards": contextCards,
                    "name": unicode(str(row["name"]).decode("utf8")),
                    "term": str(objectList["term"]["value"])
                }
                status = sync.sync(db.team_elements, unique, element)
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def importTeamAccounting(school_id, branch_id, team_element_id):
    """Import accounting totals for a team element (Python 2).

    Enriches each listed teacher with a teacher_id when a matching
    person record exists, then upserts into db.team_elements. Returns
    True on success, False on error (logged).
    """
    try:
        objectList = teamAccountingApi.team_accounting({
            "school_id": school_id,
            "branch_id": branch_id,
            "team_element_id": team_element_id
        })
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            unique = {
                "school_id": str(school_id),
                "branch_id": str(branch_id),
                "team_element_id": str(team_element_id)
            }
            element = {
                "school_id": str(school_id),
                "branch_id": str(branch_id),
                "team_element_id": str(team_element_id),
                "team_accounting": objectList["total"]
            }
            # Attach teacher_id where a matching teacher person exists.
            teachers = []
            for row in objectList["teachers"]:
                existsing = db.persons.find({"name": row["name"], "type": "teacher", "abbrevation": row["abbrevation"]}).limit(1)
                if existsing.count() > 0:
                    existsing = existsing[0]
                    row["teacher_id"] = existsing["teacher_id"]
                teachers.append(row)
            element["team_accounting"]["teachers"] = teachers
            status = sync.sync(db.team_elements, unique, element)
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def importFieldOfStudyPresentation(school_id, branch_id, field_of_study_id):
    """Import a field-of-study presentation (Python 2).

    Merges context cards ("SR<id>") with any existing record and
    upserts into db.field_of_studies.

    NOTE(review): unlike the sibling importers, the success path does
    not return True (falls through to None) — confirm callers don't
    rely on the return value.
    """
    try:
        objectList = field_of_study({
            "school_id": school_id,
            "branch_id": branch_id
        }, field_of_study_id)
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            unique = {
                "field_of_study_id": str(field_of_study_id)
            }
            # Merge context cards with those already stored.
            contextCards = []
            contextCards.append("SR" + str(field_of_study_id))
            existsing = db.field_of_studies.find(unique).limit(1)
            if existsing.count() > 0:
                existsing = existsing[0]
                if "context_cards" in existsing:
                    for card in existsing["context_cards"]:
                        if not card in contextCards:
                            contextCards.append(card)
            element = {
                "years": objectList["years"],
                "semesters": objectList["semesters"],
                "presentation": objectList["presentation"],
                "subject_types": objectList["subject_types"],
                "elective_groups": objectList["elective_groups"],
                "subjects": objectList["subjects"],
                "field_of_study_id": str(field_of_study_id),
                "school_id": str(school_id),
                "branch_id": str(branch_id),
                "context_cards": contextCards
            }
            # Link with XPRS Subjects
            status = sync.sync(db.field_of_studies, unique, element)
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def importPrivateActivity(school_id, branch_id, student_id, activity_id,
                          session=False, username=False, password=False):
    """Import one private calendar activity into db.events.

    Fetches the appointment via the private-activity API (optionally
    reusing an authenticated ``session``) and upserts it keyed on
    (activity_id, type="private"). Returns True on success, False on
    any error (which is logged).
    """
    try:
        objectList = privateActivityApi.private_activity({
            "school_id": school_id,
            "branch_id": branch_id,
            "student_id": student_id,
            "username": username,
            "password": password,
            "activity_id": activity_id
        }, session)
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            row = objectList["appointment"]
            unique = {
                "activity_id": str(activity_id),
                "type": "private"
            }
            element = {
                "activity_id": str(activity_id),
                "type": "private",
                "school_id": str(school_id),
                "branch_id": str(branch_id),
                "student_id": str(student_id),
                "title": row["title"],
                "comment": row["comment"],
                "start": row["start"],
                "end": row["end"]
            }
            status = sync.sync(db.events, unique, element)
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                # BUG FIX: the original removed str("activity_id") — the
                # literal string — instead of the actual activity id, so
                # the stale event was never deleted.
                db.events.remove({"activity_id": str(activity_id)})
                return False
            elif "type" in objectList:
                error.log(__file__, False, objectList["type"])
                return False
            else:
                # Explicit return for consistency with the other branches
                # (the original fell through and returned None).
                error.log(__file__, False, "Unknown error")
                return False
    except Exception as e:
        # "except ... as e" is valid on both Python 2.6+ and Python 3.
        error.log(__file__, False, str(e))
        return False
def importFieldOfStudyPresentation(school_id, branch_id, field_of_study_id):
    """Import a field-of-study presentation (Python 2).

    Duplicate of the spaced importFieldOfStudyPresentation variant in
    this file. Merges context cards ("SR<id>") with any existing record
    and upserts into db.field_of_studies.

    NOTE(review): the success path does not return True (falls through
    to None) — confirm callers don't rely on the return value.
    """
    try:
        objectList = field_of_study(
            {
                "school_id": school_id,
                "branch_id": branch_id
            }, field_of_study_id)
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            unique = {"field_of_study_id": str(field_of_study_id)}
            # Merge context cards with those already stored.
            contextCards = []
            contextCards.append("SR" + str(field_of_study_id))
            existsing = db.field_of_studies.find(unique).limit(1)
            if existsing.count() > 0:
                existsing = existsing[0]
                if "context_cards" in existsing:
                    for card in existsing["context_cards"]:
                        if not card in contextCards:
                            contextCards.append(card)
            element = {
                "years": objectList["years"],
                "semesters": objectList["semesters"],
                "presentation": objectList["presentation"],
                "subject_types": objectList["subject_types"],
                "elective_groups": objectList["elective_groups"],
                "subjects": objectList["subjects"],
                "field_of_study_id": str(field_of_study_id),
                "school_id": str(school_id),
                "branch_id": str(branch_id),
                "context_cards": contextCards
            }
            # Link with XPRS Subjects
            status = sync.sync(db.field_of_studies, unique, element)
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def importSchools():
    """Import the full school list and fan out change events (Python 2).

    Upserts each school into db.schools; when sync reports an
    insert/update action, notifies the matching 'school' listeners and
    all 'school_general' listeners. Errors are logged.
    """
    try:
        schoolList = schoolsApi.schools()
        if schoolList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in schoolList:
            error.log(__file__, False, "Unknown Object")
            return False
        if schoolList["status"] == "ok":
            for school in schoolList["schools"]:
                status = sync.sync(
                    db.schools, {
                        "school_id": school["school_id"],
                        "branch_id": school["branch_id"]
                    }, {
                        "school_id": school["school_id"],
                        "branch_id": school["branch_id"],
                        "name": school["name"],
                        "full_name": school["full_name"]
                    })
                if sync.check_action_event(status) == True:
                    # Launch School_info Scraper
                    # Launch Address scraper
                    # Notify listeners registered for this specific school.
                    for url in sync.find_listeners(
                            'school', {
                                "school_id": school["school_id"],
                                "branch_id": school["branch_id"]
                            }):
                        sync.send_event(
                            url, status["action"], {
                                "school_id": school["school_id"],
                                "branch_id": school["branch_id"],
                                "name": school["name"]
                            })
                    # Notify catch-all listeners.
                    for url in sync.find_general_listeners('school_general'):
                        sync.send_event(
                            url, status["action"], {
                                "school_id": school["school_id"],
                                "branch_id": school["branch_id"],
                                "name": school["name"]
                            })
        else:
            if "error" in schoolList:
                error.log(__file__, False, schoolList["error"])
            else:
                error.log(__file__, False, "Unknown Error")
    except Exception, e:
        error.log(__file__, False, str(e))
def oper_sync(request):
    """Operator-management page; a POST triggers an operator sync.

    Restricted to members of the "tech team" access group.
    """
    context = tools.default_context(request)
    if not tools.has_access(request, "tech team"):
        return permission_denied(request)
    context['title'] = "Manage Operators"
    if request.method == "POST":
        context['result'] = sync.sync()[1]
        logger.log_action(request.user, action=logger.ACTION_SYNC_OPERATORS)
    context['opers'] = Operator.objects.all()
    return render_to_response('sync.html', context)
def test_when_a_file_has_been_renamed_in_the_source():
    """After a rename in source, sync() renames instead of re-copying."""
    try:
        source = tempfile.mkdtemp()
        dest = tempfile.mkdtemp()
        body = "I am a file that was renamed"
        src_file = Path(source) / "source-filename"
        stale_dest = Path(dest) / "dest-filename"
        renamed_dest = Path(dest) / "source-filename"
        src_file.write_text(body)
        stale_dest.write_text(body)

        sync(source, dest)

        assert not stale_dest.exists()
        assert renamed_dest.read_text() == body
    finally:
        shutil.rmtree(source)
        shutil.rmtree(dest)
def test_when_file_renamed_in_source():
    """Renamed source file: old dest name gone, new name has the content."""
    try:
        source = tempfile.mkdtemp()
        dest = tempfile.mkdtemp()
        body = 'I am a file that was renamed'
        origin = Path(source) / 'source-filename'
        stale = Path(dest) / 'dest-filename'
        origin.write_text(body)
        stale.write_text(body)
        target = Path(dest) / 'source-filename'

        sync(source, dest)

        assert not stale.exists()
        assert target.read_text() == body
    finally:
        shutil.rmtree(source)
        shutil.rmtree(dest)
def estimateOffset():
    """Interactively search for the best log/image time offset (PhotoScan).

    Asks sync for a feasible offset window, lets the user adjust it and
    pick a step count, then iterates offsets: re-sync, re-georeference,
    and print the total error with a crude ASCII bar per step.
    """
    logPath = agisoftUtil.getLogPath()
    imagesPath = agisoftUtil.getImagesPath()
    # Passing None asks sync for the feasible [min, max] offset window.
    minOff, maxOff = sync.sync(logPath, imagesPath, None)
    print('\nMin: ' + str(minOff) + '\tMax: ' + str(maxOff))
    minOff = PhotoScan.app.getFloat('Min. offset (s):', minOff)
    maxOff = PhotoScan.app.getFloat('Max. offset (s):', maxOff)
    interval = maxOff - minOff
    step = interval / PhotoScan.app.getInt('Get number of steps for the offset search (%2.0fs interval)' % (int(interval)), 50)
    # Run iteration
    offset = minOff
    while offset < maxOff:
        sync.sync(logPath, imagesPath, offset)
        agisoftUtil.applyGeoreference(imagesPath)
        error = agisoftUtil.getTotalError()
        # Bar length scales with the error (one '|' per 2 m).
        print('offset:{:8.1f}s \terror: {:8.2f}m'.format(offset, error) + ' - ' + '|' * int(error / 2))
        PhotoScan.app.update()
        offset = offset + step;
def importWorkMethods(school_id, branch_id, team_element_id):
    """Import work methods for a team element (Python 2).

    Upserts the methods list into db.team_elements keyed on
    (school, branch, team_element). Returns True on success, False on
    error (logged).
    """
    try:
        objectList = wordMethodsApi.work_methods({
            "school_id": school_id,
            "branch_id": branch_id,
            "team_element_id": team_element_id
        })
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            unique = {
                "school_id": str(school_id),
                "branch_id": str(branch_id),
                "team_element_id": str(team_element_id)
            }
            element = {
                "school_id": str(school_id),
                "branch_id": str(branch_id),
                "team_element_id": str(team_element_id),
                "work_methods": objectList["methods"]
            }
            status = sync.sync(db.team_elements, unique, element)
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def blockchain():
    '''
    Shoots back the blockchain, which in our case, is a json list
    of hashes with the block information which is:
    index, timestamp, data, hash, prev_hash
    '''
    # Refresh the chain from peers before serializing it.
    node_blocks = sync.sync()
    # Each block knows how to express itself as a plain dict.
    python_blocks = [block.__dict__() for block in node_blocks]
    json_blocks = json.dumps(python_blocks)  # TODO look up the docs for json.dumps
    return json_blocks
def importWorkMethods(school_id, branch_id, team_element_id):
    """Import work methods for a team element (Python 2).

    Duplicate of the compact importWorkMethods variant in this file.
    Upserts the methods list into db.team_elements. Returns True on
    success, False on error (logged).
    """
    try:
        objectList = wordMethodsApi.work_methods({
            "school_id": school_id,
            "branch_id": branch_id,
            "team_element_id": team_element_id
        })
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            unique = {
                "school_id": str(school_id),
                "branch_id": str(branch_id),
                "team_element_id": str(team_element_id)
            }
            element = {
                "school_id": str(school_id),
                "branch_id": str(branch_id),
                "team_element_id": str(team_element_id),
                "work_methods": objectList["methods"]
            }
            status = sync.sync(db.team_elements, unique, element)
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def blockchain():
    """
    Shoots back the blockchain, which in our case, is a json list
    of hashes with the block information which is:
    index, timestamp, data, hash, prev_hash
    """
    # Regrab the nodes if they have changed.
    chain = sync.sync()
    serializable = [blk.__dict__() for blk in chain]
    return json.dumps(serializable)
class filecontrol:
    """Prune data files older than ``xdays`` across local/db/remote copies.

    NOTE(review): ``current_time()`` and ``sync()`` are instantiated at
    class-definition time and shared by all instances — confirm that is
    intended.
    """
    current_time = current_time()
    sync = sync()

    def __init__(self):
        # NOTE(review): configuration lives in module globals rather than
        # instance attributes; other code may rely on these names.
        global xdays
        global userid
        global path
        global directories
        xdays = 1
        userid = 1
        path = '/home/pi/Desktop/project/hardware/data'
        directories = os.listdir(path)

    def file_check(self):
        """List and purge files older than ``xdays``; list newer ones."""
        print("\nList all files older than " + str(xdays) + " days")
        print("===========================" + "=" * len(str(xdays)) + "====")
        for directory in directories:
            files = os.listdir((path + "/" + directory))
            for current_file in files:
                # for current_file in path:
                # print(current_file)
                if (os.stat(path + "/" + directory + "/" + current_file).st_mtime) < (self.current_time.get_now() - xdays * 86400):
                    print(current_file)
                    # Remove everywhere: local disk, database row, remote copy.
                    self.sync.file_remove_local(directory, current_file)
                    self.sync.file_remove_database(current_file)
                    self.sync.file_remove_remote(directory, current_file)
        print("\nList all files newer than " + str(xdays) + " days")
        print("===========================" + "=" * len(str(xdays)) + "====")
        # NOTE(review): this loop sits outside the directory loop, so it
        # only inspects ``files``/``directory`` from the LAST directory —
        # looks like an indentation bug; confirm intent.
        for current_file in files:
            if (os.stat(path + "/" + directory + "/" + current_file).st_mtime) > (self.current_time.get_now() - xdays * 86400):
                print(current_file)
def test_sync():
    """End-to-end check of sync.sync over a temp filesystem tree (Python 2).

    Builds master/slave directories from test_config.json, seeds each
    master with a file containing its own path, runs the sync, and
    verifies every slave received a copy with the master's content.
    """
    with open('./test_config.json') as data:
        test_config = json.load(data)
    # Start from a clean mount point.
    if os.path.isdir(test_config['mount']):
        shutil.rmtree(test_config['mount'])
    # setup: create tmp filesystem
    for k, v in test_config['actives'].iteritems():
        master = os.sep.join([test_config['mount'], k])
        os.makedirs(master)
        # The seed file's content is the master directory path itself.
        f = open(os.sep.join([master, 'test']), 'w')
        f.write(master)
        f.close()
        for v2 in v:
            slave = os.sep.join([test_config['mount'], v2])
            os.makedirs(slave)
    # execute
    out = sync.sync(test_config)
    # validate
    for k, v in test_config['actives'].iteritems():
        master = os.sep.join([test_config['mount'], k])
        for v2 in v:
            synced_filename = os.sep.join([test_config['mount'], v2, 'test'])
            f = open(synced_filename, 'r')
            assert(os.path.isfile(synced_filename))
            assert(f.read() == master)
            f.close()
    # teardown
    shutil.rmtree(test_config['mount'], True)
def importSchoolInfo ( school_id ):
    """Fetch basic info for a school and upsert it into db.schools.

    Returns True on success and False on any error — previously the
    success and error branches fell through returning None, which was
    inconsistent with the sibling import* functions (e.g.
    importClassTeams); callers checking the result now get an explicit
    boolean.
    """
    try:
        info = school_info({
            "school_id" : str(school_id)
        })
        if info is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in info:
            error.log(__file__, False, "Unknown Object")
            return False
        if info["status"] == "ok":
            # Upsert identity: school + branch.
            unique = {
                "school_id" : str(school_id),
                "branch_id" : info["information"]["branch_id"]
            }
            element = {
                "school_id" : str(school_id),
                "branch_id" : info["information"]["branch_id"],
                "terms" : info["information"]["terms"],
                "name" : info["information"]["name"]
            }
            status = sync.sync(db.schools, unique, element)
            return True
        else:
            if "error" in info:
                error.log(__file__, False, info["error"])
            else:
                error.log(__file__, False, "Unknown Error")
            return False
    # `as` form is valid on both Python 2.6+ and 3, unlike `except X, e`.
    except Exception as e:
        error.log(__file__, False, str(e))
        return False
# --- tail of mine_first_block(); its definition starts before this chunk ---
    assert first_block.is_valid()
    return first_block


if __name__ == '__main__':
    # CLI: `--first` mines the genesis block; otherwise sync from peers.
    parser = argparse.ArgumentParser(description='Generating Blockchain')
    parser.add_argument('--first', '-f', dest='first', help='generate the first node ourselves')
    args = parser.parse_args()
    if not os.path.exists(CHAINDATA_DIR):
        os.mkdir(CHAINDATA_DIR)
    if args.first:
        # Only mine a genesis block into an empty chaindata directory.
        if os.listdir(CHAINDATA_DIR) == []:
            first_block = mine_first_block()
            first_block.self_save()
            # Marker file recording that the genesis block exists.
            filename = '%sdata.txt' % CHAINDATA_DIR
            with open(filename, 'w') as data_file:
                data_file.write('First Block. (this is the hook m**********r)')
        else:
            print(
                'chaindata directory already has files. If you want to generate a first block, delete files and rerun'
            )
    else:
        # this is the expected case, sync from peers (f**k you mist y u take so long)
        print('syncing')
        sync.sync(save=True)
def sync_calendars():
    """Run the calendar sync against the Flask app and its Mongo handle."""
    # Imported lazily so the sync module is only loaded when invoked.
    import sync as _sync_module
    return _sync_module.sync(app, mongo)
def main():
    """Entry point for the TiLDA Mk4 toolchain.

    Parses the command line, then dispatches on `command`; note that
    several commands rewrite `command` to chain into later stages
    (app -> sync, test -> sync, sync -> run).
    """
    import argparse
    cmd_parser = argparse.ArgumentParser(
        description='Toolchain for working with the TiLDA Mk4')
    cmd_parser.add_argument(
        'command',
        nargs=1,
        help=
        'command [test|reset|sync|run|validate|wifi|firmware-update|app|bootstrap]',
        choices=[
            'test', 'reset', 'sync', 'validate', 'run', 'wifi',
            'firmware-update', 'app', 'bootstrap'
        ])
    cmd_parser.add_argument('-c', '--clean', action='store_true', help='clean mass storage before writing')
    cmd_parser.add_argument('-d', '--device', help='the serial device of the badge')
    cmd_parser.add_argument('-s', '--storage', help='the usb mass storage path of the badge')
    cmd_parser.add_argument('-b', '--baudrate', default=115200, help='the baud rate of the serial device')
    cmd_parser.add_argument('-v', '--verbose', action='store_true', help='adds more output')
    cmd_parser.add_argument('--skip-wifi', action='store_true', help='does not sync wifi.json')
    cmd_parser.add_argument('--bootstrapped-apps', action='store_true', help='[Sync] only bootstrapped apps by default')
    cmd_parser.add_argument('--print_resources', action='store_true', help='prints resources in json')
    cmd_parser.add_argument('--boot', help='defines which app to boot into after reboot')
    cmd_parser.add_argument('--run', help='like run, but after a sync')
    cmd_parser.add_argument(
        '-w',
        '--wait',
        default=0,
        type=int,
        help='seconds to wait for USB connected board to become available')
    cmd_parser.add_argument('paths', nargs='*', help='input files')
    args = cmd_parser.parse_args()
    command = args.command[0]
    path = sync.get_root()
    run_tests = command == "test"

    # Every command except `validate` talks to the badge over serial.
    if command not in ["validate"]:
        try:
            import serial
        except Exception as e:
            print(
                "Please install pyserial first: https://pyserial.readthedocs.io/en/latest/pyserial.html"
            )
            sys.exit(1)

    if command == "firmware-update":
        firmware_update.firmware_update(args.verbose)

    if command == "wifi":
        wifi.select_wifi()

    if command == "app":
        # `app` is sugar for: sync, then run the app's main.py.
        command = "sync"
        args.run = "%s/main.py" % args.paths[0]
        #args.boot = args.paths[0]

    if command in ["test", "validate", "sync", "bootstrap"]:
        # Build and validate the local resource tree before touching the badge.
        resources = get_resources(path)
        add_metadata(path, resources)
        validate(path, resources)
        resolve_dependencies(resources)
        remove_upip(resources)
        if args.print_resources:
            print(pretty_print_resources(resources))
        errors = get_error_summary(resources)
        if errors:
            print("Problems found:\n")
            print(errors)
            sys.exit(1)
        print("Local Test: PASS")
        if command == "test":
            # `test` becomes a sync of the named lib's test files.
            command = "sync"
            if len(args.paths) == 0:
                print(
                    "Please define an app or lib to sync: tilda_tools sync my_app\n"
                )
                sys.exit(1)
            else:
                args.paths = ["lib/test_%s.py" % p for p in args.paths]

    if command in ["reset", "sync", "bootstrap"]:
        pyboard_util.stop_badge(args, args.verbose)

    if command == "bootstrap":
        sync.clean(args)
        sync.sync(args, ["bootstrap.py"], {}, args.verbose, args.skip_wifi)
        pyboard_util.hard_reset(args)

    if command == "sync":
        paths = args.paths if len(args.paths) else None
        if args.bootstrapped_apps:
            # Drop non-bootstrapped apps that weren't explicitly requested.
            for k, val in list(resources.items()):
                if val.get("type", None) == "app":
                    if not k in paths and not val.get("bootstrapped", False):
                        if args.verbose:
                            print(
                                "Removing app '{0}' from sync list".format(k))
                        del resources[k]
        if args.clean:
            sync.clean(args)
        synced_resources = sync.sync(args, paths, resources, args.verbose,
                                     args.skip_wifi)

    if (command in ["reset", "sync"]) or run_tests:
        sync.set_boot_app(args, args.boot or "")
        if args.run:
            # Chain into `run` after the sync completed.
            command = "run"
            args.paths = [args.run]
            sync.set_no_boot(args)
        pyboard_util.soft_reset(args)

    if command == "run":
        pyboard_util.check_run(args.paths)
        pyboard_util.run(args, args.paths)

    if run_tests:
        # Execute each synced test file on the badge, resetting between runs.
        for resource in synced_resources:
            pyboard_util.check_run([resource])
            pyboard_util.run(args, [resource], False)
            pyboard_util.soft_reset(args, False)

    pyboard_util.close_pyb()
def importClassTeams ( school_id, branch_id, class_id ):
    """Import a class's teams: upsert each team into db.team_elements,
    then store the resulting team _ids on the class in db.classes.

    Returns True on success, False on any error.
    """
    try:
        objectList = classTeamsApi.class_teams({
            "school_id" : school_id,
            "branch_id" : branch_id,
            "class_id" : class_id
        })
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            teams = []
            for row in objectList["teams"]:
                unique = {
                    "team_element_id" : str(row["team_element_id"]),
                    "school_id" : str(school_id),
                    "branch_id" : str(branch_id),
                }
                element = {
                    "name" : row["name"],
                    "team_element_id" : str(row["team_element_id"]),
                    "school_id" : str(school_id),
                    "branch_id" : str(branch_id),
                }
                status = sync.sync(db.team_elements, unique, element)
                # sync.sync returns the stored document; collect its _id
                # so the class document can reference its teams.
                teams.append(status["_id"])
            unique = {
                "school_id" : str(school_id),
                "branch_id" : str(branch_id),
                "class_id" : str(class_id)
            }
            element = {
                "school_id" : str(school_id),
                "branch_id" : str(branch_id),
                "class_id" : str(class_id),
                "teams" : teams
            }
            status = sync.sync(db.classes, unique, element)
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def importGrades ( school_id, branch_id, student_id, term, session = False, username = False, password = False ):
    """Import a student's grades for `term` and merge them into the
    student's document in db.persons.

    Returns False on error; NOTE(review): the success path falls
    through without an explicit `return True`, unlike the sibling
    import* functions — confirm whether callers rely on that.
    """
    try:
        objectList = gradesApi.grades({
            "school_id" : school_id,
            "branch_id" : branch_id,
            "student_id" : student_id,
            "username" : username,
            "password" : password
        }, term, session)
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            # Enrich diploma lines with the matching XPRS subject, if any.
            diplomaLines = []
            for row in objectList["diploma"]:
                existsing = db.xprs_subjects.find({"name" : row["subject_name"], "level" : row["subject_level"]})
                if existsing.count() > 0:
                    row["xprs_subject_id"] = existsing[0]["_id"]
                    row["context_card_id"] = existsing[0]["context_card_id"]
                diplomaLines.append(row)
            # Merge into the existing person document when present.
            existsing = db.persons.find({"student_id" : str(student_id)})
            unique = {
                "student_id" : str(student_id)
            }
            if existsing.count() > 0:
                element = existsing[0]
            else:
                element = {
                    "student_id" : str(student_id)
                }
            # Ensure the nested grades structure exists before writing.
            if not "grades" in element:
                element["grades"] = {}
            if not "comments" in element["grades"]:
                element["grades"]["comments"] = {}
            # NOTE(review): this re-tests "grades" in element (always True
            # after the first guard) — likely meant element["grades"].
            if not "grades" in element["grades"]:
                element["grades"]["grades"] = {}
            if not "notes" in element["grades"]:
                element["grades"]["notes"] = {}
            element["grades"]["protocol_average"] = objectList["average"]
            element["grades"]["diploma"] = diplomaLines
            element["grades"]["protocol"] = objectList["protocol_lines"]
            # Per-term maps keyed by the term value.
            element["grades"]["grades"][objectList["term"]["value"]] = objectList["grades"]
            element["grades"]["comments"][objectList["term"]["value"]] = objectList["comments"]
            element["grades"]["notes"][objectList["term"]["value"]] = objectList["grade_notes"]
            status = sync.sync(db.persons, unique, element)
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def importGroups ( school_id, branch_id ):
    """Import a branch's groups into db.team_elements, accumulating
    context cards from any previously stored copy of each group.

    Returns True on success, False on any error.
    """
    try:
        objectList = groupsApi.groups({
            "school_id" : school_id,
            "branch_id" : branch_id
        })
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            for row in objectList["groups"]:
                unique = {
                    "team_element_id" : str(row["group_id"]),
                    "term" : objectList["term"]["value"],
                    "school_id" : str(row["school_id"]),
                    "branch_id" : str(row["branch_id"])
                }
                # Preserve previously stored context cards alongside the new one.
                contextCards = []
                contextCards.append(row["context_card_id"])
                existsing = db.team_elements.find(unique).limit(1)
                if existsing.count() > 0:
                    existsing = existsing[0]
                    if "context_cards" in existsing:
                        for card in existsing["context_cards"]:
                            if not card in contextCards:
                                contextCards.append(card)
                # NOTE(review): "type" is listed twice; the later literal
                # "group" overwrites row["type"] — confirm intent.
                element = {
                    "team_element_id" : str(row["group_id"]),
                    "school_id" : str(row["school_id"]),
                    "branch_id" : str(row["branch_id"]),
                    "name" : row["name"],
                    "type" : row["type"],
                    "group_type" : row["group_type"],
                    "term" : objectList["term"]["value"],
                    "type" : "group",
                    "context_cards" : contextCards,
                    "subject_id" : "1361688526"
                }
                status = sync.sync(db.team_elements, unique, element)
                # Launch Team Info Scraper
                '''if sync.check_action_event(status) == True:
                for url in sync.find_listeners('group', unique):
                    sync.send_event(url, status["action"], element)
                for url in sync.find_listeners('school', {"school" : school_id, "branch_id" : branch_id}):
                    sync.send_event(url, "group", element)
                for url in sync.find_general_listeners('group_general'):
                    sync.send_event(url, status["action"], element)'''
            #deleted = sync.find_deleted(db.groups, {"school_id" : school_id, "branch_id" : branch_id, "term" : objectList["term"]["value"], "type" : "group"}, ["group_id"], objectList["groups"])
            '''for element in deleted:
            for url in sync.find_listeners('group', {"group_id" : element["group_id"]}):
                sync.send_event(url, 'deleted', element)
            for url in sync.find_listeners('school', {"school" : school_id, "branch_id" : branch_id}):
                sync.send_event(url, "group_deleted", element)'''
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
            else:
                error.log(__file__, False, "Unknown Error")
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def importTeamBooks ( school_id, branch_id, team_id, session = False, username = False, password = False ):
    """Import a team's books: upsert each title into db.books, then
    attach the full (id-annotated) book list to the team in db.teams.

    Returns True on success, False on any error.
    """
    try:
        objectList = teamBooksApi.team_books({
            "school_id" : school_id,
            "branch_id" : branch_id,
            "team_id" : team_id,
            "username" : username,
            "password" : password
        }, session)
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            for row in objectList["books"]:
                # Books are deduplicated globally by (title, type).
                unique = {
                    "title" : row["title"],
                    "type" : row["type"]
                }
                element = {
                    "title" : row["title"],
                    "type" : row["type"]
                }
                status = sync.sync(db.books, unique, element)
                # Annotate the row (mutating objectList["books"] in place)
                # with the stored book's _id; drop the redundant team_id.
                row["_id"] = status["_id"]
                del(row["team_id"])
            unique = {
                "school_id" : str(school_id),
                "branch_id" : str(branch_id),
                "team_id" : str(team_id)
            }
            element = {
                "school_id" : str(school_id),
                "branch_id" : str(branch_id),
                "team_id" : str(team_id),
                "books" : objectList["books"]
            }
            status = sync.sync(db.teams, unique, element)
            return True
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
def importClassMembers ( school_id, branch_id, class_id, session = False, username = False, password = False ):
    """Import a class's students and teachers into db.persons and
    record their _ids as the class member list in db.classes.

    Returns False on error; success falls through after the final
    sync (no explicit return, matching importGrades).
    """
    try:
        objectList = class_members({
            "school_id" : school_id,
            "class_id" : class_id,
            "username" : username,
            "password" : password,
            "branch_id" : branch_id
        }, session)
        if objectList is None:
            error.log(__file__, False, "Unknown Object")
            return False
        if not "status" in objectList:
            error.log(__file__, False, "Unknown Object")
            return False
        if objectList["status"] == "ok":
            members = []
            for row in objectList["students"]:
                unique = {
                    "student_id" : row["person_id"]
                }
                # Merge context cards with any previously stored ones.
                contextCards = []
                contextCards.append(row["context_card_id"])
                existsing = db.persons.find(unique).limit(1)
                if existsing.count() > 0:
                    existsing = existsing[0]
                    if "context_cards" in existsing:
                        for card in existsing["context_cards"]:
                            if not card in contextCards:
                                contextCards.append(card)
                # Py2: names are re-decoded to unicode from utf-8 bytes.
                element = {
                    "type" : "student",
                    "student_id" : row["person_id"],
                    "name" : unicode(str(row["full_name"]).decode("utf8")),
                    "class_student_id" : unicode(str(row["person_text_id"]).decode("utf8")),
                    "last_name" : unicode(str(row["last_name"]).decode("utf8")),
                    "first_name" : unicode(str(row["first_name"]).decode("utf8")),
                    "context_cards" : contextCards,
                    "school_id" : str(school_id),
                    "branch_id" : str(branch_id)
                }
                # Add Field of Study
                element["field_of_study"] = {
                    "name" : row["field_of_study"]["name"],
                    "field_of_study_id" : row["field_of_study"]["field_of_study_id"]
                }
                if "picture_id" in row:
                    # Launch Fetch Picture Task
                    element["picture_id"] = row["picture_id"]
                status = sync.sync(db.persons, unique, element)
                members.append(status["_id"])
            for row in objectList["teachers"]:
                unique = {
                    "teacher_id" : row["person_id"]
                }
                contextCards = []
                contextCards.append(row["context_card_id"])
                existsing = db.persons.find(unique).limit(1)
                if existsing.count() > 0:
                    existsing = existsing[0]
                    if "context_cards" in existsing:
                        for card in existsing["context_cards"]:
                            if not card in contextCards:
                                contextCards.append(card)
                element = {
                    "teacher_id" : str(row["person_id"]),
                    "last_name" : unicode(str(row["last_name"]).decode("utf8")),
                    "first_name" : unicode(str(row["first_name"]).decode("utf8")),
                    "type" : "teacher",
                    "name" : unicode(str(row["full_name"]).decode("utf8")),
                    "abbrevation" : unicode(str(row["person_text_id"]).decode("utf8")),
                    "context_cards" : contextCards,
                    "school_id" : str(school_id),
                    "branch_id" : str(branch_id)
                }
                if "picture_id" in row:
                    # Launch Fetch Picture Task
                    element["picture_id"] = row["picture_id"]
                status = sync.sync(db.persons, unique, element)
                # Possible Teams Feature
                members.append(status["_id"])
            unique = {
                "class_id" : str(class_id)
            }
            element = {
                "members" : members,
                "class_id" : str(class_id),
                "school_id" : str(school_id),
                "branch_id" : str(branch_id)
            }
            status = sync.sync(db.classes, unique, element)
        else:
            if "error" in objectList:
                error.log(__file__, False, objectList["error"])
                return False
            else:
                error.log(__file__, False, "Unknown error")
                return False
    except Exception, e:
        error.log(__file__, False, str(e))
        return False
# Top-level demodulation script fragment: `r`, `aCh`, `aRx`, `c`, `d`
# are defined earlier in the file (outside this chunk).
phase = 0
k = 1
for p in r:
    p = p + phase
    # NOTE(review): phase is never advanced (`+ 0`) — possibly a stub
    # for a frequency-offset correction; confirm.
    phase = phase + 0
    # Map the 16-bit phase sample onto the unit circle.
    s = complex(math.cos(2.*p*math.pi/(1<<16)),math.sin(2.*p*math.pi/(1<<16)))
    cs = aCh.ce(s)
    # Decimate by 16: push one receiver output per 16 channel samples.
    if k==16:
        k = 0
        rx = aRx.ce(cs)
        c.append(rx)
        d.append(cs)
    k = k + 1
# Locate the sync position within the first 120 samples.
( pos, ret ) = sync.sync(c[0:2*60],2)
( pos2, ret2, aret ) = sync.syncBeforeDemod(d[0:2*60],2,0.32)
print pos," ",pos2
# Back off from the pre-demod sync position to the payload start.
pos = pos2 - 2*40 - 3
rec = []
# Hard-decision slice 296 bits, stepping 2 samples per bit (wraps around).
for i in range(256+40):
    cd = c[pos]
    if cd<0:
        rec.append(1)
    else:
        rec.append(0)
    pos = (pos + 2) % len(c)
error = 0
def importOutgoingCensor ( school_id, branch_id, outgoing_censor_id ):
    """Import an outgoing-censor assignment into db.events, linking the
    censor, institution, and XPRS subject to existing documents where
    possible.  Returns True on success, False on error.

    NOTE(review): the surrounding try/except is commented out (see the
    leading `#try:` and the trailing commented `'''except`), so any
    exception currently propagates to the caller.
    """
    #try:
    objectList = outgoingCensorApi.outgoing_censor({
        "school_id" : school_id,
        "branch_id" : branch_id,
        "outgoing_censor_id" : outgoing_censor_id
    })
    if objectList is None:
        error.log(__file__, False, "Unknown Object")
        return False
    if not "status" in objectList:
        error.log(__file__, False, "Unknown Object")
        return False
    if objectList["status"] == "ok":
        row = objectList
        censor = {
            "name" : row["censor"]["name"],
            "abbrevation" : row["censor"]["abbrevation"]
        }
        # Link the censor to a known person if one matches.
        existing = db.persons.find({"school_id" : str(school_id), "branch_id" : str(branch_id), "name" : censor["name"], "abbrevation" : censor["abbrevation"]})
        if existing.count() > 0:
            existing = existing[0]
            censor["_id"] = existing["_id"]
        institution = {
            "institution" : row["institution"]["name"],
            "institution_id" : row["institution"]["institution_id"]
        }
        # Institutions Link
        existing = db.schools.find({"$or" : [{"name" : re.compile("^" + institution["institution"] + "$", re.IGNORECASE)}, {"institution" : institution["institution"]}, {"institution_id" : institution["institution_id"]}]})
        if existing.count() > 0:
            existing = existing[0]
            institution["_id"] = existing["_id"]
            # NOTE(review): `x` is not defined at this point — this raises
            # NameError when a school matches; probably meant `institution`.
            element = {
                "institution_id" : x["institution_id"],
                "institution" : x["institution"]
            }
            status = sync.sync(db.schools, {"school_id" : existing["school_id"]}, element)
        # XPRS Subjects
        xprs = row["xprs"]
        existing = db.xprs_subjects.find({"code_full" : xprs["code_full"]})
        if existing.count() > 0:
            existing = existing[0]
            xprs["_id"] = existing["_id"]
            # Accumulate the enumerations already stored on the subject.
            insitution_types = []
            test_name_codes = []
            test_types = []
            test_type_long_codes = []
            if "insitution_types" in existing:
                insitution_types = existing["insitution_types"]
            if "test_name_codes" in existing:
                test_name_codes = existing["test_name_codes"]
            if "test_types" in existing:
                test_types = existing["test_types"]
            if "test_type_long_codes" in existing:
                test_type_long_codes = existing["test_type_long_codes"]
            if not xprs["gym_type"] in insitution_types:
                insitution_types.append(xprs["gym_type"])
            if not xprs["test_type_code"] in test_name_codes:
                test_name_codes.append(xprs["test_type_code"])
            if not xprs["xprs_type"] in test_types:
                test_types.append(xprs["xprs_type"])
            if not xprs["test_type_long_code"] in test_type_long_codes:
                test_type_long_codes.append(xprs["test_type_long_code"])
            element = {
                "insitution_types" : insitution_types,
                "test_name_codes" : test_name_codes,
                "test_types" : test_types,
                "test_type_long_codes" : test_type_long_codes
            }
            status = sync.sync(db.xprs_subjects, {"xprs_subject_id" : existing["xprs_subject_id"]}, element)
        unique = {
            "outgoing_censor_id" : str(outgoing_censor_id)
        }
        element = {
            "outgoing_censor_id" : str(outgoing_censor_id),
            "branch_id" : str(branch_id),
            "school_id" : str(school_id),
            "censor" : censor,
            "note" : row["note"],
            "number_of_students" : row["number_of_students"],
            "test_type_team_name" : row["test_type_team_name"],
            "test_team" : row["test_team"],
            "institution" : institution,
            "period" : row["period"],
            "xprs" : xprs,
            "subject" : {
                "name" : xprs["subject"],
                "level" : xprs["level"]
            }
        }
        # When a free-text description is present, prefer its parsed fields
        # and try to resolve teams/teachers against stored documents.
        if row["description"] == True:
            element["subject"] = row["information"]["subject"]
            element["terms"] = row["information"]["terms"]
            element["institution"]["name"] = row["information"]["institution"]
            element["team_name"] = row["information"]["team_name"]
            existing = db.schools.find({"name" : re.compile(r".*" + element["institution"]["name"] + ".*")})
            if existing.count() > 0:
                existing = existing[0]
                element["institution"]["school_id"] = existing["school_id"]
                teachers = []
                team = db.teams.find({"school_id" : existing["school_id"], "name" : row["information"]["team_name"]})
                print row["information"]["team_name"]
                if team.count() > 0:
                    print "Team Found!"
                    teams = []
                    team = team[0]
                    element["team_id"] = team["team_id"]
                    for x in row["information"]["teams"]:
                        team_element = db.team_elements.find({"$or" : [{"team_id" : team["team_id"], "name" : x["name"]}, {"name" : x["name"], "school_id" : existing["school_id"]}]})
                        if team_element.count() > 0:
                            team_element = team_element[0]
                            teams.append({"name" : x["name"], "_id" : team_element["_id"]})
                        else:
                            teams.append(x)
                    element["team_elements"] = teams
                else:
                    element["team_elements"] = row["information"]["teams"]
                for x in row["information"]["teachers"]:
                    teacher = db.persons.find({"name" : x["name"], "school_id" : existing["school_id"]})
                    if teacher.count() > 0:
                        teacher = teacher[0]
                        teachers.append({"name" : x["name"], "_id" : teacher["_id"]})
                    else:
                        teachers.append({"name" : x["name"]})
                element["teachers"] = teachers
            else:
                element["team_elements"] = row["information"]["teams"]
                element["teachers"] = row["information"]["teachers"]
        sync.sync(db.events, unique, element)
        return True
    else:
        if "error" in objectList:
            error.log(__file__, False, objectList["error"])
            # NOTE(review): str("outgoing_censor_id") removes the literal
            # string, not the id in the variable — probably a bug.
            db.events.remove({"outgoing_censor_id" : str("outgoing_censor_id")})
            return False
        elif "type" in objectList:
            error.log(__file__, False, objectList["type"])
            return False
        else:
            error.log(__file__, False, "Unknown error")
    '''except Exception, e:
# Flask node entry module: imports, app construction, and an initial
# chain sync performed at import time.
from block import Block
import mine
from flask import Flask, jsonify, request
import sync
import requests
import os
import json
import sys
import apscheduler
import argparse
from config import *

node = Flask(__name__)

sync.sync(save=True )  # want to sync and save the overall "best" blockchain from peers

from apscheduler.schedulers.background import BackgroundScheduler

# Background scheduler for periodic tasks (started elsewhere).
sched = BackgroundScheduler(standalone=True)


@node.route('/blockchain.json', methods=['GET'])
def blockchain():
    # (Definition continues past this chunk boundary.)
    '''
    Shoots back the blockchain, which in our case, is a
    json list of hashes with the block information which is:
        index
        timestamp
        data
        hash
def main():
    """Copy matching entries from each configured LMD calendar into
    the corresponding Google calendar.

    Credentials for LMD are read from gitignore/login.txt (user on the
    first line, password on the second); -d/--debug enables progress
    output.
    """
    from optparse import OptionParser
    parser = OptionParser("usage: %progname [options]")
    parser.add_option("-d", "--debug", dest="debug", action="store_true", help="enable debugging output")
    options, args = parser.parse_args()
    if args:
        parser.error("invalid number of arguments")
    curr_dir, _ = P.split(P.abspath(__file__))
    lmd_user, lmd_pass = open(P.join(curr_dir, "gitignore", "login.txt")).read().strip().split("\n")
    if options.debug:
        print "Logging in to LMD"
    fetcher = HTMLFetcher(lmd_user, lmd_pass)
    fetcher.login()
    if options.debug:
        print "Logging in to Google Calendar"
    calendar_service = login()
    # Each configured pair: Google calendar name, LMD calendar name,
    # and a regex selecting which entry titles to copy.
    for goog_name, lmd_name, regex in CALENDARS:
        if options.debug:
            print "Copying entries from LMD calendar (%s) to Google calendar (%s)" % (goog_name, lmd_name)
        google_id = get_calendar_id(calendar_service, goog_name)
        cid = CALENDAR_ID[lmd_name]
        if options.debug:
            print "Fetching LMD calendar (%s)" % cid
        stream = fetcher.get_calendar(cid)
        utf = codecs.decode(stream.read(), CALENDAR_ENCODING)
        if options.debug:
            print "Parsing calendar entries"
        entries = parse_entries(cid, utf)
        if options.debug:
            print "%d total entries parsed. Filtering entries" % len(entries)
        filtered = filter(lambda f: regex.search(f.title), entries)
        if options.debug:
            print "%d filtered entries. Parsing entry details" % len(filtered)
        # Fetch and attach the detail page for every selected entry.
        for i, entry in enumerate(filtered):
            stream = fetcher.get_details(entry.cid, entry.event_id)
            html = codecs.decode(stream.read(), CALENDAR_ENCODING)
            entry.details = parse_details(html)
            if options.debug:
                print "Processed %d/%d entries" % (i, len(filtered))
        # Lower bound for the Google-side query: earliest LMD entry,
        # or "now" (JST) when there are no entries.
        if entries:
            time_min = sorted(entries, key=lambda e: e.start_datetime)[0].start_datetime
        else:
            time_min = datetime.datetime.now().replace(tzinfo=JST())
        if options.debug:
            print "Fetching Google Calendar entries"
        google_entries = get_entries(calendar_service, google_id, time_min)
        if options.debug:
            print "all entries in LMD calendar `%s':" % lmd_name
            for e in entries:
                print "\t", e
            print "relevant entries in LMD calendar `%s':" % lmd_name
            for e in filtered:
                print "\t", e
            print "entries in Google calendar `%s':" % goog_name
            for e in sorted(google_entries):
                print "\t", e
        added, updated, deleted = sync(calendar_service, google_id, filtered, google_entries)
        # Only report calendars where something actually changed.
        if added or updated or deleted:
            print "from: %s to: %s total: %d filtered: %d added: %d updated: %d deleted: %d" % (
                lmd_name,
                goog_name,
                len(entries),
                len(filtered),
                added,
                updated,
                deleted,
            )