def saveDB(self, isFullSave=False):
    if isFullSave:
        Log(ErrorLevel.info, "saving every record to db")
    else:
        Log(ErrorLevel.info, "saving finished records to db")
    timeStamp = GetTimeStamp()
    copiedDB = copy.deepcopy(self.db)
    for uid in copiedDB:
        for userState in ["online", "mobile", "active"]:
            user = copiedDB[uid][userState]
            if len(user) != 0:
                lastEntry = user[-1]
                if lastEntry[END] is None:
                    if not isFullSave:
                        # ongoing records ignored
                        copiedDB[uid][userState].pop()
                    else:
                        # close every ongoing record
                        modifiedLastEntry = user.pop()
                        modifiedLastEntry[END] = timeStamp
                        user.append(modifiedLastEntry)
                        copiedDB[uid][userState] = user
    # export db to json file
    with open(self.dbPath, 'w') as dbFile:
        dbFile.write(json.dumps(copiedDB))
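# Example (illustrative only, uid and timestamps invented): the db structure
# saveDB works on, as implied by the loops above. Each uid maps the tracked
# states to lists of [start, end] timestamp pairs, where END indexes the close
# timestamp and None marks a still-open record:
#
#   {
#       "100001234567890": {
#           "online": [[1514764800, 1514765400], [1514800000, None]],
#           "mobile": [[1514764800, 1514765000]],
#           "active": []
#       }
#   }
#
# With isFullSave=False the open [1514800000, None] record is dropped from the
# copy before writing; with isFullSave=True its None end is replaced by the
# current timestamp.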
def processPresence(self, presence: Dict, state: str):
    timeIntervals = self.getUser(presence["uid"], state)
    if presence[state] is True:
        if len(timeIntervals) == 0:
            # user has no records of this state yet, create the first one
            Log(ErrorLevel.debug, "{} with state {} has no records yet, create one",
                presence["uid"], state)
            timeIntervals.append([presence["lastactive"], None])
            self.setUser(presence["uid"], state, timeIntervals)
        else:
            # the list is not empty, only start a new interval if the last is closed
            isLastIntervalClosed = timeIntervals[-1][END] is not None
            if isLastIntervalClosed:
                Log(ErrorLevel.debug, "{} with state {} has no open entry, create new",
                    presence["uid"], state)
                timeIntervals.append([presence["lastactive"], None])
                self.setUser(presence["uid"], state, timeIntervals)
    elif presence[state] is False:
        if len(timeIntervals) != 0:
            lastEntryOpen = timeIntervals[-1][END] is None
            if lastEntryOpen:
                # close the open interval with the last-active timestamp
                modifiedLastEntry = timeIntervals.pop()
                modifiedLastEntry[END] = presence["lastactive"]
                timeIntervals.append(modifiedLastEntry)
                self.setUser(presence["uid"], state, timeIntervals)
    else:
        Log(ErrorLevel.warning, "presence value is not valid in object: {}", presence)
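# A minimal sketch of the interval bookkeeping above, detached from the class
# (END = 1 and the timestamps are assumptions for illustration):
#
#   END = 1
#   intervals = []
#   # state turns on -> open a new interval
#   intervals.append([1514764800, None])
#   # state turns off -> close the open interval with the last-active timestamp
#   if intervals and intervals[-1][END] is None:
#       intervals[-1][END] = 1514765400
#   # intervals == [[1514764800, 1514765400]]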
def __wait_for_complete(self):
    # wait for all worker threads to finish
    Log.d(TAG, "start join thread")
    for thread in self.threads:
        if thread.isAlive():
            thread.join()
    Log.d(TAG, "end join thread...")
def clean():
    """ Delete the config file """
    Log.d(DEBUG_TAG, "Delete config file...")
    try:
        os.remove(CONFIG_FILE)
    except os.error as e:
        Log.e(DEBUG_TAG, "Delete config file %s error, reason: %s" % (CONFIG_FILE, e))
def create_xml_source( xml_source_parent, source_name ):
    Log.log( "Creating new source folder: " + source_name )
    xml_source = xml_doc.createElement( "source" )
    xml_source.setAttribute( "src", source_name )
    xml_source.setAttribute( "created", current_date_time() )
    xml_source_parent.appendChild( xml_source )
    return xml_source
def process_album_items( items_info_type_set, xml_album ):
    album_size = 0
    global g_total_processed_items
    for i in items_info_type_set:
        f = file_type_info( i.work_folder, i.item )
        if i.is_comment:
            process_album_commented_item( item_type_info( i.work_folder, i.item, True ), xml_album )
            continue
        # look for an already indexed item with the same source file
        xml_item = find_node_by_value( xml_album, "item", "src", f.local_file )
        if xml_item == None:
            create_new_update_entry( "added_new_item", f.full_path )
        else:
            attr_sz = xml_item.attributes["size"]
            if attr_sz:
                if long(attr_sz.value) != f.size:
                    Log.log(" + updating existing item with different size")
                    create_new_update_entry( "update_modified_item", f.full_path )
                    xml_album.removeChild( xml_item )
                    xml_item = None
        if xml_item != None:
            # item already indexed and unchanged, skip it
            continue
        g_total_processed_items = g_total_processed_items + 1
        xml_item = create_xml_item( f )
        xml_item.setAttribute( "add", current_date_time() )
        set_xml_item_size( xml_item, f )
        set_xml_item_dates( xml_item, f )
        if f.size != None:
            album_size = album_size + f.size
        set_xml_item_audio_info( xml_item, f )
        xml_album.appendChild( xml_item )
    return album_size
def __init__(self, configPath: str, dbPath: str = DEFAULT_DB_PATH):
    ### init folder structure
    self.resourcePath = RESOURCE_DIR
    if not exists(self.resourcePath):
        mkdir(self.resourcePath)

    ### load DB from file, creating a default one on first run
    if not exists(dbPath):
        with open(dbPath, 'w') as tmpFile:
            tmpFile.write(json.dumps(DB_DEFAULT_STRUCTURE))
    with open(dbPath, 'r') as dbFile:
        self.db = json.loads(dbFile.read())
    self.dbPath = dbPath
    Log(ErrorLevel.info, "db loaded")

    ### load config file (key=value lines)
    self.secrets = {}
    if not exists(configPath):
        Log(ErrorLevel.error, "config file path ({}) does not exist", configPath)
        sys.exit(1)
    with open(configPath) as configFile:
        for line in configFile:
            vals = line.strip().split('=', 1)
            self.secrets[vals[0].lower()] = vals[1]

    ### fill up request header with valid information from the config
    self.PullRequestHeader = PULL_REQUEST_HEADER_SKELETON
    self.PullRequestHeader["Cookie"] = self.secrets["cookie"]
    self.PullRequestHeader["User-Agent"] = self.secrets["useragent"]
    Log(ErrorLevel.info, "config loaded")

    ### reset params of request header
    self.resetParameters()

    ### load user manager that handles unique query logic
    self.queryManager = UserQueryManager(
        userFBID=self.secrets["uid"],
        cookie=self.secrets["cookie"],
        userAgent=self.secrets["useragent"])
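# The config file parsed above is a plain key=value file, one entry per line,
# with keys lowercased on load. A hypothetical example (all values are
# placeholders, not real credentials):
#
#   uid=100001234567890
#   cookie=datr=PLACEHOLDER; c_user=PLACEHOLDER; xs=PLACEHOLDER
#   useragent=Mozilla/5.0 (X11; Linux x86_64) Gecko/20100101 Firefox/60.0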
def getUserInfo(self, uid: str) -> Dict:
    infoBody = self.INFORMATION_REQUEST_BODY.copy()
    infoBody["ids[0]"] = uid
    response_obj = requests.post(INFORMATION_URL,
                                 data=infoBody,
                                 headers=self.JSON_POST_HEADERS)
    Log(ErrorLevel.debug, "raw query response: {}", response_obj.text)
    return self.getParsedUserInfo(response_obj.text)
def setUser(self, uid: str, state: str, intervalList: List[List]):
    newValue = intervalList[-1]
    # oldValue is computed only for the debug log below
    if (len(self.db[uid][state]) == 0) or (self.db[uid][state][0] != intervalList[-1][0]):
        oldValue = []
    else:
        oldValue = self.db[uid][state]
    Log(ErrorLevel.debug, "changing {}'s {} state from {} to {}",
        uid, state, oldValue, newValue)
    self.db[uid][state] = intervalList
def load_target_xml_in_memory( full_output_filename ):
    global xml_doc
    global xml_section
    global xml_updates
    global opt
    with open( full_output_filename, "r" ) as f:
        s = f.read()
    s = s.replace('\r\n', '')
    xml_doc = parseString( s )
    # check that the required nodes exist before indexing into the lists
    xml_upds_list = xml_doc.getElementsByTagName( "updates" )
    if not xml_upds_list:
        Log.log_error( "Oops! not found updates section node while loading xml" )
        exit(1)
    xml_updates = xml_upds_list[0]
    xml_sections_list = xml_doc.getElementsByTagName( "section" )
    if not xml_sections_list:
        Log.log_error( "Oops! not found section node while loading xml" )
        exit(1)
    xml_section = xml_sections_list[0]
    attr_id = xml_section.attributes[ "id" ]
    if attr_id != None:
        opt.section_id = attr_id.value
def initialize_target_xml( full_output_filename ):
    global opt
    # create target folder
    if not os.path.exists(opt.output_path):
        Log.log( "Creating folder: " + opt.output_path )
        os.makedirs(opt.output_path)
    # check if target file exists
    is_file = os.path.isfile( full_output_filename )
    # load the existing file into memory, otherwise start a fresh document
    if is_file:
        load_target_xml_in_memory( full_output_filename )
    else:
        create_target_xml_in_memory()
    # create initial folders if needed ( opt.source_path argument )
    create_xml_sources_entries()
def create_xml_sources_entries():
    global xml_source_parent
    global xml_root
    # get main <sources> section
    xml_source_parent = find_node( xml_doc, "sources" )
    if xml_source_parent == None:
        Log.log_error("Not found main <sources> xml section")
        exit(1)
    # iterate over each source path folder, descending into (or creating) its node
    for e in opt.source_root_path.split('/'):
        if not e:
            continue
        print "************ processing source: " + e
        nd = find_node_by_value( xml_source_parent, "source", "src", e )
        if nd == None:
            xml_source_parent = create_xml_source( xml_source_parent, e )
        else:
            xml_source_parent = nd
    # build last folder
    nd = find_node_by_value( xml_source_parent, "source", "src", opt.source_root_path_last_folder )
    if nd == None:
        xml_source_parent = create_xml_source( xml_source_parent, opt.source_root_path_last_folder )
    else:
        xml_source_parent = nd
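# For reference, the node types these helpers create and update, with the
# attributes set in this file. The nesting of <section>, <sources> and
# <updates> is defined by create_target_xml_in_memory (not shown here); folder,
# file names and attribute values below are invented:
#
#   <source src="rock" created="2018-01-01 12:00:00">
#     <album src="some-album" add="2018-01-01 12:00:00">
#       <item src="track01.mp3" add="2018-01-01 12:00:00" size="8123456"/>
#     </album>
#   </source>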
def init_config():
    """ Initialize all of the software configurations """
    cf = ConfigParser.ConfigParser()
    try:
        cf.read(CONFIG_FILE)
    except ConfigParser.Error:
        Log.e(TAG, "Open configuration file error!")
        return False
    try:
        SW_CONFIG['7zpath'] = cf.get("dir_config", "7zpath")
        SW_CONFIG['sharefolder'] = cf.get("dir_config", "sharefolder")
        SW_CONFIG['distpath'] = cf.get("dir_config", "distpath")
        SW_CONFIG['sw_version'] = cf.get("sw_config", "version")
        SW_CONFIG['startup'] = cf.get("sw_config", "startup")
    except ConfigParser.Error:
        Log.e(DEBUG_TAG, "Config file parse error!")
        clean()
        return False
    try:
        RUN_CONFIG['backup'] = (cf.get("run_config", "backup") == "True")
        RUN_CONFIG['pop'] = (cf.get("run_config", "pop") == "True")
    except ConfigParser.Error:
        Log.e(TAG, "no run config in config file!")
        RUN_CONFIG['backup'] = False
        RUN_CONFIG['pop'] = False
    try:
        for option in cf.options("hook_config"):
            HOOK_CONFIG[option] = cf.get("hook_config", option)
    except ConfigParser.Error:
        Log.e(TAG, "No hook config in config file!")
    if not os.path.exists(SW_CONFIG['sharefolder']):
        try:
            os.makedirs(SW_CONFIG['sharefolder'])
        except os.error:
            print "Can't create the share folder: " + SW_CONFIG['sharefolder'] + ", please set another one"
            clean()
            exit()
    if not os.path.exists(SW_CONFIG['distpath']):
        try:
            os.makedirs(SW_CONFIG['distpath'])
        except os.error:
            print "Can't create the local folder: " + SW_CONFIG['distpath'] + " temp directory!"
            clean()
            os.system("pause")
            exit()
    return True
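# A config file matching the sections read above might look like this (paths
# and values are placeholders, and the hook_config option name is hypothetical
# since any option in that section is accepted):
#
#   [dir_config]
#   7zpath = C:\Program Files\7-Zip\7z.exe
#   sharefolder = \\server\share
#   distpath = C:\dist
#
#   [sw_config]
#   version = 1.0.0
#   startup = True
#
#   [run_config]
#   backup = True
#   pop = False
#
#   [hook_config]
#   pre_build = hooks\pre_build.bat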
def save_xml( full_filename ):
    tot_sz_gb = "{0:.2f}".format( total_saved_albums_size / 1024.0 / 1024.0 / 1024.0 ) + " GB"
    Log.log("")
    Log.log( " . total albums found : " + str(total_saved_albums) )
    Log.log( " . total items found : " + str(g_total_processed_items) )
    Log.log( " . total size : " + tot_sz_gb + " ( " + str(total_saved_albums_size/1024.0/1024.0) + " MB )" )
    if opt.dry_run:
        return
    xml_section.setAttribute( "albums", str(total_saved_albums) )
    xml_section.setAttribute( "items", str(g_total_processed_items) )
    xml_section.setAttribute( "size", tot_sz_gb )
    xml_section.setAttribute( "update", current_date_time() )
    # write the document and close the file handle when done
    with open( full_filename, "wb" ) as f:
        xml_doc.writexml( f, indent=' ', addindent=' ', newl='\r\n' )
    Log.log( "\nData have been saved to the XML file: " + full_filename )
    # free the DOM tree
    xml_doc.unlink()
def getParsedPresenceInfo(rawResponse: str) -> Dict:
    result = {"isOnline": None}
    Log(ErrorLevel.debug, "query presence raw response: {}", rawResponse)
    responseObj = UserQueryManager.getParsedResponse(rawResponse)
    if (("payload" in responseObj)
            and (type(responseObj["payload"]) is dict)
            and ("availability" in responseObj["payload"])):
        availabilityObj = responseObj["payload"]["availability"]
        availabilityIDs = availabilityObj.keys()
        if len(availabilityIDs) == 1:
            availabilityValue = availabilityObj[next(iter(availabilityIDs))]
            result["isOnline"] = (availabilityValue != 0)
    return result
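# The response shape this parser accepts, inferred from the checks above
# (uid and availability value invented; any nonzero value counts as online):
#
#   {"payload": {"availability": {"100001234567890": 2}}}  ->  {"isOnline": True}
#   {"payload": {"availability": {"100001234567890": 0}}}  ->  {"isOnline": False}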
def processQueryResponse(self):
    # query presence only if the user's last status is older than the time delta
    # (3 mins); we don't want to look suspicious by querying every uid every time
    for uid in self.db.keys():
        if self.isUserStateOpenedButOld(uid, "online"):
            presenceData = self.queryManager.getPresence(uid)
            isOnline = presenceData["isOnline"]
            isActive = None
            isMobile = None
            if isOnline is False:
                isActive = False
                isMobile = False
            Log(ErrorLevel.debug, "query response: {} is {}", uid,
                ("online" if isOnline else "offline"))
            self.processByMatchingStates(
                self.createPresence(uid, GetTimeStamp(), isOnline, isActive, isMobile))
def processMessageContent(self, msContent: Dict):
    for msItem in msContent:
        itemType = msItem["type"]
        if itemType == "chatproxy-presence":
            self.processFriendStatusList(msItem)
        elif itemType == "buddylist_overlay":
            self.processUniqueFriendStatus(msItem)
        elif itemType == "t_tp":
            self.processPhoneInfo(msItem)
        elif itemType == "delta":
            self.processDelta(msItem)
        elif itemType == "typ":
            self.processTyping(msItem)
        elif itemType == "inbox":
            pass  # we are not interested in inbox info
        else:
            Log(ErrorLevel.debug, "unknown message type {}", itemType)
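# The same routing could be table-driven, keeping the type-to-handler mapping
# in one place. A sketch, not a drop-in replacement (it omits the inbox no-op
# and the unknown-type log):
#
#   handlers = {
#       "chatproxy-presence": self.processFriendStatusList,
#       "buddylist_overlay": self.processUniqueFriendStatus,
#       "t_tp": self.processPhoneInfo,
#       "delta": self.processDelta,
#       "typ": self.processTyping,
#   }
#   handler = handlers.get(msItem["type"])
#   if handler:
#       handler(msItem)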
def getToken(self) -> str:
    if hasattr(self, "token"):
        return self.token
    response_obj = requests.get(WEBSITE_URL,
                                headers=self.WEBSITE_REQUEST_HEADERS,
                                allow_redirects=True)
    # match the fb_dtsg attribute in the html with a regex
    matchTokenRegex = """name="fb_dtsg" ?value="([^\\"]+)"""
    m = re.search(matchTokenRegex, response_obj.text)
    if m:
        self.token = m.group(1)
        return self.token
    Log(ErrorLevel.error,
        "token is missing from fb main page or invalid data in config file")
    sys.exit(1)
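# The regex above targets a hidden form field in the fetched page, e.g.
# (token value invented for illustration):
#
#   <input type="hidden" name="fb_dtsg" value="AQExampleToken123">
#
# m.group(1) captures everything between the value quotes.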
def getRawFeedResponse(self) -> Dict:
    responseObj = None
    try:
        response_obj = requests.get(DEFAULT_PULL_URL,
                                    params=self.params,
                                    headers=self.PullRequestHeader)
        raw_response = response_obj.text
        if not raw_response:
            return None
        if raw_response.startswith(JSON_PAYLOAD_PREFIX):
            # strip the payload prefix before parsing
            responseObj = raw_response[len(JSON_PAYLOAD_PREFIX):].strip()
            responseObj = json.loads(responseObj)
        else:
            # if it didn't start with for (;;); then something weird is happening
            responseObj = json.loads(raw_response)
    except Exception:
        Log(ErrorLevel.warning, "error happened while requesting json: {}",
            sys.exc_info()[0])
    return responseObj
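# Example of the framing handled above (payload invented): the endpoint
# prefixes its JSON with "for (;;);", a common defence against JSON hijacking,
# so with JSON_PAYLOAD_PREFIX == "for (;;);":
#
#   raw_response = 'for (;;);{"seq": 4, "ms": []}'
#   raw_response[len(JSON_PAYLOAD_PREFIX):].strip()  ->  '{"seq": 4, "ms": []}'
#   json.loads(...)                                  ->  {"seq": 4, "ms": []}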
def main():
    globals.LOG_LEVEL = ErrorLevel.warning  # default log level
    cmdArgs = InitArguments()
    thread = Thread(target=StartPresenceMonitor, args=(cmdArgs, ))
    thread.start()
    # the while loop makes sure we wait for ^C; when it arrives we flip the
    # running flag to stop the monitoring thread
    try:
        while thread.is_alive():
            sleep(1)
    except KeyboardInterrupt:
        globals.RUN_PROGRAM = False
        Log(ErrorLevel.info,
            "please wait (and don't tap on CTRL-C) while the long-lived tcp connection ends")
    if thread.is_alive():
        thread.join()
    return 0
def processFeedResponse(self):
    # first we make a request to fb
    responseObj = self.getRawFeedResponse()
    # if it is empty there was a problem, reset the parameters and start over
    if responseObj is None:
        Log(ErrorLevel.error, "request error, restarting")
        self.resetParameters()
        return
    # lb_info tells us which pool/sticky values to echo back (load balancer bookkeeping)
    if "lb_info" in responseObj:
        self.params["sticky_pool"] = responseObj["lb_info"]["pool"]
        self.params["sticky_token"] = responseObj["lb_info"]["sticky"]
    # seq apparently isn't a tcp seq; we just echo it back
    if "seq" in responseObj:
        self.params["seq"] = responseObj["seq"]
    # "ms" contains the friend presence infos
    if "ms" in responseObj:
        self.processMessageContent(responseObj["ms"])
    else:
        Log(ErrorLevel.debug, "'ms' was not found in response. content: {}", responseObj)
def getParsedUserInfo(rawResponse: str) -> Dict:
    result = {
        "fullname": None,
        "thumbnailURL": None,
    }
    responseObj = UserQueryManager.getParsedResponse(rawResponse)
    if (("payload" in responseObj)
            and (type(responseObj["payload"]) is dict)
            and ("profiles" in responseObj["payload"])):
        profilesObj = responseObj["payload"]["profiles"]
        profileIDs = profilesObj.keys()
        if len(profileIDs) == 1:
            profileValue = profilesObj[next(iter(profileIDs))]
            result = UserQueryManager.getParsedOneUserInfo(profileValue, result)
        else:
            result = UserQueryManager.getParsedAllUserInfo(profilesObj, result)
    else:
        Log(ErrorLevel.warning, "unexpected user info: {}", responseObj)
    return result
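# Expected response shape, per the checks above (id invented; the per-profile
# fields are parsed by getParsedOneUserInfo / getParsedAllUserInfo, not shown
# here):
#
#   {"payload": {"profiles": {"100001234567890": { ...profile fields... }}}}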
def StartPresenceMonitor(args: Namespace):
    if args.log is not None:
        globals.LOG_LEVEL = int(args.log[0])
    args.config = args.config[0]
    if args.db is None:
        pm = PresenceMonitor(args.config)
    else:
        pm = PresenceMonitor(args.config, args.db)
    counter = 0
    saveCount = 10  # db save frequency (save on every 10th iteration)
    sleepTime = 2   # sleep between two monitor actions
    globals.RUN_PROGRAM = True
    while globals.RUN_PROGRAM:
        try:
            pm.query()
            counter = (counter + 1) % saveCount
            if counter == 0:
                pm.saveDB()
            sleep(sleepTime)
        except Exception:
            Log(ErrorLevel.warning, "{}", format_exc())
            pm.resetParameters()
    pm.saveAll()
def process_subfolder_data( subfolder, children_folders, items_info_type_set ):
    work_folder = opt.source_root_path + "/" + subfolder
    since_date_localtime = time.strptime( opt.since_date, "%Y-%m-%d" )
    Log.log( "processing: " + subfolder + "...", opt.verbose_mode )
    # store subfolders, items and commented items
    try:
        for ss in os.listdir(work_folder):
            f_full = work_folder + "/" + ss
            if os.path.isdir( f_full ):
                children_folders.add( subfolder + ss )
            # omit items modified before the since-date argument
            if time.localtime( os.path.getmtime( f_full ) ) < since_date_localtime:
                continue
            for t in supported_audio_file_types:
                if fnmatch.fnmatch( ss, "*." + t ):
                    items_info_type_set.add( item_type_info( work_folder, ss ) )
                    break
            for t in supported_comment_file_types:
                if fnmatch.fnmatch( ss, "*." + t ):
                    items_info_type_set.add( item_type_info( work_folder, ss, True ) )
                    break
    except OSError as e:
        Log.log_error( "Iterating over : " + work_folder + "\nException:\n" + repr(e) )
        raise
    except Exception as e:
        Log.log_error( "Unknown exception when iterating over: " + work_folder + "\nException:\n" + repr(e) )
        raise
def traverse_folder( subfolder, first_time = False ):
    global total_saved_albums
    global total_saved_albums_size
    global xml_source_parent

    work_folder = FileManager.concat_folder( opt.source_root_path, subfolder )
    xml_previous_source_parent = xml_source_parent

    # creates xml source entry ( parent )
    last_subfolder = os.path.basename( os.path.normpath( subfolder ) )
    nd_folder = find_node_by_value( xml_source_parent, "source", "src", last_subfolder )
    if nd_folder != None:
        xml_source_parent = nd_folder
    if first_time == False and node_attribute( xml_source_parent, "src" ) != last_subfolder:
        xml_source_parent = create_xml_source( xml_source_parent, last_subfolder )

    print( "\n** entering traverse_folder:" )
    print( "   subfolder : " + subfolder )
    print( "   last_subfolder : " + last_subfolder )
    print( "   xml_source_node: " + str(node_attribute( xml_source_parent, "src" )) )
    print( "   xml_previous_source_node: " + str(node_attribute( xml_previous_source_parent, "src" )) )

    subfolder = subfolder + "/"

    children_folders = set()
    items_type_info_set = set()
    try:
        # get items from subfolder
        process_subfolder_data( subfolder, children_folders, items_type_info_set )

        # by default do not create albums where all items are comments
        all_comments = True
        for i in items_type_info_set:
            if i.is_comment != True:
                all_comments = False
                break

        if all_comments != True and items_type_info_set:
            # look for an already indexed album with the same source folder
            xml_album = find_node_by_value( xml_source_parent, "album", "src", last_subfolder )
            if xml_album == None:
                create_new_update_entry( "added_new_album", last_subfolder )
                xml_album = xml_doc.createElement( "album" )
                xml_album.setAttribute( "src", last_subfolder )
                xml_album.setAttribute( "add", current_date_time() )
                xml_source_parent.appendChild( xml_album )
                total_saved_albums = total_saved_albums + 1
                core.utils.log_album( last_subfolder )
            # process songs
            album_size = process_album_items( items_type_info_set, xml_album )
            set_xml_album_size( xml_album, album_size )
            total_saved_albums_size = total_saved_albums_size + album_size

        # iterate over the rest of subfolders
        for ss in children_folders:
            traverse_folder( ss )

        xml_source_parent = xml_previous_source_parent

        print( "\n******** END traverse_folder:" )
        print( "   subfolder : " + subfolder )
        print( "   last_subfolder : " + last_subfolder )
        print( "   xml_source_node: " + str(node_attribute( xml_source_parent, "src" )) )
        print( "   xml_previous_source_node: " + str(node_attribute( xml_previous_source_parent, "src" )) )
    except Exception as e:
        Log.log_error( "Something wrong in traverse_folder.\nException:\n" + repr(e) )
        raise
def show_settings_info( title, request_user_confirm = False ):
    str_mode = None
    if opt.dry_run:
        str_mode = "dry run"
    if str_mode == None:
        str_mode = "normal"
    Log.log("")
    Log.log(" " + title )
    Log.log("")
    Log.log(" - source root_path : " + str(opt.source_root_path) )
    Log.log(" - source starting folder : " + str(opt.source_root_path_last_folder) )
    Log.log(" - output file : " + full_output_filename )
    Log.log(" - section id : " + str(opt.section_id) )
    Log.log(" - since date : " + opt.since_date )
    Log.log(" - supported audio file types : " + str(supported_audio_file_types) )
    Log.log(" - supported comment file types : " + str(supported_comment_file_types) )
    Log.log(" - verbose : " + str(opt.verbose_mode) )
    Log.log(" - dry_run mode : " + str(opt.dry_run) )
    Log.log("")
    Log.log(" Notes:")
    Log.log("  * folders/albums with no audio files will not be added by default" )
    Log.log("  * commented files with characters '-' in their name will be marked with '***' and replaced with ' '" )
    Log.log("")
    Log.log(" - working mode : " + str_mode )
    if request_user_confirm:
        resp = raw_input( "\nContinue with these settings (Y/n) ?" )
        if resp == "n" or resp == "N":
            Log.log( "\nOperation canceled by the user" )
            exit(0)
def init():
    """ Module initialize """
    init_config()
    Log.d(TAG, "modules init!")