def get_next_episode(file_path=None):
    redis = redis_connect()
    if file_path is not None:
        file_path = utils.base64_encode(file_path)

    episode = get_current_episode(file_path)
    while is_episode_over(episode):
        current_tv_show_name_b64 = redis_circular_list.current(
            redis, "STATE.USB_STORAGE.LIBRARY.TV_SHOWS")
        current_tv_show_name = utils.base64_decode(current_tv_show_name_b64)
        file_path_b64 = redis_circular_list.next(
            redis,
            f"STATE.USB_STORAGE.LIBRARY.TV_SHOWS.{current_tv_show_name_b64}")
        print(file_path_b64)
        episode = get_current_episode(file_path_b64)

    return episode
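Both this example and Example #5 rely on a redis_circular_list helper that isn't shown. A minimal sketch of what it plausibly looks like, assuming the list is a plain Redis list paired with a ".INDEX" cursor key (an assumption based on the key names above, not the project's actual module):

def current(redis, key):
    # hypothetical: read the element at the stored cursor position
    index = int(redis.get(f"{key}.INDEX") or 0)
    return redis.lrange(key, index, index)[0]

def next(redis, key):
    # hypothetical: advance the cursor, wrapping around at the end
    index = int(redis.get(f"{key}.INDEX") or 0)
    index = (index + 1) % redis.llen(key)
    redis.set(f"{key}.INDEX", index)
    return redis.lrange(key, index, index)[0]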
Example #2
def data_cleaning(can_be_used):
    vmess_list = []

    for index, subscribe_vmess in enumerate(can_be_used, start=1):
        try:
            # a vmess:// link wraps base64-encoded JSON; decode, rename, re-encode
            v = json.loads(
                utils.base64_decode(subscribe_vmess.url.replace(
                    "vmess://", "")))
            v["ps"] = "{}-{}".format(index, title_service.get())
            # serialize back to JSON before re-encoding
            vmess_list.append("vmess://" + utils.base64_encode(json.dumps(v)))
        except Exception:
            logger.error("err: {}".format(traceback.format_exc()))
            # keep the original link if it cannot be parsed
            vmess_list.append(subscribe_vmess.url)

    return vmess_list
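To make the transformation concrete, a hedged round-trip for a single link (all values invented): a vmess:// URL wraps base64-encoded JSON, and only the "ps" (remark) field is rewritten.

link = "vmess://" + utils.base64_encode(json.dumps({"ps": "old-name", "add": "1.2.3.4"}))
decoded = json.loads(utils.base64_decode(link.replace("vmess://", "")))
decoded["ps"] = "1-renamed"  # only the remark changes
renamed = "vmess://" + utils.base64_encode(json.dumps(decoded))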
Example #3
    def detect_faces(self, *args, **kwargs):
        """Detect faces in an image.

        The interface form hasn't been decided yet, so the generic
        *args/**kwargs form is used here.
        :param args: args[0] is a DetectFacesRequest object
        :param kwargs: temporarily unused, reserved for later
        :return: DetectFacesResult object
        """
        if not isinstance(args[0], DetectFacesRequest):
            raise TypeError(
                "The first argument must be a DetectFacesRequest object!")
        # deep-copy for now so the caller's image content is not mutated
        detect_faces_request = copy.deepcopy(args[0])
        image_detect_request = ImageDetectRequest()
        image_detect_request.set_detect_faces_request(detect_faces_request)
        # if only a URI is specified, the content will be None
        if image_detect_request.detectFacesRequest is not None \
                and image_detect_request.detectFacesRequest.image is not None:
            image = image_detect_request.detectFacesRequest.image
            self.__check_parameter(image)
            if image.content is not None:
                image_detect_request.detectFacesRequest.image.content = \
                    utils.base64_encode(
                        image_detect_request.detectFacesRequest.image.content)
        params = utils.obj2json(image_detect_request)
        headers = utils.auth_headers(self.__method, self.__uri,
                                     self.__set_headers(), self.__credential)
        http_conf = {
            "method": self.__method,
            "host": self.__host,
            "port": self.__port,
            "resource": self.IMAGE_DETECT_RESOURCE,
            "timeout": configs.DEFAULT_CLIENT_TIMEOUT
        }
        response = httpclient.execute_http_request(http_conf, params, headers)
        try:
            result = self.__result2obj(response)
            if result is None:
                raise VisionException(
                    errMsg="an error occurred, the response is None!")
            return result
        except VisionException as ve:
            print(ve)
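A hypothetical call site for the method above; the client object, the Image type, and the content attribute are inferred from the code, not taken from the real SDK:

request = DetectFacesRequest()
request.image = Image()  # hypothetical image type
with open("face.jpg", "rb") as f:
    request.image.content = f.read()  # raw bytes; detect_faces base64-encodes them
result = client.detect_faces(request)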
Example #4
    def client_handshake(self):
        # get_random_bytes(32) always returns exactly 32 bytes,
        # so this loop runs once; kept as a defensive length check
        self.session_key = ""
        while len(self.session_key) != 32:
            self.session_key = get_random_bytes(32)

        session_key_encrypted = PKCS1_OAEP.new(self.key).encrypt(
            self.session_key)
        session_key_b64 = utils.base64_encode(session_key_encrypted)

        self.locked_socket.send("session:{}".format(session_key_b64))

        try:
            handshake_server_verify = self.recv()
        except Exception:
            raise HandshakeError

        if handshake_server_verify != "handshake_verify":
            raise HandshakeError

        try:
            self.send("handshake_verify2")
        except Exception:
            raise HandshakeError
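The matching server side is not shown. A hypothetical mirror under the same framing, assuming PyCryptodome and an RSA private key on the server (method and attribute names invented; requires import base64):

    def server_handshake(self):
        message = self.recv()  # expected: "session:<base64 key>"
        if not message.startswith("session:"):
            raise HandshakeError
        encrypted = base64.b64decode(message[len("session:"):])
        # self.private_key is an assumed PyCryptodome RSA key object
        self.session_key = PKCS1_OAEP.new(self.private_key).decrypt(encrypted)
        self.send("handshake_verify")
        if self.recv() != "handshake_verify2":
            raise HandshakeError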
Example #5
	def rebuild_tv_shows( self ):
		print( "Rebuilding TV Shows" )
		self.redis.delete( "STATE.USB_STORAGE.LIBRARY.TV_SHOWS" )
		for key in self.redis.scan_iter( "STATE.USB_STORAGE.LIBRARY.TV_SHOWS.*" ):
			self.redis.delete( key )
		# Testing Delete All via CLI
		# redis-cli -n 1 --raw keys "STATE.USB_STORAGE.LIBRARY.*" | xargs redis-cli -n 1 del
		print( self.paths["tv_shows"] )
		tv_shows = self.scan_posix_path( self.paths["tv_shows"] )
		tv_shows_map = {}
		for index , posix_episode in enumerate( tv_shows ):
			print( str( posix_episode ) )
			items = str( posix_episode ).split( str( self.paths["tv_shows"] ) + "/" )[ 1 ].split( "/" )
			if len( items ) == 3:
				show_name = items[ 0 ]
				season_name = items[ 1 ]
				episode_name = items[ 2 ]
			elif len( items ) == 2:
				# no season folder on disk; fall back to the default season
				# (assumed fix; indexing items[ 2 ] here would raise IndexError)
				show_name = items[ 0 ]
				season_name = "000"
				episode_name = items[ 1 ]
			elif len( items ) == 1:
				show_name = "SINGLES"
				season_name = "000"
				episode_name = items[ 0 ]
			else:
				print( "unexpected path depth:" )
				print( items )
				continue

			# names are base64-encoded so arbitrary characters survive inside Redis keys
			show_name_b64 = utils.base64_encode( show_name )
			season_name_b64 = utils.base64_encode( season_name )
			episode_name_b64 = utils.base64_encode( episode_name )
			if show_name_b64 not in tv_shows_map:
				tv_shows_map[ show_name_b64 ] = {}
			if season_name_b64 not in tv_shows_map[ show_name_b64 ]:
				tv_shows_map[ show_name_b64 ][ season_name_b64 ] = []
			tv_shows_map[ show_name_b64 ][ season_name_b64 ].append( episode_name_b64 )

		# Re-sort everything after decoding to double-check the ordering.
		# glob already returns sorted paths, so this is redundant, as are
		# the repeated encode/decode round-trips, but both are harmless.
		tv_shows_map_organized = {}
		show_names_b64 = tv_shows_map.keys()
		show_names = [ utils.base64_decode( x ) for x in show_names_b64 ]
		show_names.sort()
		for index , show_name in enumerate( show_names ):
			season_names_b64 = tv_shows_map[ utils.base64_encode( show_name ) ].keys()
			tv_shows_map_organized[ show_name ] = [ utils.base64_decode( x ) for x in season_names_b64 ]
			tv_shows_map_organized[ show_name ].sort()
			for season_index , season in enumerate( tv_shows_map_organized[ show_name ] ):
				episode_names_b64 = tv_shows_map[ utils.base64_encode( show_name ) ][ utils.base64_encode( season ) ]
				episode_names = [ utils.base64_decode( x ) for x in episode_names_b64 ]
				episode_names.sort()
				tv_shows_map_organized[ show_name ][ season_index ] = episode_names

		# Finally Store into Redis
		#pprint( tv_shows_map_organized )

		# 1.) Store All Show Names into Circular List
		show_keys = tv_shows_map_organized.keys()
		show_names_b64 = [ utils.base64_encode( x ) for x in show_keys ]
		for x in show_names_b64:
			self.redis.rpush( "STATE.USB_STORAGE.LIBRARY.TV_SHOWS" , x )
		self.redis.set( "STATE.USB_STORAGE.LIBRARY.TV_SHOWS.INDEX" , 0 )

		# 2.) Store All Episodes into Giant List
		for show_index , show in enumerate( show_keys ):
			list_key = f"STATE.USB_STORAGE.LIBRARY.TV_SHOWS.{show_names_b64[show_index]}"
			for season_index , season in enumerate( tv_shows_map_organized[ show ] ):
				for episode_index , episode in enumerate( tv_shows_map_organized[ show ][ season_index ] ):
					final_path = str( self.paths["tv_shows"].joinpath( show , str( season_index + 1 ).zfill( 2 ) , episode ) )
					self.redis.rpush( list_key , utils.base64_encode( final_path ) )
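scan_posix_path is not shown either; a minimal sketch, assuming it simply yields every file under the root in sorted order (requires import pathlib):

	def scan_posix_path( self , root ):
		# assumed behavior: all files below root, sorted like glob
		return sorted( p for p in pathlib.Path( root ).rglob( "*" ) if p.is_file() )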
Example #6
def serialize(data, delimiter=':', text_encoding="utf-8"):
    # utils.base64_encode is assumed to encode each positional argument
    # and return an iterable of encoded strings, which are then joined
    return delimiter.join(utils.base64_encode(
        data.encryption_hash_salt, data.storage_hash, data.storage_hash_salt,
        data.iv, data.data_length,
        text_encoding=text_encoding))
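A hypothetical inverse, assuming utils.base64_decode mirrors the encoder field by field:

def deserialize(blob, delimiter=':', text_encoding="utf-8"):
    # returns the five fields in the same order serialize() joined them
    return tuple(utils.base64_decode(part, text_encoding=text_encoding)
                 for part in blob.split(delimiter))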
Example #7
def subscription():
    def get_new_db():
        return (global_variable.get_db().query(SubscribeVmss)
                .filter(SubscribeVmss.death_count >= 0)
                # "is None" is evaluated in Python, not in SQL; use .is_(None)
                .filter(or_(SubscribeVmss.is_closed == False,
                            SubscribeVmss.is_closed.is_(None)))
                .filter(SubscribeVmss.speed_youtube > 0)
                .filter(SubscribeVmss.network_delay_youtube > 0)
                .filter(SubscribeVmss.speed_internet > 0)
                .filter(SubscribeVmss.network_delay_internet > 0)
                .filter(SubscribeVmss.speed_google > 0)
                .filter(SubscribeVmss.network_delay_google > 0)
                .filter(SubscribeVmss.next_at > 0))

    can_be_used = []

    req = VariableManager(request.args)

    auto_select = req.get_conf_bool("auto", False)
    limit = req.get_conf_int("limit", default=20)

    if auto_select:
        new_db = get_new_db().order_by(
            SubscribeVmss.network_delay_google.desc())

        if limit > 0:
            new_db = new_db.limit(limit * 2)  # Query.limit returns a new query; assign it

        low_delay_list = new_db.all()

        fast_list = get_new_db().order_by(
            SubscribeVmss.speed_internet.desc()).all()

        low_delay_id_list = []
        for node in low_delay_list:
            low_delay_id_list.append(node.id)

        for node in fast_list:
            if node.id in low_delay_id_list:
                can_be_used.append(node)
            if len(can_be_used) >= limit > 0:
                break
    else:
        subscription_site = req.get_conf_str("site", default="google")
        subscription_type = req.get_conf_str("type", default="delayed")

        new_db = get_new_db()

        if subscription_site == "youtube":
            if subscription_type == "speed":
                new_db = new_db.order_by(
                    SubscribeVmss.speed_youtube.desc()).order_by(
                        SubscribeVmss.network_delay_youtube.desc())
            else:
                new_db = new_db.order_by(
                    SubscribeVmss.network_delay_youtube.desc()).order_by(
                        SubscribeVmss.speed_youtube.desc())
        elif subscription_site == "internet":
            if subscription_type == "speed":
                new_db = new_db.order_by(
                    SubscribeVmss.speed_internet.desc()).order_by(
                        SubscribeVmss.network_delay_internet.desc())
        else:
            new_db = new_db.order_by(
                SubscribeVmss.network_delay_internet.desc()).order_by(
                    SubscribeVmss.speed_internet.desc())
        else:
            if subscription_type == "speed":
                new_db = new_db.order_by(
                    SubscribeVmss.speed_google.desc()).order_by(
                        SubscribeVmss.network_delay_google.desc())
            else:
                new_db = new_db.order_by(
                    SubscribeVmss.network_delay_google.desc()).order_by(
                        SubscribeVmss.speed_google.desc())

        network_protocol_type = req.get_conf("network_type")

        if network_protocol_type is not None:
            new_db = new_db.filter(
                SubscribeVmss.network_protocol_type == network_protocol_type)

        logger.debug("executing SQL: {}".format(str(new_db)))

        if limit > 0:
            new_db = new_db.limit(limit)

        can_be_used = new_db.all()

    return utils.base64_encode("\n".join(data_cleaning(can_be_used)))
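On the consuming side the final encode is simply reversed; a sketch assuming response_body is a placeholder for this endpoint's output:

links = utils.base64_decode(response_body).splitlines()  # vmess:// links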
Example #8
def test_encode():
    assert base64_encode('hola') == 'aG9sYQ=='
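The test pins down the contract. A plausible implementation of the utils helpers used throughout these examples (an assumption, not the project's actual code):

import base64

def base64_encode(value, text_encoding="utf-8"):
    # str -> base64 str, e.g. 'hola' -> 'aG9sYQ=='
    return base64.b64encode(value.encode(text_encoding)).decode("ascii")

def base64_decode(value, text_encoding="utf-8"):
    return base64.b64decode(value).decode(text_encoding)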
Example #9
# You must initialize logging, otherwise you won't see any debug output.
# logging.basicConfig()
# logging.getLogger().setLevel(logging.DEBUG)
# requests_log = logging.getLogger("requests.packages.urllib3")
# requests_log.setLevel(logging.DEBUG)
# requests_log.propagate = True
############################################################################################################


PING_BASE_URL = "https://directory-api.pingone.com"
API_DIR = "/api/directory"
USER_ENDPOINT = PING_BASE_URL + API_DIR + "/user"
GROUP_ENDPOINT = PING_BASE_URL + API_DIR + "/group"
SCHEMAS = "urn:scim:schemas:core:1.0"

auth_header = utils.base64_encode(utils.secret_parser("ping.clientid") + ":" + utils.secret_parser("ping.apikey"))

ping_headers = {"Content-Type": "application/json", "Accept": "application/json",
                "Authorization": "Basic " + auth_header}


def bulk_get_users():
    """Get all users. Handy if you want to build a list in order to delete them all.

    :return: JSON HTTP response
    """
    response = requests.get(USER_ENDPOINT, headers=ping_headers)
    return response.json()
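A hypothetical companion write call under the same conventions; the SCIM 1.0 payload shape is an assumption, not PingOne's documented schema (requires import json):

def create_user(user_name):
    payload = {"schemas": [SCHEMAS], "userName": user_name}  # assumed minimal shape
    response = requests.post(USER_ENDPOINT, headers=ping_headers,
                             data=json.dumps(payload))
    return response.json()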
Example #10
def inline_query_handler(update: telegram.Update,
                         context: telegram.ext.CallbackContext) -> None:
    inline_query = update.inline_query

    if inline_query is None:
        return

    bot = context.bot

    user = inline_query.from_user

    create_or_update_user(bot, user)

    query = None

    if not cli_args.fragment:
        if cli_args.query:
            query = cli_args.query
        else:
            query = inline_query.query

        if not query:
            analytics_handler.track(context,
                                    analytics.AnalyticsType.EMPTY_QUERY, user)

            return

    if not cli_args.server:
        user_identification = f'#{user.id}'
        user_name = None

        if user.first_name and user.last_name:
            user_name = f'{user.first_name} {user.last_name}'
        elif user.first_name:
            user_name = user.first_name
        elif user.last_name:
            user_name = user.last_name

        if user_name:
            user_identification += f': {user_name}'

        if user.username:
            user_identification += f' (@{user.username})'

        user_identification += ':'

        logger.info(f'{user_identification} {query}')

    links_toggle = False

    (definitions,
     offset) = utils.get_query_definitions(update, context, query,
                                           links_toggle, analytics_handler,
                                           cli_args, BOT_NAME)

    definitions_count = len(definitions)

    no_results_text = None
    no_results_parameter = None

    if definitions_count == 0:
        no_results_text = 'Niciun rezultat'  # Romanian for "No results"
        no_results_parameter = (utils.base64_encode(query)
                                if query is not None else '')
    else:
        definitions = definitions[:telegram.constants.MAX_INLINE_QUERY_RESULTS]

    cache_time = int(constants.RESULTS_CACHE_TIME.total_seconds())

    if cli_args.debug:
        cache_time = 0

    next_offset = None

    if definitions_count > len(definitions):
        next_offset = str(offset + telegram.constants.MAX_INLINE_QUERY_RESULTS)

    definitions_results = list(
        map(utils.get_inline_query_definition_result, definitions))

    inline_query.answer(results=definitions_results,
                        cache_time=cache_time,
                        next_offset=next_offset,
                        switch_pm_text=no_results_text,
                        switch_pm_parameter=no_results_parameter)
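switch_pm_parameter round-trips through the bot's /start deep link, so the other end decodes it back; a hypothetical handler (name and reply text invented):

def start_command_handler(update: telegram.Update,
                          context: telegram.ext.CallbackContext) -> None:
    # context.args[0] carries the base64 payload set above
    if context.args:
        query = utils.base64_decode(context.args[0])
        update.message.reply_text(f'No results were found for "{query}".')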