def check_url(url):
    '''
    Verify the url before fetching the image.

    Sends a HEAD request and accepts the url only when the response has a
    2xx status, does not redirect, and advertises a Content-Length large
    enough to plausibly be a usable image.

    :param url: url string
    :return: True if the url looks usable, False otherwise
    '''
    try:
        timeout = Timeout(connect=1.0, read=1.0)
        conn = PoolManager()
        try:
            response = conn.request('HEAD', url, timeout=timeout)
            status = int(response.status)

            # Check the HTTP response status code and whether the url
            # redirects to another page; get_redirect_location() returns
            # False for non-redirect status codes.
            if 200 <= status <= 208 and response.get_redirect_location() is False:
                # Check the content length of the response (if present) to
                # verify whether the url contains an image which can be used
                # in scene creation; > 2100 bytes filters out placeholders.
                content_length = int(response.headers.get('Content-Length', 0))
                if content_length > 2100:
                    return True
            return False
        finally:
            # Always release pooled connections, even when the request raises.
            conn.clear()
    except Exception:
        return False
Example #2
0
async def covid(event):
    """Fetch current COVID-19 statistics and edit the message with them.

    Queries an external JSON endpoint for worldwide and Turkey-specific
    case/death/recovery counts; on any failure, replaces the triggering
    message with a generic (Turkish) error text instead.
    """
    url = 'http://67.158.54.51/corona.php'
    http = PoolManager()
    try:
        request = http.request('GET', url)
        result = jsloads(request.data.decode('utf-8'))
    except Exception:
        # Network or JSON failure: report a generic error to the user.
        await event.edit("`Bir hata oluştu.`")
        return
    finally:
        # Release pooled connections on both success and failure paths.
        http.clear()

    sonuclar = ("** Koronavirüs Verileri **\n" +
                "\n**Dünya geneli**\n" +
                f"**🌎 Vaka:** `{result['tum']}`\n" +
                f"**🌎 Ölüm:** `{result['tumolum']}`\n" +
                f"**🌎 İyileşen:** `{result['tumk']}`\n" +
                "\n**Türkiye**\n" +
                f"**🇹🇷 Vaka (toplam):** `{result['trtum']}`\n" +
                f"**🇹🇷 Vaka (bugün):** `{result['trbtum']}`\n" +
                f"**🇹🇷 Vaka (aktif):** `{result['tra']}`\n" +
                f"**🇹🇷 Ölüm (toplam):** `{result['trolum']}`\n" +
                f"**🇹🇷 Ölüm (bugün):** `{result['trbolum']}`\n" +
                f"**🇹🇷 İyileşen:** `{result['trk']}`")

    await event.edit(sonuclar)
Example #3
0
async def parseqr(qr_e):
    """ .decode command: read the QR code / barcode content from the replied photo """
    downloaded_file_name = await qr_e.client.download_media(
        await qr_e.get_reply_message())

    # Parse the official ZXing web page to decode the QR code.
    # Read via a context manager so the file handle is never leaked.
    with open(downloaded_file_name, 'rb') as fp:
        files = {'f': fp.read()}
    t_response = None

    try:
        http = PoolManager()
        try:
            t_response = http.request('POST',
                                      "https://zxing.org/w/decode",
                                      fields=files)
            t_response = t_response.data
        finally:
            # Release pooled connections even when the upload fails.
            http.clear()
    except Exception:
        # Best-effort: a failed upload falls through to the error message.
        pass

    os.remove(downloaded_file_name)
    if not t_response:
        await qr_e.edit("decode başarısız oldu.")
        return
    soup = BeautifulSoup(t_response, "html.parser")
    qr_contents = soup.find_all("pre")[0].text
    await qr_e.edit(qr_contents)
Example #4
0
async def parseqr(qr_e):
    """ .decode command: read the QR code / barcode content from the replied photo """
    downloaded_file_name = await qr_e.client.download_media(
        await qr_e.get_reply_message())

    # Parse the ZXing web page to decode the QR code.
    # Read via a context manager so the file handle is never leaked.
    with open(downloaded_file_name, 'rb') as fp:
        files = {'f': fp.read()}
    t_response = None

    try:
        http = PoolManager()
        try:
            t_response = http.request(
                'POST', "https://zxing.org/w/decode", fields=files)
            t_response = t_response.data
        finally:
            # Release pooled connections even when the upload fails.
            http.clear()
    except Exception:
        # Best-effort: a failed upload falls through to the error message.
        pass

    os.remove(downloaded_file_name)
    if not t_response:
        await qr_e.edit(LANG['ERROR'])
        return
    soup = BeautifulSoup(t_response, "html.parser")
    qr_contents = soup.find_all("pre")[0].text
    await qr_e.edit(qr_contents)
Example #5
0
async def covid(event):
    """Fetch current COVID-19 statistics and edit the message with them.

    Same contract as the sibling covid() handlers: query the JSON endpoint,
    render worldwide and Turkey-specific counts, or report a generic error.
    """
    url = 'https://quiec.tech/corona.php'
    http = PoolManager()
    try:
        request = http.request('GET', url)
        result = jsloads(request.data.decode('utf-8'))
    except Exception:
        # Fixed mojibake ("oluÅŸtu" was a UTF-8/Latin-1 double-encoding of
        # "oluştu"); now matches the sibling handler's error text.
        await event.edit("`Bir hata oluştu.`")
        return
    finally:
        # Release pooled connections on both success and failure paths.
        http.clear()

    sonuclar = ("** Koronavirüs Verileri **\n" +
                "\n**Dünya geneli**\n" +
                f"**🌎 Vaka:** `{result['tum']}`\n" +
                f"**🌎 Ölüm:** `{result['tumolum']}`\n" +
                f"**🌎 İyileşen:** `{result['tumk']}`\n" +
                "\n**Türkiye**\n" +
                f"**🇹🇷 Vaka (toplam):** `{result['trtum']}`\n" +
                f"**🇹🇷 Vaka (bugün):** `{result['trbtum']}`\n" +
                f"**🇹🇷 Vaka (aktif):** `{result['tra']}`\n" +
                f"**🇹🇷 Ölüm (toplam):** `{result['trolum']}`\n" +
                f"**🇹🇷 Ölüm (bugün):** `{result['trbolum']}`\n" +
                f"**🇹🇷 İyileşen:** `{result['trk']}`")

    await event.edit(sonuclar)
Example #6
0
 def get_gesources(self, url):
     """Download a font file from *url* and return its best character map.

     :param url: location of the TTF font resource
     :return: the cmap dict produced by TTFont.getBestCmap()
     """
     manager = PoolManager(10)
     # Suppress urllib3 TLS warnings for unverified requests.
     urllib3.disable_warnings()
     try:
         ttf = manager.request('GET', url)
         # Use the public `.data` accessor instead of the private `_body`.
         font = TTFont(BytesIO(ttf.data))
         try:
             camp = font.getBestCmap()
         finally:
             font.close()
     finally:
         # Release pooled connections even if the download or parse fails.
         manager.clear()
     return camp
def parseqr(message):
    """Decode a QR code / barcode from a replied photo, static sticker or image document.

    Downloads the replied media, posts it to the ZXing decode service and
    edits the triggering message with the decoded contents (or an error).
    """
    reply = message.reply_to_message
    # Only still images are decodable: photos, non-animated stickers,
    # and documents whose mime type is image/*.
    if (reply and reply.media and
        (reply.photo or (reply.sticker and not reply.sticker.is_animated) or
         (reply.document and 'image' in reply.document.mime_type))):
        edit(message, f'`{get_translation("processing")}`')
    else:
        edit(message, f'`{get_translation("wrongCommand")}`')
        return

    output = download_media_wc(reply, f'{get_download_dir()}/decode.png')

    if reply.sticker and not reply.sticker.is_animated:
        # Re-save stickers (usually webp) as PNG so ZXing can read them.
        image = Image.open(output)
        output = f'{get_download_dir()}/decode.png'
        image.save(output)

    t_response = None

    try:
        # Read and close the file before the network call so the handle is
        # never leaked (the original only closed it on a successful upload).
        with open(output, 'rb') as dw:
            files = {'f': dw.read()}
        http = PoolManager()
        try:
            t_response = http.request('POST',
                                      'https://zxing.org/w/decode',
                                      fields=files)
            t_response = t_response.data
        finally:
            http.clear()
    except Exception:
        # Best-effort; also no longer swallows KeyboardInterrupt/SystemExit.
        pass

    remove(output)
    if not t_response:
        edit(message, f'`{get_translation("decodeFail")}`')
        return
    try:
        soup = BeautifulSoup(t_response, 'html.parser')
        qr_contents = soup.find_all('pre')[0].text
        edit(message, qr_contents)
    except Exception:
        edit(message, f'`{get_translation("decodeFail")}`')
def parseqr(message):
    """Decode a QR code / barcode from a replied photo, sticker or image document.

    Downloads the replied media, posts it to the ZXing decode service and
    edits the triggering message with the decoded contents (or an error).
    """
    reply = message.reply_to_message
    if not reply:
        return edit(message, f'`{get_translation("wrongCommand")}`')

    if not (reply.photo or reply.sticker or
            (reply.document and 'image' in reply.document.mime_type)):
        edit(message, f'`{get_translation("wrongCommand")}`')
        return

    downloaded_file_name = download_media_wc(reply)

    t_response = None

    try:
        # Read and close the file before the network call so the handle is
        # never leaked (the original only closed it on a successful upload).
        with open(downloaded_file_name, 'rb') as dw:
            files = {'f': dw.read()}
        http = PoolManager()
        try:
            t_response = http.request('POST',
                                      'https://zxing.org/w/decode',
                                      fields=files)
            t_response = t_response.data
        finally:
            http.clear()
    except Exception:
        # Best-effort; also no longer swallows KeyboardInterrupt/SystemExit.
        pass

    remove(downloaded_file_name)
    if not t_response:
        edit(message, f'`{get_translation("decodeFail")}`')
        return
    try:
        soup = BeautifulSoup(t_response, 'html.parser')
        qr_contents = soup.find_all('pre')[0].text
        edit(message, qr_contents)
    except Exception:
        edit(message, f'`{get_translation("decodeFail")}`')
Example #9
0
async def covid(event):
    """Fetch current COVID-19 statistics and edit the message with them.

    Localized variant of the sibling covid() handlers: all user-facing
    strings come from the LANG table.
    """
    url = 'https://api.quiec.tech/corona.php'
    http = PoolManager()
    try:
        request = http.request('GET', url)
        result = jsloads(request.data.decode('utf-8'))
    except Exception:
        await event.edit(LANG['SOME_ERRORS'])
        return
    finally:
        # Release pooled connections on both success and failure paths.
        http.clear()

    # NOTE(review): the heading reuses LANG['SOME_ERRORS'] as the title —
    # sibling handlers use a "Koronavirüs Verileri" heading, so this looks
    # like a copy-paste slip; confirm the intended LANG key before changing.
    sonuclar = (f"** {LANG['SOME_ERRORS']} **\n" + f"\n**{LANG['EARTH']}**\n" +
                f"**{LANG['CASE']}** `{result['tum']}`\n" +
                f"**{LANG['DEATH']}** `{result['tumolum']}`\n" +
                f"**{LANG['HEAL']}** `{result['tumk']}`\n" +
                f"\n**{LANG['TR']}**\n" +
                f"**{LANG['TR_ALL_CASES']}** `{result['trtum']}`\n" +
                f"**{LANG['TR_CASES']}** `{result['trbtum']}`\n" +
                f"**{LANG['TR_CASE']}** `{result['tra']}`\n" +
                f"**{LANG['TR_ALL_DEATHS']}** `{result['trolum']}`\n" +
                f"**{LANG['TR_DEATHS']}** `{result['trbolum']}`\n" +
                f"**{LANG['TR_HEAL']}** `{result['trk']}`")

    await event.edit(sonuclar)
class HttpRpcProvider(HTTPProvider):
    """ http rpc provider """
    def __init__(self, ssl_args, url, disable_conn_pool=False):
        """
        http rpc provider init

        :type  ssl_args: :class:`dict`
        :param ssl_args: ssl arguments
        :type  url: :class:`str`
        :param url: url to connected to
        :type disable_conn_pool: :class: 'bool'
        :param disable_conn_pool: disable connection pooling
        """
        HTTPProvider.__init__(self)
        self.ssl_enabled = False
        self.ssl_args = ssl_args
        # Always define manager so disconnect()/__del__ never raise
        # AttributeError when pooling is disabled or a UDS url is used
        # (previously the attribute only existed on the pooled path).
        self.manager = None

        scheme, host, port, user, password, path, _ = parse_addr_url(url)
        assert (scheme in ['http', 'https'])
        if scheme == 'https':
            self.ssl_enabled = True
        assert (user is None and password is None)  # NYI
        if host.startswith('!'):
            # Unix domain socket: hostname is '!' followed by
            # the URL-encoded socket path
            self.host = None
            self.uds = urllib.parse.unquote(host[1:])
            # SSL currently not supported for Unix domain sockets
            if self.ssl_enabled:
                raise Exception('SSL not supported on Unix domain sockets')
        else:
            self.host = host
            self.port = port
            self.uds = None
        self.path = path
        self.cookie = ''
        self.accept_compress_response = True

        global use_connection_pool
        if disable_conn_pool:
            use_connection_pool = False

        if self.uds is None and use_connection_pool:
            self.manager = PoolManager(num_pools=NUM_OF_POOL,
                                       maxsize=POOL_SIZE,
                                       timeout=CONNECTION_POOL_TIMEOUT,
                                       **self.ssl_args)

    def __del__(self):
        """ http rpc provider on delete """
        self.disconnect()

    def connect(self):
        """
        connect

        :rtype: :class:`vmware.vapi.protocol.client.rpc.provider.RpcProvider`
        :return: http rpc provider
        """
        return self

    def disconnect(self):
        """ disconnect: drain the pool if one was created """
        # getattr guard: __del__ may run even when __init__ failed before
        # self.manager was assigned.
        if getattr(self, 'manager', None) is not None:
            self.manager.clear()

    def _get_connection(self):
        """
        get connection from pool

        :rtype: :class:`PoolManager` (or)
            :class:`UnixSocketConnection`
        :return: http(s) connection or unix socket connection
        """
        conn = None

        if self.uds:
            conn = UnixSocketConnection(self.uds)
        elif use_connection_pool:
            http_scheme = 'http'
            if self.ssl_enabled:
                http_scheme = 'https'
            conn = self.manager.connection_from_host(host=self.host,
                                                     port=self.port,
                                                     scheme=http_scheme)
        else:
            if self.ssl_enabled:
                conn = http_client.HTTPSConnection(host=self.host,
                                                   port=self.port,
                                                   **self.ssl_args)
            else:
                conn = http_client.HTTPConnection(host=self.host,
                                                  port=self.port)

        return conn

    def do_request(self, http_request):
        """
        Send an HTTP request

        :type  http_request: :class:`vmware.vapi.protocol.client.http_lib.HTTPRequest`    # pylint: disable=line-too-long
        :param http_request: The http request to be sent
        :rtype: :class:`vmware.vapi.protocol.client.http_lib.HTTPResponse`
        :return: The http response received
        """
        # pylint can't detect request, getresponse and close methods from
        # Http(s)Connection/UnixSocketConnection
        # pylint: disable=E1103
        request_ctx = http_request.headers
        request = http_request.body
        content_type = request_ctx.get('Content-Type')
        if not content_type:
            # For http, content-type must be set
            raise Exception('do_request: request_ctx content-type not set')

        response_ctx, response = {'Content-Type': content_type}, None
        # Initialize status so the final HTTPResponse construction does not
        # raise NameError when the request has an empty body (previously
        # `status` was only assigned inside the `if request:` branch).
        status = None
        if request:
            request_length = len(request)
            # Send request
            headers = {'Cookie': self.cookie, 'Content-Type': content_type}
            if self.accept_compress_response:
                headers['Accept-Encoding'] = 'gzip, deflate'

            try:
                conn = self._get_connection()
                logger.debug('do_request: request_len %d', request_length)

                if use_connection_pool:
                    resp = conn.request(method=http_request.method,
                                        url=self.path,
                                        body=request,
                                        headers=headers,
                                        preload_content=False)
                else:
                    conn.request(method=http_request.method,
                                 url=self.path,
                                 body=request,
                                 headers=headers)
                    resp = conn.getresponse()
            except:
                logger.exception('do_request() failed')
                raise

            # Debug
            # logger.debug('do_request: response headers', resp.getheaders())

            cookie = resp.getheader('Set-Cookie')
            if cookie:
                self.cookie = cookie

            status = resp.status
            if status in [200, 500]:
                try:
                    encoding = resp.getheader('Content-Encoding', 'identity').lower()  # pylint: disable=line-too-long
                    if encoding in ['gzip', 'deflate']:
                        response = resp.read(decode_content=True)
                    else:
                        response = resp.read()

                    logger.debug('do_request: response len %d', len(response))
                except:
                    conn.close()
                    raise
                else:
                    if resp:
                        # Drain any remaining data — presumably so the pooled
                        # connection can be reused; TODO confirm intent.
                        resp.read()

                content_type = resp.getheader('Content-Type')
                if content_type:
                    response_ctx['Content-Type'] = content_type
            else:
                raise http_client.HTTPException('%d %s' % (resp.status, resp.reason))  # pylint: disable=line-too-long

            if self.cookie:
                response_ctx['Cookie'] = self.cookie
        return HTTPResponse(status=status, headers=response_ctx, body=response)
Example #11
0
class NanoHandle:
    def __init__(self,
                 license_id='default',
                 license_file="~/.BoonLogic.license",
                 timeout=120.0,
                 verify=True,
                 cert=None):
        """Primary handle for BoonNano Pod instances

        The is the primary handle to manage a nano pod instance

        Args:
            license_id (str): license identifier label found within the .BoonLogic.license configuration file
            license_file (str): path to .BoonLogic license file
            timeout (float): read timeout for http requests
            verify:  Either a boolean, in which case it controls whether we verify the server’s TLS certificate, or a string, in which case it must be a path to a CA bundle to use
            cert (bool): if String, path to ssl client cert file (.pem). If Tuple, (‘cert’, ‘key’) pair.


        Environment:
            BOON_LICENSE_FILE: sets license_file path
            BOON_LICENSE_ID: sets license_id
            BOON_API_KEY: overrides the api-key as found in .BoonLogic.license file
            BOON_API_TENANT: overrides the api-tenant as found in .BoonLogic.license file
            BOON_SERVER: overrides the server as found in .BoonLogic.license file
            PROXY_SERVER: overrides the proxy server as found in .BoonLogic.license file
            BOON_SSL_CERT: path to ssl client cert file (.pem)
            BOON_SSL_VERIFY: Either a boolean, in which case it controls whether we verify the server’s TLS certificate, or a string, in which case it must be a path to a CA bundle to use


        Example:
            ```python
            try:
                nano = bn.NanoHandle()
            except bn.BoonException as be:
                print(be)
                sys.exit(1)
            ```

        Raises:
            BoonException: when the license file is missing, malformed, or
                lacks a required field.
        """
        self.license_id = None
        self.api_key = None
        self.api_tenant = None
        self.instance = ''
        self.numeric_format = ''

        # Environment variables override the corresponding arguments and
        # license-file entries.
        env_license_file = os.environ.get('BOON_LICENSE_FILE', None)
        env_license_id = os.environ.get('BOON_LICENSE_ID', None)
        env_api_key = os.environ.get('BOON_API_KEY', None)
        env_api_tenant = os.environ.get('BOON_API_TENANT', None)
        env_server = os.environ.get('BOON_SERVER', None)
        env_proxy_server = os.environ.get('PROXY_SERVER', None)
        env_cert = os.environ.get('BOON_SSL_CERT', None)
        env_verify = os.environ.get('BOON_SSL_VERIFY', None)

        # certificates: any explicit cert (env var or argument, including a
        # path string per the docstring) requires verification. The previous
        # {None: ..., True: ...}[cert] mapping raised KeyError for a string
        # path or False; truthiness handles all documented forms.
        self.cert = 'CERT_REQUIRED' if (env_cert or cert) else 'CERT_NONE'
        if env_verify:
            # Accept boolean-ish strings; anything else is a CA bundle path.
            if env_verify.lower() == 'false':
                self.verify = False
            elif env_verify.lower() == 'true':
                self.verify = True
            else:
                self.verify = env_verify
        else:
            self.verify = verify

        # when license_id comes in as None, use 'default'
        if license_id is None:
            license_id = 'default'

        license_file = env_license_file if env_license_file else license_file
        self.license_id = env_license_id if env_license_id else license_id

        license_path = os.path.expanduser(license_file)
        if not os.path.exists(license_path):
            raise BoonException(
                "license file {} does not exist".format(license_path))
        try:
            with open(license_path, "r") as json_file:
                file_data = json.load(json_file)
        except json.JSONDecodeError as e:
            raise BoonException(
                "json formatting error in .BoonLogic.license file, {}, line: {}, col: {}"
                .format(e.msg, e.lineno, e.colno))
        try:
            license_data = file_data[self.license_id]
        except KeyError:
            raise BoonException(
                "license_id \"{}\" not found in license file".format(
                    self.license_id))

        try:
            self.api_key = env_api_key if env_api_key else license_data[
                'api-key']
        except KeyError:
            raise BoonException(
                "\"api-key\" is missing from the specified license in license file"
            )

        try:
            self.api_tenant = env_api_tenant if env_api_tenant else license_data[
                'api-tenant']
        except KeyError:
            raise BoonException(
                "\"api-tenant\" is missing from the specified license in license file"
            )

        try:
            self.server = env_server if env_server else license_data['server']
        except KeyError:
            raise BoonException(
                "\"server\" is missing from the specified license in license file"
            )

        self.proxy_server = env_proxy_server
        if not self.proxy_server and 'proxy-server' in license_data.keys():
            self.proxy_server = license_data['proxy-server']

        # set up base url; a server value containing "http" (which also
        # matches "https") is assumed to already carry a scheme
        self.url = self.server + '/expert/v3/'
        if "http" not in self.server:
            self.url = "http://" + self.url

        # create pool manager (proxied or direct) shared by all requests
        timeout_inst = Timeout(connect=30.0, read=timeout)
        if self.proxy_server:
            # proxy pool
            self.http = ProxyManager(self.proxy_server,
                                     maxsize=10,
                                     timeout=timeout_inst,
                                     cert_reqs=self.cert)
        else:
            # non-proxy pool
            self.http = PoolManager(timeout=timeout_inst, cert_reqs=self.cert)

    def _is_configured(f):
        """Decorator guard: fail fast when the nano instance is not configured."""
        @wraps(f)
        def wrapper(*a, **kw):
            # numeric_format is only populated once configuration succeeds
            if a[0].numeric_format not in ('int16', 'uint16', 'float32'):
                return False, "nano instance is not configured"
            return f(*a, **kw)

        return wrapper

    def open_nano(self, instance_id):
        """Creates or attaches to a nano pod instance

        Args:
            instance_id (str): instance identifier to assign to new pod instance

        Returns:
            boolean: true if successful (instance is created or attached)

            str: None when result is true, error string when result=false

        """
        instance_cmd = '{}nanoInstance/{}?api-tenant={}'.format(
            self.url, instance_id, self.api_tenant)

        success, response = simple_post(self, instance_cmd)
        if success:
            # Remember the instance only once the server acknowledged it.
            self.instance = instance_id
        return success, response

    def close_nano(self):
        """Closes the pod instance

        Returns:
            result (boolean):  true if successful (nano pod instance was closed)
            response (str): None when result is true, error string when result=false

        """
        close_cmd = '{}nanoInstance/{}?api-tenant={}'.format(
            self.url, self.instance, self.api_tenant)

        # delete instance; propagate the error text on failure
        result, response = simple_delete(self, close_cmd)
        if result:
            # drop pooled connections once the instance is gone
            self.http.clear()
            return result, None
        return result, response

    def create_config(self,
                      feature_count,
                      numeric_format,
                      cluster_mode='batch',
                      min_val=0,
                      max_val=1,
                      weight=1,
                      label=None,
                      percent_variation=0.05,
                      streaming_window=1,
                      accuracy=0.99,
                      autotune_pv=True,
                      autotune_range=True,
                      autotune_by_feature=True,
                      autotune_max_clusters=1000,
                      exclusions=None,
                      streaming_autotune=True,
                      streaming_buffer=10000,
                      learning_numerator=10,
                      learning_denominator=10000,
                      learning_max_clusters=1000,
                      learning_samples=1000000):
        """Generate a configuration template for the given parameters

        A discrete configuration is specified as a list of min, max, weights, and labels

        Args:
            feature_count (int): number of features per vector
            numeric_format (str): numeric type of data (one of "float32", "uint16", or "int16")
            cluster_mode (str): 'streaming' or 'batch' for expert run type
            min_val: the value that should be considered the minimum value for this feature. This
                can be set to a value larger than the actual min if you want to treat all value less
                than that as the same (for instance, to keep a noise spike from having undue influence
                in the clustering.  a single element list assigns all features with same min_val
            max_val: corresponding maximum value, a single element list assigns all features with same max_val
            weight: weight for this feature, a single element list assigns all features with same weight
            label (list): list of labels to assign to features
            percent_variation (float): amount of variation allowed within clusters
            streaming_window (integer): number of consecutive vectors treated as one inference (parametric parameter)
            accuracy (float): statistical accuracy of the clusters
            autotune_pv (bool): whether to autotune the percent variation
            autotune_range (bool): whether to autotune the min and max values
            autotune_by_feature (bool): whether to have individually set min and max values for each feature
            autotune_max_clusters (int): max number of clusters allowed
            exclusions (list): features to exclude while autotuning
            streaming_autotune (bool): whether to autotune while in streaming mode
            streaming_buffer (int): number of samples to autotune on
            learning_numerator (int): max number of new clusters learned
            learning_denominator (int): number of samples over which the new clusters are learned
            learning_max_clusters (int): max number of clusters before turning off learning
            learning_samples (int): max number of samples before turning off learning


        Returns:
            result (boolean): true if successful (configuration was successfully created)
            response (dict or str): configuration dictionary when result is true, error string when result is false

        """

        # Broadcast scalar parameters across all features. `weight` now also
        # accepts a float scalar, matching min_val/max_val (previously only
        # int scalars were broadcast and float weights fell through to the
        # type-error branch below).
        if isinstance(min_val, (int, float)):
            min_val = [min_val] * feature_count
        if isinstance(max_val, (int, float)):
            max_val = [max_val] * feature_count
        if isinstance(weight, (int, float)):
            weight = [weight] * feature_count

        if exclusions is None:
            exclusions = []

        config = {}
        config['clusterMode'] = cluster_mode
        config['numericFormat'] = numeric_format
        config['features'] = []

        if (isinstance(min_val, (list, np.ndarray))
                and isinstance(max_val, (list, np.ndarray))
                and isinstance(weight, (list, np.ndarray))):
            if len(min_val) != len(max_val) or len(min_val) != len(weight):
                return False, "parameters must be lists of the same length"

            # lo/hi avoid shadowing the min/max builtins
            for lo, hi, w in zip(min_val, max_val, weight):
                feature = {}
                feature['minVal'] = lo
                feature['maxVal'] = hi
                feature['weight'] = w
                config['features'].append(feature)
        else:
            return False, "min_val, max_val and weight must be list or numpy array"

        if isinstance(label, list):
            if len(label) != len(min_val):
                return False, "label must be the same length as other parameters"
            for i, l in enumerate(label):
                config['features'][i]['label'] = l
        elif label:
            return False, "label must be list"

        config['percentVariation'] = percent_variation
        config['accuracy'] = accuracy
        config['streamingWindowSize'] = streaming_window

        config['autoTuning'] = {}
        config['autoTuning']['autoTuneByFeature'] = autotune_by_feature
        config['autoTuning']['autoTunePV'] = autotune_pv
        config['autoTuning']['autoTuneRange'] = autotune_range
        config['autoTuning']['maxClusters'] = autotune_max_clusters
        if isinstance(exclusions, list):
            config['autoTuning']['exclusions'] = exclusions
        elif exclusions:
            return False, 'exclusions must be a list'

        # Streaming-only knobs are emitted only in streaming mode.
        if config['clusterMode'] == 'streaming':
            config['streaming'] = {}
            config['streaming']['enableAutoTuning'] = streaming_autotune
            config['streaming']['samplesToBuffer'] = streaming_buffer
            config['streaming']['learningRateNumerator'] = learning_numerator
            config['streaming'][
                'learningRateDenominator'] = learning_denominator
            config['streaming']['learningMaxClusters'] = learning_max_clusters
            config['streaming']['learningMaxSamples'] = learning_samples

        return True, config

    def configure_nano(self,
                       feature_count=1,
                       numeric_format='float32',
                       cluster_mode='batch',
                       min_val=0,
                       max_val=1,
                       weight=1,
                       label=None,
                       percent_variation=.05,
                       streaming_window=1,
                       accuracy=.99,
                       autotune_pv=True,
                       autotune_range=True,
                       autotune_by_feature=True,
                       autotune_max_clusters=1000,
                       exclusions=None,
                       streaming_autotune=True,
                       streaming_buffer=10000,
                       learning_numerator=10,
                       learning_denominator=10000,
                       learning_max_clusters=1000,
                       learning_samples=1000000,
                       config=None):
        """Returns the posted clustering configuration

         Args:
             feature_count (int): number of features per vector
             numeric_format (str): numeric type of data (one of "float32", "uint16", or "int16")
             cluster_mode (str): 'streaming' or 'batch' mode to run expert
             min_val: list of minimum values per feature, if specified as a single value, use that on all features
             max_val: list of maximum values per feature, if specified as a single value, use that on all features
             weight: influence each column has on creating a new cluster
             label (list): name of each feature (if applicable)
             percent_variation (float): amount of variation within each cluster
             streaming_window (integer): number of consecutive vectors treated as one inference (parametric parameter)
             accuracy (float): statistical accuracy of the clusters
             autotune_pv (bool): whether to autotune the percent variation
             autotune_range (bool): whether to autotune the min and max values
             autotune_by_feature (bool): whether to have individually set min and max values for each feature
             autotune_max_clusters (int): max number of clusters allowed
             exclusions (list): features to exclude while autotuning
             streaming_autotune (bool): whether to autotune while in streaming mode
             streaming_buffer (int): number of samples to autotune on
             learning_numerator (int): max number of new clusters learned
             learning_denominator (int): number of samples over which the new clusters are learned
             learning_max_clusters (int): max number of clusters before turning off learning
             learning_samples (int): max number of samples before turning off learning
             config (dict): dictionary of configuration parameters

         Returns:
             result (boolean): true if successful (configuration was successfully loaded into nano pod instance)
             response (dict or str): configuration dictionary when result is true, error string when result is false

         """

        # Build a configuration when the caller did not supply one.
        if config is None:
            ok, config = self.create_config(
                feature_count=feature_count,
                numeric_format=numeric_format,
                cluster_mode=cluster_mode,
                min_val=min_val,
                max_val=max_val,
                weight=weight,
                label=label,
                percent_variation=percent_variation,
                streaming_window=streaming_window,
                accuracy=accuracy,
                autotune_pv=autotune_pv,
                autotune_range=autotune_range,
                autotune_by_feature=autotune_by_feature,
                autotune_max_clusters=autotune_max_clusters,
                exclusions=exclusions,
                streaming_autotune=streaming_autotune,
                streaming_buffer=streaming_buffer,
                learning_numerator=learning_numerator,
                learning_denominator=learning_denominator,
                learning_max_clusters=learning_max_clusters,
                learning_samples=learning_samples)
            if not ok:
                # create_config's error string is passed through unchanged
                return False, config

        config_cmd = '{}clusterConfig/{}?api-tenant={}'.format(
            self.url, self.instance, self.api_tenant)
        result, response = simple_post(self, config_cmd, body=json.dumps(config))
        if result:
            # Successful configuration unlocks the @_is_configured methods.
            self.numeric_format = config['numericFormat']

        return result, response

    def nano_list(self):
        """Return the list of nano instances allocated for this pod.

        Returns:
            result (boolean): true if successful (list was returned)
            response (str): json dictionary of pod instances when result=true, error string when result=false

        """
        # GET the instances allocated under this tenant
        list_cmd = '{}nanoInstances?api-tenant={}'.format(self.url, self.api_tenant)
        return simple_get(self, list_cmd)

    @_is_configured
    def save_nano(self, filename):
        """serialize a nano pod instance and save to a local file

        Args:
            filename (str): path to local file where saved pod instance should be written

        Returns:
            result (boolean):  true if successful (pod instance was written)
            response (str): None when result is true, error string when result=false

        """

        # build command
        snapshot_cmd = self.url + 'snapshot/' + self.instance + '?api-tenant=' + self.api_tenant

        # serialize nano
        result, response = simple_get(self, snapshot_cmd)
        if not result:
            return result, response

        # at this point, the call succeeded, saves the result to a local file
        try:
            with open(filename, 'wb') as fp:
                fp.write(response)
        except OSError as e:
            # .strerror is only defined on OSError and its subclasses
            return False, e.strerror
        except Exception as e:
            # bug fix: the previous unconditional `e.strerror` raised
            # AttributeError for non-OS errors (e.g. TypeError when the
            # response is not bytes); report the error message instead
            return False, str(e)

        return True, None

    def restore_nano(self, filename):
        """Restore a nano pod instance from a local file.

        Args:
            filename (str): path to local file containing saved pod instance

        Returns:
            result (boolean):  true if successful (nano pod instance was restored)
            response (str): None when result is true, error string when result=false

        """

        # sanity-check the input: a valid nano file is a gzip'd tar whose
        # /CommonState/MagicNumber member holds the expected two bytes
        try:
            with tarfile.open(filename, 'r:gz') as archive:
                with archive.extractfile('/CommonState/MagicNumber') as member:
                    if member.read() != b'\xda\xba':
                        return False, 'file {} is not a Boon Logic nano-formatted file, bad magic number'.format(
                            filename)
        except KeyError:
            # archive is readable but the magic-number member is absent
            return False, 'file {} is not a Boon Logic nano-formatted file'.format(
                filename)
        except Exception:
            # unreadable / not a gzip'd tar / truncated archive
            return False, 'corrupt file {}'.format(filename)

        # slurp the raw snapshot bytes for upload
        with open(filename, 'rb') as fp:
            snapshot_bytes = fp.read()

        # build command
        snapshot_cmd = self.url + 'snapshot/' + self.instance + '?api-tenant=' + self.api_tenant

        result, response = multipart_post(
            self, snapshot_cmd,
            fields={'snapshot': (filename, snapshot_bytes)})
        if not result:
            return result, response

        # keep the local numeric format in sync with the restored instance
        self.numeric_format = response['numericFormat']

        return True, response

    @_is_configured
    def autotune_config(self):
        """Autotune the percent variation, min and max for each feature.

        Returns:
            result (boolean): true if successful (autotuning was completed)
            response (dict or str): configuration dictionary when result is true, error string when result is false

        """
        # POST to the autoTune endpoint; the server performs the tuning
        tune_cmd = '{}autoTune/{}?api-tenant={}'.format(
            self.url, self.instance, self.api_tenant)
        return simple_post(self, tune_cmd)

    @_is_configured
    def get_config(self):
        """Fetch the current configuration of this nano pod instance.

        Returns:
            result (boolean): true if successful (configuration was found)
            response (dict or str): configuration dictionary when result is true, error string when result is false

        """
        cfg_cmd = '{}clusterConfig/{}?api-tenant={}'.format(
            self.url, self.instance, self.api_tenant)
        return simple_get(self, cfg_cmd)

    @_is_configured
    def load_file(self, file, file_type, gzip=False, append_data=False):
        """Load nano data from a file

        Args:
            file (str): local path to data file
            file_type (str): file type specifier, must be one of 'csv', 'csv-c', 'raw' or 'raw-n'
            gzip (boolean): true if file is gzip'd, false if not gzip'd
            append_data (boolean): true if data should be appended to previous data, false if existing
                data should be truncated

        Returns:
            result (boolean): true if successful (file was successful loaded into nano pod instance)
            response (str): None when result is true, error string when result=false

        """

        # load the data file
        try:
            with open(file, 'rb') as fp:
                file_data = fp.read()
        except FileNotFoundError as e:
            return False, e.strerror
        except Exception as e:
            # bug fix: return the documented error *string* rather than the
            # raw exception object
            return False, str(e)

        # verify file_type is set correctly
        if file_type not in ['csv', 'csv-c', 'raw', 'raw-n']:
            return False, 'file_type must be "csv", "csv-c", "raw" or "raw-n"'

        file_name = os.path.basename(file)

        fields = {'data': (file_name, file_data)}

        # build command
        dataset_cmd = self.url + 'data/' + self.instance + '?api-tenant=' + self.api_tenant
        dataset_cmd += '&fileType=' + file_type
        dataset_cmd += '&gzip=' + str(gzip).lower()
        dataset_cmd += '&appendData=' + str(append_data).lower()

        return multipart_post(self, dataset_cmd, fields=fields)

    @_is_configured
    def load_data(self, data, append_data=False):
        """Load nano data from an existing numpy array or simple python list.

        Args:
            data (np.ndarray or list): numpy array or list of data values
            append_data (boolean): true if data should be appended to previous data, false if existing
                data should be truncated

        Returns:
            result (boolean): true if successful (data was successful loaded into nano pod instance)
            response (str): None when result is true, error string when result=false

        """
        # convert the array/list into the raw byte form the server expects
        raw_bytes = normalize_nano_data(data, self.numeric_format)

        # the upload endpoint requires a file name even for in-memory data
        fields = {'data': ('dummy_filename.bin', raw_bytes)}

        # build command (in-memory data is always uploaded as 'raw')
        dataset_cmd = (self.url + 'data/' + self.instance +
                       '?api-tenant=' + self.api_tenant +
                       '&fileType=raw' +
                       '&appendData=' + str(append_data).lower())

        return multipart_post(self, dataset_cmd, fields=fields)

    def set_learning_status(self, status):
        """Turn learning on or off for this nano pod instance

        Args:
            status (boolean): true to enable learning, false to disable it

        Returns:
            result (boolean):  true if successful (learning status was set)
            response (str): json dictionary of the learning state when result=true, error string when result=false

        """
        # doc fix: the previous docstring was copy-pasted from nano_list and
        # described listing pod instances rather than setting learning status
        if status not in [True, False]:
            return False, 'status must be a boolean'
        # build command
        learning_cmd = self.url + 'learning/' + self.instance + '?enable=' + str(
            status).lower() + '&api-tenant=' + self.api_tenant

        return simple_post(self, learning_cmd)

    def set_root_cause_status(self, status):
        """Configure whether new incoming clusters are saved for root cause analysis.

        Args:
            status (boolean): true or false of whether root cause is on or off

        Returns:
            result (boolean):  true if successful
            response (str): status of root cause

        """
        # only an explicit boolean is accepted
        if status not in [True, False]:
            return False, 'status must be a boolean'

        # build command
        rc_cmd = '{}rootCause/{}?enable={}&api-tenant={}'.format(
            self.url, self.instance, str(status).lower(), self.api_tenant)

        return simple_post(self, rc_cmd)

    def run_nano(self, results=None):
        """Clusters the data in the nano pod buffer and returns the specified results

        Args:
            results (str): comma separated list of result specifiers

                ID = cluster ID

                SI = smoothed anomaly index

                RI = raw anomaly index

                FI = frequency index

                DI = distance index

                All = ID,SI,RI,FI,DI

        Returns:
            result (boolean): true if successful (nano was successfully run)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """

        # expand the 'All' shorthand, or validate each individual specifier
        results_str = ''
        if str(results) == 'All':
            results_str = 'ID,SI,RI,FI,DI'
        elif results:
            known = ('ID', 'SI', 'RI', 'FI', 'DI')
            bad = next((r for r in results.split(',') if r not in known), None)
            if bad is not None:
                return False, 'unknown result "{}" found in results parameter'.format(
                    bad)
            results_str = results

        # build command
        nano_cmd = '{}nanoRun/{}?api-tenant={}'.format(
            self.url, self.instance, self.api_tenant)
        if results:
            nano_cmd += '&results=' + results_str

        return simple_post(self, nano_cmd)

    @_is_configured
    def run_streaming_nano(self, data, results=None):
        """Load streaming data into self-autotuning nano pod instance, run the nano and return results

        Args:
            data (np.ndarray or list): numpy array or list of data values
            results (str): comma separated list of result specifiers

                ID = cluster ID

                SI = smoothed anomaly index

                RI = raw anomaly index

                FI = frequency index

                DI = distance index

                All = ID,SI,RI,FI,DI

        Returns:
            result (boolean): true if successful (data was successful streamed to nano pod instance)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """
        # convert the input into the raw byte form the server expects
        raw = normalize_nano_data(data, self.numeric_format)

        # the upload endpoint requires a file name even for in-memory data
        fields = {'data': ('dummy_filename.bin', raw)}

        # expand the 'All' shorthand, or validate each individual specifier
        results_str = ''
        if str(results) == 'All':
            results_str = 'ID,SI,RI,FI,DI'
        elif results:
            for spec in results.split(','):
                if spec not in ('ID', 'SI', 'RI', 'FI', 'DI'):
                    return False, 'unknown result "{}" found in results parameter'.format(
                        spec)
            results_str = results

        # build command (streaming data is always uploaded as 'raw')
        streaming_cmd = (self.url + 'nanoRunStreaming/' + self.instance +
                         '?api-tenant=' + self.api_tenant + '&fileType=raw')
        if results:
            streaming_cmd += '&results=' + results_str

        return multipart_post(self, streaming_cmd, fields=fields)

    def get_version(self):
        """Version information for this nano pod.

        Returns:
            result (boolean): true if successful (version information was retrieved)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """
        # the version endpoint sits above the 'v3/' prefix, so drop the
        # final three characters of the base url before appending
        version_cmd = '{}version?api-tenant={}'.format(self.url[:-3], self.api_tenant)
        return simple_get(self, version_cmd)

    @_is_configured
    def get_buffer_status(self):
        """Results related to the bytes processed/in the buffer.

        Returns:
            result (boolean): true if successful (status was retrieved)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """
        status_cmd = '{}bufferStatus/{}?api-tenant={}'.format(
            self.url, self.instance, self.api_tenant)
        return simple_get(self, status_cmd)

    @_is_configured
    def get_nano_results(self, results='All'):
        """Results per pattern

        Args:
            results (str): comma separated list of results

                ID = cluster ID

                SI = smoothed anomaly index

                RI = raw anomaly index

                FI = frequency index

                DI = distance index

                All = ID,SI,RI,FI,DI

        Returns:
            result (boolean): true if successful (nano was successfully run)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """
        # expand the 'All' shorthand, or validate each individual specifier
        if str(results) == 'All':
            results_str = 'ID,SI,RI,FI,DI'
        else:
            valid = ('ID', 'SI', 'RI', 'FI', 'DI')
            for spec in results.split(','):
                if spec not in valid:
                    return False, 'unknown result "{}" found in results parameter'.format(
                        spec)
            results_str = results

        # build command
        results_cmd = '{}nanoResults/{}?api-tenant={}&results={}'.format(
            self.url, self.instance, self.api_tenant, results_str)

        return simple_get(self, results_cmd)

    @_is_configured
    def get_nano_status(self, results='All'):
        """Results in relation to each cluster/overall stats

        Args:
            results (str): comma separated list of results

                PCA = principal components (includes 0 cluster)

                clusterGrowth = indexes of each increase in cluster (includes 0 cluster)

                clusterSizes = number of patterns in each cluster (includes 0 cluster)

                anomalyIndexes = anomaly index (includes 0 cluster)

                frequencyIndexes = frequency index (includes 0 cluster)

                distanceIndexes = distance index (includes 0 cluster)

                totalInferences = total number of patterns clustered (overall)

                averageInferenceTime = time in milliseconds to cluster per
                    pattern (not available if uploading from serialized nano) (overall)

                numClusters = total number of clusters (includes 0 cluster) (overall)

                All = PCA,clusterGrowth,clusterSizes,anomalyIndexes,frequencyIndexes,distanceIndexes,totalInferences,numClusters

        Returns:
            result (boolean): true if successful (nano was successfully run)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """

        # expand the 'All' shorthand, or validate each individual specifier
        # (note: averageInferenceTime is valid but deliberately excluded
        # from the 'All' expansion, per the docstring)
        if str(results) == 'All':
            results_str = 'PCA,clusterGrowth,clusterSizes,anomalyIndexes,frequencyIndexes,' \
                          'distanceIndexes,totalInferences,numClusters'
        else:
            valid = ('PCA', 'clusterGrowth', 'clusterSizes', 'anomalyIndexes',
                     'frequencyIndexes', 'distanceIndexes', 'totalInferences',
                     'numClusters', 'averageInferenceTime')
            for spec in results.split(','):
                if spec not in valid:
                    return False, 'unknown result "{}" found in results parameter'.format(
                        spec)
            results_str = results

        # build command
        results_cmd = '{}nanoStatus/{}?api-tenant={}&results={}'.format(
            self.url, self.instance, self.api_tenant, results_str)

        return simple_get(self, results_cmd)

    def get_root_cause(self, id_list=None, pattern_list=None):
        """Get root cause

        Args:
            id_list (list): list of IDs to return the root cause for
            pattern_list (list): list of pattern vectors to calculate the root cause against the model

        Returns:
            A list containing the root cause for each pattern/id provided for a sensor:

                [float]

        Raises:
            BoonException: if Amber cloud gives non-200 response
        """
        if id_list is None and pattern_list is None:
            raise BoonException(
                'Must specify either list of ID(s) or list of pattern(s).')

        response = {'RootCauseFromID': [], 'RootCauseFromPattern': []}
        if id_list is not None:
            id_list = [str(element) for element in id_list]
            rc_cmd = self.url + 'rootCauseFromID/' + self.instance + '?api-tenant=' + self.api_tenant
            rc_cmd = rc_cmd + '&clusterID=' + ",".join(id_list)

            success, status = simple_get(self, rc_cmd)
            if success:
                response['RootCauseFromID'] = status
            else:
                return success, status

        if pattern_list is not None:
            # a 1-D input is a single pattern; wrap it so both shapes are
            # handled uniformly as a list of patterns below
            if len(np.array(
                    pattern_list).shape) == 1:  # only 1 pattern provided
                pattern_list = [pattern_list]
            # bug fix: the single-pattern case previously skipped this
            # stringification, so the "],[" join below raised TypeError on
            # the still-numeric pattern
            pattern_strings = [
                ','.join(str(element) for element in pattern)
                for pattern in pattern_list
            ]
            rc_cmd = self.url + 'rootCauseFromPattern/' + self.instance + '?api-tenant=' + self.api_tenant
            rc_cmd = rc_cmd + '&pattern=' + '[[' + "],[".join(
                pattern_strings) + ']]'

            success, status = simple_get(self, rc_cmd)
            if success:
                response['RootCauseFromPattern'] = status
            else:
                return success, status

        return True, response