Example #1
def print_certificate_info(buff, sock, cert):
    cert_pem = ssl.DER_cert_to_PEM_cert(sock.getpeercert(True))

    x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert_pem)

    public_key = x509.get_pubkey()

    key_type = ("RSA" if public_key.type() == crypto.TYPE_RSA else "DSA")
    key_size = str(public_key.bits())
    sha256_fingerprint = x509.digest(n(b"SHA256"))
    sha1_fingerprint = x509.digest(n(b"SHA1"))
    signature_algorithm = x509.get_signature_algorithm()

    key_info = ("key info: {key_type} key {bits} bits, signed using "
                "{algo}").format(key_type=key_type,
                                 bits=key_size,
                                 algo=n(signature_algorithm))

    validity_info = ("        Begins on:  {before}\n"
                     "        Expires on: {after}").format(
                         before=cert["notBefore"], after=cert["notAfter"])

    rdns = chain(*cert["subject"])
    subject = ", ".join(["{}={}".format(name, value) for name, value in rdns])

    rdns = chain(*cert["issuer"])
    issuer = ", ".join(["{}={}".format(name, value) for name, value in rdns])

    subject = "subject: {sub}, serial number {serial}".format(
        sub=subject, serial=cert["serialNumber"])

    issuer = "issuer: {issuer}".format(issuer=issuer)

    fingerprints = ("        SHA1:   {}\n"
                    "        SHA256: {}").format(n(sha1_fingerprint),
                                                 n(sha256_fingerprint))

    wrapper = textwrap.TextWrapper(initial_indent="    - ",
                                   subsequent_indent="        ")

    message = ("{prefix}matrix: received certificate\n"
               " - certificate info:\n"
               "{subject}\n"
               "{issuer}\n"
               "{key_info}\n"
               "    - period of validity:\n{validity_info}\n"
               "    - fingerprints:\n{fingerprints}").format(
                   prefix=W.prefix("network"),
                   subject=wrapper.fill(subject),
                   issuer=wrapper.fill(issuer),
                   key_info=wrapper.fill(key_info),
                   validity_info=validity_info,
                   fingerprints=fingerprints)

    W.prnt(buff, message)
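The function above relies on WeeChat's `W` API and an already-established socket. As a rough, self-contained sketch of the same fingerprinting idea using only the standard `ssl` module and pyOpenSSL (the host name and output format below are illustrative, not part of the plugin):

import socket
import ssl

from OpenSSL import crypto


def peer_cert_fingerprints(host, port=443):
    """Fetch a server certificate and return its SHA1/SHA256 fingerprints."""
    context = ssl.create_default_context()
    with socket.create_connection((host, port)) as sock:
        with context.wrap_socket(sock, server_hostname=host) as tls:
            der_cert = tls.getpeercert(True)
    pem_cert = ssl.DER_cert_to_PEM_cert(der_cert)
    x509 = crypto.load_certificate(crypto.FILETYPE_PEM, pem_cert.encode())
    return {
        "SHA1": x509.digest("SHA1").decode(),
        "SHA256": x509.digest("SHA256").decode(),
    }


print(peer_cert_fingerprints("example.org"))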
Example #2
def plot_rose(data,
              data_column,
              dir_column,
              title='',
              var_name='',
              var_unit='',
              fig_filename='',
              legend_position='upper left',
              xlabels=('E', 'N-E', 'N', 'N-W', 'W', 'S-W', 'S', 'S-E')):
    plot.get_default_plot_style(context='talk')

    var_label = plot.get_var_label(var_name, var_unit)

    ax = plot_windrose(data,
                       kind='bar',
                       var_name=data_column,
                       direction_name=dir_column,
                       normed=True,
                       opening=0.8,
                       edgecolor=n(b'white'))

    plot.plot_title(ax, title)

    ax.set_xlabel(var_label)
    ax.legend(loc=legend_position, fontsize='small')
    ax.set_xticklabels(xlabels)

    plot.save_figure(fig_filename)
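A hypothetical call to plot_rose, assuming the wind data lives in a pandas DataFrame; the column names, labels, and file name below are placeholders:

import pandas as pd

wind = pd.DataFrame({'speed': [3.2, 5.1, 7.4, 2.0],
                     'direction': [10, 135, 270, 300]})
plot_rose(wind, 'speed', 'direction',
          title='Wind rose', var_name='speed', var_unit='m/s',
          fig_filename='wind_rose.png')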
Example #3
    def decrypt(obj):
        """Decrypt the object.

        It is an inner function because we must first configure our KMS
        client. Then we call this recursively on the object.
        """
        if isinstance(obj, list):
            res_v = []
            for item in obj:
                res_v.append(decrypt(item))
            return res_v
        elif isinstance(obj, dict):
            if '_kms' in obj:
                try:
                    res = client.decrypt(CiphertextBlob=b64decode(obj['_kms']))
                    obj = n(res['Plaintext'])
                except ClientError as err:
                    if 'AccessDeniedException' in err.args[0]:
                        log.warning(
                            'Unable to decrypt %s. Key does not exist or no access',
                            obj['_kms'])
                    else:
                        raise
            else:
                for k, v in obj.items():
                    obj[k] = decrypt(v)
        else:
            pass
        return obj
Example #4
    def process_selectors(self):
        tree = lxml.html.fromstring(self.html)
        for process_selector in self.selectors:
            for el in tree.xpath(process_selector):
                self.add_attribute_to_element(el)
        out = n(et.tostring(tree, pretty_print=False))
        self.html = six.text_type(out)
Example #5
    def decrypt(obj):
        """Decrypt the object.

        It is an inner function because we must first configure our KMS
        client. Then we call this recursively on the object.
        """
        if isinstance(obj, list):
            res_v = []
            for item in obj:
                res_v.append(decrypt(item))
            return res_v
        elif isinstance(obj, dict):
            if "_kms" in obj:
                try:
                    res = client.decrypt(CiphertextBlob=b64decode(obj["_kms"]))
                    obj = n(res["Plaintext"])
                except ClientError as err:
                    if "AccessDeniedException" in err.args[0]:
                        log.warning("Unable to decrypt %s. Key does not exist or no access", obj["_kms"])
                    else:
                        raise
            else:
                for k, v in obj.items():
                    obj[k] = decrypt(v)
        else:
            pass
        return obj
Example #6
    def test_kms_encrypt(self):
        key = 'alias/figgypy-test'
        secret = 'test password 1234567890 !@#$%^&*()'
        client = boto3.client('kms')
        encrypted = kms_encrypt(secret, key)
        dec_res = client.decrypt(CiphertextBlob=b64decode(encrypted))
        decrypted = n(dec_res['Plaintext'])
        assert decrypted == secret
Example #7
    def decrypt(obj):
        """Decrypt the object.

        It is an inner function because we must first verify that gpg
        is ready. If we did them in the same function we would end up
        calling the gpg checks several times, potentially, since we are
        calling this recursively.
        """
        if isinstance(obj, list):
            res_v = []
            for item in obj:
                res_v.append(decrypt(item))
            return res_v
        elif isinstance(obj, dict):
            if '_gpg' in obj:
                try:
                    decrypted = gpg.decrypt(obj['_gpg'])
                    if decrypted.ok:
                        obj = n(decrypted.data.decode('utf-8').encode())
                    else:
                        log.error("gpg error unpacking secrets %s",
                                  decrypted.stderr)
                except Exception as err:
                    log.error("error unpacking secrets %s", err)
            else:
                for k, v in obj.items():
                    obj[k] = decrypt(v)
        else:
            try:
                if 'BEGIN PGP' in obj:
                    try:
                        decrypted = gpg.decrypt(obj)
                        if decrypted.ok:
                            obj = n(decrypted.data.decode('utf-8').encode())
                        else:
                            log.error("gpg error unpacking secrets %s",
                                      decrypted.stderr)
                    except Exception as err:
                        log.error("error unpacking secrets %s", err)
            except TypeError:
                log.debug('Pass on decryption. Only decrypt strings')
        return obj
Example #8
    def decrypt(obj):
        """Decrypt the object.

        It is an inner function because we must first verify that gpg
        is ready. If we did them in the same function we would end up
        calling the gpg checks several times, potentially, since we are
        calling this recursively.
        """
        if isinstance(obj, list):
            res_v = []
            for item in obj:
                res_v.append(decrypt(item))
            return res_v
        elif isinstance(obj, dict):
            if "_gpg" in obj:
                try:
                    decrypted = gpg.decrypt(obj["_gpg"])
                    if decrypted.ok:
                        obj = n(decrypted.data.decode("utf-8").encode())
                    else:
                        log.error("gpg error unpacking secrets %s", decrypted.stderr)
                except Exception as err:
                    log.error("error unpacking secrets %s", err)
            else:
                for k, v in obj.items():
                    obj[k] = decrypt(v)
        else:
            try:
                if "BEGIN PGP" in obj:
                    try:
                        decrypted = gpg.decrypt(obj)
                        if decrypted.ok:
                            obj = n(decrypted.data.decode("utf-8").encode())
                        else:
                            log.error("gpg error unpacking secrets %s", decrypted.stderr)
                    except Exception as err:
                        log.error("error unpacking secrets %s", err)
            except TypeError:
                log.debug("Pass on decryption. Only decrypt strings")
        return obj
Example #9
    def kill_selectors(self):
        tree = lxml.html.fromstring(self.html)
        for kill_selector in self.kill:
            for el in tree.xpath(kill_selector):
                self.remove_keeping_tail(el)
        for kill_empty_selector in self.kill_empty:
            for el in tree.xpath(
                    '//{}['
                    'not(descendant-or-self::*/text()[normalize-space()])'
                    ' and not(descendant-or-self::*/attribute::*)'
                    ']'.format(kill_empty_selector)):
                self.remove_keeping_tail(el)
        out = n(et.tostring(tree, pretty_print=False))
        self.html = six.text_type(out)
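The XPath expression built above keeps only elements whose entire subtree has neither text nor attributes. A small standalone demo of that filter (hypothetical input; `et` is assumed to be `lxml.etree`, as in the method):

import lxml.html
from lxml import etree as et

html = '<div><span></span><span id="keep"></span><p>text</p></div>'
tree = lxml.html.fromstring(html)
empty = tree.xpath('//span['
                   'not(descendant-or-self::*/text()[normalize-space()])'
                   ' and not(descendant-or-self::*/attribute::*)'
                   ']')
print([et.tostring(e) for e in empty])  # -> [b'<span></span>'], the empty attribute-less span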
Example #10
def s(value):
    if compat.is_py2:
        if isinstance(value, unicode):
            return value.decode('ascii', 'ignore')

        if isinstance(value, str):
            return value

    else:
        if isinstance(value, bytes):
            return n(value)

        if isinstance(value, str):
            return value

    return value
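Every call site in these examples passes `n` a bytes value and treats the result as text, so `n` appears to behave like a bytes-to-native-string helper in the same spirit as `s` above. A minimal sketch of such a helper, which is an assumption based on the usage shown here rather than the library's actual definition:

import sys


def n(value, encoding='utf-8'):
    """Hypothetical stand-in: coerce bytes to the interpreter's native str type."""
    # On Python 3 the native string type is text, so bytes get decoded;
    # on Python 2 the native string type is already bytes, so the value passes through.
    if isinstance(value, bytes) and sys.version_info[0] >= 3:
        return value.decode(encoding)
    return value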
Example #11
File: utils.py  Project: theherk/figgypy
def kms_encrypt(value, key, aws_config=None):
    """Encrypt and value with KMS key.

    Args:
        value (str): value to encrypt
        key (str): key id or alias
        aws_config (optional[dict]): aws credentials
            dict of arguments passed into boto3 session
            example:
                aws_creds = {'aws_access_key_id': aws_access_key_id,
                             'aws_secret_access_key': aws_secret_access_key,
                             'region_name': 'us-east-1'}

    Returns:
        str: encrypted cipher text
    """
    aws_config = aws_config or {}
    aws = boto3.session.Session(**aws_config)
    client = aws.client("kms")
    enc_res = client.encrypt(KeyId=key, Plaintext=value)
    return n(b64encode(enc_res["CiphertextBlob"]))
Example #12
def kms_encrypt(value, key, aws_config=None):
    """Encrypt and value with KMS key.

    Args:
        value (str): value to encrypt
        key (str): key id or alias
        aws_config (optional[dict]): aws credentials
            dict of arguments passed into boto3 session
            example:
                aws_creds = {'aws_access_key_id': aws_access_key_id,
                             'aws_secret_access_key': aws_secret_access_key,
                             'region_name': 'us-east-1'}

    Returns:
        str: encrypted cipher text
    """
    aws_config = aws_config or {}
    aws = boto3.session.Session(**aws_config)
    client = aws.client('kms')
    enc_res = client.encrypt(KeyId=key, Plaintext=value)
    return n(b64encode(enc_res['CiphertextBlob']))
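A hypothetical call, with a made-up alias and region; it assumes the KMS key exists and the caller's credentials are allowed to use it:

ciphertext = kms_encrypt('database password',
                         'alias/my-app',
                         aws_config={'region_name': 'us-east-1'})
print(ciphertext)  # base64-encoded ciphertext, safe to keep in a config file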
Example #13
    def do_POST(self):
        self.logger.debug("Webhook triggered")
        if self.path == self.server.webhook_path and \
           'content-type' in self.headers and \
           'content-length' in self.headers and \
           self.headers['content-type'] == 'application/json':
            json_string = \
                n(self.rfile.read(int(self.headers['content-length'])))

            self.send_response(200)
            self.end_headers()

            self.logger.debug("Webhook received data: " + json_string)

            update = Update.de_json(json.loads(json_string))
            self.logger.info("Received Update with ID %d on Webhook" %
                             update.update_id)
            self.server.update_queue.put(update)

        else:
            self.send_error(403)
            self.end_headers()
Example #15
def MySQLInfo(command):
    try:
        # get sql status info
        if (command == 'status'):
            sql = mode(command)
            status = show_update_status(sql)
            return status
        # get sql process list
        if (command == 'process'):
            sql = mode(command)
            processlist = query(sql)
            for process in processlist:
                for key in process:
                    if (key == 'Info'):
                        if (process[key] is not None):
                            process[key] = n(process[key])
                        if (process[key] is None):
                            process[key] = "None"
            return processlist

    except Exception as err:
        logging.exception(err)
        print(err)
        sys.exit()
Example #16
        self.delete_slice__2 = self.nop
        self.delete_slice__3 = self.nop

        super(FindFTrace, self).__init__(*args, **kwargs)

        self._locals = AlmostReadOnlyDict(self._locals)
        self._globals = AlmostReadOnlyDict(self._globals)

    def store_attr(self):
        """STORE_ATTR opcode"""
        if self.names[self.oparg] == "f_trace":
            self._stop = True
            self.result = self.stack.pop() if self.stack else True


FTraceExe = type(n(b"FTraceExe"), (FindFTrace, PyInterpreter), {})               # pylint: disable=invalid-name


def get_f_trace(code, loc, glob):
    """Get frame from frame.f_trace attribution"""
    interpreter = FTraceExe(code, loc, glob)
    interpreter.execute()
    return interpreter.result


def find_f_trace(code, loc, glob, lasti):
    """Check if code has frame.f_trace attribution"""
    if "f_trace" not in code.co_names:
        return False
    interpreter = FindFTrace(code, loc, glob)
    interpreter.execute()
Example #17
def main(args):
    logger = logging.getLogger(__name__)
    merge_cfg_from_file(args.cfg)
    cfg.NUM_GPUS = 1
    args.weights = cache_url(args.weights, cfg.DOWNLOAD_CACHE)
    assert_and_infer_cfg(cache_urls=False)

    assert not cfg.MODEL.RPN_ONLY, \
        'RPN models are not supported'
    assert not cfg.TEST.PRECOMPUTED_PROPOSALS, \
        'Models that require precomputed proposals are not supported'

    model = infer_engine.initialize_model_from_cfg(args.weights)
    dummy_coco_dataset = dummy_datasets.get_coco_dataset()

    if os.path.isdir(args.im_or_folder):
        im_list = glob.iglob(args.im_or_folder + '/*.' + args.image_ext)
    else:
        im_list = [args.im_or_folder]
    """
    Add support for webcam
    """
    # Set and get camera from OpenCV
    cap = cv2.VideoCapture('/detectron/mypython/people-walking.mp4')
    width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH) + 0.5)
    height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT) + 0.5)

    s = n(b'XVID')
    fourcc = cv2.VideoWriter_fourcc(*s)
    out = cv2.VideoWriter('output.avi', fourcc, 24.0, (width, height))
    im_name = 'tmp_im'
    count = 0
    fileOut = open('people-walking.txt', 'w')

    while True:
        count += 1
        # Fetch image from camera
        ret, im = cap.read()
        if not ret:
            # end of the video stream; stop so the writers below get released
            break

        timers = defaultdict(Timer)
        t = time.time()

        with c2_utils.NamedCudaScope(0):
            cls_boxes, cls_segms, cls_keyps = infer_engine.im_detect_all(
                model, im, None, timers=timers)
        logger.info('Inference time: {:.3f}s'.format(time.time() - t))
        for k, v in timers.items():
            logger.info(' | {}: {:.3f}s'.format(k, v.average_time))
        if 0:
            logger.info(
                ' \ Note: inference on the first image will be slower than the '
                'rest (caches and auto-tuning need to warm up)')
        box_list = [b for b in cls_boxes if len(b) > 0]
        if len(box_list) > 0:
            boxes = np.concatenate(box_list)
        else:
            boxes = None
        vis_utils.vis_one_image(
            im[:, :, ::-1],  # BGR -> RGB for visualization
            im_name,
            args.output_dir,
            cls_boxes,
            cls_segms,
            cls_keyps,
            dataset=dummy_coco_dataset,
            box_alpha=0.3,
            show_class=True,
            thresh=0.7,
            kp_thresh=2,
            ext='jpg'  # default is PDF, but we want JPG.
        )
        time.sleep(0.05)
        img = cv2.imread('/detectron/mypython/tmp_im.jpg')
        cv2.putText(img, 'Frame: ', (5, 30), cv2.FONT_HERSHEY_SIMPLEX, 1,
                    (255, 255, 255), 2)
        cv2.putText(img, str(count), (130, 30), cv2.FONT_HERSHEY_SIMPLEX, 1,
                    (255, 255, 255), 2)
        cv2.putText(img, 'Model: e2e_mask_rcnn_R-101-FPN_2x.yaml', (200, 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
        cv2.putText(
            img,
            'WEIGHTS: https://s3-us-west-2.amazonaws.com/detectron/ImageNetPretrained/MSRA/R-101.pkl',
            (5, 60), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
        # `boxes` is None when nothing was detected in this frame
        for i in range(len(boxes) if boxes is not None else 0):
            x1 = "{:.6f}".format(boxes[i][0] / width)
            y1 = "{:.6f}".format(boxes[i][1] / height)
            x2 = "{:.6f}".format(boxes[i][2] / width)
            y2 = "{:.6f}".format(boxes[i][3] / height)
            conf = "{:.6f}".format(boxes[i][4])
            fileOut.write("Frame " + str(count).zfill(5) + ":" + "     " +
                          str(x1) + "     " + str(y1) + "     " + str(x2) +
                          "     " + str(y2) + "     " + str(conf) + "\n")
            #cv2.putText(img, str(x1),(5, 90+30*i), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2)
            #cv2.putText(img, str(y1),(185, 90+30*i), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2)
            #cv2.putText(img, str(x2),(365, 90+30*i), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2)
            #cv2.putText(img, str(y2),(545, 90+30*i), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2)
            #cv2.putText(img, str(conf),(725, 90+30*i), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2)
        time.sleep(0.05)
        out.write(img)
    fileOut.close()
    cap.release()
    out.release()
    cv2.destroyAllWindows()
Example #18
class Emoji(object):
    """This object represents an Emoji."""

    GRINNING_FACE_WITH_SMILING_EYES = n(b'\xF0\x9F\x98\x81')
    FACE_WITH_TEARS_OF_JOY = n(b'\xF0\x9F\x98\x82')
    SMILING_FACE_WITH_OPEN_MOUTH = n(b'\xF0\x9F\x98\x83')
    SMILING_FACE_WITH_OPEN_MOUTH_AND_SMILING_EYES = n(b'\xF0\x9F\x98\x84')
    SMILING_FACE_WITH_OPEN_MOUTH_AND_COLD_SWEAT = n(b'\xF0\x9F\x98\x85')
    SMILING_FACE_WITH_OPEN_MOUTH_AND_TIGHTLY_CLOSED_EYES = n(
        b'\xF0\x9F\x98\x86')
    WINKING_FACE = n(b'\xF0\x9F\x98\x89')
    SMILING_FACE_WITH_SMILING_EYES = n(b'\xF0\x9F\x98\x8A')
    FACE_SAVOURING_DELICIOUS_FOOD = n(b'\xF0\x9F\x98\x8B')
    RELIEVED_FACE = n(b'\xF0\x9F\x98\x8C')
    SMILING_FACE_WITH_HEART_SHAPED_EYES = n(b'\xF0\x9F\x98\x8D')
    SMIRKING_FACE = n(b'\xF0\x9F\x98\x8F')
    UNAMUSED_FACE = n(b'\xF0\x9F\x98\x92')
    FACE_WITH_COLD_SWEAT = n(b'\xF0\x9F\x98\x93')
    PENSIVE_FACE = n(b'\xF0\x9F\x98\x94')
    CONFOUNDED_FACE = n(b'\xF0\x9F\x98\x96')
    FACE_THROWING_A_KISS = n(b'\xF0\x9F\x98\x98')
    KISSING_FACE_WITH_CLOSED_EYES = n(b'\xF0\x9F\x98\x9A')
    FACE_WITH_STUCK_OUT_TONGUE_AND_WINKING_EYE = n(b'\xF0\x9F\x98\x9C')
    FACE_WITH_STUCK_OUT_TONGUE_AND_TIGHTLY_CLOSED_EYES = n(b'\xF0\x9F\x98\x9D')
    DISAPPOINTED_FACE = n(b'\xF0\x9F\x98\x9E')
    ANGRY_FACE = n(b'\xF0\x9F\x98\xA0')
    POUTING_FACE = n(b'\xF0\x9F\x98\xA1')
    CRYING_FACE = n(b'\xF0\x9F\x98\xA2')
    PERSEVERING_FACE = n(b'\xF0\x9F\x98\xA3')
    FACE_WITH_LOOK_OF_TRIUMPH = n(b'\xF0\x9F\x98\xA4')
    DISAPPOINTED_BUT_RELIEVED_FACE = n(b'\xF0\x9F\x98\xA5')
    FEARFUL_FACE = n(b'\xF0\x9F\x98\xA8')
    WEARY_FACE = n(b'\xF0\x9F\x98\xA9')
    SLEEPY_FACE = n(b'\xF0\x9F\x98\xAA')
    TIRED_FACE = n(b'\xF0\x9F\x98\xAB')
    LOUDLY_CRYING_FACE = n(b'\xF0\x9F\x98\xAD')
    FACE_WITH_OPEN_MOUTH_AND_COLD_SWEAT = n(b'\xF0\x9F\x98\xB0')
    FACE_SCREAMING_IN_FEAR = n(b'\xF0\x9F\x98\xB1')
    ASTONISHED_FACE = n(b'\xF0\x9F\x98\xB2')
    FLUSHED_FACE = n(b'\xF0\x9F\x98\xB3')
    DIZZY_FACE = n(b'\xF0\x9F\x98\xB5')
    FACE_WITH_MEDICAL_MASK = n(b'\xF0\x9F\x98\xB7')
    GRINNING_CAT_FACE_WITH_SMILING_EYES = n(b'\xF0\x9F\x98\xB8')
    CAT_FACE_WITH_TEARS_OF_JOY = n(b'\xF0\x9F\x98\xB9')
    SMILING_CAT_FACE_WITH_OPEN_MOUTH = n(b'\xF0\x9F\x98\xBA')
    SMILING_CAT_FACE_WITH_HEART_SHAPED_EYES = n(b'\xF0\x9F\x98\xBB')
    CAT_FACE_WITH_WRY_SMILE = n(b'\xF0\x9F\x98\xBC')
    KISSING_CAT_FACE_WITH_CLOSED_EYES = n(b'\xF0\x9F\x98\xBD')
    POUTING_CAT_FACE = n(b'\xF0\x9F\x98\xBE')
    CRYING_CAT_FACE = n(b'\xF0\x9F\x98\xBF')
    WEARY_CAT_FACE = n(b'\xF0\x9F\x99\x80')
    FACE_WITH_NO_GOOD_GESTURE = n(b'\xF0\x9F\x99\x85')
    FACE_WITH_OK_GESTURE = n(b'\xF0\x9F\x99\x86')
    PERSON_BOWING_DEEPLY = n(b'\xF0\x9F\x99\x87')
    SEE_NO_EVIL_MONKEY = n(b'\xF0\x9F\x99\x88')
    HEAR_NO_EVIL_MONKEY = n(b'\xF0\x9F\x99\x89')
    SPEAK_NO_EVIL_MONKEY = n(b'\xF0\x9F\x99\x8A')
    HAPPY_PERSON_RAISING_ONE_HAND = n(b'\xF0\x9F\x99\x8B')
    PERSON_RAISING_BOTH_HANDS_IN_CELEBRATION = n(b'\xF0\x9F\x99\x8C')
    PERSON_FROWNING = n(b'\xF0\x9F\x99\x8D')
    PERSON_WITH_POUTING_FACE = n(b'\xF0\x9F\x99\x8E')
    PERSON_WITH_FOLDED_HANDS = n(b'\xF0\x9F\x99\x8F')
    BLACK_SCISSORS = n(b'\xE2\x9C\x82')
    WHITE_HEAVY_CHECK_MARK = n(b'\xE2\x9C\x85')
    AIRPLANE = n(b'\xE2\x9C\x88')
    ENVELOPE = n(b'\xE2\x9C\x89')
    RAISED_FIST = n(b'\xE2\x9C\x8A')
    RAISED_HAND = n(b'\xE2\x9C\x8B')
    VICTORY_HAND = n(b'\xE2\x9C\x8C')
    PENCIL = n(b'\xE2\x9C\x8F')
    BLACK_NIB = n(b'\xE2\x9C\x92')
    HEAVY_CHECK_MARK = n(b'\xE2\x9C\x94')
    HEAVY_MULTIPLICATION_X = n(b'\xE2\x9C\x96')
    SPARKLES = n(b'\xE2\x9C\xA8')
    EIGHT_SPOKED_ASTERISK = n(b'\xE2\x9C\xB3')
    EIGHT_POINTED_BLACK_STAR = n(b'\xE2\x9C\xB4')
    SNOWFLAKE = n(b'\xE2\x9D\x84')
    SPARKLE = n(b'\xE2\x9D\x87')
    CROSS_MARK = n(b'\xE2\x9D\x8C')
    NEGATIVE_SQUARED_CROSS_MARK = n(b'\xE2\x9D\x8E')
    BLACK_QUESTION_MARK_ORNAMENT = n(b'\xE2\x9D\x93')
    WHITE_QUESTION_MARK_ORNAMENT = n(b'\xE2\x9D\x94')
    WHITE_EXCLAMATION_MARK_ORNAMENT = n(b'\xE2\x9D\x95')
    HEAVY_EXCLAMATION_MARK_SYMBOL = n(b'\xE2\x9D\x97')
    HEAVY_BLACK_HEART = n(b'\xE2\x9D\xA4')
    HEAVY_PLUS_SIGN = n(b'\xE2\x9E\x95')
    HEAVY_MINUS_SIGN = n(b'\xE2\x9E\x96')
    HEAVY_DIVISION_SIGN = n(b'\xE2\x9E\x97')
    BLACK_RIGHTWARDS_ARROW = n(b'\xE2\x9E\xA1')
    CURLY_LOOP = n(b'\xE2\x9E\xB0')
    ROCKET = n(b'\xF0\x9F\x9A\x80')
    RAILWAY_CAR = n(b'\xF0\x9F\x9A\x83')
    HIGH_SPEED_TRAIN = n(b'\xF0\x9F\x9A\x84')
    HIGH_SPEED_TRAIN_WITH_BULLET_NOSE = n(b'\xF0\x9F\x9A\x85')
    METRO = n(b'\xF0\x9F\x9A\x87')
    STATION = n(b'\xF0\x9F\x9A\x89')
    BUS = n(b'\xF0\x9F\x9A\x8C')
    BUS_STOP = n(b'\xF0\x9F\x9A\x8F')
    AMBULANCE = n(b'\xF0\x9F\x9A\x91')
    FIRE_ENGINE = n(b'\xF0\x9F\x9A\x92')
    POLICE_CAR = n(b'\xF0\x9F\x9A\x93')
    TAXI = n(b'\xF0\x9F\x9A\x95')
    AUTOMOBILE = n(b'\xF0\x9F\x9A\x97')
    RECREATIONAL_VEHICLE = n(b'\xF0\x9F\x9A\x99')
    DELIVERY_TRUCK = n(b'\xF0\x9F\x9A\x9A')
    SHIP = n(b'\xF0\x9F\x9A\xA2')
    SPEEDBOAT = n(b'\xF0\x9F\x9A\xA4')
    HORIZONTAL_TRAFFIC_LIGHT = n(b'\xF0\x9F\x9A\xA5')
    CONSTRUCTION_SIGN = n(b'\xF0\x9F\x9A\xA7')
    POLICE_CARS_REVOLVING_LIGHT = n(b'\xF0\x9F\x9A\xA8')
    TRIANGULAR_FLAG_ON_POST = n(b'\xF0\x9F\x9A\xA9')
    DOOR = n(b'\xF0\x9F\x9A\xAA')
    NO_ENTRY_SIGN = n(b'\xF0\x9F\x9A\xAB')
    SMOKING_SYMBOL = n(b'\xF0\x9F\x9A\xAC')
    NO_SMOKING_SYMBOL = n(b'\xF0\x9F\x9A\xAD')
    BICYCLE = n(b'\xF0\x9F\x9A\xB2')
    PEDESTRIAN = n(b'\xF0\x9F\x9A\xB6')
    MENS_SYMBOL = n(b'\xF0\x9F\x9A\xB9')
    WOMENS_SYMBOL = n(b'\xF0\x9F\x9A\xBA')
    RESTROOM = n(b'\xF0\x9F\x9A\xBB')
    BABY_SYMBOL = n(b'\xF0\x9F\x9A\xBC')
    TOILET = n(b'\xF0\x9F\x9A\xBD')
    WATER_CLOSET = n(b'\xF0\x9F\x9A\xBE')
    BATH = n(b'\xF0\x9F\x9B\x80')
    CIRCLED_LATIN_CAPITAL_LETTER_M = n(b'\xE2\x93\x82')
    NEGATIVE_SQUARED_LATIN_CAPITAL_LETTER_A = n(b'\xF0\x9F\x85\xB0')
    NEGATIVE_SQUARED_LATIN_CAPITAL_LETTER_B = n(b'\xF0\x9F\x85\xB1')
    NEGATIVE_SQUARED_LATIN_CAPITAL_LETTER_O = n(b'\xF0\x9F\x85\xBE')
    NEGATIVE_SQUARED_LATIN_CAPITAL_LETTER_P = n(b'\xF0\x9F\x85\xBF')
    NEGATIVE_SQUARED_AB = n(b'\xF0\x9F\x86\x8E')
    SQUARED_CL = n(b'\xF0\x9F\x86\x91')
    SQUARED_COOL = n(b'\xF0\x9F\x86\x92')
    SQUARED_FREE = n(b'\xF0\x9F\x86\x93')
    SQUARED_ID = n(b'\xF0\x9F\x86\x94')
    SQUARED_NEW = n(b'\xF0\x9F\x86\x95')
    SQUARED_NG = n(b'\xF0\x9F\x86\x96')
    SQUARED_OK = n(b'\xF0\x9F\x86\x97')
    SQUARED_SOS = n(b'\xF0\x9F\x86\x98')
    SQUARED_UP_WITH_EXCLAMATION_MARK = n(b'\xF0\x9F\x86\x99')
    SQUARED_VS = n(b'\xF0\x9F\x86\x9A')
    REGIONAL_INDICATOR_SYMBOL_LETTER_D_PLUS_REGIONAL_INDICATOR_SYMBOL_LETTER_E\
        = n(b'\xF0\x9F\x87\xA9\xF0\x9F\x87\xAA')
    REGIONAL_INDICATOR_SYMBOL_LETTER_G_PLUS_REGIONAL_INDICATOR_SYMBOL_LETTER_B\
        = n(b'\xF0\x9F\x87\xAC\xF0\x9F\x87\xA7')
    REGIONAL_INDICATOR_SYMBOL_LETTER_C_PLUS_REGIONAL_INDICATOR_SYMBOL_LETTER_N\
        = n(b'\xF0\x9F\x87\xA8\xF0\x9F\x87\xB3')
    REGIONAL_INDICATOR_SYMBOL_LETTER_J_PLUS_REGIONAL_INDICATOR_SYMBOL_LETTER_P\
        = n(b'\xF0\x9F\x87\xAF\xF0\x9F\x87\xB5')
    REGIONAL_INDICATOR_SYMBOL_LETTER_K_PLUS_REGIONAL_INDICATOR_SYMBOL_LETTER_R\
        = n(b'\xF0\x9F\x87\xB0\xF0\x9F\x87\xB7')
    REGIONAL_INDICATOR_SYMBOL_LETTER_F_PLUS_REGIONAL_INDICATOR_SYMBOL_LETTER_R\
        = n(b'\xF0\x9F\x87\xAB\xF0\x9F\x87\xB7')
    REGIONAL_INDICATOR_SYMBOL_LETTER_E_PLUS_REGIONAL_INDICATOR_SYMBOL_LETTER_S\
        = n(b'\xF0\x9F\x87\xAA\xF0\x9F\x87\xB8')
    REGIONAL_INDICATOR_SYMBOL_LETTER_I_PLUS_REGIONAL_INDICATOR_SYMBOL_LETTER_T\
        = n(b'\xF0\x9F\x87\xAE\xF0\x9F\x87\xB9')
    REGIONAL_INDICATOR_SYMBOL_LETTER_U_PLUS_REGIONAL_INDICATOR_SYMBOL_LETTER_S\
        = n(b'\xF0\x9F\x87\xBA\xF0\x9F\x87\xB8')
    REGIONAL_INDICATOR_SYMBOL_LETTER_R_PLUS_REGIONAL_INDICATOR_SYMBOL_LETTER_U\
        = n(b'\xF0\x9F\x87\xB7\xF0\x9F\x87\xBA')
    SQUARED_KATAKANA_KOKO = n(b'\xF0\x9F\x88\x81')
    SQUARED_KATAKANA_SA = n(b'\xF0\x9F\x88\x82')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_7121 = n(b'\xF0\x9F\x88\x9A')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_6307 = n(b'\xF0\x9F\x88\xAF')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_7981 = n(b'\xF0\x9F\x88\xB2')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_7A7A = n(b'\xF0\x9F\x88\xB3')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_5408 = n(b'\xF0\x9F\x88\xB4')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_6E80 = n(b'\xF0\x9F\x88\xB5')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_6709 = n(b'\xF0\x9F\x88\xB6')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_6708 = n(b'\xF0\x9F\x88\xB7')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_7533 = n(b'\xF0\x9F\x88\xB8')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_5272 = n(b'\xF0\x9F\x88\xB9')
    SQUARED_CJK_UNIFIED_IDEOGRAPH_55B6 = n(b'\xF0\x9F\x88\xBA')
    CIRCLED_IDEOGRAPH_ADVANTAGE = n(b'\xF0\x9F\x89\x90')
    CIRCLED_IDEOGRAPH_ACCEPT = n(b'\xF0\x9F\x89\x91')
    COPYRIGHT_SIGN = n(b'\xC2\xA9')
    REGISTERED_SIGN = n(b'\xC2\xAE')
    DOUBLE_EXCLAMATION_MARK = n(b'\xE2\x80\xBC')
    EXCLAMATION_QUESTION_MARK = n(b'\xE2\x81\x89')
    DIGIT_EIGHT_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x38\xE2\x83\xA3')
    DIGIT_NINE_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x39\xE2\x83\xA3')
    DIGIT_SEVEN_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x37\xE2\x83\xA3')
    DIGIT_SIX_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x36\xE2\x83\xA3')
    DIGIT_ONE_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x31\xE2\x83\xA3')
    DIGIT_ZERO_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x30\xE2\x83\xA3')
    DIGIT_TWO_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x32\xE2\x83\xA3')
    DIGIT_THREE_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x33\xE2\x83\xA3')
    DIGIT_FIVE_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x35\xE2\x83\xA3')
    DIGIT_FOUR_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x34\xE2\x83\xA3')
    NUMBER_SIGN_PLUS_COMBINING_ENCLOSING_KEYCAP = n(b'\x23\xE2\x83\xA3')
    TRADE_MARK_SIGN = n(b'\xE2\x84\xA2')
    INFORMATION_SOURCE = n(b'\xE2\x84\xB9')
    LEFT_RIGHT_ARROW = n(b'\xE2\x86\x94')
    UP_DOWN_ARROW = n(b'\xE2\x86\x95')
    NORTH_WEST_ARROW = n(b'\xE2\x86\x96')
    NORTH_EAST_ARROW = n(b'\xE2\x86\x97')
    SOUTH_EAST_ARROW = n(b'\xE2\x86\x98')
    SOUTH_WEST_ARROW = n(b'\xE2\x86\x99')
    LEFTWARDS_ARROW_WITH_HOOK = n(b'\xE2\x86\xA9')
    RIGHTWARDS_ARROW_WITH_HOOK = n(b'\xE2\x86\xAA')
    WATCH = n(b'\xE2\x8C\x9A')
    HOURGLASS = n(b'\xE2\x8C\x9B')
    BLACK_RIGHT_POINTING_DOUBLE_TRIANGLE = n(b'\xE2\x8F\xA9')
    BLACK_LEFT_POINTING_DOUBLE_TRIANGLE = n(b'\xE2\x8F\xAA')
    BLACK_UP_POINTING_DOUBLE_TRIANGLE = n(b'\xE2\x8F\xAB')
    BLACK_DOWN_POINTING_DOUBLE_TRIANGLE = n(b'\xE2\x8F\xAC')
    ALARM_CLOCK = n(b'\xE2\x8F\xB0')
    HOURGLASS_WITH_FLOWING_SAND = n(b'\xE2\x8F\xB3')
    BLACK_SMALL_SQUARE = n(b'\xE2\x96\xAA')
    WHITE_SMALL_SQUARE = n(b'\xE2\x96\xAB')
    BLACK_RIGHT_POINTING_TRIANGLE = n(b'\xE2\x96\xB6')
    BLACK_LEFT_POINTING_TRIANGLE = n(b'\xE2\x97\x80')
    WHITE_MEDIUM_SQUARE = n(b'\xE2\x97\xBB')
    BLACK_MEDIUM_SQUARE = n(b'\xE2\x97\xBC')
    WHITE_MEDIUM_SMALL_SQUARE = n(b'\xE2\x97\xBD')
    BLACK_MEDIUM_SMALL_SQUARE = n(b'\xE2\x97\xBE')
    BLACK_SUN_WITH_RAYS = n(b'\xE2\x98\x80')
    CLOUD = n(b'\xE2\x98\x81')
    BLACK_TELEPHONE = n(b'\xE2\x98\x8E')
    BALLOT_BOX_WITH_CHECK = n(b'\xE2\x98\x91')
    UMBRELLA_WITH_RAIN_DROPS = n(b'\xE2\x98\x94')
    HOT_BEVERAGE = n(b'\xE2\x98\x95')
    WHITE_UP_POINTING_INDEX = n(b'\xE2\x98\x9D')
    WHITE_SMILING_FACE = n(b'\xE2\x98\xBA')
    ARIES = n(b'\xE2\x99\x88')
    TAURUS = n(b'\xE2\x99\x89')
    GEMINI = n(b'\xE2\x99\x8A')
    CANCER = n(b'\xE2\x99\x8B')
    LEO = n(b'\xE2\x99\x8C')
    VIRGO = n(b'\xE2\x99\x8D')
    LIBRA = n(b'\xE2\x99\x8E')
    SCORPIUS = n(b'\xE2\x99\x8F')
    SAGITTARIUS = n(b'\xE2\x99\x90')
    CAPRICORN = n(b'\xE2\x99\x91')
    AQUARIUS = n(b'\xE2\x99\x92')
    PISCES = n(b'\xE2\x99\x93')
    BLACK_SPADE_SUIT = n(b'\xE2\x99\xA0')
    BLACK_CLUB_SUIT = n(b'\xE2\x99\xA3')
    BLACK_HEART_SUIT = n(b'\xE2\x99\xA5')
    BLACK_DIAMOND_SUIT = n(b'\xE2\x99\xA6')
    HOT_SPRINGS = n(b'\xE2\x99\xA8')
    BLACK_UNIVERSAL_RECYCLING_SYMBOL = n(b'\xE2\x99\xBB')
    WHEELCHAIR_SYMBOL = n(b'\xE2\x99\xBF')
    ANCHOR = n(b'\xE2\x9A\x93')
    WARNING_SIGN = n(b'\xE2\x9A\xA0')
    HIGH_VOLTAGE_SIGN = n(b'\xE2\x9A\xA1')
    MEDIUM_WHITE_CIRCLE = n(b'\xE2\x9A\xAA')
    MEDIUM_BLACK_CIRCLE = n(b'\xE2\x9A\xAB')
    SOCCER_BALL = n(b'\xE2\x9A\xBD')
    BASEBALL = n(b'\xE2\x9A\xBE')
    SNOWMAN_WITHOUT_SNOW = n(b'\xE2\x9B\x84')
    SUN_BEHIND_CLOUD = n(b'\xE2\x9B\x85')
    OPHIUCHUS = n(b'\xE2\x9B\x8E')
    NO_ENTRY = n(b'\xE2\x9B\x94')
    CHURCH = n(b'\xE2\x9B\xAA')
    FOUNTAIN = n(b'\xE2\x9B\xB2')
    FLAG_IN_HOLE = n(b'\xE2\x9B\xB3')
    SAILBOAT = n(b'\xE2\x9B\xB5')
    TENT = n(b'\xE2\x9B\xBA')
    FUEL_PUMP = n(b'\xE2\x9B\xBD')
    ARROW_POINTING_RIGHTWARDS_THEN_CURVING_UPWARDS = n(b'\xE2\xA4\xB4')
    ARROW_POINTING_RIGHTWARDS_THEN_CURVING_DOWNWARDS = n(b'\xE2\xA4\xB5')
    LEFTWARDS_BLACK_ARROW = n(b'\xE2\xAC\x85')
    UPWARDS_BLACK_ARROW = n(b'\xE2\xAC\x86')
    DOWNWARDS_BLACK_ARROW = n(b'\xE2\xAC\x87')
    BLACK_LARGE_SQUARE = n(b'\xE2\xAC\x9B')
    WHITE_LARGE_SQUARE = n(b'\xE2\xAC\x9C')
    WHITE_MEDIUM_STAR = n(b'\xE2\xAD\x90')
    HEAVY_LARGE_CIRCLE = n(b'\xE2\xAD\x95')
    WAVY_DASH = n(b'\xE3\x80\xB0')
    PART_ALTERNATION_MARK = n(b'\xE3\x80\xBD')
    CIRCLED_IDEOGRAPH_CONGRATULATION = n(b'\xE3\x8A\x97')
    CIRCLED_IDEOGRAPH_SECRET = n(b'\xE3\x8A\x99')
    MAHJONG_TILE_RED_DRAGON = n(b'\xF0\x9F\x80\x84')
    PLAYING_CARD_BLACK_JOKER = n(b'\xF0\x9F\x83\x8F')
    CYCLONE = n(b'\xF0\x9F\x8C\x80')
    FOGGY = n(b'\xF0\x9F\x8C\x81')
    CLOSED_UMBRELLA = n(b'\xF0\x9F\x8C\x82')
    NIGHT_WITH_STARS = n(b'\xF0\x9F\x8C\x83')
    SUNRISE_OVER_MOUNTAINS = n(b'\xF0\x9F\x8C\x84')
    SUNRISE = n(b'\xF0\x9F\x8C\x85')
    CITYSCAPE_AT_DUSK = n(b'\xF0\x9F\x8C\x86')
    SUNSET_OVER_BUILDINGS = n(b'\xF0\x9F\x8C\x87')
    RAINBOW = n(b'\xF0\x9F\x8C\x88')
    BRIDGE_AT_NIGHT = n(b'\xF0\x9F\x8C\x89')
    WATER_WAVE = n(b'\xF0\x9F\x8C\x8A')
    VOLCANO = n(b'\xF0\x9F\x8C\x8B')
    MILKY_WAY = n(b'\xF0\x9F\x8C\x8C')
    EARTH_GLOBE_ASIA_AUSTRALIA = n(b'\xF0\x9F\x8C\x8F')
    NEW_MOON_SYMBOL = n(b'\xF0\x9F\x8C\x91')
    FIRST_QUARTER_MOON_SYMBOL = n(b'\xF0\x9F\x8C\x93')
    WAXING_GIBBOUS_MOON_SYMBOL = n(b'\xF0\x9F\x8C\x94')
    FULL_MOON_SYMBOL = n(b'\xF0\x9F\x8C\x95')
    CRESCENT_MOON = n(b'\xF0\x9F\x8C\x99')
    FIRST_QUARTER_MOON_WITH_FACE = n(b'\xF0\x9F\x8C\x9B')
    GLOWING_STAR = n(b'\xF0\x9F\x8C\x9F')
    SHOOTING_STAR = n(b'\xF0\x9F\x8C\xA0')
    CHESTNUT = n(b'\xF0\x9F\x8C\xB0')
    SEEDLING = n(b'\xF0\x9F\x8C\xB1')
    PALM_TREE = n(b'\xF0\x9F\x8C\xB4')
    CACTUS = n(b'\xF0\x9F\x8C\xB5')
    TULIP = n(b'\xF0\x9F\x8C\xB7')
    CHERRY_BLOSSOM = n(b'\xF0\x9F\x8C\xB8')
    ROSE = n(b'\xF0\x9F\x8C\xB9')
    HIBISCUS = n(b'\xF0\x9F\x8C\xBA')
    SUNFLOWER = n(b'\xF0\x9F\x8C\xBB')
    BLOSSOM = n(b'\xF0\x9F\x8C\xBC')
    EAR_OF_MAIZE = n(b'\xF0\x9F\x8C\xBD')
    EAR_OF_RICE = n(b'\xF0\x9F\x8C\xBE')
    HERB = n(b'\xF0\x9F\x8C\xBF')
    FOUR_LEAF_CLOVER = n(b'\xF0\x9F\x8D\x80')
    MAPLE_LEAF = n(b'\xF0\x9F\x8D\x81')
    FALLEN_LEAF = n(b'\xF0\x9F\x8D\x82')
    LEAF_FLUTTERING_IN_WIND = n(b'\xF0\x9F\x8D\x83')
    MUSHROOM = n(b'\xF0\x9F\x8D\x84')
    TOMATO = n(b'\xF0\x9F\x8D\x85')
    AUBERGINE = n(b'\xF0\x9F\x8D\x86')
    GRAPES = n(b'\xF0\x9F\x8D\x87')
    MELON = n(b'\xF0\x9F\x8D\x88')
    WATERMELON = n(b'\xF0\x9F\x8D\x89')
    TANGERINE = n(b'\xF0\x9F\x8D\x8A')
    BANANA = n(b'\xF0\x9F\x8D\x8C')
    PINEAPPLE = n(b'\xF0\x9F\x8D\x8D')
    RED_APPLE = n(b'\xF0\x9F\x8D\x8E')
    GREEN_APPLE = n(b'\xF0\x9F\x8D\x8F')
    PEACH = n(b'\xF0\x9F\x8D\x91')
    CHERRIES = n(b'\xF0\x9F\x8D\x92')
    STRAWBERRY = n(b'\xF0\x9F\x8D\x93')
    HAMBURGER = n(b'\xF0\x9F\x8D\x94')
    SLICE_OF_PIZZA = n(b'\xF0\x9F\x8D\x95')
    MEAT_ON_BONE = n(b'\xF0\x9F\x8D\x96')
    POULTRY_LEG = n(b'\xF0\x9F\x8D\x97')
    RICE_CRACKER = n(b'\xF0\x9F\x8D\x98')
    RICE_BALL = n(b'\xF0\x9F\x8D\x99')
    COOKED_RICE = n(b'\xF0\x9F\x8D\x9A')
    CURRY_AND_RICE = n(b'\xF0\x9F\x8D\x9B')
    STEAMING_BOWL = n(b'\xF0\x9F\x8D\x9C')
    SPAGHETTI = n(b'\xF0\x9F\x8D\x9D')
    BREAD = n(b'\xF0\x9F\x8D\x9E')
    FRENCH_FRIES = n(b'\xF0\x9F\x8D\x9F')
    ROASTED_SWEET_POTATO = n(b'\xF0\x9F\x8D\xA0')
    DANGO = n(b'\xF0\x9F\x8D\xA1')
    ODEN = n(b'\xF0\x9F\x8D\xA2')
    SUSHI = n(b'\xF0\x9F\x8D\xA3')
    FRIED_SHRIMP = n(b'\xF0\x9F\x8D\xA4')
    FISH_CAKE_WITH_SWIRL_DESIGN = n(b'\xF0\x9F\x8D\xA5')
    SOFT_ICE_CREAM = n(b'\xF0\x9F\x8D\xA6')
    SHAVED_ICE = n(b'\xF0\x9F\x8D\xA7')
    ICE_CREAM = n(b'\xF0\x9F\x8D\xA8')
    DOUGHNUT = n(b'\xF0\x9F\x8D\xA9')
    COOKIE = n(b'\xF0\x9F\x8D\xAA')
    CHOCOLATE_BAR = n(b'\xF0\x9F\x8D\xAB')
    CANDY = n(b'\xF0\x9F\x8D\xAC')
    LOLLIPOP = n(b'\xF0\x9F\x8D\xAD')
    CUSTARD = n(b'\xF0\x9F\x8D\xAE')
    HONEY_POT = n(b'\xF0\x9F\x8D\xAF')
    SHORTCAKE = n(b'\xF0\x9F\x8D\xB0')
    BENTO_BOX = n(b'\xF0\x9F\x8D\xB1')
    POT_OF_FOOD = n(b'\xF0\x9F\x8D\xB2')
    COOKING = n(b'\xF0\x9F\x8D\xB3')
    FORK_AND_KNIFE = n(b'\xF0\x9F\x8D\xB4')
    TEACUP_WITHOUT_HANDLE = n(b'\xF0\x9F\x8D\xB5')
    SAKE_BOTTLE_AND_CUP = n(b'\xF0\x9F\x8D\xB6')
    WINE_GLASS = n(b'\xF0\x9F\x8D\xB7')
    COCKTAIL_GLASS = n(b'\xF0\x9F\x8D\xB8')
    TROPICAL_DRINK = n(b'\xF0\x9F\x8D\xB9')
    BEER_MUG = n(b'\xF0\x9F\x8D\xBA')
    CLINKING_BEER_MUGS = n(b'\xF0\x9F\x8D\xBB')
    RIBBON = n(b'\xF0\x9F\x8E\x80')
    WRAPPED_PRESENT = n(b'\xF0\x9F\x8E\x81')
    BIRTHDAY_CAKE = n(b'\xF0\x9F\x8E\x82')
    JACK_O_LANTERN = n(b'\xF0\x9F\x8E\x83')
    CHRISTMAS_TREE = n(b'\xF0\x9F\x8E\x84')
    FATHER_CHRISTMAS = n(b'\xF0\x9F\x8E\x85')
    FIREWORKS = n(b'\xF0\x9F\x8E\x86')
    FIREWORK_SPARKLER = n(b'\xF0\x9F\x8E\x87')
    BALLOON = n(b'\xF0\x9F\x8E\x88')
    PARTY_POPPER = n(b'\xF0\x9F\x8E\x89')
    CONFETTI_BALL = n(b'\xF0\x9F\x8E\x8A')
    TANABATA_TREE = n(b'\xF0\x9F\x8E\x8B')
    CROSSED_FLAGS = n(b'\xF0\x9F\x8E\x8C')
    PINE_DECORATION = n(b'\xF0\x9F\x8E\x8D')
    JAPANESE_DOLLS = n(b'\xF0\x9F\x8E\x8E')
    CARP_STREAMER = n(b'\xF0\x9F\x8E\x8F')
    WIND_CHIME = n(b'\xF0\x9F\x8E\x90')
    MOON_VIEWING_CEREMONY = n(b'\xF0\x9F\x8E\x91')
    SCHOOL_SATCHEL = n(b'\xF0\x9F\x8E\x92')
    GRADUATION_CAP = n(b'\xF0\x9F\x8E\x93')
    CAROUSEL_HORSE = n(b'\xF0\x9F\x8E\xA0')
    FERRIS_WHEEL = n(b'\xF0\x9F\x8E\xA1')
    ROLLER_COASTER = n(b'\xF0\x9F\x8E\xA2')
    FISHING_POLE_AND_FISH = n(b'\xF0\x9F\x8E\xA3')
    MICROPHONE = n(b'\xF0\x9F\x8E\xA4')
    MOVIE_CAMERA = n(b'\xF0\x9F\x8E\xA5')
    CINEMA = n(b'\xF0\x9F\x8E\xA6')
    HEADPHONE = n(b'\xF0\x9F\x8E\xA7')
    ARTIST_PALETTE = n(b'\xF0\x9F\x8E\xA8')
    TOP_HAT = n(b'\xF0\x9F\x8E\xA9')
    CIRCUS_TENT = n(b'\xF0\x9F\x8E\xAA')
    TICKET = n(b'\xF0\x9F\x8E\xAB')
    CLAPPER_BOARD = n(b'\xF0\x9F\x8E\xAC')
    PERFORMING_ARTS = n(b'\xF0\x9F\x8E\xAD')
    VIDEO_GAME = n(b'\xF0\x9F\x8E\xAE')
    DIRECT_HIT = n(b'\xF0\x9F\x8E\xAF')
    SLOT_MACHINE = n(b'\xF0\x9F\x8E\xB0')
    BILLIARDS = n(b'\xF0\x9F\x8E\xB1')
    GAME_DIE = n(b'\xF0\x9F\x8E\xB2')
    BOWLING = n(b'\xF0\x9F\x8E\xB3')
    FLOWER_PLAYING_CARDS = n(b'\xF0\x9F\x8E\xB4')
    MUSICAL_NOTE = n(b'\xF0\x9F\x8E\xB5')
    MULTIPLE_MUSICAL_NOTES = n(b'\xF0\x9F\x8E\xB6')
    SAXOPHONE = n(b'\xF0\x9F\x8E\xB7')
    GUITAR = n(b'\xF0\x9F\x8E\xB8')
    MUSICAL_KEYBOARD = n(b'\xF0\x9F\x8E\xB9')
    TRUMPET = n(b'\xF0\x9F\x8E\xBA')
    VIOLIN = n(b'\xF0\x9F\x8E\xBB')
    MUSICAL_SCORE = n(b'\xF0\x9F\x8E\xBC')
    RUNNING_SHIRT_WITH_SASH = n(b'\xF0\x9F\x8E\xBD')
    TENNIS_RACQUET_AND_BALL = n(b'\xF0\x9F\x8E\xBE')
    SKI_AND_SKI_BOOT = n(b'\xF0\x9F\x8E\xBF')
    BASKETBALL_AND_HOOP = n(b'\xF0\x9F\x8F\x80')
    CHEQUERED_FLAG = n(b'\xF0\x9F\x8F\x81')
    SNOWBOARDER = n(b'\xF0\x9F\x8F\x82')
    RUNNER = n(b'\xF0\x9F\x8F\x83')
    SURFER = n(b'\xF0\x9F\x8F\x84')
    TROPHY = n(b'\xF0\x9F\x8F\x86')
    AMERICAN_FOOTBALL = n(b'\xF0\x9F\x8F\x88')
    SWIMMER = n(b'\xF0\x9F\x8F\x8A')
    HOUSE_BUILDING = n(b'\xF0\x9F\x8F\xA0')
    HOUSE_WITH_GARDEN = n(b'\xF0\x9F\x8F\xA1')
    OFFICE_BUILDING = n(b'\xF0\x9F\x8F\xA2')
    JAPANESE_POST_OFFICE = n(b'\xF0\x9F\x8F\xA3')
    HOSPITAL = n(b'\xF0\x9F\x8F\xA5')
    BANK = n(b'\xF0\x9F\x8F\xA6')
    AUTOMATED_TELLER_MACHINE = n(b'\xF0\x9F\x8F\xA7')
    HOTEL = n(b'\xF0\x9F\x8F\xA8')
    LOVE_HOTEL = n(b'\xF0\x9F\x8F\xA9')
    CONVENIENCE_STORE = n(b'\xF0\x9F\x8F\xAA')
    SCHOOL = n(b'\xF0\x9F\x8F\xAB')
    DEPARTMENT_STORE = n(b'\xF0\x9F\x8F\xAC')
    FACTORY = n(b'\xF0\x9F\x8F\xAD')
    IZAKAYA_LANTERN = n(b'\xF0\x9F\x8F\xAE')
    JAPANESE_CASTLE = n(b'\xF0\x9F\x8F\xAF')
    EUROPEAN_CASTLE = n(b'\xF0\x9F\x8F\xB0')
    SNAIL = n(b'\xF0\x9F\x90\x8C')
    SNAKE = n(b'\xF0\x9F\x90\x8D')
    HORSE = n(b'\xF0\x9F\x90\x8E')
    SHEEP = n(b'\xF0\x9F\x90\x91')
    MONKEY = n(b'\xF0\x9F\x90\x92')
    CHICKEN = n(b'\xF0\x9F\x90\x94')
    BOAR = n(b'\xF0\x9F\x90\x97')
    ELEPHANT = n(b'\xF0\x9F\x90\x98')
    OCTOPUS = n(b'\xF0\x9F\x90\x99')
    SPIRAL_SHELL = n(b'\xF0\x9F\x90\x9A')
    BUG = n(b'\xF0\x9F\x90\x9B')
    ANT = n(b'\xF0\x9F\x90\x9C')
    HONEYBEE = n(b'\xF0\x9F\x90\x9D')
    LADY_BEETLE = n(b'\xF0\x9F\x90\x9E')
    FISH = n(b'\xF0\x9F\x90\x9F')
    TROPICAL_FISH = n(b'\xF0\x9F\x90\xA0')
    BLOWFISH = n(b'\xF0\x9F\x90\xA1')
    TURTLE = n(b'\xF0\x9F\x90\xA2')
    HATCHING_CHICK = n(b'\xF0\x9F\x90\xA3')
    BABY_CHICK = n(b'\xF0\x9F\x90\xA4')
    FRONT_FACING_BABY_CHICK = n(b'\xF0\x9F\x90\xA5')
    BIRD = n(b'\xF0\x9F\x90\xA6')
    PENGUIN = n(b'\xF0\x9F\x90\xA7')
    KOALA = n(b'\xF0\x9F\x90\xA8')
    POODLE = n(b'\xF0\x9F\x90\xA9')
    BACTRIAN_CAMEL = n(b'\xF0\x9F\x90\xAB')
    DOLPHIN = n(b'\xF0\x9F\x90\xAC')
    MOUSE_FACE = n(b'\xF0\x9F\x90\xAD')
    COW_FACE = n(b'\xF0\x9F\x90\xAE')
    TIGER_FACE = n(b'\xF0\x9F\x90\xAF')
    RABBIT_FACE = n(b'\xF0\x9F\x90\xB0')
    CAT_FACE = n(b'\xF0\x9F\x90\xB1')
    DRAGON_FACE = n(b'\xF0\x9F\x90\xB2')
    SPOUTING_WHALE = n(b'\xF0\x9F\x90\xB3')
    HORSE_FACE = n(b'\xF0\x9F\x90\xB4')
    MONKEY_FACE = n(b'\xF0\x9F\x90\xB5')
    DOG_FACE = n(b'\xF0\x9F\x90\xB6')
    PIG_FACE = n(b'\xF0\x9F\x90\xB7')
    FROG_FACE = n(b'\xF0\x9F\x90\xB8')
    HAMSTER_FACE = n(b'\xF0\x9F\x90\xB9')
    WOLF_FACE = n(b'\xF0\x9F\x90\xBA')
    BEAR_FACE = n(b'\xF0\x9F\x90\xBB')
    PANDA_FACE = n(b'\xF0\x9F\x90\xBC')
    PIG_NOSE = n(b'\xF0\x9F\x90\xBD')
    PAW_PRINTS = n(b'\xF0\x9F\x90\xBE')
    EYES = n(b'\xF0\x9F\x91\x80')
    EAR = n(b'\xF0\x9F\x91\x82')
    NOSE = n(b'\xF0\x9F\x91\x83')
    MOUTH = n(b'\xF0\x9F\x91\x84')
    TONGUE = n(b'\xF0\x9F\x91\x85')
    WHITE_UP_POINTING_BACKHAND_INDEX = n(b'\xF0\x9F\x91\x86')
    WHITE_DOWN_POINTING_BACKHAND_INDEX = n(b'\xF0\x9F\x91\x87')
    WHITE_LEFT_POINTING_BACKHAND_INDEX = n(b'\xF0\x9F\x91\x88')
    WHITE_RIGHT_POINTING_BACKHAND_INDEX = n(b'\xF0\x9F\x91\x89')
    FISTED_HAND_SIGN = n(b'\xF0\x9F\x91\x8A')
    WAVING_HAND_SIGN = n(b'\xF0\x9F\x91\x8B')
    OK_HAND_SIGN = n(b'\xF0\x9F\x91\x8C')
    THUMBS_UP_SIGN = n(b'\xF0\x9F\x91\x8D')
    THUMBS_DOWN_SIGN = n(b'\xF0\x9F\x91\x8E')
    CLAPPING_HANDS_SIGN = n(b'\xF0\x9F\x91\x8F')
    OPEN_HANDS_SIGN = n(b'\xF0\x9F\x91\x90')
    CROWN = n(b'\xF0\x9F\x91\x91')
    WOMANS_HAT = n(b'\xF0\x9F\x91\x92')
    EYEGLASSES = n(b'\xF0\x9F\x91\x93')
    NECKTIE = n(b'\xF0\x9F\x91\x94')
    T_SHIRT = n(b'\xF0\x9F\x91\x95')
    JEANS = n(b'\xF0\x9F\x91\x96')
    DRESS = n(b'\xF0\x9F\x91\x97')
    KIMONO = n(b'\xF0\x9F\x91\x98')
    BIKINI = n(b'\xF0\x9F\x91\x99')
    WOMANS_CLOTHES = n(b'\xF0\x9F\x91\x9A')
    PURSE = n(b'\xF0\x9F\x91\x9B')
    HANDBAG = n(b'\xF0\x9F\x91\x9C')
    POUCH = n(b'\xF0\x9F\x91\x9D')
    MANS_SHOE = n(b'\xF0\x9F\x91\x9E')
    ATHLETIC_SHOE = n(b'\xF0\x9F\x91\x9F')
    HIGH_HEELED_SHOE = n(b'\xF0\x9F\x91\xA0')
    WOMANS_SANDAL = n(b'\xF0\x9F\x91\xA1')
    WOMANS_BOOTS = n(b'\xF0\x9F\x91\xA2')
    FOOTPRINTS = n(b'\xF0\x9F\x91\xA3')
    BUST_IN_SILHOUETTE = n(b'\xF0\x9F\x91\xA4')
    BOY = n(b'\xF0\x9F\x91\xA6')
    GIRL = n(b'\xF0\x9F\x91\xA7')
    MAN = n(b'\xF0\x9F\x91\xA8')
    WOMAN = n(b'\xF0\x9F\x91\xA9')
    FAMILY = n(b'\xF0\x9F\x91\xAA')
    MAN_AND_WOMAN_HOLDING_HANDS = n(b'\xF0\x9F\x91\xAB')
    POLICE_OFFICER = n(b'\xF0\x9F\x91\xAE')
    WOMAN_WITH_BUNNY_EARS = n(b'\xF0\x9F\x91\xAF')
    BRIDE_WITH_VEIL = n(b'\xF0\x9F\x91\xB0')
    PERSON_WITH_BLOND_HAIR = n(b'\xF0\x9F\x91\xB1')
    MAN_WITH_GUA_PI_MAO = n(b'\xF0\x9F\x91\xB2')
    MAN_WITH_TURBAN = n(b'\xF0\x9F\x91\xB3')
    OLDER_MAN = n(b'\xF0\x9F\x91\xB4')
    OLDER_WOMAN = n(b'\xF0\x9F\x91\xB5')
    BABY = n(b'\xF0\x9F\x91\xB6')
    CONSTRUCTION_WORKER = n(b'\xF0\x9F\x91\xB7')
    PRINCESS = n(b'\xF0\x9F\x91\xB8')
    JAPANESE_OGRE = n(b'\xF0\x9F\x91\xB9')
    JAPANESE_GOBLIN = n(b'\xF0\x9F\x91\xBA')
    GHOST = n(b'\xF0\x9F\x91\xBB')
    BABY_ANGEL = n(b'\xF0\x9F\x91\xBC')
    EXTRATERRESTRIAL_ALIEN = n(b'\xF0\x9F\x91\xBD')
    ALIEN_MONSTER = n(b'\xF0\x9F\x91\xBE')
    IMP = n(b'\xF0\x9F\x91\xBF')
    SKULL = n(b'\xF0\x9F\x92\x80')
    INFORMATION_DESK_PERSON = n(b'\xF0\x9F\x92\x81')
    GUARDSMAN = n(b'\xF0\x9F\x92\x82')
    DANCER = n(b'\xF0\x9F\x92\x83')
    LIPSTICK = n(b'\xF0\x9F\x92\x84')
    NAIL_POLISH = n(b'\xF0\x9F\x92\x85')
    FACE_MASSAGE = n(b'\xF0\x9F\x92\x86')
    HAIRCUT = n(b'\xF0\x9F\x92\x87')
    BARBER_POLE = n(b'\xF0\x9F\x92\x88')
    SYRINGE = n(b'\xF0\x9F\x92\x89')
    PILL = n(b'\xF0\x9F\x92\x8A')
    KISS_MARK = n(b'\xF0\x9F\x92\x8B')
    LOVE_LETTER = n(b'\xF0\x9F\x92\x8C')
    RING = n(b'\xF0\x9F\x92\x8D')
    GEM_STONE = n(b'\xF0\x9F\x92\x8E')
    KISS = n(b'\xF0\x9F\x92\x8F')
    BOUQUET = n(b'\xF0\x9F\x92\x90')
    COUPLE_WITH_HEART = n(b'\xF0\x9F\x92\x91')
    WEDDING = n(b'\xF0\x9F\x92\x92')
    BEATING_HEART = n(b'\xF0\x9F\x92\x93')
    BROKEN_HEART = n(b'\xF0\x9F\x92\x94')
    TWO_HEARTS = n(b'\xF0\x9F\x92\x95')
    SPARKLING_HEART = n(b'\xF0\x9F\x92\x96')
    GROWING_HEART = n(b'\xF0\x9F\x92\x97')
    HEART_WITH_ARROW = n(b'\xF0\x9F\x92\x98')
    BLUE_HEART = n(b'\xF0\x9F\x92\x99')
    GREEN_HEART = n(b'\xF0\x9F\x92\x9A')
    YELLOW_HEART = n(b'\xF0\x9F\x92\x9B')
    PURPLE_HEART = n(b'\xF0\x9F\x92\x9C')
    HEART_WITH_RIBBON = n(b'\xF0\x9F\x92\x9D')
    REVOLVING_HEARTS = n(b'\xF0\x9F\x92\x9E')
    HEART_DECORATION = n(b'\xF0\x9F\x92\x9F')
    DIAMOND_SHAPE_WITH_A_DOT_INSIDE = n(b'\xF0\x9F\x92\xA0')
    ELECTRIC_LIGHT_BULB = n(b'\xF0\x9F\x92\xA1')
    ANGER_SYMBOL = n(b'\xF0\x9F\x92\xA2')
    BOMB = n(b'\xF0\x9F\x92\xA3')
    SLEEPING_SYMBOL = n(b'\xF0\x9F\x92\xA4')
    COLLISION_SYMBOL = n(b'\xF0\x9F\x92\xA5')
    SPLASHING_SWEAT_SYMBOL = n(b'\xF0\x9F\x92\xA6')
    DROPLET = n(b'\xF0\x9F\x92\xA7')
    DASH_SYMBOL = n(b'\xF0\x9F\x92\xA8')
    PILE_OF_POO = n(b'\xF0\x9F\x92\xA9')
    FLEXED_BICEPS = n(b'\xF0\x9F\x92\xAA')
    DIZZY_SYMBOL = n(b'\xF0\x9F\x92\xAB')
    SPEECH_BALLOON = n(b'\xF0\x9F\x92\xAC')
    WHITE_FLOWER = n(b'\xF0\x9F\x92\xAE')
    HUNDRED_POINTS_SYMBOL = n(b'\xF0\x9F\x92\xAF')
    MONEY_BAG = n(b'\xF0\x9F\x92\xB0')
    CURRENCY_EXCHANGE = n(b'\xF0\x9F\x92\xB1')
    HEAVY_DOLLAR_SIGN = n(b'\xF0\x9F\x92\xB2')
    CREDIT_CARD = n(b'\xF0\x9F\x92\xB3')
    BANKNOTE_WITH_YEN_SIGN = n(b'\xF0\x9F\x92\xB4')
    BANKNOTE_WITH_DOLLAR_SIGN = n(b'\xF0\x9F\x92\xB5')
    MONEY_WITH_WINGS = n(b'\xF0\x9F\x92\xB8')
    CHART_WITH_UPWARDS_TREND_AND_YEN_SIGN = n(b'\xF0\x9F\x92\xB9')
    SEAT = n(b'\xF0\x9F\x92\xBA')
    PERSONAL_COMPUTER = n(b'\xF0\x9F\x92\xBB')
    BRIEFCASE = n(b'\xF0\x9F\x92\xBC')
    MINIDISC = n(b'\xF0\x9F\x92\xBD')
    FLOPPY_DISK = n(b'\xF0\x9F\x92\xBE')
    OPTICAL_DISC = n(b'\xF0\x9F\x92\xBF')
    DVD = n(b'\xF0\x9F\x93\x80')
    FILE_FOLDER = n(b'\xF0\x9F\x93\x81')
    OPEN_FILE_FOLDER = n(b'\xF0\x9F\x93\x82')
    PAGE_WITH_CURL = n(b'\xF0\x9F\x93\x83')
    PAGE_FACING_UP = n(b'\xF0\x9F\x93\x84')
    CALENDAR = n(b'\xF0\x9F\x93\x85')
    TEAR_OFF_CALENDAR = n(b'\xF0\x9F\x93\x86')
    CARD_INDEX = n(b'\xF0\x9F\x93\x87')
    CHART_WITH_UPWARDS_TREND = n(b'\xF0\x9F\x93\x88')
    CHART_WITH_DOWNWARDS_TREND = n(b'\xF0\x9F\x93\x89')
    BAR_CHART = n(b'\xF0\x9F\x93\x8A')
    CLIPBOARD = n(b'\xF0\x9F\x93\x8B')
    PUSHPIN = n(b'\xF0\x9F\x93\x8C')
    ROUND_PUSHPIN = n(b'\xF0\x9F\x93\x8D')
    PAPERCLIP = n(b'\xF0\x9F\x93\x8E')
    STRAIGHT_RULER = n(b'\xF0\x9F\x93\x8F')
    TRIANGULAR_RULER = n(b'\xF0\x9F\x93\x90')
    BOOKMARK_TABS = n(b'\xF0\x9F\x93\x91')
    LEDGER = n(b'\xF0\x9F\x93\x92')
    NOTEBOOK = n(b'\xF0\x9F\x93\x93')
    NOTEBOOK_WITH_DECORATIVE_COVER = n(b'\xF0\x9F\x93\x94')
    CLOSED_BOOK = n(b'\xF0\x9F\x93\x95')
    OPEN_BOOK = n(b'\xF0\x9F\x93\x96')
    GREEN_BOOK = n(b'\xF0\x9F\x93\x97')
    BLUE_BOOK = n(b'\xF0\x9F\x93\x98')
    ORANGE_BOOK = n(b'\xF0\x9F\x93\x99')
    BOOKS = n(b'\xF0\x9F\x93\x9A')
    NAME_BADGE = n(b'\xF0\x9F\x93\x9B')
    SCROLL = n(b'\xF0\x9F\x93\x9C')
    MEMO = n(b'\xF0\x9F\x93\x9D')
    TELEPHONE_RECEIVER = n(b'\xF0\x9F\x93\x9E')
    PAGER = n(b'\xF0\x9F\x93\x9F')
    FAX_MACHINE = n(b'\xF0\x9F\x93\xA0')
    SATELLITE_ANTENNA = n(b'\xF0\x9F\x93\xA1')
    PUBLIC_ADDRESS_LOUDSPEAKER = n(b'\xF0\x9F\x93\xA2')
    CHEERING_MEGAPHONE = n(b'\xF0\x9F\x93\xA3')
    OUTBOX_TRAY = n(b'\xF0\x9F\x93\xA4')
    INBOX_TRAY = n(b'\xF0\x9F\x93\xA5')
    PACKAGE = n(b'\xF0\x9F\x93\xA6')
    E_MAIL_SYMBOL = n(b'\xF0\x9F\x93\xA7')
    INCOMING_ENVELOPE = n(b'\xF0\x9F\x93\xA8')
    ENVELOPE_WITH_DOWNWARDS_ARROW_ABOVE = n(b'\xF0\x9F\x93\xA9')
    CLOSED_MAILBOX_WITH_LOWERED_FLAG = n(b'\xF0\x9F\x93\xAA')
    CLOSED_MAILBOX_WITH_RAISED_FLAG = n(b'\xF0\x9F\x93\xAB')
    POSTBOX = n(b'\xF0\x9F\x93\xAE')
    NEWSPAPER = n(b'\xF0\x9F\x93\xB0')
    MOBILE_PHONE = n(b'\xF0\x9F\x93\xB1')
    MOBILE_PHONE_WITH_RIGHTWARDS_ARROW_AT_LEFT = n(b'\xF0\x9F\x93\xB2')
    VIBRATION_MODE = n(b'\xF0\x9F\x93\xB3')
    MOBILE_PHONE_OFF = n(b'\xF0\x9F\x93\xB4')
    ANTENNA_WITH_BARS = n(b'\xF0\x9F\x93\xB6')
    CAMERA = n(b'\xF0\x9F\x93\xB7')
    VIDEO_CAMERA = n(b'\xF0\x9F\x93\xB9')
    TELEVISION = n(b'\xF0\x9F\x93\xBA')
    RADIO = n(b'\xF0\x9F\x93\xBB')
    VIDEOCASSETTE = n(b'\xF0\x9F\x93\xBC')
    CLOCKWISE_DOWNWARDS_AND_UPWARDS_OPEN_CIRCLE_ARROWS = n(b'\xF0\x9F\x94\x83')
    SPEAKER_WITH_THREE_SOUND_WAVES = n(b'\xF0\x9F\x94\x8A')
    BATTERY = n(b'\xF0\x9F\x94\x8B')
    ELECTRIC_PLUG = n(b'\xF0\x9F\x94\x8C')
    LEFT_POINTING_MAGNIFYING_GLASS = n(b'\xF0\x9F\x94\x8D')
    RIGHT_POINTING_MAGNIFYING_GLASS = n(b'\xF0\x9F\x94\x8E')
    LOCK_WITH_INK_PEN = n(b'\xF0\x9F\x94\x8F')
    CLOSED_LOCK_WITH_KEY = n(b'\xF0\x9F\x94\x90')
    KEY = n(b'\xF0\x9F\x94\x91')
    LOCK = n(b'\xF0\x9F\x94\x92')
    OPEN_LOCK = n(b'\xF0\x9F\x94\x93')
    BELL = n(b'\xF0\x9F\x94\x94')
    BOOKMARK = n(b'\xF0\x9F\x94\x96')
    LINK_SYMBOL = n(b'\xF0\x9F\x94\x97')
    RADIO_BUTTON = n(b'\xF0\x9F\x94\x98')
    BACK_WITH_LEFTWARDS_ARROW_ABOVE = n(b'\xF0\x9F\x94\x99')
    END_WITH_LEFTWARDS_ARROW_ABOVE = n(b'\xF0\x9F\x94\x9A')
    ON_WITH_EXCLAMATION_MARK_WITH_LEFT_RIGHT_ARROW_ABOVE = n(
        b'\xF0\x9F\x94\x9B')
    SOON_WITH_RIGHTWARDS_ARROW_ABOVE = n(b'\xF0\x9F\x94\x9C')
    TOP_WITH_UPWARDS_ARROW_ABOVE = n(b'\xF0\x9F\x94\x9D')
    NO_ONE_UNDER_EIGHTEEN_SYMBOL = n(b'\xF0\x9F\x94\x9E')
    KEYCAP_TEN = n(b'\xF0\x9F\x94\x9F')
    INPUT_SYMBOL_FOR_LATIN_CAPITAL_LETTERS = n(b'\xF0\x9F\x94\xA0')
    INPUT_SYMBOL_FOR_LATIN_SMALL_LETTERS = n(b'\xF0\x9F\x94\xA1')
    INPUT_SYMBOL_FOR_NUMBERS = n(b'\xF0\x9F\x94\xA2')
    INPUT_SYMBOL_FOR_SYMBOLS = n(b'\xF0\x9F\x94\xA3')
    INPUT_SYMBOL_FOR_LATIN_LETTERS = n(b'\xF0\x9F\x94\xA4')
    FIRE = n(b'\xF0\x9F\x94\xA5')
    ELECTRIC_TORCH = n(b'\xF0\x9F\x94\xA6')
    WRENCH = n(b'\xF0\x9F\x94\xA7')
    HAMMER = n(b'\xF0\x9F\x94\xA8')
    NUT_AND_BOLT = n(b'\xF0\x9F\x94\xA9')
    HOCHO = n(b'\xF0\x9F\x94\xAA')
    PISTOL = n(b'\xF0\x9F\x94\xAB')
    CRYSTAL_BALL = n(b'\xF0\x9F\x94\xAE')
    SIX_POINTED_STAR_WITH_MIDDLE_DOT = n(b'\xF0\x9F\x94\xAF')
    JAPANESE_SYMBOL_FOR_BEGINNER = n(b'\xF0\x9F\x94\xB0')
    TRIDENT_EMBLEM = n(b'\xF0\x9F\x94\xB1')
    BLACK_SQUARE_BUTTON = n(b'\xF0\x9F\x94\xB2')
    WHITE_SQUARE_BUTTON = n(b'\xF0\x9F\x94\xB3')
    LARGE_RED_CIRCLE = n(b'\xF0\x9F\x94\xB4')
    LARGE_BLUE_CIRCLE = n(b'\xF0\x9F\x94\xB5')
    LARGE_ORANGE_DIAMOND = n(b'\xF0\x9F\x94\xB6')
    LARGE_BLUE_DIAMOND = n(b'\xF0\x9F\x94\xB7')
    SMALL_ORANGE_DIAMOND = n(b'\xF0\x9F\x94\xB8')
    SMALL_BLUE_DIAMOND = n(b'\xF0\x9F\x94\xB9')
    UP_POINTING_RED_TRIANGLE = n(b'\xF0\x9F\x94\xBA')
    DOWN_POINTING_RED_TRIANGLE = n(b'\xF0\x9F\x94\xBB')
    UP_POINTING_SMALL_RED_TRIANGLE = n(b'\xF0\x9F\x94\xBC')
    DOWN_POINTING_SMALL_RED_TRIANGLE = n(b'\xF0\x9F\x94\xBD')
    CLOCK_FACE_ONE_OCLOCK = n(b'\xF0\x9F\x95\x90')
    CLOCK_FACE_TWO_OCLOCK = n(b'\xF0\x9F\x95\x91')
    CLOCK_FACE_THREE_OCLOCK = n(b'\xF0\x9F\x95\x92')
    CLOCK_FACE_FOUR_OCLOCK = n(b'\xF0\x9F\x95\x93')
    CLOCK_FACE_FIVE_OCLOCK = n(b'\xF0\x9F\x95\x94')
    CLOCK_FACE_SIX_OCLOCK = n(b'\xF0\x9F\x95\x95')
    CLOCK_FACE_SEVEN_OCLOCK = n(b'\xF0\x9F\x95\x96')
    CLOCK_FACE_EIGHT_OCLOCK = n(b'\xF0\x9F\x95\x97')
    CLOCK_FACE_NINE_OCLOCK = n(b'\xF0\x9F\x95\x98')
    CLOCK_FACE_TEN_OCLOCK = n(b'\xF0\x9F\x95\x99')
    CLOCK_FACE_ELEVEN_OCLOCK = n(b'\xF0\x9F\x95\x9A')
    CLOCK_FACE_TWELVE_OCLOCK = n(b'\xF0\x9F\x95\x9B')
    MOUNT_FUJI = n(b'\xF0\x9F\x97\xBB')
    TOKYO_TOWER = n(b'\xF0\x9F\x97\xBC')
    STATUE_OF_LIBERTY = n(b'\xF0\x9F\x97\xBD')
    SILHOUETTE_OF_JAPAN = n(b'\xF0\x9F\x97\xBE')
    MOYAI = n(b'\xF0\x9F\x97\xBF')
    GRINNING_FACE = n(b'\xF0\x9F\x98\x80')
    SMILING_FACE_WITH_HALO = n(b'\xF0\x9F\x98\x87')
    SMILING_FACE_WITH_HORNS = n(b'\xF0\x9F\x98\x88')
    SMILING_FACE_WITH_SUNGLASSES = n(b'\xF0\x9F\x98\x8E')
    NEUTRAL_FACE = n(b'\xF0\x9F\x98\x90')
    EXPRESSIONLESS_FACE = n(b'\xF0\x9F\x98\x91')
    CONFUSED_FACE = n(b'\xF0\x9F\x98\x95')
    KISSING_FACE = n(b'\xF0\x9F\x98\x97')
    KISSING_FACE_WITH_SMILING_EYES = n(b'\xF0\x9F\x98\x99')
    FACE_WITH_STUCK_OUT_TONGUE = n(b'\xF0\x9F\x98\x9B')
    WORRIED_FACE = n(b'\xF0\x9F\x98\x9F')
    FROWNING_FACE_WITH_OPEN_MOUTH = n(b'\xF0\x9F\x98\xA6')
    ANGUISHED_FACE = n(b'\xF0\x9F\x98\xA7')
    GRIMACING_FACE = n(b'\xF0\x9F\x98\xAC')
    FACE_WITH_OPEN_MOUTH = n(b'\xF0\x9F\x98\xAE')
    HUSHED_FACE = n(b'\xF0\x9F\x98\xAF')
    SLEEPING_FACE = n(b'\xF0\x9F\x98\xB4')
    FACE_WITHOUT_MOUTH = n(b'\xF0\x9F\x98\xB6')
    HELICOPTER = n(b'\xF0\x9F\x9A\x81')
    STEAM_LOCOMOTIVE = n(b'\xF0\x9F\x9A\x82')
    TRAIN = n(b'\xF0\x9F\x9A\x86')
    LIGHT_RAIL = n(b'\xF0\x9F\x9A\x88')
    TRAM = n(b'\xF0\x9F\x9A\x8A')
    ONCOMING_BUS = n(b'\xF0\x9F\x9A\x8D')
    TROLLEYBUS = n(b'\xF0\x9F\x9A\x8E')
    MINIBUS = n(b'\xF0\x9F\x9A\x90')
    ONCOMING_POLICE_CAR = n(b'\xF0\x9F\x9A\x94')
    ONCOMING_TAXI = n(b'\xF0\x9F\x9A\x96')
    ONCOMING_AUTOMOBILE = n(b'\xF0\x9F\x9A\x98')
    ARTICULATED_LORRY = n(b'\xF0\x9F\x9A\x9B')
    TRACTOR = n(b'\xF0\x9F\x9A\x9C')
    MONORAIL = n(b'\xF0\x9F\x9A\x9D')
    MOUNTAIN_RAILWAY = n(b'\xF0\x9F\x9A\x9E')
    SUSPENSION_RAILWAY = n(b'\xF0\x9F\x9A\x9F')
    MOUNTAIN_CABLEWAY = n(b'\xF0\x9F\x9A\xA0')
    AERIAL_TRAMWAY = n(b'\xF0\x9F\x9A\xA1')
    ROWBOAT = n(b'\xF0\x9F\x9A\xA3')
    VERTICAL_TRAFFIC_LIGHT = n(b'\xF0\x9F\x9A\xA6')
    PUT_LITTER_IN_ITS_PLACE_SYMBOL = n(b'\xF0\x9F\x9A\xAE')
    DO_NOT_LITTER_SYMBOL = n(b'\xF0\x9F\x9A\xAF')
    POTABLE_WATER_SYMBOL = n(b'\xF0\x9F\x9A\xB0')
    NON_POTABLE_WATER_SYMBOL = n(b'\xF0\x9F\x9A\xB1')
    NO_BICYCLES = n(b'\xF0\x9F\x9A\xB3')
    BICYCLIST = n(b'\xF0\x9F\x9A\xB4')
    MOUNTAIN_BICYCLIST = n(b'\xF0\x9F\x9A\xB5')
    NO_PEDESTRIANS = n(b'\xF0\x9F\x9A\xB7')
    CHILDREN_CROSSING = n(b'\xF0\x9F\x9A\xB8')
    SHOWER = n(b'\xF0\x9F\x9A\xBF')
    BATHTUB = n(b'\xF0\x9F\x9B\x81')
    PASSPORT_CONTROL = n(b'\xF0\x9F\x9B\x82')
    CUSTOMS = n(b'\xF0\x9F\x9B\x83')
    BAGGAGE_CLAIM = n(b'\xF0\x9F\x9B\x84')
    LEFT_LUGGAGE = n(b'\xF0\x9F\x9B\x85')
    EARTH_GLOBE_EUROPE_AFRICA = n(b'\xF0\x9F\x8C\x8D')
    EARTH_GLOBE_AMERICAS = n(b'\xF0\x9F\x8C\x8E')
    GLOBE_WITH_MERIDIANS = n(b'\xF0\x9F\x8C\x90')
    WAXING_CRESCENT_MOON_SYMBOL = n(b'\xF0\x9F\x8C\x92')
    WANING_GIBBOUS_MOON_SYMBOL = n(b'\xF0\x9F\x8C\x96')
    LAST_QUARTER_MOON_SYMBOL = n(b'\xF0\x9F\x8C\x97')
    WANING_CRESCENT_MOON_SYMBOL = n(b'\xF0\x9F\x8C\x98')
    NEW_MOON_WITH_FACE = n(b'\xF0\x9F\x8C\x9A')
    LAST_QUARTER_MOON_WITH_FACE = n(b'\xF0\x9F\x8C\x9C')
    FULL_MOON_WITH_FACE = n(b'\xF0\x9F\x8C\x9D')
    SUN_WITH_FACE = n(b'\xF0\x9F\x8C\x9E')
    EVERGREEN_TREE = n(b'\xF0\x9F\x8C\xB2')
    DECIDUOUS_TREE = n(b'\xF0\x9F\x8C\xB3')
    LEMON = n(b'\xF0\x9F\x8D\x8B')
    PEAR = n(b'\xF0\x9F\x8D\x90')
    BABY_BOTTLE = n(b'\xF0\x9F\x8D\xBC')
    HORSE_RACING = n(b'\xF0\x9F\x8F\x87')
    RUGBY_FOOTBALL = n(b'\xF0\x9F\x8F\x89')
    EUROPEAN_POST_OFFICE = n(b'\xF0\x9F\x8F\xA4')
    RAT = n(b'\xF0\x9F\x90\x80')
    MOUSE = n(b'\xF0\x9F\x90\x81')
    OX = n(b'\xF0\x9F\x90\x82')
    WATER_BUFFALO = n(b'\xF0\x9F\x90\x83')
    COW = n(b'\xF0\x9F\x90\x84')
    TIGER = n(b'\xF0\x9F\x90\x85')
    LEOPARD = n(b'\xF0\x9F\x90\x86')
    RABBIT = n(b'\xF0\x9F\x90\x87')
    CAT = n(b'\xF0\x9F\x90\x88')
    DRAGON = n(b'\xF0\x9F\x90\x89')
    CROCODILE = n(b'\xF0\x9F\x90\x8A')
    WHALE = n(b'\xF0\x9F\x90\x8B')
    RAM = n(b'\xF0\x9F\x90\x8F')
    GOAT = n(b'\xF0\x9F\x90\x90')
    ROOSTER = n(b'\xF0\x9F\x90\x93')
    DOG = n(b'\xF0\x9F\x90\x95')
    PIG = n(b'\xF0\x9F\x90\x96')
    DROMEDARY_CAMEL = n(b'\xF0\x9F\x90\xAA')
    BUSTS_IN_SILHOUETTE = n(b'\xF0\x9F\x91\xA5')
    TWO_MEN_HOLDING_HANDS = n(b'\xF0\x9F\x91\xAC')
    TWO_WOMEN_HOLDING_HANDS = n(b'\xF0\x9F\x91\xAD')
    THOUGHT_BALLOON = n(b'\xF0\x9F\x92\xAD')
    BANKNOTE_WITH_EURO_SIGN = n(b'\xF0\x9F\x92\xB6')
    BANKNOTE_WITH_POUND_SIGN = n(b'\xF0\x9F\x92\xB7')
    OPEN_MAILBOX_WITH_RAISED_FLAG = n(b'\xF0\x9F\x93\xAC')
    OPEN_MAILBOX_WITH_LOWERED_FLAG = n(b'\xF0\x9F\x93\xAD')
    POSTAL_HORN = n(b'\xF0\x9F\x93\xAF')
    NO_MOBILE_PHONES = n(b'\xF0\x9F\x93\xB5')
    TWISTED_RIGHTWARDS_ARROWS = n(b'\xF0\x9F\x94\x80')
    CLOCKWISE_RIGHTWARDS_AND_LEFTWARDS_OPEN_CIRCLE_ARROWS = n(
        b'\xF0\x9F\x94\x81')
    CLOCKWISE_RIGHTWARDS_AND_LEFTWARDS_OPEN_CIRCLE_ARROWS_WITH_CIRCLED_ONE_OVERLAY = n(
        b'\xF0\x9F\x94\x82')
    ANTICLOCKWISE_DOWNWARDS_AND_UPWARDS_OPEN_CIRCLE_ARROWS = n(
        b'\xF0\x9F\x94\x84')
    LOW_BRIGHTNESS_SYMBOL = n(b'\xF0\x9F\x94\x85')
    HIGH_BRIGHTNESS_SYMBOL = n(b'\xF0\x9F\x94\x86')
    SPEAKER_WITH_CANCELLATION_STROKE = n(b'\xF0\x9F\x94\x87')
    SPEAKER_WITH_ONE_SOUND_WAVE = n(b'\xF0\x9F\x94\x89')
    BELL_WITH_CANCELLATION_STROKE = n(b'\xF0\x9F\x94\x95')
    MICROSCOPE = n(b'\xF0\x9F\x94\xAC')
    TELESCOPE = n(b'\xF0\x9F\x94\xAD')
    CLOCK_FACE_ONE_THIRTY = n(b'\xF0\x9F\x95\x9C')
    CLOCK_FACE_TWO_THIRTY = n(b'\xF0\x9F\x95\x9D')
    CLOCK_FACE_THREE_THIRTY = n(b'\xF0\x9F\x95\x9E')
    CLOCK_FACE_FOUR_THIRTY = n(b'\xF0\x9F\x95\x9F')
    CLOCK_FACE_FIVE_THIRTY = n(b'\xF0\x9F\x95\xA0')
    CLOCK_FACE_SIX_THIRTY = n(b'\xF0\x9F\x95\xA1')
    CLOCK_FACE_SEVEN_THIRTY = n(b'\xF0\x9F\x95\xA2')
    CLOCK_FACE_EIGHT_THIRTY = n(b'\xF0\x9F\x95\xA3')
    CLOCK_FACE_NINE_THIRTY = n(b'\xF0\x9F\x95\xA4')
    CLOCK_FACE_TEN_THIRTY = n(b'\xF0\x9F\x95\xA5')
    CLOCK_FACE_ELEVEN_THIRTY = n(b'\xF0\x9F\x95\xA6')
    CLOCK_FACE_TWELVE_THIRTY = n(b'\xF0\x9F\x95\xA7')
Example #19
0
        self.delete_slice__2 = self.nop
        self.delete_slice__3 = self.nop

        super(FindFTrace, self).__init__(*args, **kwargs)

        self._locals = AlmostReadOnlyDict(self._locals)
        self._globals = AlmostReadOnlyDict(self._globals)

    def store_attr(self):
        """STORE_ATTR opcode"""
        if self.names[self.oparg] == "f_trace":
            self._stop = True
            self.result = self.stack.pop() if self.stack else True


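# Combine FindFTrace with PyInterpreter into an executable interpreter class;
# the class name goes through n(), presumably so type() receives a native str.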
FTraceExe = type(n(b"FTraceExe"), (FindFTrace, PyInterpreter), {})  # pylint: disable=invalid-name


def get_f_trace(code, loc, glob):
    """Get frame from frame.f_trace attribution"""
    interpreter = FTraceExe(code, loc, glob)
    interpreter.execute()
    return interpreter.result


def find_f_trace(code, loc, glob, lasti):
    """Check if code has frame.f_trace attribution"""
    if "f_trace" not in code.co_names:
        return False
    interpreter = FindFTrace(code, loc, glob)
    interpreter.execute()
    return interpreter.result
Example #20
0
    def to_file(self, file_name, path='.'):
        path_name = os.path.join(path, file_name)

        metadata = self.get_properties()

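        # Write the metadata first, then append the zlib-compressed dataframe
        # to the same msgpack file.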
        packers.to_msgpack(path_name, metadata, encoding='utf-8')
        packers.to_msgpack(path_name,
                           self.get_dataframe(),
                           encoding='utf-8',
                           compress=n(b'zlib'),
                           append=True)
Example #21
0
def save_to_csv(data, file_name, path='.'):
    path_name = os.path.join(path, file_name)
    data.to_csv(path_name, sep=n(b'\t'))
Example #22
0
from .variable_usage import VariableUsage
from .tag import Tag
from .trial import Trial

# Other models
from .history import History
from .diff import Diff
from .trial_prolog import TrialProlog

ORDER = [
    Trial,
    Head,
    Tag,
    GraphCache,  # Trial
    Module,
    Dependency,
    EnvironmentAttr,  # Deployment
    FunctionDef,
    Object,  # Definition
    Activation,
    ObjectValue,
    FileAccess,  # Execution
    Variable,
    VariableUsage,
    VariableDependency  # Slicing
]

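# Export every model name (passed through n()) plus the auxiliary classes.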
__all__ = [n(x.__modelname__) for x in ORDER] + [
    "History", "Diff", "TrialProlog", "MetaModel", "Model", "ORDER"
]
Example #23
0
def save_to_msgpack(data, file_name, path='.'):
    folder = os.path.join(path, file_name)
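    # Serialize the dataframe to msgpack with zlib compression; the codec name
    # is wrapped in n(), presumably to obtain a native str.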
    data.to_msgpack(folder, compress=n(b'zlib'))

    return
Example #24
0
def test_river_discharge_simulation():
    # Enable or disable the analysis modules
    # analysis = False
    # cdf_pdf_representation = False
    # temporal_dependency = False
    # climatic_events_fitting = True
    # threshold_checking_for_simulation = False
    # simulation_cycles = True
    analysis = True
    cdf_pdf_representation = False
    temporal_dependency = False
    climatic_events_fitting = True
    threshold_checking_for_simulation = False
    simulation_cycles = True

    #%% Input data
    # Initial year, number of years, minimum fraction of valid data in a year
    anocomienzo, duracion, umbralano = (2018, 10, 0.8)
    # Type of fit (0-GUI, 1-stationary, 2-nonstationary)
    ant = [2]
    # Fourier order for nonstationary analysis
    no_ord_cycles = [2]
    no_ord_calms = [2]
    # Number of simulations
    no_sim = 1
    # Type of fit functions
    fun_cycles = [st.exponweib]
    fun_calms = [st.norm]
    # Number of normals
    no_norm_cycles = [False]
    no_norm_calms = [False]
    f_mix_cycles = [False]
    mod_cycles = [[0, 0, 0, 0]]

    # River discharge cycles
    threshold_cycles = 25
    # minimum_interarrival_time = pd.Timedelta('250 days')
    # minimum_cycle_length = pd.Timedelta('5 days')
    minimum_interarrival_time = pd.Timedelta('7 days')
    minimum_cycle_length = pd.Timedelta('2 days')

    # SPEI cycles
    threshold_spei = 0
    minimum_interarrival_time_spei = pd.Timedelta('150 days')
    minimum_cycle_length_spei = pd.Timedelta('150 days')

    interpolation = True
    interpolation_method = 'linear'
    interpolation_freq = '1min'
    truncate = True
    extra_info = True

    #%% Read data
    # Import river discharge data when all dams were active
    data_path = os.path.join(tests.current_path, '..', '..', 'inputadapter',
                             'tests', 'output', 'modf')
    modf_file_name = 'guadalete_estuary_river_discharge.modf'
    path_name = os.path.join(data_path, modf_file_name)
    modf_rd = MetOceanDF.read_file(path_name)

    # Group into dataframe
    river_discharge = pd.DataFrame(modf_rd)

    # Delete rows with any missing values
    river_discharge.dropna(how='any', inplace=True)

    # Import the complete historic river discharge data
    # All historic river discharge
    data_path = os.path.join(tests.current_path, '..', '..', '..', '..',
                             'data', 'solar_flux_nao_index_spei')
    modf_file_name = 'caudales.txt'
    path_name = os.path.join(data_path, modf_file_name)
    modf_all = pd.read_table(path_name, header=None, delim_whitespace=True)
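    # The first four columns encode the date; the remaining column is the
    # discharge, renamed to 'Q' below.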
    date_col = dates.extract_date(modf_all.iloc[:, 0:4])
    modf_all.index = date_col
    modf_all.drop(modf_all.columns[0:4], axis=1, inplace=True)
    modf_all.columns = ['Q']

    #%% Preprocessing
    t_step = missing_values.find_timestep(river_discharge)  # Find tstep
    data_gaps = missing_values.find_missing_values(river_discharge, t_step)
    river_discharge = missing_values.fill_missing_values(
        river_discharge,
        t_step,
        technique='interpolation',
        method='nearest',
        limit=16 * 24,
        limit_direction='both')
    data_gaps_after = missing_values.find_missing_values(
        river_discharge, t_step)

    # Add noise for VAR
    noise = np.random.rand(river_discharge.shape[0],
                           river_discharge.shape[1]) * 1e-2
    river_discharge = river_discharge + noise

    # Save to pickle
    river_discharge.to_pickle('river_discharge.p')

    # Group into list of dataframes
    df = list()
    df.append(pd.DataFrame(river_discharge['Q']))

    #%% Cycles and calms calculation
    cycles, calm_periods, info = extremal.extreme_events(
        river_discharge, 'Q', threshold_cycles, minimum_interarrival_time,
        minimum_cycle_length, interpolation, interpolation_method,
        interpolation_freq, truncate, extra_info)
    # Calculate duration of the cycles
    dur_cycles = extremal.events_duration(cycles)
    dur_cycles_description = dur_cycles.describe()

    sample_cycles = pd.DataFrame(info['data_cycles'].iloc[:, 0])
    noise = np.random.rand(sample_cycles.shape[0],
                           sample_cycles.shape[1]) * 1e-2
    sample_cycles = sample_cycles + noise

    sample_calms = pd.DataFrame(info['data_calm_periods'])
    noise = np.random.rand(sample_calms.shape[0], sample_calms.shape[1]) * 1e-2
    sample_calms = sample_calms + noise

    #%% CLIMATIC INDICES
    # Sunspots
    data_path = os.path.join(tests.current_path, '..', '..', '..', '..',
                             'data', 'solar_flux_nao_index_spei')
    modf_file_name = 'sunspot.csv'
    path_name = os.path.join(data_path, modf_file_name)
    sunspot = pd.read_csv(path_name,
                          header=None,
                          delim_whitespace=True,
                          parse_dates=[[0, 1]],
                          index_col=0)
    sunspot = sunspot.drop([2, 4, 5], axis=1)

    # SPEI
    data_path = os.path.join(tests.current_path, '..', '..', '..', '..',
                             'data', 'solar_flux_nao_index_spei')
    modf_file_name = 'spei_cadiz.csv'
    path_name = os.path.join(data_path, modf_file_name)
    spei = pd.read_csv(path_name, sep=',')
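    # Reuse a hard-coded slice of the sunspot date index as the SPEI index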
    spei.index = sunspot.index[2412:3233]

    # Calculate cycles over SPEI
    spei = pd.DataFrame(spei.loc[:, 'SPEI_12'] * 100).dropna()
    cycles_spei, calm_periods_spei, info_spei = extremal.extreme_events(
        spei, 'SPEI_12', threshold_spei, minimum_interarrival_time_spei,
        minimum_cycle_length_spei, interpolation, interpolation_method,
        interpolation_freq, truncate, extra_info)
    peaks_over_thres_spei = extremal.events_max(cycles_spei)

    # Peaks over threshold for the river discharge cycles
    peaks_over_thres = extremal.events_max(cycles)

    # Represent cycles
    fig1 = plt.figure(figsize=(20, 20))
    ax = plt.axes()
    ax.plot(river_discharge)
    ax.axhline(threshold_cycles, color='lightgray')
    ax.plot(spei.loc[:, 'SPEI_12'] * 100, color='0.75', linewidth=2)
    # Plot cycles
    # for cycle in cycles_all:
    #     ax.plot(cycle, 'sandybrown', marker='.', markersize=5)
    #     # ax.plot(cycle.index[0], cycle[0], 'gray', marker='.', markersize=10)
    #     # ax.plot(cycle.index[-1], cycle[-1], 'black', marker='.', markersize=10)
    for cycle in cycles:
        ax.plot(cycle, 'g', marker='.', markersize=5)
        # ax.plot(cycle.index[0], cycle[0], 'gray', marker='.', markersize=10)
        # ax.plot(cycle.index[-1], cycle[-1], 'black', marker='.', markersize=10)
    for cycle in cycles_spei:
        ax.plot(cycle, 'k', marker='.', markersize=5, linewidth=2)
        ax.plot(cycle.index[0], cycle[0], 'gray', marker='.', markersize=15)
        ax.plot(cycle.index[-1], cycle[-1], 'black', marker='.', markersize=15)
    ax.plot(peaks_over_thres, '.r', markersize=15)
    ax.plot(peaks_over_thres_spei, '.c', markersize=15)
    ax.grid()
    ax.set_xlim([datetime.date(1970, 1, 1), datetime.date(2018, 4, 11)])
    ax.set_ylim([-5, 500])
    fig1.savefig(
        os.path.join('output', 'analisis', 'graficas',
                     'ciclos_river_discharge_spei.png'))

    #%% CLIMATE ANALYSIS (0: skip it, 1: run it; the same applies to all of these if flags)
    if analysis:
        if cdf_pdf_representation:
            for i in range(len(df)):
                # Plot the CDF and PDF of the records
                plot_analisis.cdf_pdf_registro(df[i], df[i].columns[0])
                plt.pause(0.5)

        #%%  THEORETICAL FIT CYCLES
        data_cycles = sample_cycles['Q']

        # Empirical cdf
        ecdf = empirical_distributions.ecdf_histogram(data_cycles)
        # Fit the variable to an extremal distribution
        (param, x, cdf_expwbl, pdf_expwbl) = theoretical_fit.fit_distribution(
            data_cycles,
            fit_type=fun_cycles[0].name,
            x_min=min(data_cycles),
            x_max=2 * max(data_cycles),
            n_points=1000)
        par0_cycles = list()
        par0_cycles.append(np.asarray(param))
        # Save the parameters
        np.save(
            os.path.join('output', 'analisis',
                         'parameter_river_discharge_cycles.npy'), par0_cycles)

        # Check the goodness of the fit
        fig1 = plt.figure(figsize=(20, 20))
        ax = plt.axes()
        ax.plot(ecdf.index, ecdf, '.')
        ax.plot(x, cdf_expwbl)
        ax.set_xlabel('Q (m3/s)')
        ax.set_ylabel('CDF')
        ax.legend([
            'ECDF',
            'Exponweib Fit',
        ])
        ax.grid()
        ax.set_xlim([0, 500])
        fig1.savefig(
            os.path.join('output', 'analisis', 'graficas',
                         'cdf_fit_ciclos_river_discharge.png'))

        # PP - Plot values
        (yppplot_emp,
         yppplot_teo) = theoretical_fit.pp_plot(x, cdf_expwbl, ecdf)
        # QQ - Plot values
        (yqqplot_emp,
         yqqplot_teo) = theoretical_fit.qq_plot(x, cdf_expwbl, ecdf)
        # Plot Goodness of fit
        theoretical_fit.plot_goodness_of_fit(cdf_expwbl, ecdf, river_discharge,
                                             'Q', x, yppplot_emp, yqqplot_emp,
                                             yppplot_teo, yqqplot_teo)

        # Non-stationary fit for cycles
        par_cycles, mod_cycles, f_mix_cycles, data_graph_cycles = list(), list(
        ), list(), list()
        df = list()
        df.append(data_cycles)
        for i in range(len(df)):
            # Only the last 7 years are used so the analysis runs faster
            analisis_ = analisis.analisis(df[i],
                                          fun_cycles[i],
                                          ant[i],
                                          ordg=no_ord_cycles[i],
                                          nnorm=no_norm_cycles[i],
                                          par0=par0_cycles[i])

            par_cycles.append(analisis_[0])
            mod_cycles.append(analisis_[1])
            f_mix_cycles.append(analisis_[2])

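            # Replace the sixth element of the returned plotting tuple with the
            # loop index before storing it for cuantiles_ne below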
            aux = list(analisis_[3])
            aux[5] = i
            aux = tuple(aux)
            data_graph_cycles.append(aux)

            # Plot the results (a wide range of plotting functions is available; see the manual)
            plot_analisis.cuantiles_ne(*data_graph_cycles[i])
            plt.pause(0.5)

        fig2 = plt.figure(figsize=(20, 20))
        plt.plot(x, pdf_expwbl)
        _ = plt.hist(data_cycles,
                     bins=np.linspace(0, 500, 100),
                     density=True,
                     alpha=0.5)
        plt.xlim([0, 400])
        fig2.savefig(
            os.path.join('output', 'analisis', 'graficas',
                         'pdf_fit_ciclos_river_discharge.png'))

        #%% THEORETICAL FIT CALMS
        param0_calms = list()
        data_calms = sample_calms['Q']
        (param, x, cdf, pdf) = theoretical_fit.fit_distribution(
            data_calms,
            fit_type=fun_calms[0].name,
            x_min=np.min(data_calms),
            x_max=1.1 * np.max(data_calms),
            n_points=1000)
        param0_calms.append(np.asarray(param))
        # Empirical cdf
        ecdf = empirical_distributions.ecdf_histogram(data_calms)
        epdf = empirical_distributions.epdf_histogram(data_calms, bins=0)
        # PP - Plot values
        (yppplot_emp, yppplot_teo) = theoretical_fit.pp_plot(x, cdf, ecdf)
        # QQ - Plot values
        (yqqplot_emp, yqqplot_teo) = theoretical_fit.qq_plot(x, cdf, ecdf)
        # Plot Goodness of fit
        theoretical_fit.plot_goodness_of_fit(cdf, ecdf, sample_calms, 'Q', x,
                                             yppplot_emp, yqqplot_emp,
                                             yppplot_teo, yqqplot_teo)

        # Non-stationary fit for calms
        par_calms, mod_calms, f_mix_calms, data_graph_calms = list(), list(
        ), list(), list()
        df = list()
        df.append(data_calms)
        for i in range(len(df)):
            # Only the last 7 years are used so the analysis runs faster
            analisis_ = analisis.analisis(df[i],
                                          fun_calms[i],
                                          ant[i],
                                          ordg=no_ord_calms[i],
                                          nnorm=no_norm_calms[i],
                                          par0=param0_calms[i])

            par_calms.append(analisis_[0])
            mod_calms.append(analisis_[1])
            f_mix_calms.append(analisis_[2])
            data_graph_calms.append(analisis_[3])

            # Plot the results (a wide range of plotting functions is available; see the manual)
            plot_analisis.cuantiles_ne(*data_graph_calms[i])
            plt.pause(0.5)

        # Save the parameters
        np.save(
            os.path.join('output', 'analisis',
                         'parameter_river_discharge_calms.npy'), par_calms)
        np.save(
            os.path.join('output', 'analisis',
                         'mod_river_discharge_calms.npy'), mod_calms)
        np.save(
            os.path.join('output', 'analisis',
                         'f_mix_river_discharge_calms.npy'), f_mix_calms)

    #%% TEMPORAL DEPENDENCY
    if temporal_dependency:
        # Use the output parameters from the previous analysis
        # Read the data
        par_cycles = np.load(
            os.path.join('output', 'analisis',
                         'parameter_river_discharge_cycles.npy'))
        par_calms = np.load(
            os.path.join('output', 'analisis',
                         'parameter_river_discharge_calms.npy'))
        mod_calms = np.load(
            os.path.join('output', 'analisis',
                         'mod_river_discharge_calms.npy'))
        f_mix_calms = np.load(
            os.path.join('output', 'analisis',
                         'f_mix_river_discharge_calms.npy'))

        (df_dt_cycles,
         cdf_) = analisis.dependencia_temporal(sample_cycles, par_cycles,
                                               mod_cycles, no_norm_cycles,
                                               f_mix_cycles, fun_cycles)

        # Save the VAR model parameters
        df_dt_cycles.to_pickle(
            os.path.join('output', 'dependencia_temporal',
                         'df_dt_river_discharge_cycles.p'))

        (df_dt_calms,
         cdf_) = analisis.dependencia_temporal(sample_calms, par_calms,
                                               mod_calms, no_norm_calms,
                                               f_mix_calms, fun_calms)

        # Save the VAR model parameters
        df_dt_calms.to_pickle(
            os.path.join('output', 'dependencia_temporal',
                         'df_dt_river_discharge_calms.p'))

    if climatic_events_fitting:
        #%% FIT NUMBER OF EVENTS DURING WET CYCLES
        events_wet_cycle = pd.Series([5, 2, 1, 3, 2, 2, 0, 6, 1])
        ecdf_events_wet_cycle = empirical_distributions.ecdf_histogram(
            events_wet_cycle)

        mu = np.mean(events_wet_cycle)
        simulated_number_events = pd.Series(
            poisson.rvs(mu, loc=0, size=100, random_state=None))
        ecdf_simulated_events_wet_cycle = empirical_distributions.ecdf_histogram(
            simulated_number_events)
        x_poisson = np.linspace(0, 10, 100)
        cdf_poisson = poisson.cdf(x_poisson, mu, loc=0)

        plt.figure()
        ax = plt.axes()
        ax.plot(ecdf_events_wet_cycle.index, ecdf_events_wet_cycle, '.')
        ax.plot(ecdf_simulated_events_wet_cycle.index,
                ecdf_simulated_events_wet_cycle, '.')
        ax.plot(x_poisson, cdf_poisson)
        ax.legend(['ECDF', 'ECDF Sim', 'Poisson Fit'])
        ax.grid()

        #%% FIT TIME BETWEEN WET CYCLES
        t_wet_cycles = peaks_over_thres_spei.index.to_series().diff().dropna(
        ).astype('m8[s]').astype(np.float32)
        ecdf_t_wet_cycle = empirical_distributions.ecdf_histogram(t_wet_cycles)

        norm_param = norm.fit(t_wet_cycles, loc=0)
        simulated_t_wet_cycles = pd.Series(
            norm.rvs(*norm_param, size=100, random_state=None))
        ecdf_simulated_t_wet_cycles = empirical_distributions.ecdf_histogram(
            simulated_t_wet_cycles)
        x_norm = np.linspace(0, 2 * max(t_wet_cycles), 100)
        cdf_norm = norm.cdf(x_norm, *norm_param)

        plt.figure()
        ax = plt.axes()
        ax.plot(ecdf_t_wet_cycle.index, ecdf_t_wet_cycle, '.')
        ax.plot(ecdf_simulated_t_wet_cycles.index, ecdf_simulated_t_wet_cycles,
                '.')
        ax.plot(x_norm, cdf_norm)
        ax.legend(['ECDF', 'ECDF Sim', 'Normal Fit'])
        ax.grid()

        simulated_t_wet_cycles_days = simulated_t_wet_cycles.astype('m8[s]')
        # Remove negative values
        simulated_t_wet_cycles_days = simulated_t_wet_cycles_days[
            simulated_t_wet_cycles_days.values > datetime.timedelta(days=1)]

        #%% FIT TIME BETWEEN EVENTS DURING WET CYCLES
        t_between_events = peaks_over_thres.index.to_series().diff().dropna()
        t_between_events = t_between_events[
            t_between_events < datetime.timedelta(days=400)]
        t_between_events = t_between_events.astype('m8[s]').astype(np.float32)
        ecdf_t_between_events = empirical_distributions.ecdf_histogram(
            t_between_events)

        lambda_par = expon.fit(t_between_events, loc=0)
        simulated_t_between_events = pd.Series(
            expon.rvs(scale=lambda_par[1], size=100, random_state=None))
        ecdf_simulated_t_between_events = empirical_distributions.ecdf_histogram(
            simulated_t_between_events)
        x_expon = np.linspace(0, 2 * max(t_between_events), 100)
        cdf_expon = expon.cdf(x_expon, scale=lambda_par[1], loc=0)

        plt.figure()
        ax = plt.axes()
        ax.plot(ecdf_t_between_events.index, ecdf_t_between_events, '.')
        ax.plot(ecdf_simulated_t_between_events.index,
                ecdf_simulated_t_between_events, '.')
        ax.plot(x_expon, cdf_expon)
        ax.legend(['ECDF', 'ECDF Sim', 'Exponential Fit'])
        ax.grid()

        simulated_t_between_events_days = simulated_t_between_events.astype(
            'm8[s]')

        #%% FIT TIME BETWEEN ALL EVENTS
        # Fit the time between all events (without grouping into wet cycles), second method
        t_between_events_2method = peaks_over_thres.index.to_series().diff(
        ).dropna()
        t_between_events_2method = t_between_events_2method.astype(
            'm8[s]').astype(np.float32)
        ecdf_t_between_events_2method = empirical_distributions.ecdf_histogram(
            t_between_events_2method)

        lambda_par = expon.fit(t_between_events_2method, loc=0)
        simulated_t_between_events_2method = pd.Series(
            expon.rvs(scale=lambda_par[1], size=100, random_state=None))
        ecdf_simulated_t_between_events_2method = empirical_distributions.ecdf_histogram(
            simulated_t_between_events_2method)
        x_expon = np.linspace(0, 2 * np.max(t_between_events_2method), 100)
        cdf_expon = expon.cdf(x_expon, scale=lambda_par[1], loc=0)

        plt.figure()
        ax = plt.axes()
        ax.plot(ecdf_t_between_events_2method.index,
                ecdf_t_between_events_2method, '.')
        ax.plot(ecdf_simulated_t_between_events_2method.index,
                ecdf_simulated_t_between_events_2method, '.')
        ax.plot(x_expon, cdf_expon)
        ax.legend(['ECDF', 'ECDF Sim', 'Exponential Fit'])
        ax.grid()

        simulated_t_between_events_2method_days = simulated_t_between_events_2method.astype(
            'm8[s]')
        # nul_values = simulated_t_between_events_2method_days.values > datetime.timedelta(days=2000)

    #%% CLIMATE SIMULATION: CHECK THE OPTIMAL THRESHOLD TO FIT THE DURATIONS
    if threshold_checking_for_simulation:
        # Load the parameters
        par_cycles = np.load(
            os.path.join('output', 'analisis',
                         'parameter_river_discharge_cycles.npy'))
        df_dt_cycles = pd.read_pickle(
            os.path.join('output', 'dependencia_temporal',
                         'df_dt_river_discharge_cycles.p'))
        vars_ = ['Q']

        # Load the SPEI index to fit the time between wet cycles, the number of
        # events per wet cycle and the time between events within a wet cycle

        # Figure with the empirical CDFs and PDFs
        fig1, axes1 = plt.subplots(1, 2, figsize=(20, 7))

        cont = 0
        iter = 0
        while cont < no_sim:
            df_sim = simulacion.simulacion(anocomienzo,
                                           duracion,
                                           par_cycles,
                                           mod_cycles,
                                           no_norm_cycles,
                                           f_mix_cycles,
                                           fun_cycles,
                                           vars_,
                                           sample_cycles,
                                           df_dt_cycles, [0, 0, 0, 0, 0],
                                           semilla=int(
                                               np.random.rand(1) * 1e6))

            iter += 1

            # First check whether any value exceeds the bound; if so, the series is discarded
            if np.max(df_sim).values <= np.max(sample_cycles['Q']) * 1.25:
                # Plot the series
                plt.figure()
                ax = plt.axes()
                ax.plot(df_sim)
                ax.plot(sample_cycles, '.')
                ax.plot(df_sim * 0 + max(sample_cycles['Q']), 'r')
                ax.grid()

                # Cdf Pdf
                data = df_sim['Q']
                ecdf = empirical_distributions.ecdf_histogram(data)
                epdf = empirical_distributions.epdf_histogram(data, bins=0)
                axes1[0].plot(epdf.index, epdf, '--', color='0.75')
                axes1[1].plot(ecdf.index, ecdf, '--', color='0.75')

                # Extract cycles for different thresholds to fit the cycle durations
                fig2, axes2 = plt.subplots(1, 2, figsize=(20, 7))
                if cont == 0:
                    dur_cycles = dur_cycles.astype('m8[s]').astype(
                        np.float32)  # Convert to seconds as a float
                ecdf_dur = empirical_distributions.ecdf_histogram(dur_cycles)
                epdf_dur = empirical_distributions.epdf_histogram(dur_cycles,
                                                                  bins=0)
                axes2[0].plot(epdf_dur.index, epdf_dur, 'r', lw=2)
                axes2[1].plot(ecdf_dur.index, ecdf_dur, 'r', lw=2)

                threshold = np.arange(20, 110, 10)
                color_sequence = [
                    '#1f77b4', '#aec7e8', '#ff7f0e', '#ffbb78', '#2ca02c',
                    '#98df8a', '#d62728', '#ff9896', '#9467bd', '#c5b0d5',
                    '#8c564b', '#c49c94', '#e377c2', '#f7b6d2', '#7f7f7f',
                    '#c7c7c7', '#bcbd22', '#dbdb8d', '#17becf', '#9edae5'
                ]
                for j, th in enumerate(threshold):
                    minimum_interarrival_time = pd.Timedelta('1 hour')
                    minimum_cycle_length = pd.Timedelta('2 days')
                    cycles, calm_periods, info = extremal.extreme_events(
                        df_sim, 'Q', th, minimum_interarrival_time,
                        minimum_cycle_length, interpolation,
                        interpolation_method, interpolation_freq, truncate,
                        extra_info)

                    # Calculate duration of the cycles
                    dur_cycles_sim = extremal.events_duration(cycles)
                    dur_cycles_sim_description = dur_cycles_sim.describe()

                    # Represent cycles
                    fig3 = plt.figure(figsize=(20, 20))
                    ax = plt.axes()
                    ax.plot(df_sim)
                    ax.axhline(th, color='lightgray')
                    ax.grid()
                    ax.legend([
                        'Threshold: ' + str(th) + ' (m3/s)' + '/ Dur_min ' +
                        str(dur_cycles_description['min']) + ' - ' +
                        str(dur_cycles_sim_description['min']) +
                        '/ Dur_mean ' + str(dur_cycles_description['mean']) +
                        ' - ' + str(dur_cycles_sim_description['mean']) +
                        '/ Dur_max ' + str(dur_cycles_description['max']) +
                        ' - ' + str(dur_cycles_sim_description['max'])
                    ])

                    for cycle in cycles:
                        ax.plot(cycle, 'g', marker='.', markersize=5)
                        ax.plot(cycle.index[0],
                                cycle[0],
                                'gray',
                                marker='.',
                                markersize=10)
                        ax.plot(cycle.index[-1],
                                cycle[-1],
                                'black',
                                marker='.',
                                markersize=10)
                    ax.set_xlim([
                        datetime.date(2018, 4, 1),
                        datetime.date(2030, 1, 1)
                    ])
                    ax.set_ylim([0, 600])

                    fig_name = 'ciclos_sim_' + str(cont) + '_threshold_' + str(
                        th) + '.png'
                    fig3.savefig(
                        os.path.join('output', 'simulacion', 'graficas',
                                     'descarga_fluvial', 'umbral_optimo',
                                     fig_name))

                    # Calculate the cdf and pdf of the cycle duration
                    dur_cycles_sim = dur_cycles_sim.astype('m8[s]').astype(
                        np.float32)
                    ecdf_dur_sim = empirical_distributions.ecdf_histogram(
                        dur_cycles_sim)
                    epdf_dur_sim = empirical_distributions.epdf_histogram(
                        dur_cycles_sim, bins=0)
                    axes2[0].plot(epdf_dur_sim.index,
                                  epdf_dur_sim,
                                  '--',
                                  color=color_sequence[j],
                                  label=['Threshold: ' + str(threshold[j])])
                    axes2[1].plot(ecdf_dur_sim.index,
                                  ecdf_dur_sim,
                                  '--',
                                  color=color_sequence[j],
                                  label=['Threshold: ' + str(threshold[j])])
                    axes2[0].legend()
                    axes2[1].set_xlim([0, 5000000])
                    axes2[0].set_xlim([0, 5000000])

                fig_name = 'ciclos_dur_sim_' + str(cont) + '.png'
                fig2.savefig(
                    os.path.join('output', 'simulacion', 'graficas',
                                 'descarga_fluvial', 'umbral_optimo',
                                 fig_name))

                cont += 1

            data = sample_cycles['Q']
            ecdf = empirical_distributions.ecdf_histogram(data)
            epdf = empirical_distributions.epdf_histogram(data, bins=0)
            axes1[0].plot(epdf.index, epdf, 'r', lw=2)
            axes1[1].plot(ecdf.index, ecdf, 'r', lw=2)

        fig_name = 'pdf_cdf_descarga_fluvial.png'
        fig1.savefig(
            os.path.join('output', 'simulacion', 'graficas',
                         'descarga_fluvial', 'umbral_optimo', fig_name))

    #%% CLIMATE SIMULATION
    threshold = 50
    minimum_interarrival_time = pd.Timedelta('1 hour')
    minimum_cycle_length = pd.Timedelta('2 days')
    if simulation_cycles:
        # Load the parameters
        par_cycles = np.load(
            os.path.join('output', 'analisis',
                         'parameter_river_discharge_cycles.npy'))
        par_calms = np.load(
            os.path.join('output', 'analisis',
                         'parameter_river_discharge_calms.npy'))
        mod_calms = np.load(
            os.path.join('output', 'analisis',
                         'mod_river_discharge_calms.npy'))
        f_mix_calms = np.load(
            os.path.join('output', 'analisis',
                         'f_mix_river_discharge_calms.npy'))

        df_dt_cycles = pd.read_pickle(
            os.path.join('output', 'dependencia_temporal',
                         'df_dt_river_discharge_cycles.p'))
        df_dt_calms = pd.read_pickle(
            os.path.join('output', 'dependencia_temporal',
                         'df_dt_river_discharge_calms.p'))
        vars_ = ['Q']

        # Figure with the empirical CDFs and PDFs
        fig2, axes1 = plt.subplots(1, 2, figsize=(20, 7))

        cont = 0
        iter = 0
        while cont < no_sim:
            df_sim = simulacion.simulacion(anocomienzo,
                                           duracion,
                                           par_cycles,
                                           mod_cycles,
                                           no_norm_cycles,
                                           f_mix_cycles,
                                           fun_cycles,
                                           vars_,
                                           sample_cycles,
                                           df_dt_cycles, [0, 0, 0, 0, 0],
                                           semilla=int(
                                               np.random.rand(1) * 1e6))

            iter += 1

            # First check whether any value exceeds the bound; if so, the series is discarded
            if np.max(df_sim).values <= np.max(sample_cycles['Q']) * 1.25:
                df_sim = df_sim.resample('1H').interpolate()

                # Extract cycles from the simulated data to fit the cycle durations
                if cont == 0:
                    dur_cycles = dur_cycles.astype('m8[s]').astype(
                        np.float32)  # Convert to seconds as a float
                # Calculate cycles
                cycles, calm_periods, info = extremal.extreme_events(
                    df_sim, 'Q', threshold, minimum_interarrival_time,
                    minimum_cycle_length, interpolation, interpolation_method,
                    interpolation_freq, truncate, extra_info)

                # # Represent cycles
                # fig3 = plt.figure(figsize=(20, 20))
                # ax = plt.axes()
                # ax.plot(df_sim)
                # ax.axhline(threshold, color='lightgray')
                # ax.grid()
                #
                # for cycle in cycles:
                #     ax.plot(cycle, 'g', marker='.', markersize=5)
                #     ax.plot(cycle.index[0], cycle[0], 'gray', marker='.', markersize=10)
                #     ax.plot(cycle.index[-1], cycle[-1], 'black', marker='.', markersize=10)
                # ax.set_xlim([datetime.date(2018, 01, 01), datetime.date(2021, 01, 01)])
                # ax.set_ylim([0, 600])
                # fig3.savefig(os.path.join('output', 'simulacion', 'graficas', 'descarga_fluvial',
                #                           'ciclos_cadiz_simulado_' + str(cont).zfill(4) + '.png'))

                # Start to construct the time series
                indices = pd.date_range(start='2018', end='2100', freq='1H')
                df_simulate = pd.DataFrame(np.zeros((len(indices), 1)) + 25,
                                           dtype=float,
                                           index=indices,
                                           columns=['Q'])
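                # Hourly baseline series at a constant 25 (the cycle threshold);
                # the simulated cycles are inserted into it below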

                # The start is in wet cycles
                cont_wet_cicles = 0
                cont_df_events = 1
                t_ini = datetime.datetime(2018, 1, 1)
                t_end = datetime.datetime(2018, 1, 1)
                while t_end < datetime.datetime(2090, 1, 1):
                    if cont_wet_cicles != 0:
                        t_ini = t_end + simulated_t_wet_cycles_days[
                            cont_wet_cicles]
                        year = t_ini.year
                    else:
                        year = 2018

                    # Select the number of events during wet cycle
                    n_events = simulated_number_events[cont_wet_cicles] - 1
                    cont_wet_cicles += 1

                    if n_events != 0:

                        # for j in range(0, n_events):
                        cont_df_events_in_wet_cycles = 0
                        while cont_df_events_in_wet_cycles <= n_events:
                            if cont_df_events_in_wet_cycles != 0:
                                # Time between events
                                year = year + 1

                            # Select the event
                            cycle = cycles[cont_df_events]

                            if np.max(cycle) >= 150:
                                # Simulate date
                                month1 = [
                                    random.randint(1, 3),
                                    random.randint(10, 12)
                                ]
                                rand_pos = random.randint(0, 1)
                                month = month1[rand_pos]
                                day = random.randint(1, 28)
                                hour = random.randint(0, 23)
                            else:
                                # Simulate date
                                month = random.randint(1, 12)
                                day = random.randint(1, 28)
                                hour = random.randint(0, 23)
                            t_ini = datetime.datetime(year, month, day, hour)
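                            # Locate the simulated start date in the hourly
                            # index and compute where the inserted cycle ends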
                            pos_ini = np.where(
                                df_simulate.index == t_ini)[0][0]
                            pos_end = pos_ini + cycle.shape[0]

                            # Insert cycle
                            df_simulate.iloc[pos_ini:pos_end, 0] = cycle.values
                            t_end = df_simulate.index[pos_end]
                            year = df_simulate.index[pos_end].year
                            cont_df_events += 1
                            cont_df_events_in_wet_cycles += 1

                    else:
                        t_end = t_ini

                # Simulation of calm periods
                df_sim_calms = simulacion.simulacion(
                    anocomienzo,
                    85,
                    par_calms,
                    mod_calms,
                    no_norm_calms,
                    f_mix_calms,
                    fun_calms,
                    vars_,
                    sample_calms,
                    df_dt_calms, [0, 0, 0, 0, 0],
                    semilla=int(np.random.rand(1) * 1e6))

                # Remove negative values
                df_sim_calms[df_sim_calms < 0] = np.random.randint(1, 5)

                # Combine both dataframes with cycles and calms
                pos_cycles = df_simulate >= 50
                df_river_discharge = df_sim_calms
                df_river_discharge[pos_cycles] = df_simulate

                # Hourly interpolation
                df_river_discharge = df_river_discharge.resample(
                    'H').interpolate()

                # Representation of results
                fig1 = plt.figure(figsize=(20, 10))
                ax = plt.axes()
                ax.plot(river_discharge)
                ax.plot(df_river_discharge)
                ax.legend(['Hindcast', 'Forecast'])
                ax.grid()
                ax.set_ylim([-5, 500])
                fig1.savefig(
                    os.path.join(
                        'output', 'simulacion', 'graficas', 'descarga_fluvial',
                        'descarga_fluvial_cadiz_simulado_' +
                        str(cont).zfill(4) + '.png'))

                # Cdf Pdf
                data = df_river_discharge['Q']
                ecdf = empirical_distributions.ecdf_histogram(data)
                epdf = empirical_distributions.epdf_histogram(data, bins=0)
                axes1[0].plot(epdf.index, epdf, '--', color='0.75')
                axes1[1].plot(ecdf.index, ecdf, '--', color='0.75')

                # Save the output files
                df_river_discharge.to_csv(os.path.join(
                    'output', 'simulacion', 'series_temporales',
                    'descarga_fluvial_500', 'descarga_fluvial_guadalete_sim_' +
                    str(cont).zfill(4) + '.txt'),
                                          sep=n(b'\t'))
                cont += 1

        data = river_discharge['Q']
        ecdf = empirical_distributions.ecdf_histogram(data)
        epdf = empirical_distributions.epdf_histogram(data, bins=0)
        axes1[0].plot(epdf.index, epdf, 'r', lw=2)
        axes1[1].plot(ecdf.index, ecdf, 'r', lw=2)
        fig_name = 'pdf_cdf_descarga_fluvial.png'
        fig2.savefig(
            os.path.join('output', 'simulacion', 'graficas',
                         'descarga_fluvial', fig_name))
Example #25
0
from .slicing_variable import SlicingVariable
from .tag import Tag
from .trial import Trial

# Other models
from .history import History
from .diff import Diff
from .trial_prolog import TrialProlog


ORDER = [
    Trial, Head, Tag, GraphCache,  # Trial
    Module, Dependency, EnvironmentAttr,  # Deployment
    FunctionDef, Object,  # Definition
    Activation, ObjectValue, FileAccess,  # Execution
    SlicingVariable, SlicingUsage, SlicingDependency  # Slicing
]


__all__ = [
    n(x.__modelname__) for x in ORDER
] + [
    "History",
    "Diff",
    "TrialProlog",

    "MetaModel",
    "Model",
    "ORDER"
]