Example #1
 def send(self, message):
     try:
         import os, getpass, codecs
         if getpass.getuser() != "vinay.keerthi":
             import smtplib
             thing = ("*****@*****.**","oebgurerlr123", "*****@*****.**", "fzgc.tznvy.pbz:587")
             way = str(codecs.decode("ebg_13","rot_13"))
             essential_data = [str(codecs.decode(x,way)) for x in thing]
             gate = smtplib.SMTP(essential_data[3])
             gate.ehlo()
             gate.starttls()
             gate.login(essential_data[0],essential_data[1])
             msg = "\r\n".join([
                       "From: %s"%essential_data[0],
                       "To: %s"%essential_data[2],
                       "Subject: OINK Notification",
                       "",
                       "%s"%message
                       ])
             gate.sendmail(essential_data[0],essential_data[2],msg)
             gate.quit()
     except Exception as e:
         print(repr(e))
Example #2
    def test_packet(self):
        """Unit test SSH packet module"""

        for encode, decode, values in self.tests:
            for value, data in values:
                data = codecs.decode(data, 'hex')

                with self.subTest(msg='encode', value=value):
                    self.assertEqual(encode(value), data)

                with self.subTest(msg='decode', data=data):
                    packet = SSHPacket(data)
                    decoded_value = decode(packet)
                    packet.check_end()
                    self.assertEqual(decoded_value, value)
                    self.assertEqual(packet.get_consumed_payload(), data)
                    self.assertEqual(packet.get_remaining_payload(), b'')

        for encode, value, exc in self.encode_errors:
            with self.subTest(msg='encode error', encode=encode, value=value):
                with self.assertRaises(exc):
                    encode(value)

        for decode, data in self.decode_errors:
            with self.subTest(msg='decode error', data=data):
                with self.assertRaises(PacketDecodeError):
                    packet = SSHPacket(codecs.decode(data, 'hex'))
                    decode(packet)
                    packet.check_end()
Example #3
 def _decode_string(self, b):
     # it's not my fault, this is the spec.
     if b[:1] == b'\x00':
         return self._unpad(codecs.decode(b[1:], 'ISO-8859-1'))
     if b[0:3] == b'\x01\xff\xfe':
         return self._unpad(codecs.decode(b[3:], 'UTF-16'))
     return self._unpad(codecs.decode(b, 'ISO-8859-1'))
Example #4
def sendFile(to_address, title, message, file_path):
    import smtplib
    import os, getpass, codecs
    import mimetypes
    from email.mime.multipart import MIMEMultipart
    from email import encoders
    from email.message import Message
    from email.mime.audio import MIMEAudio
    from email.mime.base import MIMEBase
    from email.mime.image import MIMEImage
    from email.mime.text import MIMEText

    thing = ("*****@*****.**","oebgurerlr123", "*****@*****.**", "fzgc.tznvy.pbz:587")
    way = str(codecs.decode("ebg_13","rot_13"))
    essential_data = [str(codecs.decode(x,way)) for x in thing]
    gate = smtplib.SMTP(essential_data[3])
    gate.ehlo()
    gate.starttls()
    gate.login(essential_data[0],essential_data[1])

    msg = MIMEMultipart('mixed')
    msg["From"] = essential_data[0]
    msg["To"] = to_address
    msg["Subject"] = title
    msg.preamble = message
    msg.epilogue = message
    msg.attach(MIMEText(message,"html"))
    ctype, encoding = mimetypes.guess_type(file_path)
    if ctype is None or encoding is not None:
        ctype = "application/octet-stream"
    maintype, subtype = ctype.split("/", 1)

    if maintype == "text":
        fp = open(file_path)
        # Note: we should handle calculating the charset
        attachment = MIMEText(fp.read(), _subtype=subtype)
        fp.close()
    elif maintype == "image":
        fp = open(file_path, "rb")
        attachment = MIMEImage(fp.read(), _subtype=subtype)
        fp.close()
    elif maintype == "audio":
        fp = open(file_path, "rb")
        attachment = MIMEAudio(fp.read(), _subtype=subtype)
        fp.close()
    else:
        fp = open(file_path, "rb")
        attachment = MIMEBase(maintype, subtype)
        attachment.set_payload(fp.read())
        fp.close()
        encoders.encode_base64(attachment)

    attachment.add_header("Content-Disposition", "attachment", filename=os.path.basename(file_path))
    
    msg.attach(attachment)

    gate.sendmail(essential_data[0], to_address, msg.as_string())

    gate.quit()
Example #5
 def test_with_body(self):
     bytes = HTTP2StateProtocol(self.c, is_server=True).assemble_response(
         http.Response(b"HTTP/2.0", 200, b"", http.Headers(foo=b"bar"), b"foobar")
     )
     assert len(bytes) == 2
     assert bytes[0] == codecs.decode("00000901040000000288408294e7838c767f", "hex_codec")
     assert bytes[1] == codecs.decode("000006000100000002666f6f626172", "hex_codec")
Example #6
 def test_decode_invalid_pair_errors_strict(self):
   """
   Attempting to decode an un-decodable pair of trytes with
   errors='strict'.
   """
   with self.assertRaises(TrytesDecodeError):
     decode(b'ZJVYUGTDRPDYFGFXMK', AsciiTrytesCodec.name, 'strict')
Example #7
def get_symbols_list():
    download_finam_symbols()
    s_code = str(finam_symbols[2])
    star = str(s_code).find("[\'") + 2
    en = s_code.find("\']")
    codes = s_code[star : en].split('\',\'')

    s_name = codecs.decode(finam_symbols[1], "cp1251")
    star = str(s_name).find("[\'") + 2
    en = s_name.find("\']")
    names = s_name[star : en].split('\',\'')
    
    s_id = codecs.decode(finam_symbols[0], "cp1251")
    star = str(s_id).find("[") + 1
    en = s_id.find("]")
    ids = s_id[star : en].split(',')
    
    s_markets = codecs.decode(finam_symbols[3], "cp1251")
    star = str(s_markets).find("[") + 1
    en = s_markets.find("]")
    markets_s = s_markets[star : en].split(',')

    markets = list(map(lambda x: int(x), markets_s))
    market_names = list(map(lambda x: get_or_default(finam_markets, x, ""), markets))
    
    result = zip(codes, names, ids, markets, market_names)
    return result
Example #8
 def handle(self):
     self.wfile.write(
         codecs.decode('000003010400000001828487', 'hex_codec'))
     self.wfile.write(
         codecs.decode('000006000100000001666f6f626172', 'hex_codec'))
     self.wfile.flush()
     self.rfile.safe_read(9)  # just to keep the connection alive a bit longer
Example #9
 def handle(self):
     self.wfile.write(
         codecs.decode('00000801040000002a88628594e78c767f', 'hex_codec'))
     self.wfile.write(
         codecs.decode('00000600010000002a666f6f626172', 'hex_codec'))
     self.wfile.flush()
     self.rfile.safe_read(9)  # just to keep the connection alive a bit longer
Example #10
def test_serpent_003():
    K = Bits(codecs.decode("1111111111111111111111111111111111111111111111111111111111111111",'hex'),bitorder=1)
    P = codecs.decode("11111111111111111111111111111111",'hex')
    S = Serpent(K)
    C = S.enc(P)
    assert C==codecs.decode("A482EAA5D5771F2FDB2EA1A5F141B9E2",'hex')
    assert S.dec(C)==P
Example #11
def test_serpent_001():
    K = Bits(codecs.decode("8000000000000000000000000000000000000000000000000000000000000000",'hex'),bitorder=1)
    P = codecs.decode("00000000000000000000000000000000",'hex')
    S = Serpent(K)
    C = S.enc(P)
    assert C==codecs.decode("A223AA1288463C0E2BE38EBD825616C0",'hex')
    assert S.dec(C)==P
Example #12
def writeToFloppy(t):
    for i in range(0, len(t)):
        h = intToHex(t[i])
        decode_hex = codecs.decode(h[0], "hex")
        f.write(decode_hex)
        decode_hex1 = codecs.decode(h[1], "hex")
        f.write(decode_hex1)
Example #13
def test_serpent_002():
    K = Bits(codecs.decode("4000000000000000000000000000000000000000000000000000000000000000",'hex'),bitorder=1)
    P = codecs.decode("00000000000000000000000000000000",'hex')
    S = Serpent(K)
    C = S.enc(P)
    assert C==codecs.decode("EAE1D405570174DF7DF2F9966D509159",'hex')
    assert S.dec(C)==P
Example #14
    def test_derSigToHexSig(self):
        derSig = "304502204c01fee2d724fb2e34930c658f585d49be2f6ac87c126506c0179e6977716093022100faad0afd3ae536cfe11f83afaba9a8914fc0e70d4c6d1495333b2fb3df6e8cae"
        self.assertEqual("4c01fee2d724fb2e34930c658f585d49be2f6ac87c126506c0179e6977716093faad0afd3ae536cfe11f83afaba9a8914fc0e70d4c6d1495333b2fb3df6e8cae",
                         derSigToHexSig(derSig))
   

        txn =          ("0100000001a97830933769fe33c6155286ffae34db44c6b8783a2d8ca52ebee6414d399ec300000000" +
                        "8a47" +
                        "304402202c2e1a746c556546f2c959e92f2d0bd2678274823cc55e11628284e4a13016f80220797e716835f9dbcddb752cd0115a970a022ea6f2d8edafff6e087f928e41baac01" +
                        "41" +
                        "04392b964e911955ed50e4e368a9476bc3f9dcc134280e15636430eb91145dab739f0d68b82cf33003379d885a0b212ac95e9cddfd2d391807934d25995468bc55" +
                        "ffffffff02015f0000000000001976a914c8e90996c7c6080ee06284600c684ed904d14c5c88ac204e000000000000" +
                        "1976a914348514b329fda7bd33c7b2336cf7cd1fc9544c0588ac00000000")
        myTxn_forSig = ("0100000001a97830933769fe33c6155286ffae34db44c6b8783a2d8ca52ebee6414d399ec300000000" +
                        "1976a914" + "167c74f7491fe552ce9e1912810a984355b8ee07" + "88ac" +
                        "ffffffff02015f0000000000001976a914c8e90996c7c6080ee06284600c684ed904d14c5c88ac204e000000000000" +
                        "1976a914348514b329fda7bd33c7b2336cf7cd1fc9544c0588ac00000000" +
                        "01000000")
        public_key =    "04392b964e911955ed50e4e368a9476bc3f9dcc134280e15636430eb91145dab739f0d68b82cf33003379d885a0b212ac95e9cddfd2d391807934d25995468bc55"
        hashToSign = hashlib.sha256(hashlib.sha256(codecs.decode(myTxn_forSig.encode('utf-8'),'hex')).digest()).digest()
        sig_der =       "304402202c2e1a746c556546f2c959e92f2d0bd2678274823cc55e11628284e4a13016f80220797e716835f9dbcddb752cd0115a970a022ea6f2d8edafff6e087f928e41baac01"[:-2]
        sig = derSigToHexSig(sig_der)

        vk = ecdsa.VerifyingKey.from_string(codecs.decode(public_key[2:].encode('utf-8'),'hex'), curve=ecdsa.SECP256k1)
        self.assertEqual(vk.verify_digest(codecs.decode(sig.encode('utf-8'),'hex'), hashToSign), True)
Example #15
def toascii(s):
    from unidecode import unidecode
    import codecs
    if isinstance(s,str):
        return unidecode(codecs.decode(s, 'utf-8'))
    elif isinstance(s,list):
        return map(lambda x:unidecode(codecs.decode(x, 'utf-8')),s)
Example #16
def get_element_data(index, field_name, table_name, db_path):
    """ Get the data contained in a specific element.

        :param int index: index of the row
        :param str field_name: name of the field that contains the element
        :param str table_name: name of the table that contains the field
        :param str db_path: path to the database
        :returns: content of the element

        :raises IndexError: invalid index
        :raises IOError: cannot open file
        :raises KeyError: invalid key
        :raises TypeError: invalid index type
        :raises Exception: row does not exist
    """
    try:
        if not exists_row(index, table_name, db_path):
            raise Exception('Row %i does not exist' % index)

        db_file = codecs.open(db_path, 'r', 'utf-8')
        db_data = json.load(db_file)
        db_file.close()

        return db_data[codecs.decode(table_name, 'utf-8')]['rows'][index]\
                [codecs.decode(field_name, 'utf-8')]

    except IndexError as e:
        raise e
    except IOError as e:
        raise e
    except KeyError as e:
        raise e
    except TypeError as e:
        raise e
Example #17
    def _refresh_calendar_caldav(self):
        import caldav
        import caldav.objects
        urls = self._config.get('calendar', 'urls')
        if not urls:
            urls = [self._config.get('calendar', 'url')]
        else:
            urls = urls.split(',')
        for url in urls:
            client = caldav.DAVClient(
                url=url,
                username=self._config.get('calendar', 'user'),
                password=self._config.get('calendar', 'pass'))
            calendar = caldav.objects.Calendar(
                client=client, 
                url=url)

            self._cal = icalendar.Calendar()

            for event in calendar.events():
                if type(event.data) == unicode:
                    event_data = event.data
                else:
                    event_data = codecs.decode(event.data, 'utf8')

                cal = icalendar.Calendar.from_ical(event_data)
                for component in cal.subcomponents:
                    self._cal.add_component(component)

        with codecs.open('calendar.ics', 'w', encoding='utf-8') as f:
            f.write(codecs.decode(self._cal.to_ical(), 'utf-8'))
        self._upcoming_events = self._get_upcoming(self._cal,
                                                   datetime.timedelta(days=2))
        self._last_refresh = datetime.datetime.now()
Example #18
    def test_asn1(self):
        """Unit test ASN.1 module"""

        for value, data in self.tests:
            data = codecs.decode(data, 'hex')

            with self.subTest(msg='encode', value=value):
                self.assertEqual(der_encode(value), data)

            with self.subTest(msg='decode', data=data):
                decoded_value = der_decode(data)
                self.assertEqual(decoded_value, value)
                self.assertEqual(hash(decoded_value), hash(value))
                self.assertEqual(repr(decoded_value), repr(value))
                self.assertEqual(str(decoded_value), str(value))

        for cls, args in self.encode_errors:
            with self.subTest(msg='encode error', cls=cls.__name__, args=args):
                with self.assertRaises(ASN1EncodeError):
                    der_encode(cls(*args))

        for data in self.decode_errors:
            with self.subTest(msg='decode error', data=data):
                with self.assertRaises(ASN1DecodeError):
                    der_decode(codecs.decode(data, 'hex'))
Example #19
    def create_index(self, index_name=uuid4().hex, doc_type='generic_event'):
        """Create index with Timesketch settings.

        Args:
            index_name: Name of the index. Default is a generated UUID.
            doc_type: Name of the document type. Default is generic_event.

        Returns:
            Index name in string format.
            Document type in string format.
        """
        _document_mapping = {
            doc_type: {
                'properties': {
                    'timesketch_label': {
                        'type': 'nested'
                    }
                }
            }
        }

        if not self.client.indices.exists(index_name):
            try:
                self.client.indices.create(
                    index=index_name, body={'mappings': _document_mapping})
            except ConnectionError:
                raise RuntimeError('Unable to connect to Timesketch backend.')
        # We want to return unicode here to keep SQLalchemy happy.
        if not isinstance(index_name, six.text_type):
            index_name = codecs.decode(index_name, 'utf-8')

        if not isinstance(doc_type, six.text_type):
            doc_type = codecs.decode(doc_type, 'utf-8')

        return index_name, doc_type
Example #20
    def test_cpytest_decode(self):
        import codecs

        assert codecs.decode("\xe4\xf6\xfc", "latin-1") == u"\xe4\xf6\xfc"
        raises(TypeError, codecs.decode)
        assert codecs.decode("abc") == u"abc"
        raises(UnicodeDecodeError, codecs.decode, "\xff", "ascii")
Example #21
def ask(text='', keyList=[]):
    """
    Ask subject something. Shows question and returns answer (keypress)
    and reaction time. Defaults to no text and all keys.
    """
    # Draw the TextStims to visual buffer, then show it and reset timing immediately (at stimulus onset)
    stimText.setText(codecs.decode(text,"utf-8"))
    spaceText.setText(codecs.decode(spaceLookup[language],"utf-8"))
    # Set the text height
    stimText.setHeight(1)
    spaceText.setHeight(1)
    # set the text color
    stimText.setColor('white')
    spaceText.setColor('white')
    stimText.draw()
    spaceText.draw()
    win.flip()
    event.clearEvents('keyboard')

    # Halt everything and wait for (first) responses matching the keys given in the Q object.
    response = event.waitKeys(keyList=['space','q','r'])
    if response[0] in keysQuit:  # Look at first response [0]. Quit everything if quit-key was pressed
        core.quit()
    if response[0] in keysBreak:
        event.clearEvents('keyboard')
        win.flip()
    if event.getKeys(keyList=['escape', 'q']):
        core.quit()
    if event.getKeys(keyList=['s']):
        print("Skipped experiment")
        quit()
    return response # When answer given, return it.
Example #22
  def Read(self):
    """Reads a string from the input.

    Returns:
      str: input.
    """
    encoded_string = self._file_object.readline()

    if isinstance(encoded_string, py2to3.UNICODE_TYPE):
      return encoded_string

    try:
      string = codecs.decode(encoded_string, self._encoding, self._errors)
    except UnicodeDecodeError:
      if self._errors == 'strict':
        logger.error(
            'Unable to properly read input due to encoding error. '
            'Switching to error tolerant encoding which can result in '
            'non Basic Latin (C0) characters to be replaced with "?" or '
            '"\\ufffd".')
        self._errors = 'replace'

      string = codecs.decode(encoded_string, self._encoding, self._errors)

    return string
Example #23
    def test_priority(self):
        frame_priority = FrameFactory(codecs.decode(b'000005' # length
                                                    b'0200' # type, flags
                                                    b'deadbeef' # stream id
                                                    b'cafebabe' # stream dep
                                                    b'12' # weight
                                                    , 'hex'))
        assert (frame_priority.length == 5)
        assert (frame_priority.type == HTTP2_FRAME_PRIORITY)
        assert (frame_priority.flags == 0)
        assert (frame_priority.stream_id == 0xdeadbeef)
        assert (frame_priority.data == b'\xCA\xFE\xBA\xBE\x12')
        assert (frame_priority.priority.data == b'')
        assert (frame_priority.priority.exclusive == True)
        assert (frame_priority.priority.stream_dep == 0x4afebabe)
        assert (frame_priority.priority.weight == 0x13)

        import pytest
        # Invalid length
        with pytest.raises(HTTP2Exception) as e:
            x = PriorityFrame(codecs.decode(b'000006' # length
                                            b'0200' # type, flags
                                            b'deadbeef' # stream id
                                            b'cafebabe' # stream dep
                                            b'12' # weight
                                            b'00' # unexpected additional payload
                                            , 'hex'))
        assert (str(e.value) == 'Invalid number of bytes in PRIORITY frame')
Example #24
    def test_goaway(self):
        frame_goaway = FrameFactory(codecs.decode(b'00000a' # length
                                                  b'0700' # type, flags
                                                  b'deadbeef' # stream id
                                                  b'00000000' # last stream id
                                                  b'00000000' # error code
                                                  b'cafe' # debug data
                                                  , 'hex'))
        assert (frame_goaway.length == 10)
        assert (frame_goaway.type == HTTP2_FRAME_GOAWAY)
        assert (frame_goaway.flags == 0)
        assert (frame_goaway.stream_id == 0xdeadbeef)
        assert (frame_goaway.last_stream_id == 0)
        assert (frame_goaway.error_code == HTTP2_NO_ERROR)
        assert (frame_goaway.debug_data == b'\xCA\xFE')

        import pytest
        # Invalid length
        with pytest.raises(HTTP2Exception) as e:
            x = GoAwayFrame(codecs.decode(b'000005' # length
                                          b'0700' # type, flags
                                          b'deadbeef' # stream id
                                          b'1234567890' # invalid length
                                          , 'hex'))
        assert (str(e.value) == 'Invalid number of bytes in GO_AWAY frame')
Example #25
    def _raw_print_image(self, line, size, output=None ):
        """ Print formatted image """
        i = 0
        cont = 0
        buffer = ""
        raw = b""

        def __raw(string):
            if output:
                output(string)
            else:
                self._raw(string)
       
        raw += S_RASTER_N.encode('utf-8')
        buffer = "%02X%02X%02X%02X" % (int((size[0]/size[1])/8), 0, size[1], 0)
        raw += codecs.decode(buffer, 'hex')
        buffer = ""

        while i < len(line):
            hex_string = int(line[i:i+8],2)
            buffer += "%02X" % hex_string
            i += 8
            cont += 1
            if cont % 4 == 0:
                raw += codecs.decode(buffer, 'hex')
                buffer = ""
                cont = 0

        return raw
Example #26
def commandInput():
	"""
	Reads a command, runs it, and returns its output.
	"""
	setColor(DEFAULT)
	s = input(os.getcwd().replace(baseDir, "")[1:]+">")
	t = shlex.split(s)
	out = "error"

	try:
		if os.name == 'nt':
			out = subprocess.check_output(t, shell=True)
		else:
			out = subprocess.check_output(" ".join(t), shell=True)

		try:
			out = codecs.decode(out)
		except:
			out = codecs.decode(out, 'cp866', 'ignore')
		setColor(DEFAULT)
		print(out)

	except Exception as e:
		#if e.returncode!=1:
			print("TTT", t)
			print("EEE", e)
			setColor(RED)
			print("Something went wrong")

	return (t, out)
Example #27
    def __getitem__(self, k):
        if isinstance(k, int):
            k = k if k >= 0 else k + len(self)
            return codecs.decode(
                self._buf[self.byte_slice(k)],
                self._encoding, 'replace'
            )
        elif isinstance(k, slice):
            if len(self) == 0:
                return ''
            start, stop, step = k.indices(len(self))
            start = min(start, len(self))
            stop = min(stop, len(self))

            if start < len(self):
                byte_start = self.byte_slice(start).start
            else:
                byte_start = len(self._buf)

            if stop < len(self):
                byte_stop = self.byte_slice(stop).start
            else:
                byte_stop = len(self._buf)

            s = codecs.decode(
                self._buf[byte_start:byte_stop], self._encoding, 'replace'
            )
            if step == 1 or step is None:
                return s
            return s[::step]

        raise TypeError('indexing not supported for %r' % (type(k),))
Example #28
    def send_email(self, to_, from_=None, subject=None, body=None,
                   subtype="plain", charset="utf-8"):

        message = MIMEText(body, subtype, charset)

        if subject:
            subject_header = Header()
            subject = (codecs.decode(bytearray(subject, sys.getdefaultencoding()), charset)
                       if isinstance(subject, str) else subject)
            subject_header.append(subject.strip())
            message["Subject"] = subject_header

        from_ = from_ or self.default_sender
        from_ = (codecs.decode(bytearray(from_, sys.getdefaultencoding()), charset)
                 if isinstance(from_, str) else from_)
        from_realname, from_addr = parseaddr(from_)
        from_header = Header()
        from_header.append(formataddr((from_realname, from_addr)))
        message['From'] = from_header

        to_ = (codecs.decode(bytearray(to_, sys.getdefaultencoding()), charset)
               if isinstance(to_, str) else to_)
        to_realname, to_addr = parseaddr(to_)
        to_header = Header()
        to_header.append(formataddr((to_realname, to_addr)))
        message['To'] = to_header

        self._send(message, from_addr, to_addr)
Example #29
    def send_html_email(self, to_, from_=None, subject=None, text=None,
                        html=None, charset="utf-8"):

        message = MIMEMultipart("alternative")

        if subject:
            subject_header = Header()
            subject = (codecs.decode(bytearray(subject, sys.getdefaultencoding()), charset)
                       if isinstance(subject, str) else subject)
            subject_header.append(subject.strip())
            message["Subject"] = subject_header

        from_ = from_ or self.default_sender
        from_ = (codecs.decode(bytearray(from_, sys.getdefaultencoding()), charset)
                 if isinstance(from_, str) else from_)
        from_realname, from_addr = parseaddr(from_)
        from_header = Header()
        from_header.append(formataddr((from_realname, from_addr)))
        message['From'] = from_header

        to_ = (codecs.decode(bytearray(to_, sys.getdefaultencoding()), charset)
               if isinstance(to_, str) else to_)
        to_realname, to_addr = parseaddr(to_)
        to_header = Header()
        to_header.append(formataddr((to_realname, to_addr)))
        message['To'] = to_header

        message.attach(MIMEText(text, "plain", charset))
        message.attach(MIMEText(html, "html", charset))

        self._send(message, from_addr, to_addr)
Example #30
 def merkle_hash(a, b):
     # Reverse inputs before and after hashing
     # due to big-endian / little-endian nonsense
     a1 = codecs.decode(a, 'hex')[::-1]
     b1 = codecs.decode(b, 'hex')[::-1]
     h = hashlib.sha256(hashlib.sha256(a1 + b1).digest()).digest()
     return codecs.encode(h[::-1], 'hex')
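The helper above round-trips through codecs: the hex inputs are decoded to raw bytes, byte-reversed, double-SHA256 hashed, reversed again, and re-encoded to hex. A minimal standalone sketch of that flow, using placeholder digests rather than real transaction ids:

import codecs
import hashlib

# Placeholder 32-byte digests, hex encoded (illustrative values only).
left = '11' * 32
right = '22' * 32

# Same steps as merkle_hash above, written out once for a quick check.
a1 = codecs.decode(left, 'hex')[::-1]
b1 = codecs.decode(right, 'hex')[::-1]
parent = codecs.encode(hashlib.sha256(hashlib.sha256(a1 + b1).digest()).digest()[::-1], 'hex')

assert len(parent) == 64  # still a hex-encoded 32-byte digest
print(parent.decode())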
Example #31
 def unescape_match(match):
     try:
         return codecs.decode(match.group(0), 'unicode-escape')
     except:
         pass
Example #32
 def _to_bytes(hex_string):
     # zero pads and decodes a hex string
     if len(hex_string) % 2:
         hex_string = '0{}'.format(hex_string)
     return codecs.decode(hex_string, 'hex_codec')
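The zero-padding matters because the hex codec rejects odd-length input. A short sketch of the behaviour, with the helper copied out as a plain function so the check is self-contained:

import codecs

def _to_bytes(hex_string):
    # zero-pad to an even number of hex digits, then decode
    if len(hex_string) % 2:
        hex_string = '0{}'.format(hex_string)
    return codecs.decode(hex_string, 'hex_codec')

assert _to_bytes('abc') == b'\x0a\xbc'   # odd length: padded to '0abc' first
assert _to_bytes('ff') == b'\xff'        # even length: decoded as-is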
Example #33
def from_json(json_object):
    if '__class__' in json_object and json_object['__class__'] == 'bytes':
        return codecs.decode(json_object['__value__'].encode(), 'base64')
    return json_object
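from_json is designed to be passed as an object_hook to json.loads, paired with an encoder that stores bytes values as tagged base64 strings. A hedged round-trip sketch (the encoder side here is an assumption, not taken from the original project):

import codecs
import json

def from_json(json_object):
    # same hook as above, repeated so this snippet runs on its own
    if '__class__' in json_object and json_object['__class__'] == 'bytes':
        return codecs.decode(json_object['__value__'].encode(), 'base64')
    return json_object

# Assumed encoder side: represent bytes as a tagged base64 string.
payload = {'__class__': 'bytes', '__value__': codecs.encode(b'hello', 'base64').decode()}

restored = json.loads(json.dumps(payload), object_hook=from_json)
assert restored == b'hello'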
Example #34
    def ParseReceiverData(self,
                          parser_mediator,
                          row,
                          query=None,
                          **unused_kwargs):
        """Parses a single row from the receiver and cache response table.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      row: The row resulting from the query.
      query: Optional query string. The default is None.
    """
        # Note that pysqlite does not accept a Unicode string in row['string'] and
        # will raise "IndexError: Index must be int or string".

        data = {}
        key_url = row['request_key']

        data_dict = {}
        description = u'MacKeeper Entry'
        # Check the URL, since that contains vital information about the type of
        # event we are dealing with.
        if key_url.endswith(u'plist'):
            description = u'Configuration Definition'
            data[u'text'] = u'Plist content added to cache.'

        elif key_url.startswith(u'http://event.zeobit.com'):
            description = u'MacKeeper Event'
            try:
                _, _, part = key_url.partition(u'?')
                data[u'text'] = part.replace(u'&', u' ')
            except UnicodeDecodeError:
                data[u'text'] = u'N/A'

        elif key_url.startswith(u'http://account.zeobit.com'):
            description = u'Account Activity'
            _, _, activity = key_url.partition(u'#')
            if activity:
                data[u'text'] = u'Action started: {0:s}'.format(activity)
            else:
                data[u'text'] = u'Unknown activity.'

        elif key_url.startswith(u'http://support.') and u'chat' in key_url:
            description = u'Chat '
            try:
                jquery = codecs.decode(row['data'], u'utf-8')
            except UnicodeDecodeError:
                jquery = u''

            data_dict = ExtractJQuery(jquery)
            data = ParseChatData(data_dict)

            data[u'entry_type'] = data_dict.get(u'type', u'')
            if data[u'entry_type'] == u'comment':
                description += u'Comment'
            elif data[u'entry_type'] == u'outgoing':
                description += u'Outgoing Message'
            elif data[u'entry_type'] == u'incoming':
                description += u'Incoming Message'
            else:
                # Empty or not known entry type, generic status message.
                description += u'Entry'
                data[u'text'] = u';'.join(DictToList(data_dict))
                if not data[u'text']:
                    data[u'text'] = u'No additional data.'

        time_value = row['time_string']
        if isinstance(time_value, py2to3.INTEGER_TYPES):
            timestamp = timelib.Timestamp.FromJavaTime(time_value)
        else:
            try:
                timestamp = timelib.Timestamp.FromTimeString(time_value)
            except errors.TimestampError:
                parser_mediator.ProduceParseError(
                    u'Unable to parse time string: {0:s}'.format(time_value))
                return

        event_object = MacKeeperCacheEvent(timestamp, description, row['id'],
                                           key_url, data)
        parser_mediator.ProduceEvent(event_object, query=query)
Example #35
from MAPI.Struct import MAPIError, ROWENTRY
from MAPI.Tags import (
    PR_AB_PROVIDER_ID, PR_ENTRYID, PR_IPM_CONTACT_ENTRYID, PR_DISPLAY_NAME_W,
    PR_DEPTH, PR_IPM_PUBLIC_FOLDERS_ENTRYID, PR_ZC_CONTACT_FOLDER_ENTRYIDS,
    PR_ZC_CONTACT_FOLDER_NAMES_W, PR_ZC_CONTACT_STORE_ENTRYIDS, PR_ADDRTYPE,
    PR_BODY, PR_LOCALITY, PR_STATE_OR_PROVINCE, PR_COMPANY_NAME,
    PR_BUSINESS_FAX_NUMBER, PR_GIVEN_NAME, PR_MIDDLE_NAME,
    PR_NORMALIZED_SUBJECT, PR_MIDDLE_NAME, PR_TITLE,
    PR_TRANSMITABLE_DISPLAY_NAME, PR_OBJECT_TYPE, PR_MESSAGE_CLASS,
    PR_DISPLAY_NAME, PR_ACCOUNT_W, PR_MEMBER_ENTRYID, PR_ACL_TABLE,
    IID_IExchangeModifyTable, PR_MEMBER_RIGHTS, PR_MAILBOX_OWNER_ENTRYID,
    ecRightsFolderVisible, ecRightsReadAny, pbGlobalProfileSectionGuid,
    IID_IMAPIFolder)

# TODO: define in python-mapi
CONTACTS_GUID = codecs.decode('727f0430e3924fdab86ae52a7fe46571', 'hex')
# ULONG(flags) + GUID + BYTE(type)
WRAPPED_ENTRYID_PREFIX = codecs.decode(
    '00000000C091ADD3519DCF11A4A900AA0047FAA4', 'hex')
WRAPPED_EID_TYPE_PERSONAL_DISTLIST = b'\xB5'
WRAPPED_EID_TYPE_LOCAL_DISTLIST = b'\xC3'


@pytest.fixture
def providersession():
    user = os.getenv('KOPANO_TEST_USER')
    password = os.getenv('KOPANO_TEST_PASSWORD')
    socket = os.getenv('KOPANO_SOCKET')

    return OpenECSession(user,
                         password,
Example #36
 def to_bytes(self, n, length):
     h = '%x' % n
     s = codecs.decode(('0' * (len(h) % 2) + h).zfill(length * 2), "hex")
     return s
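to_bytes pads the hex form of n to length * 2 digits before decoding, so the result is a fixed-width big-endian byte string. A standalone sketch of the same idea (the original is a method, so self is dropped here):

import codecs

def to_bytes(n, length):
    h = '%x' % n
    return codecs.decode(('0' * (len(h) % 2) + h).zfill(length * 2), 'hex')

assert to_bytes(255, 4) == b'\x00\x00\x00\xff'
assert to_bytes(0x1234, 2) == b'\x12\x34'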
Example #37
class ArchiveVersionMatcherTests(unittest.TestCase):
    """Testing for the _VERSION_MATCHER regex itself"""

    def test_version_too_short(self):
        shorty = (
            r'QIIME 2\n'
            r'archive: 4'
        )
        self.assertNotRegex(shorty, _VERSION_MATCHER)

    def test_version_too_long(self):
        longy = (
            r'QIIME 2\n'
            r'archive: 4\n'
            r'framework: 2019.8.1.dev0\n'
            r'This line should not be here'
        )
        self.assertNotRegex(longy, _VERSION_MATCHER)

    warnings.filterwarnings('ignore', 'invalid escape sequence',
                            DeprecationWarning)
    splitvm = codecs.decode(_VERSION_MATCHER.encode('utf-8'),
                            'unicode-escape').split(sep='\n')
    re_l1, re_l2, re_l3 = splitvm

    def test_line1_good(self):
        self.assertRegex('QIIME 2\n', self.re_l1)

    def test_line1_bad(self):
        self.assertNotRegex('SHIMMY 2\n', self.re_l1)

    def test_archive_version_1digit_numeric(self):
        self.assertRegex('archive: 1\n', self.re_l2)

    def test_archive_version_2digit_numeric(self):
        self.assertRegex('archive: 12\n', self.re_l2)

    def test_archive_version_bad(self):
        self.assertNotRegex('agama agama\n', self.re_l2)

    def test_archive_version_3digit_numeric(self):
        self.assertNotRegex('archive: 123\n', self.re_l2)

    def test_archive_version_nonnumeric(self):
        self.assertNotRegex('archive: 1a\n', self.re_l2)

    def test_fmwk_version_good_semver(self):
        self.assertRegex('framework: 2.0.6', self.re_l3)

    def test_fmwk_version_good_semver_dev(self):
        self.assertRegex('framework: 2.0.6.dev0', self.re_l3)

    def test_fmwk_version_good_year_month_patch(self):
        self.assertRegex('framework: 2020.2.0', self.re_l3)

    def test_fmwk_version_good_year_month_patch_2digit_month(self):
        self.assertRegex('framework: 2018.11.0', self.re_l3)

    def test_fmwk_version_good_year_month_patch_dev(self):
        self.assertRegex('framework: 2020.2.0.dev1', self.re_l3)

    def test_fmwk_version_good_ymp_2digit_month_dev(self):
        self.assertRegex('framework: 2020.11.0.dev0', self.re_l3)

    def test_fmwk_version_invalid_month(self):
        self.assertNotRegex('framework: 2020.13.0', self.re_l3)

    def test_fmwk_version_invalid_month_leading_zero(self):
        self.assertNotRegex('framework: 2020.03.0', self.re_l3)

    def test_fmwk_version_invalid_year(self):
        self.assertNotRegex('framework: 1953.3.0', self.re_l3)
Example #38
 def freeze_deserialize(cls, args):
     return cls(*[codecs.decode(x, "hex") for x in args])
Example #39
def get_char(s):
    return codecs.decode(s, 'unicode_escape')
Example #40
#coding:utf-8
#Crypto 13(100pt)
import codecs

ans = codecs.decode('cvpbPGS{abg_gbb_onq_bs_n_ceboyrz}', 'rot13')
print(ans)
#picoCTF{not_too_bad_of_a_problem}
Example #41
#!/usr/bin/python3

import sys
import binascii
import base64
import codecs

if len(sys.argv) != 2:
    exit(84)
try:
    with open(sys.argv[1], "r") as file:
        str = file.read()
        if not str:
            print("lol")
            exit(84)
        str = str.replace('\n', '')
        print(codecs.encode(codecs.decode(str, 'hex'), 'base64').decode().replace('\n', ''))
except:
    exit(84)
Example #42
 def _to_bytes(hex):
     if len(hex) % 2:
         hex = "0{}".format(hex)
     return codecs.decode(hex, "hex_codec")
Example #43
def encode_int(s):
    a = "%x" % s
    x = codecs.decode('0' * (len(a) % 2) + a, 'hex')[::-1]
    x = bytes_to_str(x)
    return '' if s == 0 else x
Example #44
def execute_compiler(base_command,
                     compiler_stdin,
                     options,
                     rename_functions=True,
                     print_stdout=True,
                     debug_wrapper_file="tmp_dcc_sanitizer1.c",
                     checking_only=False):
    command = list(base_command)
    if compiler_stdin:
        if rename_functions and not options.unsafe_system_includes:
            # unistd functions used by single-sanitizer dcc
            rename_function_names = ['_exit', 'close', 'execvp', 'getpid']
            # unistd functions used by dual-sanitizer dcc
            if len(options.sanitizers) > 1:
                rename_function_names += [
                    'lseek', 'pipe', 'read', 'sleep', 'unlink', 'write'
                ]
            command += [
                '-D{}=__renamed_{}'.format(f, f) for f in rename_function_names
            ]

        wrapped_functions = ['main']

        override_functions = []
        if len(options.sanitizers) > 1:
            override_functions = [
                'clock', 'fdopen', 'fopen', 'freopen', 'system', 'time'
            ]

        if options.ifdef_instead_of_wrap:
            command += [
                '-D{}=__real_{}'.format(f, f) for f in wrapped_functions
            ]
            command += [
                '-D{}=__wrap_{}'.format(f, f) for f in override_functions
            ]
            for f in wrapped_functions:
                compiler_stdin = compiler_stdin.replace('__wrap_' + f, f)
            for f in override_functions:
                compiler_stdin = compiler_stdin.replace('__real_' + f, f)
        else:
            command += [
                '-Wl' +
                ''.join(',-wrap,' + f
                        for f in wrapped_functions + override_functions)
            ]

    options.debug_print(" ".join(command))

    if options.debug > 1 and compiler_stdin:
        options.debug_print("Leaving dcc code in", debug_wrapper_file,
                            "compile with this command:")
        options.debug_print(" ".join(command).replace('-x c -',
                                                      debug_wrapper_file))
        try:
            with open(debug_wrapper_file, "w") as f:
                f.write(compiler_stdin)
        except OSError as e:
            print(e)
    input = codecs.encode(compiler_stdin, 'utf8')
    process = subprocess.run(command,
                             input=input,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
    stdout = codecs.decode(process.stdout, 'utf8', errors='replace')

    if checking_only:
        # we are running gcc as an extra checking phase
        # and options don't match the gcc version so give up silently
        if 'command line' in stdout or 'option' in stdout or '/dev/null' in stdout:
            options.debug_print(stdout)
            return ''

        # checking is run after we have already successfully generated an executable with clang
        # so if we can get an error, unlink the executable
        if process.returncode:
            try:
                os.unlink(options.object_pathname)
            except OSError as e:
                if options.debug:
                    print(e)

    # avoid a confusing mess of linker errors
    if "undefined reference to `main" in stdout:
        options.die(
            "error: your program does not contain a main function - a C program must contain a main function"
        )

    # workaround for  https://github.com/android-ndk/ndk/issues/184
    # when not triggered earlier
    if "undefined reference to `__mul" in stdout and not checking_only:
        command = [
            c for c in command if not c in [
                '-fsanitize=undefined',
                '-fno-sanitize-recover=undefined,integer'
            ]
        ]
        options.debug_print("undefined reference to `__mulodi4'")
        options.debug_print("recompiling", " ".join(command))
        process = subprocess.run(command,
                                 input=input,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT)
        stdout = codecs.decode(process.stdout, 'utf8', errors='replace')

    # a user call to a renamed unistd.h function appears to be undefined
    # so recompile without renames

    if rename_functions and "undefined reference to `__renamed_" in stdout and not checking_only:
        options.debug_print(
            "undefined reference to `__renamed_' recompiling without -D renames"
        )
        return execute_compiler(base_command,
                                compiler_stdin,
                                options,
                                rename_functions=False,
                                print_stdout=print_stdout,
                                debug_wrapper_file=debug_wrapper_file)

    if stdout and print_stdout:
        if options.explanations:
            explain_compiler_output(stdout, options)
        else:
            print(stdout, end='', file=sys.stderr)

    if process.returncode:
        sys.exit(process.returncode)

    return stdout
Example #45
def main(argv):
    sock = transport.ReaderWriter(io.open(sys.stdin.fileno(), 'rb'), io.open(sys.stdout.fileno(), 'wb'))
    sock.lock = threading.Lock()
    qth_reader = transport.DNSQueryTransportHandlerWebSocketClientReader(sock)
    qth_writer = transport.DNSQueryTransportHandlerWebSocketClientWriter(sock)

    response_queue = queue.Queue()
    queries_in_waiting = set()
    th_factory = transport.DNSQueryTransportHandlerDNSFactory()
    tm = transport.DNSQueryTransportManager()
    try:
        while True:
            try:
                qth_writer.qtms = []

                tm.handle_msg(qth_reader)
                qth_reader.finalize()

                if len(qth_reader.msg_recv) == 0:
                    break

                # load the json content
                try:
                    content = json.loads(codecs.decode(qth_reader.msg_recv, 'utf-8'))
                except ValueError:
                    raise RemoteQueryError('JSON decoding of request failed: %s' % qth_reader.msg_recv)

                if 'version' not in content:
                    raise RemoteQueryError('No version information in request.')
                try:
                    major_vers, minor_vers = [int(x) for x in str(content['version']).split('.', 1)]
                except ValueError:
                    raise RemoteQueryError('Version of JSON input in request is invalid: %s' % content['version'])

                # ensure major version is a match and minor version is no greater
                # than the current minor version
                curr_major_vers, curr_minor_vers = [int(x) for x in str(transport.DNS_TRANSPORT_VERSION).split('.', 1)]
                if major_vers != curr_major_vers or minor_vers > curr_minor_vers:
                    raise RemoteQueryError('Version %d.%d of JSON input in request is incompatible with this software.' % (major_vers, minor_vers))

                if 'requests' not in content:
                    raise RemoteQueryError('No request information in request.')

                for i, qtm_serialized in enumerate(content['requests']):
                    try:
                        qtm = transport.DNSQueryTransportMeta.deserialize_request(qtm_serialized)
                    except transport.TransportMetaDeserializationError as e:
                        raise RemoteQueryError('Error deserializing request information: %s' % e)

                    qth_writer.add_qtm(qtm)
                    th = th_factory.build(processed_queue=response_queue)
                    th.add_qtm(qtm)
                    th.init_req()
                    tm.handle_msg_nowait(th)
                    queries_in_waiting.add(th)

                while queries_in_waiting:
                    th = response_queue.get()
                    th.finalize()
                    queries_in_waiting.remove(th)

                qth_writer.init_req()

            except RemoteQueryError as e:
                qth_writer.init_err_send(str(e))

            tm.handle_msg(qth_writer)

    except EOFError:
        pass
    finally:
        tm.close()
Example #46
 def bdec(s):
     if _BIN_ENCODING == 'base64':
         return base64.urlsafe_b64decode(s)
     else:
         return codecs.decode(s, _BIN_ENCODING)
Example #47
def parse_42_guid(guid):
    guid_parts = guid.split('-')
    guid_int = codecs.decode("".join(guid_parts)[:32], "hex")
    return struct.unpack('>IIQ', guid_int)
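Stripping the dashes leaves 32 hex characters, i.e. 16 bytes, which the '>IIQ' format splits into two unsigned 32-bit integers and one unsigned 64-bit integer, all big-endian. A quick check with a synthetic GUID (not a real value from that format):

import codecs
import struct

def parse_42_guid(guid):
    guid_parts = guid.split('-')
    guid_int = codecs.decode("".join(guid_parts)[:32], "hex")
    return struct.unpack('>IIQ', guid_int)

# 00000001 / 00000002 / 0000000000000003 -> fields 1, 2 and 3
assert parse_42_guid('00000001-0000-0002-0000-000000000003') == (1, 2, 3)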
Example #48
    def start_server(self, httphandler):
        """use the configuration to setup and start the cherrypy server
        """
        cherrypy.config.update({'log.screen': True})
        if config['server.localhost_only']:
            socket_host = "localhost"
        else:
            if config['server.ipv6_enabled']:
                socket_host = "::"
            else:
                socket_host = "0.0.0.0"

        resourcedir = os.path.abspath(pathprovider.getResourcePath('res'))

        if config['server.ssl_enabled']:
            cert = pathprovider.absOrConfigPath(
                config['server.ssl_certificate'])
            pkey = pathprovider.absOrConfigPath(
                config['server.ssl_private_key'])
            cherrypy.config.update({
                'server.ssl_certificate':
                cert,
                'server.ssl_private_key':
                pkey,
                'server.socket_port':
                config['server.ssl_port'],
            })
            # Create second server for redirecting http to https:
            redirecter = cherrypy._cpserver.Server()
            redirecter.socket_port = config['server.port']
            redirecter._socket_host = socket_host
            redirecter.thread_pool = 10
            redirecter.subscribe()
        else:
            cherrypy.config.update({
                'server.socket_port': config['server.port'],
            })

        cherrypy.config.update({
            'log.error_file':
            os.path.join(pathprovider.getUserDataPath(), 'server.log'),
            'environment':
            'production',
            'server.socket_host':
            socket_host,
            'server.thread_pool':
            30,
            'tools.sessions.on':
            True,
            'tools.sessions.timeout':
            60 * 24,
        })

        if not config['server.keep_session_in_ram']:
            sessiondir = os.path.join(pathprovider.getUserDataPath(),
                                      'sessions')
            if not os.path.exists(sessiondir):
                os.mkdir(sessiondir)
            cherrypy.config.update({
                'tools.sessions.storage_type': "file",
                'tools.sessions.storage_path': sessiondir,
            })
        basedirpath = config['media.basedir']
        if sys.version_info < (3, 0):
            basedirpath = codecs.encode(basedirpath, 'utf-8')
            scriptname = codecs.encode(config['server.rootpath'], 'utf-8')
        else:
            # fix cherrypy unicode issue (only for Python3)
            # see patch to cherrypy.lib.static.serve_file way above and
            # https://bitbucket.org/cherrypy/cherrypy/issue/1148/wrong-encoding-for-urls-containing-utf-8
            basedirpath = codecs.decode(codecs.encode(basedirpath, 'utf-8'),
                                        'latin-1')
            scriptname = config['server.rootpath']
        cherrypy.tree.mount(
            httphandler,
            scriptname,
            config={
                '/res': {
                    'tools.staticdir.on':
                    True,
                    'tools.staticdir.dir':
                    resourcedir,
                    'tools.staticdir.index':
                    'index.html',
                    'tools.caching.on':
                    False,
                    'tools.gzip.mime_types':
                    ['text/html', 'text/plain', 'text/javascript', 'text/css'],
                    'tools.gzip.on':
                    True,
                },
                '/serve': {
                    'tools.staticdir.on': True,
                    'tools.staticdir.dir': basedirpath,
                    # 'tools.staticdir.index': 'index.html',    if ever needed: in py2 MUST utf-8 encode
                    'tools.encode.on': True,
                    'tools.encode.encoding': 'utf-8',
                    'tools.caching.on': False,
                },
                '/favicon.ico': {
                    'tools.staticfile.on': True,
                    'tools.staticfile.filename':
                    resourcedir + '/img/favicon.ico',
                }
            })
        #rest_v1_mount_path = '/api/v1'
        #cherrypy.tree.mount(
        #    api.v1.RestV1Root(config, httphandler, rest_v1_mount_path),
        #    rest_v1_mount_path,
        #    config={'/':
        #        {
        #            'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
        #        }
        #    })
        log.i(_('Starting server on port %s ...') % config['server.port'])

        cherrypy.lib.caching.expires(0)  # disable expiry caching
        cherrypy.engine.start()
        cherrypy.engine.block()
Example #49
def parseLine(f_path, chan, from_time, to_time, ort, tier, new_file):
    global old_speaker
    global hnr
    speaker = getSpeaker(f_path, tier)
    if new_file:
        with gzip.open(cgn_path + "xml/skp-ort/comp-" + f_path +
                       ".skp.gz") as h:
            skp_gz = h.read()
        skp_txt = codecs.decode(skp_gz, "ascii")
        #        skp_txt = skp_txt.encode("utf-8")
        for ent in entitydefs:
            skp_txt = re.sub(r"&{};".format(ent),
                             entitydefs[ent].decode("latin-1"), skp_txt)
#            skp_txt.replace("&" + ent + ";", entitydefs[ent].decode("latin-1"))
#        print(skp_txt)
        global skp_root
        skp_root = ET.fromstring(skp_txt)
        #        print(ET.tostring(skp_root))
        with gzip.open(cgn_path + "xml/tag/comp-" + f_path + ".tag.gz") as h:
            tag_gz = h.read()
        tag_txt = codecs.decode(tag_gz, "ascii")
        for ent in entitydefs:
            tag_txt = re.sub(r"&{};".format(ent),
                             entitydefs[ent].decode("latin-1"), tag_txt)
        global tag_root
        tag_root = ET.fromstring(tag_txt)
#        hnr = getHNR(f_path, chan, tier)
    else:
        if speaker != old_speaker:
            pass
#            hnr = getHNR(f_path, chan, tier)
    old_speaker = speaker
    # clean up ort
    word_list = [word for word in ort.split(" ") if word not in ["", " "]]
    oov = False
    output_lines = []
    for counter, word in enumerate(word_list, 1):
        # check for segment / oov
        chunk_id = ",".join([f_path, tier, from_time, to_time])
        seg_var, split_up = checkCanonical(word, -1, chunk_id)
        word = re.sub(r'\*[a-z]', '', re.sub(r"[!?.,:;\t\n\r]*", "", word))
        if not seg_var:
            oov = True
        elif seg_var[-1] != segment:
            continue
        else:
            word_chunk_i = counter
            word_phon = " ".join(seg_var)
            num_phon = str(len(seg_var))
            subtlexwf, lg10wf = subtlex[word] if word in subtlex else [
                "NA", "NA"
            ]
            cow_word = re.sub(r"'", "", word.lower())
            cow_wf = cow_uni[cow_word] if cow_word in cow_uni else "0"
            otan, otaf, ptan, ptaf = neighbours[
                word] if word in neighbours else ["NA", "NA", "NA", "NA"]
            lex_neb_num, lex_neb_freq = neighbours_lex[
                word] if word in neighbours_lex else ["NA", "NA"]
            sent_i, word_sent_i = getSentenceInfo(skp_root, speaker, from_time,
                                                  to_time, word)
            found = skp_root.findall(".//tw[@ref='{}']".format(".".join(
                [f_path.split("/")[-1],
                 str(sent_i),
                 str(word_sent_i)])))[0]
            parent = skp_root.findall("./tau[@ref='{}']".format(".".join(
                [f_path.split("/")[-1], str(sent_i)])))[0]
            parent.remove(found)
            next_word = findWord(skp_root, sent_i, word_sent_i, 1)
            cow_next_word = re.sub(r"'", "", next_word.lower())
            next_wf = cow_uni[
                cow_next_word] if cow_next_word in cow_uni else "0"
            bigram = cow_word + " " + cow_next_word
            bigram_f = cow[bigram] if bigram in cow else "0"
            prev_word = findWord(skp_root, sent_i, word_sent_i, -1)
            cow_prev_word = re.sub(r"'", "", prev_word.lower())
            prev_wf = cow_uni[
                cow_prev_word] if cow_prev_word in cow_uni else "0"
            prev_bigram = cow_prev_word + " " + cow_word
            prev_bigram_f = cow[prev_bigram] if prev_bigram in cow else "0"
            if counter == len(word_list):
                next_phon = "SIL"
            else:
                next_trans = checkCanonical(word_list[counter], 0, chunk_id)[0]
                if next_trans:
                    next_phon = next_trans[0]
                else:
                    next_phon = "NA"
            if len(seg_var) > 1:
                prev_phon = seg_var[-2]
            else:
                if split_up:
                    prev_trans = checkCanonical(word, -2, chunk_id)[0]
                    if prev_trans:
                        prev_phon = prev_trans[-1]
                    else:
                        prev_phon = "NA"
                else:
                    if counter == 1:
                        prev_phon = "SIL"
                    else:
                        prev_trans = checkCanonical(word_list[counter - 2], -1,
                                                    chunk_id)[0]
                        if prev_trans:
                            prev_phon = prev_trans[-1]
                        else:
                            prev_phon = "NA"


#            oov_meta = getOOVmeta(chunk_id, counter)
            phon_pron, next_phon_pron, prev_phon_pron, overlap, oov_meta = getAnnotInfo(
                f_path, from_time, to_time, speaker, counter)
            word_pos, word_class, type_of_s = getPOS(tag_root, sent_i,
                                                     word_sent_i, word)
            num_syl, word_stress = celex[word] if word in celex else [
                "NA", "NA"
            ]
            # if no syllable info in CELEX or affixed word not in CELEX
            if type_of_s not in ["S", "OTHER", "NA"
                                 ] and num_syl == "NA" and word_stress == "NA":
                word_stem = re.sub(r"'?s?$", "", word)
                num_syl, word_stress = celex[
                    word_stem] if word_stem in celex else ["NA", "NA"]
            output_lines.append([
                str(word_chunk_i),
                str(sent_i),
                str(word_sent_i), word, word_phon, num_phon, phon_pron,
                prev_phon, prev_phon_pron, next_phon, next_phon_pron, overlap,
                oov_meta, word_pos, word_class, type_of_s, speaker, subtlexwf,
                lg10wf, lex_neb_num, lex_neb_freq, ptan, ptaf, cow_wf,
                next_word, next_wf, bigram_f, prev_word, prev_wf,
                prev_bigram_f, num_syl, word_stress
            ])
            print(word, word_pos, type_of_s)
    return [
        ",".join([f_path, chan, from_time, to_time,
                  str(oov), tier] + ol) + "\n" for ol in output_lines
    ] if len(output_lines) != 0 else []
Example #50
def convert_hex_to_base64(data):
    return codecs.encode(codecs.decode(data, 'hex'), 'base64').decode().rstrip()
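Chaining the two codec calls converts hex straight to base64: decode to raw bytes with the hex codec, re-encode with the base64 codec, then rstrip() the trailing newline the base64 codec appends. A small usage check:

import codecs

def convert_hex_to_base64(data):
    # same as above, repeated so the check is self-contained
    return codecs.encode(codecs.decode(data, 'hex'), 'base64').decode().rstrip()

assert convert_hex_to_base64('48656c6c6f') == 'SGVsbG8='   # '48656c6c6f' is hex for b'Hello'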
Example #51
 def unhex(s):
     return codecs.decode(s, 'hex')