def __init__(self, sequence, mass=None, missed_cleavages=0, length=None, unordered=False):
    if mass is None:
        # Derive mass and length from the sequence when they are not supplied.
        if isinstance(sequence, str):
            # Parse the string form: compute its mass (subtracting the
            # precursor mass shift) and count its residue tokens.
            mass = sequence_to_mass(str(sequence)) - precursor_mass_shift
            length = len(sequence_tokenizer(sequence)[0])
        else:
            # Otherwise treat the sequence as an iterable of residues and
            # sum their individual masses.
            mass = sum(map(masser, sequence))
            length = len(sequence)
    self.sequence = sequence
    self.mass = mass
    self.missed_cleavages = missed_cleavages
    self.length = length
    self.unordered = unordered
def clean_tokenizer(seq):
    # Tokenize the sequence string and keep only the residue tokens,
    # discarding modifications, glycan composition, and terminal groups.
    tokens, mods, glycan, n_term, c_term = sequence.sequence_tokenizer(seq)
    return tokens
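
# A minimal usage sketch, assuming the __init__ above belongs to a record
# class (here called SequenceRecord, a hypothetical name) and that
# sequence_to_mass, sequence_tokenizer, and masser are importable from this
# package; it is illustrative only, not the library's documented API.
#
#   record = SequenceRecord("PEPTIDE", missed_cleavages=1)
#   record.mass    # mass computed from the string, precursor shift removed
#   record.length  # number of residue tokens in the sequence
#
#   tokens = clean_tokenizer("PEPTIDE")
#   tokens         # residue tokens only; mods, glycan, termini discarded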