コード例 #1
0
ファイル: fasta.py プロジェクト: ptraverse/gsc
class FastaFileCls: #{
  """Iterator over the records of a multi-sequence fasta file.

  Yields one SequenceCls per ">"-headed record.
  NOTE: Python 2 iterator protocol (next() method, "except X, e" syntax).
  """
  def __init__(self, path, fail_msg="cannot open fasta file",
      log_info=None, line_delim="", maintain_case=False): #{
    self.file = FileBoxCls(path, "r", fail_msg)
    # line_delim is inserted between physical lines when a multi-line
    # sequence is concatenated; by default lines are joined directly
    self.line_delim = line_delim
    # look-ahead buffer: the last line read but not yet consumed
    self.curr_line = None
    self.log_info = log_info
    # set once the underlying file raises StopIteration
    self.finished = False
    # when False, sequences are upper-cased before being returned
    self.maintain_case = maintain_case
  #} end def

  def __del__(self): #{
    # make sure the underlying file handle is released
    self.file.Close()
  #} end def

  def __iter__(self): #{
    # this object is its own iterator
    return self
  #} end def

  def next(self): #{
    # Return the next sequence record.
    # Raises StopIteration at end of file, and FastaError when a record
    # does not begin with a ">" id line.
    if (self.finished): #{
      raise StopIteration
    #} end if
    new_seq = None
    try: #{
      if (None == self.curr_line): #{
        # first call: prime the look-ahead with the first id line
        self.curr_line = self.file.next()
      #} end if
      if (not self.curr_line.startswith(">")): #{
        raise FastaError("improperly formatted fasta file: sequence id line "
          "must begin with \">\": \"%s\"." % self.curr_line)
      #} end if
      # split the id line into the id proper and any trailing description
      if (" " in self.curr_line): #{
        (seq_id, seq_extra) = self.curr_line.lstrip(">").split(" ", 1)
      else:
        seq_id = self.curr_line.lstrip(">")
        seq_extra = None
      #} end if
      new_seq = SequenceCls(seq_id, seq_extra)
      self.curr_line = self.file.next()
      # accumulate sequence lines until the next record's id line
      while (not self.curr_line.startswith(">")): #{
        if ("" != new_seq.sequence): #{
          new_seq.sequence += self.line_delim
        #} end if
        new_seq.sequence += self.curr_line
        try: #{
          self.curr_line = self.file.next()
        except StopIteration:
          # end of file: return this record now, stop on the next call
          self.finished = True
          break
        #} end try
      #} end while
      if (not self.maintain_case): #{
        new_seq.sequence = new_seq.sequence.upper()
      #} end if
      return new_seq
    except StopIteration, e:
      # the file ended while looking for a record: nothing more to yield
      self.finished = True
      raise e
コード例 #2
0
ファイル: realigner.py プロジェクト: ptraverse/gsc
 def CreateQueryFile(self): #{
   """Write the sequences of contigs involved in potential predictions
   to the query file used for realignment.

   Reads the full contig sequence file (alternating id/sequence line
   pairs), copies entries whose contig id appears in self.contigs, and
   records the (lower-cased) sequence on each matching contig object.

   Raises RealignerError if an id line is malformed or no contigs of
   interest are found.
   """
   LogMsg(self, "Creating query file...")
   query_file = FileBoxCls(self.query_path, "w", "cannot create query "
     "contig sequences file")
   all_contigs_file = FileBoxCls(self.options.ctg_seq_path, "r",
     "cannot read contig sequences file")
   seqs_found = False
   num_written = 0
   for id_line in all_contigs_file: #{
     # the sequence file is pairs of lines: ">id ..." then the sequence
     seq_line = all_contigs_file.next()
     if (not id_line.startswith(">")): #{
       # fixed typo in error message ("sequece" -> "sequence")
       raise RealignerError("invalid contig id line in sequence file:\n%s" %
         id_line)
     #} end if
     # extract the contig id from the line
     ctg_id = id_line.lstrip(">").split()[0]
     # if the contig is represented in one of the potential predictions
     if (ctg_id in self.contigs): #{
       # write it to the query file
       query_file.WriteLine(id_line)
       query_file.WriteLine(seq_line)
       seqs_found = True
       num_written += 1
       self.contigs[ctg_id].written = True
       self.contigs[ctg_id].sequence = seq_line.lower()
       self.missing.discard(ctg_id)
     #} end if
   #} end for
   if (not seqs_found): #{
     raise RealignerError("could not find any contig sequences in %s" %
       self.options.ctg_seq_path)
   #} end if
   if (num_written != len(self.contigs)): #{
     LogMsg(self, "WARNING: only wrote %i of %i contig sequences! " %
       (num_written, len(self.contigs)) + "Missing: %s" %
       ",".join(sorted(self.missing)))
   #} end if
   all_contigs_file.Close()
   query_file.Close()
コード例 #3
0
ファイル: integrate.py プロジェクト: ptraverse/gsc
class R2CResultsFileCls: #{
  def __init__(self, path, log_info=None): #{
    """Open a read-to-contig support results file for integration."""
    self.log_info = log_info
    self.integrated = False
    self.curr_member = None
    # open the results file, with a clear message if that fails
    self.file = FileBoxCls(path, "r",
      "cannot open read-to-contig support results file")
  #} end def

  def __del__(self): #{
    # make sure the results file gets closed on garbage collection
    self.file.Close()
  #} end def

  def BeforeGroup(self, group_id): #{
    if (self.integrated): #{
      return False
    #} end if
    if (None == self.curr_member or self.curr_member.group_id < group_id): #{
      return True
    #} end if
    return False
  #} end def

  def GroupIsCurrent(self, group_id): #{
    if (self.integrated or None == self.curr_member): #{
      return False
    #} end if
    if (self.curr_member.group_id == group_id): #{
      return True
    #} end if
    return False
  #} end def

  def GetMember(self): #{
    """Read the next member's support line into self.curr_member.

    On end of file: marks the file fully integrated, clears
    curr_member, and closes the underlying file.
    """
    if (self.integrated): #{
      DebugMsg(self, "Not getting member, file already fully integrated.")
      return
    #} end if
    DebugMsg(self, "Getting member...")
    try: #{
      member_line = self.file.next()
      # create a new member from the current line, store it as "curr_member"
      # NOTE(review): split(" ") assumes exactly one space per line -- verify format
      (member_id, support_list) = member_line.split(" ")
      self.curr_member = R2CMemberCls(member_id, log_info=self.log_info)
      # store support values
      self.curr_member.InitializeSupport(support_list)
      DebugMsg(self, "New member: %s" % self.curr_member.DebugString())
    except StopIteration:
      DebugMsg(self, "Integrated all support from %s" % self.file.path)
      self.curr_member = None
      self.integrated = True
      self.file.Close()
      return
コード例 #4
0
ファイル: gtf_parser.py プロジェクト: ptraverse/gsc
class GTFAnnotationParserCls: #{
  def __init__(self, input_path, log_info=None): #{
    """Set up a GTF annotation parser over input_path."""
    self.curr_feature = None
    self.finished = False
    self.log_info = log_info
    # open the annotations file up front so errors surface immediately
    self.file = FileBoxCls(input_path, "r",
      "cannot read gene annotations input file")
  #} end def

  def __del__(self): #{
    # release the annotations file when the parser is garbage-collected
    # (close() is presumably defined later in this class -- not visible here)
    self.close()
  #} end def

  def __iter__(self): #{
    # this parser is its own iterator
    return self
  #} end def

  def next(self): #{
    """Return the next complete transcript built from GTF features.

    Consecutive features sharing one transcript name are folded into a
    single GTFTranscriptCls; raises StopIteration once exhausted.
    """
    if (self.finished): #{
      raise StopIteration
    #} end if
    transcript = None
    try: #{
      if (None == self.curr_feature): #{
        # prime the look-ahead feature on the first call
        self.ParseFeature()
      #} end if
      transcript = GTFTranscriptCls(name=self.curr_feature.name)
      # absorb features until one belongs to a different transcript
      while (transcript.name == self.curr_feature.name): #{
        transcript.Update(self.curr_feature)
        self.ParseFeature()
      #} end while
    except StopIteration:
      # file ended; return whatever transcript was in progress (if any)
      self.finished = True
    #} end try
    if (None == transcript): #{
      raise StopIteration
    #} end if
    transcript.CreateExonList()
    return transcript
  #} end def

  def ParseFeature(self): #{
    """Read one line and store it as self.curr_feature.

    On end of file, clears curr_feature and re-raises StopIteration.
    """
    #ExtremeDebugMsg(self, "  Parsing feature from file...")
    try: #{
      line = self.file.next()
    except StopIteration, e:
      self.curr_feature = None
      raise e
    #} end try
    # split the tab-delimited line and build a feature from the tokens
    tokenizer = TokenizerCls(line, delimiter="\t", log_info=self.log_info)
    self.curr_feature = GTFFeatureCls(tokenizer)
コード例 #5
0
ファイル: read_simulator.py プロジェクト: ptraverse/gsc
 def GenerateEventReads(self):  # {
     """Simulate paired-end reads for every event sequence.

     For each fasta record in the event-sequence file, draws coverage
     values from the event-coverage file until the sequence is covered,
     simulating read pairs for each value, then records the final pair
     count and coverage per sequence.
     """
     LogMsg(self, "Generating event reads...")
     start = time.time()
     # NOTE(review): second positional arg of FastaFileCls is fail_msg
     seq_file = FastaFileCls(self.options.eseq_path, "cannot read sequences file")
     npairs_file = FileBoxCls(self.options.enreads_path, "w", "cannot create event read counts file")
     cov_file = FileBoxCls(self.options.ecov_path, "r", "cannot read event coverages file")
     # number of sequences from which reads were actually simulated
     nseqs_sim = 0
     for seq_obj in seq_file:  # {
         # cannot simulate reads from sequences shorter than one fragment
         if len(seq_obj) <= self.options.frag_length:  # {
             LogMsg(
                 self,
                 "Sequence %s shorter than fragment length: "
                 "%i < %i" % (seq_obj.id, len(seq_obj), self.options.frag_length),
             )
             continue
         # } end if
         nseqs_sim += 1
         seq_obj.covered = False
         # assumes SimulateReads eventually sets seq_obj.covered -- TODO confirm
         while not seq_obj.covered:  # {
             cov_line = cov_file.next()
             # coverage = float(cov_line) + self.options.cov_adjust
             coverage = float(cov_line)
             # convert coverage to a read count, then to whole pairs
             nreads = coverage * (float(len(seq_obj)) / float(self.options.read_length))
             npairs = IntFloor(float(nreads) / 2.0)
             if 1 > npairs:  # {
                 ExtremeDebugMsg(self, "    coverage %.3f too low, no reads." % coverage)
                 continue
             # } end if
             # coverage = nreads * self.options.read_length / len(seq_obj)
             self.SimulateReads(seq_obj, npairs, "e")
         # } end while
         # npairs/coverage carry the values from the final while iteration
         npairs_file.WriteLine("%s %i %f" % (seq_obj.id, npairs, coverage))
     # } end for
     cov_file.Close()
     npairs_file.Close()
     seq_file.Close()
     LogMsg(self, "Simulated reads from %i event sequences" % nseqs_sim)
     LogMsg(self, "Time spent generating event reads: %s" % TimeSpent(start))
コード例 #6
0
ファイル: with_tophat_fusion.py プロジェクト: ptraverse/gsc
class TopHatFileCls:  # {
    def __init__(self, path, log_info=None):  # {
        """Open a TopHat-Fusion results file for record iteration."""
        self.log_info = log_info
        # fail loudly if the results file cannot be opened
        self.file = FileBoxCls(path, "r", "cannot read TopHat-Fusion results file")

    # } end def

    def __del__(self):  # {
        # ensure the results file is closed on garbage collection
        self.close()

    # } end def

    def __iter__(self):  # {
        # the reader is its own iterator
        return self

    # } end def

    def next(self):  # {
        """Parse one TopHat-Fusion record (six physical lines).

        Record layout: an "allAtOnce" breakpoint line, two sequence
        lines, one score line, one gene-id line, and a final line that
        is discarded.
        """
        readline = self.file.next
        # the first line contains the breakpoint coordinates
        tophat_event = TopHatEventCls(readline())
        # the next two lines should be "sequence" lines
        for _ in range(2):  # {
            tophat_event.CheckSeqLine(readline())
        # } end for
        # the next line should be... scores?
        tophat_event.CheckScoreLine(readline())
        # the next line should have the gene ids
        tophat_event.ParseGenesLine(readline())
        # skip the final line
        readline()
        return tophat_event

    # } end def

    def close(self):  # {
        # close only if the file attribute exists, is set, and is still open
        if hasattr(self, "file") and None != self.file and not self.file.closed:  # {
            self.file.close()
コード例 #7
0
class CandidateGroupParserCls: #{
  def __init__(self, data_file_path, keep_lines=False, check_data=False): #{
    """Open a candidate group file and prepare a group parser for it."""
    CheckFilePath(data_file_path, "candidate group file")
    self.group_parser = GroupParserCls(keep_lines=keep_lines)
    self.check_data = check_data
    self.groups = list()
    # open the data file, with a clear message if that fails
    self.data_file = FileBoxCls(data_file_path, "r", "cannot open data file")
  #} end def

  def __del__(self): #{
    # invoked on garbage collection
    # close data file if it is open
    self.CloseDataFile()
  #} end def

  def __iter__(self): #{
    # this parser is its own iterator (see next())
    return self
  #} end def

  # Load the entire data file into memory
  # Do not mix with using GetNextGroup() method
  def ParseDataFile(self): #{
    #self.OpenDataFile()
    for group_line in self.data_file: #{
      #group_line = CleanLine(group_line)
      # skip blank lines
      #if ("" == group_line): #{
      #  continue
      #} end if
      self.groups.append(self.group_parser.ParseGroup \
        (group_line, self.data_file, check_data=self.check_data))
    #} end for
    self.CloseDataFile()
    return self.groups
  #} end def

  # Load a single group from the data file into memory
  # Do not mix with using ParseDataFile() method
  def GetNextGroup(self): #{
    # alias for next(), for callers preferring the explicit method name
    return self.next()
  #} end def

  def next(self): #{
    #if (None == self.data_file): #{
    #  self.OpenDataFile()
    #} end if
    group_line = ""
    # skip blank lines
    while ("" == group_line): #{
      #group_line = CleanLine(self.data_file.next())
      group_line = self.data_file.next()
    #} end if
    return self.group_parser.ParseGroup \
      (group_line, self.data_file, check_data=self.check_data)
  #} end def

  def Close(self): #{
    # public close method; delegates to CloseDataFile
    self.CloseDataFile()
  #} end def

  def CloseDataFile(self): #{
    if (not hasattr(self, "data_file")): #{
      return
    #} end if
    if (None == self.data_file): #{
      return
    #} end if
    if (self.data_file.closed): #{
      return
    #} end if
    self.data_file.close()
    #self.data_file = None
  #} end def

  def close(self): #{
    # lower-case alias so this parser matches the file-like interface
    self.CloseDataFile()
  #} end def

  def GroupLine(self): #{
    if (not self.group_parser.keep_lines): #{
      raise CandidateGroupParserError \
        ("cannot get group line when keep_lines flag was not set")
    #} end if
    return self.group_parser.group_line
  #} end def

  def MemberLines(self): #{
    """Return the stored raw member lines (requires keep_lines=True)."""
    if (not self.group_parser.keep_lines): #{
      raise CandidateGroupParserError \
        ("cannot get member lines when keep_lines flag was not set")
    #} end if
    return self.group_parser.member_lines
コード例 #8
0
ファイル: predict_events.py プロジェクト: ptraverse/gsc
class EventPredictionCls: #{
  def __init__(self, options): #{
    """Set up event prediction: predictors, realignment check, logging."""
    SetupMainClass(self, options)
    # default "realign" to False for callers that never set it
    if (not hasattr(self.options, "realign")): #{
      self.options.realign = False
    #} end if
    if (self.options.realign): #{
      CheckConfigCommands(self, "blat")
    #} end if
    # build one predictor per requested event type, keyed by predictor.key
    self.predictors = dict()
    predictor_table = (
      (self.options.predict_fusions, FusionPredictorCls),
      (self.options.predict_ptds, PTDPredictorCls),
      (self.options.predict_itds, ITDPredictorCls),
    )
    for (requested, predictor_class) in predictor_table: #{
      if (requested): #{
        predictor = predictor_class(options, log_info=self.log_info)
        self.predictors[predictor.key] = predictor
      #} end if
    #} end for
    self.use_chr = False
  #} end def

  def __del__(self): #{
    # close this object's log file on teardown
    CloseLogFile(self)
  #} end def

  def PredictEvents(self): #{
    """Run every configured predictor over the candidate groups.

    Optionally realigns supporting contigs and reprocesses predictions
    afterwards; logs prediction counts throughout.
    """
    LogMsg(self, "Predicting events...")
    start = time.time()
    # get the reference gene names, if a path is given
    #self.ref_gene_names = GetGeneNamesFromFile(self.options.gene_names_path,
    #  self.log_info)
    group_parser = CandidateGroupParserCls(self.options.barnacle_path)
    # recheck breakpoint exons
    self.RecheckBreakpointExons(group_parser)
    realigner = None
    if (self.options.realign): #{
      realigner = RealignerCls(self.options, self.log_info)
    #} end if
    # potential_events[bio_type][group_id] = event and gene sets object
    #potential_events = dict([(predictor.key, dict()) for
    #  predictor in self.predictors])
    LogMsg(self, "Processing candidate groups...")
    process_start = time.time()
    for group in group_parser: #{
      # get the breakpoint exons for the group
      self.GetBreakpointExons(group)
      # check whether the event is any biologically typed event
      #self.CheckEvent(group, output_files, lib_info.lib_name, potential_events)
      # attempt to predict events of each specified type
      # from the current candidate group
      for predictor in self.predictors.itervalues(): #{
        good_members = list()
        # remember contigs of accepted members for later realignment
        if (predictor.ProcessGroup(group, good_members) and
            None != realigner): #{
          #realigner.UpdateContigs(group, good_members, predictor.store_seq)
          realigner.UpdateContigs(group, good_members, predictor.key)
        #} end if
      #} end for
    #} end for
    LogMsg(self, "Time spent processing candidate groups: %s" %
      TimeSpent(process_start))
    # warn when gap ("itd") candidates aligned longer than the gap itself
    if ("itd" in self.predictors and
        0 < self.predictors["itd"].num_over_aligned): #{
      LogMsg(self, "WARNING: %i gap candidates have aligned length greater "
        "than gap length!" % self.predictors["itd"].num_over_aligned)
    #} end if
    #if ('event_coords' in output_files): #{
    #  output_files['event_coords'].Close()
    #  self.RecheckExonOverlap(output_files, potential_events, lib_info.lib_name)
    #} end if
    # realign and reprocess, when realignment was requested and useful
    if (None != realigner and 0 < len(realigner.contigs)): #{
      realigner.RealignContigs()
      LogMsg(self, "Before realignment:")
      for predictor in self.predictors.itervalues(): #{
        LogMsg(self, "  Number of %s predictions: %i" %
          (predictor.description, predictor.num_predictions))
        if (0 == predictor.num_predictions): #{
          continue
        #} end if
        if ("itd" in predictor.key or "fusion" in predictor.key): #{
          predictor.LoadTranscriptSequences(realigner.contigs)
        #} end if
        predictor.ReprocessPredictions(realigner.contigs)
        #predictor.ReprocessPredictions(realigner.contigs,
        #  realigner.contig_seqs)
      #} end for
      LogMsg(self, "%s\nAfter realignment:" % ("-"*40))
    #} end if
    for predictor in self.predictors.itervalues(): #{
      LogMsg(self, "Number of %s predictions: %i" %
        (predictor.description, predictor.num_predictions))
    #} end for
    LogMsg(self, "Time spent predicting events: %s" % TimeSpent(start))
  #} end def

  #def CreateOutputFiles(self, input_path): #{
  #  input_file_name = os.path.basename(input_path)
  #  input_root = os.path.splitext(input_file_name)[0]
  #  output_files = dict()
  #  # setup the coordinates file for rechecking exon overlaps
  #  self.SetupEventCoordsFile(input_root, output_files)
  #  return output_files
  #} end def

  def RecheckBreakpointExons(self, group_parser): #{
    if (None == self.options.breakpoint_exons): #{
      self.overlaps_file = None
      return
    #} end if
    if (self.options.use_existing_group_coords): #{
      LogMsg(self, "Using existing group coordinates file.")
    else:
      group_coords_file = self.CreateGroupCoordsFile()
      for group in group_parser: #{
        #ExtremeDebugMsg(self, "Writing coordinates for group %i" % group.id)
        self.WriteGroupCoords(group, group_coords_file)
      #} end for
      group_parser.Close()
      group_coords_file.Close()
    #} end if
    self.CreateOverlapsFile()
  #} end def

  #def SetupEventCoordsFile(self, input_root, output_files): #{
  def CreateGroupCoordsFile(self): #{
    """Open and return the group coordinates output file.

    Also decides whether chromosome names should carry a "chr" prefix,
    based on the exon coordinates file.
    """
    self.use_chr = ShouldChromUseChr(1, self.options.breakpoint_exons,
      "exon coordinates", self.log_info)
    coords_file = FileBoxCls(self.options.group_coords_path, "w",
      "cannot create event coordinates file")
    return coords_file
  #} end def

  #def WriteEventCoords(self, event, group_coords_file): #{
  def WriteGroupCoords(self, event, group_coords_file): #{
    for member in event.members: #{
      if (member.gap): #{
        # write gap event coordinates
        self.WriteGapGroupCoords(member, group_coords_file)
      else:
        # write split event coordinates
        self.WriteSplitGroupCoords(member, group_coords_file)
      #} end if
    #} end for
  #} end def

  def WriteGapGroupCoords(self, member, group_coords_file): #{
    """Write the (single) genomic region of a gap-type member."""
    # normalize start/end order, since alignments can be reversed
    region_start, region_end = sorted((member.align_info_B.genome_start,
      member.align_info_B.genome_end))
    gap_coords = GroupCoordsCls(member.align_info_B.chrom, region_start,
      region_end, "%sA" % member.IDString(), self.use_chr)
    group_coords_file.WriteLine("%s" % gap_coords.ToString())
  #} end def

  def WriteSplitGroupCoords(self, member, group_coords_file): #{
    """Write a buffered region around each side of a split alignment."""
    buffer_size = self.options.event_buffer
    # side "A" anchors on its alignment's end, side "B" on its start
    regions = (
      ("A", member.align_info_A.chrom, member.align_info_A.genome_end),
      ("B", member.align_info_B.chrom, member.align_info_B.genome_start),
    )
    for (label, chrom, anchor) in regions: #{
      split_coords = GroupCoordsCls(chrom, anchor - buffer_size,
        anchor + buffer_size, "%s%s" % (member.IDString(), label),
        self.use_chr)
      group_coords_file.WriteLine("%s" % split_coords.ToString())
    #} end for
  #} end def

  #def RecheckExonOverlap(self, output_files, potential_events, lib_name): #{
  #  LogMsg(self, "Rechecking exon overlap...")
  #  start = time.time()
  #  # run overlap code
  #  overlaps_path = self.RunOverlapCode(output_files['group_coords'].path)
  #  try: #{
  #    # parse overlap code output
  #    self.ParseOverlapResults(overlaps_path, potential_events)
  #  except ACEventGroupError, e:
  #    raise EventPredictionError("error parsing overlap file: %s" % e)
  #  #} end try
  #  self.ProcessPotentialEvents(potential_events, output_files, lib_name)
  #  LogMsg(self, "Time spent rechecking exon overlaps: %s" % TimeSpent(start))
  #} end def

  def CreateOverlapsFile(self): #{
    """Produce (or reuse) the breakpoint/transcript overlaps file and
    open it for reading as self.overlaps_file."""
    if (not self.options.use_existing_overlaps): #{
      LogMsg(self, "Running overlap code...")
      # flush the log first: the overlap code may take a while
      log_file = getattr(self, "log_file", None)
      if (None != log_file): #{
        log_file.Flush()
      #} end if
      start = time.time()
      RunOverlapCode(self.options.breakpoint_exons,
        self.options.group_coords_path, self.options.overlaps_path,
        dpt=self.options.dpt)
      LogMsg(self, "Time spent running overlap code: %s" % TimeSpent(start))
    else:
      LogMsg(self, "Using existing breakpoint/transcript overlaps file.")
    #} end if
    self.overlaps_file = FileBoxCls(self.options.overlaps_path, "r",
      "cannot read exon/group overlaps file")
  #} end def

  def GetBreakpointExons(self, group): #{
    """Attach breakpoint-overlapping exons to the group's members.

    Advances through the overlaps file (assumed ordered by group id)
    until reaching this group's overlaps, then applies each one whose
    member id belongs to the group.
    """
    if (not hasattr(self, "overlaps_file") or None == self.overlaps_file): #{
      return
    #} end if
    ExtremeDebugMsg(self, "Getting breakpoint exons for group %i" % group.id)
    # clear any previous breakpoint genes
    group.ClearBPGenes()
    if (not hasattr(self, "curr_overlap")): #{
      self.curr_overlap = None
    #} end if
    # skip overlaps for groups that come before the current group
    while (None == self.curr_overlap or
        self.curr_overlap.group_id < group.id): #{
      try: #{
        self.GetNextExonOverlap()
      except StopIteration:
        # overlaps file exhausted: nothing for this (or any later) group
        return
      #} end try
    #} end while
    # create a dictionary of the members of the current group
    members_dict = dict()
    for member in group.members: #{
      members_dict[member.candidate_id] = member
    #} end for
    # get all overlaps for the current group
    while (self.curr_overlap.group_id == group.id): #{
      if (self.curr_overlap.member_id in members_dict): #{
        self.AddBreakPointGene(members_dict[self.curr_overlap.member_id])
      #} end if
      try: #{
        self.GetNextExonOverlap()
      except StopIteration:
        return
      #} end try
    #} end while
  #} end def

  def GetNextExonOverlap(self): #{
    """Advance self.curr_overlap to the next line of the overlaps file."""
    overlaps_file = getattr(self, "overlaps_file", None)
    if (None == overlaps_file): #{
      ExtremeDebugMsg(self, "Setting current overlap to \"None\".")
      self.curr_overlap = None
      return
    #} end if
    self.curr_overlap = ExonOverlapCls(overlaps_file.next())
    ExtremeDebugMsg(self, "Current overlap = G%i%s r%s exons: %s" %
      (self.curr_overlap.group_id, self.curr_overlap.member_id,
      self.curr_overlap.region_id, ",".join(self.curr_overlap.exons)))
  #} end def

  def AddBreakPointGene(self, member): #{
    """Record the current overlap's exons as breakpoint genes on member.

    Raises EventPredictionError when the overlap does not actually
    belong to this member.
    """
    if (None == self.curr_overlap): #{
      return
    #} end if
    # sanity-check that the overlap matches this member's group and id
    if (self.curr_overlap.group_id != member.group_id): #{
      raise EventPredictionError("Group ID: %i does not match overlap ID: %i" %
        (member.group_id, self.curr_overlap.group_id))
    #} end if
    if (self.curr_overlap.member_id != member.candidate_id): #{
      raise EventPredictionError("Candidate ID: %s " % member.candidate_id +
        "does not match overlap ID: %s" % self.curr_overlap.member_id)
    #} end if
    member.AddGenes("breakpoint_%s" % self.curr_overlap.region_id,
      self.curr_overlap.exons)
コード例 #9
0
ファイル: annotation.py プロジェクト: ptraverse/gsc
class GeneAnnotationParserCls: #{
  def __init__(self, path, type=None, log_info=None): #{
    """Choose an annotation parsing strategy for the given file.

    Either a dedicated parser class (PARSERS) or a per-line parse
    function (PARSE_FUNCTIONS) is selected from the annotations type;
    raises GeneAnnotationError when neither applies.
    """
    if (None == type): #{
      # "type" shadows the builtin, but is kept for interface compatibility
      type = GetAnnotationsType(path)
    #} end if
    if (type in PARSERS): #{
      # a dedicated parser object handles the whole file
      self.parser = PARSERS[type](path, log_info=log_info)
      self.file = None
      self.ParseLine = None
    elif (type in PARSE_FUNCTIONS): #{
      # otherwise read the file here and parse it line by line
      self.parser = None
      self.file = FileBoxCls(path, "r",
        "cannot open %s annotations file" % type)
      self.ParseLine = PARSE_FUNCTIONS[type]
    else:
      raise GeneAnnotationError("cannot determine correct annotation parser "
        "from annotations type: %s" % type)
    #} end if
    self.log_info = log_info
    self.finished = False
  #} end def

  def __del__(self): #{
    # release any underlying resources on garbage collection
    self.close()
  #} end def

  def __iter__(self): #{
    #if (None == self.parser): #{
    #  return self
    #else:
    #  return self.parser
    #} end if
    return self
  #} end def

  def next(self): #{
    if (self.finished): #{
      raise StopIteration
    #} end if
    #ExtremeDebugMsg(self, "Parsing annotation from file...")
    transcript = None
    try:
      if (None == self.parser): #{
        #ExtremeDebugMsg(self, "Using ParseLine function...")
        line = self.file.next()
        transcript = self.ParseLine(line)
      else:
        #ExtremeDebugMsg(self, "Using internal parser...")
        transcript = self.parser.next()
      #} end if
    except StopIteration:
      self.finished = True
    #} end try
    if (None == transcript): #{
      raise StopIteration
    #} end if
    transcript.gene_name = transcript.alias.replace(" ","_")
    transcript.transcript_id = transcript.name.replace(" ","_")
    #ExtremeDebugMsg(self, "Parsing transcript: %s (%s)" %
    #  (transcript.gene_name, transcript.transcript_id))
    return transcript
  #} end def

  def Close(self): #{
    for attr in ["file", "parser"]: #{
      if (hasattr(self, attr) and None != getattr(self, attr)): #{
        getattr(self, attr).close()
      #} end if
    #} end for
  #} end def

  def close(self): #{
    # lower-case alias so this parser matches the file-like interface
    self.Close()