    # Log errors from this crawler to its own log file.
    logging.basicConfig(filename=crawler_id + '.log', level=logging.ERROR)

    stream_url = parser.get(crawler_config, 'stream_url')

    trackTerms = []

    # Load the optional list of track terms from a JSON terms file.
    if parser.has_option(crawler_config, 'terms_file'):
        with open(parser.get(crawler_config, 'terms_file'), 'r') as terms_file:
            trackTerms = json.loads(terms_file.read())['terms']

    while True:
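        # Each iteration streams into a fresh timestamped output file.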

        # Name the output file after the current date and this crawler's config section.
        currentDate = datetime.now()
        dateStr = date_to_fname_string(currentDate, crawler_config.lower())
        output = codecs.open(dataDirectory + '/' + dateStr + '.txt',
                             encoding='utf-8', mode='w+')

        # Point a fresh OutputFileHandler at the new file.
        ofh = OutputFileHandler()
        ofh.set(output)

        try:
            # Stream for time_per_file seconds, then stop and close the output
            # file before the loop rotates to a new one.
            stream = Stream(stream_url, username, password, on_receive,
                            initial_params=trackTerms, filter_type=crawler_type)
            print 'Stream Start'
            stream.start()
            time.sleep(time_per_file)
            stream.stop()
            print 'Stream Stop'
            ofh.close()
        except Exception as err: