Exemplo n.º 1
0
    def download_and_cache(self):
        """Download the XML EPG data from ``CONF_URL`` and feed it to the
        SGML parser.

        Removes stale cache files first, retries the HTTP download up to
        ``HTTP_ERROR_RETRY`` times (waiting ``HTTP_ERROR_WAIT_RETRY`` seconds
        between attempts) and aborts the script with exit code 1 when every
        attempt fails.
        """
        self.log.log("--- START DOWNLOAD AND CACHE DATA ---")
        self.log.log2video_status("STARTING DOWNLOAD")

        self.log.log("Removing old cached files")
        scriptlib.cleanup_oldcachedfiles(self.CONF_CACHEDIR,
                                         self.FIELD_SEPARATOR)

        self.log.log("Start download XML data from \'%s\'" % self.CONF_URL)
        self.log.log2video_status("downloading XML data ...")

        i = self.HTTP_ERROR_RETRY
        while i > 0:
            sock = None
            try:
                sock = urllib2.urlopen(self.CONF_URL)
                data = sock.read()
            except IOError as e:
                # Build a human-readable reason from whatever attributes the
                # exception happens to carry (URLError vs HTTPError).
                serr = "unknown"
                if hasattr(e, 'reason'):
                    serr = str(e.reason)
                elif hasattr(e, 'code'):
                    serr = str(e.code)
                if hasattr(e, 'msg'):
                    serr += " , " + str(e.msg)

                self.log.log("\'" + self.CONF_URL +
                             "\' connection error. Reason: " + serr +
                             ". Waiting " + str(self.HTTP_ERROR_WAIT_RETRY) +
                             " sec. and retry [" + str(i) + "] ...")
                time.sleep(self.HTTP_ERROR_WAIT_RETRY)  # add sleep
                i -= 1
            else:
                i = -99  # sentinel: download succeeded, leave retry loop
            finally:
                # BUGFIX: always close the socket. The original closed it only
                # in the success branch, leaking the connection when
                # sock.read() raised after a successful urlopen().
                if sock is not None:
                    sock.close()

        if (i != -99):
            # retries exhausted without a successful download
            self.log.log("Cannot retrieve data from \'" + self.CONF_URL +
                         "\'. Abort script")
            self.log.log2video_status("ERROR: cannot download XML data, abort")
            time.sleep(5)
            sys.exit(1)

        self.log.log("end download XML data, now processing")
        self.log.log2video_status("processing XML data, wait ...")

        # replace malformed single end tag <.../> as <...> (SGML doesn't like "/>")
        data = data.replace('/>', '></programma>')

        # set max 'id' occurencies
        self.SGML_PBAR_MAXVALUE = data.count('idref="')

        # start SGMLLIB parsing
        self.log.log2video_pbar_on()
        self.log.log2video_pbar(0)
        self.parse(data)
        self.log.log2video_pbar(0)
        self.log.log2video_pbar_off()

        self.log.log("end process XML data")
Exemplo n.º 2
0
	def download_and_cache(self):
		"""Download the XML EPG data from ``CONF_URL`` with retries.

		NOTE(review): this snippet appears truncated by the scraper - it ends
		inside the retry loop; the code that consumes ``data`` after a
		successful download is not visible here.
		"""
		self.log.log("--- START DOWNLOAD AND CACHE DATA ---")
		self.log.log2video_status("STARTING DOWNLOAD")

		self.log.log("Removing old cached files")
		scriptlib.cleanup_oldcachedfiles(self.CONF_CACHEDIR, self.FIELD_SEPARATOR)


		self.log.log("Start download XML data from \'%s\'" % self.CONF_URL )
		self.log.log2video_status("downloading XML data ...")

		# Retry the download up to HTTP_ERROR_RETRY times.
		i = self.HTTP_ERROR_RETRY
		while i > 0:
			try:
				sock = urllib2.urlopen(self.CONF_URL)
				data = sock.read()
			except IOError, e:
				# Build a readable reason from whichever attributes the
				# exception carries (URLError vs HTTPError).
				serr = "unknown"
				if hasattr(e, 'reason'):
					serr = str(e.reason)
				elif hasattr(e, 'code'):
					serr = str(e.code)
				if hasattr(e, 'msg'):
					serr += " , " + str(e.msg)

				self.log.log("\'" + self.CONF_URL + "\' connection error. Reason: "+serr+". Waiting "+str(self.HTTP_ERROR_WAIT_RETRY)+" sec. and retry ["+str(i)+"] ...")
				time.sleep(self.HTTP_ERROR_WAIT_RETRY) # add sleep
				i -= 1

			else:
				# -99 is a sentinel meaning "download succeeded".
				i = -99
				sock.close()
Exemplo n.º 3
0
    def download_and_cache(self):
        """Download the XML EPG data from ``CONF_URL`` with retries.

        NOTE(review): this snippet appears truncated by the scraper - it ends
        inside the retry loop; the code that consumes ``data`` after a
        successful download is not visible here.
        """
        self.log("--- START DOWNLOAD AND CACHE DATA ---")
        self.log2video("STARTING DOWNLOAD")

        self.log("Removing old cached files")
        scriptlib.cleanup_oldcachedfiles(self.CONF_CACHEDIR,
                                         self.FIELD_SEPARATOR)

        self.log("Start download XML data from \'" + self.CONF_URL + "\'")
        self.log2video("downloading XML data ...")

        # Retry the download up to HTTP_ERROR_RETRY times.
        i = self.HTTP_ERROR_RETRY
        while i > 0:
            try:
                sock = urllib2.urlopen(self.CONF_URL)
                data = sock.read()
            except IOError, e:
                # Build a readable reason from whichever attributes the
                # exception carries (URLError vs HTTPError).
                serr = "unknown"
                if hasattr(e, 'reason'):
                    serr = str(e.reason)
                elif hasattr(e, 'code'):
                    serr = str(e.code)
                if hasattr(e, 'msg'):
                    serr += " , " + str(e.msg)

                self.log("\'" + self.CONF_URL +
                         "\' connection error. Reason: " + serr +
                         ". Waiting " + str(self.HTTP_ERROR_WAIT_RETRY) +
                         " sec. and retry [" + str(i) + "] ...")
                time.sleep(self.HTTP_ERROR_WAIT_RETRY)  # add sleep
                i -= 1

            else:
                # -99 is a sentinel meaning "download succeeded".
                i = -99
                sock.close()
Exemplo n.º 4
0
	def download_and_cache(self):
		"""Download per-channel, per-day HTML EPG pages and write them to
		the local cache as UTF-8 event files.

		For each channel in ``CHANNELLIST`` whose cache option requires a
		download, fetches ``<CONF_URL>/<chid>_<YYYY_MM_DD>.html`` for each
		day in ``DAYCACHE`` (with retries), parses it with ``Titolo_parser``
		and writes one event file per channel/day, updating the on-screen
		progress bar. Exits the script when a channel has no name option.
		"""
		self.log.log("--- START DOWNLOAD AND CACHE DATA ---")
		self.log.log2video_status("STARTING DOWNLOAD")

		self.log.log("Removing old cached files")
		scriptlib.cleanup_oldcachedfiles(self.CONF_CACHEDIR, self.FIELD_SEPARATOR)

		#self.log("Start downloading HTML data from \'%s\'" % self.CONF_URL)

		chlist = self.CHANNELLIST

		# get remote XML files
		#   chid format: channel id , 0|1|2(,new name)
		#   i.e. ("101" , "1,SkyCinema1")
		# Count channels with cache option 1 to scale the progress bar.
		pbar_max = 0
		for c in chlist.keys():
			cacheopt = int(string.split(chlist[c],",")[0])
			if cacheopt == 1:
				pbar_max += 1

		# One progress-bar step per channel per day; pbar_max becomes the
		# percentage weight of a single step.
		# NOTE(review): if no channel has cacheopt == 1 this raises
		# ZeroDivisionError, even though channels with option 2/3 are still
		# processed below - confirm whether that configuration can occur.
		pbar_max *= self.CONF_MAX_DAY_EPG
		pbar_max = 100.0 / pbar_max

		self.log.log2video_pbar_on()
		self.log.log2video_pbar(0)
		pbar_value = 0

		for c in sorted(chlist.keys()):
			self.guidatoday = []
			self.guidatomorrow = []

			# get cache option
			#  0 : don't download/cache
			#  1 : download and cache (optional 1,new_name )
			#  2 : always download overwriting existing files (optional 2,new_name )
			#  3 : always download overwriting existing files only for TODAY (optional 3,new_name )

			cacheopt = int(string.split(chlist[c],",")[0])

			# if cacheopt == 0, do nothing
			if cacheopt == 0:
				continue

			self.log.log2video_status("processing %s" % c)
			# Second CSV field (when present and non-empty) is the
			# user-provided channel name.
			channel_name = ''
			if len(chlist[c].split(",")) > 1 :
				if chlist[c].split(",")[1] != '' :
					# channel renamed, new name provided by user
					channel_name = chlist[c].split(",")[1].strip(' ').lower()

			# if channel name is not present as option, quit with error
			if channel_name == '':
				self.log.log("ERROR ! ID=%s channel name not present" % c)
				sys.exit(1)

			# Optional third CSV field overrides the default provider.
			channel_provider = self.CONF_DEFAULT_PROVIDER
			if len(chlist[c].split(",")) > 2 :
				if chlist[c].split(",")[2] != '' :
					channel_provider = chlist[c].split(",")[2].strip(' ').lower()

			exit_for_loop = False
			for day in self.DAYCACHE:
				if exit_for_loop == True:
					break

				pbar_value += 1
				self.log.log2video_pbar(pbar_value * pbar_max)


				# Remote file name uses YYYY_MM_DD while DAYCACHE entries
				# are YYYYMMDD.
				day_get = time.strftime("%Y_%m_%d",time.strptime(day,"%Y%m%d"))
				xmlfile = "%s_%s" % (c,day_get)

				# download only if file doesn't exist or cacheopt == 2 (always download),
				# using open(...,"w") files will be overwritten (saving a delete + create)

				eventfilename = scriptlib.fn_escape(str(c) + self.FIELD_SEPARATOR + channel_name + self.FIELD_SEPARATOR + day)
				eventfilepath = os.path.join(self.CONF_CACHEDIR, eventfilename)
				if (cacheopt == 1) and os.path.exists(eventfilepath):
					continue
				if (cacheopt == 3) and os.path.exists(eventfilepath) and (day != self.TODAY):
					continue
				if (cacheopt != 1) and (cacheopt != 2) and (cacheopt != 3):
					self.log.log("Warning: unknown cache option %s" % cacheopt)
					exit_for_loop = True
					continue

				self.log.log("Download HTML data from \'%s/%s.html\'" % (self.CONF_URL,xmlfile))
				self.log.log2video_status("downloading %s" % xmlfile)

				# Retry the download up to HTTP_ERROR_RETRY times.
				i = self.HTTP_ERROR_RETRY
				while i > 0  :
					#  wait randomly to avoid overloading website
					time.sleep(random.uniform(self.CONF_RANDOM_MIN, self.CONF_RANDOM_MAX))

					try:
						sock=urllib2.urlopen(self.CONF_URL + '/' + xmlfile + '.html')
						data=sock.read()

					except IOError, e:
						serr="unknown"
						if hasattr(e, 'reason'):
							serr=str(e.reason)
						elif hasattr(e, 'code'):
							serr=str(e.code)
							# NOTE(review): this 'msg' check is nested inside the
							# 'code' branch, unlike sibling implementations where
							# it runs unconditionally - confirm which is intended.
							if hasattr(e, 'msg'):
								serr+=" , "+str(e.msg)

						self.log.log("\'%s?%s\' connection error. Reason: %s. Waiting %d sec. and retry [%d] ..." % (self.CONF_URL,xmlfile, serr, self.HTTP_ERROR_WAIT_RETRY, i))
						time.sleep(self.HTTP_ERROR_WAIT_RETRY) # add sleep
						i -= 1

					else:
						i = 0 # force quit WHILE loop
						sock.close()

						# Keep yesterday's "tomorrow" events: they belong to
						# the day being written now.
						dtparser = Titolo_parser(day)
						dtparser.parse(data)
						self.guida = self.guidatomorrow
						(self.guidatoday, self.guidatomorrow) = dtparser.get_guida()

						# if no data, quit for loop and stop downloading
						if len(self.guidatoday) == 0:
							exit_for_loop = True
							break

						self.guida = self.guida + self.guidatoday

						self.log.log("  writing in cache \'%s\'" % eventfilename)
						# write data in cache file using UTF-8 encoding
						fd = codecs.open(eventfilepath, "w", 'utf-8')
						fd.write(str(c) + self.FIELD_SEPARATOR + channel_name + self.FIELD_SEPARATOR + channel_provider + self.FIELD_SEPARATOR + day + '\n')
						fd.write("Local Time (human readeable)###Unix GMT Time###Event Title###Event Description\n")

						# extract all events and put in eventfile
						for event in self.guida:
							(dataora,titolo,sommario) = event
							event_starttime = dataora
							# time.mktime return Unix time inside GMT timezone
							event_startime_unix_gmt = str(int(time.mktime(time.strptime(event_starttime,"%Y-%m-%d %H:%M"))) - self.DELTA_UTC )
							#event_startime_unix_gmt = str(int(time.mktime(time.strptime(event_starttime,"%Y-%m-%d %H:%M")))  )
							#self.log(event_starttime + " , " + str(self.DELTA_UTC) + " , " + str(int(time.mktime(time.strptime(event_starttime,"%Y-%m-%d %H:%M")))) + " , " + event_startime_unix_gmt )

							# convert remote data (RAI website use UTF-8) in Python Unicode (UCS2)
							event_title = unicode(titolo,self.REMOTE_EPG_CHARSET)
							event_title = event_title.replace('\r','')
							event_title = event_title.replace('\n',u' ')
							event_title = event_title.strip(u' ')

							event_description = unicode(sommario,self.REMOTE_EPG_CHARSET)
							event_description = event_description.replace('\r','')
							event_description = event_description.replace('\n',u' ')
							event_description = event_description.strip(u' ')


							fd.write(event_starttime + self.FIELD_SEPARATOR + event_startime_unix_gmt + self.FIELD_SEPARATOR + event_title + self.FIELD_SEPARATOR + event_description + '\n')

						fd.close()
Exemplo n.º 5
0
    def download_and_cache(self):
        """Download one XML document from ``CONF_URL``, then walk its
        ``giorno``/``canale``/``prg`` elements and write one cached event
        file per channel per day.

        Retries the HTTP download up to ``HTTP_ERROR_RETRY`` times and
        aborts the script when the download or the XML parse fails.
        Channels named ``<id>+1`` are written with a one-hour time shift.
        """
        self.log("--- START DOWNLOAD AND CACHE DATA ---")
        self.log2video("STARTING DOWNLOAD")

        self.log("Removing old cached files")
        scriptlib.cleanup_oldcachedfiles(self.CONF_CACHEDIR,
                                         self.FIELD_SEPARATOR)

        chlist = self.CHANNELLIST

        self.log("Start download XML data from \'" + self.CONF_URL + "\'")
        self.log2video("downloading XML data ...")

        # Retry the download up to HTTP_ERROR_RETRY times.
        i = self.HTTP_ERROR_RETRY
        while i > 0:
            try:
                sock = urllib2.urlopen(self.CONF_URL)
                data = sock.read()
            except IOError as e:
                # Build a readable reason from whichever attributes the
                # exception carries (URLError vs HTTPError).
                serr = "unknown"
                if hasattr(e, 'reason'):
                    serr = str(e.reason)
                elif hasattr(e, 'code'):
                    serr = str(e.code)
                if hasattr(e, 'msg'):
                    serr += " , " + str(e.msg)

                self.log("\'" + self.CONF_URL +
                         "\' connection error. Reason: " + serr +
                         ". Waiting " + str(self.HTTP_ERROR_WAIT_RETRY) +
                         " sec. and retry [" + str(i) + "] ...")
                time.sleep(self.HTTP_ERROR_WAIT_RETRY)  # add sleep
                i -= 1

            else:
                # -99 is a sentinel meaning "download succeeded".
                i = -99
                sock.close()

        if (i != -99):
            # retries exhausted without a successful download
            self.log("Cannot retrieve data from \'" + self.CONF_URL +
                     "\'. Abort script")
            self.log2video("Error: cannot download XML data, abort")
            time.sleep(5)
            sys.exit(1)

        self.log("End download XML data, now processing XML code.")
        self.log2video("preprocessing XML data, wait ...")
        try:
            xmldoc = minidom.parseString(data)
        except:
            self.log(
                "Warning ! Data are not in a valid XML format. Abort script")
            self.log2video("Error: no valid XML data, abort")
            time.sleep(5)
            sys.exit(1)

        self.log("End process XML data")
        self.log2video("end process XML data")

        # days list
        xmlref_giorno = xmldoc.getElementsByTagName('giorno')
        for xml_gg in xmlref_giorno:
            # Skip days that are not in the configured cache window.
            gg = xml_gg.attributes["data"].value
            if gg not in self.DAYCACHEMP:
                continue

            xmlref_canale = xml_gg.getElementsByTagName('canale')
            for xml_ch in xmlref_canale:
                chid = xml_ch.attributes["id"].value.strip(' \n\r').lower()
                if chid not in chlist:
                    self.log(
                        "Warning: new channel \"id=%s name=%s\" found in XML data"
                        % (xml_ch.attributes["id"].value,
                           xml_ch.attributes["description"]))
                    continue

                # Process the channel itself plus its "+1" (time-shifted)
                # variant when one is configured.
                clist = [chid]
                if chid + '+1' in self.CHANNELLIST:
                    clist.append(chid + '+1')

                for c in clist:

                    # get cache option
                    #  0 : don't download/cache
                    #  1 : download and cache (optional 1,new_name )
                    #  2 : always download overwriting existing files (optional 2,new_name )
                    #  3 : always download overwriting existing files only for TODAY (optional 3,new_name )

                    cacheopt = int(chlist[c].split(",")[0])

                    # if cacheopt == 0, do nothing
                    if cacheopt == 0:
                        continue

                    # Second CSV field (when present and non-empty) is the
                    # user-provided channel name.
                    channel_name = ''
                    if len(chlist[c].split(",")) > 1:
                        if chlist[c].split(",")[1] != '':
                            # channel renamed, new name provided by user
                            channel_name = chlist[c].split(",")[1].strip(
                                ' \n\r').lower()

                    # if channel name is not present as option, quit with error
                    if channel_name == '':
                        self.log("ERROR ! ID=%s channel name not present" % c)
                        sys.exit(1)

                    # Optional third CSV field overrides the default provider.
                    channel_provider = self.CONF_DEFAULT_PROVIDER
                    if len(chlist[c].split(",")) > 2:
                        if chlist[c].split(",")[2] != '':
                            channel_provider = chlist[c].split(",")[2].strip(
                                ' \n\r').lower()

                    # if channel name is not present as option in channel_list.conf , quit with error
                    # NOTE(review): unreachable - the identical check above
                    # already exits the script when channel_name is empty.
                    if channel_name == '':
                        self.log("ERROR ! ID=" + str(c) +
                                 " channel name not present. Skip !")
                        continue

                    # download only if file doesn't exist or cacheopt == 2 (always download),
                    # using open(...,"w") files will be overwritten (saving a delete + create)

                    day = str(self.convert_daymp(gg))
                    eventfilename = scriptlib.fn_escape(
                        str(c) + self.FIELD_SEPARATOR + channel_name +
                        self.FIELD_SEPARATOR + day)
                    eventfilepath = os.path.join(self.CONF_CACHEDIR,
                                                 eventfilename)
                    if (cacheopt == 1) and os.path.exists(eventfilepath):
                        continue
                    if (cacheopt == 3) and os.path.exists(eventfilepath) and (
                            gg != self.TODAYMP):
                        continue
                    if (cacheopt != 1) and (cacheopt != 2) and (cacheopt != 3):
                        self.log("Warning: unknown cache option " +
                                 str(cacheopt))
                        # NOTE(review): exit_for_loop is assigned here but no
                        # enclosing loop in this method reads it - likely a
                        # leftover from a sibling implementation.
                        exit_for_loop = True
                        continue

                    num_events = 0
                    self.log("  Writing in cache \'" + eventfilename + "\'", 2)
                    self.log2video(" extracting \"%s\" [%d] (%s)" %
                                   (channel_name, num_events, day))

                    # write data in cache file using UTF-8 encoding
                    fd = codecs.open(eventfilepath, "w", 'utf-8')

                    fd.write(
                        str(c) + self.FIELD_SEPARATOR + channel_name +
                        self.FIELD_SEPARATOR + channel_provider +
                        self.FIELD_SEPARATOR + day + '\n')
                    fd.write(
                        "Local Time (human readeable)###Unix GMT Time###Event Title###Event Description\n"
                    )

                    xmlref_events = xml_ch.getElementsByTagName('prg')
                    for xml_ee in xmlref_events:
                        orainiz = xml_ee.attributes["orainizio"].value

                        # Events between 00:00 and 05:59 belong to the next
                        # calendar day: shift them forward by 24h (86400 s).
                        if (orainiz >= '00:00') and (orainiz <= '05:59'):
                            nextdayevent = 86400
                        else:
                            nextdayevent = 0

                        event_starttime = gg + " " + orainiz

                        if c == (chid + '+1'):
                            # manage channel "+1": shift start time by +1 hour
                            event_startime_unix_gmt = str(
                                int(
                                    time.mktime(
                                        time.strptime(event_starttime,
                                                      "%Y/%m/%d %H:%M"))) -
                                self.DELTA_UTC + 3600 + nextdayevent)
                        else:
                            # normal channel, not "+1"
                            event_startime_unix_gmt = str(
                                int(
                                    time.mktime(
                                        time.strptime(event_starttime,
                                                      "%Y/%m/%d %H:%M"))) -
                                self.DELTA_UTC + nextdayevent)

                        # Title: strip CR/LF and surrounding spaces.
                        event_title = unicode(
                            xml_ee.getElementsByTagName('titolo')
                            [0].firstChild.data)
                        event_title = event_title.replace('\r', '')
                        event_title = event_title.replace('\n', '')
                        event_title = event_title.strip(u' ')

                        # Optional description: fetched from the linked page
                        # only when CONF_DL_DESC is enabled, truncated to
                        # CONF_DLDESCMAXCHAR characters.
                        event_description = ''
                        if self.CONF_DL_DESC == 1:
                            url_desc = xml_ee.getElementsByTagName(
                                'linkScheda')[0].firstChild.data
                            event_description = unicode(
                                self.get_description(url_desc.strip(' \n\r'))
                                [:self.CONF_DLDESCMAXCHAR])
                            event_description = event_description.replace(
                                '\r', '')
                            event_description = event_description.replace(
                                '\n', u' ')
                            event_description = event_description.strip(u' ')

                        fd.write(event_starttime + self.FIELD_SEPARATOR +
                                 event_startime_unix_gmt +
                                 self.FIELD_SEPARATOR + event_title +
                                 self.FIELD_SEPARATOR + event_description +
                                 '\n')
                        num_events += 1
                        self.log2video(" extracting \"%s\" [%d] (%s)" %
                                       (channel_name, num_events, day))

                    fd.close()

        del xmldoc
Exemplo n.º 6
0
    def download_and_cache(self):
        """Download per-channel, per-day HTML EPG pages and write them to
        the local cache as UTF-8 event files.

        For each channel in ``CHANNELLIST`` whose cache option requires a
        download, fetches ``<CONF_URL>?<chid>_<YYYY_MM_DD>`` for each day in
        ``DAYCACHE`` (with retries and a random delay between requests),
        parses it with ``Titolo_parser`` and writes one event file per
        channel/day. Exits the script when a channel has no name option.
        """
        self.log("--- START DOWNLOAD AND CACHE DATA ---")
        self.log2video("STARTING DOWNLOAD")

        self.log("Removing old cached files")
        scriptlib.cleanup_oldcachedfiles(self.CONF_CACHEDIR,
                                         self.FIELD_SEPARATOR)

        #self.log("Start downloading HTML data from \'%s\'" % self.CONF_URL)

        chlist = self.CHANNELLIST

        # get remote XML files
        #   chid format: channel id , 0|1|2(,new name)
        #   i.e. ("101" , "1,SkyCinema1")
        for c in sorted(chlist.keys()):
            self.guidatoday = []
            self.guidatomorrow = []

            # get cache option
            #  0 : don't download/cache
            #  1 : download and cache (optional 1,new_name )
            #  2 : always download overwriting existing files (optional 2,new_name )
            #  3 : always download overwriting existing files only for TODAY (optional 3,new_name )

            cacheopt = int(string.split(chlist[c], ",")[0])

            # if cacheopt == 0, do nothing
            if cacheopt == 0:
                continue

            # Second CSV field (when present and non-empty) is the
            # user-provided channel name.
            channel_name = ''
            if len(chlist[c].split(",")) > 1:
                if chlist[c].split(",")[1] != '':
                    # channel renamed, new name provided by user
                    channel_name = chlist[c].split(",")[1].strip(' ').lower()

            # if channel name is not present as option, quit with error
            if channel_name == '':
                self.log("ERROR ! ID=%s channel name not present" % c, 1)
                sys.exit(1)

            # Optional third CSV field overrides the default provider.
            channel_provider = self.CONF_DEFAULT_PROVIDER
            if len(chlist[c].split(",")) > 2:
                if chlist[c].split(",")[2] != '':
                    channel_provider = chlist[c].split(",")[2].strip(
                        ' ').lower()

            exit_for_loop = False
            for day in self.DAYCACHE:
                if exit_for_loop == True:
                    break

                # Remote file name uses YYYY_MM_DD while DAYCACHE entries
                # are YYYYMMDD.
                day_get = time.strftime("%Y_%m_%d",
                                        time.strptime(day, "%Y%m%d"))
                xmlfile = "?%s_%s" % (c, day_get)

                # download only if file doesn't exist or cacheopt == 2 (always download),
                # using open(...,"w") files will be overwritten (saving a delete + create)

                eventfilename = scriptlib.fn_escape(
                    str(c) + self.FIELD_SEPARATOR + channel_name +
                    self.FIELD_SEPARATOR + day)
                eventfilepath = os.path.join(self.CONF_CACHEDIR, eventfilename)
                if (cacheopt == 1) and os.path.exists(eventfilepath):
                    continue
                if (cacheopt == 3) and os.path.exists(eventfilepath) and (
                        day != self.TODAY):
                    continue
                if (cacheopt != 1) and (cacheopt != 2) and (cacheopt != 3):
                    self.log("Warning: unknown cache option " + str(cacheopt))
                    exit_for_loop = True
                    continue

                self.log("Download HTML data from \'%s\'" %
                         (self.CONF_URL + xmlfile))
                self.log2video("Download " + c)

                # Retry the download up to HTTP_ERROR_RETRY times.
                i = self.HTTP_ERROR_RETRY
                while i > 0:
                    #  wait randomly to avoid overloading website
                    time.sleep(
                        random.uniform(self.CONF_RANDOM_MIN,
                                       self.CONF_RANDOM_MAX))

                    try:
                        sock = urllib2.urlopen(self.CONF_URL + xmlfile)
                        data = sock.read()

                    except IOError, e:
                        serr = "unknown"
                        if hasattr(e, 'reason'):
                            serr = str(e.reason)
                        elif hasattr(e, 'code'):
                            serr = str(e.code)
                            # NOTE(review): this 'msg' check is nested inside
                            # the 'code' branch, unlike sibling implementations
                            # where it runs unconditionally - confirm which is
                            # intended.
                            if hasattr(e, 'msg'):
                                serr += " , " + str(e.msg)

                        self.log(
                            "\'%s\' connection error. Reason: %s. Waiting %d sec. and retry [%d] ..."
                            % (self.CONF_URL + xmlfile, serr,
                               self.HTTP_ERROR_WAIT_RETRY, i))
                        time.sleep(self.HTTP_ERROR_WAIT_RETRY)  # add sleep
                        i -= 1

                    else:
                        i = 0  # force quit WHILE loop
                        sock.close()

                        # Keep yesterday's "tomorrow" events: they belong to
                        # the day being written now.
                        dtparser = Titolo_parser(day)
                        dtparser.parse(data)
                        self.guida = self.guidatomorrow
                        (self.guidatoday,
                         self.guidatomorrow) = dtparser.get_guida()

                        # if no data, quit for loop and stop downloading
                        if len(self.guidatoday) == 0:
                            exit_for_loop = True
                            break

                        self.guida = self.guida + self.guidatoday

                        self.log("  writing in cache \'%s\'" % eventfilename)
                        # write data in cache file using UTF-8 encoding
                        fd = codecs.open(eventfilepath, "w", 'utf-8')
                        fd.write(
                            str(c) + self.FIELD_SEPARATOR + channel_name +
                            self.FIELD_SEPARATOR + channel_provider +
                            self.FIELD_SEPARATOR + day + '\n')
                        fd.write(
                            "Local Time (human readeable)###Unix GMT Time###Event Title###Event Description\n"
                        )

                        # extract all events and put in eventfile
                        for event in self.guida:
                            (dataora, titolo) = event
                            event_starttime = dataora
                            # time.mktime return Unix time inside GMT timezone
                            event_startime_unix_gmt = str(
                                int(
                                    time.mktime(
                                        time.strptime(event_starttime,
                                                      "%Y-%m-%d %H:%M"))) -
                                self.DELTA_UTC)
                            #event_startime_unix_gmt = str(int(time.mktime(time.strptime(event_starttime,"%Y-%m-%d %H:%M")))  )
                            #self.log(event_starttime + " , " + str(self.DELTA_UTC) + " , " + str(int(time.mktime(time.strptime(event_starttime,"%Y-%m-%d %H:%M")))) + " , " + event_startime_unix_gmt )

                            # convert remote data (RAI website use UTF-8) in Python Unicode (UCS2)
                            event_title = unicode(titolo,
                                                  self.REMOTE_EPG_CHARSET)

                            event_title = event_title.replace('\r', '')
                            event_title = event_title.replace('\n', u' ')
                            event_title = event_title.strip(u' ')

                            # This source provides no event description.
                            event_description = u''

                            fd.write(event_starttime + self.FIELD_SEPARATOR +
                                     event_startime_unix_gmt +
                                     self.FIELD_SEPARATOR + event_title +
                                     self.FIELD_SEPARATOR + event_description +
                                     '\n')

                        fd.close()