def test_to_datetime(self):
    """Verify that to_datetime() returns a datetime for every supported key.

    Bug fix: the original called ``self.assertTrue(type(date), type(datetime))``.
    The second argument of ``assertTrue`` is the failure *message*, not a value
    to compare, and ``type(date)`` is always truthy — so the old assertions
    could never fail.  ``assertIsInstance`` performs the intended check.
    """
    # A struct_time such as feedparser stores in its *_parsed entries.
    now = arrow.utcnow().to('Europe/Paris').timetuple()

    # to_datetime() should handle each known feedparser key, and the
    # fallback path for an unrecognized key ('my_date') as well.
    # NOTE(review): assumes `datetime` in this module is the class
    # (from datetime import datetime), not the module — TODO confirm.
    for key in ('published_parsed', 'created_parsed',
                'updated_parsed', 'my_date'):
        data = {key: now}
        date = to_datetime(data)
        self.assertIsInstance(date, datetime)
def _create_content(self, site_title, content, pelican_path, url, **data):
    """
    create the file in the 'content' directory of pelican

    :param site_title: title of the target website
    :param content: the content of the post
    :param pelican_path: where the files are created
    :param url: url of the datasource
    :param data: the data to check to be used and save
    :type site_title: string
    :type content: string
    :type pelican_path: string
    :type url: string
    :type data: dict
    :return: the status of the save statement
    :rtype: boolean
    """
    published = to_datetime(data)
    # "value or default" replaces the original
    # "data.get(k) if data.get(k) else default", which looked the key
    # up twice; truthiness semantics are identical.
    category = data.get('category') or ''
    tags = data.get('tags') or ''
    filename = self._set_filename(data.get('title'), pelican_path)
    full_content = self._set_full_content(site_title, data.get('title'),
                                          published, content, url,
                                          category, tags)
    try:
        with open(filename, 'w') as f:
            f.write(full_content)
        status = True
    except Exception as e:
        # best-effort write: log the failure and report it via the
        # return value instead of propagating the exception
        logger.critical(e)
        status = False
    return status
def publishing(service, now): """ the purpose of this tasks is to get the data from the cache then publish them :param service: service object where we will publish :param now: it's the current date :type service: obect :type now: string date """ # flag to know if we have to update to_update = False # flag to get the status of a service status = False # counting the new data to store to display them in the log count_new_data = 0 # provider - the service that offer data service_provider = default_provider.get_service( str(service.provider.name.name)) # consumer - the service which uses the data service_consumer = default_provider.get_service( str(service.consumer.name.name)) # check if the service has already been triggered # if date_triggered is None, then it's the first run if service.date_triggered is None: logger.debug("first run {}".format(service)) to_update = True status = True # run run run else: # 1) get the data from the provider service kw = {'trigger_id': service.id} datas = getattr(service_provider, 'process_data')(**kw) if datas is not None and len(datas) > 0: consumer = getattr(service_consumer, '__init__')(service.consumer.token) consumer = getattr(service_consumer, 'save_data') published = '' which_date = '' # 2) for each one for data in datas: if settings.DEBUG: from django_th.tools import to_datetime published = to_datetime(data) published, which_date = get_published( published, which_date) date_triggered = arrow.get(str(service.date_triggered), 'YYYY-MM-DD HH:mm:ss').to( settings.TIME_ZONE) publish_log_data(published, date_triggered, data) # the consummer will save the data and return if success or not status = consumer(service.consumer.token, service.id, **data) else: count_new_data = len(datas) to_update = True # let's log log_update(service, to_update, status, count_new_data) # let's update if to_update and status: update_trigger(service)