Example #1
 def _get_detail(self, detail_page_url: str) -> AnimeDetailInfo:
     """引擎管理器负责调用, 捕获异常"""
     try:
         return self.get_detail(detail_page_url)
     except Exception as e:
         logger.error(f"Catch exception: {e} when processing {detail_page_url}")
         return AnimeDetailInfo()
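This wrapper (like the similar ones in the examples below) suggests a template-method pattern: engines override the public method, and the manager only ever calls the guarded private one. A minimal sketch under that assumption, reusing the example's logger and AnimeDetailInfo; the class names are hypothetical:

class BaseEngine:
    def get_detail(self, url: str) -> AnimeDetailInfo:
        raise NotImplementedError  # each engine implements the actual parsing

    def _get_detail(self, url: str) -> AnimeDetailInfo:
        # The manager calls this; an engine bug degrades to an empty result
        try:
            return self.get_detail(url)
        except Exception as e:
            logger.error(f"Caught exception: {e} while processing {url}")
            return AnimeDetailInfo()

class BrokenEngine(BaseEngine):
    def get_detail(self, url: str) -> AnimeDetailInfo:
        raise RuntimeError("parser broke")  # simulated engine failure

BrokenEngine()._get_detail("https://example.com/x")  # logs the error, returns an empty AnimeDetailInfo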
Example #2
 def _search(self, keyword: str) -> List[DanmakuMetaInfo]:
     """引擎管理器负责调用, 捕获异常"""
     try:
         return self.search(keyword)
     except Exception as e:
         logger.error(f"Catch exception: {e} when searching for {keyword}")
         return []
Example #3
    def handle(self, *args, **options):
        logger.info('Moving Read only users out of staff status...')

        users = self.get_readonly_users()
        if len(users) > 0:
            logger.info('Revoking staff status from %s user%s' % (len(users), 's' if len(users) > 1 else ''))
            num_updated = 0
            for u in users:
                u.is_staff = False
                try:
                    u.save()
                except Exception as e:
                    logger.error(str(e)[:100])
                    logger.error('Could not update user %s' % u.email)
                    continue
                num_updated += 1
                logger.info(' %s user%s updated' % (num_updated, 's' if num_updated > 1 else ''))
            logger.info('... finished moving user%s' % ('s' if len(users) > 1 else ''))
        else:
            logger.info('... no users found to be moved')

        ifrc_users = self.get_ifrc_domain_users()
        ifrc_grp = Group.objects.get(name='IFRC Admins')
        if len(ifrc_users) > 0:
            logger.info('Adding IFRC Admins Group membership to %s user%s' % (len(ifrc_users), 's' if len(ifrc_users) > 1 else ''))
            num_i_updated = 0
            for u in ifrc_users:
                ifrc_grp.user_set.add(u)
                num_i_updated += 1
                logger.info(' %s user%s added' % (num_i_updated, 's' if num_i_updated > 1 else ''))
            logger.info('... finished adding user%s to IFRC Admins Group' % ('s' if len(ifrc_users) > 1 else ''))
        else:
            logger.info('... no users found to add to IFRC Admins')
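The `get_readonly_users` and `get_ifrc_domain_users` helpers are defined elsewhere on this command; a hypothetical sketch of what they might look like (the group name and e-mail domain are assumptions):

from django.contrib.auth.models import User

def get_readonly_users(self):
    # Hypothetical: staff accounts whose only role is a read-only group
    return User.objects.filter(is_staff=True, groups__name='Read Only Users')

def get_ifrc_domain_users(self):
    # Hypothetical: active accounts on the ifrc.org mail domain
    return User.objects.filter(is_active=True, email__iendswith='@ifrc.org')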
Example #4
 def _get_danmaku(self, cid: str) -> Dict:
     """引擎管理器负责调用, 捕获异常"""
     try:
         return self.get_danmaku(cid)
     except Exception as e:
         logger.error(f"Catch exception: {e} when parsing danmaku {cid}")
         return {}
Example #5
def remove_child_events_from_es(sender, instance, using, **kwargs):
    ''' Handle Emergency Elasticsearch indexes '''
    model = instance.__class__.__name__
    try:
        if model == 'Event':
            curr_record = Event.objects.filter(id=instance.id).first()
            # If new record, do nothing, index_and_notify should handle it
            if curr_record is None:
                return

            if curr_record.parent_event is None and instance.parent_event:
                # Delete ES record if Emergency became a child
                delete_es_index(instance)
            elif curr_record.parent_event and instance.parent_event is None:
                # Add back ES record if Emergency became a parent (index_elasticsearch.py)
                create_es_index(instance)
        elif model == 'Country':
            curr_record = Country.objects.filter(id=instance.id).first()
            if instance.in_search:
                if not curr_record:
                    create_es_index(instance)
                else:
                    if not curr_record.in_search:
                        # in_search switched on: create the missing index
                        create_es_index(instance)
                    else:
                        update_es_index(instance)
            else:
                delete_es_index(instance)
    except Exception as ex:
        logger.error(f'Failed to update the ES index for {model}, error: {str(ex)[:512]}')
Example #6
 def _get_detail(self, play_page_url: str) -> DanmakuCollection:
     """引擎管理器负责调用, 捕获异常"""
     try:
         return self.get_detail(play_page_url)
     except Exception as e:
         logger.error(f"Catch exception: {e} when processing {play_page_url}")
         return DanmakuCollection()
Example #7
 def push_table_to_index(self, model):
     query = model.objects.all()
     data = [self.convert_for_bulk(s) for s in list(query)]
     created, errors = bulk(client=ES_CLIENT, actions=data)
     logger.info('Created %s records' % created)
     if errors:
         logger.error('Produced the following errors:')
         logger.error('[%s]' % ', '.join(map(str, errors)))
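`convert_for_bulk` is defined elsewhere; for `elasticsearch.helpers.bulk` to accept its output, each element must be an action dict. A hypothetical sketch (the index name and document body are assumptions):

def convert_for_bulk(self, obj):
    # Hypothetical serializer: wrap a model instance in an ES 'index' action
    return {
        '_op_type': 'index',
        '_index': 'pages',              # assumed index name
        '_id': obj.pk,
        '_source': {'name': str(obj)},  # assumed minimal document body
    }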
Example #8
 def make_response_for(self, video: Video) -> requests.Response:
     """获取视频对应的 handler 对象, 用于代理访问数据并返回响应给客户端"""
     if not video:
         logger.error(f"Invalid request")
         return requests.Response()
     target_handler = self._handlers.get(video.handler)
     if not target_handler:
         logger.error(f"VideoHandler not found: {video.handler}")
         return requests.Response()
     return target_handler(video).make_response()
Example #9
 def get_anime_detail(self, meta: AnimeMetaInfo) -> AnimeDetailInfo:
     """解析一部番剧的详情页,返回包含视频列表的详细信息"""
     if not meta:
         logger.error(f"Invalid request")
         return AnimeDetailInfo()
     target_engine = self._engines.get(meta.engine)
     if not target_engine:
         logger.error(f"Engine not found: {meta.engine}")
         return AnimeDetailInfo()
     return target_engine()._get_detail(meta.detail_page_url)
Example #10
 def get_danmaku_detail(self, meta: DanmakuMetaInfo) -> DanmakuCollection:
     """解析一部番剧的详情页,返回包含视频列表的详细信息"""
     if not meta:
         logger.error(f"Invalid request")
         return DanmakuCollection()
     target_engine = self._danmaku_engine.get(meta.dm_engine)
     if not target_engine:
         logger.error(f"Danmaku Engine not found: {meta.dm_engine}")
         return DanmakuCollection()
     return target_engine()._get_detail(meta.play_page_url)
Example #11
 def get_danmaku_data(self, dmk: Danmaku) -> List:
     """解析一部番剧的详情页,返回包含视频列表的详细信息"""
     if not dmk:
         logger.error(f"Invalid request")
         return []
     target_engine = self._danmaku_engine.get(dmk.dm_engine)
     if not target_engine:
         logger.error(f"Danmaku Engine not found: {dmk.dm_engine}")
         return []
     return target_engine()._get_danmaku(dmk.cid)
Example #12
def mongo_connect():
    if connect("mongodb://*****:*****@0.0.0.0:27017/") is not False:
        config['mongo_url'] = "mongodb://*****:*****@0.0.0.0:27017/"
        logger.info("Database connection established")
    elif connect("mongodb://*****:*****@34.64.197.155:27017/") is not False:
        config['mongo_url'] = "mongodb://*****:*****@34.64.197.155:27017/"
        logger.info("Database connection established")
    else:
        logger.error("Check the database settings")
        sys.exit()
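If `connect` here is mongoengine's, note that it returns a client handle rather than False, and pymongo connects lazily, so the fallback branch above may never trigger for an unreachable server. A hedged sketch of an explicit liveness check, assuming mongoengine/pymongo and reusing the example's logger:

from mongoengine import connect

def mongo_connect_checked(url: str) -> bool:
    """Return True only if the server actually answers a ping."""
    try:
        client = connect(host=url)
        client.admin.command('ping')  # forces a round trip; connections are lazy
        return True
    except Exception as exc:
        logger.error(f"MongoDB connection failed: {exc}")
        return False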
Example #13
 def get_video_url(self, video: Video) -> str:
     """解析视频真实 url"""
     if not video:
         logger.error(f"Invalid request")
         return "error"
     target_handler = self._handlers.get(video.handler)
     if not target_handler:
         logger.error(f"VideoHandler not found: {video.handler}")
         return "error"
     target_handler = target_handler(video)
     return target_handler._get_real_url()
Example #14
def add_tags_to_obj(obj, tags):
    # We clear all tags first, and then re-add them
    tag_molnix_ids = [t['id'] for t in tags]
    obj.molnix_tags.clear()
    for molnix_id in tag_molnix_ids:
        try:
            t = MolnixTag.objects.get(molnix_id=molnix_id)
        except MolnixTag.DoesNotExist:
            logger.error('Tag not found: %d' % molnix_id)
            continue
        obj.molnix_tags.add(t)
    obj.save()
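A brief usage sketch, assuming the Molnix API payload shape implied above (a list of dicts with an `id` key); the object and ids are hypothetical:

tags = [{'id': 101}, {'id': 102}]   # hypothetical Molnix tag payload
add_tags_to_obj(deployment, tags)   # clears existing molnix_tags, then re-adds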
Example #15
 def push_table_to_index(self, model):
     if model.__name__ == 'Event':
         query = model.objects.filter(parent_event__isnull=True)
     elif model.__name__ == 'Country':
         query = model.objects.filter(in_search=True)
     else:
         query = model.objects.all()
     data = [construct_es_data(s, is_create=True) for s in list(query)]
     created, errors = bulk(client=ES_CLIENT, actions=data)
     logger.info('Created %s records' % created)
     if errors:
         logger.error('Produced the following errors:')
         logger.error('[%s]' % ', '.join(map(str, errors)))
Example #16
    def handle(self, *args, **options):
        contacts = [
            # Area/field/extent (en, fr, es, ar), name, email
            [
                'Africa Region', 'Région Afrique', 'Región de África',
                'منطقة إفريقيا', 'Elly NANDASABA MULAHA',
                '*****@*****.**'
            ],
            [
                'Americas Region', 'Région Amériques', 'Región de América',
                'منطقة الأمريكتين', 'Luis FANOVICH', '*****@*****.**'
            ],
            [
                'Asia Pacific Region', 'Région Asie-Pacifique',
                'Región de Asia-Pacífico', 'منطقة آسيا والمحيط الهادئ',
                'Dedi JUNADI', '*****@*****.**'
            ],
            [
                'Europe Region', 'Région Europe', 'Región de Europa',
                'منطقة أوروبا', 'Anssi ANONEN', '*****@*****.**'
            ],
            [
                'MENA Region', 'Région MENA', 'Región de MENA',
                'منطقة الشرق الأوسط وشمال أفريقيا', 'Ahmad AL JAMAL',
                '*****@*****.**'
            ]
        ]

        c_to_add = []
        contacts_empty = not MainContact.objects.exists()

        if contacts_empty:
            for con in contacts:
                contact = MainContact(extent=con[0],
                                      extent_en=con[0],
                                      extent_fr=con[1],
                                      extent_es=con[2],
                                      extent_ar=con[3],
                                      name=con[4],
                                      email=con[5])
                c_to_add.append(contact)

        if c_to_add:
            try:
                MainContact.objects.bulk_create(c_to_add)
            except Exception as ex:
                logger.error(f'Could not create MainContacts. Error: {str(ex)}')
            else:
                logger.info('Successfully added MainContacts.')
Example #17
    def handle(self, *args, **options):
        logger.info('Starting appeal document ingest')

        # get latest
        url = 'https://proxy.hxlstandard.org/data.json?url=https%3A%2F%2Fdocs.google.com%2Fspreadsheets%2Fd%2F1gJ4N_PYBqtwVuJ10d8zXWxQle_i84vDx5dHNBomYWdU%2Fedit%3Fusp%3Dsharing'

        response = requests.get(url)
        if response.status_code != 200:
            logger.error('Error querying Appeal Document HXL API')
            raise Exception('Error querying Appeal Document HXL API')
        records = response.json()

        # some logging variables
        not_found = []
        existing = []
        created = []

        # group records by appeal code
        acodes = list(set([a[2] for a in records[2:]]))
        for code in acodes:
            try:
                appeal = Appeal.objects.get(code=code)
            except ObjectDoesNotExist:
                not_found.append(code)
                continue

            existing_docs = list(appeal.appealdocument_set.all())
            docs = [a for a in records if a[2] == code]
            for doc in docs:
                exists = any(a.document_url == doc[0] for a in existing_docs)
                if exists:
                    existing.append(doc[0])
                else:
                    try:
                        created_at = self.parse_date(doc[5])
                    except Exception:
                        created_at = None

                    AppealDocument.objects.create(
                        document_url=doc[0],
                        name=doc[4],
                        created_at=created_at,
                        appeal=appeal,
                    )
                    created.append(doc[0])
        logger.info('%s appeal documents created' % len(created))
        logger.info('%s existing appeal documents' % len(existing))
        logger.warning('%s documents without appeals in system' % len(not_found))
Example #18
    def handle(self, *args, **options):
        logger.info('Starting appeals ingest')
        new, modified, bilaterals = self.get_new_or_modified_appeals()
        logger.info('%s current appeals' % Appeal.objects.all().count())
        logger.info('Creating %s new appeals' % len(new))
        logger.info('Updating %s existing appeals that have been modified' %
                    len(modified))

        num_created = 0
        for i, r in enumerate(new):
            fields = self.parse_appeal_record(r, is_new_appeal=True)
            if fields['code'] in bilaterals:
                # Correct the record using the appealbilaterals API
                fields['amount_funded'] += round(bilaterals[fields['code']], 1)
            try:
                Appeal.objects.create(**fields)
            except Exception as e:
                logger.error(str(e)[:100])
                logger.error('Could not create appeal with code %s' %
                             fields['code'])
                continue
            num_created += 1

        num_updated = 0
        for i, r in enumerate(modified):
            fields = self.parse_appeal_record(r, is_new_appeal=False)
            if fields['code'] in bilaterals:
                # Correct the record using the appealbilaterals API
                fields['amount_funded'] += round(bilaterals[fields['code']], 1)

            try:
                appeal, created = Appeal.objects.update_or_create(
                    code=fields['code'], defaults=fields)
            except Exception as e:
                logger.error(str(e)[:100])
                logger.error('Could not update appeal with code %s' %
                             fields['code'])
                continue
            num_updated += 1

        total_appeals = Appeal.objects.count()
        logger.info('%s appeals created' % num_created)
        logger.info('%s appeals updated' % num_updated)
        logger.info('%s total appeals' % total_appeals)
        logger.info('Appeals ingest completed')

        body = {
            "name": "ingest_appeals",
            "message": 'Appeals ingest completed, %s total appeals (%s new, %s existing).' % (
                total_appeals, num_created, num_updated),
            "num_result": total_appeals,
            "status": CronJobStatus.SUCCESSFUL
        }
        CronJob.sync_cron(body)
Example #19
 def handle(self, *args, **options):
     try:
         # Update countries which should appear in search
         inc_c = Country.objects.filter(
             independent=True, is_deprecated=False, record_type=1
         ).update(in_search=True)
         # Update countries which should NOT appear in search
         # (independent can be null, hence the negated check)
         exc_c = Country.objects.filter(
             ~Q(independent=True) | Q(is_deprecated=True) | ~Q(record_type=1)
         ).update(in_search=False)
         logger.info(f'Set in_search for Countries: {inc_c} included, {exc_c} excluded')
     except Exception as ex:
         logger.error(
             f'Failed to set in_search for Countries. Error: {str(ex)}')
Example #20
 def bulk(self, actions):
     try:
         created, errors = bulk(client=ES_CLIENT, actions=actions)
          if errors:
             logger.error('Produced the following errors:')
             logger.error('[%s]' % ', '.join(map(str, errors)))
     except Exception as e:
         logger.error('Could not index records')
         logger.error('%s...' % str(e)[:512])
Example #21
def extract_table(dbfile, table):
    """ Extract a table from the Access database """
    cmd = 'mdb-export %s %s' % (dbfile, table)
    try:
        output = subprocess.check_output(cmd.split(' ')).splitlines()
    except Exception as e:
        logger.error(e)
        return []
    output = [o.decode('utf-8') for o in output]
    reader = csv.reader(output, delimiter=',', quotechar='"')
    records = []
    for i, row in enumerate(reader):
        if i == 0:
            header = row
        else:
            d = {header[j]: value for j, value in enumerate(row)}
            records.append(d)
    return records
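A usage sketch, assuming `mdb-export` from mdbtools is on the PATH; the file and table names are hypothetical:

records = extract_table('appeals.mdb', 'AppealsTable')  # hypothetical names
if records:
    print(sorted(records[0].keys()))  # column names taken from the CSV header row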
Example #22
def delete_es_index(instance):
    ''' instance needs an es_id() '''

    if ES_CLIENT and ES_PAGE_NAME:  # guard so this doesn't run during tests
        if hasattr(instance, 'es_id'):
            try:
                deleted, errors = bulk(client=ES_CLIENT,
                                       actions=[{
                                           '_op_type': 'delete',
                                           '_index': ES_PAGE_NAME,
                                           '_type': 'page',
                                           '_id': instance.es_id()
                                       }])
                logger.info(f'Deleted {deleted} records')
                log_errors(errors)
            except Exception:
                logger.error(
                    'Could not reach Elasticsearch server or index was already missing.'
                )
        else:
            logger.warning('instance does not have an es_id() method')
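The guard requires the instance to expose `es_id()`; a hypothetical implementation on the indexed models (the type-prefix scheme is an assumption):

def es_id(self):
    # Hypothetical: a stable ES document id combining record type and primary key
    return '%s-%s' % (self.__class__.__name__.lower(), self.id)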
Example #23
 def run(self):
     try:
         server = smtplib.SMTP(settings.EMAIL_HOST, settings.EMAIL_PORT)
         server.ehlo()
         server.starttls()
         server.ehlo()
         succ = server.login(settings.EMAIL_USER, settings.EMAIL_PASS)
         if 'successful' not in str(succ[1]):
             cron_rec = {
                 "name": "notification",
                 "message": 'Error contacting ' + settings.EMAIL_HOST +
                 ' smtp server for notifications',
                 "status": CronJobStatus.ERRONEOUS
             }
             CronJob.sync_cron(cron_rec)
         if self.recipients:
             server.sendmail(settings.EMAIL_USER, self.recipients,
                             self.msg.as_string())
         server.quit()
         logger.info('E-mails were sent successfully.')
     except Exception as exc:
         logger.error(
             'Could not send emails with Python smtplib, exception: {} -- {}'
             .format(type(exc).__name__, exc.args))
         ex = ''
         try:
             ex = str(exc.args)
         except Exception as exctwo:
             logger.error(exctwo.args)
          cron_rec = {
              "name": "notification",
              "message": 'Error sending out email with Python smtplib: {}'.format(ex),
              "status": CronJobStatus.ERRONEOUS
          }
         CronJob.sync_cron(cron_rec)
Example #24
    def get_new_or_modified_appeals(self):
        use_local_file = (os.getenv('DJANGO_DB_NAME') == 'test'
                          and os.path.exists('appeals.json'))
        new = []
        modified = []
        if use_local_file:
            # read from static file for development
            logger.info('Using local appeals.json file')
            with open('appeals.json') as f:
                modified = json.loads(f.read())
        else:
            # get latest, determine which appeals need to be ingested based on last modified time
            logger.info('Querying appeals API for new appeals data')
            url = 'http://go-api.ifrc.org/api/appeals'
            auth = (os.getenv('APPEALS_USER'), os.getenv('APPEALS_PASS'))
            response = requests.get(url, auth=auth)
            if response.status_code != 200:
                logger.error('Error querying Appeals API')
                raise Exception('Error querying Appeals API')
            records = response.json()

            # write the current record file to local disk
            with open('appeals.json', 'w') as outfile:
                json.dump(records, outfile)

            since_last_checked = datetime.utcnow().replace(
                tzinfo=timezone.utc) - timedelta(minutes=90)
            codes = [a.code for a in Appeal.objects.all()]
            for r in records:
                if r['APP_code'] not in codes:
                    new.append(r)
                last_modified = self.parse_date(r['APP_modifyTime'])
                if last_modified > since_last_checked:
                    modified.append(r)

        return new, modified
Example #25
    def detect_video_format(self) -> str:
        """判断视频真正的格式, url 可能没有视频后缀"""
        # 尝试从 url 提取后缀
        url = self._get_real_url()
        try:
            ext = url.split("?")[0].split(".")[-1].lower()
            if ext in ["mp4", "flv"]:
                return ext
            if ext == "m3u8":
                return "hls"
        except (IndexError, AttributeError):
            pass

        # The video's metadata carries format info: search the first bytes of the stream for hex signatures
        format_hex = {
            "mp4": ["69736F6D", "70617663", "6D703432", "4D50454734", "4C617666"],
            "flv": ["464C56"],
            "hls": ["4558544D3355"]
        }

        _, data_iter = self._get_stream_from_server(0, 512)
        if not data_iter:
            logger.warning("Could not get video stream from server")
            return "unknown"

        logger.debug("Detecting video format from binary stream")
        video_meta = next(data_iter).hex().upper()
        for format_, hex_list in format_hex.items():
            for hex_sign in hex_list:
                if hex_sign in video_meta:
                    logger.debug(f"Video format: {format_}")
                    return format_
        logger.error("Could not detect video format from stream")
        logger.debug("Video raw binary stream (512byte):")
        logger.debug(video_meta)
        return "unknown"
Example #26
 def run(self):
     try:
         server = smtplib.SMTP('smtp.office365.com', 587)
         server.ehlo()
         server.starttls()
         server.ehlo()
         server.login(username, password)
         server.sendmail(username, self.recipients, self.msg.as_string())
         server.quit()
         logger.info('Notifications sent!')
     except SMTPAuthenticationError as e:
         logger.error('SMTPAuthenticationError')
         logger.error('Cannot send notification')
         logger.error(str(e)[:100])
Example #27
 def run(self):
     try:
         server = smtplib.SMTP(emailhost, emailport)
         server.ehlo()
         server.starttls()
         server.ehlo()
         server.login(username, password)
         if self.recipients:
             server.sendmail(username, self.recipients,
                             self.msg.as_string())
         server.quit()
         logger.info('Notifications sent!')
     except SMTPAuthenticationError as e:
         logger.error('SMTPAuthenticationError')
         logger.error('Cannot send notification')
         logger.error(str(e)[:100])
Example #28
    def handle(self, *args, **options):
        logger.info('Starting appeals ingest')
        new, modified = self.get_new_or_modified_appeals()
        logger.info('%s current appeals' % Appeal.objects.all().count())
        logger.info('Creating %s new appeals' % len(new))
        logger.info('Updating %s existing appeals that have been modified' %
                    len(modified))

        num_created = 0
        for i, r in enumerate(new):
            fields = self.parse_appeal_record(r, is_new_appeal=True)
            try:
                Appeal.objects.create(**fields)
            except Exception as e:
                logger.error(str(e)[:100])
                logger.error('Could not create appeal with code %s' %
                             fields['code'])
                continue
            num_created += 1

        num_updated = 0
        for i, r in enumerate(modified):
            fields = self.parse_appeal_record(r, is_new_appeal=False)
            try:
                appeal, created = Appeal.objects.update_or_create(
                    code=fields['code'], defaults=fields)
            except Exception as e:
                logger.error(str(e)[:100])
                logger.error('Could not update appeal with code %s' %
                             fields['code'])
                continue
            num_updated += 1

        logger.info('%s appeals created' % num_created)
        logger.info('%s appeals updated' % num_updated)
        logger.info('%s total appeals' % Appeal.objects.all().count())
        logger.info('Appeals ingest completed')
Example #29
    def handle(self, *args, **options):
        molnix = MolnixApi(url=settings.MOLNIX_API_BASE,
                           username=settings.MOLNIX_USERNAME,
                           password=settings.MOLNIX_PASSWORD)
        try:
            molnix.login()
        except Exception as ex:
            msg = 'Failed to login to Molnix API: %s' % str(ex)
            logger.error(msg)
            create_cron_record(CRON_NAME, msg, CronJobStatus.ERRONEOUS)
            return
        try:
            countries = molnix.get_countries()
            deployments = molnix.get_deployments()
            open_positions = molnix.get_open_positions()
        except Exception as ex:
            msg = 'Failed to fetch data from Molnix API: %s' % str(ex)
            logger.error(msg)
            create_cron_record(CRON_NAME, msg, CronJobStatus.ERRONEOUS)
            return

        try:
            used_tags = get_unique_tags(deployments, open_positions)
            add_tags(used_tags)
            positions_messages, positions_warnings, positions_created = sync_open_positions(
                open_positions, molnix, countries)
            deployments_messages, deployments_warnings, deployments_created = sync_deployments(
                deployments, molnix, countries)
        except Exception as ex:
            msg = 'Unknown Error occurred: %s' % str(ex)
            logger.error(msg)
            create_cron_record(CRON_NAME, msg, CronJobStatus.ERRONEOUS)
            return

        msg = get_status_message(positions_messages, deployments_messages,
                                 positions_warnings, deployments_warnings)
        num_records = positions_created + deployments_created
        has_warnings = bool(positions_warnings or deployments_warnings)
        cron_status = CronJobStatus.WARNED if has_warnings else CronJobStatus.SUCCESSFUL
        create_cron_record(CRON_NAME, msg, cron_status, num_result=num_records)
        molnix.logout()
Example #30
    def get_new_or_modified_appeals(self):
        use_local_file = (os.getenv('DJANGO_DB_NAME') == 'test'
                          and os.path.exists('appeals.json'))
        new = []
        modified = []
        if use_local_file:
            # read from static file for development
            logger.info('Using local appeals.json file')
            with open('appeals.json') as f:
                modified = json.loads(f.read())
            logger.info('Using local appealbilaterals.json file')
            with open('appealbilaterals.json') as f:
                records = json.loads(f.read())
                bilaterals = {}
                for r in records:  # code duplication ¤
                    if r['APP_Code'] and r['AmountCHF']:
                        if r['APP_Code'] in bilaterals:
                            bilaterals[r['APP_Code']] += r['AmountCHF']
                        else:
                            bilaterals[r['APP_Code']] = r['AmountCHF']
        else:
            # get latest BILATERALS
            logger.info('Querying appeals API for new appeals data')
            url = 'http://go-api.ifrc.org/api/appealbilaterals'
            auth = (os.getenv('APPEALS_USER'), os.getenv('APPEALS_PASS'))
            response = requests.get(url, auth=auth)
            if response.status_code != 200:
                text_to_log = 'Error querying AppealBilaterals API at ' + url
                logger.error(text_to_log)
                logger.error(response.content)
                body = {
                    "name": "ingest_appeals",
                    "message": text_to_log,
                    "status": CronJobStatus.ERRONEOUS
                }  # not every case is caught here, e.g. if the base URL is wrong...
                CronJob.sync_cron(body)
                raise Exception(text_to_log)

            records = response.json()

            # write the current record file to local disk
            with open('appealbilaterals.json', 'w') as outfile:
                json.dump(records, outfile)

            bilaterals = {}
            for r in records:  # code duplication ¤
                if r['APP_Code'] and r['AmountCHF']:
                    if r['APP_Code'] in bilaterals:
                        bilaterals[r['APP_Code']] += r['AmountCHF']
                    else:
                        bilaterals[r['APP_Code']] = r['AmountCHF']

            # get latest APPEALS
            logger.info('Querying appeals API for new appeals data')
            url = 'http://go-api.ifrc.org/api/appeals'
            auth = (os.getenv('APPEALS_USER'), os.getenv('APPEALS_PASS'))
            response = requests.get(url, auth=auth)
            if response.status_code != 200:
                logger.error('Error querying Appeals API')
                raise Exception('Error querying Appeals API')
            records = response.json()

            # write the current record file to local disk
            with open('appeals.json', 'w') as outfile:
                json.dump(records, outfile)

            codes = [a.code for a in Appeal.objects.all()]
            for r in records:
                # Temporary filtering, the manual version should be kept:
                if r['APP_code'] in ['MDR65002', 'MDR00001', 'MDR00004']:
                    continue
                #if r['APP_code'] != 'MDRMZ014': # Debug to test bilateral additions or other specific appeals
                #    continue
                if r['APP_code'] not in codes:
                    new.append(r)
                # We use all records, do NOT check if last_modified > since_last_checked
                modified.append(r)

        return new, modified, bilaterals
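Both branches accumulate bilateral amounts identically (the ¤ markers flag the duplication); one possible refactor with collections.defaultdict removes it:

from collections import defaultdict

def sum_bilaterals(records):
    # One way to factor out the duplicated accumulation marked with ¤ above
    bilaterals = defaultdict(float)
    for r in records:
        if r['APP_Code'] and r['AmountCHF']:
            bilaterals[r['APP_Code']] += r['AmountCHF']
    return dict(bilaterals)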