Example 1
    def submit(self, request_data):
        # secondary data pipeline using new internal data format
        reports = self.prepare_reports(request_data)
        for i in range(0, len(reports), 100):
            batch = reports[i:i + 100]
            # insert reports, expire the task if it wasn't processed
            # after six hours to avoid queue overload
            queue_reports.apply_async(
                kwargs={
                    'api_key': self.api_key.valid_key,
                    'email': self.email,
                    'nickname': self.nickname,
                    'reports': batch,
                },
                expires=21600)
Example 2
    def submit(self, api_key):
        # may raise HTTP error
        request_data = self.preprocess()

        # data pipeline using new internal data format
        reports = request_data["items"]
        batch_size = 50
        for i in range(0, len(reports), batch_size):
            batch = reports[i : i + batch_size]
            # insert reports, expire the task if it wasn't processed
            # after six hours to avoid queue overload
            queue_reports.apply_async(
                kwargs={"api_key": api_key.valid_key, "nickname": self.nickname, "reports": batch}, expires=21600
            )

        self.emit_upload_metrics(len(reports), api_key)
Example 3
    def submit(self, api_key):
        # may raise HTTP error
        request_data = self.preprocess(api_key)

        # data pipeline using new internal data format
        transform = self.transform()
        reports = transform.transform_many(request_data['items'])
        for i in range(0, len(reports), 100):
            batch = reports[i:i + 100]
            # insert reports, expire the task if it wasn't processed
            # after six hours to avoid queue overload
            queue_reports.apply_async(
                kwargs={
                    'api_key': api_key.valid_key,
                    'email': self.email,
                    'ip': self.request.client_addr,
                    'nickname': self.nickname,
                    'reports': batch,
                },
                expires=21600)
Example 4
    def submit(self, api_key):
        # may raise HTTP error
        request_data = self.preprocess()

        # data pipeline using new internal data format
        reports = request_data['items']
        batch_size = 50
        for i in range(0, len(reports), batch_size):
            batch = reports[i:i + batch_size]
            # insert reports, expire the task if it wasn't processed
            # after six hours to avoid queue overload
            queue_reports.apply_async(
                kwargs={
                    'api_key': api_key.valid_key,
                    'nickname': self.nickname,
                    'reports': batch,
                },
                expires=21600)

        self.emit_upload_metrics(len(reports), api_key)
Example 5
    def submit(self, api_key):
        # may raise HTTP error
        request_data = self.preprocess()

        # data pipeline using new internal data format
        reports = request_data['items']
        batch_size = 50
        for i in range(0, len(reports), batch_size):
            batch = reports[i:i + batch_size]
            # insert reports, expire the task if it wasn't processed
            # after six hours to avoid queue overload
            queue_reports.apply_async(
                kwargs={
                    'api_key': api_key.valid_key,
                    'email': self.email,
                    'ip': self.request.client_addr,
                    'nickname': self.nickname,
                    'reports': batch,
                },
                expires=21600)

        self.emit_upload_metrics(len(reports), api_key)
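
All five examples hand each batch to queue_reports.apply_async with expires=21600, so a batch that sits unprocessed in the broker for more than six hours is dropped rather than allowed to pile up. The snippets only show the producer side; below is a minimal sketch of what the consuming Celery task could look like. It assumes queue_reports is a Celery task, and the app instance, broker URL, and insert_reports helper are hypothetical stand-ins, not part of the original code.

from celery import Celery

# Hypothetical Celery app; the real project wires up its own broker/backend.
app = Celery('reports', broker='redis://localhost:6379/0')

@app.task(acks_late=True)
def queue_reports(api_key=None, email=None, ip=None, nickname=None, reports=()):
    # Receives one batch (50 or 100 reports in the examples above). Because
    # the producer sets expires=21600, Celery discards the task if it has not
    # run within six hours, which keeps a backed-up queue from growing
    # without bound. email and ip default to None since not every caller
    # passes them.
    insert_reports(reports, api_key=api_key, email=email, ip=ip,
                   nickname=nickname)

def insert_reports(reports, **metadata):
    # Placeholder for the real persistence layer (assumed, not in the source).
    pass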