Example #1
    def emit(self, record):  # pylint:disable=inconsistent-return-statements
        # Skip shipping logs when running in a managed environment or when
        # the record is filtered out.
        if settings.IS_MANAGED or not self.can_record(record):
            return
        try:
            return self._send_logs(self.format_record(record))
        except Exception:
            logger.warning("Polyaxon failed creating log record")
Example #2
    def log_data_ref(self, data, data_name='data', reset=False):
        try:
            # Store a hash of the data as a reference on the experiment.
            params = {data_name: hash_value(data)}
            patch_dict = {'data_refs': params}
            if reset is False:
                patch_dict['merge'] = True
            self._update(patch_dict)
        except Exception as e:
            logger.warning('Could not create data hash %s', e)
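
A short usage sketch, assuming experiment is an instance of the tracking class that defines log_data_ref; the variable and data names are illustrative:

    # Store a reference hash for the training data; by default the new
    # entry is merged into any existing data_refs on the experiment.
    experiment.log_data_ref(data=train_set, data_name='train')

    # reset=True replaces the stored references instead of merging.
    experiment.log_data_ref(data=train_set, data_name='train', reset=True)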
Example #3

    def upload(self,
               url,
               files,
               files_size,
               params=None,
               json_data=None,
               timeout=None,
               headers=None,
               session=None):

        # Soft limit: warn about the size but continue with the upload.
        if files_size > settings.WARN_UPLOAD_SIZE:
            logger.warning(
                "You are uploading %s, there's a hard limit of %s.\n"
                "If you have data files in the current directory, "
                "please make sure to add them to .polyaxonignore or "
                "add them directly to your data volume, or upload them "
                "separately using the `polyaxon data` command and remove them from here.\n",
                self.format_sizeof(files_size),
                self.format_sizeof(settings.MAX_UPLOAD_SIZE))

        # Hard limit: refuse the upload entirely.
        if files_size > settings.MAX_UPLOAD_SIZE:
            raise PolyaxonShouldExitError(
                "Files too large to sync, please keep them under {}.\n"
                "If you have data files in the current directory, "
                "please add them directly to your data volume, or upload them "
                "separately using the `polyaxon data` command and remove them from here.\n"
                .format(self.format_sizeof(settings.MAX_UPLOAD_SIZE)))

        files = to_list(files)
        if json_data:
            # Ship the JSON payload as an extra multipart field.
            files.append(('json', json.dumps(json_data)))

        multipart_encoder = MultipartEncoder(fields=files)
        request_headers = headers or {}
        request_headers.update(
            {"Content-Type": multipart_encoder.content_type})

        # Attach progress bar
        progress_callback, callback_bar = self.create_progress_callback(
            multipart_encoder)
        multipart_encoder_monitor = MultipartEncoderMonitor(
            multipart_encoder, progress_callback)

        timeout = timeout if timeout is not None else settings.LONG_REQUEST_TIMEOUT

        try:
            response = self.put(url=url,
                                params=params,
                                data=multipart_encoder_monitor,
                                headers=request_headers,
                                timeout=timeout,
                                session=session)
        finally:
            # always make sure we clear the console
            callback_bar.done()

        return response
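
A hedged example of calling upload(), assuming client exposes this method and that each entry in files follows the (name, (filename, fileobj, content_type)) tuple format accepted by requests_toolbelt's MultipartEncoder; the URL is hypothetical:

    import os

    files = [('code', ('repo.tar.gz',
                       open('repo.tar.gz', 'rb'),
                       'application/gzip'))]

    response = client.upload(
        url='https://polyaxon.example.com/api/v1/upload',
        files=files,
        files_size=os.path.getsize('repo.tar.gz'),
        json_data={'description': 'repo snapshot'})  # optional extra payload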
Example #4
    def log_data_ref(self, data, data_name='data', reset=False):
        if settings.NO_OP:
            # Tracking is disabled in no-op mode.
            return

        try:
            import hashlib

            # Reference the data by a truncated md5 digest of its string form.
            params = {
                data_name: hashlib.md5(str(data).encode("utf-8")).hexdigest()[:settings.HASH_LENGTH]
            }
            patch_dict = {'data_refs': params}
            if reset is False:
                patch_dict['merge'] = True
            self.client.experiment.update_experiment(username=self.username,
                                                     project_name=self.project_name,
                                                     experiment_id=self.experiment_id,
                                                     patch_dict=patch_dict,
                                                     background=True)
        except Exception as e:
            logger.warning('Could not create data hash %s', e)
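
Examples #2 and #4 implement the same method: #2 delegates the hashing to a hash_value helper, while #4 inlines it. A minimal sketch of what such a helper could look like, reconstructed from the inline logic in #4 (the default length is an assumption):

    import hashlib

    def hash_value(value, hash_length=12):
        # Hash the string form of the value and keep a short prefix,
        # mirroring the inline md5 logic in Example #4.
        return hashlib.md5(str(value).encode("utf-8")).hexdigest()[:hash_length]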