Example #1
0
 def __init__(self, scheme=None):
     """
     Handler for files or directories on the local filesystem. A
     directory is expanded so every file inside it gets its own entry.

     Args:
         scheme: URL scheme this handler answers to; defaults to "file".
     """
     # Any falsy scheme (None or "") falls back to the default.
     self._scheme = "file" if not scheme else scheme
     self._cache = artifacts_cache.get_artifacts_cache()
Example #2
0
 def __init__(self, name, type, description=None, metadata=None):
     """
     Create a new, not-yet-finalized artifact.

     Args:
         name: Artifact name; may only contain alphanumerics, dashes,
             underscores, and dots.
         type: Required artifact type, e.g. "dataset", "model", or a
             custom string.
         description: Optional free-form description.
         metadata: Optional structured metadata.

     Raises:
         ValueError: If `name` contains illegal characters or `type`
             is None.
     """
     # Raw string: the original non-raw '\-' is an invalid escape
     # sequence (deprecated since Python 3.6, a future SyntaxError).
     if not re.match(r'^[a-zA-Z0-9_\-.]+$', name):
         raise ValueError(
             'Artifact name may only contain alphanumeric characters, dashes, underscores, and dots. Invalid name: "%s"'
             % name)
     if type is None:
         raise ValueError(
             "type is required when logging artifacts, specify \"dataset\", \"model\", or a custom type"
         )
     # TODO: this shouldn't be a property of the artifact. It's a more like an
     # argument to log_artifact.
     self._storage_policy = WandbStoragePolicy()
     self._file_specs = {}
     self._api = InternalApi()  # TODO: persist project in settings?
     self._final = False
     self._digest = None
     self._file_entries = None
     self._manifest = ArtifactManifestV1(self, self._storage_policy)
     self._cache = artifacts_cache.get_artifacts_cache()
     self._added_new = False
     # You can write into this directory when creating artifact files
     self._artifact_dir = compat_tempfile.TemporaryDirectory(
         missing_ok_on_cleanup=True)
     self.server_manifest = None
     self.type = type
     self.name = name
     self.description = description
     self.metadata = metadata
Example #3
0
    def __init__(self):
        """
        Set up the storage handler chain, the shared artifacts cache,
        and an HTTP session with a long retry schedule.
        """
        s3_handler = S3Handler()
        gcs_handler = GCSHandler()
        local_handler = LocalFileHandler()

        self._api = InternalApi()
        self._handler = MultiHandler(
            handlers=[
                s3_handler,
                gcs_handler,
                local_handler,
            ],
            default_handler=TrackingHandler())

        self._cache = artifacts_cache.get_artifacts_cache()

        # First back-off sleep is ~1s and doubles on each retry; with
        # total=16 the cumulative wait comes out to roughly 18 hours.
        retry_strategy = requests.packages.urllib3.util.retry.Retry(
            backoff_factor=1,
            total=16,
            status_forcelist=(308, 408, 409, 429, 500, 502, 503, 504))
        self._session = requests.Session()
        adapter = requests.adapters.HTTPAdapter(
            max_retries=retry_strategy,
            pool_connections=64,
            pool_maxsize=64)
        for prefix in ('http://', 'https://'):
            self._session.mount(prefix, adapter)
Example #4
0
    def __init__(self):
        """
        Build the artifacts cache, a shared retrying HTTP session, and
        the ordered chain of storage handlers.
        """
        self._cache = artifacts_cache.get_artifacts_cache()

        # One session shared by both HTTP handlers; the same pooled,
        # retrying adapter is mounted for the http and https schemes.
        self._session = requests.Session()
        adapter = requests.adapters.HTTPAdapter(
            max_retries=_REQUEST_RETRY_STRATEGY,
            pool_connections=_REQUEST_POOL_CONNECTIONS,
            pool_maxsize=_REQUEST_POOL_MAXSIZE)
        for prefix in ('http://', 'https://'):
            self._session.mount(prefix, adapter)

        handler_chain = [
            S3Handler(),
            GCSHandler(),
            HTTPHandler(self._session),
            HTTPHandler(self._session, scheme="https"),
            LocalFileHandler(),
        ]

        self._api = InternalApi()
        self._handler = MultiHandler(handlers=handler_chain,
                                     default_handler=TrackingHandler())
Example #5
0
 def __init__(self, scheme=None):
     """
     Handler for s3 references.

     Args:
         scheme: URL scheme this handler answers to; defaults to "s3".
     """
     # Any falsy scheme (None or "") falls back to the default.
     self._scheme = "s3" if not scheme else scheme
     # NOTE(review): client and versioning flag start unset — presumably
     # resolved lazily on first use; confirm against the handler methods.
     self._s3 = None
     self._versioning_enabled = None
     self._cache = artifacts_cache.get_artifacts_cache()
Example #6
0
 def __init__(self, session, scheme=None):
     """
     Handler for http references using a caller-supplied session.

     Args:
         session: Session object used for requests made by this handler.
         scheme: URL scheme this handler answers to; defaults to "http".
     """
     # Any falsy scheme (None or "") falls back to the default.
     self._scheme = "http" if not scheme else scheme
     self._cache = artifacts_cache.get_artifacts_cache()
     self._session = session