Example #1
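    # Parse the figshare path, resolve a provider for the target article, then
    # delete the single file if a file id is present; otherwise detach the
    # whole article from the project.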
    def delete(self, path, **kwargs):
        figshare_path = FigshareProjectPath(path)
        provider = yield from self._make_article_provider(figshare_path.article_id)
        if figshare_path.file_id:
            yield from provider.delete(str(figshare_path.child), **kwargs)
        else:
            yield from provider._remove_from_project(self.project_id)
Example #2
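    # Resolve the article provider from the second path segment; a three-part
    # path targets a file inside the article, anything shorter removes the
    # article from the project.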
    def delete(self, path, **kwargs):
        provider = yield from self._make_article_provider(path.parts[1].identifier)

        if len(path.parts) == 3:
            yield from provider.delete(path, **kwargs)
        else:
            yield from provider._remove_from_project(self.project_id)
Example #3
    def delete(self, path, **kwargs):
        provider = yield from self._make_article_provider(
            path.parts[1].identifier)

        if len(path.parts) == 3:
            yield from provider.delete(path, **kwargs)
        else:
            yield from provider._remove_from_project(self.project_id)
Example #4
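    # Upload pipeline: stream the file to a local pending location while
    # computing md5/sha1/sha256, push it to the remote provider under a
    # temporary name, rename the local and remote copies to the sha256 digest,
    # then register the new version with a signed POST and kick off parity and
    # backup tasks when enabled.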
    def upload(self, stream, path, **kwargs):
        self._create_paths()

        pending_name = str(uuid.uuid4())
        provider = self.make_provider(self.settings)
        local_pending_path = os.path.join(settings.FILE_PATH_PENDING, pending_name)
        remote_pending_path = yield from provider.validate_path('/' + pending_name)

        stream.add_writer('md5', streams.HashStreamWriter(hashlib.md5))
        stream.add_writer('sha1', streams.HashStreamWriter(hashlib.sha1))
        stream.add_writer('sha256', streams.HashStreamWriter(hashlib.sha256))

        with open(local_pending_path, 'wb') as file_pointer:
            stream.add_writer('file', file_pointer)
            yield from provider.upload(stream, remote_pending_path, check_created=False, fetch_metadata=False, **kwargs)

        complete_name = stream.writers['sha256'].hexdigest
        local_complete_path = os.path.join(settings.FILE_PATH_COMPLETE, complete_name)
        remote_complete_path = yield from provider.validate_path('/' + complete_name)

        try:
            metadata = yield from provider.metadata(remote_complete_path)
        except exceptions.MetadataError as e:
            if e.code != 404:
                raise
            metadata, _ = yield from provider.move(provider, remote_pending_path, remote_complete_path)
        else:
            yield from provider.delete(remote_pending_path)
        finally:
            metadata = metadata.serialized()

        # Due to cross volume movement in unix we leverage shutil.move which properly handles this case.
        # http://bytes.com/topic/python/answers/41652-errno-18-invalid-cross-device-link-using-os-rename#post157964
        shutil.move(local_pending_path, local_complete_path)

        response = yield from self.make_signed_request(
            'POST',
            self.build_url(path.parent.identifier, 'children'),
            expects=(200, 201),
            data=json.dumps({
                'name': path.name,
                'user': self.auth['id'],
                'settings': self.settings['storage'],
                'metadata': metadata,
                'hashes': {
                    'md5': stream.writers['md5'].hexdigest,
                    'sha1': stream.writers['sha1'].hexdigest,
                    'sha256': stream.writers['sha256'].hexdigest,
                },
                'worker': {
                    'host': os.uname()[1],
                    # TODO: Include additional information
                    'address': None,
                    'version': self.__version__,
                },
            }),
            headers={'Content-Type': 'application/json'},
        )

        created = response.status == 201
        data = yield from response.json()

        if settings.RUN_TASKS and data.pop('archive', True):
            parity.main(
                local_complete_path,
                self.parity_credentials,
                self.parity_settings,
            )
            backup.main(
                local_complete_path,
                data['version'],
                self.build_url('hooks', 'metadata') + '/',
                self.archive_credentials,
                self.archive_settings,
            )

        name = path.name

        metadata.update({
            'name': name,
            'md5': data['data']['md5'],
            'path': data['data']['path'],
            'sha256': data['data']['sha256'],
            'version': data['data']['version'],
            'downloads': data['data']['downloads'],
            'checkout': data['data']['checkout'],
        })

        path._parts[-1]._id = metadata['path'].strip('/')
        return OsfStorageFileMetadata(metadata, str(path)), created
Example #5
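    # Same upload pipeline as above; this variant records only name, path,
    # version and downloads on the returned metadata.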
    def upload(self, stream, path, **kwargs):
        self._create_paths()

        pending_name = str(uuid.uuid4())
        provider = self.make_provider(self.settings)
        local_pending_path = os.path.join(settings.FILE_PATH_PENDING, pending_name)
        remote_pending_path = yield from provider.validate_path('/' + pending_name)

        stream.add_writer('md5', streams.HashStreamWriter(hashlib.md5))
        stream.add_writer('sha1', streams.HashStreamWriter(hashlib.sha1))
        stream.add_writer('sha256', streams.HashStreamWriter(hashlib.sha256))

        with open(local_pending_path, 'wb') as file_pointer:
            stream.add_writer('file', file_pointer)
            yield from provider.upload(stream, remote_pending_path, check_created=False, fetch_metadata=False, **kwargs)

        complete_name = stream.writers['sha256'].hexdigest
        local_complete_path = os.path.join(settings.FILE_PATH_COMPLETE, complete_name)
        remote_complete_path = yield from provider.validate_path('/' + complete_name)

        try:
            metadata = yield from provider.metadata(remote_complete_path)
        except exceptions.MetadataError as e:
            if e.code != 404:
                raise
            metadata, _ = yield from provider.move(provider, remote_pending_path, remote_complete_path)
        else:
            yield from provider.delete(remote_pending_path)
        finally:
            metadata = metadata.serialized()

        # Due to cross volume movement in unix we leverage shutil.move which properly handles this case.
        # http://bytes.com/topic/python/answers/41652-errno-18-invalid-cross-device-link-using-os-rename#post157964
        shutil.move(local_pending_path, local_complete_path)

        response = yield from self.make_signed_request(
            'POST',
            self.build_url(path.parent.identifier, 'children'),
            expects=(200, 201),
            data=json.dumps({
                'name': path.name,
                'user': self.auth['id'],
                'settings': self.settings['storage'],
                'metadata': metadata,
                'hashes': {
                    'md5': stream.writers['md5'].hexdigest,
                    'sha1': stream.writers['sha1'].hexdigest,
                    'sha256': stream.writers['sha256'].hexdigest,
                },
                'worker': {
                    'host': os.uname()[1],
                    # TODO: Include additional information
                    'address': None,
                    'version': self.__version__,
                },
            }),
            headers={'Content-Type': 'application/json'},
        )

        created = response.status == 201
        data = yield from response.json()

        if settings.RUN_TASKS and data.pop('archive', True):
            parity.main(
                local_complete_path,
                self.parity_credentials,
                self.parity_settings,
            )
            backup.main(
                local_complete_path,
                data['version'],
                self.build_url('hooks', 'metadata') + '/',
                self.archive_credentials,
                self.archive_settings,
            )

        name = path.name

        metadata.update({
            'name': name,
            'path': data['data']['path'],
            'version': data['data']['version'],
            'downloads': data['data']['downloads']
        })

        return OsfStorageFileMetadata(metadata, str(path)), created
Example #6
    def upload(self, stream, path, **kwargs):
        self._create_paths()

        pending_name = str(uuid.uuid4())
        provider = self.make_provider(self.settings)
        local_pending_path = os.path.join(settings.FILE_PATH_PENDING, pending_name)
        remote_pending_path = yield from provider.validate_path("/" + pending_name)

        stream.add_writer("md5", streams.HashStreamWriter(hashlib.md5))
        stream.add_writer("sha1", streams.HashStreamWriter(hashlib.sha1))
        stream.add_writer("sha256", streams.HashStreamWriter(hashlib.sha256))

        with open(local_pending_path, "wb") as file_pointer:
            stream.add_writer("file", file_pointer)
            yield from provider.upload(stream, remote_pending_path, check_created=False, fetch_metadata=False, **kwargs)

        complete_name = stream.writers["sha256"].hexdigest
        local_complete_path = os.path.join(settings.FILE_PATH_COMPLETE, complete_name)
        remote_complete_path = yield from provider.validate_path("/" + complete_name)

        try:
            metadata = yield from provider.metadata(remote_complete_path)
        except exceptions.MetadataError as e:
            if e.code != 404:
                raise
            metadata, _ = yield from provider.move(provider, remote_pending_path, remote_complete_path)
        else:
            yield from provider.delete(remote_pending_path)
        finally:
            metadata = metadata.serialized()

        # Due to cross volume movement in unix we leverage shutil.move which properly handles this case.
        # http://bytes.com/topic/python/answers/41652-errno-18-invalid-cross-device-link-using-os-rename#post157964
        shutil.move(local_pending_path, local_complete_path)

        response = yield from self.make_signed_request(
            "POST",
            self.build_url(path.parent.identifier, "children"),
            expects=(200, 201),
            data=json.dumps(
                {
                    "name": path.name,
                    "user": self.auth["id"],
                    "settings": self.settings["storage"],
                    "metadata": metadata,
                    "hashes": {
                        "md5": stream.writers["md5"].hexdigest,
                        "sha1": stream.writers["sha1"].hexdigest,
                        "sha256": stream.writers["sha256"].hexdigest,
                    },
                    "worker": {
                        "host": os.uname()[1],
                        # TODO: Include additional information
                        "address": None,
                        "version": self.__version__,
                    },
                }
            ),
            headers={"Content-Type": "application/json"},
        )

        created = response.status == 201
        data = yield from response.json()

        if settings.RUN_TASKS and data.pop("archive", True):
            parity.main(local_complete_path, self.parity_credentials, self.parity_settings)
            backup.main(
                local_complete_path,
                data["version"],
                self.build_url("hooks", "metadata") + "/",
                self.archive_credentials,
                self.archive_settings,
            )

        name = path.name

        metadata.update(
            {
                "name": name,
                "md5": data["data"]["md5"],
                "path": data["data"]["path"],
                "sha256": data["data"]["sha256"],
                "version": data["data"]["version"],
                "downloads": data["data"]["downloads"],
                "checkout": data["data"]["checkout"],
            }
        )

        path._parts[-1]._id = metadata["path"].strip("/")
        return OsfStorageFileMetadata(metadata, str(path)), created
Example #7
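    # Variant of the upload flow above: names are built with OSFPath, the
    # signed request reports to a single callback_url instead of the children
    # endpoint, and any ProviderError from the metadata check is treated as
    # "file not yet present", triggering the move to the completed name.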
    def upload(self, stream, path, **kwargs):
        self._create_paths()

        pending_name = str(uuid.uuid4())
        pending_path = os.path.join(settings.FILE_PATH_PENDING, pending_name)

        pending_name = OSFPath('/' + pending_name).path

        stream.add_writer('md5', streams.HashStreamWriter(hashlib.md5))
        stream.add_writer('sha1', streams.HashStreamWriter(hashlib.sha1))
        stream.add_writer('sha256', streams.HashStreamWriter(hashlib.sha256))

        with open(pending_path, 'wb') as file_pointer:
            stream.add_writer('file', file_pointer)
            provider = self.make_provider(self.settings)
            yield from provider.upload(stream, pending_name, check_created=False, fetch_metadata=False, **kwargs)

        complete_name = stream.writers['sha256'].hexdigest
        complete_path = os.path.join(settings.FILE_PATH_COMPLETE, complete_name)

        complete_name = OSFPath('/' + complete_name).path

        try:
            metadata = yield from provider.metadata(complete_name)
        except exceptions.ProviderError:
            metadata = yield from provider.move(
                provider,
                {'path': pending_name},
                {'path': complete_name},
            )
        else:
            yield from provider.delete(pending_name)

        # Due to cross volume movement in unix we leverage shutil.move which properly handles this case.
        # http://bytes.com/topic/python/answers/41652-errno-18-invalid-cross-device-link-using-os-rename#post157964
        shutil.move(pending_path, complete_path)

        response = yield from self.make_signed_request(
            'POST',
            self.callback_url,
            expects=(200, 201),
            data=json.dumps({
                'auth': self.auth,
                'settings': self.settings['storage'],
                'metadata': metadata,
                'hashes': {
                    'md5': stream.writers['md5'].hexdigest,
                    'sha1': stream.writers['sha1'].hexdigest,
                    'sha256': stream.writers['sha256'].hexdigest,
                },
                'worker': {
                    'host': os.uname()[1],
                    # TODO: Include additional information
                    'address': None,
                    'version': self.__version__,
                },
                'path': path,
            }),
            headers={'Content-Type': 'application/json'},
        )

        created = response.status == 201
        data = yield from response.json()

        if settings.RUN_TASKS:
            version_id = data['version']
            parity.main(
                complete_path,
                self.parity_credentials,
                self.parity_settings,
            )
            backup.main(
                complete_path,
                version_id,
                self.callback_url,
                self.archive_credentials,
                self.archive_settings,
            )

        _, name = os.path.split(path)

        metadata.update({
            'name': name,
            'path': data['path'],
            'version': data['version'],
            'downloads': data['downloads']
        })

        return OsfStorageFileMetadata(metadata).serialized(), created