Giter Site home page Giter Site logo

fastapi-storages's People

Contributors

aminalaee avatar mmzeynalli avatar

Stargazers

 avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar

Watchers

 avatar  avatar  avatar  avatar  avatar

fastapi-storages's Issues

Use Pathy for storage adapters

I have a mostly complete, yet untested, storage adapter in my fork that I wanted to see if you were interested in using before I devote more time to it. It uses Pathy to add support for GCS & Azure and is a single file in place of the storages directory. In its current state it looks something like this:

import json
import os
import re
from contextlib import suppress
from pathlib import Path
from typing import BinaryIO

from pathy import Pathy
from pathy import set_client_params
from pathy import use_fs
from pathy import use_fs_cache

_filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]")


def secure_filename(filename: str) -> str:
    """
    Sanitize a file name (ported from Werkzeug's ``secure_filename``).
    """

    # Path separators become spaces so they are collapsed below.
    for separator in (os.path.sep, os.path.altsep):
        if separator:
            filename = filename.replace(separator, " ")

    # Join whitespace-separated pieces with underscores, drop every
    # character outside [A-Za-z0-9_.-], then trim stray dots/underscores.
    joined = "_".join(filename.split())
    ascii_only = _filename_ascii_strip_re.sub("", joined)
    return ascii_only.strip("._")


class BaseStorage:  # pragma: no cover
    """Abstract storage interface implemented by every backend below."""

    def get_name(self, name: str) -> str:
        """Return the normalized (sanitized) file name for ``name``."""
        ...

    def get_path(self, name: str) -> str:
        """Return the full path to the file as a string."""
        ...

    def get_size(self, name: str) -> int:
        """Return the file size in bytes."""
        ...

    def open(self, name: str) -> BinaryIO:
        """Return a binary file handle for reading the file."""
        ...

    def write(self, file: BinaryIO, name: str) -> str:
        """Write ``file`` (opened in binary mode) and return the destination path."""
        ...


class StorageFile:
    """
    The file object returned by the storage; every attribute and
    operation is resolved by delegating to the backing storage.
    """

    def __init__(self, *, name: str, storage: BaseStorage):
        self._storage = storage
        self._name = name

    @property
    def name(self) -> str:
        """File name including extension."""
        return self._storage.get_name(self._name)

    @property
    def path(self) -> str:
        """Complete file path."""
        return self._storage.get_path(self._name)

    @property
    def size(self) -> int:
        """File size in bytes."""
        return self._storage.get_size(self._name)

    def open(self) -> BinaryIO:
        """Open and return a binary file handle for this file."""
        return self._storage.open(self._name)

    def write(self, file: BinaryIO) -> str:
        """Write ``file`` (opened in binary mode) to the destination."""
        return self._storage.write(file=file, name=self._name)

    def __str__(self) -> str:
        return self.path


class StorageImage(StorageFile):
    """
    A `StorageFile` specialisation that additionally carries the image
    dimensions captured when the file was stored.
    """

    def __init__(
        self, *, name: str, storage: BaseStorage, height: int, width: int
    ) -> None:
        super().__init__(name=name, storage=storage)
        self._height = height
        self._width = width

    @property
    def width(self) -> int:
        """Image width in pixels."""
        return self._width

    @property
    def height(self) -> int:
        """Image height in pixels."""
        return self._height


class PathyStorage(BaseStorage):
    """
    Pathy storage class which stores files on the local filesystem or in the cloud.
    You might want to use this with the `FileType` type.
    """

    # Files are streamed in chunks of this size to bound memory usage.
    default_chunk_size = 64 * 1024

    def __init__(self, path: str):
        self._path = Pathy(path)
        self._path.mkdir(parents=True, exist_ok=True)
        # Bucket name used by the cloud subclasses (GCS/S3/Azure); may be None.
        self.bucket = os.environ.get("SQLALCHEMY_FIELDS_STORAGE_BUCKET")
        if os.environ.get("SQLALCHEMY_FIELDS_USE_FS_CACHE", None):
            use_fs_cache(os.environ.get("SQLALCHEMY_FIELDS_FS_CACHE_DIR", None))

    def get_name(self, name: str) -> str:
        """
        Get the normalized (sanitized) name of the file.
        """

        return secure_filename(Path(name).name)

    def get_path(self, name: str) -> str:
        """
        Get full path to the file.
        """

        return str(self._path / Path(name))

    def get_size(self, name: str) -> int:
        """
        Get file size in bytes.
        """

        # NOTE(review): Pathy.stat() returns a BlobStat exposing `.size`
        # (unlike os.stat_result's `st_size`) — confirm against the Pathy
        # version pinned by the project.
        return (self._path / name).stat().size

    def open(self, name: str) -> BinaryIO:
        """
        Open a file handle of the file object in binary mode.
        """

        path = self._path / Path(name)
        # Use the path's own open() so cloud schemes (gs://, s3://, azure://)
        # work too; the builtin open() only understands local filesystem paths.
        return path.open("rb")

    def write(self, file: BinaryIO, name: str) -> str:
        """
        Write input file which is opened in binary mode to destination.
        Returns the full destination path as a string.
        """

        filename = secure_filename(name)
        path = self._path / Path(filename)

        file.seek(0, 0)
        # Stream via path.open() (not the builtin) so cloud backends work.
        with path.open("wb") as output:
            while True:
                chunk = file.read(self.default_chunk_size)
                if not chunk:
                    break
                output.write(chunk)

        return str(path)


class FileSystemStorage(PathyStorage):
    """
    File system storage backend.
    You might want to use this with the `FileType` type.
    """

    def __init__(self, path: str):
        super().__init__(path)
        # Tell Pathy to back bucket-style paths with the local filesystem.
        fs_root = os.environ.get("SQLALCHEMY_FIELDS_FS_DIR", None)
        use_fs(fs_root)


class GCSStorage(PathyStorage):
    """
    Google Cloud Storage backend.
    You might want to use this with the `FileType` type.
    """

    def __init__(self, path: str):
        super().__init__(path)
        self.bucket = Pathy.from_bucket(self.bucket, scheme="gs")
        self._path = self.bucket / self._path

        # Imported lazily so google-auth is only needed by GCS users.
        from google.oauth2 import service_account

        raw_creds = os.environ.get("GCS_CREDENTIALS", "")
        account = service_account.Credentials
        # GCS_CREDENTIALS may hold either a path to a JSON key file or the
        # JSON itself; try reading it as a file, else use the raw value.
        with suppress(TypeError, OSError, FileNotFoundError):
            raw_creds = Path(raw_creds).read_text()
        creds = json.loads(raw_creds)
        set_client_params("gs", credentials=account.from_service_account_info(creds))


class S3Storage(PathyStorage):
    """
    Amazon S3 or any S3 compatible storage backend.
    You might want to use this with the `FileType` type.
    """

    def __init__(self, path: str):
        super().__init__(path)
        self.bucket = Pathy.from_bucket(self.bucket, scheme="s3")
        self._path = self.bucket / self._path
        # Credentials come from the standard AWS environment variables.
        set_client_params(
            "s3",
            key_id=os.environ.get("AWS_ACCESS_KEY_ID", ""),
            key_secret=os.environ.get("AWS_SECRET_ACCESS_KEY", ""),
        )


class AzureStorage(PathyStorage):  # pragma: no cover
    """
    Azure Storage backend.
    You might want to use this with the `FileType` type.
    """

    def __init__(self, path: str):
        super().__init__(path)
        self.bucket = Pathy.from_bucket(self.bucket, scheme="azure")
        self._path = self.bucket / self._path
        # Auth via the standard Azure connection-string environment variable.
        conn = os.environ.get("AZURE_STORAGE_CONNECTION_STRING")
        set_client_params("azure", connection_string=conn)

Potential memory leak

I'm getting unclosed file error:

ResourceWarning: unclosed file <_io.BufferedReader name='/image/path.webp'> return fixed_process_value(value, dialect)

I think it's related to this part of the code:

    def process_result_value(
        self, value: Any, dialect: Dialect
    ) -> Optional[StorageImage]:
        """Convert a stored file name into a `StorageImage`, passing None through."""
        if value is None:
            return value

        # Open the image in a context manager so the underlying file handle
        # is closed immediately — this is the unclosed-BufferedReader
        # ResourceWarning reported in the issue.
        with Image.open(self.storage.get_path(value)) as image:
            width, height = image.width, image.height
        return StorageImage(
            name=value, storage=self.storage, height=height, width=width
        )

As far as I can see, the image is never closed here.

Instead of overriding files, change file names.

Currently, if I upload two DIFFERENT images with the same name, one overwrites the other. To avoid this, it would be better to rename the file to filename_1, filename_2, etc. before writing it to the folder.

Fix alembic migrations

When generating automatic alembic migrations, imports and storage argument are missing.

Support AWS_DEFAULT_ACL

Use this to set an ACL on your file such as public-read. If not set the file will be private per Amazon’s default

Cannot open admin panel

So, I am using fastapi-storages and sqladmin together. I was able to create model with field, but when I open admin panel I get the error: sqladmin.exceptions.NoConverterFound: Could not find field converter for column image (<class 'fastapi_storages.integrations.sqlalchemy.FileType'>).

Here is my code:

class Image(Base):
    __tablename__ = "core_image"

    id = Column(Integer, primary_key=True, index=True)

    # Generic relation: pairs a content-type row with an arbitrary object id.
    contenttype_id = Column(ForeignKey("base_contenttype.id", ondelete="CASCADE"))
    object_id = Column(Integer)

    contenttype = relationship("ContentType")
    # fastapi-storages FileType column backed by the local filesystem.
    image = Column(FileType(storage=FileSystemStorage(path="src/media")))

# sqladmin model view for the Image model; the FileType column here is what
# triggers the NoConverterFound error on the detail view.
class ImageAdmin(ModelView, model=Image):
    column_list = [Image.contenttype, Image.object_id, Image.image]

I get the error in DETAILED view, LIST view is loading just fine.

P. S. Folder media exists.

Recommend Projects

  • React photo React

    A declarative, efficient, and flexible JavaScript library for building user interfaces.

  • Vue.js photo Vue.js

    🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.

  • Typescript photo Typescript

    TypeScript is a superset of JavaScript that compiles to clean JavaScript output.

  • TensorFlow photo TensorFlow

    An Open Source Machine Learning Framework for Everyone

  • Django photo Django

    The Web framework for perfectionists with deadlines.

  • D3 photo D3

    Bring data to life with SVG, Canvas and HTML. 📊📈🎉

Recommend Topics

  • javascript

    JavaScript (JS) is a lightweight interpreted programming language with first-class functions.

  • web

    Some thing interesting about web. New door for the world.

  • server

    A server is a program made to process requests and deliver data to clients.

  • Machine learning

    Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.

  • Game

    Some thing interesting about game, make everyone happy.

Recommend Org

  • Facebook photo Facebook

    We are working to build community through open source technology. NB: members must have two-factor auth.

  • Microsoft photo Microsoft

    Open source projects and samples from Microsoft.

  • Google photo Google

    Google ❤️ Open Source for everyone.

  • D3 photo D3

    Data-Driven Documents codes.