aminalaee / fastapi-storages Goto Github PK
View Code? Open in Web Editor NEWFastAPI backend storages and ORM extensions
Home Page: https://aminalaee.dev/fastapi-storages
License: MIT License
FastAPI backend storages and ORM extensions
Home Page: https://aminalaee.dev/fastapi-storages
License: MIT License
I have a mostly complete, yet untested, storage adapter in my fork that I wanted to see if you were interested in using before I devote more time to it. It uses Pathy to add support for GCS & Azure and is a single file in place of the storages directory. In its current state it looks something like this:
import json
import os
import re
from contextlib import suppress
from pathlib import Path
from typing import BinaryIO
from pathy import Pathy
from pathy import set_client_params
from pathy import use_fs
from pathy import use_fs_cache
# Characters allowed in a sanitized file name: ASCII letters, digits, "_", ".", "-".
_filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]")
def secure_filename(filename: str) -> str:
    """
    Sanitize an arbitrary file name so it is safe to use on the filesystem.

    Adapted from Werkzeug's ``secure_filename``: path separators become
    spaces, whitespace runs collapse to single underscores, non-ASCII and
    otherwise unsafe characters are dropped, and leading/trailing dots and
    underscores are stripped.
    """
    separators = [sep for sep in (os.path.sep, os.path.altsep) if sep]
    for separator in separators:
        filename = filename.replace(separator, " ")
    collapsed = "_".join(filename.split())
    cleaned = _filename_ascii_strip_re.sub("", collapsed)
    return str(cleaned).strip("._")
class BaseStorage: # pragma: no cover
    """
    Abstract interface that concrete storage backends implement.

    Methods are intentionally left as stubs; see ``PathyStorage`` for a
    concrete implementation of this contract.
    """
    def get_name(self, name: str) -> str:
        """Return the normalized (sanitized) file name for ``name``."""
        ...
    def get_path(self, name: str) -> str:
        """Return the full path to the file identified by ``name``."""
        ...
    def get_size(self, name: str) -> int:
        """Return the size of the file in bytes."""
        ...
    def open(self, name: str) -> BinaryIO:
        """Open and return a binary file handle for ``name``."""
        ...
    def write(self, file: BinaryIO, name: str) -> str:
        """Write ``file`` (opened in binary mode) to ``name``; return the path."""
        ...
class StorageFile:
    """
    The file object returned by the storage.

    A thin handle that delegates every operation to the backing storage.
    """
    def __init__(self, *, name: str, storage: BaseStorage):
        self._name = name
        self._storage = storage
    @property
    def name(self) -> str:
        """File name including extension."""
        storage = self._storage
        return storage.get_name(self._name)
    @property
    def path(self) -> str:
        """Complete file path."""
        storage = self._storage
        return storage.get_path(self._name)
    @property
    def size(self) -> int:
        """File size in bytes."""
        storage = self._storage
        return storage.get_size(self._name)
    def open(self) -> BinaryIO:
        """Open and return a binary file handle for this file."""
        return self._storage.open(self._name)
    def write(self, file: BinaryIO) -> str:
        """Write ``file`` (opened in binary mode) to the destination."""
        return self._storage.write(file=file, name=self._name)
    def __str__(self) -> str:
        return self.path
class StorageImage(StorageFile):
    """
    A ``StorageFile`` that additionally records the image dimensions.
    """
    def __init__(
        self, *, name: str, storage: BaseStorage, height: int, width: int
    ) -> None:
        super().__init__(name=name, storage=storage)
        self._height = height
        self._width = width
    @property
    def height(self) -> int:
        """Image height in pixels."""
        return self._height
    @property
    def width(self) -> int:
        """Image width in pixels."""
        return self._width
class PathyStorage(BaseStorage):
    """
    Pathy-backed storage that works against the local filesystem or a cloud
    bucket, depending on the configured scheme.
    You might want to use this with the `FileType` type.
    """
    # Files are copied in chunks of this many bytes.
    default_chunk_size = 64 * 1024
    def __init__(self, path: str):
        root = Pathy(path)
        root.mkdir(parents=True, exist_ok=True)
        self._path = root
        # Bucket name used by the cloud subclasses (GCS/S3/Azure).
        self.bucket = os.environ.get("SQLALCHEMY_FIELDS_STORAGE_BUCKET")
        if os.environ.get("SQLALCHEMY_FIELDS_USE_FS_CACHE", None):
            use_fs_cache(os.environ.get("SQLALCHEMY_FIELDS_FS_CACHE_DIR", None))
    def get_name(self, name: str) -> str:
        """Get the normalized name of the file."""
        return secure_filename(Path(name).name)
    def get_path(self, name: str) -> str:
        """Get full path to the file."""
        full_path = self._path / Path(name)
        return str(full_path)
    def get_size(self, name: str) -> int:
        """Get file size in bytes."""
        # NOTE(review): `.size` matches Pathy's BlobStat; a plain
        # os.stat_result would use `.st_size` — confirm against local paths.
        return (self._path / name).stat().size
    def open(self, name: str) -> BinaryIO:
        """Open a file handle of the file object in binary mode."""
        target = self._path / Path(name)
        return open(target, "rb")
    def write(self, file: BinaryIO, name: str) -> str:
        """Write input file which is opened in binary mode to destination."""
        destination = self._path / Path(secure_filename(name))
        # Rewind so the whole upload is copied regardless of prior reads.
        file.seek(0, 0)
        with open(destination, "wb") as output:
            for chunk in iter(lambda: file.read(self.default_chunk_size), b""):
                output.write(chunk)
        return str(destination)
class FileSystemStorage(PathyStorage):
    """
    File system storage backend.
    You might want to use this with the `FileType` type.
    """
    def __init__(self, path: str):
        super().__init__(path)
        # Route Pathy through the plain filesystem, optionally rooted at
        # SQLALCHEMY_FIELDS_FS_DIR.
        local_root = os.environ.get("SQLALCHEMY_FIELDS_FS_DIR", None)
        use_fs(local_root)
class GCSStorage(PathyStorage):
    """
    Google Cloud Storage backend.
    You might want to use this with the `FileType` type.
    """
    def __init__(self, path: str):
        super().__init__(path)
        self.bucket = Pathy.from_bucket(self.bucket, scheme="gs")
        self._path = self.bucket / self._path
        # Imported lazily so the Google client libraries are only required
        # when this backend is actually used.
        from google.oauth2 import service_account
        raw_creds = os.environ.get("GCS_CREDENTIALS", "")
        # GCS_CREDENTIALS may hold either a path to a JSON key file or the
        # JSON payload itself; try reading it as a file first, then fall
        # back to treating the value as raw JSON.
        with suppress(TypeError, OSError, FileNotFoundError):
            raw_creds = Path(raw_creds).read_text()
        info = json.loads(raw_creds)
        credentials = service_account.Credentials.from_service_account_info(info)
        set_client_params("gs", credentials=credentials)
class S3Storage(PathyStorage):
    """
    Amazon S3 or any S3 compatible storage backend.
    You might want to use this with the `FileType` type.
    """
    def __init__(self, path: str):
        super().__init__(path)
        self.bucket = Pathy.from_bucket(self.bucket, scheme="s3")
        self._path = self.bucket / self._path
        # Credentials come from the standard AWS environment variables.
        set_client_params(
            "s3",
            key_id=os.environ.get("AWS_ACCESS_KEY_ID", ""),
            key_secret=os.environ.get("AWS_SECRET_ACCESS_KEY", ""),
        )
class AzureStorage(PathyStorage): # pragma: no cover
    """
    Azure Storage backend.
    You might want to use this with the `FileType` type.
    """
    def __init__(self, path: str):
        super().__init__(path)
        self.bucket = Pathy.from_bucket(self.bucket, scheme="azure")
        self._path = self.bucket / self._path
        # Authenticate via the standard Azure connection-string variable.
        conn = os.environ.get("AZURE_STORAGE_CONNECTION_STRING")
        set_client_params("azure", connection_string=conn)
I'm getting unclosed file error:
ResourceWarning: unclosed file <_io.BufferedReader name='/image/path.webp'> return fixed_process_value(value, dialect)
I think it's related to this part of the code:
def process_result_value(
    self, value: Any, dialect: Dialect
) -> Optional[StorageImage]:
    """
    Convert a stored file name from the database into a ``StorageImage``.

    Returns ``None`` unchanged when the column is NULL. Opens the image
    only long enough to read its dimensions.
    """
    if value is None:
        return value
    # Use a context manager so the PIL file handle is closed immediately,
    # fixing "ResourceWarning: unclosed file <_io.BufferedReader ...>".
    with Image.open(self.storage.get_path(value)) as image:
        width, height = image.width, image.height
    return StorageImage(
        name=value, storage=self.storage, height=height, width=width
    )
Here, as far as I can see, the image handle is never closed.
Currently, if I upload two DIFFERENT images with the same name, one overrides the other. To avoid this, it would be better to rename the file to filename_1, filename_2, etc. before writing it to the folder.
When generating automatic alembic migrations, imports and storage argument are missing.
Use this to set an ACL on your file such as public-read. If not set the file will be private per Amazon’s default
So, I am using fastapi-storages and sqladmin together. I was able to create model with field, but when I open admin panel I get the error: sqladmin.exceptions.NoConverterFound: Could not find field converter for column image (<class 'fastapi_storages.integrations.sqlalchemy.FileType'>)
.
Here is my code:
class Image(Base):
__tablename__ = "core_image"
id = Column(Integer, primary_key=True, index=True)
contenttype_id = Column(ForeignKey("base_contenttype.id", ondelete="CASCADE"))
object_id = Column(Integer)
contenttype = relationship("ContentType")
image = Column(FileType(storage=FileSystemStorage(path="src/media")))
class ImageAdmin(ModelView, model=Image):
column_list = [Image.contenttype, Image.object_id, Image.image]
I get the error in DETAILED view, LIST view is loading just fine.
P. S. Folder media exists.
A declarative, efficient, and flexible JavaScript library for building user interfaces.
🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.
TypeScript is a superset of JavaScript that compiles to clean JavaScript output.
An Open Source Machine Learning Framework for Everyone
The Web framework for perfectionists with deadlines.
A PHP framework for web artisans
Bring data to life with SVG, Canvas and HTML. 📊📈🎉
JavaScript (JS) is a lightweight interpreted programming language with first-class functions.
Some thing interesting about web. New door for the world.
A server is a program made to process requests and deliver data to clients.
Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.
Some thing interesting about visualization, use data art
Some thing interesting about game, make everyone happy.
We are working to build community through open source technology. NB: members must have two-factor auth.
Open source projects and samples from Microsoft.
Google ❤️ Open Source for everyone.
Alibaba Open Source for everyone
Data-Driven Documents codes.
China tencent open source team.