You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
38 KiB
38 KiB
文件存储服务
文档版本:v3.0
最后更新:2026-02-02
重大变更:从 MinIO 迁移到阿里云 OSS
目录
服务概述
文件存储服务(FileStorageService)是一个公共服务层,为所有文件相关的业务(attachments、project_resources、videos 等)提供统一的文件上传、下载、删除和去重功能。
职责
- 文件上传到对象存储(S3 兼容协议)
- 文件校验和计算(SHA256)
- 全局文件去重(file_checksums 表)
- 预签名 URL 生成
- 文件删除和引用计数管理
设计原则
- 统一接口:所有业务表通过此服务上传文件
- 去重优先:相同文件只存储一次
- 引用计数:跟踪文件被引用次数,便于清理
- S3 兼容:使用 boto3 SDK,支持 AWS S3、阿里云 OSS、MinIO 等所有 S3 兼容存储
支持的存储提供商
- 阿里云 OSS(生产环境推荐)
- AWS S3
- MinIO(本地开发/测试)
- 腾讯云 COS
- 其他 S3 兼容存储
核心功能
1. 文件上传(带去重)
async def upload_file(
file_content: bytes,
filename: str,
content_type: str,
category: str,
user_id: UUID
) -> Filemeta_data
功能:
- 计算文件 SHA256 校验和
- 检查 file_checksums 表是否已存在
- 如果存在,增加引用计数并返回已有文件信息
- 如果不存在,上传到对象存储并记录到 file_checksums 表
2. 预签名 URL 生成
async def get_presigned_url(
storage_path: str,
expires: int = 3600
) -> str
功能:
- 生成临时访问链接
- 支持自定义过期时间
- 用于文件下载
3. 文件删除
async def delete_file(
checksum: str
) -> None
功能:
- 减少引用计数
- 如果引用计数为 0,从对象存储删除文件
- 从 file_checksums 表删除记录
4. 引用计数管理
async def increase_reference_count(checksum: str) -> None
async def decrease_reference_count(checksum: str) -> None
功能:
- 跟踪文件被引用次数
- 支持安全删除(只有无引用时才真正删除)
数据库设计
file_checksums 表结构
CREATE TABLE file_checksums (
id UUID PRIMARY KEY,
checksum VARCHAR(64) NOT NULL UNIQUE,
file_url VARCHAR(500) NOT NULL,
file_size BIGINT NOT NULL,
mime_type VARCHAR(100) NOT NULL,
storage_provider VARCHAR(50) NOT NULL DEFAULT 'oss',
storage_path VARCHAR(500) NOT NULL,
reference_count INTEGER NOT NULL DEFAULT 1,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
last_accessed_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
-- 表和字段注释
COMMENT ON TABLE file_checksums IS '文件校验和表 - 用于全局文件去重';
COMMENT ON COLUMN file_checksums.id IS '主键 - UUID v7';
COMMENT ON COLUMN file_checksums.checksum IS 'SHA256 校验和 - 用于文件去重';
COMMENT ON COLUMN file_checksums.file_url IS '文件访问 URL';
COMMENT ON COLUMN file_checksums.file_size IS '文件大小(字节)';
COMMENT ON COLUMN file_checksums.mime_type IS 'MIME 类型';
COMMENT ON COLUMN file_checksums.storage_provider IS '存储提供商(默认:oss)';
COMMENT ON COLUMN file_checksums.storage_path IS '对象存储路径';
COMMENT ON COLUMN file_checksums.reference_count IS '引用计数 - 被引用次数';
COMMENT ON COLUMN file_checksums.created_at IS '创建时间(自动记录时区)';
COMMENT ON COLUMN file_checksums.last_accessed_at IS '最后访问时间(自动记录时区)';
COMMENT ON COLUMN file_checksums.updated_at IS '更新时间(自动记录时区)';
-- 索引
CREATE INDEX idx_file_checksums_checksum ON file_checksums (checksum);
CREATE INDEX idx_file_checksums_file_url ON file_checksums (file_url);
CREATE INDEX idx_file_checksums_reference_count ON file_checksums (reference_count);
CREATE INDEX idx_file_checksums_last_accessed ON file_checksums (last_accessed_at);
CREATE INDEX idx_file_checksums_created_at ON file_checksums (created_at);
-- 触发器:自动更新 updated_at
CREATE TRIGGER update_file_checksums_updated_at
BEFORE UPDATE ON file_checksums
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
设计说明
- 主键设计:使用 UUID v7 作为主键,应用层生成
- 唯一约束:checksum(SHA256)设置 UNIQUE 约束,用于文件去重
- 引用计数:reference_count 记录文件被引用次数,支持安全删除
- 访问追踪:last_accessed_at 记录最后访问时间,便于清理过期文件
- 存储信息:storage_provider 和 storage_path 记录文件在对象存储中的位置
- 跨表去重:所有业务表(attachments、project_resources、videos 等)都通过此表实现去重
去重流程
用户上传文件
↓
计算 SHA256 校验和
↓
查询 file_checksums 表(通过 checksum)
├─ 存在 → 增加 reference_count,返回已有 file_url
└─ 不存在 → 上传到对象存储,插入新记录
数据模型(历史版本)
说明:本节为早期实现,仅作迁移参考;当前权威定义见下文「Model 层」「Repository 层」「Schema 层」各节。
FileChecksum 模型(旧版)
# app/models/file_checksum.py
from datetime import datetime, timezone  # `timezone` was missing — defaults below raised NameError

from sqlalchemy import Column, String, BigInteger, Integer, DateTime

from app.core.database import Base


class FileChecksum(Base):
    """File-checksum record used for global deduplication (legacy declarative version)."""

    __tablename__ = "file_checksums"

    checksum = Column(String(64), primary_key=True)  # SHA256 hex digest
    file_url = Column(String(500), nullable=False)
    file_size = Column(BigInteger, nullable=False)
    mime_type = Column(String(100), nullable=False)
    # Match the DDL (NOT NULL DEFAULT 'oss'); was nullable with no default.
    storage_provider = Column(String(50), nullable=False, default='oss')  # oss / s3 / minio
    storage_path = Column(String(500), nullable=False)
    reference_count = Column(Integer, nullable=False, default=1)
    # TIMESTAMPTZ in the DDL → timezone-aware DateTime columns with UTC defaults.
    created_at = Column(DateTime(timezone=True), nullable=False,
                        default=lambda: datetime.now(timezone.utc))
    last_accessed_at = Column(DateTime(timezone=True), nullable=False,
                              default=lambda: datetime.now(timezone.utc))
FileChecksumRepository
# app/repositories/file_checksum_repository.py
from datetime import datetime, timezone
from typing import List, Optional

from sqlalchemy import and_, select
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.file_checksum import FileChecksum


class FileChecksumRepository:
    """Repository for ``file_checksums`` rows (legacy doc version).

    NOTE(review): the original snippet awaited ``commit()``/``refresh()`` on a
    synchronous ``Session`` (a runtime TypeError) and used ``timezone`` without
    importing it. Rewritten against ``AsyncSession`` + ``select()`` so it agrees
    with the canonical implementation in the Repository 层 section.
    """

    def __init__(self, db: AsyncSession):
        self.db = db

    async def create(self, file_checksum: FileChecksum) -> FileChecksum:
        """Persist a new record and return it refreshed from the DB."""
        self.db.add(file_checksum)
        await self.db.commit()
        await self.db.refresh(file_checksum)
        return file_checksum

    async def get_by_checksum(self, checksum: str) -> Optional[FileChecksum]:
        """Return the record with this SHA256 checksum, or None."""
        result = await self.db.execute(
            select(FileChecksum).where(FileChecksum.checksum == checksum)
        )
        return result.scalar_one_or_none()

    async def update(self, file_checksum: FileChecksum) -> FileChecksum:
        """Touch ``last_accessed_at`` and flush pending changes."""
        file_checksum.last_accessed_at = datetime.now(timezone.utc)
        await self.db.commit()
        await self.db.refresh(file_checksum)
        return file_checksum

    async def delete(self, checksum: str) -> None:
        """Delete the record with the given checksum (no-op when absent)."""
        result = await self.db.execute(
            select(FileChecksum).where(FileChecksum.checksum == checksum)
        )
        file_checksum = result.scalar_one_or_none()
        if file_checksum:
            await self.db.delete(file_checksum)
        await self.db.commit()

    async def get_unused_files(self, before_date: datetime) -> List[FileChecksum]:
        """Return zero-reference records not accessed since *before_date*."""
        result = await self.db.execute(
            select(FileChecksum).where(
                and_(
                    FileChecksum.reference_count == 0,
                    FileChecksum.last_accessed_at < before_date,
                )
            )
        )
        return list(result.scalars().all())
Model 层
FileChecksum 模型
# app/models/file_checksum.py
from datetime import datetime, timezone
from typing import Optional
from uuid import UUID
from sqlalchemy import Column, String, BigInteger, Integer, Index, DateTime
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
from sqlmodel import Field, SQLModel
from app.utils.id_generator import generate_uuid
class FileChecksum(SQLModel, table=True):
    """File-checksum model — backs global file deduplication.

    Each distinct file (by SHA256) is stored once; ``reference_count`` tracks
    how many business rows point at it.
    """

    __tablename__ = "file_checksums"

    # Primary key — application-generated UUID v7.
    id: UUID = Field(
        sa_column=Column(
            PG_UUID(as_uuid=True),
            primary_key=True,
            default=generate_uuid,
            nullable=False
        ),
        description="主键 - UUID v7"
    )

    # File identity and metadata.
    # NOTE: per-column `index=True` flags were removed — the named indexes in
    # __table_args__ already cover these columns; having both created redundant
    # duplicate indexes (ix_* alongside idx_*).
    checksum: str = Field(
        sa_column=Column(String(64), nullable=False, unique=True),
        description="SHA256 校验和 - 用于文件去重"
    )
    file_url: str = Field(
        sa_column=Column(String(500), nullable=False),
        description="文件访问 URL"
    )
    file_size: int = Field(
        sa_column=Column(BigInteger, nullable=False),
        description="文件大小(字节)"
    )
    mime_type: str = Field(
        sa_column=Column(String(100), nullable=False),
        description="MIME 类型"
    )

    # Storage location.
    storage_provider: str = Field(
        sa_column=Column(String(50), nullable=False, default='oss'),
        description="存储提供商(oss/s3/minio)"
    )
    storage_path: str = Field(
        sa_column=Column(String(500), nullable=False),
        description="对象存储路径"
    )

    # Reference counting (safe-delete support).
    reference_count: int = Field(
        sa_column=Column(Integer, nullable=False, default=1),
        description="引用计数 - 被引用次数"
    )

    # Timestamps — timezone-aware UTC values (TIMESTAMPTZ in the DDL).
    created_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column=Column(DateTime(timezone=True), nullable=False),
        description="创建时间(UTC)"
    )
    last_accessed_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column=Column(DateTime(timezone=True), nullable=False),
        description="最后访问时间(UTC)"
    )
    updated_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column=Column(DateTime(timezone=True), nullable=False),
        description="更新时间(UTC)"
    )

    # Named indexes — the single source of index definitions, matching the DDL.
    __table_args__ = (
        Index('idx_file_checksums_checksum', 'checksum'),
        Index('idx_file_checksums_file_url', 'file_url'),
        Index('idx_file_checksums_reference_count', 'reference_count'),
        Index('idx_file_checksums_last_accessed', 'last_accessed_at'),
        Index('idx_file_checksums_created_at', 'created_at'),
    )

    def __repr__(self) -> str:
        """Concise debug representation (checksum truncated)."""
        return f"<FileChecksum(id={self.id}, checksum='{self.checksum[:16]}...', size={self.file_size})>"
Schema 层
FileChecksum Schema
# app/schemas/file_checksum.py
from datetime import datetime
from typing import Optional
from uuid import UUID
from pydantic import BaseModel, Field, ConfigDict
class FileChecksumBase(BaseModel):
    """Shared FileChecksum fields: content hash, storage location, size, type."""
    checksum: str = Field(..., description="SHA256 校验和")
    file_url: str = Field(..., description="文件访问 URL")
    file_size: int = Field(..., description="文件大小(字节)", ge=0)
    mime_type: str = Field(..., description="MIME 类型")
    storage_provider: str = Field(..., description="存储提供商")
    storage_path: str = Field(..., description="对象存储路径")
class FileChecksumCreate(FileChecksumBase):
    """Create request — a new record starts with at least one reference."""
    reference_count: int = Field(default=1, description="初始引用计数", ge=1)
class FileChecksumUpdate(BaseModel):
    """Partial update request; fields left as None are unchanged."""
    reference_count: Optional[int] = Field(None, description="引用计数", ge=0)
    last_accessed_at: Optional[datetime] = Field(None, description="最后访问时间")
class FileChecksumResponse(FileChecksumBase):
    """API response shape; reads attributes straight off the ORM object."""
    id: UUID = Field(..., description="主键")
    reference_count: int = Field(..., description="引用计数")
    created_at: datetime = Field(..., description="创建时间")
    last_accessed_at: datetime = Field(..., description="最后访问时间")
    updated_at: datetime = Field(..., description="更新时间")
    # Pydantic v2: allow construction from ORM attributes.
    model_config = ConfigDict(from_attributes=True)
class Filemeta_data(BaseModel):
    """Upload result metadata returned by FileStorageService.upload_file.

    NOTE(review): the name breaks PascalCase (should be ``FileMetadata``), but
    it is referenced throughout the service and API layers — renaming here
    alone would be a breaking change.
    """
    file_url: str = Field(..., description="文件访问 URL")
    file_size: int = Field(..., description="文件大小(字节)")
    checksum: str = Field(..., description="SHA256 校验和")
    mime_type: str = Field(..., description="MIME 类型")
    extension: str = Field(..., description="文件扩展名")
    storage_provider: str = Field(..., description="存储提供商")
    storage_path: str = Field(..., description="对象存储路径")
Repository 层
FileChecksumRepository
# app/repositories/file_checksum_repository.py
from datetime import datetime, timezone
from typing import Optional, List
from uuid import UUID
from sqlalchemy import select, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.file_checksum import FileChecksum
import logging
logger = logging.getLogger(__name__)
class FileChecksumRepository:
    """Async repository for ``file_checksums`` rows.

    Thin data-access layer: each method commits its own unit of work on the
    injected ``AsyncSession``.
    """

    def __init__(self, db: AsyncSession):
        self.db = db

    async def create(self, file_checksum: FileChecksum) -> FileChecksum:
        """Persist a new record, refresh it from the DB, and log the insert."""
        self.db.add(file_checksum)
        await self.db.commit()
        await self.db.refresh(file_checksum)
        # Log only a checksum prefix to keep log lines short.
        logger.info("创建文件校验和记录: checksum=%s, size=%d",
                    file_checksum.checksum[:16], file_checksum.file_size)
        return file_checksum

    async def get_by_id(self, file_checksum_id: UUID) -> Optional[FileChecksum]:
        """Return the record with this primary key, or None."""
        result = await self.db.execute(
            select(FileChecksum).where(FileChecksum.id == file_checksum_id)
        )
        return result.scalar_one_or_none()

    async def get_by_checksum(self, checksum: str) -> Optional[FileChecksum]:
        """Return the record with this SHA256 checksum, or None."""
        result = await self.db.execute(
            select(FileChecksum).where(FileChecksum.checksum == checksum)
        )
        return result.scalar_one_or_none()

    async def update(self, file_checksum: FileChecksum) -> FileChecksum:
        """Flush pending changes on *file_checksum* and return it refreshed.

        NOTE(review): ``updated_at`` is set here AND by the DB trigger defined
        in the DDL — redundant but harmless; confirm which is authoritative.
        """
        file_checksum.updated_at = datetime.now(timezone.utc)
        await self.db.commit()
        await self.db.refresh(file_checksum)
        return file_checksum

    async def delete(self, file_checksum_id: UUID) -> None:
        """Delete the record with this primary key (no-op when absent)."""
        result = await self.db.execute(
            select(FileChecksum).where(FileChecksum.id == file_checksum_id)
        )
        file_checksum = result.scalar_one_or_none()
        if file_checksum:
            await self.db.delete(file_checksum)
            await self.db.commit()
            logger.info("删除文件校验和记录: id=%s, checksum=%s",
                        file_checksum_id, file_checksum.checksum[:16])

    async def get_unused_files(self, before_date: datetime) -> List[FileChecksum]:
        """Return zero-reference records not accessed since *before_date*."""
        result = await self.db.execute(
            select(FileChecksum).where(
                and_(
                    FileChecksum.reference_count == 0,
                    FileChecksum.last_accessed_at < before_date
                )
            )
        )
        return list(result.scalars().all())

    async def list_by_storage_provider(
        self,
        storage_provider: str,
        skip: int = 0,
        limit: int = 100
    ) -> List[FileChecksum]:
        """Page through records stored on a given provider (oss/s3/minio)."""
        result = await self.db.execute(
            select(FileChecksum)
            .where(FileChecksum.storage_provider == storage_provider)
            .offset(skip)
            .limit(limit)
        )
        return list(result.scalars().all())
Service 层
FileStorageService
# app/services/file_storage_service.py
import hashlib
import os
from datetime import datetime, timedelta, timezone
from typing import Optional
from uuid import UUID
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.file_checksum import FileChecksum
from app.repositories.file_checksum_repository import FileChecksumRepository
from app.schemas.file_checksum import Filemeta_data
from app.core.storage import StorageService
from app.core.exceptions import StorageError
from app.core.config import settings
import logging
logger = logging.getLogger(__name__)
class FileStorageService:
    """File storage service — upload, presigned download, delete, global dedup.

    All file-bearing business tables (attachments, project_resources, videos…)
    go through this service so identical content is stored exactly once and
    tracked by a reference count in the ``file_checksums`` table.
    """

    def __init__(self, db: AsyncSession):
        self.db = db
        self.checksum_repo = FileChecksumRepository(db)
        self.storage = StorageService()

    async def upload_file(
        self,
        file_content: bytes,
        filename: str,
        content_type: str,
        category: str,
        user_id: UUID
    ) -> Filemeta_data:
        """Upload a file with deduplication.

        Args:
            file_content: raw file bytes.
            filename: original filename (used only for its extension).
            content_type: MIME type.
            category: logical category — becomes the storage-path prefix.
            user_id: uploading user's id — part of the storage path.

        Returns:
            Filemeta_data: metadata of the stored (or reused) file.

        Raises:
            StorageError: when the upload to object storage fails.
        """
        # 1. The content hash identifies the file globally.
        checksum = self._calculate_checksum(file_content)
        # Needed on both branches — compute once (was duplicated per branch).
        extension = os.path.splitext(filename)[1]

        # 2. Dedup lookup.
        # NOTE(review): get-then-insert is racy under concurrent uploads of the
        # same content; the UNIQUE(checksum) constraint may raise on insert.
        # TODO: catch IntegrityError and re-read instead of failing the request.
        existing = await self.checksum_repo.get_by_checksum(checksum)
        if existing:
            # Already stored: bump the reference count and reuse the record.
            await self.increase_reference_count(existing.id)
            return Filemeta_data(
                file_url=existing.file_url,
                file_size=existing.file_size,
                checksum=existing.checksum,
                mime_type=existing.mime_type,
                extension=extension,
                storage_provider=existing.storage_provider,
                storage_path=existing.storage_path
            )

        # 3. New content: push the bytes to object storage.
        object_name = f"{category}/{user_id}/{checksum}{extension}"
        try:
            file_url = await self.storage.upload_bytes(
                data=file_content,
                object_name=object_name,
                content_type=content_type
            )
        except Exception as e:
            logger.error("文件上传失败: %s", str(e))
            raise StorageError(f"文件上传失败: {str(e)}")

        # 4. Record it in the dedup table with an initial reference count of 1.
        file_checksum = FileChecksum(
            checksum=checksum,
            file_url=file_url,
            file_size=len(file_content),
            mime_type=content_type,
            storage_provider=settings.STORAGE_PROVIDER,
            storage_path=object_name,
            reference_count=1
        )
        await self.checksum_repo.create(file_checksum)

        return Filemeta_data(
            file_url=file_url,
            file_size=len(file_content),
            checksum=checksum,
            mime_type=content_type,
            extension=extension,
            storage_provider=settings.STORAGE_PROVIDER,
            storage_path=object_name
        )

    async def get_by_checksum(self, checksum: str) -> Optional[FileChecksum]:
        """Return the dedup record for *checksum*, or None."""
        return await self.checksum_repo.get_by_checksum(checksum)

    async def get_presigned_url(
        self,
        storage_path: str,
        expires: int = 3600
    ) -> str:
        """Return a temporary download URL for *storage_path*.

        Args:
            storage_path: object-storage key.
            expires: TTL in seconds (default one hour).
        """
        return await self.storage.get_presigned_url(storage_path, expires)

    async def increase_reference_count(self, file_checksum_id: UUID) -> None:
        """Increment the reference count and touch ``last_accessed_at``."""
        file_checksum = await self.checksum_repo.get_by_id(file_checksum_id)
        if file_checksum:
            file_checksum.reference_count += 1
            file_checksum.last_accessed_at = datetime.now(timezone.utc)
            await self.checksum_repo.update(file_checksum)

    async def decrease_reference_count(self, file_checksum_id: UUID) -> None:
        """Decrement the reference count; delete the file once it reaches zero."""
        file_checksum = await self.checksum_repo.get_by_id(file_checksum_id)
        if not file_checksum:
            return
        file_checksum.reference_count -= 1
        if file_checksum.reference_count <= 0:
            # No references left: best-effort delete from object storage…
            try:
                await self.storage.delete_file(file_checksum.storage_path)
            except Exception as e:
                # NOTE(review): on storage failure the DB row is still removed
                # below, leaving an orphaned object — confirm this is intended.
                logger.error("删除文件失败: %s", str(e))
            # …then drop the dedup record.
            await self.checksum_repo.delete(file_checksum.id)
        else:
            await self.checksum_repo.update(file_checksum)

    def _calculate_checksum(self, file_content: bytes) -> str:
        """Return the SHA256 hex digest of *file_content*."""
        return hashlib.sha256(file_content).hexdigest()

    async def cleanup_unused_files(self, days: int = 30) -> int:
        """Delete zero-reference files not accessed for *days* days.

        Args:
            days: age threshold in days.

        Returns:
            int: number of files actually deleted (failures are logged and skipped).
        """
        cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
        unused_files = await self.checksum_repo.get_unused_files(cutoff_date)
        deleted_count = 0
        for file_checksum in unused_files:
            try:
                await self.storage.delete_file(file_checksum.storage_path)
                await self.checksum_repo.delete(file_checksum.id)
                deleted_count += 1
            except Exception as e:
                logger.error("清理文件失败 %s: %s", file_checksum.checksum[:16], str(e))
        return deleted_count
API 层
FileStorage API
# app/api/v1/file_storage.py
from typing import List
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Query
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_session
from app.api.deps import get_current_user
from app.schemas.file_checksum import FileChecksumResponse, Filemeta_data
from app.schemas.user import UserResponse
from app.services.file_storage_service import FileStorageService
from app.core.exceptions import StorageError
router = APIRouter()


@router.post("/upload", response_model=Filemeta_data, summary="上传文件(带去重)")
async def upload_file(
    file: UploadFile = File(..., description="上传的文件"),
    category: str = Query(..., description="文件分类"),
    db: AsyncSession = Depends(get_session),
    current_user: UserResponse = Depends(get_current_user)
):
    """Upload a file to object storage with automatic deduplication.

    Delegates to FileStorageService.upload_file; maps StorageError to HTTP 500.
    """
    # NOTE(review): UploadFile.filename / content_type may be None — confirm
    # upstream validation before relying on them here.
    service = FileStorageService(db)
    try:
        content = await file.read()
        file_meta = await service.upload_file(
            file_content=content,
            filename=file.filename,
            content_type=file.content_type,
            category=category,
            user_id=current_user.id
        )
        return file_meta
    except StorageError as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/checksum/{checksum}", response_model=FileChecksumResponse, summary="根据校验和查询文件")
async def get_file_by_checksum(
    checksum: str,
    db: AsyncSession = Depends(get_session),
    current_user: UserResponse = Depends(get_current_user)
):
    """Look a file record up by its SHA256 checksum; 404 when absent."""
    service = FileStorageService(db)
    file_checksum = await service.get_by_checksum(checksum)
    if not file_checksum:
        raise HTTPException(status_code=404, detail="文件不存在")
    return file_checksum
@router.get("/presigned-url", response_model=dict, summary="获取预签名 URL")
async def get_presigned_url(
    storage_path: str = Query(..., description="对象存储路径"),
    expires: int = Query(3600, description="过期时间(秒)", ge=60, le=86400),
    db: AsyncSession = Depends(get_session),
    current_user: UserResponse = Depends(get_current_user)
):
    """Return a temporary (presigned) access URL for a stored object."""
    service = FileStorageService(db)
    try:
        url = await service.get_presigned_url(storage_path, expires)
        return {"url": url, "expires_in": expires}
    except Exception as e:
        # NOTE(review): the raw exception text is echoed to the client —
        # consider a generic message to avoid leaking internals.
        raise HTTPException(status_code=500, detail=f"生成预签名 URL 失败: {str(e)}")
@router.post("/cleanup", response_model=dict, summary="清理无引用文件")
async def cleanup_unused_files(
    days: int = Query(30, description="过期天数", ge=1, le=365),
    db: AsyncSession = Depends(get_session),
    current_user: UserResponse = Depends(get_current_user)
):
    """Delete files that have had zero references for at least *days* days.

    NOTE(review): currently any authenticated user can trigger this.
    """
    # TODO: 添加管理员权限检查
    service = FileStorageService(db)
    try:
        deleted_count = await service.cleanup_unused_files(days)
        return {"deleted_count": deleted_count, "message": f"成功清理 {deleted_count} 个文件"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"清理失败: {str(e)}")
对象存储集成
StorageService 接口(boto3 + S3 兼容协议)
# app/core/storage.py
from datetime import timedelta
from io import BytesIO
from typing import Optional
import logging
import boto3
from botocore.exceptions import ClientError
from botocore.client import Config
from app.core.config import settings
from app.core.exceptions import StorageError
logger = logging.getLogger(__name__)
class StorageService:
    """Object-storage service built on boto3 (S3-compatible protocol).

    Supported providers: Aliyun OSS, AWS S3, MinIO (local dev/test),
    Tencent COS, and any other S3-compatible store.

    NOTE(review): the public methods are ``async`` but the underlying boto3
    calls are synchronous and will block the event loop — consider running
    them in a thread executor; confirm acceptable for current load.
    """

    def __init__(self):
        # Build the boto3 S3 client from application settings.
        self.s3_client = boto3.client(
            's3',
            endpoint_url=settings.S3_ENDPOINT_URL,
            aws_access_key_id=settings.S3_ACCESS_KEY_ID,
            aws_secret_access_key=settings.S3_SECRET_ACCESS_KEY,
            region_name=settings.S3_REGION,
            config=Config(
                signature_version='s3v4',
                s3={'addressing_style': 'virtual'}  # virtual-hosted style (recommended for OSS)
            )
        )
        self.bucket_name = settings.S3_BUCKET_NAME
        # NOTE(review): performs network I/O in the constructor — instantiation
        # can stall or log errors if the endpoint is unreachable.
        self._ensure_bucket_exists()

    def _ensure_bucket_exists(self):
        """Create the configured bucket if it does not already exist.

        Failures are logged, not raised — later operations will surface errors.
        """
        try:
            self.s3_client.head_bucket(Bucket=self.bucket_name)
            logger.info("Bucket 已存在: %s", self.bucket_name)
        except ClientError as e:
            error_code = e.response['Error']['Code']
            if error_code == '404':
                # Bucket missing — try to create it.
                try:
                    self.s3_client.create_bucket(Bucket=self.bucket_name)
                    logger.info("创建 Bucket: %s", self.bucket_name)
                except ClientError as create_error:
                    logger.error("创建 Bucket 失败: %s", str(create_error))
            else:
                logger.error("检查 Bucket 失败: %s", str(e))

    async def upload_bytes(
        self,
        data: bytes,
        object_name: str,
        content_type: Optional[str] = None
    ) -> str:
        """Upload raw bytes to object storage.

        Args:
            data: file content.
            object_name: storage key.
            content_type: MIME type (omitted when None).

        Returns:
            str: public access URL for the object.

        Raises:
            StorageError: when the underlying put_object call fails.
        """
        try:
            extra_args = {}
            if content_type:
                extra_args['ContentType'] = content_type
            self.s3_client.put_object(
                Bucket=self.bucket_name,
                Key=object_name,
                Body=data,
                **extra_args
            )
            # Prefer the configured public/CDN base URL when set.
            if settings.S3_PUBLIC_URL:
                return f"{settings.S3_PUBLIC_URL}/{object_name}"
            else:
                # NOTE(review): this fallback builds a path-style URL while the
                # client is configured for virtual-hosted addressing — verify
                # this URL form is actually reachable on the target provider.
                return f"{settings.S3_ENDPOINT_URL}/{self.bucket_name}/{object_name}"
        except ClientError as e:
            logger.error("上传文件失败: %s", str(e))
            raise StorageError(f"上传文件失败: {str(e)}")

    async def get_presigned_url(
        self,
        object_name: str,
        expires: int = 3600
    ) -> str:
        """Generate a presigned GET URL for temporary access.

        Args:
            object_name: storage key.
            expires: TTL in seconds.

        Raises:
            StorageError: when URL generation fails.
        """
        try:
            url = self.s3_client.generate_presigned_url(
                'get_object',
                Params={
                    'Bucket': self.bucket_name,
                    'Key': object_name
                },
                ExpiresIn=expires
            )
            return url
        except ClientError as e:
            logger.error("生成预签名 URL 失败: %s", str(e))
            raise StorageError(f"生成预签名 URL 失败: {str(e)}")

    async def delete_file(self, object_name: str) -> None:
        """Delete one object from storage.

        Raises:
            StorageError: when the delete call fails.
        """
        try:
            self.s3_client.delete_object(
                Bucket=self.bucket_name,
                Key=object_name
            )
        except ClientError as e:
            logger.error("删除文件失败: %s", str(e))
            raise StorageError(f"删除文件失败: {str(e)}")

    async def file_exists(self, object_name: str) -> bool:
        """Return True when a HEAD on the object succeeds.

        NOTE(review): any ClientError (including auth/permission errors) is
        reported as "missing" — consider narrowing to 404.
        """
        try:
            self.s3_client.head_object(
                Bucket=self.bucket_name,
                Key=object_name
            )
            return True
        except ClientError:
            return False
使用示例
示例1:附件服务使用
# app/services/attachment_service.py
from uuid import UUID
from fastapi import UploadFile
from sqlalchemy.ext.asyncio import AsyncSession
from app.services.file_storage_service import FileStorageService
from app.models.attachment import Attachment
class AttachmentService:
    """Example consumer: attachment uploads routed through FileStorageService."""

    def __init__(self, db: AsyncSession):
        self.db = db
        self.file_storage = FileStorageService(db)

    async def upload_attachment(
        self,
        file: UploadFile,
        user_id: UUID,
        related_type: str,
        related_id: UUID,
        attachment_purpose: str
    ) -> Attachment:
        """Upload an attachment (deduplicated) and create its business record."""
        # Upload via the shared storage service — dedup happens inside.
        content = await file.read()
        file_meta = await self.file_storage.upload_file(
            file_content=content,
            filename=file.filename,
            content_type=file.content_type,
            category='attachment_document',
            user_id=user_id
        )
        # Persist the attachment row pointing at the (possibly shared) file.
        attachment = Attachment(
            user_id=user_id,
            related_type=related_type,
            related_id=related_id,
            attachment_purpose=attachment_purpose,
            file_url=file_meta.file_url,
            file_size=file_meta.file_size,
            file_type=file_meta.mime_type,
            checksum=file_meta.checksum,
            storage_provider=file_meta.storage_provider,
            storage_path=file_meta.storage_path
        )
        self.db.add(attachment)
        await self.db.commit()
        await self.db.refresh(attachment)
        return attachment
示例2:项目素材服务使用
# app/services/project_resource_service.py
from uuid import UUID
from fastapi import UploadFile
from sqlalchemy.ext.asyncio import AsyncSession
from app.services.file_storage_service import FileStorageService
from app.models.project_resource import ProjectResource
class ProjectResourceService:
    """Example consumer: project resource uploads routed through FileStorageService."""

    def __init__(self, db: AsyncSession):
        self.db = db
        self.file_storage = FileStorageService(db)

    async def upload_resource(
        self,
        file: UploadFile,
        project_id: UUID,
        user_id: UUID,
        resource_type: str
    ) -> ProjectResource:
        """Upload a project resource (deduplicated) and create its record."""
        # Upload via the shared storage service — dedup happens inside.
        content = await file.read()
        file_meta = await self.file_storage.upload_file(
            file_content=content,
            filename=file.filename,
            content_type=file.content_type,
            category='project_resource',
            user_id=user_id
        )
        # Persist the resource row pointing at the (possibly shared) file.
        resource = ProjectResource(
            project_id=project_id,
            user_id=user_id,
            resource_type=resource_type,
            file_url=file_meta.file_url,
            file_size=file_meta.file_size,
            file_type=file_meta.mime_type,
            checksum=file_meta.checksum,
            storage_provider=file_meta.storage_provider,
            storage_path=file_meta.storage_path
        )
        self.db.add(resource)
        await self.db.commit()
        await self.db.refresh(resource)
        return resource
配置
环境变量
# S3 兼容存储配置(支持 OSS/S3/MinIO)
S3_ENDPOINT_URL=https://oss-cn-hangzhou.aliyuncs.com # 阿里云 OSS endpoint
S3_ACCESS_KEY_ID=your_access_key_id
S3_SECRET_ACCESS_KEY=your_secret_access_key
S3_REGION=cn-hangzhou # 区域
S3_BUCKET_NAME=jointo
S3_PUBLIC_URL=https://jointo.oss-cn-hangzhou.aliyuncs.com # 公开访问 URL(可选,用于 CDN)
# 存储提供商标识(用于记录)
STORAGE_PROVIDER=oss # oss/s3/minio
不同环境配置示例
生产环境(阿里云 OSS)
S3_ENDPOINT_URL=https://oss-cn-hangzhou.aliyuncs.com
S3_ACCESS_KEY_ID=LTAI5t...
S3_SECRET_ACCESS_KEY=xxx...
S3_REGION=cn-hangzhou
S3_BUCKET_NAME=jointo-prod
S3_PUBLIC_URL=https://cdn.jointo.com # 使用 CDN 加速
STORAGE_PROVIDER=oss
开发环境(本地 MinIO)
S3_ENDPOINT_URL=http://localhost:9000
S3_ACCESS_KEY_ID=minioadmin
S3_SECRET_ACCESS_KEY=minioadmin
S3_REGION=us-east-1
S3_BUCKET_NAME=jointo-dev
S3_PUBLIC_URL=http://localhost:9000/jointo-dev
STORAGE_PROVIDER=minio
AWS S3
S3_ENDPOINT_URL=https://s3.amazonaws.com
S3_ACCESS_KEY_ID=AKIA...
S3_SECRET_ACCESS_KEY=xxx...
S3_REGION=us-east-1
S3_BUCKET_NAME=jointo-prod
S3_PUBLIC_URL=https://jointo-prod.s3.amazonaws.com
STORAGE_PROVIDER=s3
配置类
# app/core/config.py
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    """Application settings for the S3-compatible storage layer."""

    # Pydantic v2 style: model_config replaces the legacy inner `class Config`,
    # consistent with the ConfigDict usage in the schema layer.
    model_config = SettingsConfigDict(env_file=".env")

    # S3-compatible storage (OSS / S3 / MinIO / COS)
    S3_ENDPOINT_URL: str
    S3_ACCESS_KEY_ID: str
    S3_SECRET_ACCESS_KEY: str
    S3_REGION: str = "cn-hangzhou"
    S3_BUCKET_NAME: str = "jointo"
    S3_PUBLIC_URL: str = ""  # optional public/CDN base URL

    # Provider tag recorded per file in file_checksums.storage_provider
    STORAGE_PROVIDER: str = "oss"  # oss / s3 / minio


settings = Settings()
定时任务
清理无引用文件
# app/tasks/cleanup_files.py
import asyncio
import logging

from celery import Celery
from celery.schedules import crontab  # was missing — crontab() below raised NameError
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.celery_app import celery_app
from app.core.database import async_session_maker
from app.services.file_storage_service import FileStorageService

logger = logging.getLogger(__name__)


@celery_app.task(name="cleanup_unused_files")
def cleanup_unused_files():
    """Delete files with zero references not accessed for 30 days.

    Celery does not execute ``async def`` task functions natively (the original
    coroutine was never awaited), so the async work is driven with
    ``asyncio.run`` from a regular synchronous task.
    """

    async def _run() -> dict:
        # One DB session per task run.
        async with async_session_maker() as db:
            file_storage = FileStorageService(db)
            deleted_count = await file_storage.cleanup_unused_files(days=30)
            logger.info("清理了 %d 个无引用文件", deleted_count)
            return {"deleted_count": deleted_count}

    try:
        return asyncio.run(_run())
    except Exception as e:
        logger.error("清理文件失败: %s", str(e))
        raise


# Beat schedule: run daily at 03:00.
celery_app.conf.beat_schedule = {
    'cleanup-unused-files': {
        'task': 'cleanup_unused_files',
        'schedule': crontab(hour=3, minute=0),
    },
}
异常定义
StorageError
# app/core/exceptions.py
class StorageError(Exception):
    """Raised when an object-storage operation fails."""
相关文档
变更记录
v3.0 (2026-02-02)
- 重大变更:从 MinIO 迁移到 boto3(S3 兼容协议)
- 使用 boto3 SDK 替代 minio SDK
- 支持多种 S3 兼容存储(阿里云 OSS、AWS S3、MinIO、腾讯云 COS 等)
- 更新配置项(S3_* 替代 MINIO_*)
- 更新 StorageService 实现
- 添加不同环境配置示例
- 更新依赖(boto3 替代 minio)
v2.1 (2026-02-02)
- 修复 Model 层时间戳字段类型定义(明确使用 DateTime(timezone=True))
- 为 FileChecksum 模型添加 __repr__ 方法
- 更新 Schema 层为 Pydantic v2 风格(使用 ConfigDict)
- 为 Repository 层添加日志记录(create、delete 操作)
- 统一日志格式为 %-formatting(符合 logging.md 规范)
- 修正数据库注释语法(使用 COMMENT ON 而非 COMMENT)
- 添加 updated_at 触发器
v2.0 (2026-01-28)
- 全面重构,符合 jointo-tech-stack 规范
- 使用 SQLModel 替代 SQLAlchemy Base
- 使用 UUID v7 作为主键,checksum 作为唯一索引
- 使用 AsyncSession 和 async/await 模式
- 补充完整的 Schema 层定义
- 补充完整的 API 层实现
- 规范化错误处理和日志记录
- 更新配置类和环境变量
- 更新定时任务实现
v1.0 (2025-01-27)
- 初始版本
- 实现文件上传、下载、删除功能
- 实现全局去重机制
- 实现引用计数管理
- 支持 MinIO 对象存储
文档版本:v3.0
最后更新:2026-02-02