change to mysql
This commit is contained in:
File diff suppressed because it is too large
Load Diff
179
assets/generate_and_save_coupons.py
Normal file
179
assets/generate_and_save_coupons.py
Normal file
@@ -0,0 +1,179 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
"""
|
||||||
|
Script to generate and save coupons to the database
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import random
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
# Add the backend directory to the path so we can import modules
|
||||||
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
||||||
|
|
||||||
|
# Import required modules
|
||||||
|
from sqlalchemy import create_engine
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
from sqlalchemy.exc import SQLAlchemyError
|
||||||
|
|
||||||
|
from backend.app.admin.model.coupon import Coupon
|
||||||
|
from backend.utils.snowflake import snowflake
|
||||||
|
from backend.core.conf import settings, get_db_uri
|
||||||
|
|
||||||
|
|
||||||
|
def generate_coupon_codes(prefix: str, quantity: int) -> list:
    """
    Generate sequential coupon codes with the given prefix.

    Codes are always 6 characters total: the uppercase prefix followed by a
    zero-padded incremental number starting at 0.
    Example: A00000, TEST00, XYZ999

    Args:
        prefix (str): The letter prefix for the coupon codes (uppercase, 1-5 chars)
        quantity (int): Number of coupon codes to generate

    Returns:
        list: List of generated coupon codes

    Raises:
        ValueError: If the prefix is empty/too long/not uppercase letters,
            quantity is non-positive, or quantity exceeds the number space
            left by the prefix.
    """
    # Check length first: ''.isalpha() is False, so an empty prefix would
    # otherwise raise the misleading "uppercase letters only" error.
    if len(prefix) == 0 or len(prefix) > 5:
        raise ValueError("Prefix must be 1-5 characters long")

    if not prefix.isalpha() or not prefix.isupper():
        raise ValueError("Prefix must be uppercase letters only")

    if quantity <= 0:
        raise ValueError("Quantity must be greater than 0")

    # Calculate number of digits based on prefix length (total 6 characters)
    num_digits = 6 - len(prefix)

    # Maximum possible combinations
    max_combinations = 10 ** num_digits

    if quantity > max_combinations:
        raise ValueError(f"With prefix '{prefix}' (length {len(prefix)}), can only generate {max_combinations} unique codes (0 to {max_combinations - 1})")

    # Incremental numbers from 0, zero-padded to fill the 6-char code.
    return [f"{prefix}{i:0{num_digits}d}" for i in range(quantity)]
|
||||||
|
|
||||||
|
|
||||||
|
def save_coupons_to_db(prefix: str, quantity: int, coupon_type: str, points: int, expire_days: int = None):
    """
    Generate and save coupons to the database.

    Coupon codes are always 6 characters total:
    - 1-letter prefix: 5 digits (up to 100000 codes: A00000-A99999)
    - 4-letter prefix: 2 digits (up to 100 codes: TEST00-TEST99)
    - 5-letter prefix: 1 digit (up to 10 codes: ABCDE0-ABCDE9)

    Errors are reported to stdout rather than raised (best-effort script
    behavior); the session is rolled back on any failure and always closed.

    Args:
        prefix (str): The letter prefix for the coupon codes
        quantity (int): Number of coupon codes to generate
        coupon_type (str): Type of the coupons
        points (int): Points value of the coupons
        expire_days (int, optional): Days until expiration. If None, no expiration.
    """
    # The app config uses the async mysql+asyncmy driver, which a plain
    # create_engine() cannot use — swap in the synchronous connector.
    db_url = get_db_uri(settings)
    sync_db_url = db_url.replace('mysql+asyncmy', 'mysql+mysqlconnector')

    db = None
    try:
        engine = create_engine(sync_db_url, echo=False)
        SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        db = SessionLocal()

        # Generate coupon codes
        codes = generate_coupon_codes(prefix, quantity)

        # Create coupon objects
        coupons = []
        for code in codes:
            # Use our own snowflake generator for IDs rather than the
            # model's auto-generated value.
            coupon_id = snowflake.generate()

            # Calculate expiration date if needed
            expires_at = None
            if expire_days is not None and expire_days > 0:
                expires_at = datetime.now() + timedelta(days=expire_days)

            coupon = Coupon(
                code=code,
                type=coupon_type,
                points=points,
                expires_at=expires_at
            )
            # Set the id manually after creation
            coupon.id = coupon_id
            coupons.append(coupon)

        # Bulk insert coupons
        db.add_all(coupons)
        db.commit()

        print(f"Successfully saved {len(coupons)} coupons to the database.")
        print(f"Prefix: {prefix}, Type: {coupon_type}, Points: {points}")
        if expire_days:
            print(f"Expires in: {expire_days} days")

        # Display first 5 coupons as examples
        print("\nSample coupons generated:")
        for coupon in coupons[:5]:
            print(f"  ID: {coupon.id}, Code: {coupon.code}")
    except SQLAlchemyError as e:
        print(f"Database error: {e}")
        if db is not None:
            db.rollback()
    except Exception as e:
        print(f"Error: {e}")
        # Fixed: previously only the SQLAlchemyError path rolled back,
        # leaving a dirty session on other failures.
        if db is not None:
            db.rollback()
    finally:
        # Always release the session — the original closed it per-branch
        # and leaked it if a success-path print raised.
        if db is not None:
            db.close()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Demonstrate generating and saving coupons with example prefixes."""
    print("Coupon Generator and Database Saver")
    print("=" * 40)

    # Example: Generate and save coupons with different prefixes
    try:
        # Single character prefix (5 digits, incremental from 00000)
        # print("Generating coupons with single character prefix 'A'...")
        # save_coupons_to_db('A', 5, 'NORMAL', 100, 30)
        # print("\n" + "-" * 40 + "\n")

        # 3-character prefix (3 digits, incremental from 000).
        # Fixed: the message previously claimed a 4-character 'TEST'
        # prefix while the call actually uses the 3-character 'VIP'.
        print("Generating coupons with 3-character prefix 'VIP'...")
        save_coupons_to_db('VIP', 5, 'test', 1000, 60)
        print("\n" + "-" * 40 + "\n")

        # 3-character prefix (3 digits, incremental from 000)
        # print("Generating coupons with 3-character prefix 'XYZ'...")
        # save_coupons_to_db('XYZ', 3, 'SPECIAL', 500, 15)
        # print("\n" + "-" * 40 + "\n")

        # 5-character prefix (1 digit, incremental from 0)
        # print("Generating coupons with 5-character prefix 'ABCDE'...")
        # save_coupons_to_db('ABCDE', 5, 'PREMIUM', 2000, 90)

    except Exception as e:
        print(f"Error in main: {e}")


if __name__ == "__main__":
    main()
|
||||||
@@ -5,7 +5,7 @@
|
|||||||
script_location = alembic
|
script_location = alembic
|
||||||
|
|
||||||
# template used to generate migration files
|
# template used to generate migration files
|
||||||
file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(hour).2d-%%(minute).2d_%%(rev)s_%%(slug)s
|
# file_template = %%(rev)s_%%(slug)s
|
||||||
|
|
||||||
# sys.path path, will be prepended to sys.path if present.
|
# sys.path path, will be prepended to sys.path if present.
|
||||||
# defaults to the current working directory.
|
# defaults to the current working directory.
|
||||||
@@ -21,7 +21,7 @@ prepend_sys_path = .
|
|||||||
|
|
||||||
# max length of characters to apply to the
|
# max length of characters to apply to the
|
||||||
# "slug" field
|
# "slug" field
|
||||||
# truncate_slug_length = 40
|
# max_length = 40
|
||||||
|
|
||||||
# set to 'true' to run the environment during
|
# set to 'true' to run the environment during
|
||||||
# the 'revision' command, regardless of autogenerate
|
# the 'revision' command, regardless of autogenerate
|
||||||
@@ -32,25 +32,25 @@ prepend_sys_path = .
|
|||||||
# versions/ directory
|
# versions/ directory
|
||||||
# sourceless = false
|
# sourceless = false
|
||||||
|
|
||||||
# version location specification; This defaults
|
# version number format
|
||||||
# to alembic/versions. When using multiple version
|
version_num_format = %04d
|
||||||
# directories, initial revisions must be specified with --version-path.
|
|
||||||
# The path separator used here should be the separator specified by "version_path_separator"
|
|
||||||
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
|
|
||||||
|
|
||||||
# version path separator; As mentioned above, this is the character used to split
|
# version path separator; As mentioned above, this is the character used to split
|
||||||
# version_locations. Valid values are:
|
# version_locations. The default within new alembic.ini files is "os", which uses
|
||||||
|
# os.pathsep. If this key is omitted entirely, it falls back to the legacy
|
||||||
|
# behavior of splitting on spaces and/or commas.
|
||||||
|
# Valid values for version_path_separator are:
|
||||||
#
|
#
|
||||||
# version_path_separator = :
|
# version_path_separator = :
|
||||||
# version_path_separator = ;
|
# version_path_separator = ;
|
||||||
# version_path_separator = space
|
# version_path_separator = space
|
||||||
version_path_separator = os # default: use os.pathsep
|
version_path_separator = os
|
||||||
|
|
||||||
# the output encoding used when revision files
|
# the output encoding used when revision files
|
||||||
# are written from script.py.mako
|
# are written from script.py.mako
|
||||||
# output_encoding = utf-8
|
# output_encoding = utf-8
|
||||||
|
|
||||||
sqlalchemy.url = postgresql+asyncpg://root:root@127.0.0.1:5432/db
|
# Changed from postgresql+asyncpg to mysql+asyncmy
sqlalchemy.url = mysql+asyncmy://root:root@127.0.0.1:3306/app
|
||||||
|
|
||||||
|
|
||||||
[post_write_hooks]
|
[post_write_hooks]
|
||||||
@@ -68,26 +68,23 @@ sqlalchemy.url = postgresql+asyncpg://root:root@127.0.0.1:5432/db
|
|||||||
[loggers]
|
[loggers]
|
||||||
keys = root,sqlalchemy,alembic
|
keys = root,sqlalchemy,alembic
|
||||||
|
|
||||||
[handlers]
|
|
||||||
keys = console
|
|
||||||
|
|
||||||
[formatters]
|
|
||||||
keys = generic
|
|
||||||
|
|
||||||
[logger_root]
|
[logger_root]
|
||||||
level = WARN
|
level = WARN
|
||||||
handlers = console
|
handlers = console
|
||||||
qualname =
|
qualname = root
|
||||||
|
|
||||||
[logger_sqlalchemy]
|
[logger_sqlalchemy]
|
||||||
level = WARN
|
level = WARN
|
||||||
handlers =
|
handlers =
|
||||||
qualname = sqlalchemy.engine
|
qualname = sqlalchemy.engine
|
||||||
|
|
||||||
[logger_alembic]
|
[logger_alembic]
|
||||||
level = INFO
|
level = INFO
|
||||||
handlers =
|
handlers =
|
||||||
qualname = alembic
|
qualname = alembic
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = console
|
||||||
|
|
||||||
[handler_console]
|
[handler_console]
|
||||||
class = StreamHandler
|
class = StreamHandler
|
||||||
@@ -95,6 +92,9 @@ args = (sys.stderr,)
|
|||||||
level = NOTSET
|
level = NOTSET
|
||||||
formatter = generic
|
formatter = generic
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
keys = generic
|
||||||
|
|
||||||
[formatter_generic]
|
[formatter_generic]
|
||||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||||
datefmt = %H:%M:%S
|
datefmt = %H:%M:%S
|
||||||
@@ -3,11 +3,11 @@
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Optional, List
|
from typing import Optional, List
|
||||||
|
|
||||||
from sqlalchemy import Integer, BigInteger, Text, String, Numeric, Float, DateTime, ForeignKey, Index
|
from sqlalchemy import Integer, BigInteger, Text, String, Numeric, Float, DateTime, ForeignKey, Index, func # Added func import
|
||||||
from sqlalchemy.dialects.postgresql import JSONB, ARRAY
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON # Changed from postgresql.JSONB to mysql.JSON
|
||||||
from sqlalchemy.orm import Mapped, mapped_column
|
from sqlalchemy.orm import Mapped, mapped_column
|
||||||
|
|
||||||
from backend.common.model import snowflake_id_key, Base
|
from backend.common.model import Base, snowflake_id_key
|
||||||
|
|
||||||
|
|
||||||
class AuditLog(Base):
|
class AuditLog(Base):
|
||||||
@@ -16,9 +16,9 @@ class AuditLog(Base):
|
|||||||
id: Mapped[snowflake_id_key] = mapped_column(init=False, primary_key=True)
|
id: Mapped[snowflake_id_key] = mapped_column(init=False, primary_key=True)
|
||||||
api_type: Mapped[str] = mapped_column(String(20), nullable=False, comment="API类型: recognition embedding assessment")
|
api_type: Mapped[str] = mapped_column(String(20), nullable=False, comment="API类型: recognition embedding assessment")
|
||||||
model_name: Mapped[str] = mapped_column(String(50), nullable=False, comment="模型名称")
|
model_name: Mapped[str] = mapped_column(String(50), nullable=False, comment="模型名称")
|
||||||
request_data: Mapped[Optional[dict]] = mapped_column(JSONB, comment="请求数据")
|
request_data: Mapped[Optional[dict]] = mapped_column(MySQLJSON, comment="请求数据")
|
||||||
response_data: Mapped[Optional[dict]] = mapped_column(JSONB, comment="响应数据")
|
response_data: Mapped[Optional[dict]] = mapped_column(MySQLJSON, comment="响应数据")
|
||||||
token_usage: Mapped[Optional[dict]] = mapped_column(JSONB, comment="消耗的token数量")
|
token_usage: Mapped[Optional[dict]] = mapped_column(MySQLJSON, comment="消耗的token数量")
|
||||||
cost: Mapped[Optional[float]] = mapped_column(Numeric(10, 5), comment="API调用成本")
|
cost: Mapped[Optional[float]] = mapped_column(Numeric(10, 5), comment="API调用成本")
|
||||||
duration: Mapped[Optional[float]] = mapped_column(Float, comment="调用耗时(秒)")
|
duration: Mapped[Optional[float]] = mapped_column(Float, comment="调用耗时(秒)")
|
||||||
status_code: Mapped[Optional[int]] = mapped_column(Integer, comment="HTTP状态码")
|
status_code: Mapped[Optional[int]] = mapped_column(Integer, comment="HTTP状态码")
|
||||||
@@ -40,10 +40,11 @@ class AuditLog(Base):
|
|||||||
class DailySummary(Base):
|
class DailySummary(Base):
|
||||||
__tablename__ = 'daily_summary'
|
__tablename__ = 'daily_summary'
|
||||||
|
|
||||||
id: Mapped[snowflake_id_key] = mapped_column(init=False, primary_key=True)
|
id: Mapped[snowflake_id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
||||||
user_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey('wx_user.id'), comment="调用用户ID")
|
user_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey('wx_user.id'), comment="调用用户ID")
|
||||||
image_ids: Mapped[List[str]] = mapped_column(ARRAY(Text), default=None, comment="图片ID列表")
|
# MySQL doesn't have ARRAY type, so we'll use JSON to store lists
|
||||||
thumbnail_ids: Mapped[List[str]] = mapped_column(ARRAY(Text), default=None, comment="图片缩略图列表")
|
image_ids: Mapped[Optional[List[str]]] = mapped_column(MySQLJSON, default=None, comment="图片ID列表") # Changed from ARRAY to JSON
|
||||||
|
thumbnail_ids: Mapped[Optional[List[str]]] = mapped_column(MySQLJSON, default=None, comment="图片缩略图列表") # Changed from ARRAY to JSON
|
||||||
summary_time: Mapped[datetime] = mapped_column(DateTime, default=None, comment="总结的时间")
|
summary_time: Mapped[datetime] = mapped_column(DateTime, default=None, comment="总结的时间")
|
||||||
|
|
||||||
# 索引优化
|
# 索引优化
|
||||||
|
|||||||
@@ -1,48 +1,46 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
from typing import Optional
|
from datetime import datetime
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
from typing import Optional, List
|
||||||
|
|
||||||
from sqlalchemy import String, Column, LargeBinary, ForeignKey, BigInteger, Index, func, JSON, Text, Numeric, Enum as SQLEnum
|
from sqlalchemy import String, Text, DateTime, func, BigInteger, Index, ForeignKey
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON # Changed from postgresql.JSONB to mysql.JSON
|
||||||
from sqlalchemy.orm import Mapped, mapped_column, declared_attr
|
from sqlalchemy.orm import Mapped, mapped_column
|
||||||
|
from sqlalchemy.sql.sqltypes import LargeBinary
|
||||||
|
|
||||||
|
from backend.common.model import Base, id_key, DataClassBase
|
||||||
from backend.app.admin.schema.dict import WordMetaData
|
from backend.app.admin.schema.dict import WordMetaData
|
||||||
from backend.app.admin.schema.pydantic_type import PydanticType
|
from backend.app.admin.schema.pydantic_type import PydanticType
|
||||||
from backend.common.model import snowflake_id_key, DataClassBase
|
|
||||||
|
|
||||||
|
|
||||||
class DictionaryEntry(DataClassBase):
|
class DictionaryEntry(DataClassBase):
|
||||||
"""词典条目表"""
|
"""词典条目表"""
|
||||||
|
|
||||||
@declared_attr.directive
|
__tablename__ = "dict_entry"
|
||||||
def __tablename__(cls) -> str:
|
|
||||||
return "dict_entry"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, init=True, autoincrement=True)
|
id: Mapped[id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
||||||
word: Mapped[str] = mapped_column(String(255), unique=True, nullable=False)
|
word: Mapped[str] = mapped_column(String(255), unique=True, nullable=False)
|
||||||
definition: Mapped[Optional[str]] = mapped_column(Text, default=None)
|
definition: Mapped[Optional[str]] = mapped_column(Text, default=None)
|
||||||
details: Mapped[Optional[WordMetaData]] = mapped_column(PydanticType(pydantic_type=WordMetaData), default=None) # 其他可能的字段(根据实际需求添加)
|
details: Mapped[Optional[WordMetaData]] = mapped_column(PydanticType(pydantic_type=WordMetaData), default=None) # 其他可能的字段(根据实际需求添加)
|
||||||
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
Index('idx_dict_word', word),
|
Index('idx_dict_word', 'word'),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class DictionaryMedia(DataClassBase):
|
class DictionaryMedia(DataClassBase):
|
||||||
"""词典媒体资源表"""
|
"""词典媒体资源表"""
|
||||||
|
|
||||||
@declared_attr.directive
|
__tablename__ = "dict_media"
|
||||||
def __tablename__(cls) -> str:
|
|
||||||
return "dict_media"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, init=True, autoincrement=True)
|
id: Mapped[id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
||||||
file_name: Mapped[str] = mapped_column(String(255), nullable=False)
|
file_name: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||||
file_type: Mapped[str] = mapped_column(String(50), nullable=False) # 'audio', 'image'
|
file_type: Mapped[str] = mapped_column(String(50), nullable=False) # 'audio', 'image'
|
||||||
dict_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey("dict_entry.id"), default=None)
|
dict_id: Mapped[Optional[int]] = mapped_column(BigInteger, default=None)
|
||||||
file_data: Mapped[Optional[bytes]] = mapped_column(LargeBinary, default=None)
|
file_data: Mapped[Optional[bytes]] = mapped_column(Text, default=None) # Changed from LargeBinary to Text for MySQL compatibility
|
||||||
file_hash: Mapped[Optional[str]] = mapped_column(String(64), default=None)
|
file_hash: Mapped[Optional[str]] = mapped_column(String(64), default=None)
|
||||||
details: Mapped[Optional[dict]] = mapped_column(JSONB(astext_type=Text()), default=None, comment="其他信息") # 其他信息
|
details: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="其他信息") # Changed from JSONB to MySQLJSON
|
||||||
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
Index('idx_media_filename', file_name),
|
Index('idx_media_filename', file_name),
|
||||||
@@ -72,19 +70,16 @@ class DictCategory(str, Enum):
|
|||||||
|
|
||||||
class YdDict(DataClassBase):
|
class YdDict(DataClassBase):
|
||||||
"""YD词典查询结果表"""
|
"""YD词典查询结果表"""
|
||||||
|
__tablename__ = "yd_dict"
|
||||||
@declared_attr.directive
|
|
||||||
def __tablename__(cls) -> str:
|
|
||||||
return "yd_dict"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, init=False, autoincrement=True)
|
id: Mapped[int] = mapped_column(primary_key=True, init=False, autoincrement=True)
|
||||||
word: Mapped[str] = mapped_column(String(255), nullable=False, comment="查询的词或词组")
|
word: Mapped[str] = mapped_column(String(255), nullable=False, comment="查询的词或词组")
|
||||||
uk_phone: Mapped[str] = mapped_column(String(50), nullable=True, comment="uk 音标")
|
uk_phone: Mapped[str] = mapped_column(String(50), nullable=True, comment="uk 音标")
|
||||||
us_phone: Mapped[str] = mapped_column(String(50), nullable=True, comment="us 音标")
|
us_phone: Mapped[str] = mapped_column(String(50), nullable=True, comment="us 音标")
|
||||||
lang: Mapped[YdDictLanguage] = mapped_column(SQLEnum(YdDictLanguage), nullable=False, comment="查询的语言")
|
lang: Mapped[YdDictLanguage] = mapped_column(String(20), nullable=False, comment="查询的语言")
|
||||||
dict_type: Mapped[YdDictType] = mapped_column(SQLEnum(YdDictType), nullable=False, comment="词典类型(英中,英英)")
|
dict_type: Mapped[YdDictType] = mapped_column(String(20), nullable=False, comment="词典类型(英中,英英)")
|
||||||
category: Mapped[DictCategory] = mapped_column(SQLEnum(DictCategory), nullable=False, comment="词典分类(一般词典,少儿词典)")
|
category: Mapped[DictCategory] = mapped_column(String(20), nullable=False, comment="词典分类(一般词典,少儿词典)")
|
||||||
query_result: Mapped[dict] = mapped_column(JSONB(astext_type=Text()), nullable=False, comment="JSON结构的查询结果")
|
query_result: Mapped[dict] = mapped_column(MySQLJSON, nullable=False, comment="JSON结构的查询结果")
|
||||||
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
Index('idx_yd_dict_word', word),
|
Index('idx_yd_dict_word', word),
|
||||||
@@ -94,20 +89,17 @@ class YdDict(DataClassBase):
|
|||||||
|
|
||||||
class YdMedia(DataClassBase):
|
class YdMedia(DataClassBase):
|
||||||
"""YD词典媒体资源表(发音文件等)"""
|
"""YD词典媒体资源表(发音文件等)"""
|
||||||
|
__tablename__ = "yd_media"
|
||||||
@declared_attr.directive
|
|
||||||
def __tablename__(cls) -> str:
|
|
||||||
return "yd_media"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(primary_key=True, init=False, autoincrement=True)
|
id: Mapped[int] = mapped_column(primary_key=True, init=False, autoincrement=True)
|
||||||
file_name: Mapped[str] = mapped_column(String(255), nullable=False, comment="文件名")
|
file_name: Mapped[str] = mapped_column(String(255), nullable=False, comment="文件名")
|
||||||
file_type: Mapped[str] = mapped_column(String(50), nullable=False, comment="文件类型(audio, image等)")
|
file_type: Mapped[str] = mapped_column(String(50), nullable=False, comment="文件类型(audio, image等)")
|
||||||
yd_dict_id: Mapped[int] = mapped_column(BigInteger, ForeignKey("yd_dict.id"), nullable=False, comment="关联的YD词典条目")
|
yd_dict_id: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="关联的YD词典条目")
|
||||||
file_data: Mapped[Optional[bytes]] = mapped_column(LargeBinary, default=None, comment="文件二进制数据")
|
file_data: Mapped[Optional[bytes]] = mapped_column(LargeBinary, default=None, comment="文件二进制数据")
|
||||||
file_url: Mapped[Optional[str]] = mapped_column(String(500), default=None, comment="文件URL(如果存储在外部)")
|
file_url: Mapped[Optional[str]] = mapped_column(String(500), default=None, comment="文件URL(如果存储在外部)")
|
||||||
phonetic_symbol: Mapped[Optional[str]] = mapped_column(String(100), default=None, comment="关联的音标")
|
phonetic_symbol: Mapped[Optional[str]] = mapped_column(String(100), default=None, comment="关联的音标")
|
||||||
usage_type: Mapped[Optional[str]] = mapped_column(String(50), default=None, comment="用途类型(word_pronunciation, example_sentence等)")
|
usage_type: Mapped[Optional[str]] = mapped_column(String(50), default=None, comment="用途类型(word_pronunciation, example_sentence等)")
|
||||||
details: Mapped[Optional[dict]] = mapped_column(JSONB(astext_type=Text()), default=None, comment="其他信息")
|
details: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="其他信息")
|
||||||
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
Index('idx_yd_media_filename', file_name),
|
Index('idx_yd_media_filename', file_name),
|
||||||
|
|||||||
@@ -1,20 +1,20 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Optional, List
|
from typing import Optional
|
||||||
|
|
||||||
from sqlalchemy import String, Text, DateTime, ForeignKey, Index, BigInteger
|
from sqlalchemy import String, Text, DateTime, func, BigInteger, Index
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON # Changed from postgresql.JSONB to mysql.JSON
|
||||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
from sqlalchemy.orm import Mapped, mapped_column
|
||||||
|
|
||||||
from backend.common.model import snowflake_id_key, Base
|
from backend.common.model import Base, id_key, snowflake_id_key
|
||||||
|
|
||||||
|
|
||||||
class Feedback(Base):
|
class Feedback(Base):
|
||||||
__tablename__ = 'feedback'
|
__tablename__ = 'feedback'
|
||||||
|
|
||||||
id: Mapped[snowflake_id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
id: Mapped[snowflake_id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
||||||
user_id: Mapped[int] = mapped_column(BigInteger, ForeignKey('wx_user.id'), nullable=False, comment='用户ID')
|
user_id: Mapped[int] = mapped_column(BigInteger, nullable=False, comment='用户ID')
|
||||||
content: Mapped[str] = mapped_column(Text, nullable=False, comment='反馈内容')
|
content: Mapped[str] = mapped_column(Text, nullable=False, comment='反馈内容')
|
||||||
contact_info: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, comment='联系方式')
|
contact_info: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, comment='联系方式')
|
||||||
category: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, comment='反馈分类')
|
category: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, comment='反馈分类')
|
||||||
|
|||||||
@@ -1,12 +1,14 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
from datetime import datetime
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from sqlalchemy import BigInteger, Text, String, Index, DateTime, LargeBinary
|
from sqlalchemy import String, Text, DateTime, func, BigInteger, Index
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import MEDIUMBLOB
|
||||||
from sqlalchemy.orm import mapped_column, Mapped
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON # Changed from postgresql.JSONB to mysql.JSON
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column
|
||||||
|
|
||||||
from backend.common.model import snowflake_id_key, Base
|
from backend.common.model import Base, id_key, snowflake_id_key
|
||||||
|
|
||||||
|
|
||||||
class File(Base):
|
class File(Base):
|
||||||
@@ -18,11 +20,10 @@ class File(Base):
|
|||||||
content_type: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # MIME类型
|
content_type: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # MIME类型
|
||||||
file_size: Mapped[int] = mapped_column(BigInteger, nullable=False) # 文件大小(字节)
|
file_size: Mapped[int] = mapped_column(BigInteger, nullable=False) # 文件大小(字节)
|
||||||
storage_path: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # 存储路径(非数据库存储时使用)
|
storage_path: Mapped[Optional[str]] = mapped_column(Text, nullable=True) # 存储路径(非数据库存储时使用)
|
||||||
file_data: Mapped[Optional[bytes]] = mapped_column(LargeBinary, default=None, nullable=True) # 文件二进制数据(数据库存储时使用)
|
file_data: Mapped[Optional[bytes]] = mapped_column(MEDIUMBLOB, default=None, nullable=True) # 文件二进制数据(数据库存储时使用)
|
||||||
storage_type: Mapped[str] = mapped_column(String(20), nullable=False, default='database') # 存储类型: database, local, s3
|
storage_type: Mapped[str] = mapped_column(String(20), nullable=False, default='database') # 存储类型: database, local, s3
|
||||||
metadata_info: Mapped[Optional[dict]] = mapped_column(JSONB(astext_type=Text()), default=None, comment="元数据信息")
|
metadata_info: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="元数据信息")
|
||||||
|
|
||||||
# 表参数 - 包含所有必要的约束
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
Index('idx_file_hash', file_hash),
|
Index('idx_file_name', file_name),
|
||||||
)
|
)
|
||||||
@@ -3,11 +3,11 @@
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from sqlalchemy import String, Column, BigInteger, ForeignKey, Boolean, DateTime, Index, func, JSON, Text, Numeric
|
from sqlalchemy import String, Numeric, DateTime, func, BigInteger, Index, ForeignKey, Boolean, Text
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON # Changed from postgresql.JSONB to mysql.JSON
|
||||||
from sqlalchemy.orm import Mapped, mapped_column
|
from sqlalchemy.orm import Mapped, mapped_column
|
||||||
|
|
||||||
from backend.common.model import snowflake_id_key, Base
|
from backend.common.model import Base, id_key, snowflake_id_key
|
||||||
|
|
||||||
|
|
||||||
class Order(Base):
|
class Order(Base):
|
||||||
@@ -51,7 +51,7 @@ class FreezeLog(Base):
|
|||||||
user_id: Mapped[int] = mapped_column(BigInteger, ForeignKey('wx_user.id'), nullable=False)
|
user_id: Mapped[int] = mapped_column(BigInteger, ForeignKey('wx_user.id'), nullable=False)
|
||||||
order_id: Mapped[int] = mapped_column(BigInteger, ForeignKey('order.id'), nullable=False)
|
order_id: Mapped[int] = mapped_column(BigInteger, ForeignKey('order.id'), nullable=False)
|
||||||
amount: Mapped[int] = mapped_column(BigInteger, comment='冻结次数')
|
amount: Mapped[int] = mapped_column(BigInteger, comment='冻结次数')
|
||||||
reason: Mapped[Optional[str]] = mapped_column(Text, comment='冻结原因')
|
reason: Mapped[Optional[str]] = mapped_column(Text, default=None, comment='冻结原因') # 添加默认值
|
||||||
status: Mapped[str] = mapped_column(String(16), default='pending', comment='状态:pending/confirmed/cancelled')
|
status: Mapped[str] = mapped_column(String(16), default='pending', comment='状态:pending/confirmed/cancelled')
|
||||||
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
@@ -63,13 +63,13 @@ class FreezeLog(Base):
|
|||||||
class UsageLog(Base):
|
class UsageLog(Base):
|
||||||
__tablename__ = 'usage_log'
|
__tablename__ = 'usage_log'
|
||||||
|
|
||||||
id: Mapped[snowflake_id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
id: Mapped[id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
||||||
user_id: Mapped[int] = mapped_column(BigInteger, ForeignKey('wx_user.id'), nullable=False, comment='用户ID')
|
user_id: Mapped[int] = mapped_column(BigInteger, ForeignKey('wx_user.id'), nullable=False, comment='用户ID')
|
||||||
action: Mapped[str] = mapped_column(String(32), comment='动作:purchase/renewal/use/carryover/share/ad/freeze/unfreeze/refund')
|
action: Mapped[str] = mapped_column(String(32), comment='动作:purchase/renewal/use/carryover/share/ad/freeze/unfreeze/refund')
|
||||||
amount: Mapped[int] = mapped_column(BigInteger, comment='变动数量')
|
amount: Mapped[int] = mapped_column(BigInteger, comment='变动数量')
|
||||||
balance_after: Mapped[int] = mapped_column(BigInteger, comment='变动后余额')
|
balance_after: Mapped[int] = mapped_column(BigInteger, comment='变动后余额')
|
||||||
related_id: Mapped[Optional[int]] = mapped_column(BigInteger, default=None, comment='关联ID,如订单ID、冻结记录ID')
|
related_id: Mapped[Optional[int]] = mapped_column(BigInteger, default=None, comment='关联ID,如订单ID、冻结记录ID') # 添加默认值
|
||||||
details: Mapped[Optional[dict]] = mapped_column(JSONB, default=None, comment='附加信息')
|
details: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment='附加信息') # Changed from JSONB to MySQLJSON and add default
|
||||||
|
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
Index('idx_usage_user_action', 'user_id', 'action'),
|
Index('idx_usage_user_action', 'user_id', 'action'),
|
||||||
|
|||||||
@@ -3,18 +3,18 @@
|
|||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from sqlalchemy import String, Column, BigInteger, ForeignKey, DateTime, Index, Text
|
from sqlalchemy import String, BigInteger, DateTime, func, Index
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON # Changed from postgresql.JSONB to mysql.JSON
|
||||||
from sqlalchemy.orm import Mapped, mapped_column
|
from sqlalchemy.orm import Mapped, mapped_column
|
||||||
|
|
||||||
from backend.common.model import snowflake_id_key, Base
|
from backend.common.model import Base, id_key, snowflake_id_key
|
||||||
|
|
||||||
|
|
||||||
class Points(Base):
|
class Points(Base):
|
||||||
__tablename__ = 'points'
|
__tablename__ = 'points'
|
||||||
|
|
||||||
id: Mapped[snowflake_id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
id: Mapped[snowflake_id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
||||||
user_id: Mapped[int] = mapped_column(BigInteger, ForeignKey('wx_user.id'), unique=True, nullable=False, comment='关联的用户ID')
|
user_id: Mapped[int] = mapped_column(BigInteger, unique=True, nullable=False, comment='关联的用户ID')
|
||||||
balance: Mapped[int] = mapped_column(BigInteger, default=0, comment='当前积分余额')
|
balance: Mapped[int] = mapped_column(BigInteger, default=0, comment='当前积分余额')
|
||||||
total_earned: Mapped[int] = mapped_column(BigInteger, default=0, comment='累计获得积分')
|
total_earned: Mapped[int] = mapped_column(BigInteger, default=0, comment='累计获得积分')
|
||||||
total_spent: Mapped[int] = mapped_column(BigInteger, default=0, comment='累计消费积分')
|
total_spent: Mapped[int] = mapped_column(BigInteger, default=0, comment='累计消费积分')
|
||||||
@@ -31,12 +31,12 @@ class PointsLog(Base):
|
|||||||
__tablename__ = 'points_log'
|
__tablename__ = 'points_log'
|
||||||
|
|
||||||
id: Mapped[snowflake_id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
id: Mapped[snowflake_id_key] = mapped_column(BigInteger, init=False, primary_key=True)
|
||||||
user_id: Mapped[int] = mapped_column(BigInteger, ForeignKey('wx_user.id'), nullable=False, comment='用户ID')
|
user_id: Mapped[int] = mapped_column(BigInteger, nullable=False, comment='用户ID')
|
||||||
action: Mapped[str] = mapped_column(String(32), comment='动作:earn/spend')
|
action: Mapped[str] = mapped_column(String(32), comment='动作:earn/spend')
|
||||||
amount: Mapped[int] = mapped_column(BigInteger, comment='变动数量')
|
amount: Mapped[int] = mapped_column(BigInteger, comment='变动数量')
|
||||||
balance_after: Mapped[int] = mapped_column(BigInteger, comment='变动后余额')
|
balance_after: Mapped[int] = mapped_column(BigInteger, comment='变动后余额')
|
||||||
related_id: Mapped[Optional[int]] = mapped_column(BigInteger, default=None, comment='关联ID')
|
related_id: Mapped[Optional[int]] = mapped_column(BigInteger, default=None, comment='关联ID')
|
||||||
details: Mapped[Optional[dict]] = mapped_column(JSONB, default=None, comment='附加信息')
|
details: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment='附加信息')
|
||||||
|
|
||||||
# 索引优化
|
# 索引优化
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from sqlalchemy import String, Column, BigInteger, SmallInteger, Boolean, DateTime, Index, func, JSON, Text, Numeric
|
from sqlalchemy import String, Column, BigInteger, SmallInteger, Boolean, DateTime, Index, func, JSON, Text, Numeric
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON
|
||||||
from sqlalchemy.orm import Mapped, mapped_column
|
from sqlalchemy.orm import Mapped, mapped_column
|
||||||
|
|
||||||
from backend.common.model import snowflake_id_key, Base
|
from backend.common.model import snowflake_id_key, Base
|
||||||
@@ -17,7 +17,7 @@ class WxUser(Base):
|
|||||||
session_key: Mapped[str] = mapped_column(String(128), nullable=False, comment='会话密钥')
|
session_key: Mapped[str] = mapped_column(String(128), nullable=False, comment='会话密钥')
|
||||||
unionid: Mapped[Optional[str]] = mapped_column(String(64), default=None, index=True, comment='微信UnionID')
|
unionid: Mapped[Optional[str]] = mapped_column(String(64), default=None, index=True, comment='微信UnionID')
|
||||||
mobile: Mapped[Optional[str]] = mapped_column(String(15), default=None, index=True, comment='加密手机号')
|
mobile: Mapped[Optional[str]] = mapped_column(String(15), default=None, index=True, comment='加密手机号')
|
||||||
profile: Mapped[Optional[dict]] = mapped_column(JSONB(astext_type=Text()), default=None, comment='用户资料JSON')
|
profile: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment='用户资料JSON')
|
||||||
|
|
||||||
|
|
||||||
# class WxPayment(Base):
|
# class WxPayment(Base):
|
||||||
|
|||||||
@@ -1,25 +1,31 @@
|
|||||||
from sqlalchemy import Column, BigInteger, String, Text
|
#!/usr/bin/env python3
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
# -*- coding: utf-8 -*-
|
||||||
from pgvector.sqlalchemy import Vector
|
from sqlalchemy import TypeDecorator, Text
|
||||||
from sqlalchemy.types import TypeDecorator
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON # Changed from postgresql.JSONB to mysql.JSON
|
||||||
|
from sqlalchemy.orm import DeclarativeBase
|
||||||
from backend.utils.json_encoder import jsonable_encoder
|
import json
|
||||||
|
|
||||||
|
|
||||||
class PydanticType(TypeDecorator):
|
class PydanticType(TypeDecorator):
|
||||||
"""处理 Pydantic 模型的 SQLAlchemy 自定义类型"""
|
"""
|
||||||
impl = JSONB
|
自定义 Pydantic 类型装饰器
|
||||||
|
"""
|
||||||
|
impl = Text # Changed from JSONB to Text for MySQL compatibility
|
||||||
|
|
||||||
def __init__(self, pydantic_type=None, *args, **kwargs):
|
def __init__(self, pydantic_type, *args, **kwargs):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
self.pydantic_type = pydantic_type
|
self.pydantic_type = pydantic_type
|
||||||
|
|
||||||
def process_bind_param(self, value, dialect):
|
def process_bind_param(self, value, dialect):
|
||||||
if value is None:
|
if value is not None:
|
||||||
return None
|
if isinstance(value, self.pydantic_type):
|
||||||
return jsonable_encoder(value)
|
return json.dumps(value.model_dump())
|
||||||
|
else:
|
||||||
|
return json.dumps(value)
|
||||||
|
return None
|
||||||
|
|
||||||
def process_result_value(self, value, dialect):
|
def process_result_value(self, value, dialect):
|
||||||
if value is None or self.pydantic_type is None:
|
if value is not None:
|
||||||
return value
|
data = json.loads(value)
|
||||||
return self.pydantic_type(**value)
|
return self.pydantic_type(**data)
|
||||||
|
return None
|
||||||
@@ -4,7 +4,7 @@ from typing import Optional
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
from sqlalchemy import BigInteger, Text, String, DateTime, ForeignKey
|
from sqlalchemy import BigInteger, Text, String, DateTime, ForeignKey
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON
|
||||||
from sqlalchemy.orm import mapped_column, Mapped
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
|
||||||
from backend.common.model import snowflake_id_key, Base
|
from backend.common.model import snowflake_id_key, Base
|
||||||
@@ -23,7 +23,7 @@ class Article(Base):
|
|||||||
author: Mapped[Optional[str]] = mapped_column(String(100), nullable=True, comment="作者")
|
author: Mapped[Optional[str]] = mapped_column(String(100), nullable=True, comment="作者")
|
||||||
category: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, comment="分类")
|
category: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, comment="分类")
|
||||||
level: Mapped[Optional[str]] = mapped_column(String(20), nullable=True, comment="难度等级")
|
level: Mapped[Optional[str]] = mapped_column(String(20), nullable=True, comment="难度等级")
|
||||||
info: Mapped[Optional[dict]] = mapped_column(JSONB, default=None, comment="附加信息")
|
info: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="附加信息")
|
||||||
|
|
||||||
# 表参数 - 包含所有必要的约束
|
# 表参数 - 包含所有必要的约束
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
@@ -42,7 +42,7 @@ class ArticleParagraph(Base):
|
|||||||
paragraph_index: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="段落序号")
|
paragraph_index: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="段落序号")
|
||||||
content: Mapped[str] = mapped_column(Text, nullable=False, comment="段落内容")
|
content: Mapped[str] = mapped_column(Text, nullable=False, comment="段落内容")
|
||||||
standard_audio_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey('file.id'), nullable=True, comment="标准朗读音频文件ID")
|
standard_audio_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey('file.id'), nullable=True, comment="标准朗读音频文件ID")
|
||||||
info: Mapped[Optional[dict]] = mapped_column(JSONB, default=None, comment="附加信息")
|
info: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="附加信息")
|
||||||
|
|
||||||
# 表参数 - 包含所有必要的约束
|
# 表参数 - 包含所有必要的约束
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
@@ -61,7 +61,7 @@ class ArticleSentence(Base):
|
|||||||
sentence_index: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="句子序号")
|
sentence_index: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="句子序号")
|
||||||
content: Mapped[str] = mapped_column(Text, nullable=False, comment="句子内容")
|
content: Mapped[str] = mapped_column(Text, nullable=False, comment="句子内容")
|
||||||
standard_audio_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey('file.id'), nullable=True, comment="标准朗读音频文件ID")
|
standard_audio_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey('file.id'), nullable=True, comment="标准朗读音频文件ID")
|
||||||
info: Mapped[Optional[dict]] = mapped_column(JSONB, default=None, comment="附加信息")
|
info: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="附加信息")
|
||||||
|
|
||||||
# 表参数 - 包含所有必要的约束
|
# 表参数 - 包含所有必要的约束
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
|
|||||||
@@ -3,8 +3,7 @@
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from sqlalchemy import BigInteger, Text, String, Index, ForeignKey
|
from sqlalchemy import BigInteger, Text, String, Index, ForeignKey
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON
|
||||||
from pgvector.sqlalchemy import Vector
|
|
||||||
from sqlalchemy.orm import mapped_column, Mapped
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
|
||||||
from backend.app.ai.schema.image import ImageMetadata
|
from backend.app.ai.schema.image import ImageMetadata
|
||||||
@@ -19,7 +18,7 @@ class Image(Base):
|
|||||||
file_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey('file.id'), nullable=True, comment="关联的文件ID")
|
file_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey('file.id'), nullable=True, comment="关联的文件ID")
|
||||||
thumbnail_id: Mapped[Optional[int]] = mapped_column(BigInteger, default=None, nullable=True, comment="缩略图ID")
|
thumbnail_id: Mapped[Optional[int]] = mapped_column(BigInteger, default=None, nullable=True, comment="缩略图ID")
|
||||||
info: Mapped[Optional[ImageMetadata]] = mapped_column(PydanticType(pydantic_type=ImageMetadata), default=None, comment="附加元数据") # 其他可能的字段(根据实际需求添加)
|
info: Mapped[Optional[ImageMetadata]] = mapped_column(PydanticType(pydantic_type=ImageMetadata), default=None, comment="附加元数据") # 其他可能的字段(根据实际需求添加)
|
||||||
details: Mapped[Optional[dict]] = mapped_column(JSONB(astext_type=Text()), default=None, comment="其他信息") # 其他信息
|
details: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="其他信息") # 其他信息
|
||||||
|
|
||||||
# 表参数 - 包含所有必要的约束
|
# 表参数 - 包含所有必要的约束
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ from enum import Enum
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from sqlalchemy import BigInteger, Text, String, Index, Integer
|
from sqlalchemy import BigInteger, Text, String, Index, Integer
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON
|
||||||
from sqlalchemy.orm import mapped_column, Mapped
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
|
||||||
from backend.common.model import snowflake_id_key, Base
|
from backend.common.model import snowflake_id_key, Base
|
||||||
@@ -24,10 +24,10 @@ class ImageProcessingTask(Base):
|
|||||||
image_id: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="关联的图片ID")
|
image_id: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="关联的图片ID")
|
||||||
file_id: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="关联的文件ID")
|
file_id: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="关联的文件ID")
|
||||||
user_id: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="用户ID")
|
user_id: Mapped[int] = mapped_column(BigInteger, nullable=False, comment="用户ID")
|
||||||
dict_level: Mapped[str] = mapped_column(String, nullable=False, comment="词典等级")
|
dict_level: Mapped[str] = mapped_column(String(20), nullable=False, comment="词典等级")
|
||||||
type: Mapped[str] = mapped_column(String, nullable=False, comment="处理类型")
|
type: Mapped[str] = mapped_column(String(50), nullable=False, comment="处理类型")
|
||||||
status: Mapped[ImageTaskStatus] = mapped_column(String, default=ImageTaskStatus.PENDING, comment="任务状态")
|
status: Mapped[ImageTaskStatus] = mapped_column(String(20), default=ImageTaskStatus.PENDING, comment="任务状态")
|
||||||
result: Mapped[Optional[dict]] = mapped_column(JSONB(astext_type=Text()), default=None, comment="处理结果")
|
result: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="处理结果")
|
||||||
error_message: Mapped[Optional[str]] = mapped_column(Text, default=None, comment="错误信息")
|
error_message: Mapped[Optional[str]] = mapped_column(Text, default=None, comment="错误信息")
|
||||||
retry_count: Mapped[int] = mapped_column(Integer, default=0, comment="重试次数")
|
retry_count: Mapped[int] = mapped_column(Integer, default=0, comment="重试次数")
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from sqlalchemy import BigInteger, Text, String, Integer, DateTime, ForeignKey
|
from sqlalchemy import BigInteger, Text, String, Integer, DateTime, ForeignKey
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON
|
||||||
from sqlalchemy.orm import mapped_column, Mapped
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
|
||||||
from backend.common.model import snowflake_id_key, Base
|
from backend.common.model import snowflake_id_key, Base
|
||||||
@@ -24,10 +24,10 @@ class ImageText(Base):
|
|||||||
standard_audio_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey('file.id'), nullable=True, comment="标准朗读音频文件ID")
|
standard_audio_id: Mapped[Optional[int]] = mapped_column(BigInteger, ForeignKey('file.id'), nullable=True, comment="标准朗读音频文件ID")
|
||||||
ipa: Mapped[Optional[str]] = mapped_column(String(100), default=None, comment="ipa")
|
ipa: Mapped[Optional[str]] = mapped_column(String(100), default=None, comment="ipa")
|
||||||
zh: Mapped[Optional[str]] = mapped_column(String(100), default=None, comment="中文")
|
zh: Mapped[Optional[str]] = mapped_column(String(100), default=None, comment="中文")
|
||||||
position: Mapped[Optional[dict]] = mapped_column(JSONB, default=None, comment="文本在图片中的位置信息或文章中的位置信息")
|
position: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="文本在图片中的位置信息或文章中的位置信息")
|
||||||
dict_level: Mapped[Optional[str]] = mapped_column(String(20), default=None, comment="词典等级")
|
dict_level: Mapped[Optional[str]] = mapped_column(String(20), default=None, comment="词典等级")
|
||||||
source: Mapped[Optional[str]] = mapped_column(String(20), default=None, comment="文本来源 (ref_word/description/article)")
|
source: Mapped[Optional[str]] = mapped_column(String(20), default=None, comment="文本来源 (ref_word/description/article)")
|
||||||
info: Mapped[Optional[dict]] = mapped_column(JSONB, default=None, comment="附加信息")
|
info: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="附加信息")
|
||||||
|
|
||||||
# 表参数 - 包含所有必要的约束
|
# 表参数 - 包含所有必要的约束
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ from typing import Optional
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
from sqlalchemy import BigInteger, Text, ForeignKey, String, Integer, DateTime, Boolean, Index
|
from sqlalchemy import BigInteger, Text, ForeignKey, String, Integer, DateTime, Boolean, Index
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.mysql import JSON as MySQLJSON
|
||||||
from sqlalchemy.orm import mapped_column, Mapped
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
|
||||||
from backend.app.ai.schema.recording import RecordingMetadata
|
from backend.app.ai.schema.recording import RecordingMetadata
|
||||||
@@ -24,7 +24,7 @@ class Recording(Base):
|
|||||||
text: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, comment='朗读文本')
|
text: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, comment='朗读文本')
|
||||||
eval_mode: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, comment='评测模式')
|
eval_mode: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, comment='评测模式')
|
||||||
info: Mapped[Optional[RecordingMetadata]] = mapped_column(PydanticType(pydantic_type=RecordingMetadata), default=None, comment="附加元数据") # 其他可能的字段(根据实际需求添加)
|
info: Mapped[Optional[RecordingMetadata]] = mapped_column(PydanticType(pydantic_type=RecordingMetadata), default=None, comment="附加元数据") # 其他可能的字段(根据实际需求添加)
|
||||||
details: Mapped[Optional[dict]] = mapped_column(JSONB(astext_type=Text()), default=None, comment="评估信息") # 其他信息
|
details: Mapped[Optional[dict]] = mapped_column(MySQLJSON, default=None, comment="评估信息") # 其他信息
|
||||||
is_standard: Mapped[bool] = mapped_column(Boolean, default=False, comment="是否为标准朗读音频")
|
is_standard: Mapped[bool] = mapped_column(Boolean, default=False, comment="是否为标准朗读音频")
|
||||||
|
|
||||||
# 表参数 - 包含所有必要的约束
|
# 表参数 - 包含所有必要的约束
|
||||||
|
|||||||
@@ -73,7 +73,7 @@ class ImageShowRes(ImageRecognizeRes):
|
|||||||
|
|
||||||
|
|
||||||
class ImageInfoSchemaBase(SchemaBase):
|
class ImageInfoSchemaBase(SchemaBase):
|
||||||
embedding: Optional[list] = None
|
# embedding: Optional[list] = None
|
||||||
info: Optional[ImageMetadata] = None
|
info: Optional[ImageMetadata] = None
|
||||||
details: Optional[dict] = None
|
details: Optional[dict] = None
|
||||||
|
|
||||||
|
|||||||
@@ -356,19 +356,19 @@ class ImageService:
|
|||||||
background_tasks.add_task(ImageService.generate_thumbnail, image_id, file_id)
|
background_tasks.add_task(ImageService.generate_thumbnail, image_id, file_id)
|
||||||
|
|
||||||
# embedding
|
# embedding
|
||||||
embed_params = QwenEmbedImageParams(
|
# embed_params = QwenEmbedImageParams(
|
||||||
user_id=current_user.id,
|
# user_id=current_user.id,
|
||||||
dict_level=dict_level,
|
# dict_level=dict_level,
|
||||||
image_id=new_image.id,
|
# image_id=new_image.id,
|
||||||
file_name=file_name,
|
# file_name=file_name,
|
||||||
format=image_format_str,
|
# format=image_format_str,
|
||||||
data=base64_image,
|
# data=base64_image,
|
||||||
)
|
# )
|
||||||
embed_response = await Qwen.embed_image(embed_params)
|
# embed_response = await Qwen.embed_image(embed_params)
|
||||||
if embed_response.get("error"):
|
# if embed_response.get("error"):
|
||||||
raise Exception(embed_response["error"])
|
# raise Exception(embed_response["error"])
|
||||||
|
#
|
||||||
embedding = embed_response.get("embedding")
|
# embedding = embed_response.get("embedding")
|
||||||
|
|
||||||
# 提取元数据
|
# 提取元数据
|
||||||
additional_info = {
|
additional_info = {
|
||||||
@@ -382,7 +382,7 @@ class ImageService:
|
|||||||
await image_dao.update(
|
await image_dao.update(
|
||||||
db, new_image.id,
|
db, new_image.id,
|
||||||
UpdateImageParam(
|
UpdateImageParam(
|
||||||
embedding=embedding,
|
# embedding=embedding,
|
||||||
info=metadata or {},
|
info=metadata or {},
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
from functools import lru_cache
|
from functools import lru_cache
|
||||||
from typing import Any, Literal
|
from typing import Any, Literal
|
||||||
from celery.schedules import crontab
|
from celery.schedules import crontab
|
||||||
from pydantic import model_validator, PostgresDsn
|
from pydantic import model_validator, HttpUrl # Changed from PostgresDsn to HttpUrl
|
||||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||||
|
|
||||||
from backend.core.path_conf import BASE_PATH
|
from backend.core.path_conf import BASE_PATH
|
||||||
@@ -58,10 +58,10 @@ class Settings(BaseSettings):
|
|||||||
# .env 数据库
|
# .env 数据库
|
||||||
DATABASE_ECHO: bool | Literal['debug'] = False
|
DATABASE_ECHO: bool | Literal['debug'] = False
|
||||||
DATABASE_HOST: str
|
DATABASE_HOST: str
|
||||||
DATABASE_PORT: int = 5432
|
DATABASE_PORT: int = 3306
|
||||||
DATABASE_USER: str
|
DATABASE_USER: str
|
||||||
DATABASE_PASSWORD: str
|
DATABASE_PASSWORD: str
|
||||||
DATABASE_DB_NAME: str = 'postgres'
|
DATABASE_DB_NAME: str = 'app'
|
||||||
|
|
||||||
# .env Redis
|
# .env Redis
|
||||||
REDIS_HOST: str
|
REDIS_HOST: str
|
||||||
@@ -194,14 +194,7 @@ def get_settings():
|
|||||||
|
|
||||||
# 环境区分示例
|
# 环境区分示例
|
||||||
def get_db_uri(settings: Settings):
|
def get_db_uri(settings: Settings):
|
||||||
return PostgresDsn.build(
|
# Changed from PostgresDsn.build to manual URL construction for MySQL
|
||||||
scheme="postgresql+asyncpg",
|
return f"mysql+asyncmy://{settings.DATABASE_USER}:{settings.DATABASE_PASSWORD}@{settings.DATABASE_HOST}:{settings.DATABASE_PORT}/{settings.DATABASE_DB_NAME}"
|
||||||
username=settings.DATABASE_USER,
|
|
||||||
password=settings.DATABASE_PASSWORD,
|
|
||||||
host=settings.DATABASE_HOST,
|
|
||||||
port=settings.DATABASE_PORT,
|
|
||||||
path=settings.DATABASE_DB_NAME,
|
|
||||||
).unicode_string()
|
|
||||||
|
|
||||||
|
settings = get_settings()
|
||||||
settings = get_settings()
|
|
||||||
@@ -26,28 +26,26 @@ def create_async_engine_and_session(
|
|||||||
application_name: str = "app"
|
application_name: str = "app"
|
||||||
) -> tuple[create_async_engine, async_sessionmaker[AsyncSession], async_sessionmaker[AsyncSession]]:
|
) -> tuple[create_async_engine, async_sessionmaker[AsyncSession], async_sessionmaker[AsyncSession]]:
|
||||||
"""
|
"""
|
||||||
创建 PostgreSQL 异步引擎和会话工厂
|
创建 MySQL 异步引擎和会话工厂
|
||||||
参数优化说明:
|
参数优化说明:
|
||||||
- pool_size: 建议设置为 (核心数 * 2) + 有效磁盘数
|
- pool_size: 建议设置为 (核心数 * 2) + 有效磁盘数
|
||||||
- max_overflow: 峰值连接缓冲,避免连接风暴
|
- max_overflow: 峰值连接缓冲,避免连接风暴
|
||||||
- pool_recycle: 防止 PostgreSQL 连接超时 (默认为 1 小时)
|
- pool_recycle: 防止 MySQL 连接超时 (默认为 1 小时)
|
||||||
- pool_pre_ping: 强烈建议开启,处理连接失效问题
|
- pool_pre_ping: 强烈建议开启,处理连接失效问题
|
||||||
- application_name: 帮助 DBA 识别连接来源
|
- application_name: 帮助 DBA 识别连接来源
|
||||||
"""
|
"""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# 创建异步引擎 (针对 PostgreSQL 优化)
|
# 创建异步引擎 (针对 MySQL 优化)
|
||||||
engine = create_async_engine(
|
engine = create_async_engine(
|
||||||
url,
|
url,
|
||||||
echo=echo,
|
echo=echo,
|
||||||
echo_pool=echo,
|
echo_pool=echo,
|
||||||
future=True,
|
future=True,
|
||||||
connect_args={
|
connect_args={
|
||||||
"server_settings": {
|
"charset": "utf8mb4", # MySQL 特定字符集
|
||||||
"application_name": application_name,
|
"autocommit": True, # 自动提交
|
||||||
"jit": "off", # 禁用 JIT 编译,提高简单查询性能
|
"connect_timeout": 60, # 连接超时
|
||||||
"statement_timeout": "30000" # 30 秒查询超时
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
pool_size=pool_size,
|
pool_size=pool_size,
|
||||||
max_overflow=max_overflow,
|
max_overflow=max_overflow,
|
||||||
@@ -55,10 +53,10 @@ def create_async_engine_and_session(
|
|||||||
pool_recycle=pool_recycle,
|
pool_recycle=pool_recycle,
|
||||||
pool_pre_ping=pool_pre_ping,
|
pool_pre_ping=pool_pre_ping,
|
||||||
pool_use_lifo=True, # 使用 LIFO 提高连接池效率
|
pool_use_lifo=True, # 使用 LIFO 提高连接池效率
|
||||||
# PostgreSQL 特定优化参数
|
# MySQL 特定优化参数
|
||||||
poolclass=None, # 使用默认 QueuePool
|
poolclass=None, # 使用默认 QueuePool
|
||||||
execution_options={
|
execution_options={
|
||||||
"isolation_level": "REPEATABLE READ", # 推荐隔离级别
|
"isolation_level": "READ COMMITTED", # MySQL 推荐隔离级别
|
||||||
"compiled_cache": None # 禁用缓存,避免内存泄漏
|
"compiled_cache": None # 禁用缓存,避免内存泄漏
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
@@ -67,18 +65,23 @@ def create_async_engine_and_session(
|
|||||||
pool_size=5,
|
pool_size=5,
|
||||||
max_overflow=10,
|
max_overflow=10,
|
||||||
pool_pre_ping=True,
|
pool_pre_ping=True,
|
||||||
pool_recycle=300
|
pool_recycle=300,
|
||||||
|
connect_args={
|
||||||
|
"charset": "utf8mb4",
|
||||||
|
"autocommit": True,
|
||||||
|
"connect_timeout": 60,
|
||||||
|
}
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
log.error(f'❌ PostgreSQL 数据库连接失败: {e}')
|
log.error(f'❌ MySQL 数据库连接失败: {e}')
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
else:
|
else:
|
||||||
# 创建异步会话工厂 (针对 PostgreSQL 优化)
|
# 创建异步会话工厂 (针对 MySQL 优化)
|
||||||
db_session = async_sessionmaker(
|
db_session = async_sessionmaker(
|
||||||
bind=engine,
|
bind=engine,
|
||||||
autoflush=False,
|
autoflush=False,
|
||||||
expire_on_commit=False,
|
expire_on_commit=False,
|
||||||
# PostgreSQL 特定优化
|
# MySQL 特定优化
|
||||||
class_=AsyncSession,
|
class_=AsyncSession,
|
||||||
twophase=False, # 禁用两阶段提交
|
twophase=False, # 禁用两阶段提交
|
||||||
enable_baked_queries=False, # 禁用 baked 查询避免内存问题
|
enable_baked_queries=False, # 禁用 baked 查询避免内存问题
|
||||||
@@ -90,7 +93,7 @@ def create_async_engine_and_session(
|
|||||||
autoflush=False
|
autoflush=False
|
||||||
)
|
)
|
||||||
|
|
||||||
log.info(f'✅ PostgreSQL 异步引擎创建成功 | 连接池: [{pool_size}] - [{max_overflow}]')
|
log.info(f'✅ MySQL 异步引擎创建成功 | 连接池: [{pool_size}] - [{max_overflow}]')
|
||||||
return engine, db_session, background_db_session
|
return engine, db_session, background_db_session
|
||||||
|
|
||||||
|
|
||||||
@@ -116,4 +119,4 @@ SQLALCHEMY_DATABASE_URL = get_db_uri(settings)
|
|||||||
async_engine, async_db_session, background_db_session = create_async_engine_and_session(SQLALCHEMY_DATABASE_URL)
|
async_engine, async_db_session, background_db_session = create_async_engine_and_session(SQLALCHEMY_DATABASE_URL)
|
||||||
|
|
||||||
# Session Annotated
|
# Session Annotated
|
||||||
CurrentSession = Annotated[AsyncSession, Depends(get_db)]
|
CurrentSession = Annotated[AsyncSession, Depends(get_db)]
|
||||||
@@ -1,25 +1,39 @@
|
|||||||
|
version: '3.8'
|
||||||
|
|
||||||
services:
|
services:
|
||||||
fsm_server:
|
fsm_server:
|
||||||
build:
|
build:
|
||||||
context: ../../
|
context: ../../
|
||||||
dockerfile: Dockerfile
|
dockerfile: Dockerfile
|
||||||
image: fsm_server:latest
|
ports:
|
||||||
|
- "8000:8000"
|
||||||
container_name: fsm_server
|
container_name: fsm_server
|
||||||
restart: always
|
restart: always
|
||||||
depends_on:
|
depends_on:
|
||||||
- fsm_mysql
|
- fsm_mysql
|
||||||
- fsm_redis
|
- fsm_redis
|
||||||
volumes:
|
volumes:
|
||||||
- fsm_static:/fsm/backend/static
|
- fsm_static:/www/fsm_server/backend/static
|
||||||
|
environment:
|
||||||
|
- SERVER_HOST=0.0.0.0
|
||||||
|
- SERVER_PORT=8000
|
||||||
|
- DATABASE_HOST=fsm_mysql
|
||||||
|
- DATABASE_PORT=3306
|
||||||
|
- DATABASE_USER=root
|
||||||
|
- DATABASE_PASSWORD=123456
|
||||||
|
- DATABASE_DB_NAME=fsm
|
||||||
|
- REDIS_HOST=fsm_redis
|
||||||
|
- REDIS_PORT=6379
|
||||||
|
- REDIS_PASSWORD=
|
||||||
|
- REDIS_DATABASE=0
|
||||||
networks:
|
networks:
|
||||||
- fsm_network
|
- fsm_network
|
||||||
command:
|
command: |
|
||||||
- bash
|
sh -c "
|
||||||
- -c
|
|
||||||
- |
|
|
||||||
wait-for-it -s fsm_mysql:3306 -s fsm_redis:6379 -t 300
|
wait-for-it -s fsm_mysql:3306 -s fsm_redis:6379 -t 300
|
||||||
supervisord -c /etc/supervisor/supervisord.conf
|
supervisord -c /etc/supervisor/supervisord.conf
|
||||||
supervisorctl restart
|
supervisorctl restart
|
||||||
|
"
|
||||||
|
|
||||||
fsm_mysql:
|
fsm_mysql:
|
||||||
image: mysql:8.0.29
|
image: mysql:8.0.29
|
||||||
@@ -42,17 +56,24 @@ services:
|
|||||||
--lower_case_table_names=1
|
--lower_case_table_names=1
|
||||||
|
|
||||||
fsm_redis:
|
fsm_redis:
|
||||||
image: redis:6.2.7
|
image: redis:7.0.4
|
||||||
ports:
|
ports:
|
||||||
- "6379:6379"
|
- "6379:6379"
|
||||||
container_name: fsm_redis
|
container_name: fsm_redis
|
||||||
restart: always
|
restart: always
|
||||||
environment:
|
|
||||||
- TZ=Asia/Shanghai
|
|
||||||
volumes:
|
volumes:
|
||||||
- fsm_redis:/var/lib/redis
|
- fsm_redis:/data
|
||||||
networks:
|
networks:
|
||||||
- fsm_network
|
- fsm_network
|
||||||
|
command: |
|
||||||
|
--requirepass ""
|
||||||
|
--appendonly yes
|
||||||
|
--appendfilename "redis-staging.aof"
|
||||||
|
--appendfsync everysec
|
||||||
|
--dir /data
|
||||||
|
--databases 16
|
||||||
|
--maxmemory 256mb
|
||||||
|
--maxmemory-policy allkeys-lru
|
||||||
|
|
||||||
fsm_nginx:
|
fsm_nginx:
|
||||||
image: nginx:stable
|
image: nginx:stable
|
||||||
@@ -83,4 +104,4 @@ volumes:
|
|||||||
fsm_redis:
|
fsm_redis:
|
||||||
name: fsm_redis
|
name: fsm_redis
|
||||||
fsm_static:
|
fsm_static:
|
||||||
name: fsm_static
|
name: fsm_static
|
||||||
137
pyproject.toml
137
pyproject.toml
@@ -1,75 +1,81 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "app"
|
name = "blabla-server"
|
||||||
description = """
|
version = "0.0.1"
|
||||||
A RBAC (Role-Based Access Control) permission control system built on FastAPI, featuring a unique pseudo-three-tier
|
description = "FastAPI Best Architecture"
|
||||||
architecture design, with built-in basic implementation of fastapi admin as a template library, free and open-source.
|
|
||||||
"""
|
|
||||||
authors = [
|
authors = [
|
||||||
{ name = "Felix", email = "hengzone@outlook.com" },
|
{ name = "Felix", email = "hengzone@outlook.com" },
|
||||||
]
|
]
|
||||||
readme = "README.md"
|
|
||||||
license = { text = "MIT" }
|
|
||||||
requires-python = ">=3.10"
|
|
||||||
dynamic = ['version']
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aiofiles>=24.1.0",
|
"fastapi>=0.115.0",
|
||||||
"aiosmtplib>=4.0.2",
|
"uvicorn>=0.30.0",
|
||||||
"alembic>=1.16.5",
|
"sqlalchemy>=2.0.0",
|
||||||
"asgi-correlation-id>=4.3.4",
|
"alembic>=1.13.0",
|
||||||
"asgiref>=3.9.1",
|
|
||||||
"asyncmy>=0.2.10",
|
"asyncmy>=0.2.10",
|
||||||
"asyncpg>=0.30.0",
|
"mysql-connector-python>=8.0.33", # Added MySQL connector
|
||||||
"apscheduler==3.11.0",
|
"pydantic>=2.0.0",
|
||||||
"bcrypt>=4.3.0",
|
"pydantic-settings>=2.0.0",
|
||||||
"cappa>=0.30.0",
|
"passlib>=1.7.4",
|
||||||
"cryptography>=45.0.6",
|
"bcrypt>=4.0.0",
|
||||||
"dulwich>=0.24.1",
|
"python-jose>=3.3.0",
|
||||||
"fast-captcha>=0.3.2",
|
"python-multipart>=0.0.9",
|
||||||
"fastapi-limiter>=0.1.6",
|
"redis>=5.0.0",
|
||||||
"fastapi-pagination>=0.14.0",
|
"fastapi-limiter>=0.1.5",
|
||||||
"fastapi[standard-no-fastapi-cloud-cli]>=0.116.1",
|
"fastapi-pagination>=0.12.0",
|
||||||
"fastapi-utilities==0.3.1",
|
"celery>=5.3.0",
|
||||||
"flower>=2.0.1",
|
"flower>=2.0.0",
|
||||||
"gevent>=25.8.2",
|
"loguru>=0.7.0",
|
||||||
"granian>=2.5.1",
|
"apscheduler>=3.10.0",
|
||||||
"ip2loc>=1.0.0",
|
"typer>=0.9.0",
|
||||||
"itsdangerous>=2.2.0",
|
"rich>=13.0.0",
|
||||||
"jinja2>=3.1.6",
|
"httpx>=0.25.0",
|
||||||
"loguru>=0.7.3",
|
"jinja2>=3.1.0",
|
||||||
"msgspec>=0.19.0",
|
"python-dotenv>=1.0.0",
|
||||||
"psutil>=7.0.0",
|
"cryptography>=41.0.0",
|
||||||
"psycopg[binary]>=3.2.9",
|
"python-socketio>=5.8.0",
|
||||||
"pwdlib>=0.2.1",
|
"asgi-correlation-id>=4.2.0",
|
||||||
"pydantic>=2.11.7",
|
"fastapi-utilities>=0.3.0",
|
||||||
"pydantic-settings>=2.10.1",
|
"sqlalchemy-crud-plus>=1.0.0",
|
||||||
"pymysql>=1.1.1",
|
"path>=16.7.0",
|
||||||
"python-jose>=3.5.0",
|
"fast-captcha>=0.3.0",
|
||||||
"python-socketio>=5.13.0",
|
|
||||||
"pycrypto==2.6.1",
|
|
||||||
"redis[hiredis]>=6.4.0",
|
|
||||||
"rtoml>=0.12.0",
|
|
||||||
"sqlalchemy-crud-plus>=1.11.0",
|
|
||||||
"sqlalchemy[asyncio]>=2.0.43",
|
|
||||||
"sqlparse>=0.5.3",
|
|
||||||
"user-agents>=2.2.0",
|
"user-agents>=2.2.0",
|
||||||
|
"ip2loc>=1.0.0",
|
||||||
|
"dashscope>=1.14.0",
|
||||||
|
"dulwich>=0.27.0",
|
||||||
|
"msgspec>=0.18.0",
|
||||||
|
"rtoml>=0.11.0",
|
||||||
|
"psutil>=5.9.0",
|
||||||
|
"pwdlib>=0.2.0",
|
||||||
|
"itsdangerous>=2.1.0",
|
||||||
|
"aiofiles>=23.0.0",
|
||||||
|
"asgiref>=3.7.0",
|
||||||
]
|
]
|
||||||
|
requires-python = ">=3.10"
|
||||||
|
readme = "README.md"
|
||||||
|
|
||||||
[dependency-groups]
|
|
||||||
dev = [
|
|
||||||
"pytest>=8.4.0",
|
|
||||||
"pytest-sugar>=1.1.1",
|
|
||||||
]
|
|
||||||
lint = [
|
|
||||||
"pre-commit>=4.3.0",
|
|
||||||
]
|
|
||||||
server = [
|
server = [
|
||||||
"aio-pika>=9.5.7",
|
"aio-pika>=9.5.7",
|
||||||
"wait-for-it>=2.3.0",
|
"wait-for-it>=2.3.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=61.0", "wheel"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[tool.setuptools.packages.find]
|
||||||
|
include = ["backend*"]
|
||||||
|
|
||||||
|
[tool.setuptools.package-data]
|
||||||
|
"*" = ["*.md", "*.yaml", "*.yml"]
|
||||||
|
|
||||||
[tool.uv]
|
[tool.uv]
|
||||||
python-downloads = "manual"
|
python-downloads = "manual"
|
||||||
default-groups = ["dev", "lint"]
|
default-groups = ["dev", "lint"]
|
||||||
|
dev-dependencies = [
|
||||||
|
"pytest>=7.4.0",
|
||||||
|
"pytest-sugar>=0.9.7",
|
||||||
|
"pre-commit>=3.4.0",
|
||||||
|
"ruff>=0.1.0",
|
||||||
|
]
|
||||||
|
|
||||||
[[tool.uv.index]]
|
[[tool.uv.index]]
|
||||||
name = "aliyun"
|
name = "aliyun"
|
||||||
@@ -84,6 +90,21 @@ path = "backend/__init__.py"
|
|||||||
[project.scripts]
|
[project.scripts]
|
||||||
myapp = "backend.cli:main"
|
myapp = "backend.cli:main"
|
||||||
|
|
||||||
[build-system]
|
[tool.ruff]
|
||||||
requires = ["hatchling"]
|
line-length = 120
|
||||||
build-backend = "hatchling.build"
|
indent-width = 4
|
||||||
|
target-version = "py310"
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
|
select = [
|
||||||
|
"E", # pycodestyle errors
|
||||||
|
"W", # pycodestyle warnings
|
||||||
|
"F", # pyflakes
|
||||||
|
"I", # isort
|
||||||
|
"C", # flake8-comprehensions
|
||||||
|
"B", # flake8-bugbear
|
||||||
|
"Q", # flake8-quotes
|
||||||
|
"SIM", # flake8-simplify
|
||||||
|
"TID", # flake8-tidy-imports
|
||||||
|
"RUF", # Ruff-specific rules
|
||||||
|
]
|
||||||
@@ -24,7 +24,7 @@ async-timeout==5.0.1 ; python_full_version < '3.11.3'
|
|||||||
# redis
|
# redis
|
||||||
asyncmy==0.2.10
|
asyncmy==0.2.10
|
||||||
# via fastapi-best-architecture
|
# via fastapi-best-architecture
|
||||||
asyncpg==0.30.0
|
# asyncpg==0.30.0 # Removed PostgreSQL driver
|
||||||
# via fastapi-best-architecture
|
# via fastapi-best-architecture
|
||||||
bcrypt==4.3.0
|
bcrypt==4.3.0
|
||||||
# via fastapi-best-architecture
|
# via fastapi-best-architecture
|
||||||
@@ -319,3 +319,5 @@ zope-event==5.0
|
|||||||
# via gevent
|
# via gevent
|
||||||
zope-interface==7.2
|
zope-interface==7.2
|
||||||
# via gevent
|
# via gevent
|
||||||
|
mysql-connector-python==8.0.33 # Added MySQL connector
|
||||||
|
# via fastapi-best-architecture
|
||||||
Reference in New Issue
Block a user