feat: complete the basic implementation of the backend API integration

main
chenzhirong 4 months ago
parent f374f4c12c
commit 2b58603f76

@ -1,3 +1,3 @@
#!/usr/bin/env bash
-cd /root/SUSTechPOINTS && python3 ./main.py
+cd /root/SUSTechPOINTS && uv sync && python3 ./main.py

@ -5,6 +5,9 @@ import numpy as np
import sys
import os
from crud.service.scene_service import SceneService
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
@ -253,8 +256,8 @@ def write_annotation_back(scene_id, frame, new_ann):
    ann = list(filter(lambda a: a["obj_id"]!=new_ann["obj_id"], ann))
    ann.append(new_ann)
    SceneService.update_label_json(scene_id, frame, ann)
-    scene_reader.save_annotations(scene_id, frame, ann)
+    # scene_reader.save_annotations(scene_id, frame, ann)
if __name__ == "__main__":

@ -0,0 +1,8 @@
DATABASE_URL = "mysql+pymysql://root:ngsk0809@192.168.5.10:3406/labelu"
MINIO_CONFIG = {
"host": "192.168.5.10:9100",
"user": "admin",
"password": "ngsk0809"
}
MINIO_BUCKET = "3dlabel"

@ -0,0 +1,6 @@
from sqlmodel import create_engine, Session
from constant_3d import DATABASE_URL
engine = create_engine(DATABASE_URL)
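The service layer below opens Session(engine) directly in every method. If request-scoped sessions are ever wanted, a common FastAPI pattern is a small dependency next to this engine; a sketch under that assumption, not part of the commit:

from sqlmodel import Session

def get_session():
    # Yields one session per request and closes it afterwards;
    # usable in a router as `session: Session = Depends(get_session)`.
    with Session(engine) as session:
        yield session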

@ -0,0 +1,35 @@
import uuid
from typing import Any, List
from sqlalchemy import JSON
from sqlmodel import SQLModel, Field
from crud.config.db import engine
def get_uuid():
return uuid.uuid1().hex
class Scene(SQLModel, table=True):
id: str = Field(default_factory=get_uuid, max_length=32, primary_key=True)
scene_name: str = Field(max_length=256, description="Scene name", unique=True)
calib_json: List[dict] = Field(sa_type=JSON, nullable=True, description="calib is the calibration matrix from the point cloud to the images. It is optional, but if provided, boxes are projected onto the images to assist annotation.")
desc: List[dict] = Field(sa_type=JSON, nullable=True, description="Scene description")
class SceneWorldItem(SQLModel, table=True):
__tablename__ = "scene_world_item"
id: str = Field(default_factory=get_uuid, max_length=32, primary_key=True)
scene_id: str = Field(max_length=256, description="Scene id")
scene_name: str = Field(max_length=256, description="Scene name")
frame: str = Field(max_length=256, description="Frame index")
calib_json: List[dict] = Field(sa_type=JSON, nullable=True, description="calib is the calibration matrix from the point cloud to the images. It is optional, but if provided, boxes are projected onto the images to assist annotation.")
label_json: List[dict] = Field(sa_type=JSON, nullable=True, description="Annotation data (== annotation)")
ego_pose_json: List[dict] = Field(sa_type=JSON, nullable=True, description="")
front_img_path: str = Field(max_length=512, description="Path of the corresponding file in MinIO")
left_img_path: str = Field(max_length=512, description="Path of the corresponding file in MinIO")
right_img_path: str = Field(max_length=512, description="Path of the corresponding file in MinIO")
lidar_pcd_path: str = Field(max_length=512, description="Path of the corresponding .pcd file in MinIO")
# Initialize the database tables (runs at import time)
SQLModel.metadata.create_all(engine)
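For reference, a small smoke test of the two tables above; all values are invented, and create_all has already created the tables on import:

from sqlmodel import Session, select
from crud.config.db import engine
from crud.entity.models import Scene, SceneWorldItem

with Session(engine) as session:
    scene = Scene(scene_name="example", calib_json=[], desc=[])
    session.add(scene)
    session.add(SceneWorldItem(
        scene_id=scene.id, scene_name=scene.scene_name, frame="000000",
        calib_json=[], label_json=[], ego_pose_json=[],
        front_img_path="example/camera/front/000000.jpg",
        left_img_path="example/camera/left/000000.jpg",
        right_img_path="example/camera/right/000000.jpg",
        lidar_pcd_path="example/lidar/000000.pcd",
    ))
    session.commit()
    # List the frames stored for this scene.
    frames = session.exec(select(SceneWorldItem.frame)
                          .where(SceneWorldItem.scene_name == "example")).all()
    print(frames)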

@ -0,0 +1,25 @@
from typing import Any
from pydantic import BaseModel, Field
class SaveWorldItem(BaseModel):
scene: str
frame: str
annotation: Any
class CropSceneRequest(BaseModel):
rawSceneId: str
startTime: str
seconds: str
desc: str
class PredictRotationRequest(BaseModel):
points: Any # assumed to be an N*3 array of point cloud data
# 1. Define the request model
class LoadWorldItem(BaseModel):
scene: str = Field(description="scene name")
frame: str = Field(description="frame id")
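These DTOs are what FastAPI validates request bodies against; a short illustration with made-up payload values:

from crud.entity.scene_dto import SaveWorldItem, LoadWorldItem

item = SaveWorldItem(**{"scene": "example", "frame": "000000", "annotation": []})
print(item.scene, item.frame)

# LoadWorldItem(**i), as used in the router, raises a ValidationError on malformed items:
try:
    LoadWorldItem(**{"scene": "example"})  # "frame" is missing
except Exception as exc:
    print(exc)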

@ -0,0 +1,145 @@
from typing import List, Dict, Any
from sqlmodel import select, Session
from crud.config.db import engine
from crud.entity.models import SceneWorldItem, Scene
from crud.entity.scene_dto import SaveWorldItem
class SceneService:
@classmethod
def get_scene_info(cls,s=None):
result_list = []
with Session(engine) as session:
query_stmt = select(Scene)
if s is not None:
query_stmt = query_stmt.where(Scene.scene_name == s)
scene_all = session.exec(query_stmt)
for scene in scene_all:
query_stmt = select(SceneWorldItem).where(SceneWorldItem.scene_name == scene.scene_name)
query_result_item = session.exec(query_stmt)
scene_worlds = query_result_item.all()
item = {
"scene": scene.scene_name,
"frames": [scene_world.frame for scene_world in scene_worlds],
"lidar_ext": ".pcd",
"camera_ext": ".jpg",
"radar_ext": ".pcd",
"aux_lidar_ext": ".pcd",
"boxtype": "psr",
"camera": [
"right",
"left",
"front"
],
"calib": scene.calib_json,
}
result_list.append(item)
return result_list
@classmethod
def save_world_list(cls, items: List[SaveWorldItem]):
"""批量保存标注数据"""
for item in items:
scene = item.scene
frame = item.frame
ann = item.annotation
with Session(engine) as session:
query_stmt = select(SceneWorldItem).where(SceneWorldItem.scene_name == scene,
SceneWorldItem.frame == frame)
result_item = session.exec(query_stmt).first()
result_item.label_json = ann
session.add(result_item)
session.commit()
return "ok"
@classmethod
def update_label_json(cls, scene, frame,ann):
with Session(engine) as session:
query_stmt = select(SceneWorldItem).where(SceneWorldItem.scene_name == scene,
SceneWorldItem.frame == frame)
result_item = session.exec(query_stmt).first()
result_item.label_json = ann
session.add(result_item)
session.commit()
return "ok"
@classmethod
def get_frame_ids(cls, scene: str):
with Session(engine) as session:
exec_result = session.exec(select(SceneWorldItem.id).where(SceneWorldItem.scene_name == scene))
ids = exec_result.all()
return ids
@classmethod
def get_scene_items(cls, scene: str):
with Session(engine) as session:
items = session.exec(select(SceneWorldItem).where(SceneWorldItem.scene_name == scene)).all()
return items
@classmethod
def get_label_json(cls, scene, frame):
with Session(engine) as session:
query_stmt = select(SceneWorldItem).where(SceneWorldItem.scene_name == scene, SceneWorldItem.frame == frame)
result_item = session.exec(query_stmt).first()
return result_item.label_json if result_item else []
@classmethod
def get_ego_pose_json(cls, scene, frame):
with Session(engine) as session:
query_stmt = select(SceneWorldItem).where(SceneWorldItem.scene_name == scene, SceneWorldItem.frame == frame)
result_item = session.exec(query_stmt).first()
return result_item.ego_pose_json if result_item else None
@classmethod
def get_scene_names(cls,s=None):
with Session(engine) as session:
query_stmt = select(Scene)
if s is not None:
query_stmt = query_stmt.where(Scene.scene_name == s)
query_result_item = session.exec(query_stmt)
return [i.scene_name for i in query_result_item.all()]
@classmethod
def get_scene_desc(cls):
with Session(engine) as session:
query_stmt = select(Scene)
query_result_item = session.exec(query_stmt)
result = {}
for i in query_result_item.all():
result[i.scene_name] = i.desc
return result
@classmethod
def get_all_objs(cls,scene: str) -> List[Dict[str, Any]]:
"""从 CherryPy 类中提取的辅助函数"""
scene_items:List[SceneWorldItem] = cls.get_scene_items(scene)
if scene_items is None:
return []
all_objs = {}
for f in scene_items:
boxes = f.label_json
for b in boxes:
o = {"category": b.get("obj_type"), "id": b.get("obj_id")}
if not o["category"] or not o["id"]: continue
k = f"{o['category']}-{o['id']}"
if k in all_objs:
all_objs[k]['count'] += 1
else:
all_objs[k] = {
"category": o["category"],
"id": o["id"],
"count": 1
}
return list(all_objs.values())
if __name__ == '__main__':
# print(SceneService.get_frame_ids("example"))
# print(SceneService.get_scene_items("example"))
print(SceneService.get_scene_info())
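Note that save_world_list and update_label_json assume a SceneWorldItem row already exists for every (scene, frame); otherwise result_item is None and the attribute assignment raises. A hedged sketch of a guarded variant (update_label_json_safe is a hypothetical helper, not in this commit):

from sqlmodel import Session, select
from crud.config.db import engine
from crud.entity.models import SceneWorldItem

def update_label_json_safe(scene: str, frame: str, ann) -> bool:
    # Returns False instead of raising when the (scene, frame) row is missing.
    with Session(engine) as session:
        stmt = select(SceneWorldItem).where(SceneWorldItem.scene_name == scene,
                                            SceneWorldItem.frame == frame)
        row = session.exec(stmt).first()
        if row is None:
            return False
        row.label_json = ann
        session.add(row)
        session.commit()
        return True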

@ -0,0 +1,191 @@
import logging
import time
from io import BytesIO
from minio import Minio
from minio.commonconfig import CopySource
from minio.error import S3Error
from constant_3d import MINIO_CONFIG
MINIO = MINIO_CONFIG
class MinioServer:
def __init__(self):
self.conn = None
self.__open__()
def __open__(self):
try:
if self.conn:
self.__close__()
except Exception:
pass
try:
self.conn = Minio(MINIO["host"],
access_key=MINIO["user"],
secret_key=MINIO["password"],
secure=False
)
except Exception:
logging.exception(
"Fail to connect %s " % MINIO["host"])
def __close__(self):
del self.conn
self.conn = None
def health(self):
bucket, fnm, binary = "txtxtxtxt1", "txtxtxtxt1", b"_t@@@1"
if not self.conn.bucket_exists(bucket):
self.conn.make_bucket(bucket)
r = self.conn.put_object(bucket, fnm,
BytesIO(binary),
len(binary)
)
return r
def put(self, bucket, fnm, binary):
for _ in range(3):
try:
if not self.conn.bucket_exists(bucket):
self.conn.make_bucket(bucket)
r = self.conn.put_object(bucket, fnm,
BytesIO(binary),
len(binary)
)
return r
except Exception:
logging.exception(f"Fail to put {bucket}/{fnm}:")
self.__open__()
time.sleep(1)
def rm(self, bucket, fnm):
try:
self.conn.remove_object(bucket, fnm)
except Exception:
logging.exception(f"Fail to remove {bucket}/{fnm}:")
def get(self, bucket, filename):
for _ in range(3):
try:
r = self.conn.get_object(bucket, filename)
return r.read()
except Exception:
logging.exception(f"Fail to get {bucket}/{filename}")
self.__open__()
time.sleep(1)
return None
def get_file(self, bucket, filename):
for _ in range(2):
try:
r = self.conn.get_object(bucket, filename)
return r
except Exception:
logging.exception(f"Fail to get {bucket}/{filename}")
self.__open__()
time.sleep(1)
return None
def obj_exist(self, bucket, filename):
try:
if not self.conn.bucket_exists(bucket):
return False
if self.conn.stat_object(bucket, filename):
return True
else:
return False
except S3Error as e:
if e.code in ["NoSuchKey", "NoSuchBucket", "ResourceNotFound"]:
return False
except Exception:
logging.exception(f"obj_exist {bucket}/{filename} got exception")
return False
def get_presigned_url(self, bucket, fnm, expires):
for _ in range(10):
try:
return self.conn.get_presigned_url("GET", bucket, fnm, expires)
except Exception:
logging.exception(f"Fail to get_presigned {bucket}/{fnm}:")
self.__open__()
time.sleep(1)
return
def remove_bucket(self, bucket):
try:
if self.conn.bucket_exists(bucket):
objects_to_delete = self.conn.list_objects(bucket, recursive=True)
for obj in objects_to_delete:
self.conn.remove_object(bucket, obj.object_name)
self.conn.remove_bucket(bucket)
except Exception:
logging.exception(f"Fail to remove bucket {bucket}")
def init_directory(self, bucket, fnm):
for _ in range(3):
try:
if not self.conn.bucket_exists(bucket):
self.conn.make_bucket(bucket)
r = self.conn.put_object(bucket, fnm, BytesIO(b''), 0)
return True
except Exception:
logging.exception(f"Fail to init directory {bucket}/{fnm}:")
self.__open__()
time.sleep(1)
return False
def initCreateBucket(self, bucket_name):
# Check whether the bucket exists
found = self.conn.bucket_exists(bucket_name)
if not found:
# The bucket does not exist; create it
try:
self.conn.make_bucket(bucket_name)
policy = """
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "PublicRead",
"Effect": "Allow",
"Principal": "*",
"Action": ["s3:GetObject"],
"Resource": ["arn:aws:s3:::%s/*" ]
}
]
}
""" % bucket_name
self.conn.set_bucket_policy(bucket_name, policy)
logging.info(f"'{bucket_name}' 创建成功.")
return True
except S3Error as err:
logging.error(f"Error occurred: {err}")
else:
logging.info(f"'{bucket_name}' 存在.")
return False
def copy_file_in_bucket(self, source_bucket, source_file_path, target_bucket, target_path):
"""
在同一个桶内复制文件
:param source_bucket: 存放元数据的桶名称
:param source_file_path: 源文件路径 "aa/a"
:param target_bucket: 存放目标位置的桶名称
:param target_path: 目标文件路径 "bb/a"
"""
copy_source = CopySource(source_bucket, source_file_path)
try:
# 服务端复制对象
self.conn.copy_object(
bucket_name=target_bucket,
object_name=target_path, # 目标路径
source=copy_source # 源路径
)
logging.info(f"文件从桶‘{source_bucket}’的‘{source_file_path} 复制到桶‘{target_bucket}’的‘{target_path}")
except S3Error as e:
logging.error(f"复制失败: {e}")

@ -1,238 +1,75 @@
-import random
-import string
-import cherrypy
-import os
-import json
-from jinja2 import Environment, FileSystemLoader
-env = Environment(loader=FileSystemLoader('./'))
-import os
-import sys
-import scene_reader
-from tools import check_labels as check
-# BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-# sys.path.append(BASE_DIR)
-#sys.path.append(os.path.join(BASE_DIR, './algos'))
-#import algos.rotation as rotation
-from algos import pre_annotate
-#sys.path.append(os.path.join(BASE_DIR, '../tracking'))
-#import algos.trajectory as trajectory
-# extract_object_exe = "~/code/pcltest/build/extract_object"
-# registration_exe = "~/code/go_icp_pcl/build/test_go_icp"
-# sys.path.append(os.path.join(BASE_DIR, './tools'))
-# import tools.dataset_preprocess.crop_scene as crop_scene
-class Root(object):
-    @cherrypy.expose
-    def index(self, scene="", frame=""):
-        tmpl = env.get_template('index.html')
-        return tmpl.render()
-    @cherrypy.expose
-    def icon(self):
-        tmpl = env.get_template('test_icon.html')
-        return tmpl.render()
-    @cherrypy.expose
-    def ml(self):
-        tmpl = env.get_template('test_ml.html')
-        return tmpl.render()
-    @cherrypy.expose
-    def reg(self):
-        tmpl = env.get_template('registration_demo.html')
-        return tmpl.render()
-    @cherrypy.expose
-    def view(self, file):
-        tmpl = env.get_template('view.html')
-        return tmpl.render()
-    # @cherrypy.expose
-    # def saveworld(self, scene, frame):
-    #     # cl = cherrypy.request.headers['Content-Length']
-    #     rawbody = cherrypy.request.body.readline().decode('UTF-8')
-    #     with open("./data/"+scene +"/label/"+frame+".json",'w') as f:
-    #         f.write(rawbody)
-    #     return "ok"
-    @cherrypy.expose
-    def saveworldlist(self):
-        # cl = cherrypy.request.headers['Content-Length']
-        rawbody = cherrypy.request.body.readline().decode('UTF-8')
-        data = json.loads(rawbody)
-        for d in data:
-            scene = d["scene"]
-            frame = d["frame"]
-            ann = d["annotation"]
-            with open("./data/"+scene +"/label/"+frame+".json",'w') as f:
-                json.dump(ann, f, indent=2, sort_keys=True)
-        return "ok"
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def cropscene(self):
-        rawbody = cherrypy.request.body.readline().decode('UTF-8')
-        data = json.loads(rawbody)
-        rawdata = data["rawSceneId"]
-        timestamp = rawdata.split("_")[0]
-        print("generate scene")
-        log_file = "temp/crop-scene-"+timestamp+".log"
-        cmd = "python ./tools/dataset_preprocess/crop_scene.py generate "+ \
-              rawdata[0:10]+"/"+timestamp + "_preprocessed/dataset_2hz " + \
-              "- " +\
-              data["startTime"] + " " +\
-              data["seconds"] + " " +\
-              "\""+ data["desc"] + "\"" +\
-              "> " + log_file + " 2>&1"
-        print(cmd)
-        code = os.system(cmd)
-        with open(log_file) as f:
-            log = list(map(lambda s: s.strip(), f.readlines()))
-        os.system("rm "+log_file)
-        return {"code": code,
-                "log": log
-               }
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def checkscene(self, scene):
-        ck = check.LabelChecker(os.path.join("./data", scene))
-        ck.check()
-        print(ck.messages)
-        return ck.messages
-    # data N*3 numpy array
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def predict_rotation(self):
-        cl = cherrypy.request.headers['Content-Length']
-        rawbody = cherrypy.request.body.readline().decode('UTF-8')
-        data = json.loads(rawbody)
-        return {"angle": pre_annotate.predict_yaw(data["points"])}
-        #return {}
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def auto_annotate(self, scene, frame):
-        print("auto annotate ", scene, frame)
-        return pre_annotate.annotate_file('./data/{}/lidar/{}.pcd'.format(scene,frame))
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def load_annotation(self, scene, frame):
-        return scene_reader.read_annotations(scene, frame)
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def load_ego_pose(self, scene, frame):
-        return scene_reader.read_ego_pose(scene, frame)
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def loadworldlist(self):
-        rawbody = cherrypy.request.body.readline().decode('UTF-8')
-        worldlist = json.loads(rawbody)
-        anns = list(map(lambda w:{
-            "scene": w["scene"],
-            "frame": w["frame"],
-            "annotation":scene_reader.read_annotations(w["scene"], w["frame"])},
-            worldlist))
-        return anns
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def datameta(self):
-        return scene_reader.get_all_scenes()
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def scenemeta(self, scene):
-        return scene_reader.get_one_scene(scene)
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def get_all_scene_desc(self):
-        return scene_reader.get_all_scene_desc()
-    @cherrypy.expose
-    @cherrypy.tools.json_out()
-    def objs_of_scene(self, scene):
-        return self.get_all_objs(os.path.join("./data",scene))
-    def get_all_objs(self, path):
-        label_folder = os.path.join(path, "label")
-        if not os.path.isdir(label_folder):
-            return []
-        files = os.listdir(label_folder)
-        files = filter(lambda x: x.split(".")[-1]=="json", files)
-        def file_2_objs(f):
-            with open(f) as fd:
-                boxes = json.load(fd)
-                objs = [x for x in map(lambda b: {"category":b["obj_type"], "id": b["obj_id"]}, boxes)]
-                return objs
-        boxes = map(lambda f: file_2_objs(os.path.join(path, "label", f)), files)
-        # the following map makes the category-id pairs unique in scene
-        all_objs={}
-        for x in boxes:
-            for o in x:
-                k = str(o["category"])+"-"+str(o["id"])
-                if all_objs.get(k):
-                    all_objs[k]['count']= all_objs[k]['count']+1
-                else:
-                    all_objs[k]= {
-                        "category": o["category"],
-                        "id": o["id"],
-                        "count": 1
-                    }
-        return [x for x in all_objs.values()]
-if __name__ == '__main__':
-    cherrypy.quickstart(Root(), '/', config="server.conf")
-else:
-    application = cherrypy.Application(Root(), '/', config="server.conf")
+from fastapi import FastAPI, Request
+from fastapi.staticfiles import StaticFiles
+from fastapi.templating import Jinja2Templates
+from router import router
+# 1. Create the FastAPI application instance
+app = FastAPI(
+    title="Annotation Tool API",
+    description="Annotation tool API converted from CherryPy",
+    version="2.0.0"
+)
+# 2. Set up Jinja2 templates
+# CherryPy's FileSystemLoader('./') corresponds to FastAPI's directory="."
+templates = Jinja2Templates(directory=".")
+# 3. Mount static files (corresponds to server.conf)
+# app.mount("path", StaticFiles(directory="local directory"), name="unique name")
+app.mount("/static", StaticFiles(directory="public"), name="public")
+# app.mount("/data", StaticFiles(directory="data"), name="data")
+app.mount("/temp", StaticFiles(directory="temp"), name="temp")
+app.mount("/views", StaticFiles(directory="views"), name="views")
+app.mount("/assets", StaticFiles(directory="assets"), name="assets")
+# --- HTML page routes ---
+@app.get("/icon")
+def icon(request: Request):
+    """Render the icon test page"""
+    return templates.TemplateResponse("test_icon.html", {"request": request})
+@app.get("/ml")
+def ml(request: Request):
+    """Render the ML test page"""
+    return templates.TemplateResponse("test_ml.html", {"request": request})
+@app.get("/reg")
+def reg(request: Request):
+    """Render the registration demo page"""
+    return templates.TemplateResponse("registration_demo.html", {"request": request})
+@app.get("/view/{file_path:path}")
+def view(request: Request, file_path: str):
+    """Render the view page; :path allows slashes in the path"""
+    # The original code did not use the file parameter; kept the same here
+    return templates.TemplateResponse("view.html", {"request": request})
+# --- API routes ---
+@app.get("/")
+def index(request: Request, scene: str = "", frame: str = ""):
+    """Render the main page"""
+    return templates.TemplateResponse("index.html", {"request": request})
+app.include_router(router)
+# 6. Start the server (corresponds to if __name__ == '__main__')
+# Run from the command line: uvicorn main:app --host 0.0.0.0 --port 8081 --reload
+if __name__ == "__main__":
+    import uvicorn
+    print("Starting FastAPI server...")
+    # The host and port from server.conf are configured here
+    uvicorn.run(
+        app,
+        host="0.0.0.0",
+        port=8081
+    )

@ -1,79 +0,0 @@
from fastapi import FastAPI, Request
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from router import router
# 1. Create the FastAPI application instance
app = FastAPI(
title="Annotation Tool API",
description="从 CherryPy 转换而来的标注工具 API",
version="2.0.0"
)
# 2. Set up Jinja2 templates
# CherryPy's FileSystemLoader('./') corresponds to FastAPI's directory="."
templates = Jinja2Templates(directory=".")
# 3. Mount static files (corresponds to server.conf)
# app.mount("path", StaticFiles(directory="local directory"), name="unique name")
app.mount("/static", StaticFiles(directory="public"), name="public")
app.mount("/data", StaticFiles(directory="data"), name="data")
app.mount("/temp", StaticFiles(directory="temp"), name="temp")
app.mount("/views", StaticFiles(directory="views"), name="views")
app.mount("/assets", StaticFiles(directory="assets"), name="assets")
# 4. Define Pydantic models for request body validation
# This is safer and clearer than parsing JSON by hand
# 5. Convert the CherryPy class methods into FastAPI route functions
# --- HTML page routes ---
@app.get("/icon")
def icon(request: Request):
"""渲染测试图标页"""
return templates.TemplateResponse("test_icon.html", {"request": request})
@app.get("/ml")
def ml(request: Request):
"""渲染测试 ML 页"""
return templates.TemplateResponse("test_ml.html", {"request": request})
@app.get("/reg")
def reg(request: Request):
"""渲染注册演示页"""
return templates.TemplateResponse("registration_demo.html", {"request": request})
@app.get("/view/{file_path:path}")
def view(request: Request, file_path: str):
"""渲染查看页,:path 允许路径中包含斜杠"""
# 原始代码没有使用 file 参数,这里保持一致
return templates.TemplateResponse("view.html", {"request": request})
# --- API routes ---
@app.get("/")
def index(request: Request, scene: str = "", frame: str = ""):
"""渲染主页"""
return templates.TemplateResponse("index.html", {"request": request})
app.include_router(router)
# 6. Start the server (corresponds to if __name__ == '__main__')
# Run from the command line: uvicorn main:app --host 0.0.0.0 --port 8081 --reload
if __name__ == "__main__":
import uvicorn
print("Starting FastAPI server...")
# The host and port from server.conf are configured here
uvicorn.run(
app,
host="0.0.0.0",
port=8081
)

@ -13,6 +13,9 @@ dependencies = [
"tensorflow-macos==2.15.0; sys_platform == 'darwin'", "tensorflow-macos==2.15.0; sys_platform == 'darwin'",
"fastapi>=0.104.0", "fastapi>=0.104.0",
"uvicorn>=0.37.0", "uvicorn>=0.37.0",
"sqlmodel>=0.0.27",
"pymysql>=1.1.2",
"minio==7.2.4",
] ]
[[tool.uv.index]] [[tool.uv.index]]

@ -1,5 +0,0 @@
tensorflow>=2.1
jinja2
cherrypy
filterpy
cheroot != 8.4.4

@ -1,86 +1,39 @@
import json
+import mimetypes
import os
-from typing import List, Dict, Any
+from typing import List
-from fastapi import APIRouter, HTTPException,Request
+from fastapi import APIRouter, HTTPException, Request,Response
-from pydantic import BaseModel, Field
import scene_reader
-from tools import check_labels as check
from algos import pre_annotate
+from constant_3d import MINIO_BUCKET
+from crud.entity.scene_dto import SaveWorldItem, CropSceneRequest, PredictRotationRequest, LoadWorldItem
+from crud.service.scene_service import SceneService
+from crud.utils.minio_conn import MinioServer
+from tools import check_labels as check
-class SaveWorldItem(BaseModel):
-    scene: str
-    frame: str
-    annotation: Any
-class CropSceneRequest(BaseModel):
-    rawSceneId: str
-    startTime: str
-    seconds: str
-    desc: str
-class PredictRotationRequest(BaseModel):
-    points: Any  # assumed to be an N*3 array of point cloud data
-# 1. Define the request model
-class LoadWorldItem(BaseModel):
-    scene: str = Field(description="scene name")
-    frame: str = Field(description="frame id")
-router = APIRouter()
+router = APIRouter()
+# Migration complete
@router.post("/saveworldlist")
async def saveworldlist(items: List[SaveWorldItem]):
    """Save annotation data for a batch of frames"""
-    for item in items:
-        scene = item.scene
-        frame = item.frame
-        ann = item.annotation
-        label_dir = os.path.join("./data", scene, "label")
-        os.makedirs(label_dir, exist_ok=True)  # make sure the directory exists
-        file_path = os.path.join(label_dir, f"{frame}.json")
-        # todo: store the annotations in the database
-        with open(file_path, 'w') as f:
-            json.dump(ann, f, indent=2, sort_keys=True)
-    return "ok"
-@router.post("/cropscene")
-async def cropscene(request_data: CropSceneRequest):
-    """Crop a scene"""
-    rawdata = request_data.rawSceneId
-    timestamp = rawdata.split("_")[0]
-    log_file = f"temp/crop-scene-{timestamp}.log"
-    # Note: os.system is a security risk; replace it with the safer subprocess module in production
-    cmd = (
-        f"python ./tools/dataset_preprocess/crop_scene.py generate "
-        f"{rawdata[0:10]}/{timestamp}_preprocessed/dataset_2hz - "
-        f"{request_data.startTime} {request_data.seconds} "
-        f'"{request_data.desc}" > {log_file} 2>&1'
-    )
-    print(f"Executing command: {cmd}")
-    code = os.system(cmd)
-    log = []
-    if os.path.exists(log_file):
-        with open(log_file) as f:
-            log = [s.strip() for s in f.readlines()]
-        os.remove(log_file)
-    return {"code": code, "log": log}
+    return SceneService.save_world_list(items)
+# Migration complete
@router.get("/checkscene")
def checkscene(scene: str):
    """Check a scene's annotations"""
-    ck = check.LabelChecker(os.path.join("./data", scene))
+    # ck = check.LabelChecker(os.path.join("./data", scene))
+    ck = check.LabelChecker(scene)
    ck.check()
-    print(ck.messages)
    return ck.messages
+# Migration complete
@router.post("/predict_rotation")
async def predict_rotation(request_data: PredictRotationRequest):
    """Predict the rotation angle"""
@ -88,29 +41,24 @@ async def predict_rotation(request_data: PredictRotationRequest):
return {"angle": pre_annotate.predict_yaw(request_data.points)} return {"angle": pre_annotate.predict_yaw(request_data.points)}
@router.get("/auto_annotate") # 已修改完成
def auto_annotate(scene: str, frame: str):
"""自动标注"""
print(f"Auto annotate {scene}, {frame}")
file_path = f'./data/{scene}/lidar/{frame}.pcd'
if not os.path.exists(file_path):
raise HTTPException(status_code=404, detail=f"File not found: {file_path}")
return pre_annotate.annotate_file(file_path)
@router.get("/load_annotation") @router.get("/load_annotation")
def load_annotation(scene: str, frame: str): def load_annotation(scene: str, frame: str):
"""加载标注数据""" """加载标注数据"""
return scene_reader.read_annotations(scene, frame) # 读取标注数据 /data/项目名/label/**.json
# return scene_reader.read_annotations(scene, frame)
return SceneService.get_label_json(scene, frame)
# 已修改完成
@router.get("/load_ego_pose") @router.get("/load_ego_pose")
def load_ego_pose(scene: str, frame: str): def load_ego_pose(scene: str, frame: str):
"""加载自车姿态""" """加载自车姿态"""
return scene_reader.read_ego_pose(scene, frame) # return scene_reader.read_ego_pose(scene, frame)
return SceneService.get_ego_pose_json(scene, frame)
# 已修改完成
@router.post("/loadworldlist") @router.post("/loadworldlist")
async def load_world_list(request: Request): async def load_world_list(request: Request):
"""批量加载标注数据""" """批量加载标注数据"""
@ -125,68 +73,125 @@ async def load_world_list(request: Request):
    anns = []
    for i in items:
        w = LoadWorldItem(**i)
-        # todo: query the annotation data from the database
+        # Query the annotation data from the database
        anns.append({
            "scene": w.scene,
            "frame": w.frame,
-            "annotation": scene_reader.read_annotations(w.scene, w.frame)
+            # "annotation": scene_reader.read_annotations(w.scene, w.frame)
+            "annotation": SceneService.get_label_json(w.scene, w.frame)
        })
    return anns
+# Migration complete
@router.get("/datameta")
def datameta():
    """Get metadata for all scenes"""
-    return scene_reader.get_all_scenes()
+    return SceneService.get_scene_info()
-@router.get("/scenemeta")
-def scenemeta(scene: str):
-    """Get metadata for a single scene"""
-    return scene_reader.get_one_scene(scene)
+# Migration complete
@router.get("/get_all_scene_desc")
def get_all_scene_desc():
    """Get descriptions of all scenes"""
-    return scene_reader.get_all_scene_desc()
+    # todo: fetch the annotation task list ==> dict keyed by directory
+    # return {"aaa": 1}
+    return SceneService.get_scene_desc()
+# Migration complete
@router.get("/objs_of_scene")
def objs_of_scene(scene: str):
    """Get all objects in a scene"""
    # todo: query the image list from the database
-    return get_all_objs(os.path.join("./data", scene))
+    return SceneService.get_all_objs(os.path.join("./data", scene))
-# --- Helper functions ---
-def get_all_objs(path: str) -> List[Dict[str, Any]]:
-    """Helper function extracted from the old CherryPy class"""
-    label_folder = os.path.join(path, "label")
-    if not os.path.isdir(label_folder):
-        return []
-    files = [f for f in os.listdir(label_folder) if f.endswith(".json")]
-    all_objs = {}
-    for f in files:
-        try:
-            with open(os.path.join(path, "label", f)) as fd:
-                boxes = json.load(fd)
-                for b in boxes:
-                    o = {"category": b.get("obj_type"), "id": b.get("obj_id")}
-                    if not o["category"] or not o["id"]: continue
-                    k = f"{o['category']}-{o['id']}"
-                    if k in all_objs:
-                        all_objs[k]['count'] += 1
-                    else:
-                        all_objs[k] = {
-                            "category": o["category"],
-                            "id": o["id"],
-                            "count": 1
-                        }
-        except (IOError, json.JSONDecodeError) as e:
-            print(f"Error processing file {f}: {e}")
-    return list(all_objs.values())
+# No changes needed
+@router.post("/cropscene")
+async def cropscene(request_data: CropSceneRequest):
+    """Crop a scene"""
+    rawdata = request_data.rawSceneId
+    timestamp = rawdata.split("_")[0]
+    log_file = f"temp/crop-scene-{timestamp}.log"
+    # Note: os.system is a security risk; replace it with the safer subprocess module in production
+    cmd = (
+        f"python ./tools/dataset_preprocess/crop_scene.py generate "
+        f"{rawdata[0:10]}/{timestamp}_preprocessed/dataset_2hz - "
+        f"{request_data.startTime} {request_data.seconds} "
+        f'"{request_data.desc}" > {log_file} 2>&1'
+    )
+    print(f"Executing command: {cmd}")
+    code = os.system(cmd)
+    log = []
+    if os.path.exists(log_file):
+        with open(log_file) as f:
+            log = [s.strip() for s in f.readlines()]
+        os.remove(log_file)
+    return {"code": code, "log": log}
+# todo: migration half done
+@router.get("/scenemeta")
+def scenemeta(scene: str):
+    # Get all file information under the directory of scene s
+    return scene_reader.get_one_scene(scene)
+# Not used for now
+@router.get("/auto_annotate")
+def auto_annotate(scene: str, frame: str):
+    """Auto-annotate"""
+    print(f"Auto annotate {scene}, {frame}")
+    file_path = f'./data/{scene}/lidar/{frame}.pcd'
+    if not os.path.exists(file_path):
+        raise HTTPException(status_code=404, detail=f"File not found: {file_path}")
+    return pre_annotate.annotate_file(file_path)
+@router.get("/data/{file:path}")
+def get_file(file):
+    file_path = f'/data/{file}'
+    """Fetch a file"""
+    conn=MinioServer()
+    minio_file=conn.get_file(MINIO_BUCKET, file_path)
+    if minio_file is None:
+        raise HTTPException(status_code=404, detail="File not found")
+    # media_type, _ = mimetypes.guess_type(minio_file)
+    # if media_type is None:
+    media_type = 'application/octet-stream'  # default: binary stream
+    # Set response headers: tell the browser this is an attachment and suggest a filename
+    headers = {
+        'Content-Disposition': f'attachment; filename="{os.path.basename(file_path)}"',
+        "Content-Type": media_type,
+    }
+    # Streaming could use StreamingResponse
+    # response.data is a file-like object and can be read directly
+    return Response(
+        minio_file.read(),  # read the whole file into memory and return it (simple but memory-hungry)
+        # see below for a better streaming approach
+        media_type=media_type,
+        headers=headers
+    )
+# @router.get("/get_file")
+def get_file(file_path:str):
+    """Fetch a file"""
+    conn=MinioServer()
+    minio_file=conn.get_file(MINIO_BUCKET, file_path)
+    # media_type, _ = mimetypes.guess_type(minio_file)
+    # if media_type is None:
+    media_type = 'application/octet-stream'  # default: binary stream
+    # Set response headers: tell the browser this is an attachment and suggest a filename
+    headers = {
+        'Content-Disposition': f'attachment; filename="{os.path.basename(file_path)}"',
+        "Content-Type": media_type,
+    }
+    # Streaming could use StreamingResponse
+    # response.data is a file-like object and can be read directly
+    return Response(
+        minio_file.read(),  # read the whole file into memory and return it (simple but memory-hungry)
+        # see below for a better streaming approach
+        media_type=media_type,
+        headers=headers
+    )
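The comments in get_file above note that reading the whole object into memory is the simple but memory-heavy option. A hedged streaming variant, reusing the imports already present in router.py; the chunked iteration relies on the urllib3 HTTPResponse returned by MinIO's get_object, and the extra route name is hypothetical:

from fastapi.responses import StreamingResponse

@router.get("/data_stream/{file:path}")  # hypothetical extra route, kept separate from /data
def get_file_streamed(file: str):
    conn = MinioServer()
    minio_file = conn.get_file(MINIO_BUCKET, f"/data/{file}")
    if minio_file is None:
        raise HTTPException(status_code=404, detail="File not found")
    def iter_chunks():
        try:
            # Stream the object in 32 KiB chunks instead of buffering it all.
            for chunk in minio_file.stream(32 * 1024):
                yield chunk
        finally:
            minio_file.close()
            minio_file.release_conn()
    return StreamingResponse(iter_chunks(), media_type="application/octet-stream")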

@ -2,38 +2,28 @@
import os
import json
+from crud.service.scene_service import SceneService
this_dir = os.path.dirname(os.path.abspath(__file__))
root_dir = os.path.join(this_dir, "data")
-def get_all_scenes():
-    all_scenes = get_scene_names()
-    print(all_scenes)
-    return list(map(get_one_scene, all_scenes))
-def get_all_scene_desc():
-    names = get_scene_names()
-    descs = {}
-    for n in names:
-        descs[n] = get_scene_desc(n)
-    return descs
def get_scene_names():
-    scenes = os.listdir(root_dir)
-    scenes = filter(lambda s: not os.path.exists(os.path.join(root_dir, s, "disable")), scenes)
+    scenes = SceneService.get_scene_names()
    scenes = list(scenes)
    scenes.sort()
    return scenes
-def get_scene_desc(s):
-    scene_dir = os.path.join(root_dir, s)
-    if os.path.exists(os.path.join(scene_dir, "desc.json")):
-        with open(os.path.join(scene_dir, "desc.json")) as f:
-            desc = json.load(f)
-        return desc
-    return None
def get_one_scene(s):
    # Get all file information under the directory of scene s
+    result = SceneService.get_scene_info(s)
+    return result[0] if result else None
+# todo: the old implementation
+def get_one_scene_(s):
+    # Get all file information under the directory of scene s
    """{
        "scene": "example",   # the top-level directory name
        "frames": [           # the file names under label/, i.e. the annotation result files
@ -250,28 +240,18 @@ def get_one_scene(s):
def read_annotations(scene, frame):
-    filename = os.path.join(root_dir, scene, "label", frame+".json")
-    if (os.path.isfile(filename)):
-        with open(filename,"r") as f:
-            ann=json.load(f)
-            #print(ann)
-            return ann
-    else:
-        return []
+    return SceneService.get_label_json(scene, frame)
+    # filename = os.path.join(root_dir, scene, "label", frame+".json")
+    # if (os.path.isfile(filename)):
+    #     with open(filename,"r") as f:
+    #         ann=json.load(f)
+    #         #print(ann)
+    #         return ann
+    # else:
+    #     return []
-def read_ego_pose(scene, frame):
-    filename = os.path.join(root_dir, scene, "ego_pose", frame+".json")
-    if (os.path.isfile(filename)):
-        with open(filename,"r") as f:
-            p=json.load(f)
-            return p
-    else:
-        return None
-def save_annotations(scene, frame, anno):
-    filename = os.path.join(root_dir, scene, "label", frame+".json")
-    with open(filename, 'w') as outfile:
-        json.dump(anno, outfile)
-if __name__ == "__main__":
-    print(get_all_scenes())
+# def save_annotations(scene, frame, anno):
+#     filename = os.path.join(root_dir, scene, "label", frame+".json")
+#     with open(filename, 'w') as outfile:
+#         json.dump(anno, outfile)

@ -1,15 +1,16 @@
+from typing import List
-import json
-import os
-import sys
import numpy as np
+from crud.entity.models import SceneWorldItem
+from crud.service.scene_service import SceneService
class LabelChecker:
    def __init__(self, path):
        self.path = path
        self.load_frame_ids()
        self.load_labels()
        self.def_labels = [
            "Car","Pedestrian","Van","Bus","Truck","ScooterRider","Scooter","BicycleRider","Bicycle","Motorcycle","MotorcyleRider","PoliceCar","TourCar","RoadWorker","Child",
            "BabyCart","Cart","Cone","FireHydrant","SaftyTriangle","PlatformCart","ConstructionCart","RoadBarrel","TrafficBarrier","LongVehicle","BicycleGroup","ConcreteTruck",
@ -22,6 +23,7 @@ class LabelChecker:
    def clear_messages(self):
        self.messages = []
    def show_messages(self):
        print(111)
        for m in self.messages:
            print(m["frame_id"], m["obj_id"], m["desc"])
@ -33,25 +35,20 @@ class LabelChecker:
        })
    def load_frame_ids(self):
-        lidar_files = os.listdir(os.path.join(self.path, 'lidar'))
-        ids = list(map(lambda f: os.path.splitext(f)[0], lidar_files))
+        ids = SceneService.get_frame_ids(self.path)
        self.frame_ids = ids
+        print("frame_ids:",self.frame_ids)
    def load_labels(self):
-        label_folder = os.path.join(self.path, 'label')
-        files = os.listdir(label_folder)
+        # label_folder = os.path.join(self.path, 'label')
+        # files = os.listdir(label_folder)
        labels = {}
        obj_ids = {}
+        scene_items:List[SceneWorldItem] = SceneService.get_scene_items(self.path)
-        files.sort()
-        print(files)
-        for f in files:
-            with open(os.path.join(label_folder, f),'r') as fp:
-                l = json.load(fp)
-                #print(l)
-                frame_id = os.path.splitext(f)[0]
+        for s in scene_items:
+            l = s.label_json
+            frame_id = s.frame
            labels[frame_id] = l
            for o in l:
                obj_id = o['obj_id']
                if frame_id:
@ -63,6 +60,7 @@ class LabelChecker:
        self.labels = labels
        self.obj_ids = obj_ids
    #templates
    def check_one_label(self, func):
        for f in self.labels:
@ -171,11 +169,12 @@ class LabelChecker:
        self.check_one_obj(lambda id, o: self.check_obj_size(id ,o))
        self.check_one_obj(lambda id, o: self.check_obj_direction(id ,o))
        self.check_one_obj(lambda id, o: self.check_obj_type_consistency(id ,o))
+#
-if __name__ == "__main__":
-    ck = LabelChecker(sys.argv[1])
-    ck.check()
-    ck.show_messages()
+# if __name__ == "__main__":
+#     ck = LabelChecker(sys.argv[1])
+#     # ck = LabelChecker("example")
+#     ck.check()
+#     ck.show_messages()
+#
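With the old __main__ block commented out, the checker is now driven by a scene name rather than a directory path (the router calls check.LabelChecker(scene)). A minimal hedged usage sketch; "example" is an assumed scene_name in the scene_world_item table:

from tools.check_labels import LabelChecker

ck = LabelChecker("example")
ck.check()
ck.show_messages()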

@ -35,6 +35,44 @@ wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc" }, { url = "https://mirrors.aliyun.com/pypi/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc" },
] ]
[[package]]
name = "argon2-cffi"
version = "25.1.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
{ name = "argon2-cffi-bindings" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/0e/89/ce5af8a7d472a67cc819d5d998aa8c82c5d860608c4db9f46f1162d7dab9/argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741" },
]
[[package]]
name = "argon2-cffi-bindings"
version = "25.1.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
{ name = "cffi" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/5c/2d/db8af0df73c1cf454f71b2bbe5e356b8c1f8041c979f505b3d3186e520a9/argon2_cffi_bindings-25.1.0.tar.gz", hash = "sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/1d/57/96b8b9f93166147826da5f90376e784a10582dd39a393c99bb62cfcf52f0/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500" },
{ url = "https://mirrors.aliyun.com/pypi/packages/0a/08/a9bebdb2e0e602dde230bdde8021b29f71f7841bd54801bcfd514acb5dcf/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44" },
{ url = "https://mirrors.aliyun.com/pypi/packages/b6/02/d297943bcacf05e4f2a94ab6f462831dc20158614e5d067c35d4e63b9acb/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0" },
{ url = "https://mirrors.aliyun.com/pypi/packages/c1/93/44365f3d75053e53893ec6d733e4a5e3147502663554b4d864587c7828a7/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6" },
{ url = "https://mirrors.aliyun.com/pypi/packages/09/52/94108adfdd6e2ddf58be64f959a0b9c7d4ef2fa71086c38356d22dc501ea/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a" },
{ url = "https://mirrors.aliyun.com/pypi/packages/72/70/7a2993a12b0ffa2a9271259b79cc616e2389ed1a4d93842fac5a1f923ffd/argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d" },
{ url = "https://mirrors.aliyun.com/pypi/packages/78/9a/4e5157d893ffc712b74dbd868c7f62365618266982b64accab26bab01edc/argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99" },
{ url = "https://mirrors.aliyun.com/pypi/packages/74/cd/15777dfde1c29d96de7f18edf4cc94c385646852e7c7b0320aa91ccca583/argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2" },
{ url = "https://mirrors.aliyun.com/pypi/packages/e2/c6/a759ece8f1829d1f162261226fbfd2c6832b3ff7657384045286d2afa384/argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98" },
{ url = "https://mirrors.aliyun.com/pypi/packages/42/b9/f8d6fa329ab25128b7e98fd83a3cb34d9db5b059a9847eddb840a0af45dd/argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94" },
{ url = "https://mirrors.aliyun.com/pypi/packages/11/2d/ba4e4ca8d149f8dcc0d952ac0967089e1d759c7e5fcf0865a317eb680fbb/argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6dca33a9859abf613e22733131fc9194091c1fa7cb3e131c143056b4856aa47e" },
{ url = "https://mirrors.aliyun.com/pypi/packages/5c/82/9b2386cc75ac0bd3210e12a44bfc7fd1632065ed8b80d573036eecb10442/argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21378b40e1b8d1655dd5310c84a40fc19a9aa5e6366e835ceb8576bf0fea716d" },
{ url = "https://mirrors.aliyun.com/pypi/packages/31/db/740de99a37aa727623730c90d92c22c9e12585b3c98c54b7960f7810289f/argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d588dec224e2a83edbdc785a5e6f3c6cd736f46bfd4b441bbb5aa1f5085e584" },
{ url = "https://mirrors.aliyun.com/pypi/packages/71/7a/47c4509ea18d755f44e2b92b7178914f0c113946d11e16e626df8eaa2b0b/argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5acb4e41090d53f17ca1110c3427f0a130f944b896fc8c83973219c97f57b690" },
{ url = "https://mirrors.aliyun.com/pypi/packages/ee/82/82745642d3c46e7cea25e1885b014b033f4693346ce46b7f47483cf5d448/argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:da0c79c23a63723aa5d782250fbf51b768abca630285262fb5144ba5ae01e520" },
]
[[package]]
name = "astunparse"
version = "1.6.3"
@ -84,6 +122,29 @@ wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de" }, { url = "https://mirrors.aliyun.com/pypi/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de" },
] ]
[[package]]
name = "cffi"
version = "2.0.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
{ name = "pycparser", marker = "implementation_name != 'PyPy'" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44" },
{ url = "https://mirrors.aliyun.com/pypi/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49" },
{ url = "https://mirrors.aliyun.com/pypi/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c" },
{ url = "https://mirrors.aliyun.com/pypi/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb" },
{ url = "https://mirrors.aliyun.com/pypi/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0" },
{ url = "https://mirrors.aliyun.com/pypi/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4" },
{ url = "https://mirrors.aliyun.com/pypi/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453" },
{ url = "https://mirrors.aliyun.com/pypi/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495" },
{ url = "https://mirrors.aliyun.com/pypi/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5" },
{ url = "https://mirrors.aliyun.com/pypi/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb" },
{ url = "https://mirrors.aliyun.com/pypi/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a" },
{ url = "https://mirrors.aliyun.com/pypi/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739" },
]
[[package]]
name = "charset-normalizer"
version = "3.4.3"
@ -298,6 +359,23 @@ wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/a3/de/c648ef6835192e6e2cc03f40b19eeda4382c49b5bafb43d88b931c4c74ac/google_pasta-0.2.0-py3-none-any.whl", hash = "sha256:b32482794a366b5366a32c92a9a9201b107821889935a02b3e51f6b432ea84ed" }, { url = "https://mirrors.aliyun.com/pypi/packages/a3/de/c648ef6835192e6e2cc03f40b19eeda4382c49b5bafb43d88b931c4c74ac/google_pasta-0.2.0-py3-none-any.whl", hash = "sha256:b32482794a366b5366a32c92a9a9201b107821889935a02b3e51f6b432ea84ed" },
] ]
[[package]]
name = "greenlet"
version = "3.2.4"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/7d/ed/6bfa4109fcb23a58819600392564fea69cdc6551ffd5e69ccf1d52a40cbc/greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c" },
{ url = "https://mirrors.aliyun.com/pypi/packages/2a/fc/102ec1a2fc015b3a7652abab7acf3541d58c04d3d17a8d3d6a44adae1eb1/greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590" },
{ url = "https://mirrors.aliyun.com/pypi/packages/c5/26/80383131d55a4ac0fb08d71660fd77e7660b9db6bdb4e8884f46d9f2cc04/greenlet-3.2.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f10fd42b5ee276335863712fa3da6608e93f70629c631bf77145021600abc23c" },
{ url = "https://mirrors.aliyun.com/pypi/packages/9f/7c/e7833dbcd8f376f3326bd728c845d31dcde4c84268d3921afcae77d90d08/greenlet-3.2.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c8c9e331e58180d0d83c5b7999255721b725913ff6bc6cf39fa2a45841a4fd4b" },
{ url = "https://mirrors.aliyun.com/pypi/packages/e9/49/547b93b7c0428ede7b3f309bc965986874759f7d89e4e04aeddbc9699acb/greenlet-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58b97143c9cc7b86fc458f215bd0932f1757ce649e05b640fea2e79b54cedb31" },
{ url = "https://mirrors.aliyun.com/pypi/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d" },
{ url = "https://mirrors.aliyun.com/pypi/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5" },
{ url = "https://mirrors.aliyun.com/pypi/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f" },
{ url = "https://mirrors.aliyun.com/pypi/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c" },
]
[[package]]
name = "grpcio"
version = "1.75.1"
@ -524,6 +602,22 @@ wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/b9/a9/21c9439d698fac5f0de8fc68b2405b738ed1f00e1279c76f2d9aa5521ead/matplotlib-3.10.7-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:53cc80662dd197ece414dd5b66e07370201515a3eaf52e7c518c68c16814773b" }, { url = "https://mirrors.aliyun.com/pypi/packages/b9/a9/21c9439d698fac5f0de8fc68b2405b738ed1f00e1279c76f2d9aa5521ead/matplotlib-3.10.7-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:53cc80662dd197ece414dd5b66e07370201515a3eaf52e7c518c68c16814773b" },
] ]
[[package]]
name = "minio"
version = "7.2.4"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
{ name = "argon2-cffi" },
{ name = "certifi" },
{ name = "pycryptodome" },
{ name = "typing-extensions" },
{ name = "urllib3" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/3a/2e/7bd24eb2e02a19a03bd0e73e59c051c62c62cabdd305ccbc59a90143752c/minio-7.2.4.tar.gz", hash = "sha256:d504d8464e5198fb74dd9b572cc88b185ae7997c17705e8c09f3fef2f439d984" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/a8/29/17ec9cecedad692cf18abd0b5e57d7008d1dda8929915e7cfee76ea0e849/minio-7.2.4-py3-none-any.whl", hash = "sha256:91b51c21d25e3ee6d51f52eab126d6c974371add0d77951e42c322a59c5533e7" },
]
[[package]]
name = "ml-dtypes"
version = "0.2.0"
@ -664,6 +758,39 @@ wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a" }, { url = "https://mirrors.aliyun.com/pypi/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a" },
] ]
[[package]]
name = "pycparser"
version = "2.23"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934" },
]
[[package]]
name = "pycryptodome"
version = "3.23.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/8e/a6/8452177684d5e906854776276ddd34eca30d1b1e15aa1ee9cefc289a33f5/pycryptodome-3.23.0.tar.gz", hash = "sha256:447700a657182d60338bab09fdb27518f8856aecd80ae4c6bdddb67ff5da44ef" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27" },
{ url = "https://mirrors.aliyun.com/pypi/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843" },
{ url = "https://mirrors.aliyun.com/pypi/packages/50/52/adaf4c8c100a8c49d2bd058e5b551f73dfd8cb89eb4911e25a0c469b6b4e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67bd81fcbe34f43ad9422ee8fd4843c8e7198dd88dd3d40e6de42ee65fbe1490" },
{ url = "https://mirrors.aliyun.com/pypi/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575" },
{ url = "https://mirrors.aliyun.com/pypi/packages/f9/c5/ffe6474e0c551d54cab931918127c46d70cab8f114e0c2b5a3c071c2f484/pycryptodome-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0698f65e5b570426fc31b8162ed4603b0c2841cbb9088e2b01641e3065915b" },
{ url = "https://mirrors.aliyun.com/pypi/packages/18/28/e199677fc15ecf43010f2463fde4c1a53015d1fe95fb03bca2890836603a/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:53ecbafc2b55353edcebd64bf5da94a2a2cdf5090a6915bcca6eca6cc452585a" },
{ url = "https://mirrors.aliyun.com/pypi/packages/ce/ea/4fdb09f2165ce1365c9eaefef36625583371ee514db58dc9b65d3a255c4c/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:156df9667ad9f2ad26255926524e1c136d6664b741547deb0a86a9acf5ea631f" },
{ url = "https://mirrors.aliyun.com/pypi/packages/22/82/6edc3fc42fe9284aead511394bac167693fb2b0e0395b28b8bedaa07ef04/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:dea827b4d55ee390dc89b2afe5927d4308a8b538ae91d9c6f7a5090f397af1aa" },
{ url = "https://mirrors.aliyun.com/pypi/packages/59/fe/aae679b64363eb78326c7fdc9d06ec3de18bac68be4b612fc1fe8902693c/pycryptodome-3.23.0-cp37-abi3-win32.whl", hash = "sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886" },
{ url = "https://mirrors.aliyun.com/pypi/packages/54/2f/e97a1b8294db0daaa87012c24a7bb714147c7ade7656973fd6c736b484ff/pycryptodome-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2" },
{ url = "https://mirrors.aliyun.com/pypi/packages/18/3d/f9441a0d798bf2b1e645adc3265e55706aead1255ccdad3856dbdcffec14/pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c" },
{ url = "https://mirrors.aliyun.com/pypi/packages/d9/12/e33935a0709c07de084d7d58d330ec3f4daf7910a18e77937affdb728452/pycryptodome-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ddb95b49df036ddd264a0ad246d1be5b672000f12d6961ea2c267083a5e19379" },
{ url = "https://mirrors.aliyun.com/pypi/packages/22/0b/aa8f9419f25870889bebf0b26b223c6986652bdf071f000623df11212c90/pycryptodome-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e95564beb8782abfd9e431c974e14563a794a4944c29d6d3b7b5ea042110b4" },
{ url = "https://mirrors.aliyun.com/pypi/packages/d4/5e/63f5cbde2342b7f70a39e591dbe75d9809d6338ce0b07c10406f1a140cdc/pycryptodome-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14e15c081e912c4b0d75632acd8382dfce45b258667aa3c67caf7a4d4c13f630" },
{ url = "https://mirrors.aliyun.com/pypi/packages/d6/92/608fbdad566ebe499297a86aae5f2a5263818ceeecd16733006f1600403c/pycryptodome-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7fc76bf273353dc7e5207d172b83f569540fc9a28d63171061c42e361d22353" },
{ url = "https://mirrors.aliyun.com/pypi/packages/d1/92/2eadd1341abd2989cce2e2740b4423608ee2014acb8110438244ee97d7ff/pycryptodome-3.23.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:45c69ad715ca1a94f778215a11e66b7ff989d792a4d63b68dc586a1da1392ff5" },
]
[[package]]
name = "pydantic"
version = "2.12.0"
@ -711,6 +838,15 @@ wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/7e/78/3093b334e9c9796c8236a4701cd2ddef1c56fb0928fe282a10c797644380/pydantic_core-2.41.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:13ab9cc2de6f9d4ab645a050ae5aee61a2424ac4d3a16ba23d4c2027705e0301" }, { url = "https://mirrors.aliyun.com/pypi/packages/7e/78/3093b334e9c9796c8236a4701cd2ddef1c56fb0928fe282a10c797644380/pydantic_core-2.41.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:13ab9cc2de6f9d4ab645a050ae5aee61a2424ac4d3a16ba23d4c2027705e0301" },
] ]
[[package]]
name = "pymysql"
version = "1.1.2"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/f5/ae/1fe3fcd9f959efa0ebe200b8de88b5a5ce3e767e38c7ac32fb179f16a388/pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9" },
]
[[package]]
name = "pyparsing"
version = "3.2.5"
@ -819,6 +955,40 @@ wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" }, { url = "https://mirrors.aliyun.com/pypi/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" },
] ]
[[package]]
name = "sqlalchemy"
version = "2.0.44"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
{ name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
{ name = "typing-extensions" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/a2/a7/e9ccfa7eecaf34c6f57d8cb0bb7cbdeeff27017cc0f5d0ca90fdde7a7c0d/sqlalchemy-2.0.44-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c77f3080674fc529b1bd99489378c7f63fcb4ba7f8322b79732e0258f0ea3ce" },
{ url = "https://mirrors.aliyun.com/pypi/packages/b1/e1/50bc121885bdf10833a4f65ecbe9fe229a3215f4d65a58da8a181734cae3/sqlalchemy-2.0.44-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26ef74ba842d61635b0152763d057c8d48215d5be9bb8b7604116a059e9985" },
{ url = "https://mirrors.aliyun.com/pypi/packages/46/f2/a8573b7230a3ce5ee4b961a2d510d71b43872513647398e595b744344664/sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a172b31785e2f00780eccab00bc240ccdbfdb8345f1e6063175b3ff12ad1b0" },
{ url = "https://mirrors.aliyun.com/pypi/packages/4a/d8/c63d8adb6a7edaf8dcb6f75a2b1e9f8577960a1e489606859c4d73e7d32b/sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9480c0740aabd8cb29c329b422fb65358049840b34aba0adf63162371d2a96e" },
{ url = "https://mirrors.aliyun.com/pypi/packages/ee/a6/243d277a4b54fae74d4797957a7320a5c210c293487f931cbe036debb697/sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17835885016b9e4d0135720160db3095dc78c583e7b902b6be799fb21035e749" },
{ url = "https://mirrors.aliyun.com/pypi/packages/5f/f8/6a39516ddd75429fd4ee5a0d72e4c80639fab329b2467c75f363c2ed9751/sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cbe4f85f50c656d753890f39468fcd8190c5f08282caf19219f684225bfd5fd2" },
{ url = "https://mirrors.aliyun.com/pypi/packages/43/f0/118355d4ad3c39d9a2f5ee4c7304a9665b3571482777357fa9920cd7a6b4/sqlalchemy-2.0.44-cp310-cp310-win32.whl", hash = "sha256:2fcc4901a86ed81dc76703f3b93ff881e08761c63263c46991081fd7f034b165" },
{ url = "https://mirrors.aliyun.com/pypi/packages/61/83/6ae5f9466f8aa5d0dcebfff8c9c33b98b27ce23292df3b990454b3d434fd/sqlalchemy-2.0.44-cp310-cp310-win_amd64.whl", hash = "sha256:9919e77403a483ab81e3423151e8ffc9dd992c20d2603bf17e4a8161111e55f5" },
{ url = "https://mirrors.aliyun.com/pypi/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05" },
]
[[package]]
name = "sqlmodel"
version = "0.0.27"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "sqlalchemy" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/90/5a/693d90866233e837d182da76082a6d4c2303f54d3aaaa5c78e1238c5d863/sqlmodel-0.0.27.tar.gz", hash = "sha256:ad1227f2014a03905aef32e21428640848ac09ff793047744a73dfdd077ff620" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/8c/92/c35e036151fe53822893979f8a13e6f235ae8191f4164a79ae60a95d66aa/sqlmodel-0.0.27-py3-none-any.whl", hash = "sha256:667fe10aa8ff5438134668228dc7d7a08306f4c5c4c7e6ad3ad68defa0e7aa49" },
]
[[package]]
name = "starlette"
version = "0.48.0"
@ -842,6 +1012,9 @@ dependencies = [
{ name = "fastapi" }, { name = "fastapi" },
{ name = "filterpy" }, { name = "filterpy" },
{ name = "jinja2" }, { name = "jinja2" },
{ name = "minio" },
{ name = "pymysql" },
{ name = "sqlmodel" },
{ name = "tensorflow" }, { name = "tensorflow" },
{ name = "tensorflow-macos", marker = "sys_platform == 'darwin'" }, { name = "tensorflow-macos", marker = "sys_platform == 'darwin'" },
{ name = "uvicorn" }, { name = "uvicorn" },
@ -854,6 +1027,9 @@ requires-dist = [
{ name = "fastapi", specifier = ">=0.104.0" }, { name = "fastapi", specifier = ">=0.104.0" },
{ name = "filterpy", specifier = ">=1.4.5" }, { name = "filterpy", specifier = ">=1.4.5" },
{ name = "jinja2", specifier = ">=3.1.6" }, { name = "jinja2", specifier = ">=3.1.6" },
{ name = "minio", specifier = "==7.2.4" },
{ name = "pymysql", specifier = ">=1.1.2" },
{ name = "sqlmodel", specifier = ">=0.0.27" },
{ name = "tensorflow", specifier = "==2.15.0" }, { name = "tensorflow", specifier = "==2.15.0" },
{ name = "tensorflow-macos", marker = "sys_platform == 'darwin'", specifier = "==2.15.0" }, { name = "tensorflow-macos", marker = "sys_platform == 'darwin'", specifier = "==2.15.0" },
{ name = "uvicorn", specifier = ">=0.37.0" }, { name = "uvicorn", specifier = ">=0.37.0" },
