Compare commits

...

3 Commits

@@ -1,9 +1,10 @@
import json
from datetime import datetime
from fastapi import File, UploadFile, APIRouter, BackgroundTasks, HTTPException
from fastapi import File, UploadFile, APIRouter, BackgroundTasks, HTTPException, Form, Query
from fastapi.responses import JSONResponse, StreamingResponse, FileResponse
from pathlib import Path
from typing import List
from typing import List, Optional
from time import sleep
import time
@@ -22,7 +23,7 @@ from app.tools.doc2mysql import (
    get_file_path,
    get_weekly_file,
    save_raw_files,
    get_raw_file,
    get_raw_file, query_vacation_data, delete_vacation_data, save_vacation_data,
)
from app.tools.move_raw_files import move_raw_files
@@ -61,7 +62,9 @@ data_dict = {}
    summary="All file types can be uploaded",
    description="Handle the file upload; if a file is in doc format, convert it to docx",
)
async def upload_file(files: List[UploadFile] = File(...)):
async def upload_file(files: List[UploadFile] = File(...),
                      vacationDataRequest: Optional[str] = Form(None),
                      powerfile: UploadFile = File(None),):
    try:
        if not os.path.exists(UPLOAD_DIR):
            os.makedirs(UPLOAD_DIR)
@@ -119,9 +122,40 @@ async def upload_file(files: List[UploadFile] = File(...)):
        else:
            sleep(2)
        vacation_id = None
        if vacationDataRequest is not None:
            if powerfile is None:
                return JSONResponse(
                    content={
                        "status_code": 500,
                        "detail": "The holiday power-outage file must not be empty.",
                    }
                )
            else:
                cleaned_filename = clean_file_names(powerfile.filename)
                logger.info(f"Cleaned filename: {cleaned_filename}")
                file_path = os.path.join(UPLOAD_DIR, cleaned_filename)
                with open(file_path, "wb") as buffer:
                    shutil.copyfileobj(powerfile.file, buffer)
                data_dict = json.loads(vacationDataRequest)
                vacation_data_info = query_vacation_data(data_dict["last_year_period"])
                if vacation_data_info is not None:
                    delete_vacation_data(data_dict["last_year_period"])
                vacationData = {
                    "last_year_period": data_dict["last_year_period"],
                    "report_json": data_dict["report_json"],
                    "created_at": datetime.now(),
                    "updated_at": datetime.now(),
                    "power_file": cleaned_filename,
                }
                data_value = save_vacation_data(vacationData)
                vacation_id = data_value.id
        return JSONResponse(
            content={"status_code": 200, "detail": "File uploaded and data processed successfully."}
            content={
                "status_code": 200,
                "detail": "File uploaded and data processed successfully.",
                "data": vacation_id,
            }
        )
        # --------------------------------------- Production version: doc-to-docx conversion code ---------------------------------------
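For orientation, below is a minimal client-side sketch of how the extended upload endpoint could be called. The host, the route path /upload_file/ (not visible in this hunk), and all field values are assumptions; only the field names files, vacationDataRequest and powerfile come from the new signature.

# Hypothetical client call against the extended upload endpoint (path and values assumed).
import json
import requests

vacation_request = {
    "last_year_period": "2024-05-01_2024-05-05",  # hypothetical period key
    "report_json": {"outages": []},               # hypothetical report payload
}

with open("daily.docx", "rb") as daily, open("holiday_power.docx", "rb") as power:
    resp = requests.post(
        "http://localhost:8000/upload_file/",           # assumed route and host
        files=[
            ("files", ("daily.docx", daily)),           # matches files: List[UploadFile]
            ("powerfile", ("holiday_power.docx", power)),
        ],
        data={"vacationDataRequest": json.dumps(vacation_request)},
    )
print(resp.json())  # expected shape: {"status_code": 200, "detail": ..., "data": <vacation_id>}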
@@ -130,13 +164,14 @@ async def upload_file(files: List[UploadFile] = File(...)):
        logger.exception(f"File upload failed: {e}")
        return JSONResponse(content={"status_code": 500, "detail": f"File upload failed: {e}"})


@router.get(
    "/generate_report/",
    summary="Generate the daily report",
    description="Generate the daily report; convert the generated brief and daily report documents to HTML and return them to the frontend",
)
async def generate_report(background_tasks: BackgroundTasks, time_type: int = 0):
async def generate_report(background_tasks: BackgroundTasks,
                          vacation_id: Optional[int] = Query(None, description="ID of the vacation data record to use"),
                          time_type: Optional[int] = Query(None, description="Whether the cut-off time is 17:00 or 24:00")):
    global data_dict
    try:
        logger.info("Start generating the daily report")
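A matching sketch for the reworked report endpoint; the path /generate_report/ is taken from the decorator above, while the host and the parameter values are placeholders.

# Hypothetical request using the vacation_id returned by the upload call above.
import requests

resp = requests.get(
    "http://localhost:8000/generate_report/",
    params={"vacation_id": 42, "time_type": 17},  # placeholder id and cut-off time
)
print(resp.status_code)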

@@ -5,12 +5,14 @@ from datetime import datetime
# from datetime import date
from docx import Document
from sqlalchemy import delete
from sqlalchemy.orm import Session
from sqlalchemy.dialects.mysql import insert
# from io import BytesIO
from app.entity.database.session import get_db
from app.entity.models.PowerOutageStats import DailyReport, DailyReportRawdata
from app.entity.models.VacationData import VacationData
# Get the logger
logger = logging.getLogger(__name__)
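The imported VacationData model is not part of this diff; the helpers below only require that it exposes the columns written in save_vacation_data. A minimal sketch of such a model follows, with column types and the declarative-base import as assumptions (the table name vacation_data comes from the changelog entry at the end of this comparison).

# Sketch of the assumed VacationData model (app/entity/models/VacationData.py).
# Column names mirror the dict built in router.py; types and the Base import are guesses.
from sqlalchemy import Column, DateTime, Integer, JSON, String

from app.entity.database.base import Base  # assumed location of the declarative base


class VacationData(Base):
    __tablename__ = "vacation_data"

    id = Column(Integer, primary_key=True, autoincrement=True)
    last_year_period = Column(String(64), index=True)  # e.g. "2024-05-01_2024-05-05"
    report_json = Column(JSON)        # report payload sent by the frontend
    power_file = Column(String(255))  # cleaned filename of the outage document
    created_at = Column(DateTime)
    updated_at = Column(DateTime)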
@@ -67,6 +69,52 @@ def save_raw_files(data_dict):
        print(f"Failed to write the raw-data document path to the database: {e}")


def delete_vacation_data(last_year_period: str):
    try:
        logger.info("Start deleting last year's vacation record")
        db: Session = next(get_db())
        print(f"last_year_period: {last_year_period}")
        delete_statement = delete(VacationData).where(
            VacationData.last_year_period == last_year_period
        )
        result = db.execute(delete_statement)
        db.commit()
        return result
    except Exception as e:
        logger.exception(f"Failed to delete the vacation record: {e}")
        return None


def query_vacation_data(last_year_period: str):
    try:
        logger.info("Start querying vacation data")
        db: Session = next(get_db())
        print(f"last_year_period: {last_year_period}")
        vacation_data_info = (
            db.query(VacationData)
            .filter(VacationData.last_year_period == last_year_period)
            .first()
        )
        return vacation_data_info
    except Exception as e:
        logger.exception(f"Failed to query vacation data: {e}")
        return None


def save_vacation_data(data_dict):
    try:
        logger.info("Start writing vacation data to MySQL")
        # Get a database session
        db: Session = next(get_db())
        new_vacation = VacationData(**data_dict)
        db.add(new_vacation)
        db.commit()
        db.refresh(new_vacation)
        return new_vacation
    except Exception as e:
        logger.exception(f"Failed to write vacation data to the database: {e}")


def get_file_path(statistics_time: datetime):
    try:
        logger.info("Start querying records to download")
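Taken together, the three new helpers implement a query, delete, then insert replacement for a given last_year_period, mirroring the flow in upload_file. A condensed usage sketch with placeholder values:

# Replace-or-create flow for one holiday period (placeholder values).
from datetime import datetime

from app.tools.doc2mysql import (
    delete_vacation_data,
    query_vacation_data,
    save_vacation_data,
)

period = "2024-05-01_2024-05-05"  # hypothetical period key
if query_vacation_data(period) is not None:
    delete_vacation_data(period)  # drop the stale row before re-inserting

record = save_vacation_data({
    "last_year_period": period,
    "report_json": {"outages": []},      # hypothetical payload
    "power_file": "holiday_power.docx",
    "created_at": datetime.now(),
    "updated_at": datetime.now(),
})
print(record.id)

Since this file also imports sqlalchemy.dialects.mysql.insert, the delete-then-insert pair could alternatively be collapsed into an upsert via on_duplicate_key_update, assuming a unique key on last_year_period.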

@@ -1,4 +1,9 @@
# 2025-07-11
1) Daily report template
2) tools/final_doc.py
3) tools/replace_table.py
3) tools/replace_table.py
# 2025-07-17
1) router.py
2) doc2mysql.py
3) Add table vacation_data