feat 🐛: init
commit
febff32998
@ -0,0 +1,24 @@
|
||||
|
||||
# Python base image
FROM python:3.12

# Working directory inside the container
WORKDIR /app

# Use the Aliyun PyPI mirror for faster installs
RUN pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/

# Copy only the dependency manifest first so the install layer is
# cached until requirements.txt changes
COPY requirements.txt .

# Install Python dependencies (no pip cache kept in the layer)
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application source
COPY . .

# Document the service port.
# Fixed: was EXPOSE 8000, but uvicorn below (and docker-compose's
# "5000:5000" mapping) listens on 5000.
EXPOSE 5000

# Start the FastAPI application
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "5000"]
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,6 @@
|
||||
# Create all database tables registered on the declarative Base.
# Importing the model module is required so its tables are registered
# on Base.metadata before create_all runs.
from app.entity.database.base import Base
from app.entity.database.session import engine
from app.entity.models.PowerOutageStats import DailyReport

# Emit CREATE TABLE for every registered model (no-op for existing tables)
Base.metadata.create_all(bind=engine)
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,4 @@
|
||||
# Shared declarative base for all ORM models.
# declarative_base() moved to sqlalchemy.orm in SQLAlchemy 1.4;
# the sqlalchemy.ext.declarative path is deprecated.
from sqlalchemy.orm import declarative_base

# Every model class inherits from Base so its table is registered on
# Base.metadata (consumed by Base.metadata.create_all elsewhere).
Base = declarative_base()
|
@ -0,0 +1,118 @@
|
||||
from sqlalchemy import Column, Integer, TIMESTAMP, DateTime, String, JSON
|
||||
from sqlalchemy import func as func_sql
|
||||
from app.entity.database.base import Base
|
||||
|
||||
# class PowerOutageStats(Base):
|
||||
# """
|
||||
# 停电统计数据表 ORM 模型
|
||||
# """
|
||||
# __tablename__ = 'power_outage_stats' # 表名
|
||||
#
|
||||
# # 定义字段
|
||||
# id = Column(Integer, primary_key=True, autoincrement=True, comment='主键ID')
|
||||
# province_company = Column(String(100), nullable=False, comment='省公司')
|
||||
# outage_users = Column(Integer, nullable=False, comment='停电用户数')
|
||||
# outage_ratio = Column(String(100), nullable=False,comment='停电环比(百分比)')
|
||||
# short_outage_users = Column(Integer, comment='短时停电用户数')
|
||||
# outage_user_ratio = Column(String(100), nullable=False,comment='停电用户占本单位比例(百分比)')
|
||||
# repair_count = Column(Integer, comment='故障抢修数')
|
||||
# repair_arrival_time = Column(String(100), nullable=False,comment='故障抢修到位时间(小时)')
|
||||
# repair_completion_time = Column(String(100), nullable=False, comment='故障抢修完成时间(小时)')
|
||||
# complaint_count = Column(Integer, comment='投诉数量')
|
||||
# complaint_ratio = Column(String(100), nullable=False,comment='投诉环比(百分比)')
|
||||
# public_opinion_count = Column(Integer, comment='舆情数量')
|
||||
# public_opinion_ratio = Column(String(100), nullable=False,comment='舆情环比(百分比)')
|
||||
# major_event_count = Column(Integer, comment='重大事件数量')
|
||||
# statistics_time = Column(Date, nullable=False,comment='统计时间')
|
||||
# created_at = Column(TIMESTAMP, server_default=func_sql.now(), comment='记录创建时间')
|
||||
# updated_at = Column(TIMESTAMP, server_default=func_sql.now(), onupdate=func_sql.now(), comment='记录更新时间')
|
||||
|
||||
|
||||
# Storage paths of generated daily reports
class DailyReport(Base):
    """
    ORM model holding the file-system paths of a generated daily report
    and its brief version (the documents themselves live on disk; this
    table stores their locations).
    """

    __tablename__ = "daily_report"

    # Column definitions
    id = Column(Integer, primary_key=True, autoincrement=True, comment="主键ID")
    report_title = Column(String(100), nullable=False, comment="日报标题")
    daily_report = Column(String(100), nullable=False, comment="日报保存路径")
    daily_repo_simple = Column(String(100), nullable=False, comment="简报保存路径")
    save_folder = Column(String(100), nullable=False, comment="保存的子文件夹")
    # One row per statistics date: unique + indexed for lookups by date
    statistics_time = Column(
        DateTime, nullable=False, comment="统计时间", unique=True, index=True
    )
    created_at = Column(
        TIMESTAMP, server_default=func_sql.now(), comment="记录创建时间"
    )
    updated_at = Column(
        TIMESTAMP,
        server_default=func_sql.now(),
        onupdate=func_sql.now(),
        comment="记录更新时间",
    )

    # daily_repo_simple_excel = Column(String(100), nullable=False,comment='简报excel保存路径')
    # img = Column(String(100), nullable=False,comment='图片保存路径')
    def to_dict(self):
        # Serialize the row to a plain dict; datetime columns are
        # formatted as "YYYY-MM-DD HH:MM:SS" strings.
        # NOTE(review): assumes statistics_time/created_at/updated_at are
        # non-None (server defaults populate the timestamps on flush) —
        # calling this on an unflushed instance would raise.
        return {
            "id": self.id,
            "report_title": self.report_title,
            "daily_report": self.daily_report,
            "daily_repo_simple": self.daily_repo_simple,
            "save_folder": self.save_folder,
            "statistics_time": self.statistics_time.strftime("%Y-%m-%d %H:%M:%S"),
            "created_at": self.created_at.strftime("%Y-%m-%d %H:%M:%S"),
            "updated_at": self.updated_at.strftime("%Y-%m-%d %H:%M:%S"),
            # 'daily_repo_simple_excel':self.daily_repo_simple_excel,
            # 'img':self.img
        }
|
||||
|
||||
|
||||
# Storage paths of the original (raw) uploaded files
class DailyReportRawdata(Base):
    """
    ORM model holding the file-system paths of the raw source files a
    daily report was built from. The individual paths are stored
    together in a JSON column (raw_data_path).
    """

    __tablename__ = "daily_report_rawdata"

    # Column definitions
    id = Column(Integer, primary_key=True, autoincrement=True, comment="主键ID")
    report_title = Column(String(100), nullable=False, comment="日报标题")
    save_folder = Column(String(100), nullable=False, comment="保存的子文件夹")
    # sentiment_doc = Column(String(100), nullable=False,comment='舆情word原始文件保存路径')
    # complaint_doc = Column(String(100), nullable=False,comment='投诉word原始文件保存路径')
    # complaint_tb = Column(String(100), nullable=False,comment='投诉excel原始文件保存路径')
    # power_off_tb = Column(String(100), nullable=False,comment='停电excel原始文件保存路径')
    raw_data_path = Column(JSON, nullable=False, comment="原始文件保存路径")
    # One row per statistics date: unique + indexed for lookups by date
    statistics_time = Column(
        DateTime, nullable=False, comment="统计时间", unique=True, index=True
    )
    created_at = Column(
        TIMESTAMP, server_default=func_sql.now(), comment="记录创建时间"
    )
    updated_at = Column(
        TIMESTAMP,
        server_default=func_sql.now(),
        onupdate=func_sql.now(),
        comment="记录更新时间",
    )

    # daily_repo_simple_excel = Column(String(100), nullable=False,comment='简报excel保存路径')
    # img = Column(String(100), nullable=False,comment='图片保存路径')
    def to_dict(self):
        # Serialize the row to a plain dict; datetime columns are
        # formatted as "YYYY-MM-DD HH:MM:SS" strings.
        # NOTE(review): assumes the timestamp columns are non-None
        # (server defaults populate them on flush).
        return {
            "id": self.id,
            "report_title": self.report_title,
            "save_folder": self.save_folder,
            "raw_data_path": self.raw_data_path,
            "statistics_time": self.statistics_time.strftime("%Y-%m-%d %H:%M:%S"),
            "created_at": self.created_at.strftime("%Y-%m-%d %H:%M:%S"),
            "updated_at": self.updated_at.strftime("%Y-%m-%d %H:%M:%S"),
            # 'daily_repo_simple_excel':self.daily_repo_simple_excel,
            # 'img':self.img
        }
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,31 @@
|
||||
{
|
||||
"version": 1.0,
|
||||
"disable_existing_loggers": false,
|
||||
"formatters": {
|
||||
"standard": {
|
||||
"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
||||
}
|
||||
},
|
||||
"handlers": {
|
||||
"console": {
|
||||
"class": "logging.StreamHandler",
|
||||
"level": "INFO",
|
||||
"formatter": "standard",
|
||||
"stream": "ext://sys.stdout"
|
||||
},
|
||||
"file": {
|
||||
"class": "logging.FileHandler",
|
||||
"level": "DEBUG",
|
||||
"formatter": "standard",
|
||||
"filename": "app.log",
|
||||
"mode": "a"
|
||||
}
|
||||
},
|
||||
"loggers": {
|
||||
"": {
|
||||
"handlers": ["console", "file"],
|
||||
"level": "DEBUG",
|
||||
"propagate": true
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,24 @@
|
||||
import json
|
||||
import logging
|
||||
import logging.config
|
||||
import os
|
||||
|
||||
|
||||
def setup_logging(
    default_path="logging.conf", default_level=logging.INFO, env_key="LOG_CFG"
):
    """Configure the logging system.

    Loads a dictConfig-style JSON file from `default_path`, or from the
    path named by the `env_key` environment variable when that is set.
    Falls back to logging.basicConfig(default_level) if the file does
    not exist.
    """
    config_path = os.getenv(env_key) or default_path

    if not os.path.exists(config_path):
        # No config file available: minimal root-logger setup
        logging.basicConfig(level=default_level)
        return

    with open(config_path, "rt") as config_file:
        logging.config.dictConfig(json.load(config_file))
|
||||
|
||||
|
||||
# Allow running this module directly to initialise logging manually.
if __name__ == "__main__":
    setup_logging()
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,106 @@
|
||||
# from datetime import date
|
||||
# from sqlalchemy.orm import Session
|
||||
# from openpyxl import load_workbook
|
||||
# from pathlib import Path
|
||||
#
|
||||
# import pandas as pd
|
||||
# import os
|
||||
# import logging
|
||||
#
|
||||
# from app.entity.database.session import get_db
|
||||
# from app.entity.models.PowerOutageStats import PowerOutageStats
|
||||
#
|
||||
# # 获取日志记录器
|
||||
# logger = logging.getLogger(__name__)
|
||||
#
|
||||
# # 三份累计表
|
||||
# def accumulated_statistics(start_time, end_time, save_path=None):
|
||||
#
|
||||
# try:
|
||||
# logger.info('对数据库的查询结果进行处理,完成三个累计表数据的组装')
|
||||
#
|
||||
# db: Session = next(get_db())
|
||||
# # 查询某个时间段的数据
|
||||
# results = db.query(PowerOutageStats.province_company,PowerOutageStats.outage_users,PowerOutageStats.short_outage_users,
|
||||
# PowerOutageStats.repair_count,PowerOutageStats.complaint_count,PowerOutageStats.public_opinion_count,
|
||||
# PowerOutageStats.major_event_count, PowerOutageStats.statistics_time)
|
||||
#
|
||||
# # Excel 模板路径
|
||||
# # 获取当前文件夹路径
|
||||
# current_path = Path(__file__).parent
|
||||
# templates_path = str(os.path.join(current_path.parent, 'templates')).replace('\\', '/')
|
||||
#
|
||||
# # 加载 Excel 模板
|
||||
# book = load_workbook(f'{templates_path}/累计数据模板.xlsx')
|
||||
#
|
||||
# # 选择要写入的 Sheet 页
|
||||
# sheet_name = 'Sheet1' # 替换为你的 Sheet 页名称
|
||||
# sheet = book[sheet_name]
|
||||
#
|
||||
# # 查询结果用pandas进行处理
|
||||
# if results:
|
||||
#
|
||||
# # 将数据转成pandas数据结构
|
||||
# df = pd.read_sql(results.statement, results.session.bind)
|
||||
# # 插入序号列作为第一列
|
||||
# df.insert(0, 'num', df.index + 1)
|
||||
#
|
||||
# # 组成表1数据
|
||||
# df_temp = df[(df['statistics_time'] >= start_time) & (df['statistics_time'] <= end_time)]
|
||||
# df_table1 = df_temp[['statistics_time', 'outage_users', 'complaint_count','public_opinion_count']]
|
||||
# df_table1 = df_table1.groupby('statistics_time').sum()
|
||||
# df_table1 = df_table1.reset_index()
|
||||
#
|
||||
# # 表1写入excel的位置
|
||||
# start_row1 = 3
|
||||
# start_col1 = 1
|
||||
# print(df_table1)
|
||||
# write_to_excel(df_table1,sheet,start_row1,start_col1)
|
||||
#
|
||||
#
|
||||
# # 组成表2数据
|
||||
# df_table2 = df_temp[['statistics_time', 'outage_users', 'short_outage_users', 'repair_count','complaint_count','public_opinion_count']]
|
||||
# df_table2 = df_table2.groupby('statistics_time').sum()
|
||||
# df_table2 = df_table2.reset_index()
|
||||
#
|
||||
# # 表2写入excel的位置
|
||||
# start_row2 = 3
|
||||
# start_col2 = 6
|
||||
# print(df_table2)
|
||||
# write_to_excel(df_table2,sheet,start_row2,start_col2)
|
||||
#
|
||||
# # 表3写入excel的位置
|
||||
# start_row3 = 3
|
||||
# start_col3 = 13
|
||||
# df_table3 = df.drop('statistics_time', axis=1)
|
||||
# write_to_excel(df_table3,sheet,start_row3,start_col3)
|
||||
#
|
||||
# # 最终结果生成
|
||||
# book.save(f'{save_path}/累积统计表.xlsx')
|
||||
# except Exception as e:
|
||||
# logger.error(f'写入excel失败: {e}')
|
||||
# raise e
|
||||
#
|
||||
# #对三张表进行组装
|
||||
# def write_to_excel(df, sheet, start_row, start_col):
|
||||
#
|
||||
# try:
|
||||
# logger.info('开始写入excel')
|
||||
# # 将 DataFrame 写入指定位置
|
||||
# for i, row in enumerate(df.itertuples(index=False), start=start_row):
|
||||
# for j, value in enumerate(row, start=start_col):
|
||||
# sheet.cell(row=i, column=j, value=value)
|
||||
# except Exception as e:
|
||||
# logger.error(f'写入excel失败: {e}')
|
||||
# raise e
|
||||
#
|
||||
#
|
||||
# if __name__ == '__main__':
|
||||
#
|
||||
#
|
||||
# start_time = date(2025,3,9)
|
||||
# end_time = date(2025,3,10)
|
||||
# print(end_time)
|
||||
# accumulated_statistics(start_time, end_time)
|
||||
#
|
||||
#
|
@ -0,0 +1,16 @@
|
||||
import re
|
||||
|
||||
|
||||
# Sanitize a file name before storing/serving it
def clean_file_names(filename: str) -> str:
    """
    Sanitize a file name.

    Removes every character that is not a word character (letters,
    digits, underscore — Unicode letters included), a dot, or a hyphen.
    Whitespace (spaces, CR, LF) is already excluded by this class, so
    the previous extra `|[\\s\\r\\n]*` alternative — which could only
    ever match the empty string — has been dropped.

    :param filename: raw file name, possibly containing unsafe characters
    :return: the cleaned name, or "untitled" when nothing survives
    """
    cleaned = re.sub(r"[^\w.-]", "", filename)
    # Guarantee a non-empty result
    return cleaned or "untitled"
|
@ -0,0 +1,159 @@
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
# import io
|
||||
|
||||
# from datetime import date
|
||||
from docx import Document
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
# from io import BytesIO
|
||||
|
||||
from app.entity.database.session import get_db
|
||||
from app.entity.models.PowerOutageStats import DailyReport, DailyReportRawdata
|
||||
|
||||
# 获取日志记录器
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Persist the daily-report file paths to the database (upsert)
def save_word_document(data_dict):
    """Insert `data_dict` into the daily_report table; when a row with
    the same statistics_time already exists, update it instead
    (MySQL INSERT ... ON DUPLICATE KEY UPDATE).

    :param data_dict: column-name -> value mapping for DailyReport
    :return: {"status": "success", "affected_rows": n} on success,
             None on failure (the error is logged).
    """
    try:
        logger.info("开始写入mysql")

        # Obtain a database session
        db: Session = next(get_db())

        stmt = (
            insert(DailyReport)
            .values(**data_dict)
            .on_duplicate_key_update(statistics_time=data_dict["statistics_time"])
        )

        result = db.execute(stmt)
        db.commit()

        logger.info(f"数据写入数据库成功,受影响的行数:{result.rowcount}")

        return {"status": "success", "affected_rows": result.rowcount}

    except Exception as e:
        # Fixed: the failure was only print()ed and silently swallowed;
        # log it (with traceback) through the configured handlers instead.
        logger.exception(f"日报文档路径写入数据库失败{e}")
        return None
|
||||
|
||||
|
||||
# Persist the raw-file paths to the database (upsert)
def save_raw_files(data_dict):
    """Insert `data_dict` into daily_report_rawdata; when a row with
    the same statistics_time already exists, update it instead
    (MySQL INSERT ... ON DUPLICATE KEY UPDATE).

    :param data_dict: column-name -> value mapping for DailyReportRawdata
    :return: {"status": "success", "affected_rows": n} on success,
             None on failure (the error is logged).
    """
    try:
        logger.info("开始写入mysql")

        # Obtain a database session
        db: Session = next(get_db())

        stmt = (
            insert(DailyReportRawdata)
            .values(**data_dict)
            .on_duplicate_key_update(statistics_time=data_dict["statistics_time"])
        )

        result = db.execute(stmt)
        db.commit()

        logger.info(f"数据写入数据库成功,受影响的行数:{result.rowcount}")

        return {"status": "success", "affected_rows": result.rowcount}

    except Exception as e:
        # Fixed: the failure was only print()ed and silently swallowed;
        # log it (with traceback) through the configured handlers instead.
        logger.exception(f"原数据文档路径写入数据库失败{e}")
        return None
|
||||
|
||||
|
||||
def get_file_path(statistics_time: datetime):
    """Look up the DailyReport row whose statistics_time matches exactly.

    Fixed: removed leftover debug code — the print of the argument, the
    duplicate query built only to print its compiled SQL, and the dump of
    every statistics_time in the table (which re-read the whole column on
    each call).

    :param statistics_time: exact statistics timestamp to match
    :return: the DailyReport row, or None when not found / on error.
    """
    try:
        logger.info("开始查询需下载的记录")

        db: Session = next(get_db())

        file_info = (
            db.query(DailyReport)
            .filter(DailyReport.statistics_time == statistics_time)
            .first()
        )

        if not file_info:
            logger.info("查询需下载的记录失败")
            return None

        logger.info("查询需下载的记录成功")
        return file_info

    except Exception as e:
        logger.exception(f"查询需下载的记录失败:{e}")
        return None
|
||||
|
||||
|
||||
def get_weekly_file(start_time: datetime, end_time: datetime):
    """Return all DailyReport rows whose statistics_time lies within
    [start_time, end_time] (inclusive), or None when the range is empty
    or the query fails (failures are logged)."""
    try:
        logger.info("开始查询周报路径")

        session: Session = next(get_db())

        rows = (
            session.query(DailyReport)
            .filter(
                DailyReport.statistics_time >= start_time,
                DailyReport.statistics_time <= end_time,
            )
            .all()
        )

        if rows:
            logger.info("查询周报路径成功")
            return rows

        logger.info("无该时间段周报路径数据")
        return None

    except Exception as e:
        logger.exception(f"查询周报路径失败:{e}")
        return None
|
||||
|
||||
|
||||
# Paths of the original (raw) uploaded files
def get_raw_file(start_time: datetime, end_time: datetime):
    """Return all DailyReportRawdata rows whose statistics_time lies
    within [start_time, end_time] (inclusive), or None when the range is
    empty or the query fails (failures are logged)."""
    try:
        logger.info("开始查询原始文件路径")

        session: Session = next(get_db())

        rows = (
            session.query(DailyReportRawdata)
            .filter(
                DailyReportRawdata.statistics_time >= start_time,
                DailyReportRawdata.statistics_time <= end_time,
            )
            .all()
        )

        if rows:
            logger.info("查询原始文件路径成功")
            return rows

        logger.info("无该时间段原始文件路径")
        return None

    except Exception as e:
        logger.exception(f"查询原始文件路径失败:{e}")
        return None
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Ad-hoc manual check: load a sample report document from a local
    # Windows path (developer machine only).
    file_path = r"E:\work_data\work\三工单日报\三工单\20250311\20250311日报\公司全国“两会”保供电期间配网设备运行及三工单监测日报-20250311.docx"

    doc1 = Document(file_path)

    # print(callable(save_word_document(doc1,2025,3,11)))
|
@ -0,0 +1,105 @@
|
||||
from datetime import datetime, timedelta
|
||||
from lunarcalendar import Converter, Lunar # 用于农历转换
|
||||
|
||||
# Unfinished — the requirements are not yet clear.
# Holiday table: "fixed" entries use a Gregorian month/day; "lunar"
# entries use a lunar-calendar month/day (converted per year);
# "duration" is the holiday length in days.
holiday_dict = {
    "元旦": {
        "type": "fixed",
        "date": {"month": 1, "day": 1},
        "duration": 1,  # holiday length in days
    },
    "春节": {
        "type": "lunar",
        "date": {"month": 1, "day": 1},  # lunar 1st month, 1st day
        "duration": 7,
    },
    "清明节": {"type": "fixed", "date": {"month": 4, "day": 4}, "duration": 3},
    "劳动节": {"type": "fixed", "date": {"month": 5, "day": 1}, "duration": 5},
    "端午节": {
        "type": "lunar",
        "date": {"month": 5, "day": 5},  # lunar 5th month, 5th day
        "duration": 3,
    },
    "中秋节": {
        "type": "lunar",
        "date": {"month": 8, "day": 15},  # lunar 8th month, 15th day
        "duration": 1,
    },
    "国庆节": {"type": "fixed", "date": {"month": 10, "day": 1}, "duration": 7},
}
|
||||
|
||||
|
||||
def is_holiday(year, month, day):
    """Return (True, holiday_name) when the given date falls inside one
    of the holiday periods in holiday_dict, else (False, None)."""
    today = datetime(year, month, day)

    # Fixed-date (Gregorian) holidays
    for name, info in holiday_dict.items():
        if info["type"] != "fixed":
            continue
        first_day = datetime(year, info["date"]["month"], info["date"]["day"])
        offset = (today - first_day).days
        if 0 <= offset < info["duration"]:
            print(f"今天是:{name}")
            return True, name

    # Lunar-calendar holidays: convert the lunar date to Gregorian first
    for name, info in holiday_dict.items():
        if info["type"] != "lunar":
            continue
        lunar_date = Lunar(year, info["date"]["month"], info["date"]["day"])
        solar = Converter.Lunar2Solar(lunar_date)
        offset = (today - datetime(solar.year, solar.month, solar.day)).days
        if 0 <= offset < info["duration"]:
            print(f"今天是:{name}")
            return True, name

    return False, None
|
||||
|
||||
|
||||
def get_last_year_holiday_data(year, month, day, holiday_name):
    """Compute last year's date span for `holiday_name`.

    Fixed-date holidays reuse the Gregorian month/day; lunar holidays
    are converted from the lunar calendar for the previous year.
    Returns (start_date, end_date), inclusive of the holiday duration.
    """
    previous_year = year - 1
    info = holiday_dict[holiday_name]
    span = timedelta(days=info["duration"] - 1)

    if info["type"] == "fixed":
        start_date = datetime(previous_year, info["date"]["month"], info["date"]["day"])
    else:
        # Convert last year's lunar date to a Gregorian date
        lunar_date = Lunar(previous_year, info["date"]["month"], info["date"]["day"])
        solar = Converter.Lunar2Solar(lunar_date)
        start_date = datetime(solar.year, solar.month, solar.day)

    end_date = start_date + span

    print(f"去年节假日数据时间段: {start_date} 至 {end_date}")
    # A database lookup for last year's figures would go here, e.g.:
    # last_year_data = query_data_from_db(start_date, end_date)

    return start_date, end_date
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test — previously this ran at import time as bare
    # module-level code; wrapped in a main guard so importing the module
    # no longer triggers it.
    year = 2024
    month = 9
    day = 17
    is_holiday_flag, holiday_name = is_holiday(year, month, day)
    if is_holiday_flag:
        start_date, end_date = get_last_year_holiday_data(year, month, day, holiday_name)
        print(f"去年{holiday_name}的日期范围:{start_date} - {end_date}")
|
@ -0,0 +1,34 @@
|
||||
import os
|
||||
import shutil
|
||||
|
||||
|
||||
# Move files from the bulletin download folder to the final archive
# folder; files whose names contain an excluded keyword stay in place.
def move_files(folder_download, folder_all):
    try:
        # Make sure both folders exist before walking
        os.makedirs(folder_download, exist_ok=True)
        os.makedirs(folder_all, exist_ok=True)

        # Files whose names contain any of these keywords are NOT moved
        keywords = ["简版", "日报.zip"]

        # Walk the source tree and move every non-matching file
        for root, dirs, files in os.walk(folder_download):
            for file in files:
                if any(keyword in file for keyword in keywords):
                    continue
                file_path = os.path.join(root, file)
                shutil.move(file_path, os.path.join(folder_all, file))
                print(f"已移动文件: {file_path} 到 {folder_all}")

    except Exception as e:
        print(f"发生错误: {e}")
|
@ -0,0 +1,46 @@
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
# Move the original uploaded source files into the download folder
def move_raw_files(folder_before, folder_after, save_folder):
    """Move every .docx/.xlsx/.xls file from `folder_before` into
    `folder_after`/`save_folder` and return the list of new paths.
    Returns None when anything goes wrong (the error is printed)."""
    try:
        destination = f"{folder_after}/{save_folder}"

        # Ensure the target folder and its sub-folder both exist
        Path(folder_after).mkdir(parents=True, exist_ok=True)
        os.makedirs(destination, exist_ok=True)

        # Accepted file extensions
        extensions = (".docx", ".xlsx", ".xls")

        moved = []
        for name in os.listdir(folder_before):
            source = os.path.join(folder_before, name)

            # Files only (sub-folders are skipped); extension must match
            if not (os.path.isfile(source) and name.lower().endswith(extensions)):
                continue

            shutil.move(source, os.path.join(destination, name))
            moved.append(f"{destination}/{name}")

            print(f"已移动: {name}")

        print(f"\n移动完成! 共移动了 {len(moved)} 个文件到 {folder_after}")
        print(f"文件路径: {moved}")
        return moved

    except Exception as e:
        print(f"原始文件移动失败: {e}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Ad-hoc manual check against developer-machine paths.
    folder_before = r"E:\code\python_code\daily_work\backend\temp_uploads"
    folder_after = r"E:\code\python_code\daily_work\backend\temp_download_raw"
    move_raw_files(folder_before, folder_after, "202505")
|
Binary file not shown.
@ -0,0 +1,45 @@
|
||||
import logging
|
||||
|
||||
# 获取日志记录器
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Replace a template placeholder in a paragraph with extracted content
def replace_text_in_paragraph(paragraph, old_text, new_text):
    """Replace `old_text` with `new_text` inside a docx paragraph.

    The replacement is applied per run so character formatting is kept.
    NOTE: a placeholder split across multiple runs is not replaced.
    """
    try:
        # Cheap pre-check on the full paragraph text
        if old_text not in paragraph.text:
            return
        for run in paragraph.runs:
            if old_text in run.text:
                run.text = run.text.replace(old_text, new_text)

    except Exception as e:
        logger.exception(f"替换段落里的文本失败:{e}")
        print(f"替换段落里的文本失败:{e}")
|
||||
|
||||
|
||||
def replace_text_in_docx(doc, replacements):
    """Apply every {old: new} pair in `replacements` to all body
    paragraphs and all table cells of a docx document, in place.

    Paragraph and table passes are wrapped in separate try blocks so a
    failure in one does not abort the other.
    """
    try:
        logger.info("开始替换段落中的文本")
        # Body paragraphs: delegate to the run-aware helper
        for paragraph in doc.paragraphs:
            for placeholder, value in replacements.items():
                replace_text_in_paragraph(paragraph, placeholder, value)

    except Exception as e:
        logger.exception(f"替换段落中的文本失败:{e}")
        print(f"替换段落中的文本失败:{e}")

    try:
        logger.info("开始替换表格中的文本")
        # Table cells: plain-text replacement (cell formatting is not kept)
        for table in doc.tables:
            for row in table.rows:
                for cell in row.cells:
                    for placeholder, value in replacements.items():
                        if placeholder in cell.text:
                            cell.text = cell.text.replace(placeholder, value)

    except Exception as e:
        logger.exception(f"替换表格中的文本失败:{e}")
        print(f"替换表格中的文本失败:{e}")
|
Binary file not shown.
@ -0,0 +1,39 @@
|
||||
#!/bin/bash

# Watch TARGET_DIR and convert every .doc file found there to .docx
# using headless LibreOffice, deleting the original on success.
TARGET_DIR="/app/temp_uploads"

while true; do
    # Bail out if the watched folder disappears
    if [ ! -d "$TARGET_DIR" ]; then
        exit 1
    fi

    # Iterate NUL-delimited so filenames containing spaces or other
    # special characters are handled safely — the previous unquoted
    # for-loop over $(find ...) word-split such names.
    while IFS= read -r -d '' doc_file; do
        # File name without its extension
        base_name="${doc_file%.*}"
        # Corresponding .docx path
        docx_file="${base_name}.docx"

        # Convert with headless LibreOffice
        soffice --headless --convert-to docx "$doc_file" --outdir "$TARGET_DIR"

        # Delete the source only if the conversion produced the .docx
        if [ -f "$docx_file" ]; then
            rm "$doc_file"
            echo "已将 $doc_file 转换为 $docx_file 并删除原文件。"
            sleep 3
        else
            echo "转换 $doc_file 失败。"
        fi
    done < <(find "$TARGET_DIR" -type f -name "*.doc" -print0)

    # Pause between scans — previously the loop busy-spun the CPU
    # whenever no .doc files were present.
    sleep 3
done
|
||||
|
||||
|
@ -0,0 +1,58 @@
|
||||
|
||||
services:
  # Frontend service (nginx serving the built SPA)
  frontend:
    image: daily-report-frontend:1.0.0 # replace with your frontend Docker image
    logging:
      driver: "json-file" # log driver (json-file is the default)
      options:
        max-size: "10m" # max size of a single log file
        max-file: "3" # max number of rotated log files kept
    ports:
      - "8086:80" # publish the frontend port
    volumes:
      - ../dist:/usr/share/nginx/html
      - ../nginx.conf:/etc/nginx/nginx.conf
    networks:
      - app-network

  # Backend service (FastAPI)
  backend:
    image: daily-report-api:1.0.0 # replace with your backend Docker image
    logging:
      driver: "json-file" # log driver (json-file is the default)
      options:
        max-size: "10m" # max size of a single log file
        max-file: "3" # max number of rotated log files kept
    ports:
      - "5000:5000" # publish the backend port
    environment:
      - LOG_DIR=/app/logs
      - DATA_DIR=/app/temp_data
    # Mounted volumes
    volumes:
      # NOTE(review): mounting $PWD over /app replaces the code baked
      # into the image at runtime — convenient for development, but
      # confirm this is intended for production deployments.
      - $PWD:/app
    depends_on:
      - libreoffice
    networks:
      - app-network

  # LibreOffice service (doc -> docx conversion)
  libreoffice:
    image: linuxserver/libreoffice:latest # NOTE(review): unpinned :latest tag — consider pinning a version
    logging:
      driver: "json-file" # log driver (json-file is the default)
      options:
        max-size: "10m" # max size of a single log file
        max-file: "3" # max number of rotated log files kept
    volumes:
      - $PWD:/app
    ports:
      - "8100:8100" # expose the LibreOffice service port
    networks:
      - app-network

# Shared bridge network joining all services
networks:
  app-network:
    driver: bridge
|
@ -0,0 +1,34 @@
|
||||
from fastapi import FastAPI
from app.api.router import router

import logging
from app.logging_config import setup_logging

# Load the logging configuration before anything logs
setup_logging()

# Module-level logger
logger = logging.getLogger(__name__)

app = FastAPI(
    title="Daily Report API",
    description="三工单日报、简报的api",
    version="1.0.0",
)

# Serve generated report files as static content
from fastapi.staticfiles import StaticFiles

app.mount("/sgd/file", StaticFiles(directory="temp_downloads"), name="temp_downloads")
# NOTE(review): this second mount reuses the SAME path "/sgd/file" (and the
# same directory) under a different name, so it can never be reached — it
# looks like it was meant to expose directory="temp_download_raw" on a
# distinct path; confirm and fix.
app.mount(
    "/sgd/file", StaticFiles(directory="temp_downloads"), name="temp_download_raw"
)

# Register the API routes
app.include_router(router)


if __name__ == "__main__":
    import uvicorn

    # NOTE(review): listens on 8000 here, while the Docker CMD starts
    # uvicorn on 5000 — confirm which port is intended.
    uvicorn.run(app, host="0.0.0.0", port=8000)
|
Binary file not shown.
Loading…
Reference in New Issue