fix(auth): prevent AttributeError when querying permissions for users with no role
This commit is contained in:
45
deploy_patch.sh
Normal file
45
deploy_patch.sh
Normal file
@ -0,0 +1,45 @@
|
||||
#!/bin/bash
#
# Partial-patch deploy: archive ONLY the files listed in FILES_TO_DEPLOY,
# copy the archive to the production host, unpack it inside REMOTE_DIR
# (tar preserves relative paths, so exactly those files are overwritten),
# then rebuild and restart the backend/frontend containers.
#
# Usage: ./deploy_patch.sh   (no arguments; edit FILES_TO_DEPLOY as needed)

set -euo pipefail

# === 配置项 ===
SERVER="dxc@172.16.0.198"
REMOTE_DIR="/opt/inventory-app"
# NOTE(review): TIMESTAMP is currently unused — kept for future log/archive naming.
TIMESTAMP=$(date +%Y%m%d_%H%M)

# 核心魔法:只定义你要发布的具体文件列表!
FILES_TO_DEPLOY=(
  "inventory-backend/app/api/v1/inbound/base.py"
  "inventory-backend/app/services/inbound/base_service.py"
  "inventory-web/src/api/material_base.ts"
  "inventory-web/src/components/SpecHelper/index.vue"
  "inventory-web/src/layout/index.vue"
)

# Always remove the local archive, on every exit path (success or failure).
cleanup() { rm -f -- patch.tar.gz; }
trap cleanup EXIT

echo "==================================================="
echo "🚀 开始【局部补丁】部署 (仅覆盖特定的 ${#FILES_TO_DEPLOY[@]} 个文件)"
echo "==================================================="

# 1. 本地精准打包
echo "[1/3] 正在提取指定文件并打包..."
# tar 打包时会自动保留文件的原有目录结构
if ! tar -czf patch.tar.gz "${FILES_TO_DEPLOY[@]}"; then
  echo "❌ 打包失败,请检查文件列表中的路径是否正确!" >&2
  exit 1
fi

# 2. 传输到生产环境的 /tmp 目录
echo "[2/3] 正在传输补丁包到服务器..."
# Abort here on failure — previously a failed scp still triggered the remote
# overwrite step, extracting a stale (or missing) archive on the server.
if ! scp patch.tar.gz "$SERVER:/tmp/patch.tar.gz"; then
  echo "❌ 传输失败,补丁未送达服务器,部署终止!" >&2
  exit 1
fi

# 3. 服务器执行覆盖与重启
echo "[3/3] 正在服务器上覆盖指定文件并热更新 (可能需要输入密码)..."
# 注意:这里直接在 $REMOTE_DIR 解压,tar 会按照原路径精准覆盖那 5 个文件,绝对不碰别的!
ssh -t "$SERVER" "cd $REMOTE_DIR && \
  sudo tar -xzf /tmp/patch.tar.gz && \
  sudo docker compose -f docker-compose.prod.yml build backend frontend && \
  sudo docker compose -f docker-compose.prod.yml up -d backend frontend && \
  sudo rm /tmp/patch.tar.gz"

echo "==================================================="
echo "✅ 局部部署完成!请刷新服务器网页查看最新规格连号助手。"
echo "==================================================="
||||
@ -1,7 +1,6 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
# --- 数据库服务 ---
|
||||
db:
|
||||
image: postgres:15-alpine
|
||||
container_name: inventory_db
|
||||
@ -11,42 +10,35 @@ services:
|
||||
POSTGRES_PASSWORD: 1234
|
||||
POSTGRES_DB: inventory_system
|
||||
volumes:
|
||||
# 数据持久化
|
||||
- ./pgdata_docker:/var/lib/postgresql/data
|
||||
ports:
|
||||
- "5434:5432"
|
||||
- "5435:5432"
|
||||
|
||||
# --- 后端 Flask 服务 ---
|
||||
backend:
|
||||
build:
|
||||
context: ./inventory-backend # 指向你的新后端目录
|
||||
context: ./inventory-backend
|
||||
container_name: inventory_api
|
||||
restart: always
|
||||
ports:
|
||||
- "8000:8000"
|
||||
volumes:
|
||||
- ./inventory-backend:/app # 挂载代码,实现热更新
|
||||
# 【核心修改】显式挂载 uploads 目录,确保图片持久化且宿主机可见
|
||||
- ./inventory-backend:/app
|
||||
- ./inventory-backend/uploads:/app/uploads
|
||||
command: gunicorn -c gunicorn.conf.py run:app --reload
|
||||
environment:
|
||||
# Host 必须写 'db'
|
||||
DATABASE_URL: postgresql://test:1234@db:5432/inventory_system
|
||||
depends_on:
|
||||
- db
|
||||
|
||||
# --- 前端 Vue 开发服务 ---
|
||||
frontend:
|
||||
build:
|
||||
context: ./inventory-web
|
||||
container_name: inventory_ui
|
||||
restart: always
|
||||
# 把本地代码挂载进去,实现“热更新”
|
||||
volumes:
|
||||
- ./inventory-web:/app
|
||||
- /app/node_modules # 排除 node_modules,防止冲突
|
||||
# 开发模式端口通常是 5173
|
||||
- /app/node_modules
|
||||
ports:
|
||||
- "5173:5173"
|
||||
- "5175:5173"
|
||||
depends_on:
|
||||
- backend
|
||||
- backend
|
||||
|
||||
@ -345,9 +345,13 @@ class AuthService:
|
||||
'elements': ['inbound_buy:unit_price', ...]
|
||||
}
|
||||
"""
|
||||
# 防御性编程:role_code 为空时直接返回空权限,避免后续 SQL 崩溃
|
||||
if not role_code:
|
||||
return {'menus': [], 'elements': []}
|
||||
|
||||
# 超级管理员返回所有权限(通配符)
|
||||
from app.utils.constants import UserRole
|
||||
if role_code and role_code.upper() == UserRole.SUPER_ADMIN:
|
||||
if role_code.upper() == UserRole.SUPER_ADMIN:
|
||||
# 返回通配符,表示拥有所有菜单和元素权限
|
||||
return {
|
||||
'menus': ['*'],
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"dev": "vite --host",
|
||||
"build": "vite build",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
@ -35,4 +35,4 @@
|
||||
"overrides": {
|
||||
"vite": "npm:rolldown-vite@7.2.5"
|
||||
}
|
||||
}
|
||||
}
|
||||
26
sync_db.sh
26
sync_db.sh
@ -1,21 +1,22 @@
|
||||
#!/bin/bash
|
||||
|
||||
# ==========================================
|
||||
# 1. 本地 WSL 数据库配置 (根据你之前的数据)
|
||||
# 1. 本地 WSL 数据库配置
|
||||
# ==========================================
|
||||
LOCAL_CONTAINER="inventory_db"
|
||||
LOCAL_DB_USER="test"
|
||||
LOCAL_DB_NAME="inventory_system"
|
||||
|
||||
# ==========================================
|
||||
# 2. 远程服务器 SSH 配置 (根据你的截图)
|
||||
# 2. 远程服务器 SSH 配置
|
||||
# ==========================================
|
||||
REMOTE_USER="dxc"
|
||||
REMOTE_HOST="172.16.0.198"
|
||||
REMOTE_PORT="22"
|
||||
REMOTE_DIR="/opt/inventory-app" # 用于存放备份
|
||||
|
||||
# ==========================================
|
||||
# 3. 远程服务器 Docker 配置 (根据你的 docker-compose.prod.yml)
|
||||
# 3. 远程服务器 Docker 配置
|
||||
# ==========================================
|
||||
REMOTE_CONTAINER="inventory_db_prod"
|
||||
REMOTE_DB_USER="prod_user"
|
||||
@ -26,14 +27,28 @@ REMOTE_DB_NAME="inventory_system"
|
||||
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
|
||||
DUMP_FILE="db_sync_${TIMESTAMP}.sql.gz"
|
||||
LOCAL_DUMP_PATH="/tmp/${DUMP_FILE}"
|
||||
REMOTE_BACKUP_FILE="${REMOTE_DIR}/data_copy/DB_BACKUP_${TIMESTAMP}.sql.gz"
|
||||
|
||||
echo "========================================================"
|
||||
echo " 🔄 开始同步 WSL 数据库到远程服务器 (${REMOTE_HOST})"
|
||||
echo " ⚠️ 注意:线上旧数据将被完全覆盖!"
|
||||
echo "========================================================"
|
||||
|
||||
# --- 新增:步骤 0: 远程服务器数据备份 ---
|
||||
echo -e "\n[0/4] 🛡️ 正在备份线上服务器数据库..."
|
||||
ssh -p ${REMOTE_PORT} ${REMOTE_USER}@${REMOTE_HOST} << EOF
|
||||
mkdir -p ${REMOTE_DIR}/data_copy
|
||||
# 导出线上数据作为备份
|
||||
docker exec -e PGPASSWORD="${REMOTE_DB_PASS}" ${REMOTE_CONTAINER} pg_dump -U ${REMOTE_DB_USER} -d ${REMOTE_DB_NAME} -O -x | gzip > ${REMOTE_BACKUP_FILE}
|
||||
echo " -> 线上备份已保存至: ${REMOTE_BACKUP_FILE}"
|
||||
EOF
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "❌ 线上备份失败!为保证数据安全,同步已终止!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# --- 步骤 1: 本地导出 ---
|
||||
echo -e "\n[1/4] 📦 正在本地打包数据库..."
|
||||
# 注意:这里使用 pg_dump 导出,为了兼容性,排除可能引起冲突的权限所有者信息 (-O -x)
|
||||
docker exec ${LOCAL_CONTAINER} pg_dump -U ${LOCAL_DB_USER} -d ${LOCAL_DB_NAME} -O -x | gzip > ${LOCAL_DUMP_PATH}
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "❌ 本地数据库导出失败!请检查本地 inventory_db 容器是否正常运行。"
|
||||
@ -57,7 +72,6 @@ ssh -p ${REMOTE_PORT} ${REMOTE_USER}@${REMOTE_HOST} << EOF
|
||||
docker cp /tmp/${DUMP_FILE} ${REMOTE_CONTAINER}:/tmp/${DUMP_FILE}
|
||||
|
||||
echo " -> 危险操作:清空服务器旧数据环境..."
|
||||
# 传入 PGPASSWORD 环境变量以防密码拦截
|
||||
docker exec -e PGPASSWORD="${REMOTE_DB_PASS}" ${REMOTE_CONTAINER} psql -U ${REMOTE_DB_USER} -d ${REMOTE_DB_NAME} -c "DROP SCHEMA public CASCADE; CREATE SCHEMA public; GRANT ALL ON SCHEMA public TO ${REMOTE_DB_USER};"
|
||||
|
||||
echo " -> 正在导入最新数据..."
|
||||
@ -74,4 +88,4 @@ rm ${LOCAL_DUMP_PATH}
|
||||
|
||||
echo -e "\n========================================================"
|
||||
echo "🎉 数据库全量替换成功!快去刷新你的线上系统看看吧!"
|
||||
echo "========================================================"
|
||||
echo "========================================================"
|
||||
102
库研操作/导入数据库.py
Normal file
102
库研操作/导入数据库.py
Normal file
@ -0,0 +1,102 @@
|
||||
# Backfill material_base.category in PostgreSQL from a curated Excel sheet.
import pandas as pd
import psycopg2

# 1. Database connection settings (local Postgres exposed on host port 5435).
DB_CONFIG = {
    'dbname': 'inventory_system',
    'user': 'test',
    'password': '1234',
    'host': 'localhost',
    'port': '5435'
}

# 2. Input workbook: the pre-filtered inventory export produced by the
#    sibling filtering script (its output_path is this same filename).
EXCEL_FILE = '筛选后的库存统计.xlsx'
|
||||
|
||||
|
||||
def fix_category_data_no_nan():
    """Backfill ``material_base.category`` from the Excel sheet.

    For each distinct (asset name, spec model) row, joins up to four
    category-level columns with ``/`` and updates every matching
    ``material_base`` row. Values that pandas stringified from NaN
    (``'nan'`` / ``'none'`` / empty) are filtered out so they never reach
    the database — that is the "no_nan" in the function name.

    Side effects: reads EXCEL_FILE, writes to the database in DB_CONFIG.
    Errors are printed and rolled back rather than raised (batch script).
    """
    # Explicit None instead of the fragile "'conn' in locals()" probing in
    # the original finally-block — same cleanup semantics, clearer intent.
    conn = None
    cur = None
    try:
        print("正在读取 Excel 文件...")

        # Only the first four category levels are considered; deeper levels
        # are intentionally ignored.
        possible_category_cols = ['类别一级', '类别二级', '类别三级', '类别四级']

        # Probe the header row first so we request only columns that exist.
        df_header = pd.read_excel(EXCEL_FILE, nrows=0)
        actual_category_cols = [col for col in possible_category_cols if col in df_header.columns]

        needed_columns = ['资产名称', '规格型号'] + actual_category_cols

        df = pd.read_excel(EXCEL_FILE, dtype=str, usecols=lambda x: x in needed_columns)
        df = df.where(pd.notnull(df), None)  # NaN -> None for the checks below
        df = df.drop_duplicates(subset=['资产名称', '规格型号'])

        print(f"发现了 {len(df)} 种独立物料,准备修复类别并清除 'nan'...")

        conn = psycopg2.connect(**DB_CONFIG)
        cur = conn.cursor()

        update_count = 0

        for index, row in df.iterrows():
            name = row.get('资产名称')
            spec_model = row.get('规格型号')

            # Normalise the spec: pandas may have stringified NaN as 'nan'.
            clean_spec = None if pd.isna(spec_model) or str(spec_model).lower() == 'nan' else str(spec_model).strip()

            if not name or str(name).lower() == 'nan':
                continue

            # Join at most four category levels, strictly filtering the
            # empty / 'nan' / 'none' artifacts.
            category_parts = []
            for col in actual_category_cols:
                val = row.get(col)
                if val is not None:
                    str_val = str(val).strip()
                    if str_val and str_val.lower() not in ('nan', 'none'):
                        category_parts.append(str_val)

            full_category = "/".join(category_parts)

            if not full_category:
                continue

            # Materials may be stored either under the raw name/spec or a
            # prefixed variant ("库研*" / "KY*"); match both forms.
            prefixed_name = f"库研*{name}"
            prefixed_spec = f"KY*{clean_spec}" if clean_spec else None

            # (Removed a stray trailing "\" line-continuation the original
            # carried inside this triple-quoted string.)
            update_query = """
                UPDATE material_base
                SET category = %s
                WHERE (name = %s OR name = %s)
                  AND (
                        (spec_model = %s OR spec_model = %s)
                        OR (spec_model IS NULL AND %s IS NULL)
                      )
            """

            cur.execute(update_query, (
                full_category,
                name, prefixed_name,
                clean_spec, prefixed_spec, clean_spec
            ))

            update_count += cur.rowcount

        conn.commit()
        print(f"✅ 完美修复!清除了讨厌的 'nan',共修正了 {update_count} 条记录。")

    except Exception as e:
        print(f"❌ 发生错误: {e}")
        if conn:
            conn.rollback()
    finally:
        if cur:
            cur.close()
        if conn:
            conn.close()
||||
|
||||
|
||||
# Script entry point: run the category backfill when executed directly.
if __name__ == "__main__":
    fix_category_data_no_nan()
|
||||
BIN
库研操作/库存统计_20260413_094414.xlsx
Normal file
BIN
库研操作/库存统计_20260413_094414.xlsx
Normal file
Binary file not shown.
29
库研操作/筛选.py
Normal file
29
库研操作/筛选.py
Normal file
@ -0,0 +1,29 @@
|
||||
# Filter the raw inventory export down to rows whose warehouse location
# ("仓库位置") is a three-level path whose last level is "1" (e.g. "A/B/1"),
# then save the matching rows to a new workbook.
import pandas as pd

# 1. 读取您的Excel文件
file_path = '库存统计_20260413_094414.xlsx'
df = pd.read_excel(file_path)

# 指定要进行筛选的列名
col_name = '仓库位置'
# Fail fast with a clear message instead of a bare KeyError further down
# if the export's header ever changes.
if col_name not in df.columns:
    raise KeyError(f"列 '{col_name}' 不存在,请检查 Excel 表头!")

# 2. 数据清洗:确保该列都是字符串格式,并处理可能存在的空值(NaN)
#    NaN becomes the literal string 'nan', which matches neither condition
#    below, so empty cells are implicitly excluded.
df[col_name] = df[col_name].astype(str)

# 3. 进行筛选
#    条件 A: str.count('/') == 2  -> exactly three path levels
#    条件 B: str.endswith('/1')   -> the last level is "1"
condition = (df[col_name].str.count('/') == 2) & (df[col_name].str.endswith('/1'))

# 将满足条件的数据提取出来
filtered_df = df[condition]

# 4. 打印查看筛选后的前几行结果
print("筛选出的符合要求的数据如下:")
print(filtered_df[[col_name]])

# 5. (可选)将筛选后的结果保存为新的 Excel 文件
output_path = '筛选后的库存统计.xlsx'
filtered_df.to_excel(output_path, index=False)
print(f"\n筛选完成,结果已保存至:{output_path}")
||||
BIN
库研操作/筛选后的库存统计.xlsx
Normal file
BIN
库研操作/筛选后的库存统计.xlsx
Normal file
Binary file not shown.
Reference in New Issue
Block a user