@@ -2101,6 +2179,8 @@ const Dashboard = (() => {
diff --git a/server/app.py b/server/app.py
index cdfd8b7..60eca4e 100644
--- a/server/app.py
+++ b/server/app.py
@@ -12,7 +12,7 @@ try:
except Exception:
redis = None
_redis_client = None
-_audit_cache = {'pdd': {'ts': 0, 'list': []}, 'yt': {'ts': 0, 'list': []}}
+_audit_cache = {'pdd': {'ts': 0, 'list': []}, 'yt': {'ts': 0, 'list': []}, 'tx': {'ts': 0, 'list': []}, 'mt': {'ts': 0, 'list': []}}
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DB_PATH = os.path.join(BASE_DIR, 'data.db')
@@ -1265,13 +1265,217 @@ def audit_yt():
return jsonify({'list': []})
+@app.get('/api/audit/tx')
+@require_login
+def audit_tx():
+ start = datetime.utcnow()
+ try:
+ q_start = request.args.get('start')
+ q_end = request.args.get('end')
+ q_limit = request.args.get('limit')
+ q_order = request.args.get('order', 'desc')
+        has_filter = bool(q_start or q_end or q_limit) or q_order != 'desc'
+
+        # Cache optimization: reuse results queried within the last 3 seconds
+ if (not has_filter) and ((datetime.utcnow().timestamp() - _audit_cache['tx']['ts']) < 3):
+ return jsonify({'list': _audit_cache['tx']['list']})
+
+ r = get_redis()
+        # Set a 5-second Redis socket timeout (applies to connections created after this point)
+ r.connection_pool.connection_kwargs['socket_timeout'] = 5
+
+ items = []
+        # Large result sets are supported: default cap of 50000 items, adjustable via the limit parameter
+        max_items = int(q_limit) if (q_limit and q_limit.isdigit()) else 50000
+
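+        # Probe candidate Redis keys in priority order; the first key that exists
+        # is read according to its type (list, sorted set, stream, or plain string).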
+ for key in ['mac_batch_audit_tx', 'audit:tx', 'tx:audit']:
+ try:
+ if r.exists(key):
+ t = r.type(key)
+ if t == 'list':
+                    # Fetch the whole list, capped at max_items
+ total = r.llen(key)
+ fetch_count = min(total, max_items)
+ items = r.lrange(key, -fetch_count, -1)
+ elif t == 'zset':
+ items = r.zrevrange(key, 0, max_items - 1)
+ elif t == 'stream':
+ entries = r.xrevrange(key, max='+', min='-', count=max_items)
+ items = [json.dumps(v) for _id, v in entries]
+ else:
+ v = r.get(key)
+ items = [v] if v else []
+                break
+ except Exception as e:
+ log('audit_tx_error', f'Redis query error: {str(e)}')
+ continue
+
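+        # Parse the raw entries, then optionally drop rows outside the requested
+        # start/end window based on each row's ts_cn timestamp.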
+ res = [parse_audit_line(x) for x in items]
+ if q_start or q_end:
+ def to_epoch(s):
+ try:
+ if not s:
+ return None
+ if 'T' in s or 'Z' in s or '+' in s:
+ return datetime.fromisoformat(s.replace('Z','+00:00')).timestamp()
+ if ' ' in s and ':' in s:
+ return datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timestamp()
+ return datetime.strptime(s, '%Y-%m-%d').timestamp()
+ except Exception:
+ return None
+ s_epoch = to_epoch(q_start) if q_start else None
+ e_epoch = to_epoch(q_end) if q_end else None
+ tmp = []
+ for r0 in res:
+ ts = to_epoch(r0.get('ts_cn'))
+ if ts is None:
+ continue
+ if s_epoch is not None and ts < s_epoch:
+ continue
+ if e_epoch is not None and ts > e_epoch:
+ continue
+ tmp.append(r0)
+ res = tmp
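+        # Sort by ts_cn (newest first unless order=asc); fall back to reversing
+        # the list if sorting fails for any reason.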
+ try:
+ def to_key(r):
+ s = r.get('ts_cn') or ''
+ try:
+ if 'T' in s or 'Z' in s or '+' in s:
+ return datetime.fromisoformat(s.replace('Z','+00:00')).timestamp()
+ if ' ' in s and ':' in s:
+ return datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timestamp()
+ return datetime.strptime(s, '%Y-%m-%d').timestamp()
+ except Exception:
+ return 0
+ res.sort(key=to_key, reverse=(q_order != 'asc'))
+ except Exception:
+ res.reverse()
+ if q_limit:
+ try:
+ lim = int(q_limit)
+ if lim > 0:
+ res = res[:lim]
+ except Exception:
+ pass
+ if not has_filter:
+ _audit_cache['tx'] = {'ts': datetime.utcnow().timestamp(), 'list': res}
+ dur = (datetime.utcnow() - start).total_seconds()
+ log('audit_tx_cost', f"{dur}s len={len(res)}")
+ return jsonify({'list': res})
+ except Exception as e:
+ log('audit_tx_error', str(e))
+ return jsonify({'list': []})
+
+
+@app.get('/api/audit/mt')
+@require_login
+def audit_mt():
+ start = datetime.utcnow()
+ try:
+ q_start = request.args.get('start')
+ q_end = request.args.get('end')
+ q_limit = request.args.get('limit')
+ q_order = request.args.get('order', 'desc')
+        has_filter = bool(q_start or q_end or q_limit) or q_order != 'desc'
+
+        # Cache optimization: reuse results queried within the last 3 seconds
+ if (not has_filter) and ((datetime.utcnow().timestamp() - _audit_cache['mt']['ts']) < 3):
+ return jsonify({'list': _audit_cache['mt']['list']})
+
+ r = get_redis()
+        # Set a 5-second Redis socket timeout (applies to connections created after this point)
+ r.connection_pool.connection_kwargs['socket_timeout'] = 5
+
+ items = []
+        # Large result sets are supported: default cap of 50000 items, adjustable via the limit parameter
+        max_items = int(q_limit) if (q_limit and q_limit.isdigit()) else 50000
+
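+        # Probe candidate Redis keys in priority order; the first key that exists
+        # is read according to its type (list, sorted set, stream, or plain string).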
+ for key in ['mac_batch_audit_mt', 'audit:mt', 'mt:audit']:
+ try:
+ if r.exists(key):
+ t = r.type(key)
+ if t == 'list':
+                    # Fetch the whole list, capped at max_items
+ total = r.llen(key)
+ fetch_count = min(total, max_items)
+ items = r.lrange(key, -fetch_count, -1)
+ elif t == 'zset':
+ items = r.zrevrange(key, 0, max_items - 1)
+ elif t == 'stream':
+ entries = r.xrevrange(key, max='+', min='-', count=max_items)
+ items = [json.dumps(v) for _id, v in entries]
+ else:
+ v = r.get(key)
+ items = [v] if v else []
+                break
+ except Exception as e:
+ log('audit_mt_error', f'Redis query error: {str(e)}')
+ continue
+
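+        # Parse the raw entries, then optionally drop rows outside the requested
+        # start/end window based on each row's ts_cn timestamp.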
+ res = [parse_audit_line(x) for x in items]
+ if q_start or q_end:
+ def to_epoch(s):
+ try:
+ if not s:
+ return None
+ if 'T' in s or 'Z' in s or '+' in s:
+ return datetime.fromisoformat(s.replace('Z','+00:00')).timestamp()
+ if ' ' in s and ':' in s:
+ return datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timestamp()
+ return datetime.strptime(s, '%Y-%m-%d').timestamp()
+ except Exception:
+ return None
+ s_epoch = to_epoch(q_start) if q_start else None
+ e_epoch = to_epoch(q_end) if q_end else None
+ tmp = []
+ for r0 in res:
+ ts = to_epoch(r0.get('ts_cn'))
+ if ts is None:
+ continue
+ if s_epoch is not None and ts < s_epoch:
+ continue
+ if e_epoch is not None and ts > e_epoch:
+ continue
+ tmp.append(r0)
+ res = tmp
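+        # Sort by ts_cn (newest first unless order=asc); fall back to reversing
+        # the list if sorting fails for any reason.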
+ try:
+ def to_key(r):
+ s = r.get('ts_cn') or ''
+ try:
+ if 'T' in s or 'Z' in s or '+' in s:
+ return datetime.fromisoformat(s.replace('Z','+00:00')).timestamp()
+ if ' ' in s and ':' in s:
+ return datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timestamp()
+ return datetime.strptime(s, '%Y-%m-%d').timestamp()
+ except Exception:
+ return 0
+ res.sort(key=to_key, reverse=(q_order != 'asc'))
+ except Exception:
+ res.reverse()
+ if q_limit:
+ try:
+ lim = int(q_limit)
+ if lim > 0:
+ res = res[:lim]
+ except Exception:
+ pass
+ if not has_filter:
+ _audit_cache['mt'] = {'ts': datetime.utcnow().timestamp(), 'list': res}
+ dur = (datetime.utcnow() - start).total_seconds()
+ log('audit_mt_cost', f"{dur}s len={len(res)}")
+ return jsonify({'list': res})
+ except Exception as e:
+ log('audit_mt_error', str(e))
+ return jsonify({'list': []})
+
+
@app.get('/api/audit/diagnose')
@require_login
def audit_diagnose():
try:
r = get_redis()
result = {}
- for key in ['mac_batch_audit_pdd', 'mac_batch_audit_yt', 'batch_sn_mapping_pdd', 'batch_sn_mapping_yt']:
+ for key in ['mac_batch_audit_pdd', 'mac_batch_audit_yt', 'mac_batch_audit_tx', 'mac_batch_audit_mt', 'batch_sn_mapping_pdd', 'batch_sn_mapping_yt']:
try:
t = r.type(key)
if t == 'list':