119 lines
5.3 KiB
Python
119 lines
5.3 KiB
Python
|
|
#!/usr/bin/env python3
|
|||
|
|
"""
|
|||
|
|
Проверка несоответствия между documents_uploaded и clpr_claim_documents
|
|||
|
|
"""
|
|||
|
|
import asyncio
|
|||
|
|
import asyncpg
|
|||
|
|
import json
|
|||
|
|
|
|||
|
|
# Postgres connection settings.
# NOTE(review): credentials are hard-coded in source — move these to
# environment variables or a secrets store before committing/sharing.
POSTGRES_HOST = "147.45.189.234"
POSTGRES_PORT = 5432
POSTGRES_DB = "default_db"
POSTGRES_USER = "gen_user"
POSTGRES_PASSWORD = "2~~9_^kVsU?2\\S"

# Claim draft to inspect. Matched against clpr_claims.id (cast to text)
# or against payload->>'claim_id' inside check_mismatch().
CLAIM_ID = "bddb6815-8e17-4d54-a721-5e94382942c7"
|
|||
|
|
|
|||
|
|
async def check_mismatch():
    """Report mismatches between clpr_claim_documents and the claim payload.

    Connects to Postgres with the module-level credentials, resolves the
    claim identified by ``CLAIM_ID`` (by row UUID or by the business id in
    ``payload->>'claim_id'``), then prints:

    * the documents stored in the ``clpr_claim_documents`` table,
    * the ``documents_uploaded`` and ``documents_meta`` entries from the
      claim's JSON payload,
    * any ``file_id`` present in ``documents_uploaded`` but missing from
      the table, and a comparison of ``field_name`` sets.

    Returns ``None``; all output goes to stdout. The connection is always
    closed, even on error.
    """
    conn = await asyncpg.connect(
        host=POSTGRES_HOST,
        port=POSTGRES_PORT,
        database=POSTGRES_DB,
        user=POSTGRES_USER,
        password=POSTGRES_PASSWORD,
    )

    try:
        # Resolve the claim UUID: CLAIM_ID may be the primary key itself
        # or the business id stored inside the JSON payload.
        claim_row = await conn.fetchrow("""
            SELECT id FROM clpr_claims
            WHERE id::text = $1 OR payload->>'claim_id' = $1
            ORDER BY updated_at DESC
            LIMIT 1
        """, CLAIM_ID)

        if not claim_row:
            print(f"❌ Черновик {CLAIM_ID} не найден!")
            return

        claim_uuid = claim_row['id']

        # Fetch the JSON payload. Depending on asyncpg codec configuration
        # it may arrive already decoded (dict) or as raw JSON text.
        payload_row = await conn.fetchrow("""
            SELECT payload FROM clpr_claims WHERE id = $1
        """, claim_uuid)

        raw_payload = payload_row['payload']
        if isinstance(raw_payload, dict):
            payload = raw_payload
        elif raw_payload is None:
            # Fix: a SQL NULL payload previously fell through to
            # json.loads(None) and raised TypeError.
            payload = {}
        else:
            payload = json.loads(raw_payload)

        # Documents as recorded in the relational table.
        table_docs = await conn.fetch("""
            SELECT
                ccd.id,
                ccd.claim_id,
                ccd.field_name,
                ccd.file_id,
                ccd.file_name,
                ccd.original_file_name,
                ccd.uploaded_at
            FROM clpr_claim_documents ccd
            WHERE ccd.claim_id = $1
            ORDER BY ccd.uploaded_at DESC
        """, str(claim_uuid))

        print(f"📋 Документы в таблице clpr_claim_documents ({len(table_docs)} шт.):")
        for i, doc in enumerate(table_docs):
            print(f"   {i+1}. field_name: {doc['field_name']}")
            print(f"      file_id: {doc['file_id']}")
            print(f"      file_name: {doc['file_name']}")
            print(f"      original_file_name: {doc['original_file_name']}")
            print(f"      uploaded_at: {doc['uploaded_at']}")

        print(f"\n📋 Документы в documents_uploaded ({len(payload.get('documents_uploaded', []))} шт.):")
        for i, doc in enumerate(payload.get('documents_uploaded', [])):
            print(f"   {i+1}. Тип: {doc.get('type', 'N/A')} / {doc.get('id', 'N/A')}")
            print(f"      file_id: {doc.get('file_id', 'N/A')}")
            print(f"      original_file_name: {doc.get('original_file_name', 'N/A')}")

        print(f"\n📋 Документы в documents_meta ({len(payload.get('documents_meta', []))} шт.):")
        for i, doc in enumerate(payload.get('documents_meta', [])):
            print(f"   {i+1}. field_label: {doc.get('field_label', 'N/A')}")
            print(f"      field_name: {doc.get('field_name', 'N/A')}")
            print(f"      file_id: {doc.get('file_id', 'N/A')}")

        # Cross-check: which file_ids listed in documents_uploaded have no
        # corresponding row in clpr_claim_documents?
        print(f"\n🔍 Проверка отсутствующих документов:")
        table_file_ids = {doc['file_id'] for doc in table_docs}
        uploaded_file_ids = {doc.get('file_id') for doc in payload.get('documents_uploaded', []) if doc.get('file_id')}

        missing_in_table = uploaded_file_ids - table_file_ids
        if missing_in_table:
            print(f"   ⚠️ В documents_uploaded есть, но нет в таблице ({len(missing_in_table)} шт.):")
            for file_id in missing_in_table:
                doc = next((d for d in payload.get('documents_uploaded', []) if d.get('file_id') == file_id), None)
                if doc:
                    # file_id can be very long (e.g. a signed URL); truncate for display.
                    print(f"      - {doc.get('type', 'N/A')}: {file_id[:80]}...")
                    print(f"        original_file_name: {doc.get('original_file_name', 'N/A')}")
        else:
            print(f"   ✅ Все документы из documents_uploaded есть в таблице")

        # Compare the field_name sets between the table and documents_meta.
        print(f"\n🔍 Проверка field_name:")
        table_field_names = {doc['field_name'] for doc in table_docs}
        meta_field_names = {doc.get('field_name') for doc in payload.get('documents_meta', []) if doc.get('field_name')}

        print(f"   В таблице: {sorted(table_field_names)}")
        print(f"   В documents_meta: {sorted(meta_field_names)}")

        # Fewer table rows than uploaded docs hints at field_name collisions:
        # a UNIQUE (claim_id, field_name) constraint would make a later upload
        # overwrite an earlier one.
        if len(table_docs) < len(payload.get('documents_uploaded', [])):
            print(f"\n   ⚠️ Возможная причина: несколько документов с одинаковым field_name")
            print(f"   В таблице используется UNIQUE constraint на (claim_id, field_name)")
            print(f"   Если два документа имеют одинаковый field_name, второй перезапишет первый")

    finally:
        await conn.close()
|
|||
|
|
|
|||
|
|
if __name__ == "__main__":
    # Script entry point: run the one-shot consistency check to completion.
    asyncio.run(check_mismatch())
|
|||
|
|
|