Save all currently accumulated repository changes as a backup snapshot for Gitea so no local work is lost.
279 lines
11 KiB
PHP
279 lines
11 KiB
PHP
<?php

/**
 * Deep search for a document's file in S3.
 *
 * Diagnostic script: given a vtiger_notes record whose S3 object is missing,
 * it scans buckets by size and by name, checks other known buckets, and
 * greps upload logs, printing candidate keys and ready-to-run UPDATE SQL.
 */

error_reporting(E_ALL);
ini_set('display_errors', 1);

require_once 'config.inc.php';
require_once 'crm_extensions/file_storage/config.php';
require_once 'include/database/PearDatabase.php';

// Reuse the application's DB handle when it already exists; otherwise
// obtain the singleton connection ourselves.
global $adb;
$adb = $adb ?: PearDatabase::getInstance();
// Document whose S3 object we are trying to locate.
$recordId = 397217;

echo "=== Глубокий поиск файла для документа ID: $recordId ===\n\n";

// Pull the stored S3 coordinates and file metadata for the document.
$query = "SELECT s3_bucket, s3_key, filename, title, notecontent, filesize
          FROM vtiger_notes
          WHERE notesid = ?";
$result = $adb->pquery($query, array($recordId));
$row = $adb->fetchByAssoc($result, -1, false);

// Guard: without a matching row every step below would dereference null
// fields and the whole search would run on empty parameters.
if (empty($row)) {
    echo "❌ Документ с ID $recordId не найден в vtiger_notes\n";
    exit(1);
}

$originalBucket = $row['s3_bucket'];
$originalKey = $row['s3_key'];
$filename = $row['filename'];
$filesize = $row['filesize'];
$fileNameFromKey = basename($originalKey);
$fileBaseName = pathinfo($fileNameFromKey, PATHINFO_FILENAME); // e.g. 1762504696855

echo "📄 Параметры поиска:\n";
echo " - Original Bucket: $originalBucket\n";
echo " - Original Key: $originalKey\n";
echo " - Filesize: $filesize байт\n";
echo " - Filename: $fileNameFromKey\n";
echo " - Base name: $fileBaseName\n\n";
// Load the S3 settings and construct the client.
$config = require 'crm_extensions/file_storage/config.php';
require_once 'vendor/autoload.php';

$s3Settings = $config['s3'];
$awsClient = new Aws\S3\S3Client([
    'version' => 'latest',
    'region' => $s3Settings['region'],
    'endpoint' => $s3Settings['endpoint'],
    'use_path_style_endpoint' => $s3Settings['use_path_style_endpoint'],
    'credentials' => [
        'key' => $s3Settings['key'],
        'secret' => $s3Settings['secret'],
    ],
]);
// 1. Search by file size in the original bucket.
// BUG FIX: S3 returns at most 1000 keys per ListObjectsV2 call regardless of
// MaxKeys, and the original single call had no ContinuationToken follow-up,
// so everything past the first page was silently skipped. We now paginate.
echo "🔍 1. Поиск файлов с размером $filesize байт в bucket $originalBucket...\n";
try {
    $foundBySize = [];
    $continuationToken = null;
    do {
        $listParams = [
            'Bucket' => $originalBucket,
            'MaxKeys' => 1000,
        ];
        if ($continuationToken !== null) {
            $listParams['ContinuationToken'] = $continuationToken;
        }
        $result = $awsClient->listObjectsV2($listParams);

        if (isset($result['Contents'])) {
            foreach ($result['Contents'] as $object) {
                // Accept a ±1% size tolerance.
                $sizeDiff = abs($object['Size'] - $filesize);
                if ($sizeDiff <= ($filesize * 0.01)) {
                    $foundBySize[] = [
                        'key' => $object['Key'],
                        'size' => $object['Size'],
                        'modified' => $object['LastModified']->format('Y-m-d H:i:s')
                    ];
                }
            }
        }

        // Follow pagination until the listing is no longer truncated.
        $continuationToken = !empty($result['IsTruncated'])
            ? ($result['NextContinuationToken'] ?? null)
            : null;
    } while ($continuationToken !== null);

    if (!empty($foundBySize)) {
        echo " ✅ Найдено " . count($foundBySize) . " файлов с похожим размером:\n";
        foreach ($foundBySize as $file) {
            echo " - Key: {$file['key']}\n";
            echo " Size: {$file['size']} байт (разница: " . abs($file['size'] - $filesize) . ")\n";
            echo " Modified: {$file['modified']}\n";

            // Exact size match — probe the object and build a download link.
            if ($file['size'] == $filesize) {
                echo " ⭐ РАЗМЕР СОВПАДАЕТ ТОЧНО!\n";

                try {
                    // headObject throws when the object is not accessible;
                    // the response itself is not needed.
                    $awsClient->headObject([
                        'Bucket' => $originalBucket,
                        'Key' => $file['key']
                    ]);

                    echo " ✅ Файл доступен!\n";

                    // Generate a short-lived presigned download URL.
                    $cmd = $awsClient->getCommand('GetObject', [
                        'Bucket' => $originalBucket,
                        'Key' => $file['key']
                    ]);
                    $request = $awsClient->createPresignedRequest($cmd, '+10 minutes');
                    $presignedUrl = (string)$request->getUri();

                    echo " 🔗 Presigned URL: $presignedUrl\n";

                    // Probe the URL with an HTTP HEAD request.
                    $ch = curl_init($presignedUrl);
                    curl_setopt($ch, CURLOPT_NOBODY, true);
                    curl_setopt($ch, CURLOPT_FOLLOWLOCATION, true);
                    curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
                    curl_setopt($ch, CURLOPT_TIMEOUT, 10);
                    curl_exec($ch);
                    $httpCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
                    curl_close($ch);

                    if ($httpCode == 200) {
                        echo " ✅ URL работает!\n\n";
                        echo "💾 SQL для обновления БД:\n";
                        echo "UPDATE vtiger_notes SET s3_bucket = '$originalBucket', s3_key = '{$file['key']}' WHERE notesid = $recordId;\n\n";
                    }
                } catch (Exception $e) {
                    echo " ❌ Ошибка доступа: " . $e->getMessage() . "\n";
                }
            }
            echo "\n";
        }
    } else {
        echo " ❌ Файлы с похожим размером не найдены\n\n";
    }
} catch (Exception $e) {
    echo " ❌ Ошибка: " . $e->getMessage() . "\n\n";
}
// 2. Search by partial file name across the original bucket.
// BUG FIX: same pagination issue as the size search — ListObjectsV2 caps at
// 1000 keys per call, so we follow ContinuationToken to scan the full bucket.
echo "🔍 2. Поиск файлов с именем содержащим '$fileBaseName'...\n";
try {
    $foundByName = [];
    $continuationToken = null;
    do {
        $listParams = [
            'Bucket' => $originalBucket,
            'MaxKeys' => 1000,
        ];
        if ($continuationToken !== null) {
            $listParams['ContinuationToken'] = $continuationToken;
        }
        $result = $awsClient->listObjectsV2($listParams);

        if (isset($result['Contents'])) {
            foreach ($result['Contents'] as $object) {
                $objectKey = $object['Key'];

                // A match in basename($objectKey) is subsumed by a match in
                // the full key, so one substring test on the key suffices.
                if (strpos($objectKey, $fileBaseName) !== false) {
                    $foundByName[] = [
                        'key' => $objectKey,
                        'size' => $object['Size'],
                        'modified' => $object['LastModified']->format('Y-m-d H:i:s')
                    ];
                }
            }
        }

        $continuationToken = !empty($result['IsTruncated'])
            ? ($result['NextContinuationToken'] ?? null)
            : null;
    } while ($continuationToken !== null);

    if (!empty($foundByName)) {
        echo " ✅ Найдено " . count($foundByName) . " файлов с похожим именем:\n";
        foreach ($foundByName as $file) {
            echo " - Key: {$file['key']}\n";
            echo " Size: {$file['size']} байт\n";
            echo " Modified: {$file['modified']}\n\n";
        }
    } else {
        echo " ❌ Файлы с похожим именем не найдены\n\n";
    }
} catch (Exception $e) {
    echo " ❌ Ошибка: " . $e->getMessage() . "\n\n";
}
// 3. Check the other buckets referenced in the database.
echo "🔍 3. Проверка других bucket'ов из БД...\n";
$query = "SELECT DISTINCT s3_bucket FROM vtiger_notes WHERE s3_bucket IS NOT NULL AND s3_bucket != '' LIMIT 10";
$result = $adb->pquery($query, array());

$otherBuckets = [];
while ($row = $adb->fetchByAssoc($result, -1, false)) {
    if ($row['s3_bucket'] != $originalBucket) {
        $otherBuckets[] = $row['s3_bucket'];
    }
}

if (!empty($otherBuckets)) {
    echo " Найдено " . count($otherBuckets) . " других bucket'ов\n";
    foreach ($otherBuckets as $bucket) {
        echo " Проверяю bucket: $bucket\n";

        try {
            // First, probe this bucket for the original key directly.
            try {
                // headObject throws when the object does not exist.
                $awsClient->headObject([
                    'Bucket' => $bucket,
                    'Key' => $originalKey
                ]);

                echo " ✅ Файл найден по оригинальному key!\n";
                echo " 💾 SQL: UPDATE vtiger_notes SET s3_bucket = '$bucket', s3_key = '$originalKey' WHERE notesid = $recordId;\n\n";
            } catch (Exception $e) {
                // Not present under the original key — fall through to the
                // size scan below.
            }

            // Then scan this bucket for an exact size match, following
            // ContinuationToken (BUG FIX: a single ListObjectsV2 call covers
            // at most 1000 keys, so the original scan could miss the file).
            $continuationToken = null;
            $matched = false;
            do {
                $listParams = [
                    'Bucket' => $bucket,
                    'MaxKeys' => 1000,
                ];
                if ($continuationToken !== null) {
                    $listParams['ContinuationToken'] = $continuationToken;
                }
                $listing = $awsClient->listObjectsV2($listParams);

                if (isset($listing['Contents'])) {
                    foreach ($listing['Contents'] as $object) {
                        if ($object['Size'] == $filesize) {
                            echo " ✅ Найден файл с таким же размером: {$object['Key']}\n";
                            echo " 💾 SQL: UPDATE vtiger_notes SET s3_bucket = '$bucket', s3_key = '{$object['Key']}' WHERE notesid = $recordId;\n\n";
                            $matched = true;
                            break; // first match per bucket is enough
                        }
                    }
                }

                $continuationToken = (!$matched && !empty($listing['IsTruncated']))
                    ? ($listing['NextContinuationToken'] ?? null)
                    : null;
            } while ($continuationToken !== null);
        } catch (Exception $e) {
            echo " ⚠️ Ошибка доступа к bucket: " . $e->getMessage() . "\n";
        }
    }
} else {
    echo " Других bucket'ов не найдено\n\n";
}
// 4. Scan upload-related log files for mentions of this document.
echo "🔍 4. Поиск в логах загрузки файлов...\n";
$logFiles = [
    'logs/webservice.log',
    'logs/debug.log',
    'logs/file_upload.log',
    'logs/s3_upload.log'
];

// Strings that identify this document in log output. Empty needles are
// skipped (strpos with an empty needle would match every line).
$needles = array_filter(
    [$fileBaseName, $originalKey, (string)$recordId],
    function ($needle) { return $needle !== ''; }
);

$foundInLogs = false;
foreach ($logFiles as $logFile) {
    if (!file_exists($logFile)) {
        continue;
    }
    echo " Проверяю $logFile...\n";

    // IMPROVEMENT: stream the log line by line instead of slurping the whole
    // file with file_get_contents — logs can be far larger than memory, and
    // the original also scanned the content twice (whole-string, then lines).
    $handle = fopen($logFile, 'r');
    if ($handle === false) {
        continue; // unreadable log: skip, matching the best-effort intent
    }

    $relevantLines = [];
    while (($line = fgets($handle)) !== false) {
        foreach ($needles as $needle) {
            if (strpos($line, $needle) !== false) {
                $relevantLines[] = $line;
                break;
            }
        }
        if (count($relevantLines) >= 10) {
            break; // limit the output, as before
        }
    }
    fclose($handle);

    if (!empty($relevantLines)) {
        echo " ✅ Найдены упоминания в логах!\n";
        foreach ($relevantLines as $line) {
            echo " " . trim($line) . "\n";
        }
        $foundInLogs = true;
    }
}

if (!$foundInLogs) {
    echo " Упоминания в логах не найдены\n\n";
}

echo "\n=== Глубокий поиск завершен ===\n";