connect_error) {
    die("Connection failed: " . $mysqli->connect_error);
}
// NOTE(review): consider utf8mb4 if filenames may contain 4-byte UTF-8 — confirm schema charset.
$mysqli->set_charset("utf8");

// --- Logging setup ----------------------------------------------------------
$logDir = $ROOT . 'logs';
if (!is_dir($logDir)) {
    mkdir($logDir, 0755, true);
}
$logFile = $logDir . '/phase2_batch_update.log';

/**
 * Append a timestamped line to the batch log file and echo it to stdout.
 *
 * @param string $msg Message to log (no trailing newline needed).
 */
function logln($msg)
{
    global $logFile;
    $line = '[' . date('Y-m-d H:i:s') . '] ' . $msg . PHP_EOL;
    file_put_contents($logFile, $line, FILE_APPEND | LOCK_EX);
    echo $line;
}

/**
 * Load notesid => S3 URL mappings from every migration_results_*.json file
 * written by the earlier copy phase.
 *
 * @return array<int|string, string> Map of notesid to its recorded S3 URL.
 */
function loadMigrationResults()
{
    global $ROOT;
    $resultsDir = $ROOT . 'crm_extensions/file_storage/';
    $files = glob($resultsDir . 'migration_results_*.json');
    $urlMap = [];
    foreach ($files as $file) {
        $data = json_decode(file_get_contents($file), true);
        // Each results file carries a 'copied' list of {notesid, url, ...} entries.
        if (isset($data['copied'])) {
            foreach ($data['copied'] as $item) {
                if (isset($item['notesid'], $item['url'])) {
                    $urlMap[$item['notesid']] = $item['url'];
                }
            }
        }
    }
    logln("Loaded " . count($urlMap) . " S3 URLs from migration results");
    return $urlMap;
}

// Sanitize pagination parameters before they are interpolated into SQL below
// (they arrive from outside this chunk — presumably CLI args; verify caller).
$limit = max(0, (int)$limit);
$offset = max(0, (int)$offset);

logln("Starting Phase 2 batch update");
logln("Parameters: limit=$limit, offset=$offset, dry-run=" . ($dryRun ? 'true' : 'false') . ", activate-external=" . ($activateExternal ? 'true' : 'false'));

// Load S3 URLs recorded by the copy phase.
$urlMap = loadMigrationResults();

// Records already copied to S3 (s3_key set) but not yet flagged as external.
// $limit/$offset are safe to interpolate: both were cast to int above.
$query = "SELECT notesid, filename, s3_key, s3_bucket, filelocationtype FROM vtiger_notes WHERE s3_key IS NOT NULL AND s3_key != '' AND filelocationtype != 'E' LIMIT $limit OFFSET $offset";
$result = $mysqli->query($query);
if (!$result) {
    die("Query failed: " . $mysqli->error);
}
$records = [];
while ($row = $result->fetch_assoc()) {
    $records[] = $row;
}
logln("Found " . count($records) . " records to update (offset=$offset, limit=$limit)");
if (empty($records)) {
    logln("No records to update. \nMigration Phase 2 may be complete.");
    exit(0);
}

$updated = 0;
$skipped = 0;
$errors = 0;

// Create the per-record backup directory once, outside the loop.
$backupDir = $ROOT . 'crm_extensions/file_storage/backups/';
if (!$dryRun && !is_dir($backupDir)) {
    mkdir($backupDir, 0755, true);
}

foreach ($records as $record) {
    $notesid = $record['notesid'];
    $currentFilename = $record['filename'];
    $s3Key = $record['s3_key'];
    $s3Bucket = $record['s3_bucket'];
    $currentType = $record['filelocationtype'];

    // Prefer the URL recorded during the copy phase; otherwise construct one.
    $s3Url = isset($urlMap[$notesid]) ? $urlMap[$notesid] : null;
    if (!$s3Url) {
        // Encode each path segment separately: urlencode() would turn the '/'
        // separators in the object key into %2F and spaces into '+', yielding
        // a broken object URL. rawurlencode() per segment is RFC 3986 safe.
        $encodedKey = implode('/', array_map('rawurlencode', explode('/', $s3Key)));
        $s3Url = "https://s3.twcstorage.ru/$s3Bucket/$encodedKey";
        logln("CONSTRUCTED URL for notesid=$notesid: $s3Url");
    }

    logln("Processing notesid=$notesid, current_type=$currentType");

    if (!$dryRun) {
        // Write a per-record JSON backup so the update can be rolled back.
        $backupFile = $backupDir . 'phase2_batch_backup_' . $notesid . '_' . date('Ymd_His') . '.json';
        $backup = [
            'notesid' => $notesid,
            'original_filename' => $currentFilename,
            'original_filelocationtype' => $currentType,
            'timestamp' => date('c')
        ];
        file_put_contents($backupFile, json_encode($backup, JSON_PRETTY_PRINT | JSON_UNESCAPED_UNICODE));

        // Both modes store the S3 URL in `filename`; --activate-external also
        // flips filelocationtype to 'E' (external storage).
        $updateQuery = $activateExternal
            ? "UPDATE vtiger_notes SET filename = ?, filelocationtype = 'E' WHERE notesid = ?"
            : "UPDATE vtiger_notes SET filename = ? WHERE notesid = ?";
        $stmt = $mysqli->prepare($updateQuery);
        if (!$stmt) {
            // prepare() returns false on failure; calling bind_param() on it
            // would be fatal, so log the error and move on to the next record.
            logln("ERROR preparing update for notesid=$notesid: " . $mysqli->error);
            $errors++;
            continue;
        }
        $stmt->bind_param('si', $s3Url, $notesid);
        if ($stmt->execute()) {
            logln("UPDATED notesid=$notesid -> External URL: $s3Url");
            $updated++;
        } else {
            logln("ERROR updating notesid=$notesid: " . $stmt->error);
            $errors++;
        }
        $stmt->close();
    } else {
        logln("DRY-RUN: Would update notesid=$notesid -> $s3Url" . ($activateExternal ? " (External)" : ""));
        $updated++;
    }
}

logln("Phase 2 batch update completed:");
logln("- Updated: $updated");
logln("- Skipped: $skipped");
logln("- Errors: $errors");
logln("- Mode: " . ($dryRun ? "DRY-RUN" : "LIVE"));
$mysqli->close();