<?php
/**
 * Complete S3 Migration - finishes the migration using the data recorded in
 * migration_results_*.json by earlier migration runs: finds vtiger_notes rows
 * whose file was already copied to S3 but whose DB metadata was never updated,
 * and fills in the S3 key/bucket/etag/URL.
 */

ini_set('memory_limit', '1024M');           // migration result JSON files can be large
set_time_limit(0);                          // long-running CLI job, no execution timeout
date_default_timezone_set('Europe/Moscow');

// CRM installation root; config.inc.php defines $dbconfig used below.
$ROOT = '/var/www/fastuser/data/www/crm.clientright.ru/';
require_once $ROOT . 'config.inc.php';

// CLI options:
//   --dry-run[=0|1]  dry run is ON by default; only --dry-run=0 disables it
//   --limit[=N]      stop after processing N records (0 = unlimited)
$opts = getopt('', [
    'dry-run::',
    'limit::'
]);

// Dry run defaults to true. NOTE: getopt() yields boolean false for an
// optional-value flag given without a value, so the previous expression
// `(int)$opts['dry-run'] !== 0` made a bare `--dry-run` silently DISABLE
// the dry run - the opposite of what the flag name suggests. Now only an
// explicit --dry-run=0 turns dry run off.
$dryRun = !isset($opts['dry-run'])
    || $opts['dry-run'] === false
    || (int)$opts['dry-run'] !== 0;
$limit = isset($opts['limit']) ? (int)$opts['limit'] : 0;
// Connect to the vtiger database using the credentials from config.inc.php.
$mysqli = new mysqli($dbconfig['db_server'], $dbconfig['db_username'], $dbconfig['db_password'], $dbconfig['db_name']);
if ($mysqli->connect_error) {
    die("Connection failed: " . $mysqli->connect_error);
}
$mysqli->set_charset("utf8");

// Announce the run configuration before doing any work.
echo "=== Complete S3 Migration ===\n";
printf("Dry run: %s\n", $dryRun ? "YES" : "NO");
printf("Limit: %s\n\n", $limit > 0 ? $limit : "UNLIMITED");
// Load the list of note IDs whose files were already copied to S3.
// all_s3_files.json is produced by the preceding analysis script and must
// contain a 'copied_ids' array.
$s3_files_path = __DIR__ . '/all_s3_files.json';
if (!file_exists($s3_files_path)) {
    die("ERROR: all_s3_files.json not found. Run the analysis script first.\n");
}

$s3_data = json_decode(file_get_contents($s3_files_path), true);
if (!is_array($s3_data)) {
    die("ERROR: Failed to parse all_s3_files.json\n");
}

// Guard against a structurally valid JSON file with the wrong schema: the
// previous code read $s3_data['copied_ids'] unchecked and crashed at count().
if (!isset($s3_data['copied_ids']) || !is_array($s3_data['copied_ids'])) {
    die("ERROR: all_s3_files.json does not contain a 'copied_ids' array\n");
}

$copied_ids = $s3_data['copied_ids'];
echo "Total files copied to S3: " . count($copied_ids) . "\n";
// Walk the copied IDs in chunks of 1000 so each SELECT's IN() clause stays
// bounded, and backfill S3 metadata for rows that are still marked internal.
$id_chunks = array_chunk($copied_ids, 1000);
$total_updated = 0;
$total_processed = 0;

// TWC bucket name is loop-invariant; hoisted out of the per-row body.
$s3_bucket = 'f9825c87-4e3558f6-f9b6-405c-ad3d-d1535c49b61c';

foreach ($id_chunks as $chunk_index => $chunk) {
    if ($limit > 0 && $total_processed >= $limit) {
        echo "Reached limit of $limit records\n";
        break;
    }

    // intval() on every ID keeps the interpolated IN() list injection-safe.
    $id_list = implode(',', array_map('intval', $chunk));

    // Only rows that were copied to S3 but never got their metadata written:
    // no s3_key yet, and still flagged as an internal ('I') attachment.
    $query = "SELECT notesid, filename
              FROM vtiger_notes
              WHERE notesid IN ($id_list)
              AND (s3_key IS NULL OR s3_key = '')
              AND filelocationtype = 'I'";

    $result = $mysqli->query($query);
    if (!$result) {
        echo "ERROR: Query failed: " . $mysqli->error . "\n";
        continue;
    }

    $chunk_updated = 0;
    while ($row = $result->fetch_assoc()) {
        // Check the limit BEFORE counting/processing the row. The previous
        // code incremented first and used `> $limit`, so the counter ended at
        // limit+1, and `break 2` skipped the `$total_updated += $chunk_updated`
        // below, silently dropping the partial chunk from the summary.
        if ($limit > 0 && $total_processed >= $limit) {
            $total_updated += $chunk_updated;
            break 2;
        }

        $notesid = (int)$row['notesid'];
        $filename = $row['filename'];
        $total_processed++;

        echo "Processing ID $notesid: $filename\n";

        // The per-run migration_results_*.json files record the S3 key/etag
        // written for each copied note.
        $s3_info = findFileInMigrationResults($notesid);
        if (!$s3_info) {
            echo " ❌ No S3 info found\n";
            continue;
        }

        echo " Found S3 info: {$s3_info['key']}\n";

        if ($dryRun) {
            echo " [DRY RUN] Would update database\n";
            $chunk_updated++;
            continue;
        }

        $s3_key = $s3_info['key'];
        $s3_etag = isset($s3_info['etag']) ? $s3_info['etag'] : '';
        $s3_url = "https://s3.twcstorage.ru/{$s3_bucket}/{$s3_key}";

        // For external ('E') attachments the filename column is overwritten
        // with the public S3 URL (that is what $s3_url is bound to below).
        $update_query = "UPDATE vtiger_notes SET
                        s3_key = ?,
                        s3_bucket = ?,
                        s3_etag = ?,
                        filename = ?,
                        filelocationtype = 'E'
                        WHERE notesid = ?";

        $stmt = $mysqli->prepare($update_query);
        if (!$stmt) {
            // prepare() returns false e.g. when the s3_* columns are missing;
            // the previous code called bind_param() on false - a fatal error.
            echo " ❌ Failed to prepare update: " . $mysqli->error . "\n";
            continue;
        }
        $stmt->bind_param("ssssi", $s3_key, $s3_bucket, $s3_etag, $s3_url, $notesid);

        if ($stmt->execute()) {
            $chunk_updated++;
            echo " ✅ Updated database\n";
        } else {
            echo " ❌ Failed to update: " . $stmt->error . "\n";
        }
        $stmt->close();
    }

    $total_updated += $chunk_updated;
    echo "Chunk $chunk_index: Updated $chunk_updated records\n\n";
}

$mysqli->close();

echo "\n=== SUMMARY ===\n";
echo "Total processed: $total_processed\n";
echo "Total updated: $total_updated\n";
echo "Dry run: " . ($dryRun ? "YES" : "NO") . "\n";
/**
 * Look up the S3 migration record for one note ID in migration_results_*.json.
 *
 * Scans every migration_results_*.json file that sits next to this script and
 * returns the first entry from its 'copied' list whose notesid matches, or
 * null when no file contains the ID.
 *
 * The parsed file contents are cached in a static on first call: the previous
 * implementation re-read and re-decoded every JSON file for EVERY database
 * row, i.e. O(rows x files) file I/O. It also read $data['copied'] without a
 * guard, warning/crashing on result files that lack that key.
 *
 * @param int|string $notesid note record ID to look up
 * @return array|null matching entry (contains at least 'key') or null
 */
function findFileInMigrationResults($notesid) {
    // Parsed 'copied' lists keyed by file path, loaded once per process.
    static $copied_lists = null;

    if ($copied_lists === null) {
        $copied_lists = [];
        foreach (glob(__DIR__ . '/migration_results_*.json') as $file) {
            $data = json_decode(file_get_contents($file), true);
            if (is_array($data) && isset($data['copied']) && is_array($data['copied'])) {
                $copied_lists[$file] = $data['copied'];
            }
        }
    }

    $wanted = (int)$notesid;
    foreach ($copied_lists as $items) {
        foreach ($items as $item) {
            // Compare as ints: the DB yields strings while JSON may hold ints
            // (the original relied on loose == for the same reason).
            if (isset($item['notesid']) && (int)$item['notesid'] === $wanted) {
                return $item;
            }
        }
    }

    return null;
}