feat(webstatement): optimize customer data processing with batch handling

- Tambahkan konstanta `CHUNK_SIZE` untuk memproses data dalam ukuran batch guna mengurangi penggunaan memori.
- Perkenalkan properti baru `customerBatch` untuk menyimpan data sementara sebelum disimpan ke database.
- Ubah metode `processRow` agar menambahkan data ke batch menggunakan metode baru `addToBatch` daripada langsung menyimpannya.
- Tambahkan metode `saveBatch` untuk melakukan penyimpanan batch secara bulk menggunakan `Customer::upsert` dengan pengelolaan kolom unik dan kolom yang perlu diperbarui.
- Tambahkan log untuk setiap chunk yang telah berhasil diproses, membantu memonitor progres saat pemrosesan file CSV dengan ukuran besar.
- Pastikan sisa data yang belum diproses di akhir loop juga disimpan dengan memanggil `saveBatch`.
- Tangani kegagalan penyimpanan batch dengan log error dan reset batch untuk menghindari pemrosesan ulang data yang gagal.
- Optimalkan performa dengan menambahkan timestamp (`created_at` dan `updated_at`) secara langsung saat menambahkan data ke batch.
This commit is contained in:
daengdeni
2025-05-28 09:27:49 +07:00
parent a8dafb23c5
commit cbfe2c4aa9

View File

@@ -20,10 +20,12 @@
private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
private const FILENAME = 'ST.CUSTOMER.csv';
private const DISK_NAME = 'sftpStatement';
private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage
private string $period = '';
private int $processedCount = 0;
private int $errorCount = 0;
private array $customerBatch = [];
/**
* Create a new job instance.
@@ -61,6 +63,7 @@
set_time_limit(self::MAX_EXECUTION_TIME);
$this->processedCount = 0;
$this->errorCount = 0;
$this->customerBatch = [];
}
private function processPeriod()
@@ -111,10 +114,23 @@
$headers = (new Customer())->getFillable();
$rowCount = 0;
$chunkCount = 0;
while (($row = fgetcsv($handle, 0, self::CSV_DELIMITER)) !== false) {
$rowCount++;
$this->processRow($row, $headers, $rowCount, $filePath);
// Process in chunks to avoid memory issues
if (count($this->customerBatch) >= self::CHUNK_SIZE) {
$this->saveBatch();
$chunkCount++;
Log::info("Processed chunk $chunkCount ({$this->processedCount} records so far)");
}
}
// Process any remaining records
if (!empty($this->customerBatch)) {
$this->saveBatch();
}
fclose($handle);
@@ -131,17 +147,24 @@
}
$data = array_combine($headers, $row);
$this->saveRecord($data, $rowCount, $filePath);
$this->addToBatch($data, $rowCount, $filePath);
}
private function saveRecord(array $data, int $rowCount, string $filePath)
/**
* Add record to batch instead of saving immediately
*/
private function addToBatch(array $data, int $rowCount, string $filePath)
: void
{
try {
if (isset($data['customer_code']) && $data['customer_code'] !== 'customer_code') {
$customer = Customer::firstOrNew(['customer_code' => $data['customer_code']]);
$customer->fill($data);
$customer->save();
// Add timestamp fields
$now = now();
$data['created_at'] = $now;
$data['updated_at'] = $now;
// Add to customer batch
$this->customerBatch[] = $data;
$this->processedCount++;
}
} catch (Exception $e) {
@@ -150,6 +173,32 @@
}
}
/**
 * Flush the accumulated customer batch to the database in one bulk query.
 *
 * Uses Customer::upsert() keyed on `customer_code`: matching rows are updated
 * on every other fillable column, unmatched rows are inserted. The in-memory
 * batch is cleared in both the success and failure paths so the same records
 * are never submitted twice; on failure the whole batch is counted as errors.
 *
 * NOTE(review): assumes every row in the batch shares the same column set
 * (guaranteed upstream by array_combine() against the fillable headers).
 */
private function saveBatch(): void
{
    if (empty($this->customerBatch)) {
        return;
    }

    try {
        // All fillable columns except the unique key participate in the update.
        $updateColumns = array_diff((new Customer())->getFillable(), ['customer_code']);

        // Single bulk insert-or-update keyed on customer_code.
        Customer::upsert($this->customerBatch, ['customer_code'], $updateColumns);

        // Success: drop the flushed records from memory.
        $this->customerBatch = [];
    } catch (Exception $e) {
        Log::error("Error in saveBatch: " . $e->getMessage());

        // Count the entire failed batch as errors, then discard it so the
        // failed records are not re-submitted on the next flush.
        $this->errorCount += count($this->customerBatch);
        $this->customerBatch = [];
    }
}
private function cleanup(string $tempFilePath)
: void
{