- **Data validation:**
  - Added validation to ensure that each `entryData` is an array and has a `stmt_entry_id` property.
  - Added a warning log to detect invalid data structures.
- **Logging improvements:**
  - Improved logging to record invalid data encountered during processing.
  - Added a warning log with the detailed data structure when validation fails.
- **Nested loop removal:**
  - Fixed the iteration logic by removing the nested loop and processing each `entryBatch` element directly.
- **Error counting:**
  - Added an `errorCount` counter to track the number of records that fail validation.

These changes improve the reliability of the process through the additional validation, prevent errors caused by invalid data structures, and provide more detailed log information.

Signed-off-by: Daeng Deni Mardaeni <ddeni05@gmail.com>
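For a focused view of the change, the validation-plus-`updateOrCreate` pattern described above reduces to the sketch below. It is a simplified excerpt of `saveBatch()` in the full file that follows, using the same variable, model, and property names; it is not standalone code.

```php
// Simplified excerpt of the pattern described above (see saveBatch() in the
// full file below): validate each entry before upserting it, and count
// failures instead of letting a malformed row break the whole batch.
foreach ($this->entryBatch as $entryData) {
    if (is_array($entryData) && isset($entryData['stmt_entry_id'])) {
        // updateOrCreate avoids unique-constraint errors on reprocessed rows
        StmtEntry::updateOrCreate(
            ['stmt_entry_id' => $entryData['stmt_entry_id']],
            $entryData
        );
    } else {
        Log::warning('Invalid entry data structure', ['data' => $entryData]);
        $this->errorCount++;
    }
}
```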
```php
<?php

namespace Modules\Webstatement\Jobs;

use Exception;
use Illuminate\Bus\Queueable;
use Illuminate\Contracts\Queue\ShouldQueue;
use Illuminate\Foundation\Bus\Dispatchable;
use Illuminate\Queue\InteractsWithQueue;
use Illuminate\Queue\SerializesModels;
use Illuminate\Support\Facades\Log;
use Illuminate\Support\Facades\Storage;
use Modules\Webstatement\Models\StmtEntry;
use Illuminate\Support\Facades\DB;

class ProcessStmtEntryDataJob implements ShouldQueue
{
    use Dispatchable, InteractsWithQueue, Queueable, SerializesModels;

    private const CSV_DELIMITER = '~';
    private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
    private const FILENAME = 'ST.STMT.ENTRY.csv';
    private const DISK_NAME = 'sftpStatement';
    private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage

    private string $period = '';
    private int $processedCount = 0;
    private int $errorCount = 0;
    private array $entryBatch = [];

    /**
     * Create a new job instance.
     */
    public function __construct(string $period = '')
    {
        $this->period = $period;
    }

    /**
     * Execute the job.
     */
    public function handle(): void
    {
        try {
            $this->initializeJob();

            if ($this->period === '') {
                Log::warning('No period provided for statement entry data processing');
                return;
            }

            $this->processPeriod();
            $this->logJobCompletion();
        } catch (Exception $e) {
            Log::error('Error in ProcessStmtEntryDataJob: ' . $e->getMessage());
            throw $e;
        }
    }

    private function initializeJob(): void
    {
        set_time_limit(self::MAX_EXECUTION_TIME);
        $this->processedCount = 0;
        $this->errorCount = 0;
        $this->entryBatch = [];
    }

    private function processPeriod(): void
    {
        $disk = Storage::disk(self::DISK_NAME);
        $filename = "{$this->period}." . self::FILENAME;
        $filePath = "{$this->period}/$filename";

        if (!$this->validateFile($disk, $filePath)) {
            return;
        }

        $tempFilePath = $this->createTemporaryFile($disk, $filePath, $filename);
        $this->processFile($tempFilePath, $filePath);
        $this->cleanup($tempFilePath);
    }

    private function validateFile($disk, string $filePath): bool
    {
        Log::info("Processing statement entry file: $filePath");

        if (!$disk->exists($filePath)) {
            Log::warning("File not found: $filePath");
            return false;
        }

        return true;
    }

    private function createTemporaryFile($disk, string $filePath, string $filename): string
    {
        $tempFilePath = storage_path("app/temp_$filename");
        file_put_contents($tempFilePath, $disk->get($filePath));
        return $tempFilePath;
    }

    private function processFile(string $tempFilePath, string $filePath): void
    {
        $handle = fopen($tempFilePath, "r");
        if ($handle === false) {
            Log::error("Unable to open file: $filePath");
            return;
        }

        $headers = (new StmtEntry())->getFillable();
        $rowCount = 0;
        $chunkCount = 0;

        while (($row = fgetcsv($handle, 0, self::CSV_DELIMITER)) !== false) {
            $rowCount++;
            $this->processRow($row, $headers, $rowCount, $filePath);

            // Process in chunks to avoid memory issues
            if (count($this->entryBatch) >= self::CHUNK_SIZE) {
                $this->saveBatch();
                $chunkCount++;
                Log::info("Processed chunk $chunkCount ({$this->processedCount} records so far)");
            }
        }

        // Process any remaining records
        if (!empty($this->entryBatch)) {
            $this->saveBatch();
        }

        fclose($handle);
        Log::info("Completed processing $filePath. Processed {$this->processedCount} records with {$this->errorCount} errors.");
    }

    private function processRow(array $row, array $headers, int $rowCount, string $filePath): void
    {
        if (count($headers) !== count($row)) {
            Log::warning("Row $rowCount in $filePath has incorrect column count. Expected: " .
                count($headers) . ", Got: " . count($row));
            return;
        }

        $data = array_combine($headers, $row);
        $this->cleanTransReference($data);
        $this->addToBatch($data, $rowCount, $filePath);
    }

    private function cleanTransReference(array &$data): void
    {
        if (isset($data['trans_reference'])) {
            // Clean trans_reference from \\BNK if present
            $data['trans_reference'] = preg_replace('/\\\\.*$/', '', $data['trans_reference']);
        }
    }

    /**
     * Add record to batch instead of saving immediately
     */
    private function addToBatch(array $data, int $rowCount, string $filePath): void
    {
        try {
            if (isset($data['stmt_entry_id']) && $data['stmt_entry_id'] !== 'stmt_entry_id') {
                // Add timestamp fields
                $now = now();
                $data['created_at'] = $now;
                $data['updated_at'] = $now;

                // Add to entry batch
                $this->entryBatch[] = $data;
                $this->processedCount++;
            }
        } catch (Exception $e) {
            $this->errorCount++;
            Log::error("Error processing Statement Entry at row $rowCount in $filePath: " . $e->getMessage());
        }
    }

    /**
     * Save the batch to the database using updateOrCreate
     * to avoid unique constraint errors
     *
     * @return void
     */
    private function saveBatch(): void
    {
        Log::info('Starting saveBatch with updateOrCreate');

        DB::beginTransaction();

        try {
            if (!empty($this->entryBatch)) {
                $totalProcessed = 0;

                // Process each entry directly (the batch is not a nested array)
                foreach ($this->entryBatch as $entryData) {
                    // Validate that entryData is an array and has a stmt_entry_id
                    if (is_array($entryData) && isset($entryData['stmt_entry_id'])) {
                        // Use updateOrCreate to avoid duplicate key errors
                        StmtEntry::updateOrCreate(
                            [
                                'stmt_entry_id' => $entryData['stmt_entry_id']
                            ],
                            $entryData
                        );

                        $totalProcessed++;
                    } else {
                        Log::warning('Invalid entry data structure', ['data' => $entryData]);
                        $this->errorCount++;
                    }
                }

                DB::commit();

                Log::info("Successfully processed {$totalProcessed} records with updateOrCreate");

                // Reset entry batch after successful processing
                $this->entryBatch = [];
            }
        } catch (Exception $e) {
            DB::rollback();

            Log::error("Error in saveBatch: " . $e->getMessage() . "\n" . $e->getTraceAsString());
            $this->errorCount += count($this->entryBatch);

            // Reset batch even if there's an error to prevent reprocessing the same failed records
            $this->entryBatch = [];

            throw $e;
        }
    }

    private function cleanup(string $tempFilePath): void
    {
        if (file_exists($tempFilePath)) {
            unlink($tempFilePath);
        }
    }

    private function logJobCompletion(): void
    {
        Log::info("Statement Entry data processing completed. " .
            "Total processed: {$this->processedCount}, Total errors: {$this->errorCount}");
    }
}
```
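A minimal usage sketch for dispatching the job. The period value `'202401'` and the calling context are assumptions; the job only requires that the period string match the export layout on the `sftpStatement` disk (`{period}/{period}.ST.STMT.ENTRY.csv`).

```php
<?php

// Hypothetical dispatch example (e.g. from a scheduled Artisan command or a
// controller). The '202401' period is an assumed placeholder; use whatever
// period convention the statement export actually follows.
use Modules\Webstatement\Jobs\ProcessStmtEntryDataJob;

ProcessStmtEntryDataJob::dispatch('202401');

// On Laravel 8+, the job can also be run synchronously while debugging:
ProcessStmtEntryDataJob::dispatchSync('202401');
```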