refactor(jobs): optimize job classes by adding modular methods and constants

- Added new constants to each job to improve readability:
  - `CSV_DELIMITER` for the CSV delimiter.
  - `MAX_EXECUTION_TIME` for the execution time limit (86400 seconds, i.e. 24 hours).
  - `FILENAME` for each job's file name.
  - `DISK_NAME` for the storage disk being used.
- Changed `protected` to `private` for properties such as:
  - `$period`, `$processedCount`, and `$errorCount` in every job.
- Split the processing logic into modular methods:
  - `initializeJob` to initialize the counters.
  - `processPeriod` to resolve the file for the period and start processing.
  - `validateFile` to check that the file exists.
  - `createTemporaryFile` to copy the file to a temporary location.
  - `processFile` to read the CSV contents and process them.
  - `processRow`/`mapAndSaveRecord`/`saveRecord` to map and persist each record.
  - `cleanup` to delete the temporary file.
  - `logJobCompletion` to log the final result.
- Added detailed logging around file processing:
  - Rows with a mismatched column count produce a warning.
  - The number of successfully processed records and of errors is recorded.

This refactor aims to improve code quality through modularization, readability, and easier re-testing across all job classes (see the usage sketch right below).
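
For context (not part of the diff): the `$period` property and constructor suggest each job is queued per period folder. A minimal usage sketch, where the `App\Jobs` namespace and the `202505` period value are assumptions for illustration:

```php
<?php

use App\Jobs\ProcessCategoryDataJob; // assumed namespace for the job class

// Queue the job for one period folder (period value is hypothetical).
ProcessCategoryDataJob::dispatch('202505');

// Or run it synchronously in the current process, e.g. from an artisan command.
(new ProcessCategoryDataJob('202505'))->handle();
```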
daengdeni
2025-05-26 15:26:43 +07:00
parent 41ed7c1ed9
commit c6363473ac
15 changed files with 2035 additions and 1250 deletions


@@ -16,7 +16,26 @@
{
    use Dispatchable, InteractsWithQueue, Queueable, SerializesModels;

    protected $period;
    private const CSV_DELIMITER = '~';
    private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
    private const FILENAME = 'ST.CATEGORY.csv';
    private const DISK_NAME = 'sftpStatement';
    private const HEADER_MAP = [
        'id' => 'id_category',
        'date_time' => 'date_time',
        'description' => 'description',
        'short_name' => 'short_name',
        'system_ind' => 'system_ind',
        'record_status' => 'record_status',
        'co_code' => 'co_code',
        'curr_no' => 'curr_no',
        'l_db_cr_ind' => 'l_db_cr_ind',
        'category_code' => 'category_code'
    ];
    private string $period;
    private int $processedCount = 0;
    private int $errorCount = 0;

    /**
     * Create a new job instance.
@@ -33,98 +52,151 @@
    public function handle()
    : void
    {
        try {
            set_time_limit(24 * 60 * 60);
            $disk = Storage::disk('sftpStatement');
            $processedCount = 0;
            $errorCount = 0;
            $this->initializeJob();
            if ($this->period === '') {
                Log::warning('No period provided for category data processing');
                return;
            }
            // Construct the filename based on the period folder name
            $filename = "{$this->period}.ST.CATEGORY.csv";
            $filePath = "{$this->period}/$filename";
            Log::info("Processing category file: $filePath");
            if (!$disk->exists($filePath)) {
                Log::warning("File not found: $filePath");
                return;
            }
            // Create a temporary local copy of the file
            $tempFilePath = storage_path("app/temp_$filename");
            file_put_contents($tempFilePath, $disk->get($filePath));
            $handle = fopen($tempFilePath, "r");
            if ($handle !== false) {
                // Get the headers from the first row
                $headerRow = fgetcsv($handle, 0, "~");
                // Map the headers to our model fields
                $headerMap = [
                    'id' => 'id_category',
                    'date_time' => 'date_time',
                    'description' => 'description',
                    'short_name' => 'short_name',
                    'system_ind' => 'system_ind',
                    'record_status' => 'record_status',
                    'co_code' => 'co_code',
                    'curr_no' => 'curr_no',
                    'l_db_cr_ind' => 'l_db_cr_ind',
                    'category_code' => 'category_code'
                ];
                $rowCount = 0;
                while (($row = fgetcsv($handle, 0, "~")) !== false) {
                    $rowCount++;
                    if (count($headerRow) === count($row)) {
                        // Combine the header row with the data row
                        $rawData = array_combine($headerRow, $row);
                        // Map the raw data to our model fields
                        $data = [];
                        foreach ($headerMap as $csvField => $modelField) {
                            $data[$modelField] = $rawData[$csvField] ?? null;
                        }
                        try {
                            // Skip header row if it was included in the data
                            if ($data['id_category'] !== 'id') {
                                // Use firstOrNew instead of updateOrCreate
                                $category = Category::firstOrNew(['id_category' => $data['id_category']]);
                                $category->fill($data);
                                $category->save();
                                $processedCount++;
                            }
                        } catch (Exception $e) {
                            $errorCount++;
                            Log::error("Error processing Category at row $rowCount in $filePath: " . $e->getMessage());
                        }
                    } else {
                        Log::warning("Row $rowCount in $filePath has incorrect column count. Expected: " . count($headerRow) . ", Got: " . count($row));
                    }
                }
                fclose($handle);
                Log::info("Completed processing $filePath. Processed $processedCount records with $errorCount errors.");
                // Clean up the temporary file
                unlink($tempFilePath);
            } else {
                Log::error("Unable to open file: $filePath");
            }
            Log::info("Category data processing completed. Total processed: $processedCount, Total errors: $errorCount");
            $this->processPeriod();
            $this->logJobCompletion();
        } catch (Exception $e) {
            Log::error('Error in ProcessCategoryDataJob: ' . $e->getMessage());
            throw $e;
        }
    }
    private function initializeJob()
    : void
    {
        set_time_limit(self::MAX_EXECUTION_TIME);
        $this->processedCount = 0;
        $this->errorCount = 0;
    }

    private function processPeriod()
    : void
    {
        $disk = Storage::disk(self::DISK_NAME);
        $filename = "{$this->period}." . self::FILENAME;
        $filePath = "{$this->period}/$filename";
        if (!$this->validateFile($disk, $filePath)) {
            return;
        }
        $tempFilePath = $this->createTemporaryFile($disk, $filePath, $filename);
        $this->processFile($tempFilePath, $filePath);
        $this->cleanup($tempFilePath);
    }

    private function validateFile($disk, string $filePath)
    : bool
    {
        Log::info("Processing category file: $filePath");
        if (!$disk->exists($filePath)) {
            Log::warning("File not found: $filePath");
            return false;
        }
        return true;
    }

    private function createTemporaryFile($disk, string $filePath, string $filename)
    : string
    {
        $tempFilePath = storage_path("app/temp_$filename");
        file_put_contents($tempFilePath, $disk->get($filePath));
        return $tempFilePath;
    }

    private function processFile(string $tempFilePath, string $filePath)
    : void
    {
        $handle = fopen($tempFilePath, "r");
        if ($handle === false) {
            Log::error("Unable to open file: $filePath");
            return;
        }
        // Get the headers from the first row
        $headerRow = fgetcsv($handle, 0, self::CSV_DELIMITER);
        if (!$headerRow) {
            fclose($handle);
            return;
        }
        $rowCount = 0;
        while (($row = fgetcsv($handle, 0, self::CSV_DELIMITER)) !== false) {
            $rowCount++;
            $this->processRow($headerRow, $row, $rowCount, $filePath);
        }
        fclose($handle);
        Log::info("Completed processing $filePath. Processed {$this->processedCount} records with {$this->errorCount} errors.");
    }

    private function processRow(array $headerRow, array $row, int $rowCount, string $filePath)
    : void
    {
        if (count($headerRow) !== count($row)) {
            Log::warning("Row $rowCount in $filePath has incorrect column count. Expected: " .
                count($headerRow) . ", Got: " . count($row));
            return;
        }
        // Combine the header row with the data row
        $rawData = array_combine($headerRow, $row);
        $this->mapAndSaveRecord($rawData, $rowCount, $filePath);
    }

    private function mapAndSaveRecord(array $rawData, int $rowCount, string $filePath)
    : void
    {
        // Map the raw data to our model fields
        $data = [];
        foreach (self::HEADER_MAP as $csvField => $modelField) {
            $data[$modelField] = $rawData[$csvField] ?? null;
        }
        // Skip header row if it was included in the data
        if ($data['id_category'] === 'id') {
            return;
        }
        $this->saveRecord($data, $rowCount, $filePath);
    }

    private function saveRecord(array $data, int $rowCount, string $filePath)
    : void
    {
        try {
            // Use firstOrNew instead of updateOrCreate
            $category = Category::firstOrNew(['id_category' => $data['id_category']]);
            $category->fill($data);
            $category->save();
            $this->processedCount++;
        } catch (Exception $e) {
            $this->errorCount++;
            Log::error("Error processing Category at row $rowCount in $filePath: " . $e->getMessage());
        }
    }

    private function cleanup(string $tempFilePath)
    : void
    {
        if (file_exists($tempFilePath)) {
            unlink($tempFilePath);
        }
    }

    private function logJobCompletion()
    : void
    {
        Log::info("Category data processing completed. " .
            "Total processed: {$this->processedCount}, Total errors: {$this->errorCount}");
    }
}
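
Since the commit message calls out easier re-testing, here is a minimal feature-test sketch of how the refactored job could be exercised end to end against a faked `sftpStatement` disk. The `Tests\Feature` and `App\Jobs` namespaces, the `202505` period value, the sample row values, the constructor signature, and the `categories` table name are assumptions for illustration, not details taken from this diff; it also assumes the `Category` model and its migration accept these columns.

```php
<?php

namespace Tests\Feature; // hypothetical test location

use App\Jobs\ProcessCategoryDataJob; // assumed job namespace
use Illuminate\Foundation\Testing\RefreshDatabase;
use Illuminate\Support\Facades\Storage;
use Tests\TestCase;

class ProcessCategoryDataJobTest extends TestCase
{
    use RefreshDatabase;

    public function test_it_imports_category_rows(): void
    {
        // Swap the SFTP-backed disk for a local fake so no real server is needed.
        Storage::fake('sftpStatement');

        // Seed a header row plus one data row, using the '~' delimiter the job expects.
        Storage::disk('sftpStatement')->put(
            '202505/202505.ST.CATEGORY.csv',
            "id~date_time~description~short_name~system_ind~record_status~co_code~curr_no~l_db_cr_ind~category_code\n"
            . "10001~2505261200~SAMPLE CATEGORY~SAMPLE~Y~LIVE~ID0010001~1~DB~10001"
        );

        // Run the job synchronously; '202505' is a hypothetical period folder name.
        (new ProcessCategoryDataJob('202505'))->handle();

        // 'categories' is the assumed table behind the Category model.
        $this->assertDatabaseHas('categories', ['id_category' => '10001']);
    }
}
```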