From 9cdc7f948760cc6ca8e9088f6fc634f4c4d52b47 Mon Sep 17 00:00:00 2001
From: Daeng Deni Mardaeni
Date: Mon, 28 Jul 2025 16:00:45 +0700
Subject: [PATCH] refactor(webstatement): Migrate to StagingController and switch the storage disk to the local 'staging' disk

Changes made:
- Rename `MigrasiController.php` to `StagingController.php` (the old controller name is no longer used)
- Update the controller reference from `MigrasiController` to `StagingController` in `ProcessDailyMigration.php`
- Update all Job classes to use the `staging` disk instead of `sftpStatement`
- Change the `DISK_NAME` constant in the following classes:
  * `ProcessAccountDataJob`
  * `ProcessArrangementDataJob`
  * `ProcessAtmTransactionJob`
  * `ProcessBillDetailDataJob`
  * `ProcessCategoryDataJob`
  * `ProcessCompanyDataJob`
  * `ProcessCustomerDataJob`
  * `ProcessDataCaptureDataJob`
  * `ProcessFtTxnTypeConditionJob`
  * `ProcessFundsTransferDataJob`
  * `ProcessProvinceDataJob`
  * `ProcessSectorDataJob`
  * `ProcessStmtEntryDataJob`
  * `ProcessStmtEntryDetailDataJob`
  * `ProcessStmtNarrFormatDataJob`
  * `ProcessStmtNarrParamDataJob`
  * `ProcessTellerDataJob`
  * `ProcessTransactionDataJob`
- Temporarily comment out `array_pop()` in `ProcessDataCaptureDataJob` for debugging
- Tidy up whitespace and formatting in `ProcessProvinceDataJob` (trailing spaces removed, missing final newline added)
- Point storage access at the local filesystem (`staging` disk); an example disk definition is sketched after the diff
- Consolidate disk naming and usage for the `staging` environment
- Remove the dependency on an SFTP connection during development/staging

Benefits:
- Faster development and debugging thanks to local file access
- Simpler configuration for the staging environment
- Better code consistency and maintainability
- Fewer potential errors caused by external (SFTP) connections
---
 app/Console/ProcessDailyMigration.php | 4 +-
 ...siController.php => StagingController.php} | 4 +-
 app/Jobs/ProcessAccountDataJob.php | 2 +-
 app/Jobs/ProcessArrangementDataJob.php | 2 +-
 app/Jobs/ProcessAtmTransactionJob.php | 2 +-
 app/Jobs/ProcessBillDetailDataJob.php | 2 +-
 app/Jobs/ProcessCategoryDataJob.php | 2 +-
 app/Jobs/ProcessCompanyDataJob.php | 2 +-
 app/Jobs/ProcessCustomerDataJob.php | 2 +-
 app/Jobs/ProcessDataCaptureDataJob.php | 4 +-
 app/Jobs/ProcessFtTxnTypeConditionJob.php | 2 +-
 app/Jobs/ProcessFundsTransferDataJob.php | 2 +-
 app/Jobs/ProcessProvinceDataJob.php | 62 +++++++++----------
 app/Jobs/ProcessSectorDataJob.php | 2 +-
 app/Jobs/ProcessStmtEntryDataJob.php | 2 +-
 app/Jobs/ProcessStmtEntryDetailDataJob.php | 2 +-
 app/Jobs/ProcessStmtNarrFormatDataJob.php | 2 +-
 app/Jobs/ProcessStmtNarrParamDataJob.php | 2 +-
 app/Jobs/ProcessTellerDataJob.php | 2 +-
 app/Jobs/ProcessTransactionDataJob.php | 2 +-
 20 files changed, 53 insertions(+), 53 deletions(-)
 rename app/Http/Controllers/{MigrasiController.php => StagingController.php} (98%)

diff --git a/app/Console/ProcessDailyMigration.php b/app/Console/ProcessDailyMigration.php
index bcad6ad..a4a35e9 100644
--- a/app/Console/ProcessDailyMigration.php
+++ b/app/Console/ProcessDailyMigration.php
@@ -4,7 +4,7 @@ namespace Modules\Webstatement\Console;
 
 use Exception;
 use Illuminate\Console\Command;
-use Modules\Webstatement\Http\Controllers\MigrasiController;
+use Modules\Webstatement\Http\Controllers\StagingController;
 use Illuminate\Support\Facades\Log;
 
 class ProcessDailyMigration extends Command
@@ -46,7 +46,7 @@ class ProcessDailyMigration extends Command
         $this->info('Period: ' . ($period ?? '-1 day (default)'));
 
         try {
-            $controller = app(MigrasiController::class);
+            $controller = app(StagingController::class);
             $response = $controller->index($processParameter, $period);
 
             $responseData = json_decode($response->getContent(), true);
diff --git a/app/Http/Controllers/MigrasiController.php b/app/Http/Controllers/StagingController.php
similarity index 98%
rename from app/Http/Controllers/MigrasiController.php
rename to app/Http/Controllers/StagingController.php
index 726c341..4feb970 100644
--- a/app/Http/Controllers/MigrasiController.php
+++ b/app/Http/Controllers/StagingController.php
@@ -27,7 +27,7 @@
     ProcessProvinceDataJob,
     ProcessStmtEntryDetailDataJob};
 
-class MigrasiController extends Controller
+class StagingController extends Controller
 {
     private const PROCESS_TYPES = [
         'transaction' => ProcessTransactionDataJob::class,
@@ -116,7 +116,7 @@
             'period' => $period
         ]);
 
-        $disk = Storage::disk('sftpStatement');
+        $disk = Storage::disk('staging');
 
         if ($processParameter) {
             Log::info('Processing parameter data');
diff --git a/app/Jobs/ProcessAccountDataJob.php b/app/Jobs/ProcessAccountDataJob.php
index 7fb10d1..2b1fa3c 100644
--- a/app/Jobs/ProcessAccountDataJob.php
+++ b/app/Jobs/ProcessAccountDataJob.php
@@ -20,7 +20,7 @@ class ProcessAccountDataJob implements ShouldQueue
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.ACCOUNT.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage
 
     private string $period = '';
diff --git a/app/Jobs/ProcessArrangementDataJob.php b/app/Jobs/ProcessArrangementDataJob.php
index de98ba6..664d887 100644
--- a/app/Jobs/ProcessArrangementDataJob.php
+++ b/app/Jobs/ProcessArrangementDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.AA.ARRANGEMENT.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage
 
     private string $period = '';
diff --git a/app/Jobs/ProcessAtmTransactionJob.php b/app/Jobs/ProcessAtmTransactionJob.php
index 7f1a15d..5a7430b 100644
--- a/app/Jobs/ProcessAtmTransactionJob.php
+++ b/app/Jobs/ProcessAtmTransactionJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.ATM.TRANSACTION.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage
     private const HEADER_MAP = [
         'id' => 'transaction_id',
diff --git a/app/Jobs/ProcessBillDetailDataJob.php b/app/Jobs/ProcessBillDetailDataJob.php
index d04ad33..f4b7836 100644
--- a/app/Jobs/ProcessBillDetailDataJob.php
+++ b/app/Jobs/ProcessBillDetailDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.AA.BILL.DETAILS.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage
 
     private string $period = '';
diff --git a/app/Jobs/ProcessCategoryDataJob.php b/app/Jobs/ProcessCategoryDataJob.php
index c30bb00..1ace7a6 100644
--- a/app/Jobs/ProcessCategoryDataJob.php
+++ b/app/Jobs/ProcessCategoryDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.CATEGORY.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const HEADER_MAP = [
         'id' => 'id_category',
         'date_time' => 'date_time',
diff --git a/app/Jobs/ProcessCompanyDataJob.php b/app/Jobs/ProcessCompanyDataJob.php
index 658cf98..de196d1 100644
--- a/app/Jobs/ProcessCompanyDataJob.php
+++ b/app/Jobs/ProcessCompanyDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.COMPANY.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const FIELD_MAP = [
         'id' => null, // Not mapped to model
         'date_time' => null, // Not mapped to model
diff --git a/app/Jobs/ProcessCustomerDataJob.php b/app/Jobs/ProcessCustomerDataJob.php
index 635e16c..93327f9 100644
--- a/app/Jobs/ProcessCustomerDataJob.php
+++ b/app/Jobs/ProcessCustomerDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.CUSTOMER.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage
 
     private string $period = '';
diff --git a/app/Jobs/ProcessDataCaptureDataJob.php b/app/Jobs/ProcessDataCaptureDataJob.php
index b36f49e..781af2c 100644
--- a/app/Jobs/ProcessDataCaptureDataJob.php
+++ b/app/Jobs/ProcessDataCaptureDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.DATA.CAPTURE.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage
     private const CSV_HEADERS = [
         'id',
@@ -187,7 +187,7 @@
     {
         // Exclude the last field from CSV
        if (count($row) > 0) {
-            array_pop($row);
+            //array_pop($row);
             Log::info("Excluded last field from row $rowCount. New column count: " . count($row));
         }
 
diff --git a/app/Jobs/ProcessFtTxnTypeConditionJob.php b/app/Jobs/ProcessFtTxnTypeConditionJob.php
index 2aa2c0b..1e1374b 100644
--- a/app/Jobs/ProcessFtTxnTypeConditionJob.php
+++ b/app/Jobs/ProcessFtTxnTypeConditionJob.php
@@ -27,7 +27,7 @@
     ];
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.FT.TXN.TYPE.CONDITION.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
 
     private string $period = '';
     private int $processedCount = 0;
diff --git a/app/Jobs/ProcessFundsTransferDataJob.php b/app/Jobs/ProcessFundsTransferDataJob.php
index 05d6220..ca78ad9 100644
--- a/app/Jobs/ProcessFundsTransferDataJob.php
+++ b/app/Jobs/ProcessFundsTransferDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.FUNDS.TRANSFER.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
 
     private string $period = '';
     private int $processedCount = 0;
diff --git a/app/Jobs/ProcessProvinceDataJob.php b/app/Jobs/ProcessProvinceDataJob.php
index 11fce4b..66ccd28 100644
--- a/app/Jobs/ProcessProvinceDataJob.php
+++ b/app/Jobs/ProcessProvinceDataJob.php
@@ -20,7 +20,7 @@ class ProcessProvinceDataJob implements ShouldQueue
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.PROVINCE.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
 
     private string $period = '';
     private int $processedCount = 0;
@@ -29,7 +29,7 @@ class ProcessProvinceDataJob implements ShouldQueue
 
     /**
      * Membuat instance job baru untuk memproses data provinsi
-     * 
+     *
      * @param string $period Periode data yang akan diproses
      */
     public function __construct(string $period = '')
@@ -41,17 +41,17 @@ class ProcessProvinceDataJob implements ShouldQueue
     /**
      * Menjalankan job untuk memproses file ST.PROVINCE.csv
      * Menggunakan transaction untuk memastikan konsistensi data
-     * 
+     *
      * @return void
     * @throws Exception
     */
     public function handle(): void
     {
         DB::beginTransaction();
-        
+
         try {
             Log::info('ProcessProvinceDataJob: Memulai pemrosesan data provinsi');
-            
+
             $this->initializeJob();
 
             if ($this->period === '') {
@@ -62,10 +62,10 @@ class ProcessProvinceDataJob implements ShouldQueue
             $this->processPeriod();
 
             $this->logJobCompletion();
-            
+
             DB::commit();
             Log::info('ProcessProvinceDataJob: Transaction berhasil di-commit');
-            
+
         } catch (Exception $e) {
             DB::rollback();
             Log::error('ProcessProvinceDataJob: Error dalam pemrosesan, transaction di-rollback: ' . $e->getMessage());
@@ -76,7 +76,7 @@ class ProcessProvinceDataJob implements ShouldQueue
     /**
      * Inisialisasi pengaturan job
      * Mengatur timeout dan reset counter
-     * 
+     *
      * @return void
     */
     private function initializeJob(): void
@@ -85,14 +85,14 @@ class ProcessProvinceDataJob implements ShouldQueue
         $this->processedCount = 0;
         $this->errorCount = 0;
         $this->skippedCount = 0;
-        
+
         Log::info('ProcessProvinceDataJob: Job diinisialisasi dengan timeout ' . self::MAX_EXECUTION_TIME . ' detik');
     }
 
     /**
      * Memproses file untuk periode tertentu
      * Mengambil file dari SFTP dan memproses data
-     * 
+     *
      * @return void
     */
     private function processPeriod(): void
@@ -101,7 +101,7 @@ class ProcessProvinceDataJob implements ShouldQueue
         $filePath = "$this->period/" . self::FILENAME;
         Log::info('ProcessProvinceDataJob: Memproses periode ' . $this->period);
-        
+
         if (!$this->validateFile($disk, $filePath)) {
             return;
         }
 
@@ -113,7 +113,7 @@ class ProcessProvinceDataJob implements ShouldQueue
 
     /**
      * Validasi keberadaan file di storage
-     * 
+     *
      * @param mixed $disk Storage disk instance
     * @param string $filePath Path file yang akan divalidasi
     * @return bool
@@ -133,7 +133,7 @@ class ProcessProvinceDataJob implements ShouldQueue
 
     /**
      * Membuat file temporary untuk pemrosesan
-     * 
+     *
     * @param mixed $disk Storage disk instance
     * @param string $filePath Path file sumber
     * @return string Path file temporary
@@ -142,7 +142,7 @@ class ProcessProvinceDataJob implements ShouldQueue
     {
         $tempFilePath = storage_path("app/temp_" . self::FILENAME);
         file_put_contents($tempFilePath, $disk->get($filePath));
-        
+
         Log::info("ProcessProvinceDataJob: File temporary dibuat: $tempFilePath");
         return $tempFilePath;
     }
@@ -150,7 +150,7 @@ class ProcessProvinceDataJob implements ShouldQueue
     /**
      * Memproses file CSV dan mengimpor data ke database
      * Format CSV: id~date_time~province~province_name
-     * 
+     *
     * @param string $tempFilePath Path file temporary
     * @param string $filePath Path file asli untuk logging
     * @return void
@@ -164,20 +164,20 @@ class ProcessProvinceDataJob implements ShouldQueue
         }
 
         Log::info("ProcessProvinceDataJob: Memulai pemrosesan file: $filePath");
-        
+
         $rowCount = 0;
         $isFirstRow = true;
 
         while (($row = fgetcsv($handle, 0, self::CSV_DELIMITER)) !== false) {
             $rowCount++;
-            
+
             // Skip header row
             if ($isFirstRow) {
                 $isFirstRow = false;
                 Log::info("ProcessProvinceDataJob: Melewati header row: " . implode(self::CSV_DELIMITER, $row));
                 continue;
             }
-            
+
             $this->processRow($row, $rowCount, $filePath);
         }
 
@@ -187,7 +187,7 @@ class ProcessProvinceDataJob implements ShouldQueue
 
     /**
      * Memproses satu baris data CSV
-     * 
+     *
     * @param array $row Data baris CSV
     * @param int $rowCount Nomor baris untuk logging
     * @param string $filePath Path file untuk logging
@@ -207,16 +207,16 @@ class ProcessProvinceDataJob implements ShouldQueue
             'code' => trim($row[2]), // province code
             'name' => trim($row[3]) // province_name
         ];
-        
+
         Log::debug("ProcessProvinceDataJob: Memproses baris $rowCount dengan data: " . json_encode($data));
-        
+
         $this->saveRecord($data, $rowCount, $filePath);
     }
 
     /**
      * Menyimpan record provinsi ke database
      * Menggunakan updateOrCreate untuk menghindari duplikasi
-     * 
+     *
     * @param array $data Data provinsi yang akan disimpan
     * @param int $rowCount Nomor baris untuk logging
     * @param string $filePath Path file untuk logging
@@ -237,10 +237,10 @@ class ProcessProvinceDataJob implements ShouldQueue
                 ['code' => $data['code']], // Kondisi pencarian
                 ['name' => $data['name']] // Data yang akan diupdate/insert
             );
-            
+
             $this->processedCount++;
             Log::debug("ProcessProvinceDataJob: Berhasil menyimpan provinsi ID: {$province->id}, Code: {$data['code']}, Name: {$data['name']}");
-            
+
         } catch (Exception $e) {
             $this->errorCount++;
             Log::error("ProcessProvinceDataJob: Error menyimpan data provinsi pada baris $rowCount di $filePath: " . $e->getMessage());
@@ -250,7 +250,7 @@ class ProcessProvinceDataJob implements ShouldQueue
 
     /**
      * Membersihkan file temporary
-     * 
+     *
     * @param string $tempFilePath Path file temporary yang akan dihapus
     * @return void
     */
@@ -264,7 +264,7 @@ class ProcessProvinceDataJob implements ShouldQueue
 
     /**
      * Logging hasil akhir pemrosesan job
-     * 
+     *
      * @return void
     */
     private function logJobCompletion(): void
@@ -273,14 +273,14 @@ class ProcessProvinceDataJob implements ShouldQueue
             "Total diproses: {$this->processedCount}, " .
             "Total error: {$this->errorCount}, " .
             "Total dilewati: {$this->skippedCount}";
-        
+
         Log::info($message);
-        
+
         // Log summary untuk monitoring
         if ($this->errorCount > 0) {
             Log::warning("ProcessProvinceDataJob: Terdapat {$this->errorCount} error dalam pemrosesan");
         }
-        
+
         if ($this->skippedCount > 0) {
             Log::info("ProcessProvinceDataJob: Terdapat {$this->skippedCount} baris yang dilewati");
         }
@@ -288,7 +288,7 @@ class ProcessProvinceDataJob implements ShouldQueue
 
     /**
      * Handle job failure
-     * 
+     *
     * @param Exception $exception
     * @return void
     */
@@ -297,4 +297,4 @@ class ProcessProvinceDataJob implements ShouldQueue
         Log::error('ProcessProvinceDataJob: Job gagal dijalankan: ' . $exception->getMessage());
         Log::error('ProcessProvinceDataJob: Stack trace: ' . $exception->getTraceAsString());
     }
-}
\ No newline at end of file
+}
diff --git a/app/Jobs/ProcessSectorDataJob.php b/app/Jobs/ProcessSectorDataJob.php
index 3cd8ff7..15ad207 100644
--- a/app/Jobs/ProcessSectorDataJob.php
+++ b/app/Jobs/ProcessSectorDataJob.php
@@ -19,7 +19,7 @@ class ProcessSectorDataJob implements ShouldQueue
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.SECTOR.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
 
     private string $period = '';
     private int $processedCount = 0;
diff --git a/app/Jobs/ProcessStmtEntryDataJob.php b/app/Jobs/ProcessStmtEntryDataJob.php
index f4f04c1..0e3314e 100644
--- a/app/Jobs/ProcessStmtEntryDataJob.php
+++ b/app/Jobs/ProcessStmtEntryDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.STMT.ENTRY.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage
 
     private string $period = '';
diff --git a/app/Jobs/ProcessStmtEntryDetailDataJob.php b/app/Jobs/ProcessStmtEntryDetailDataJob.php
index 47ceb8f..8689ecd 100644
--- a/app/Jobs/ProcessStmtEntryDetailDataJob.php
+++ b/app/Jobs/ProcessStmtEntryDetailDataJob.php
@@ -20,7 +20,7 @@ class ProcessStmtEntryDetailDataJob implements ShouldQueue
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.STMT.ENTRY.DETAIL.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage
 
     private string $period = '';
diff --git a/app/Jobs/ProcessStmtNarrFormatDataJob.php b/app/Jobs/ProcessStmtNarrFormatDataJob.php
index e342aef..f27ce76 100644
--- a/app/Jobs/ProcessStmtNarrFormatDataJob.php
+++ b/app/Jobs/ProcessStmtNarrFormatDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.STMT.NARR.FORMAT.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
 
     private string $period = '';
     private int $processedCount = 0;
diff --git a/app/Jobs/ProcessStmtNarrParamDataJob.php b/app/Jobs/ProcessStmtNarrParamDataJob.php
index 2e62faa..062617c 100644
--- a/app/Jobs/ProcessStmtNarrParamDataJob.php
+++ b/app/Jobs/ProcessStmtNarrParamDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.STMT.NARR.PARAM.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
 
     private string $period = '';
     private int $processedCount = 0;
diff --git a/app/Jobs/ProcessTellerDataJob.php b/app/Jobs/ProcessTellerDataJob.php
index ea0bd05..20e4e08 100644
--- a/app/Jobs/ProcessTellerDataJob.php
+++ b/app/Jobs/ProcessTellerDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.TELLER.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
     private const CHUNK_SIZE = 1000; // Process data in chunks to reduce memory usage
     private const HEADER_MAP = [
         'id' => 'id_teller',
diff --git a/app/Jobs/ProcessTransactionDataJob.php b/app/Jobs/ProcessTransactionDataJob.php
index e226ca0..0dd2f54 100644
--- a/app/Jobs/ProcessTransactionDataJob.php
+++ b/app/Jobs/ProcessTransactionDataJob.php
@@ -19,7 +19,7 @@
     private const CSV_DELIMITER = '~';
     private const MAX_EXECUTION_TIME = 86400; // 24 hours in seconds
     private const FILENAME = 'ST.TRANSACTION.csv';
-    private const DISK_NAME = 'sftpStatement';
+    private const DISK_NAME = 'staging';
 
     private string $period = '';
     private int $processedCount = 0;
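
The patch only repoints the jobs and the controller at a disk named 'staging'; the disk definition itself is not part of this diff. The sketch below is a hypothetical excerpt of what such a local disk entry in config/filesystems.php could look like. Every key and path shown here is an assumption for illustration, not code from this repository.

<?php

// Hypothetical excerpt from config/filesystems.php (not part of this patch).
// It illustrates one way a local "staging" disk could back the
// Storage::disk('staging') / DISK_NAME = 'staging' calls used above.

return [

    'disks' => [

        // Local disk assumed to hold the per-period folders containing the ST.*.csv files.
        'staging' => [
            'driver' => 'local',
            'root'   => storage_path('app/staging'), // assumed location
            'throw'  => false,
        ],

        // The previous SFTP disk, shown commented out only for comparison. The patch
        // removes its usage from the jobs; whether this entry stays configured is out of scope.
        // 'sftpStatement' => [
        //     'driver'   => 'sftp',
        //     'host'     => env('SFTP_STATEMENT_HOST'),
        //     'username' => env('SFTP_STATEMENT_USERNAME'),
        //     'password' => env('SFTP_STATEMENT_PASSWORD'),
        //     'root'     => env('SFTP_STATEMENT_ROOT', '/'),
        // ],

    ],

];

With a disk like this in place, a call such as Storage::disk('staging')->exists("$period/ST.PROVINCE.csv") in the jobs resolves against the local path instead of opening an SFTP session, which is what the "Benefits" section of the commit message refers to.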