@inproceedings{bbd5f26f11d348d2bb56c3f7ca123bda,
title = "Implementation of change data capture in {ETL} process for data warehouse using {HDFS} and {Apache Spark}",
abstract = "This study aims to increase ETL process efficiency and reduce processing time by applying the method of Change Data Capture (CDC) in distributed system using Hadoop Distributed File System (HDFS) and Apache Spark in the data warehouse of Learning Analytics system of Universitas Indonesia. Usually, increases in the number of records in the data source result in an increase in ETL processing time for the data warehouse system. This condition occurs as a result of inefficient ETL process using the full load method. Using the full load method, ETL has to process the same number of records as the number of records in the data sources. The proposed ETL model design with the application of CDC method using HDFS and Apache Spark can reduce the amount of data in the ETL process. Consequently, the process becomes more efficient and the ETL processing time is reduced approximately 53% in average.",
keywords = "big data, change data capture, data warehouse, distributed system, extract transform load",
author = "Denny and Atmaja, {I. Putu Medagia} and Ari Saptawijaya and Siti Aminah",
note = "Publisher Copyright: {\textcopyright} 2017 IEEE.; 2017 International Workshop on Big Data and Information Security, WBIS 2017 ; Conference date: 23-09-2017 Through 24-09-2017",
year = "2017",
month = jul,
day = "2",
doi = "10.1109/IWBIS.2017.8275102",
language = "English",
series = "Proceedings - WBIS 2017: 2017 International Workshop on Big Data and Information Security",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "49--55",
booktitle = "Proceedings - WBIS 2017",
address = "United States",
}