# Flume configuration: collect data from an Oracle table into HDFS
# Original author: 阿新 — published 2019-01-31
# ---- Agent "tier1" component declarations ----
tier1.sources = srctest
tier1.channels = chtest
tier1.sinks = sinktest

# ---- Source: Keedio flume-ng-sql-source polling an Oracle table over JDBC/Hibernate ----
tier1.sources.srctest.type = org.keedio.flume.source.SQLSource
tier1.sources.srctest.channels = chtest
tier1.sources.srctest.hibernate.connection.url = jdbc:oracle:thin:@*.*.*.*:1521:testdb
tier1.sources.srctest.hibernate.connection.user = test
# NOTE(review): plaintext credentials in the config — consider externalizing.
tier1.sources.srctest.hibernate.connection.password = testdev
tier1.sources.srctest.hibernate.connection.autocommit = true
tier1.sources.srctest.hibernate.dialect = org.hibernate.dialect.Oracle10gDialect
tier1.sources.srctest.hibernate.connection.driver_class = oracle.jdbc.driver.OracleDriver
tier1.sources.srctest.table = testtable
# Delay between polling queries, in milliseconds (10 s)
tier1.sources.srctest.run.query.delay = 10000
# File where the source persists its incremental-read position between runs
tier1.sources.srctest.status.file.path = /var/log/flume-ng
tier1.sources.srctest.status.file.name = sqlSource.status
# Optional: initial offset / custom query (disabled)
#tier1.sources.srctest.start.from = 0
#tier1.sources.srctest.custom.query = select * from testtable
tier1.sources.srctest.batch.size = 1000
tier1.sources.srctest.max.rows = 1000
# C3P0 connection pooling for the Hibernate JDBC connection
tier1.sources.srctest.hibernate.connection.provider_class = org.hibernate.connection.C3P0ConnectionProvider
tier1.sources.srctest.hibernate.c3p0.min_size = 1
tier1.sources.srctest.hibernate.c3p0.max_size = 10

# ---- Channel: durable file channel ----
tier1.channels.chtest.type = file
tier1.channels.chtest.checkpointDir = /var/log/flume-ng/checkpoint
tier1.channels.chtest.dataDirs = /var/log/flume-ng/data
# Optional capacity tuning (disabled; Flume defaults apply)
#tier1.channels.chtest.capacity = 10000
#tier1.channels.chtest.transactionCapacity = 1000
#tier1.channels.chtest.byteCapacityBufferPercentage = 20
#tier1.channels.chtest.byteCapacity = 1600

# ---- Sink: HDFS, one directory per day ----
tier1.sinks.sinktest.channel = chtest
tier1.sinks.sinktest.type = hdfs
tier1.sinks.sinktest.hdfs.path = /user/flume/testtable/%Y-%m-%d
tier1.sinks.sinktest.hdfs.filePrefix = data
# Hide in-progress files from downstream readers (dot-prefix while open)
tier1.sinks.sinktest.hdfs.inUsePrefix = .
# The %Y-%m-%d escapes in hdfs.path require a timestamp; the SQL source does
# not add a "timestamp" header, so use the sink host's local time.
# NOTE(review): added relative to the original config — confirm desired timezone semantics.
tier1.sinks.sinktest.hdfs.useLocalTimeStamp = true
# Roll files purely on time (every 30 s); size- and count-based rolling disabled
tier1.sinks.sinktest.hdfs.rollInterval = 30
tier1.sinks.sinktest.hdfs.rollSize = 0
tier1.sinks.sinktest.hdfs.rollCount = 0
tier1.sinks.sinktest.hdfs.batchSize = 1000
# Write plain text lines as an uncompressed stream
tier1.sinks.sinktest.hdfs.writeFormat = text
tier1.sinks.sinktest.hdfs.fileType = DataStream