Storm: Reading MySQL Data in Practice
阿新 · Posted: 2019-01-22
Maven dependencies:
<dependency>
    <groupId>org.apache.storm</groupId>
    <artifactId>storm-core</artifactId>
    <version>1.1.1</version>
    <scope>provided</scope>
</dependency>
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>6.0.2</version>
</dependency>
Spout:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Map;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
public class MysqlSpout extends BaseRichSpout {
    // Connection settings; the password is redacted in the original post.
    String url = "jdbc:mysql://192.168.1.177:3306/bigdata";
    String username = "root";
    String password = "*******";
    private ResultSet res;
    private Statement sta;
    private SpoutOutputCollector collector;

    @Override
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
        try {
            // Connector/J 6.x ships the driver as com.mysql.cj.jdbc.Driver
            // (the old com.mysql.jdbc.Driver name is deprecated).
            Class.forName("com.mysql.cj.jdbc.Driver");
            Connection conn = DriverManager.getConnection(url, username, password);
            sta = conn.createStatement();
            // Run the query once; nextTuple() walks the result set row by row.
            res = sta.executeQuery("select username,school,devicetype,logintime from device_log");
        } catch (SQLException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void nextTuple() {
        try {
            // Emit one tuple per row until the result set is exhausted.
            if (res.next()) {
                String username = res.getString(1);
                String school = res.getString(2);
                String device = res.getString(3);
                String logintime = res.getString(4);
                String str = username + "\t" + school + "\t" + logintime + "\t" + device;
                collector.emit(new Values(str));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer output) {
        output.declare(new Fields("device"));
    }
}
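For local testing it helps to have some rows in device_log for the spout to read. The snippet below seeds a few rows over plain JDBC; the sample values and the SeedDeviceLog helper are purely hypothetical, and only the table and column names are taken from the spout's SELECT statement:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
public class SeedDeviceLog {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://192.168.1.177:3306/bigdata", "root", "*******");
             PreparedStatement ps = conn.prepareStatement(
                     "insert into device_log (username, school, devicetype, logintime) values (?, ?, ?, ?)")) {
            // Hypothetical test rows; the device types match the ones Trans counts.
            String[][] rows = {
                    {"user1", "school1", "PC",   "2019-01-22 09:00:00"},
                    {"user1", "school1", "安卓", "2019-01-23 10:30:00"},
                    {"user2", "school2", "蘋果", "2019-01-22 11:15:00"}
            };
            for (String[] r : rows) {
                for (int i = 0; i < r.length; i++) {
                    ps.setString(i + 1, r[i]);
                }
                ps.executeUpdate();
            }
        }
    }
}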
Bolt1:
import java.util.HashMap;
import java.util.Map;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
public class Trans extends BaseBasicBolt {
    // Per-task, in-memory aggregation keyed by "username yyyymm";
    // counts are lost if the worker restarts.
    final Map<String, Sql> map = new HashMap<String, Sql>();

    @Override
    public void execute(Tuple input, BasicOutputCollector output) {
        String str = String.valueOf(input.getValueByField("device"));
        String[] fields = str.split("\t");
        String username = fields[0];
        String school = fields[1];
        // fields[2] is the login time, e.g. "2019-01-22 10:00:00";
        // keep only the year and month ("201901").
        String time = fields[2].split(" ")[0];
        String logintime = time.split("-")[0] + time.split("-")[1];
        String device = fields[3];
        String key = username + " " + logintime;
        Sql sql;
        if (map.containsKey(key)) {
            // Existing user/month entry: bump the counter for this device type.
            sql = map.get(key);
            sql.setUsername(username);
            sql.setSchool(school);
            sql.setDate(logintime);
            sql.setDevice_type(device);
            if ("PC".equals(device)) {
                sql.setPc(sql.getPc() + 1);
            } else if ("安卓".equals(device)) {
                sql.setAndroid(sql.getAndroid() + 1);
            } else if ("蘋果".equals(device)) {
                sql.setApple(sql.getApple() + 1);
            }
            sql.setSum(sql.getSum() + 1);
        } else {
            // First login seen for this user in this month.
            sql = new Sql();
            sql.setUsername(username);
            sql.setSchool(school);
            sql.setDate(logintime);
            sql.setDevice_type(device);
            if ("PC".equals(device)) {
                sql.setPc(1);
            } else if ("安卓".equals(device)) {
                sql.setAndroid(1);
            } else if ("蘋果".equals(device)) {
                sql.setApple(1);
            }
            sql.setSum(1);
        }
        map.put(key, sql);
        output.emit(new Values(sql));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer output) {
        output.declare(new Fields("device"));
    }
}
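Both bolts depend on a Sql class that the original post does not show. A minimal sketch, with fields and accessors inferred from the calls above, could look like this (marked Serializable so Storm can ship it between workers if needed):
import java.io.Serializable;
public class Sql implements Serializable {
    private String username;
    private String school;
    private String date;        // year + month, e.g. "201901"
    private String device_type; // last device type seen for this user and month
    private int pc;
    private int android;
    private int apple;
    private int sum;
    public String getUsername() { return username; }
    public void setUsername(String username) { this.username = username; }
    public String getSchool() { return school; }
    public void setSchool(String school) { this.school = school; }
    public String getDate() { return date; }
    public void setDate(String date) { this.date = date; }
    public String getDevice_type() { return device_type; }
    public void setDevice_type(String device_type) { this.device_type = device_type; }
    public int getPc() { return pc; }
    public void setPc(int pc) { this.pc = pc; }
    public int getAndroid() { return android; }
    public void setAndroid(int android) { this.android = android; }
    public int getApple() { return apple; }
    public void setApple(int apple) { this.apple = apple; }
    public int getSum() { return sum; }
    public void setSum(int sum) { this.sum = sum; }
}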
Bolt2:
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.tuple.Tuple;
public class Output extends BaseBasicBolt {
    @Override
    public void execute(Tuple input, BasicOutputCollector collector) {
        // Terminal bolt: just print the aggregated counts.
        Sql sql = (Sql) input.getValueByField("device");
        System.out.println(sql.getUsername() + "\t"
                + sql.getSchool() + "\t"
                + sql.getDate() + "\t"
                + sql.getPc() + "\t"
                + sql.getAndroid() + "\t"
                + sql.getApple() + "\t"
                + sql.getSum());
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // Nothing is emitted downstream.
    }
}
Job:
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.topology.TopologyBuilder;
public class StartStorm {
    public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException, AuthorizationException {
        TopologyBuilder builder = new TopologyBuilder();
        // Wire spout "1" -> Trans bolt "2" -> Output bolt "3".
        builder.setSpout("1", new MysqlSpout());
        builder.setBolt("2", new Trans()).shuffleGrouping("1");
        builder.setBolt("3", new Output()).shuffleGrouping("2");
        Config config = new Config();
        config.setDebug(false);
        if (args != null && args.length > 0) {
            // Any command-line argument means "submit to the cluster".
            config.setNumWorkers(2);
            StormSubmitter.submitTopology("mysql", config, builder.createTopology());
        } else {
            // No arguments: run in-process for local testing.
            LocalCluster local = new LocalCluster();
            local.submitTopology("topo", config, builder.createTopology());
        }
    }
}
builder.setBolt("2", new Trans()).shuffleGrouping("1");
In shuffleGrouping("1"), the "1" names the Spout or Bolt that this bolt receives its tuples from.
builder.setBolt("3", new wordcount(), 5).fieldsGrouping("2", new Fields("word"));
The second argument, new Fields("word"), means tuples are partitioned to tasks by the field named "word" (this line is a generic word-count example, not part of the topology above).
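Applied to the topology above, the same syntax would look as follows. Note that fieldsGrouping hashes on the field's value, so it is most useful when the field carries a plain key (such as a username string) rather than a whole object like Sql; this is an illustration only, the job above uses shuffleGrouping:
// Illustrative variant of the wiring in StartStorm (not part of the original job):
// partition bolt "2"'s output by its declared "device" field across 5 Output tasks.
// In this topology the field carries a Sql object, so in practice you would emit
// a plain key (e.g. the username) and group on that instead.
builder.setBolt("3", new Output(), 5).fieldsGrouping("2", new Fields("device"));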