Reposted

Storm + JDBC integration: word count

First, add the storm-jdbc module and the MySQL JDBC driver to the project's Maven dependencies:

    <dependency>
      <groupId>org.apache.storm</groupId>
      <artifactId>storm-jdbc</artifactId>
      <version>1.1.1</version>
      <type>jar</type>
    </dependency>

    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>5.1.38</version>
    </dependency>
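
The MysqlCountBolt below writes into a MySQL table named wordcount, but the post does not show the table itself. The layout here is inferred from the bolt's insert statement (a word column and a count column, with word used as the key), so treat it as an assumption. A minimal one-off setup sketch using plain JDBC and the same local credentials as the bolt:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// One-off table setup. Assumptions: a local MySQL instance, the root/test
// credentials used in MysqlCountBolt, an already-existing wordcount schema,
// and a (word, count) layout inferred from the bolt's
// "insert into wordcount values(...)" statement.
public class WordCountTableSetup {

    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost/wordcount", "root", "test");
             Statement stmt = conn.createStatement()) {
            stmt.executeUpdate(
                    "CREATE TABLE IF NOT EXISTS wordcount (" +
                    " word VARCHAR(64) PRIMARY KEY," +
                    " `count` INT NOT NULL" +
                    ")");
        }
    }
}

mysql-connector-java 5.1 registers its driver automatically under JDBC 4, so no Class.forName call is needed. With the dependencies and table in place, the complete topology source follows:
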
package com.waiting;


import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.jdbc.bolt.JdbcInsertBolt;
import org.apache.storm.jdbc.common.Column;
import org.apache.storm.jdbc.common.ConnectionProvider;
import org.apache.storm.jdbc.common.HikariCPConnectionProvider;
import org.apache.storm.jdbc.common.JdbcClient;
import org.apache.storm.jdbc.mapper.JdbcMapper;
import org.apache.storm.jdbc.mapper.SimpleJdbcMapper;
import org.apache.storm.shade.com.google.common.collect.Lists;
import org.apache.storm.shade.com.google.common.collect.Maps;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.ITuple;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

import java.io.Serializable;
import java.sql.Types;
import java.util.*;

public class LocalWordCountJDBCStormTopology {


    public static class DataSourceSpout extends BaseRichSpout {

        private SpoutOutputCollector collector;

        @Override
        public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
            this.collector = collector;
        }


        public static final String[] words = new String[]{"apple", "orange", "pineapple", "banana"};

        @Override
        public void nextTuple() {
             Random random = new Random();
             String word = words[random.nextInt(words.length)];

             this.collector.emit(new Values(word));

             System.out.println("word:" + word);

            Utils.sleep(1000);

        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {

            declarer.declare(new Fields("line")
            );
        }
    }

    public static class SplitBolt extends BaseRichBolt{

        private OutputCollector collector;
        @Override
        public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
            this.collector = collector;
        }

        @Override
        public void execute(Tuple input) {
            String word = input.getStringByField("line");
            this.collector.emit(new Values(word));
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
           declarer.declare(new Fields("word"));
        }
    }

    public static class CountBolt extends BaseRichBolt{

        private OutputCollector collector;
        @Override
        public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
            this.collector = collector;
        }

        Map<String, Integer> map = new HashMap<String, Integer>();
        @Override
        public void execute(Tuple input) {
            String word = input.getStringByField("word");
            Integer count = map.get(word);
            if(count == null){
                count = 0;
            }
            count++;
            map.put(word, count);

            this.collector.emit(new Values(word, map.get(word)));
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("word", "count"));
        }
    }




    public static class MysqlCountBolt extends BaseRichBolt{

        private OutputCollector collector;
        private JdbcClient jdbcClient;
        private ConnectionProvider connectionProvider;

        @Override
        public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
            this.collector = collector;
            Map hikariConfigMap = Maps.newHashMap();
            hikariConfigMap.put("dataSourceClassName","com.mysql.jdbc.jdbc2.optional.MysqlDataSource");
            hikariConfigMap.put("dataSource.url", "jdbc:mysql://localhost/wordcount");
            hikariConfigMap.put("dataSource.user","root");
            hikariConfigMap.put("dataSource.password","test");
            connectionProvider = new HikariCPConnectionProvider(hikariConfigMap);
            // initialize the database connection pool
            connectionProvider.prepare();
            jdbcClient = new JdbcClient(connectionProvider, 30); // 30-second query timeout
        }

        Map<String,Integer> map = new HashMap<String,Integer>();

        @Override
        public void execute(Tuple input) {
            String word = input.getStringByField("word");
            Integer count = input.getIntegerByField("count");

            List<Column> list = new ArrayList<>();
            list.add(new Column("word", word, Types.VARCHAR));
            List<List<Column>> select = jdbcClient.select("select word from wordcount where word = ?", list);

            if (select != null && !select.isEmpty()) {
                //update
                jdbcClient.executeSql("update wordcount set count = "+count+" where word = '"+word+"'");

            }else{
                //insert
                jdbcClient.executeSql("insert into wordcount values( '"+word+"',"+count+")");

            }
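
            // Note: both branches above splice word/count straight into the
            // SQL string. JdbcClient also accepts parameterized queries; a
            // hedged sketch (executeInsertQuery and its signature are assumed
            // from the storm-jdbc API, not shown in the original post):
            //
            //   List<Column> row = new ArrayList<>();
            //   row.add(new Column("word", word, Types.VARCHAR));
            //   row.add(new Column("count", count, Types.INTEGER));
            //   jdbcClient.executeInsertQuery(
            //           "insert into wordcount (word, `count`) values (?, ?)",
            //           Collections.singletonList(row));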
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {

//            declarer.declare(new Fields("word","count"));
        }

        @Override
        public void cleanup() {
            connectionProvider.cleanup();
        }
    }

    public static void main(String[] args){

        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("DataSourceSpout", new DataSourceSpout());
        builder.setBolt("SplitBolt", new SplitBolt()).shuffleGrouping("DataSourceSpout");
        builder.setBolt("CountBolt", new CountBolt()).shuffleGrouping("SplitBolt");

//        Map hikariConfigMap = Maps.newHashMap();
//        hikariConfigMap.put("dataSourceClassName","com.mysql.jdbc.jdbc2.optional.MysqlDataSource");
//        hikariConfigMap.put("dataSource.url", "jdbc:mysql://localhost/wordcount");
//        hikariConfigMap.put("dataSource.user","root");
//        hikariConfigMap.put("dataSource.password","test");
//        ConnectionProvider connectionProvider = new HikariCPConnectionProvider(hikariConfigMap);
//
//        String tableName = "wordcount";
//        JdbcMapper simpleJdbcMapper = new SimpleJdbcMapper(tableName, connectionProvider);
//
//        JdbcInsertBolt userPersistanceBolt = new JdbcInsertBolt(connectionProvider, simpleJdbcMapper)
//                .withTableName(tableName)
//                .withQueryTimeoutSecs(30);


        builder.setBolt("MysqlCountBolt", new MysqlCountBolt()).shuffleGrouping("CountBolt");

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("LocalWordCountStormTopology", new Config(), builder.createTopology());
    }
}
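
The commented-out block in main() points at the alternative, built-in persistence path: storm-jdbc's JdbcInsertBolt with a SimpleJdbcMapper, which reads the table's column schema and maps tuple fields of the same names. A minimal sketch of wiring it in place of MysqlCountBolt, assuming the same wordcount table and Hikari settings (the bolt id and variable names here are illustrative):

// Drop-in replacement for the MysqlCountBolt wiring inside main().
Map hikariConfigMap = Maps.newHashMap();
hikariConfigMap.put("dataSourceClassName", "com.mysql.jdbc.jdbc2.optional.MysqlDataSource");
hikariConfigMap.put("dataSource.url", "jdbc:mysql://localhost/wordcount");
hikariConfigMap.put("dataSource.user", "root");
hikariConfigMap.put("dataSource.password", "test");
ConnectionProvider connectionProvider = new HikariCPConnectionProvider(hikariConfigMap);

String tableName = "wordcount";
JdbcMapper simpleJdbcMapper = new SimpleJdbcMapper(tableName, connectionProvider);

JdbcInsertBolt wordCountPersistBolt = new JdbcInsertBolt(connectionProvider, simpleJdbcMapper)
        .withTableName(tableName)
        .withQueryTimeoutSecs(30);

builder.setBolt("JdbcInsertBolt", wordCountPersistBolt).shuffleGrouping("CountBolt");

Unlike MysqlCountBolt, JdbcInsertBolt only inserts, so each incoming (word, count) tuple becomes a new row rather than updating an existing one; that upsert-style behaviour is presumably why the post implements MysqlCountBolt by hand.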
Original: http://www.waitingfy.com/archives/4531