多数据源切换.md

初始化、添加、切换数据源

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
// Initialize the default ("master") MySQL connection pool
MysqlDataSourceUtil.getInstance().initDataSource();
// Initialize the default ("master") Hive connection pool
HiveDataSourceUtil.getInstance().initDataSource();

// Register the additional MySQL data sources configured in the database
sourceService service = (sourceService) SpringContextUtil.getBean("sourceService");
List<AnalysisDataSource> data = service.selectAll();
// isEmpty() states the intent more clearly than size() != 0
if (data != null && !data.isEmpty()) {
logger.info("开始初始化数据源......");
for (AnalysisDataSource source : data) {
MysqlDataSourceUtil.getInstance().initOthersDataSource(source);
logger.info("初始化数据源:(" + source.getId() + ")完毕......");
}
// Push the accumulated sources into the routing data source in one shot
MysqlDataSourceUtil.getInstance().flushDataSource();
} else {
logger.info("数据源暂无配置......");
}
logger.info("==============springboot启动成功=================");

// Bind a data source key to the current thread (switch the active source)
HiveDataSourceUtil.getInstance().setDataSource(hiveKey);
MysqlDataSourceUtil.getInstance().setDataSource(dbkey);

以下为实现代码:

  1. HIVE连接池配置(HiveDruidConfig.java)

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    package com.isoftstone.ismart.analysis.config;

    import com.alibaba.druid.pool.DruidDataSource;
    import com.isoftstone.ismart.analysis.util.datasoruce.HiveDataSourceUtil;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.boot.context.properties.ConfigurationProperties;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.jdbc.core.JdbcTemplate;

    import javax.sql.DataSource;
    import java.util.HashMap;
    import java.util.Map;

    /**
     * Hive connection-pool (Druid) configuration.
     *
     * <p>All fields are bound from the {@code hive.*} section of the application
     * properties via {@link ConfigurationProperties} on the class.
     *
     * @author Colin.Ye
     * @version 1.0
     * @ClassName HiveDruidConfig
     * @date 2019/4/22
     **/
    @Configuration
    @ConfigurationProperties(prefix = "hive")
    public class HiveDruidConfig {
        private static final Logger logger = LoggerFactory.getLogger(HiveDruidConfig.class);

        // Bound from hive.* properties; Druid pool tuning knobs.
        private String url;
        private String user;
        private String password;
        private String driverClassName;
        private int initialSize;
        private int minIdle;
        private int maxActive;
        private int maxWait;
        private int timeBetweenEvictionRunsMillis;
        private int minEvictableIdleTimeMillis;
        private String validationQuery;
        private boolean testWhileIdle;
        private boolean testOnBorrow;
        private boolean testOnReturn;
        private boolean poolPreparedStatements;
        private int maxPoolPreparedStatementPerConnectionSize;

        /**
         * Builds the Druid pool for the default ("master") Hive instance from the
         * bound {@code hive.*} properties.
         *
         * @return the configured Druid pool
         */
        @Bean(name = "hiveDruidDataSource")
        @Qualifier("hiveDruidDataSource")
        public DataSource hiveDruidDataSource() {
            DruidDataSource datasource = new DruidDataSource();
            datasource.setUrl(url);
            datasource.setUsername(user);
            datasource.setPassword(password);
            datasource.setDriverClassName(driverClassName);

            // pool configuration
            datasource.setInitialSize(initialSize);
            datasource.setMinIdle(minIdle);
            datasource.setMaxActive(maxActive);
            datasource.setMaxWait(maxWait);
            datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
            datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
            datasource.setValidationQuery(validationQuery);
            datasource.setTestWhileIdle(testWhileIdle);
            datasource.setTestOnBorrow(testOnBorrow);
            datasource.setTestOnReturn(testOnReturn);
            datasource.setPoolPreparedStatements(poolPreparedStatements);
            datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
            logger.debug("Hive DataSource Inject Successfully...");
            return datasource;
        }

        /**
         * Registers the {@link HiveDataSourceUtil} singleton as the Spring-managed
         * routing data source, seeded with the master pool under the key "master".
         *
         * <p>FIX: {@code hiveDruidDataSource()} was previously invoked twice; with
         * full CGLIB proxying both calls return the same bean, but in lite mode
         * (proxyBeanMethods = false) that would silently create two pools. Call it
         * once and reuse the instance.
         *
         * @return the routing data source
         */
        @Bean(name = "hiveDynamicDataSource")
        public DataSource hiveDynamicDataSource() {
            HiveDataSourceUtil myDynamicDataSource = HiveDataSourceUtil.getInstance();
            DataSource master = hiveDruidDataSource();
            // Register the configured target data sources.
            Map<Object, Object> targetDataSources = new HashMap<>();
            targetDataSources.put("master", master);
            myDynamicDataSource.setTargetDataSources(targetDataSources);
            // Default used when no key is bound to the current thread, so
            // dynamically added sources never need to re-register "master".
            myDynamicDataSource.setDefaultTargetDataSource(master);
            return myDynamicDataSource;
        }

        // Getters and setters for the properties above omitted in this listing.

        /**
         * JdbcTemplate wired to the routing data source so that Hive queries honor
         * the per-thread switch.
         */
        @Bean(name = "hiveDruidTemplate")
        public JdbcTemplate hiveDruidTemplate(@Qualifier("hiveDynamicDataSource") DataSource dataSource) {
            return new JdbcTemplate(dataSource);
        }
    }
  2. MySql连接池配置(Config.java)

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    114
    115
    116
    117
    118
    119
    120
    121
    package com.isoftstone.ismart.analysis.config;

    import com.alibaba.druid.pool.DruidDataSource;
    import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
    import com.alibaba.druid.support.http.StatViewServlet;
    import com.alibaba.druid.support.http.WebStatFilter;
    import com.github.pagehelper.PageHelper;
    import com.isoftstone.ismart.analysis.filter.SwitchDBFilter;
    import com.isoftstone.ismart.analysis.filter.analysisFilter;
    import com.isoftstone.ismart.analysis.util.datasoruce.MysqlDataSourceUtil;
    import com.mchange.v2.c3p0.ComboPooledDataSource;
    import org.apache.ibatis.plugin.Interceptor;
    import org.mybatis.spring.SqlSessionFactoryBean;
    import org.slf4j.Logger;
    import org.springframework.boot.context.properties.ConfigurationProperties;
    import org.springframework.boot.web.servlet.FilterRegistrationBean;
    import org.springframework.boot.web.servlet.ServletRegistrationBean;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.context.annotation.Primary;
    import org.springframework.jdbc.datasource.DataSourceTransactionManager;
    import org.springframework.transaction.PlatformTransactionManager;

    import javax.sql.DataSource;
    import java.beans.PropertyVetoException;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.Properties;

    /**
     * MySQL connection-pool, routing-data-source and MyBatis configuration.
     */
    @Configuration
    public class Config {
        private final Logger logger = org.slf4j.LoggerFactory.getLogger(Config.class);

        /**
         * Master (default) Druid pool, bound from {@code spring.datasource.*}.
         *
         * <p>{@code @Primary} marks this as the default DataSource bean; declaring
         * it as a bean also keeps the Druid monitoring page working.
         *
         * @return the master pool
         */
        @Bean(destroyMethod = "close", initMethod = "init", name = "masterDataSource")
        @ConfigurationProperties(prefix = "spring.datasource")
        @Primary
        public DataSource masterDataSource() {
            logger.info("创建masterDataSource");
            DruidDataSource druidDataSource = DruidDataSourceBuilder.create().build();
            return druidDataSource;
        }

        /**
         * Registers the {@link MysqlDataSourceUtil} singleton as the Spring-managed
         * routing data source, seeded with the master pool under the key "master".
         *
         * <p>FIX: {@code masterDataSource()} was previously invoked twice; with full
         * CGLIB proxying both calls return the same bean, but in lite mode that
         * would create two pools. Call it once and reuse the instance.
         */
        @Bean(name = "dynamicDataSource")
        public DataSource dynamicDataSource() {
            MysqlDataSourceUtil myDynamicDataSource = MysqlDataSourceUtil.getInstance();
            DataSource master = masterDataSource();
            Map<Object, Object> targetDataSources = new HashMap<>();
            targetDataSources.put("master", master);
            myDynamicDataSource.setTargetDataSources(targetDataSources);
            // Default used when no routing key is bound to the current thread, so
            // dynamically added sources never need to re-register "master".
            myDynamicDataSource.setDefaultTargetDataSource(master);
            return myDynamicDataSource;
        }

        /**
         * SqlSessionFactoryBean wired to the routing data source. This wiring is
         * the crux: if dynamicDataSource were not used here, switching data
         * sources would have no effect on MyBatis.
         */
        @Bean(value = "sqlSessionFactoryBean222")
        @ConfigurationProperties(prefix = "mybatis")
        public SqlSessionFactoryBean sqlSessionFactoryBean() {
            SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
            sqlSessionFactoryBean.setDataSource(dynamicDataSource());
            sqlSessionFactoryBean.setPlugins(new Interceptor[]{pageHelper()});
            return sqlSessionFactoryBean;
        }

        /**
         * Transaction manager over the routing data source so transactions follow
         * the per-thread switch.
         */
        @Bean
        public PlatformTransactionManager transactionManager() {
            return new DataSourceTransactionManager(dynamicDataSource());
        }

        /**
         * Druid web-stat filter; static resources and the Druid console itself are
         * excluded from statistics.
         *
         * @return the filter registration
         */
        @Bean
        public FilterRegistrationBean statFilter() {
            FilterRegistrationBean filterRegistrationBean = new FilterRegistrationBean();
            filterRegistrationBean.setFilter(new WebStatFilter());
            filterRegistrationBean.addUrlPatterns("/*");
            filterRegistrationBean.addInitParameter("exclusions", "*.js,*.gif,*.png,*.css,*.ico,/druid/*");
            return filterRegistrationBean;
        }

        /**
         * PageHelper pagination interceptor (plugged into MyBatis above).
         *
         * <p>FIX: {@code "params"} was set twice, and since
         * {@link Properties#setProperty} replaces the previous value, the second
         * call silently discarded {@code count=countSql}. Both settings are now
         * merged into a single entry. The duplicated {@code "reasonable"} setting
         * was also removed.
         */
        public Interceptor pageHelper() {
            PageHelper pageHelper = new PageHelper();
            Properties properties = new Properties();
            // Reasonable paging: page <= 0 yields page 1, page > total yields the last page.
            properties.setProperty("reasonable", "true");
            properties.setProperty("dialect", "mysql");
            // Treat the RowBounds offset as the page number (off by default).
            properties.setProperty("offsetAsPageNum", "true");
            // Run a count query for RowBounds paging (off by default).
            properties.setProperty("rowBoundsWithCount", "true");
            // pageSize == 0 (or RowBounds limit == 0) skips paging and returns everything.
            properties.setProperty("pageSizeZero", "true");
            // Allow paging parameters to be passed via mapper method arguments (default false).
            properties.setProperty("supportMethodsArguments", "true");
            properties.setProperty("returnPageInfo", "check");
            properties.setProperty("params", "count=countSql;pageNum=page;pageSize=rows;orderBy=orderBy");
            pageHelper.setProperties(properties);
            return pageHelper;
        }
    }

  3. AnalysisDataSource.java
    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    package com.isoftstone.ismart.analysis.entity;

    import com.alibaba.druid.pool.DruidDataSource;

    import java.io.Serializable;
    import java.util.Date;

    /**
     * One row of the data-source configuration table: the connection settings for
     * a dynamically registered data source.
     */
    public class AnalysisDataSource implements Serializable {

    private static final long serialVersionUID = 1L;

    // Unique key; used as the routing key when the source is registered
    // (see MysqlDataSourceUtil.initOthersDataSource).
    private String id;

    // Host[:port]/database part of the JDBC URL; the MySQL utility builds
    // "jdbc:mysql://" + url + query parameters around it.
    private String url;

    private String username;

    private String password;

    // Source type discriminator — presumably MySQL vs Hive; TODO confirm the encoding.
    private Byte sourcetype;

    private Date createtime;

    private Date updatetime;

    // Status flag — semantics not shown in this listing; verify against callers.
    private Byte sourcestatus;

    private String version;

    // Pre-built pool used by the Hive variant; may be null when the row only
    // carries raw connection settings (see HiveDataSourceUtil.initOthersDataSource).
    private DruidDataSource druidDataSource;

    // Getters and setters for each property omitted in this listing.

    }

  4. 多数据源切换工具抽象类(BaseDataSource.java)
    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    package com.isoftstone.ismart.analysis.util.datasoruce;

    import com.alibaba.druid.pool.DruidDataSource;
    import com.isoftstone.ismart.analysis.entity.AnalysisDataSource;
    import com.isoftstone.ismart.analysis.util.SpringContextUtil;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    /**
     * Base class for multi-data-source switching utilities.
     *
     * <p>Keeps a registry of named data sources plus a per-thread "current key"
     * that {@link AbstractRoutingDataSource} consults on every connection lookup.
     *
     * @author Colin.Ye
     * @version 1.0
     * @ClassName BaseDataSource
     * @date 2019/4/23
     **/
    public abstract class BaseDataSource extends AbstractRoutingDataSource {
        private final Logger log = LoggerFactory.getLogger(BaseDataSource.class);

        // FIX: sources are added at runtime (initOthersDataSource) while request
        // threads route through this map, so use a ConcurrentHashMap rather than a
        // plain HashMap to avoid unsafe concurrent access.
        protected final Map<Object, Object> dataSourceMap = new ConcurrentHashMap<>();

        // Data-source key bound to the current thread (thread-safe by construction).
        private final ThreadLocal<String> contextHolder = new ThreadLocal<String>();

        // NOTE(review): duplicates contextHolder and is never cleared here; kept
        // only because it is public and may be read elsewhere in the project.
        public final ThreadLocal<String> key = new ThreadLocal<String>();

        /**
         * Switches the current thread to the given registered data source.
         *
         * @param dataSource key of a previously registered data source
         * @throws RuntimeException if no data source is registered under the key
         */
        public void setDataSource(String dataSource) {
            if (dataSourceMap.containsKey(dataSource)) {
                contextHolder.set(dataSource);
                key.set(dataSource);
            } else {
                throw new RuntimeException("数据源:" + dataSource + "不存在");
            }
        }

        /** Returns the key bound to the current thread, or null if none is set. */
        public String getDataSource() {
            return contextHolder.get();
        }

        /**
         * Unbinds the current thread from its data source key; routing then falls
         * back to the subclass default. (Does NOT remove the data source itself.)
         */
        public void clearDataSource() {
            contextHolder.remove();
        }

        /**
         * Initializes the subclass's default data source.
         */
        protected abstract void initDataSource();

        /**
         * Registers the Spring-managed master pool under the key "master".
         *
         * @param springBeanId bean id of the master DruidDataSource
         */
        protected void initDataSource(String springBeanId) {
            // Fetch the master pool from the Spring context.
            DruidDataSource masterDataSource = (DruidDataSource) SpringContextUtil.getBean(springBeanId);
            addDataSource("master", masterDataSource);
        }

        /**
         * Pushes {@link #dataSourceMap} into the routing data source so new entries
         * become resolvable.
         */
        protected abstract void flushDataSource();

        /**
         * Registers a data source under the given key.
         *
         * @param key              registry key
         * @param masterDataSource pool to register; null entries are rejected
         *                         because they could never yield a connection
         */
        protected void addDataSource(String key, DruidDataSource masterDataSource) {
            if (key == null || masterDataSource == null) {
                log.warn("Ignoring data source registration with null key or null pool (key={})", key);
                return;
            }
            dataSourceMap.put(key, masterDataSource);
        }

        /**
         * Registers an extra data source built from a configuration-table row.
         *
         * @param analysisDataSource configuration row; implementations tolerate null
         */
        protected abstract void initOthersDataSource(AnalysisDataSource analysisDataSource);
    }

  5. HIVE多数据源切换工具类(HiveDataSourceUtil.java)
    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    package com.isoftstone.ismart.analysis.util.datasoruce;

    import com.isoftstone.ismart.analysis.entity.AnalysisDataSource;
    import com.isoftstone.ismart.analysis.util.SpringContextUtil;
    import org.apache.commons.lang3.StringUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;


    /**
     * Hive multi-data-source switching utility.
     *
     * <p>The same singleton instance is registered as the Spring bean
     * "hiveDynamicDataSource" (see HiveDruidConfig), so switching through
     * {@link #getInstance()} affects the container-managed routing bean.
     *
     * @author Colin.Ye
     * @version 1.0
     * @ClassName HiveDataSourceUtil
     * @date 2019/4/23
     **/
    public class HiveDataSourceUtil extends BaseDataSource {
        private final Logger log = LoggerFactory.getLogger(HiveDataSourceUtil.class);

        // Initialization-on-demand holder: lazy and thread-safe without locking.
        private static class HiveDataSourceInstance {
            private static final HiveDataSourceUtil INSTANCE = new HiveDataSourceUtil();
        }

        public static HiveDataSourceUtil getInstance() {
            return HiveDataSourceInstance.INSTANCE;
        }

        /**
         * Registers the Spring "hiveDruidDataSource" bean as master and makes it
         * resolvable.
         */
        @Override
        public void initDataSource() {
            // Register the master pool under the key "master".
            super.initDataSource("hiveDruidDataSource");
            // Refresh the routing bean so the entry is resolvable.
            flushDataSource();
        }

        /**
         * Pushes the registry into the Spring-managed routing bean.
         */
        @Override
        public void flushDataSource() {
            // Fetch the container-managed routing data source.
            HiveDataSourceUtil myDynamicDataSource = (HiveDataSourceUtil) SpringContextUtil.getBean("hiveDynamicDataSource");
            myDynamicDataSource.setTargetDataSources(dataSourceMap);
            // afterPropertiesSet() re-resolves targetDataSources into the
            // resolvedDataSources map that lookups actually use.
            myDynamicDataSource.afterPropertiesSet();
        }

        /**
         * Registers an extra Hive data source from a configuration row.
         *
         * <p>FIX: the row's pre-built DruidDataSource may be unset; registering a
         * null pool can never route successfully and only fails later, so skip it
         * with a warning instead. (Dead commented-out pool-tuning code removed.)
         *
         * @param analysisDataSource configuration row; ignored when null or without id
         */
        @Override
        public void initOthersDataSource(AnalysisDataSource analysisDataSource) {
            if (analysisDataSource == null || StringUtils.isBlank(analysisDataSource.getId())) {
                return;
            }
            if (analysisDataSource.getDruidDataSource() == null) {
                log.warn("Hive data source ({}) has no connection pool configured, skipping", analysisDataSource.getId());
                return;
            }
            // Register the pre-built pool under the row's id.
            addDataSource(analysisDataSource.getId(), analysisDataSource.getDruidDataSource());
        }

        /**
         * Routing key for the current thread; falls back to "master" when the
         * thread has no binding.
         */
        @Override
        protected Object determineCurrentLookupKey() {
            String datasource = getDataSource();
            if (StringUtils.isBlank(datasource)) {
                datasource = "master";
            }
            // FIX: use this class's own SLF4J logger (the original logged through
            // the commons-logging "logger" inherited from AbstractDataSource and
            // left "log" unused); parameterized form avoids eager concatenation.
            log.info("hive-datasource={}", datasource);
            return datasource;
        }
    }

  6. MYSQL多数据源切换工具类(MysqlDataSourceUtil.java)
    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    package com.isoftstone.ismart.analysis.util.datasoruce;

    import com.alibaba.druid.pool.DruidDataSource;
    import com.isoftstone.ismart.analysis.config.Config;
    import com.isoftstone.ismart.analysis.entity.AnalysisDataSource;
    import com.isoftstone.ismart.analysis.util.SpringContextUtil;
    import org.apache.commons.lang3.StringUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    import java.sql.Connection;
    import java.sql.SQLException;

    /**
     * MySQL multi-data-source switching utility.
     *
     * <p>The same singleton instance is registered as the Spring bean
     * "dynamicDataSource" (see Config), so switching through
     * {@link #getInstance()} affects the container-managed routing bean.
     *
     * @author Colin.Ye
     * @version 1.0
     * @ClassName MysqlDataSourceUtil
     * @date 2019/4/23
     **/
    public class MysqlDataSourceUtil extends BaseDataSource {
        // FIX: copy-paste bug — the logger was created for HiveDataSourceUtil.class,
        // so this class's log lines were attributed to the wrong category.
        private final Logger log = LoggerFactory.getLogger(MysqlDataSourceUtil.class);

        // Initialization-on-demand holder: lazy and thread-safe without locking.
        private static class MysqlDataSourceInstance {
            private static final MysqlDataSourceUtil INSTANCE = new MysqlDataSourceUtil();
        }

        public static MysqlDataSourceUtil getInstance() {
            return MysqlDataSourceInstance.INSTANCE;
        }

        /**
         * Registers the Spring "masterDataSource" bean as master and makes it
         * resolvable.
         */
        @Override
        public void initDataSource() {
            // Register the master pool under the key "master".
            super.initDataSource("masterDataSource");
            // Refresh the routing bean so the entry is resolvable.
            flushDataSource();
        }

        /**
         * Pushes the registry into the Spring-managed routing bean.
         */
        @Override
        public void flushDataSource() {
            // Fetch the container-managed routing data source.
            MysqlDataSourceUtil myDynamicDataSource = (MysqlDataSourceUtil) SpringContextUtil.getBean("dynamicDataSource");
            myDynamicDataSource.setTargetDataSources(dataSourceMap);
            // afterPropertiesSet() re-resolves targetDataSources into the
            // resolvedDataSources map that lookups actually use.
            myDynamicDataSource.afterPropertiesSet();
        }

        /**
         * Builds and registers a MySQL Druid pool from a configuration row.
         *
         * @param analysisDataSource configuration row; ignored when null or without id
         */
        @Override
        public void initOthersDataSource(AnalysisDataSource analysisDataSource) {
            if (analysisDataSource == null || StringUtils.isBlank(analysisDataSource.getId())) {
                return;
            }
            DruidDataSource druidDataSource = new DruidDataSource();
            druidDataSource.setUsername(analysisDataSource.getUsername());
            druidDataSource.setPassword(analysisDataSource.getPassword());
            druidDataSource.setDriverClassName("com.mysql.jdbc.Driver");
            // The row stores only host[:port]/database; wrap it into a full JDBC URL.
            druidDataSource.setUrl("jdbc:mysql://" + analysisDataSource.getUrl() + "?useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&allowMultiQueries=true");
            // Register the pool under the row's id.
            addDataSource(analysisDataSource.getId(), druidDataSource);
        }

        /**
         * Obtains a connection from the pool exposed by {@code Config.getPool()}.
         *
         * <p>NOTE(review): getPool() is not visible in the Config listing here —
         * confirm it exists; also note this bypasses the routing data source.
         *
         * @return a pooled connection, or null when acquisition fails
         */
        public synchronized Connection getPoolConnection() {
            Connection connection = null;
            try {
                connection = Config.getPool().getConnection();
            } catch (SQLException e) {
                // FIX: log through SLF4J with the cause instead of printStackTrace().
                log.error("Failed to obtain pooled connection", e);
            }
            return connection;
        }

        /**
         * Routing key for the current thread; falls back to "master" when the
         * thread has no binding.
         */
        @Override
        protected Object determineCurrentLookupKey() {
            String datasource = getDataSource();
            // FIX: use commons-lang3 StringUtils (already imported, and consistent
            // with the Hive variant) instead of the MySQL driver's internal
            // com.mysql.jdbc.StringUtils, which is not public API.
            if (StringUtils.isBlank(datasource)) {
                datasource = "master";
            }
            // FIX: use this class's own SLF4J logger instead of the inherited
            // commons-logging "logger"; parameterized form avoids concatenation.
            log.info("mysql-datasource={}", datasource);
            return datasource;
        }
    }

借鉴文章如下
SpringBoot重点详解–整合hive-jdbc
SpringBoot整合hive-jdbc遇到的坑

相关文章

评论系统未开启,无法评论!