Zebra's Spring Boot Dynamic Multiple Data Sources: Implementation Details

2018-01-25  邓启翔

Anyone who has used Spring Boot knows that configuring multiple data sources is tedious: for every data source you have to wire up three beans yourself, a DataSource, a SqlSessionFactory, and a MapperScannerConfigurer (roughly the sketch below, repeated per data source), the configuration cannot be changed dynamically, and it demands a fair amount from the developer.
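For comparison, here is a minimal sketch of that conventional static wiring, assuming Druid and MyBatis; this is not Zebra code, and the class name, connection details, and mapper package are hypothetical. Every additional data source repeats all three bean definitions:

import javax.sql.DataSource;

import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.mapper.MapperScannerConfigurer;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import com.alibaba.druid.pool.DruidDataSource;

// Hypothetical example of wiring ONE extra data source by hand.
@Configuration
public class SecondDataSourceConfig {

    @Bean
    public DataSource secondDataSource() {
        DruidDataSource ds = new DruidDataSource();
        ds.setUrl("jdbc:jtds:sqlserver://0.0.0.0:1433/tst2"); // hypothetical connection details
        ds.setUsername("sa");
        ds.setPassword("12345678");
        return ds;
    }

    @Bean
    public SqlSessionFactory secondSqlSessionFactory(@Qualifier("secondDataSource") DataSource ds) throws Exception {
        SqlSessionFactoryBean factory = new SqlSessionFactoryBean();
        factory.setDataSource(ds);
        return factory.getObject();
    }

    @Bean
    public static MapperScannerConfigurer secondMapperScannerConfigurer() {
        MapperScannerConfigurer scanner = new MapperScannerConfigurer();
        scanner.setSqlSessionFactoryBeanName("secondSqlSessionFactory");
        scanner.setBasePackage("com.example.second.mapper"); // hypothetical package
        return scanner;
    }
}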

Guosen's Zebra framework implements dynamic multi-data-source configuration. The idea: before Spring initializes its beans, read the datasource entries from the configuration file, and then, driven by that configuration, dynamically register the DataSource, SqlSessionFactory, and MapperScannerConfigurer bean definitions with the Spring container.

Step 1: Design the configuration file

# SQL logging configuration
logging.level.com.guosen.zebra.hello.mapper=debug

# JDBC URL of the first data source
zebra.database.url[0]=jdbc:jtds:sqlserver://0.0.0.0:1433/tst

# Username of the first data source
zebra.database.username[0]=sa

# Password of the first data source
zebra.database.pwd[0]=12345678

# Mapper package of the first data source
zebra.database.basePackage[0]=com.guosen.zebra.hello.mapper

# Name of the first data source
zebra.database.dataSourceName[0]=gum
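The indexed keys are what make this dynamic: adding a second data source should only require another block of entries at index [1], and so on. The values below are hypothetical:

# Second data source (hypothetical values)
zebra.database.url[1]=jdbc:jtds:sqlserver://0.0.0.0:1433/tst2
zebra.database.username[1]=sa
zebra.database.pwd[1]=12345678
zebra.database.basePackage[1]=com.guosen.zebra.hello.mapper2
zebra.database.dataSourceName[1]=gum2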

Step 2: Design the class that builds the bean definitions

import java.io.IOException;

import javax.sql.DataSource;

import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.mapper.MapperScannerConfigurer;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;

import com.alibaba.druid.pool.DruidDataSource;

public class DruidDBConfig {

    private DruidDbProperties druidDbProperties;

    // Build the DataSource bean definition (a Druid pool configured from DruidDbProperties)
    public BeanDefinitionBuilder createDataSource(String url, String username, String password) {
        BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder.genericBeanDefinition(DruidDataSource.class);
        beanDefinitionBuilder.addPropertyValue("url", url);
        beanDefinitionBuilder.addPropertyValue("username", username);
        beanDefinitionBuilder.addPropertyValue("password", password);
        beanDefinitionBuilder.addPropertyValue("initialSize", getDruidDbProperties().getInitialSize());
        beanDefinitionBuilder.addPropertyValue("minIdle", getDruidDbProperties().getMinIdle());
        beanDefinitionBuilder.addPropertyValue("maxActive", getDruidDbProperties().getMaxActive());
        beanDefinitionBuilder.addPropertyValue("maxWait", getDruidDbProperties().getMaxWait());
        beanDefinitionBuilder.addPropertyValue("timeBetweenEvictionRunsMillis", getDruidDbProperties().getTimeBetweenEvictionRunsMillis());
        beanDefinitionBuilder.addPropertyValue("minEvictableIdleTimeMillis", getDruidDbProperties().getMinEvictableIdleTimeMillis());
        beanDefinitionBuilder.addPropertyValue("validationQuery", getDruidDbProperties().getValidationQuery());
        beanDefinitionBuilder.addPropertyValue("testWhileIdle", getDruidDbProperties().isTestWhileIdle());
        beanDefinitionBuilder.addPropertyValue("testOnBorrow", getDruidDbProperties().isTestOnBorrow());
        beanDefinitionBuilder.addPropertyValue("testOnReturn", getDruidDbProperties().isTestOnReturn());
        return beanDefinitionBuilder;
    }

    // Build the SqlSessionFactory bean definition, pointing it at every *Mapper.xml on the classpath
    public BeanDefinitionBuilder createSqlSessionFactory(DataSource dataSource) throws Exception {
        BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder.genericBeanDefinition(SqlSessionFactoryBean.class);
        beanDefinitionBuilder.addPropertyValue("dataSource", dataSource);
        PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        Resource[] rs = resolver.getResources("classpath:*Mapper.xml");
        System.err.println("get Mapper Resources size " + rs.length);
        beanDefinitionBuilder.addPropertyValue("mapperLocations", rs);
        return beanDefinitionBuilder;
    }

    // Build the MapperScannerConfigurer bean definition for one mapper package
    public BeanDefinitionBuilder createMapperScannerConfigurer(String sqlSessionFactoryName, String basePackage) throws Exception {
        BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder.genericBeanDefinition(MapperScannerConfigurer.class);
        beanDefinitionBuilder.addPropertyValue("sqlSessionFactoryBeanName", sqlSessionFactoryName);
        beanDefinitionBuilder.addPropertyValue("basePackage", basePackage);
        return beanDefinitionBuilder;
    }

    // Build the DataSourceTransactionManager bean definition
    public BeanDefinitionBuilder createAnnotationDrivenTransactionManager(DataSource dataSource) {
        BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder.genericBeanDefinition(DataSourceTransactionManager.class);
        beanDefinitionBuilder.addConstructorArgValue(dataSource);
        return beanDefinitionBuilder;
    }

    public static DruidDBConfig create() throws IOException {
        DruidDbProperties properties = new DruidDbProperties();
        DruidDBConfig conf = new DruidDBConfig();
        conf.setDruidDbProperties(properties);
        return conf;
    }

    public DruidDbProperties getDruidDbProperties() {
        return druidDbProperties;
    }

    public void setDruidDbProperties(DruidDbProperties druidDbProperties) {
        this.druidDbProperties = druidDbProperties;
    }
}
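The DruidDbProperties class referenced above is not shown in the article. A minimal sketch consistent with the getters DruidDBConfig calls might look like this; the field defaults are assumptions, not Zebra's actual values:

// Hypothetical sketch of DruidDbProperties, inferred from the getters used in DruidDBConfig.
// Field names follow those getters; the default values are assumptions.
public class DruidDbProperties {

    private int initialSize = 1;
    private int minIdle = 1;
    private int maxActive = 20;
    private long maxWait = 60000;
    private long timeBetweenEvictionRunsMillis = 60000;
    private long minEvictableIdleTimeMillis = 300000;
    private String validationQuery = "SELECT 1";
    private boolean testWhileIdle = true;
    private boolean testOnBorrow = false;
    private boolean testOnReturn = false;

    public int getInitialSize() { return initialSize; }
    public int getMinIdle() { return minIdle; }
    public int getMaxActive() { return maxActive; }
    public long getMaxWait() { return maxWait; }
    public long getTimeBetweenEvictionRunsMillis() { return timeBetweenEvictionRunsMillis; }
    public long getMinEvictableIdleTimeMillis() { return minEvictableIdleTimeMillis; }
    public String getValidationQuery() { return validationQuery; }
    public boolean isTestWhileIdle() { return testWhileIdle; }
    public boolean isTestOnBorrow() { return testOnBorrow; }
    public boolean isTestOnReturn() { return testOnReturn; }
    // setters omitted for brevity
}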

Step 3: Register the bean definitions with the Spring container

import javax.sql.DataSource;

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.Configuration;

@Configuration
@EnableConfigurationProperties(DatabasesProperties.class)
public class DatabaseInit implements BeanDefinitionRegistryPostProcessor, ApplicationContextAware {

    private static ApplicationContext ctx;

    // The article does not show how these two collaborators are initialized; presumably
    // databasesProperties is bound from the zebra.database.* entries and druidDBConfig
    // comes from DruidDBConfig.create().
    private DatabasesProperties databasesProperties;
    private DruidDBConfig druidDBConfig;

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        ctx = applicationContext;
    }

    @Override
    public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException {
        int idx = 0;
        try {
            for (String url : databasesProperties.getUrl()) {
                // Register the DataSource
                BeanDefinitionBuilder ds = druidDBConfig.createDataSource(url,
                        databasesProperties.getUsername().get(idx), databasesProperties.getPwd().get(idx));
                registry.registerBeanDefinition(databasesProperties.getDataSourceName().get(idx), ds.getBeanDefinition());

                // Register the SqlSessionFactory
                DataSource dataSource = (DataSource) ctx.getBean(databasesProperties.getDataSourceName().get(idx));
                BeanDefinitionBuilder sqlSessionFactory = druidDBConfig.createSqlSessionFactory(dataSource);
                registry.registerBeanDefinition("SqlSessionFactory" + databasesProperties.getDataSourceName().get(idx),
                        sqlSessionFactory.getBeanDefinition());

                // Register the MapperScannerConfigurer
                BeanDefinitionBuilder scanner = druidDBConfig.createMapperScannerConfigurer(
                        "SqlSessionFactory" + databasesProperties.getDataSourceName().get(idx),
                        databasesProperties.getBasePackage().get(idx));
                registry.registerBeanDefinition("MapperScannerConfigurer" + databasesProperties.getDataSourceName().get(idx),
                        scanner.getBeanDefinition());

                // Register the transaction manager
                BeanDefinitionBuilder tm = druidDBConfig.createAnnotationDrivenTransactionManager(dataSource);
                registry.registerBeanDefinition("TransactionManager" + databasesProperties.getDataSourceName().get(idx),
                        tm.getBeanDefinition());

                idx++;
            }
        } catch (Exception e) {
            throw new BeanCreationException("Failed to register dynamic data source beans", e);
        }
    }

    @Override
    public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
        // nothing to do here
    }
}
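DatabasesProperties, enabled via @EnableConfigurationProperties above, is not shown in the article either. A minimal sketch consistent with the zebra.database.* keys from Step 1 and the getters used in DatabaseInit, with the prefix and list types assumed:

import java.util.ArrayList;
import java.util.List;

import org.springframework.boot.context.properties.ConfigurationProperties;

// Hypothetical sketch of DatabasesProperties; field names mirror the indexed keys in Step 1.
@ConfigurationProperties(prefix = "zebra.database")
public class DatabasesProperties {

    private List<String> url = new ArrayList<>();
    private List<String> username = new ArrayList<>();
    private List<String> pwd = new ArrayList<>();
    private List<String> basePackage = new ArrayList<>();
    private List<String> dataSourceName = new ArrayList<>();

    public List<String> getUrl() { return url; }
    public void setUrl(List<String> url) { this.url = url; }
    public List<String> getUsername() { return username; }
    public void setUsername(List<String> username) { this.username = username; }
    public List<String> getPwd() { return pwd; }
    public void setPwd(List<String> pwd) { this.pwd = pwd; }
    public List<String> getBasePackage() { return basePackage; }
    public void setBasePackage(List<String> basePackage) { this.basePackage = basePackage; }
    public List<String> getDataSourceName() { return dataSourceName; }
    public void setDataSourceName(List<String> dataSourceName) { this.dataSourceName = dataSourceName; }
}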

Test

public class HelloServiceImpl implements HelloService {

    @Autowired
    ConfMapper mapper;

    public HelloReply sayHello(HelloRequest hellorequest) {
        // The element type of the result list is not shown in the article
        List<?> list = mapper.getServerVersionConf(null);
        System.out.println(list);

        HelloReply reply = new HelloReply();
        // "test" is a collaborator that the article does not show
        reply.setMessage(test.sayHello(hellorequest.getName()));
        System.out.println("from method");
        return reply;
    }
}
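ConfMapper itself is not shown. Given the scanning set up in Step 3, it would be an ordinary MyBatis mapper interface under the configured basePackage, with its SQL in a ConfMapper.xml matched by the classpath:*Mapper.xml pattern from createSqlSessionFactory; a hypothetical sketch (parameter and return types are assumptions):

package com.guosen.zebra.hello.mapper;

import java.util.List;
import java.util.Map;

// Hypothetical sketch of the mapper interface picked up by the generated MapperScannerConfigurer.
// The SQL itself (SELECT * FROM SERVER_VERSION_CONF, as seen in the log below) would live in a
// ConfMapper.xml on the classpath.
public interface ConfMapper {

    // Return and parameter types are assumptions; the article only shows the call site
    List<Map<String, Object>> getServerVersionConf(Map<String, Object> params);
}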

Test result

2018-01-25 10:26:44.506 DEBUG 17800 --- [ault-executor-0] c.g.z.h.m.C.getServerVersionConf        : ==>  Preparing: SELECT * FROM SERVER_VERSION_CONF

2018-01-25 10:26:44.693 DEBUG 17800 --- [ault-executor-0] c.g.z.h.m.C.getServerVersionConf        : ==> Parameters:

2018-01-25 10:26:44.835 DEBUG 17800 --- [ault-executor-0] c.g.z.h.m.C.getServerVersionConf        : <==      Total: 4

Done!
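One follow-up note: because each data source registers its own DataSourceTransactionManager under the name "TransactionManager" + dataSourceName, a transactional service method presumably has to name the manager it wants by that bean name. A hypothetical example for the gum data source:

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.guosen.zebra.hello.mapper.ConfMapper;

// Hypothetical service; the class and method names are made up for illustration.
@Service
public class ConfService {

    @Autowired
    private ConfMapper mapper;

    // "TransactionManagergum" is the manager registered for the "gum" data source in DatabaseInit
    @Transactional("TransactionManagergum")
    public void readConfInTransaction() {
        mapper.getServerVersionConf(null);
    }
}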
