  • Dynamic Data Source Configuration

    1. The dynamic data source context

    package tv.zhongchi.base.common.datasource;
    
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    
    /**
     * @ClassName DBContextHolder
     * @Author ZhangRF
     * @CreateDate 2020/6/17
     * @Description Dynamic data source context: holds the lookup key of the data source bound to the current thread
     */
    public class DBContextHolder {
        private final static Logger log = LoggerFactory.getLogger(DBContextHolder.class);
        // Per-thread storage of the current lookup key - thread-safe
        private static final ThreadLocal<String> contextHolder = new ThreadLocal<String>();
    
        // Call this method to switch the data source
        public static void setDataSource(String dataSource) {
            contextHolder.set(dataSource);
        }
    
        // Get the current data source key
        public static String getDataSource() {
            return contextHolder.get();
        }
    
        // Remove the data source key
        public static void clearDataSource() {
            contextHolder.remove();
        }
    
    }
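
    For reference, a minimal usage sketch (the class and method below are hypothetical and assume the same package as DBContextHolder): switch the key before running queries and clear it in a finally block so the ThreadLocal does not leak into the next request served by the same pooled thread.

    public class TenantQueryExample {

        public void queryForTenant(String tenantId) {
            // Switch the routing key for the current thread
            DBContextHolder.setDataSource(tenantId);
            try {
                // ... run MyBatis/JDBC calls here; DynamicDataSource routes them by this key ...
            } finally {
                // Always clear, otherwise the pooled thread keeps the previous tenant's key
                DBContextHolder.clearDataSource();
            }
        }
    }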

    2. Dynamic data source configuration: DruidDBConfig

    package tv.zhongchi.base.common.datasource;
    
    import com.alibaba.druid.pool.DruidDataSource;
    import com.baomidou.mybatisplus.annotation.DbType;
    import com.baomidou.mybatisplus.annotation.FieldStrategy;
    import com.baomidou.mybatisplus.annotation.IdType;
    import com.baomidou.mybatisplus.autoconfigure.ConfigurationCustomizer;
    import com.baomidou.mybatisplus.core.MybatisConfiguration;
    import com.baomidou.mybatisplus.core.config.GlobalConfig;
    import com.baomidou.mybatisplus.extension.MybatisMapWrapperFactory;
    import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor;
    import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean;
    import org.apache.ibatis.logging.stdout.StdOutImpl;
    import org.apache.ibatis.plugin.Interceptor;
    import org.apache.ibatis.session.SqlSessionFactory;
    import org.apache.ibatis.type.JdbcType;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.context.annotation.Primary;
    import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
    import org.springframework.jdbc.datasource.DataSourceTransactionManager;
    import org.springframework.transaction.PlatformTransactionManager;
    import org.springframework.transaction.annotation.EnableTransactionManagement;
    import tv.zhongchi.base.common.handler.MyMetaObjectHandler;
    
    import javax.sql.DataSource;
    import java.sql.SQLException;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    
    /**
     * @ClassName DruidDBConfig
     * @Author ZhangRF
     * @CreateDate 2020/6/17
     * @Description DruidDBConfig is annotated with @Configuration and carries the data source configuration. The DataSource declared with @Bean is managed by the Spring container, and the @Primary annotation means it takes precedence over DataSources from other sources.
     */
    @Configuration
    @EnableTransactionManagement
    public class DruidDBConfig {
        private final Logger log = LoggerFactory.getLogger(getClass());
    
        // adi database connection settings
        @Value("${spring.datasource.url}")
        private String dbUrl;
        @Value("${spring.datasource.username}")
        private String username;
        @Value("${spring.datasource.password}")
        private String password;
        @Value("${spring.datasource.driverClassName}")
        private String driverClassName;
        // Connection pool settings
        @Value("${spring.datasource.initialSize}")
        private int initialSize;
        @Value("${spring.datasource.minIdle}")
        private int minIdle;
        @Value("${spring.datasource.maxActive}")
        private int maxActive;
        @Value("${spring.datasource.maxWait}")
        private int maxWait;
        @Value("${spring.profiles.active}")
        private String profile;
    
        @Bean // Declare this as a bean instance
        @Primary // Among DataSource beans of the same type, use the annotated one first
        @Qualifier("adiDataSource")
        public DataSource dataSource() throws SQLException {
            DruidDataSource datasource = new DruidDataSource();
            // Basic connection settings
            datasource.setUrl(this.dbUrl);
            datasource.setUsername(username);
            datasource.setPassword(password);
            datasource.setDriverClassName(driverClassName);
            // Connection pool settings
            datasource.setInitialSize(initialSize);
            datasource.setMinIdle(minIdle);
            datasource.setMaxActive(maxActive);
            datasource.setMaxWait(maxWait);
            datasource.setPoolPreparedStatements(true); // Whether to cache PreparedStatements (PSCache). PSCache gives a big performance boost on cursor-supporting databases such as Oracle; for MySQL it is usually recommended to disable it.
            datasource.setMaxPoolPreparedStatementPerConnectionSize(50);
            datasource.setConnectionProperties("oracle.net.CONNECT_TIMEOUT=6000;oracle.jdbc.ReadTimeout=60000"); // Long-running queries are bounded by ReadTimeout, in milliseconds
            datasource.setTestOnBorrow(true); // Run validationQuery when a connection is borrowed; recommended true so that a dead connection is never handed out
            datasource.setTestWhileIdle(true); // Recommended true; has no performance impact and improves safety. When borrowing, if the connection has been idle longer than timeBetweenEvictionRunsMillis, validationQuery is run to check that it is still valid.
            String validationQuery = "select 1 from dual";
            datasource.setValidationQuery(validationQuery); // SQL used to validate connections; must be a query. If validationQuery is null, testOnBorrow, testOnReturn and testWhileIdle have no effect.
            datasource.setFilters("stat,wall"); // Comma-separated filter aliases. Common filters: stat (monitoring/statistics), log4j (logging), wall (SQL-injection defense).
            datasource.setTimeBetweenEvictionRunsMillis(60000); // Interval between checks for idle connections to close, in milliseconds
            datasource.setMinEvictableIdleTimeMillis(180000); // Minimum time a connection stays in the pool, in milliseconds (3 minutes here)
            datasource.setKeepAlive(true); // With keepAlive enabled, idle connections within minIdle whose idle time exceeds minEvictableIdleTimeMillis run the validationQuery as a keep-alive, so they are not dropped by a firewall as long as minEvictableIdleTimeMillis is shorter than the firewall's idle cutoff
            datasource.setRemoveAbandoned(true); // Reclaim leaked connections that exceed the time limit
            datasource.setRemoveAbandonedTimeout(3600); // Time after which a connection counts as leaked (must exceed the longest transaction), in seconds; 1 hour here
            datasource.setLogAbandoned(true); // Log when an abandoned connection is removed
            return datasource;
        }
    
        @Bean(name = "dynamicDataSource")
        @Qualifier("dynamicDataSource")
        public DynamicDataSource dynamicDataSource() throws SQLException {
            DynamicDataSource dynamicDataSource = new DynamicDataSource();
            // Configure the default data source
            dynamicDataSource.setDefaultTargetDataSource(dataSource());
            Map<Object, Object> targetDataSources = new HashMap<Object, Object>(1);
            targetDataSources.put("adiDataSource", dataSource());
            dynamicDataSource.setTargetDataSources(targetDataSources);
            return dynamicDataSource;
        }
    
    
        /**
         * Return Map results with camelCase keys (MybatisMapWrapperFactory)
         *
         * @return
         */
        @Bean
        public ConfigurationCustomizer configurationCustomizer() {
            return i -> i.setObjectWrapperFactory(new MybatisMapWrapperFactory());
        }
    
        @Bean
        public EasySqlInjector easySqlInjector() {
            return new EasySqlInjector();
        }
    
        /**
         * Transaction manager for the dynamic data source
         *
         * @return
         */
        @Bean
        public PlatformTransactionManager transactionManager() {
            try {
                return new DataSourceTransactionManager(dynamicDataSource());
            } catch (SQLException e) {
                // Fail fast instead of silently returning a null bean
                throw new IllegalStateException("Failed to initialize the dynamic data source", e);
            }
        }
    
        /**
         * SqlSessionFactory wired to the dynamic (routing) data source
         *
         * @return
         * @throws Exception
         */
        @Bean("sqlSessionFactory")
        public SqlSessionFactory sqlSessionFactory() throws Exception {
            MybatisSqlSessionFactoryBean sqlSessionFactory = new MybatisSqlSessionFactoryBean();
            sqlSessionFactory.setDataSource(dynamicDataSource());
            sqlSessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath*:/mapper/**/*.xml"));
            sqlSessionFactory.setTypeAliasesPackage("tv.zhongchi.*.*.po");
    
            MybatisConfiguration configuration = new MybatisConfiguration();
            configuration.setJdbcTypeForNull(JdbcType.NULL);
            configuration.setMapUnderscoreToCamelCase(true);
            configuration.setCacheEnabled(false);
            if ("test".equals(profile) || "dev".equals(profile)) {
                configuration.setLogImpl(StdOutImpl.class);
            }
            sqlSessionFactory.setConfiguration(configuration);
    
            // MyBatis-Plus global configuration
            GlobalConfig globalConfig = new GlobalConfig();
            GlobalConfig.DbConfig dbConfig = new GlobalConfig.DbConfig();
            dbConfig.setIdType(IdType.AUTO);
            dbConfig.setTableUnderline(true);
            dbConfig.setCapitalMode(false);
            dbConfig.setUpdateStrategy(FieldStrategy.NOT_EMPTY);
            globalConfig.setDbConfig(dbConfig);
            globalConfig.setSqlInjector(easySqlInjector());
    
            globalConfig.setMetaObjectHandler(new MyMetaObjectHandler());
            sqlSessionFactory.setGlobalConfig(globalConfig);
    
            // Pagination plugin
            List<Interceptor> interceptors = new ArrayList<>();
            PaginationInterceptor paginationInterceptor = new PaginationInterceptor();
            // Set the database dialect
            paginationInterceptor.setDialectType(DbType.MYSQL.getDb());
            interceptors.add(paginationInterceptor);
            sqlSessionFactory.setPlugins(interceptors.toArray(new Interceptor[0]));
            return sqlSessionFactory.getObject();
        }
    
    }
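
    The @Value placeholders above expect the following keys; a minimal application.properties sketch (every value below is an illustrative placeholder, not taken from the original project) might look like:

    spring.profiles.active=dev
    spring.datasource.url=jdbc:mysql://localhost:3306/adi?useUnicode=true&characterEncoding=utf8
    spring.datasource.username=root
    spring.datasource.password=change-me
    spring.datasource.driverClassName=com.mysql.cj.jdbc.Driver
    spring.datasource.initialSize=5
    spring.datasource.minIdle=5
    spring.datasource.maxActive=20
    spring.datasource.maxWait=60000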

    3. The dynamic data source: DynamicDataSource

    package tv.zhongchi.base.common.datasource;
    
    
    import com.alibaba.druid.pool.DruidDataSource;
    import com.alibaba.druid.stat.DruidDataSourceStatManager;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
    import org.springframework.scheduling.annotation.Scheduled;
    import org.springframework.util.StringUtils;
    import tv.zhongchi.common.exception.BizException;
    
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.util.Map;
    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;
    
    
    /**
     * @ClassName DynamicDataSource
     * @Author ZhangRF
     * @CreateDate 2020/6/17
     * @Description Dynamic routing data source
     */
    public class DynamicDataSource extends AbstractRoutingDataSource {
        private final Logger log = LoggerFactory.getLogger(getClass());
        private static Map<Object, Object> dynamicTargetDataSources = new ConcurrentHashMap<>();
        private static Map<Object, DynamicDataSourceTimer> timerMap = new ConcurrentHashMap<>();
    
        /**
         * Periodically release data sources that are no longer in use
         */
        @Scheduled(initialDelay = 10 * 60 * 1000, fixedRate = 10 * 60 * 1000)
        public void clearTask() {
            // Iterate timerMap and release data sources that have been idle too long
            clearIdleDDS();
        }
    
        /**
         * Determine the data source lookup key for the current thread
         *
         * @return
         */
        @Override
        protected Object determineCurrentLookupKey() {
            String datasource = DBContextHolder.getDataSource();
            if (datasource != null) {
                updateTimer(datasource);
            } else {
                datasource = "adiDataSource";
            }
    
            return datasource;
        }
    
        @Override
        public void setTargetDataSources(Map<Object, Object> targetDataSources) {
    
            super.setTargetDataSources(targetDataSources);
            this.dynamicTargetDataSources = targetDataSources;
            targetDataSources.forEach((k, v) -> {
                DruidDataSource dataSource = (DruidDataSource) v;
                timerMap.put(k, new DynamicDataSourceTimer(dataSource));
            });
    
        }
    
        /**
         * Dynamically add a data source
         *
         * @param dataSourceVO
         * @return
         */
        public synchronized boolean addDataSource(DataSourceVO dataSourceVO) throws Exception {
            try {
                Connection connection = null;
                try { // Rule out connection failures up front
                    Class.forName(dataSourceVO.getDatasourceDriver());
                    // Open a plain JDBC connection to verify the settings
                    connection = DriverManager.getConnection(dataSourceVO.getDatasourceUrl(), dataSourceVO.getDatasourceUsername(), dataSourceVO.getDatasourcePassword());
    
                } catch (Exception e) {
                    throw new BizException(401L, "数据源异常,请联系管理员");
                } finally {
                    if (connection != null && !connection.isClosed()) {
                        connection.close();
                    }
                }
                Map<Object, Object> dynamicTargetDataSources2 = this.dynamicTargetDataSources;
                if (dynamicTargetDataSources2.containsKey(String.valueOf(dataSourceVO.getTenantId()))) {
                    return true;
                }
                // Register the new data source
                Long tenantId = dataSourceVO.getTenantId();
                if (StringUtils.isEmpty(tenantId)) {
                    throw new BizException(401L, "数据源异常,请联系管理员");
                }
                DruidDataSource druidDataSource = new DruidDataSource();
                druidDataSource.setName(String.valueOf(dataSourceVO.getTenantId()));
                druidDataSource.setDriverClassName(dataSourceVO.getDatasourceDriver());
                druidDataSource.setUrl(dataSourceVO.getDatasourceUrl());
                druidDataSource.setUsername(dataSourceVO.getDatasourceUsername());
                druidDataSource.setPassword(dataSourceVO.getDatasourcePassword());
                druidDataSource.setTestOnBorrow(true); // Run validationQuery when a connection is borrowed; recommended true so that a dead connection is never handed out
                druidDataSource.setTestWhileIdle(true); // Recommended true; when borrowing, if the connection has been idle longer than timeBetweenEvictionRunsMillis, validationQuery is run to check that it is still valid
                druidDataSource.setFilters("stat"); // Comma-separated filter aliases. Common filters: stat (monitoring/statistics), log4j (logging), wall (SQL-injection defense).
                druidDataSource.setTimeBetweenEvictionRunsMillis(4000); // Interval between checks for idle connections to close, in milliseconds
                druidDataSource.setMinEvictableIdleTimeMillis(2000); // Minimum time a connection stays in the pool, in milliseconds
                druidDataSource.setKeepAlive(true); // With keepAlive enabled, idle connections within minIdle whose idle time exceeds minEvictableIdleTimeMillis run the validationQuery as a keep-alive, so they are not dropped by a firewall as long as minEvictableIdleTimeMillis is shorter than the firewall's idle cutoff
                druidDataSource.setRemoveAbandoned(true); // Reclaim leaked connections that exceed the time limit
                druidDataSource.setRemoveAbandonedTimeout(600); // Time after which a connection counts as leaked (must exceed the longest transaction), in seconds; 10 minutes here
                druidDataSource.setLogAbandoned(true); // Log when an abandoned connection is removed
                druidDataSource.init();
                this.dynamicTargetDataSources.put(String.valueOf(tenantId), druidDataSource); // Key must be a String so it matches the lookup key set by DBContextHolder
                this.setTargetDataSources(this.dynamicTargetDataSources); // Hand the map to the parent class as its targetDataSources
                super.afterPropertiesSet(); // Re-resolve targetDataSources into resolvedDataSources
                log.info(tenantId + "数据源初始化成功");
                return true;
            } catch (Exception e) {
                log.error(e + "");
                throw new BizException(401L, "数据源异常,请联系管理员");
            }
        }
    
        // Remove a data source
        public boolean delDatasources(String datasourceid) {
            Map<Object, Object> dynamicTargetDataSources2 = this.dynamicTargetDataSources;
            if (dynamicTargetDataSources2.containsKey(datasourceid)) {
                Set<DruidDataSource> druidDataSourceInstances = DruidDataSourceStatManager.getDruidDataSourceInstances();
                for (DruidDataSource l : druidDataSourceInstances) {
                    if (datasourceid.equals(l.getName())) {
                        dynamicTargetDataSources2.remove(datasourceid);
                        DruidDataSourceStatManager.removeDataSource(l);
                        setTargetDataSources(dynamicTargetDataSources2); // Hand the map to the parent class as its targetDataSources
                        super.afterPropertiesSet(); // Re-resolve targetDataSources into resolvedDataSources
                        return true;
                    }
                }
                return false;
            } else {
                return false;
            }
        }
    
        // Test whether a data source connection is valid
        public boolean testDatasource(DataSourceVO dataSourceVO) throws SQLException {
            Connection connection = null;
            try { // Rule out connection failures
                Class.forName(dataSourceVO.getDatasourceDriver());
                // Open a plain JDBC connection to verify the settings
                connection = DriverManager.getConnection(dataSourceVO.getDatasourceUrl(), dataSourceVO.getDatasourceUsername(), dataSourceVO.getDatasourcePassword());
                return true;
            } catch (Exception e) {
                return false;
            } finally {
                if (connection != null && !connection.isClosed()) {
                    connection.close();
                }
            }
        }
    
        /**
         * Create the tenant's data source if it is not registered yet
         *
         * @param dataSourceVO
         * @throws Exception
         */
        public void createDataSourceWithCheck(DataSourceVO dataSourceVO) throws Exception {
            String datasourceId = String.valueOf(dataSourceVO.getTenantId());
            DBContextHolder.setDataSource(datasourceId);
            Map<Object, Object> dynamicTargetDataSources2 = this.dynamicTargetDataSources;
            if (!dynamicTargetDataSources2.containsKey(datasourceId)) {
                // Test the connection and only register the data source when it is reachable
                if (testDatasource(dataSourceVO)) {
                    this.addDataSource(dataSourceVO);
                } else {
                    throw new BizException(401L, "数据源异常,请联系管理员");
                }
            }
    
        }
    
        private void clearIdleDDS() {
            timerMap.forEach((k, v) -> {
                if (v.checkAndClose()) {
                    delDatasources(k.toString());
                }
            });
        }
    
        private void updateTimer(String lookupKey) {
            // Refresh the last-access timestamp
            DynamicDataSourceTimer timer = timerMap.get(lookupKey);
            if (timer != null) {
                timer.refreshTime();
            }
        }
    
    }
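
    To make the flow concrete, a hypothetical caller (class name and connection values are illustrative only, assuming the same package) would register and switch to a tenant data source like this; the aspect in the next section automates exactly these two steps for every request.

    public class TenantDataSourceExample {

        private final DynamicDataSource dynamicDataSource;

        public TenantDataSourceExample(DynamicDataSource dynamicDataSource) {
            this.dynamicDataSource = dynamicDataSource;
        }

        public void useTenant(Long tenantId) throws Exception {
            // Hypothetical connection settings for the tenant
            DataSourceVO vo = new DataSourceVO();
            vo.setTenantId(tenantId);
            vo.setDatasourceDriver("com.mysql.cj.jdbc.Driver");
            vo.setDatasourceUrl("jdbc:mysql://localhost:3306/tenant_" + tenantId);
            vo.setDatasourceUsername("root");
            vo.setDatasourcePassword("change-me");

            // Registers a Druid pool under String.valueOf(tenantId) if it is not known yet
            dynamicDataSource.createDataSourceWithCheck(vo);
            // From here on, queries on this thread are routed to the tenant's database
            DBContextHolder.setDataSource(String.valueOf(tenantId));
        }
    }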

    4. The dynamic data source aspect: DynamicDataSourceAspect

    package tv.zhongchi.base.common.datasource;
    
    import com.alibaba.fastjson.JSONObject;
    import lombok.extern.slf4j.Slf4j;
    import org.apache.shiro.authz.AuthorizationException;
    import org.aspectj.lang.ProceedingJoinPoint;
    import org.aspectj.lang.annotation.Around;
    import org.aspectj.lang.annotation.Aspect;
    import org.springframework.beans.BeanUtils;
    import org.springframework.core.annotation.Order;
    import org.springframework.data.redis.core.RedisTemplate;
    import org.springframework.stereotype.Component;
    import org.springframework.util.StringUtils;
    import org.springframework.web.bind.annotation.RestControllerAdvice;
    import org.springframework.web.context.request.RequestContextHolder;
    import org.springframework.web.context.request.ServletRequestAttributes;
    import tv.zhongchi.base.user.mapper.DatasourceMapper;
    import tv.zhongchi.base.user.po.DatasourcePO;
    import tv.zhongchi.common.constants.RedisConstants;
    import tv.zhongchi.common.enums.ClientEnum;
    import tv.zhongchi.common.exception.BizException;
    import tv.zhongchi.common.util.HttpRequestUtil;
    import tv.zhongchi.common.util.JsonUtil;
    import tv.zhongchi.common.util.JwtUtil;
    import tv.zhongchi.common.util.StringUtil;
    import tv.zhongchi.common.vo.ResultVO;
    import tv.zhongchi.common.vo.query.EConditionOperator;
    import tv.zhongchi.common.vo.query.QueryConditions;
    
    import javax.annotation.Resource;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpSession;
    import java.util.concurrent.TimeUnit;
    
    /**
     * @ClassName DynamicDataSourceAspect
     * @Author ZhangRF
     * @CreateDate 2020/6/17
     * @Description Aspect that switches the dynamic data source per request
     */
    @Aspect
    @Component
    @Order(1)
    @Slf4j
    @RestControllerAdvice
    public class DynamicDataSourceAspect {
        @Resource
        private DynamicDataSource dynamicDataSource;
        @Resource
        private DatasourceMapper datasourceMapper;
        @Resource
        private RedisTemplate<String, Object> redisTemplate;
    
        private static final String TENANTID = "tenantId";
    
        @Around("execution(* tv.zhongchi.*.*..controller..*(..)) || execution(* tv.zhongchi.*.*.*(..))")
        public Object doAround(ProceedingJoinPoint jp) throws Throwable {
            ServletRequestAttributes sra = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
            Object result = null;
            ResultVO resultVO = ResultVO.fail();
            String key = RedisConstants.DATA_SOURCE_INFO_KEY_PREFIX;
            try {
                HttpServletRequest request = sra.getRequest();
                HttpSession session = sra.getRequest().getSession(true);
                String tenantId = (String) session.getAttribute("tenantId");
                if (StringUtil.isEmpty(tenantId)) {
                    tenantId = request.getParameter("tenantId");
                }
    
                if (!StringUtil.isEmpty(tenantId) && !tenantId.equals(String.valueOf(0))) {
                    // Create the data source for this tenant
                    key = key + tenantId;
                    DataSourceVO dataSourceVO = (DataSourceVO) redisTemplate.opsForValue().get(key);
                    if (dataSourceVO == null || dataSourceVO.getTenantId() == null) {
                        QueryConditions<DatasourcePO> datasourcePOQueryConditions = new QueryConditions<>();
                        datasourcePOQueryConditions.addCondition("tenant_id", EConditionOperator.EQULS, tenantId);
                        DatasourcePO datasourcePO = datasourceMapper.selectOne(datasourcePOQueryConditions.getQueryWrapper());
                        if (datasourcePO == null || datasourcePO.getId() == null) {
                            resultVO.setCode(401);
                            resultVO.setMessage("数据源不存在,请重新登陆");
                            result = resultVO;
                            return result;
                        } else {
                            dataSourceVO = new DataSourceVO();
                            BeanUtils.copyProperties(datasourcePO, dataSourceVO);
                            redisTemplate.opsForValue().set(key, dataSourceVO, 1, TimeUnit.DAYS); // Cache under the same key that is read above
                        }
                    }
    
                    try {
                        dynamicDataSource.createDataSourceWithCheck(dataSourceVO);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    // Switch the data source
                    DBContextHolder.setDataSource(String.valueOf(dataSourceVO.getTenantId()));
                    result = jp.proceed();
                } else {
                    try {
                        // Look up the user's tenant id from the token
                        String token = HttpRequestUtil.getToken();
                        String loginCode = JwtUtil.getLoginCode(token);
                        ClientEnum clientEnum = JwtUtil.getClientEnum(token);
                        Object o = redisTemplate.opsForValue().get(RedisConstants.USER_LOGIN_TOKEN_KEY_PREFIX + loginCode + clientEnum);
                        String s = JsonUtil.toJSONString(o);
                        JSONObject parse = (JSONObject) JSONObject.parse(s);
                        tenantId = parse.getString("tenantId");
                    } catch (Exception e) {
                        // Token missing or unparsable; tenantId stays empty and the default data source is used
                    }
    
                    if (StringUtils.isEmpty(tenantId) || tenantId.equals(String.valueOf(0)) || tenantId.equals(String.valueOf(1))) {
                        // When the tenant id is empty, 0 or 1, do not switch the data source
                        result = jp.proceed();
                    } else {
                        // Create the data source for this tenant
                        key = key + tenantId;
                        DataSourceVO dataSourceVO = (DataSourceVO) redisTemplate.opsForValue().get(key);
                        if (dataSourceVO == null || dataSourceVO.getTenantId() == null) {
                            QueryConditions<DatasourcePO> datasourcePOQueryConditions = new QueryConditions<>();
                            datasourcePOQueryConditions.addCondition("tenant_id", EConditionOperator.EQULS, tenantId);
                            DatasourcePO datasourcePO = datasourceMapper.selectOne(datasourcePOQueryConditions.getQueryWrapper());
                            if (datasourcePO == null || datasourcePO.getId() == null) {
                                resultVO.setCode(401);
                                resultVO.setMessage("数据源不存在,请重新登陆");
                                result = resultVO;
                                return result;
                            } else {
                                dataSourceVO = new DataSourceVO();
                                BeanUtils.copyProperties(datasourcePO, dataSourceVO);
                                redisTemplate.opsForValue().set(key, dataSourceVO, 1, TimeUnit.DAYS); // Cache under the same key that is read above
                            }
                        }
    
                        try {
                            dynamicDataSource.createDataSourceWithCheck(dataSourceVO);
                        } catch (Exception e) {
                            e.printStackTrace();
                            throw new BizException(401L, "数据源异常,请联系管理员");
                        }
                        // Switch the data source
                        DBContextHolder.setDataSource(String.valueOf(dataSourceVO.getTenantId()));
                        result = jp.proceed();
                    }
                }
            } catch (BizException bizException) {
                throw bizException;
            } catch (AuthorizationException authorizationException) {
                authorizationException.printStackTrace();
                log.error("没有权限异常", authorizationException);
                throw new BizException(402L, "没有权限,请联系管理员");
            } catch (Exception ex) {
                ex.printStackTrace();
                Throwable cause = ex.getCause();
                if (!StringUtils.isEmpty(cause) && !StringUtils.isEmpty(cause.getMessage())) {
                    String message = cause.getMessage();
                    if (message.equals("没有权限,请联系管理员") || message.equals("token失效,请重新登录") ||
                            message.equals("数据源异常,请联系管理员")) {
                        throw cause;
                    }
                }
                log.error("sql异常", ex);
                result = ResultVO.fail("系统异常,请联系管理员!");
            } finally {
                // Clear the ThreadLocal so the pooled thread does not carry this tenant's key into the next request
                DBContextHolder.clearDataSource();
            }
            return result;
        }
    
    }

    5. The dynamic data source timer: DynamicDataSourceTimer

    package tv.zhongchi.base.common.datasource;
    
    import javax.sql.DataSource;
    
    /**
     * @ClassName DynamicDataSourceTimer
     * @Author ZhangRF
     * @CreateDate 2020/7/16
     * @Description Idle timer for a dynamic data source
     */
    public class DynamicDataSourceTimer {
    
        /**
         * Idle period. A data source that has not been accessed for longer than this is released. Defaults to 5 minutes.
         */
        private static long idlePeriodTime = 5 * 60 * 1000;
    
        /**
         * The monitored data source
         */
        private DataSource dds;
    
        /**
         * Time of the last access
         */
        private long lastUseTime;
    
        public DynamicDataSourceTimer(DataSource dds) {
            this.dds = dds;
            this.lastUseTime = System.currentTimeMillis();
        }
    
        /**
         * Refresh the last-access time
         */
        public void refreshTime() {
            lastUseTime = System.currentTimeMillis();
        }
    
        /**
         * Check whether the data source has been idle past the threshold and should be closed.
         *
         * @return true if idle past the threshold; false otherwise
         */
        public boolean checkAndClose() {
            return System.currentTimeMillis() - lastUseTime > idlePeriodTime;
        }
    
    }
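
    Note that clearTask() in DynamicDataSource is annotated with @Scheduled, which Spring only processes when scheduling is enabled. If the project does not already enable it elsewhere, a minimal sketch (the class name is hypothetical) would be:

    import org.springframework.context.annotation.Configuration;
    import org.springframework.scheduling.annotation.EnableScheduling;

    /**
     * Enables processing of @Scheduled methods such as DynamicDataSource.clearTask().
     */
    @Configuration
    @EnableScheduling
    public class SchedulingConfig {
    }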

    6. DataSourceVO

    package tv.zhongchi.base.common.datasource;
    
    import lombok.Data;
    
    import java.io.Serializable;
    import java.util.Date;
    
    /**
     * @ClassName DataSourceVO
     * @CreateDate 2020-06-16 09:26:26
     * @Author ZhangRF
     * @Description Tenant data source
     */
    @Data
    public class DataSourceVO implements Serializable {
        private static final long serialVersionUID = 1L;
    
        /**
         * Primary key
         */
        private Long id;
        /**
         * Tenant id
         */
        private Long tenantId;
        /**
         * Data source URL
         */
        private String datasourceUrl;
        /**
         * Data source username
         */
        private String datasourceUsername;
        /**
         * Data source password
         */
        private String datasourcePassword;
        /**
         * Data source driver class
         */
        private String datasourceDriver;
        /**
         * Enabled flag (1 = yes, 0 = no)
         */
        private Integer statusId;
        /**
         * Deleted flag (0 = no, 1 = yes)
         */
        private Integer isDelete;
        /**
         * Remarks
         */
        private String remark;
        /**
         * Created by
         */
        private String createUser;
        /**
         * Creation time
         */
        private Date gmtCreate;
        /**
         * Last modified by
         */
        private String modifiedUser;
        /**
         * Last modified time
         */
        private Date gmtModified;
    
    }
  • Original article: https://www.cnblogs.com/zhangrongfei/p/14807707.html