mybatis-plus集成shardingsphere

/ 后端 / 没有评论 / 574浏览


仅分表不分库:

一.配置文件方式:

1.配置文件:

application.yml

spring:
  profiles:
    include: mybatis,sharding

application-mybatis.yml 

#spring:
#  datasource:
#    driver-class-name: com.mysql.cj.jdbc.Driver
#    url: jdbc:mysql://127.0.0.1/shading?characterEncoding=UTF-8&allowMultiQueries=true&useSSL=true
#    username: root
#    password: 123456
#    type: com.zaxxer.hikari.HikariDataSource
mybatis-plus:
  configuration.map-underscore-to-camel-case: true
  mapperLocations: classpath*:mapper/*.xml
  # global-config:
  #   db-config:
  #     logic-delete-value: -1
  #     logic-not-delete-value: 0
  configuration:
    log-impl: org.apache.ibatis.logging.stdout.StdOutImpl

 application-sharding.properties

spring.shardingsphere.datasource.names=ds
spring.shardingsphere.datasource.ds.type=com.zaxxer.hikari.HikariDataSource
spring.shardingsphere.datasource.ds.driver-class-name=com.mysql.jdbc.Driver
spring.shardingsphere.datasource.ds.jdbc-url=jdbc:mysql://127.0.0.1/shading?characterEncoding=UTF-8&allowMultiQueries=true&useSSL=true
spring.shardingsphere.datasource.ds.username=root
spring.shardingsphere.datasource.ds.password=123456

展示sql打印

spring.shardingsphere.props.sql.show=true

自定义分表算法

#spring.shardingsphere.sharding.tables.user.table-strategy.standard.sharding-column=id
#spring.shardingsphere.sharding.tables.user.table-strategy.standard.precise-algorithm-class-name=com.example.MyCustomShardingAlgorithm

分表配置

spring.shardingsphere.sharding.tables.student.actual-data-nodes=ds.student_$->{0..1}

inline 表达式 (id类型转换 -> id.longValue() -> user_${id.longValue() % 4})

spring.shardingsphere.sharding.tables.student.table-strategy.inline.sharding-column=id
spring.shardingsphere.sharding.tables.student.table-strategy.inline.algorithm-expression=student_$->{id % 2}
#spring.shardingsphere.sharding.tables.user.key-generator.column=id
#spring.shardingsphere.sharding.tables.user.key-generator.type=SNOWFLAKE

或者

application-sharding.yml

spring:
  shardingsphere:
    props:
      sql.show: true
    datasource:
      names: ds0
      ds0:
        type: com.zaxxer.hikari.HikariDataSource
        driver-class-name: com.mysql.jdbc.Driver
        jdbc-url: jdbc:mysql://127.0.0.1/shading?characterEncoding=UTF-8&allowMultiQueries=true&useSSL=true
        username: root
        password: 123456
    sharding:
      tables:
        student:  #t_user表
          key-generator-column-name: id  #主键
          actual-data-nodes: ds0.student_${0..1}  #数据节点 ds${0..1}.student_${0..1}
          # database-strategy:  #分库策略
          #   inline:
          #     sharding-column: id
          #     algorithm-expression: ds${id % 2}
          table-strategy:  #分表策略
            inline:
              sharding-column: id
              algorithm-expression: student_${id % 2}

2.pom

       <dependency>
            <groupId>org.apache.shardingsphere</groupId>
            <artifactId>sharding-jdbc-spring-boot-starter</artifactId>
            <version>4.0.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.shardingsphere</groupId>
            <artifactId>sharding-jdbc-spring-namespace</artifactId>
            <version>4.0.1</version>
        </dependency>


        <dependency>
            <groupId>com.baomidou</groupId>
            <artifactId>mybatis-plus-boot-starter</artifactId>
            <version>3.1.0</version>
        </dependency>

3.遇到的坑:

当mybatis-plus版本高于3.1.0时,会报错

org.springframework.dao.InvalidDataAccessApiUsageException: Error attempting to get column 'create_time' from result set.  Cause: java.sql.SQLFeatureNotSupportedException: getObject with type
; getObject with type; nested exception is java.sql.SQLFeatureNotSupportedException: getObject with type

是时间字段使用了LocalDateTime;以前mybatis-plus集成druid时,有的版本就会出现错误,需要提升版本;这次使用的HikariDataSource,依然出问题;

当不使用sharding时不报错,初步判断问题出在mybatis-plus的版本和sharding-jdbc上;

官方解释了因为集成druid的问题:

Error attempting to get column 'create_time' from result set. Cause: java.sql.SQLFeatureNotSupportedException

3.1.0之前版本没问题,针对3.1.1以及后续版本出现上述问题

现象: 集成druid数据源,使用3.1.0之前版本没问题,升级mp到3.1.1+后,运行时报错:java.sql.SQLFeatureNotSupportedException

原因: mp3.1.1+使用了新版jdbc,LocalDateTime等新日期类型处理方式升级,但druid在1.1.21版本之前不支持,参考issue

解决方案: 1. 升级druid到1.1.21解决这个问题;2.保持mp版本3.1.0;3.紧跟mp版本,换掉druid数据源

所以应该也是因为mybatis-plus升级后的版本使用了最新jdbc驱动,而sharding-jdbc没有支持,所以报错;

二.javaconfig方式:

1.pom区别,引入非starter的boot依赖

         <dependency>
            <groupId>org.apache.shardingsphere</groupId>
            <artifactId>sharding-jdbc-spring-namespace</artifactId>
            <version>4.0.1</version>
        </dependency>

   
        <dependency>
            <groupId>io.shardingsphere</groupId>
            <artifactId>sharding-core</artifactId>
            <version>3.1.0</version>
        </dependency>

2.配置参数:

package com.shadingtest.shadingjdbctest.config;

import com.zaxxer.hikari.HikariDataSource;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.ShardingStrategyConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.StandardShardingStrategyConfiguration;
import org.apache.shardingsphere.api.sharding.standard.PreciseShardingAlgorithm;
import org.apache.shardingsphere.api.sharding.standard.PreciseShardingValue;
import org.apache.shardingsphere.shardingjdbc.api.ShardingDataSourceFactory;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.sql.DataSource;
import java.lang.management.ManagementFactory;
import java.sql.SQLException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;


@Configuration
public class ShadingTestConfig {

    /**
     * Builds the ShardingSphere {@link DataSource} that wraps the single
     * physical data source "ds0" and routes logic table "student" to the
     * physical tables student_0 / student_1.
     *
     * @return the sharding-aware data source registered as "shardingDataSource"
     * @throws SQLException if ShardingSphere fails to create the data source
     */
    @Bean("shardingDataSource")
    DataSource getShardingDataSource() throws SQLException {
        ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
        // Tables without an explicit rule fall back to this data source.
        shardingRuleConfig.setDefaultDataSourceName("ds0");

        // Example database-sharding strategies (disabled: single-database setup):
        //  shardingRuleConfig.setDefaultDatabaseShardingStrategyConfig(new InlineShardingStrategyConfiguration("id", "ds${user_id % 2}"));
        //shardingRuleConfig.setDefaultDatabaseShardingStrategyConfig(new InlineShardingStrategyConfiguration("id", "ds0"));

        // Logic table "student" -> actual data nodes ds0.student_0, ds0.student_1.
        TableRuleConfiguration studentTableRule = new TableRuleConfiguration("student", "ds0.student_${0..1}");
        studentTableRule.setTableShardingStrategyConfig(getShardingStrategyConfiguration());
        shardingRuleConfig.getTableRuleConfigs().add(studentTableRule);

        // See ShardingPropertiesConstant for the available options.
        Properties properties = new Properties();
        // Use setProperty with a String value: java.util.Properties#getProperty
        // returns null for non-String values, so put("sql.show", true) (Boolean)
        // could be silently ignored by consumers reading via getProperty.
        properties.setProperty("sql.show", Boolean.TRUE.toString());
        return ShardingDataSourceFactory.createDataSource(createDataSourceMap(), shardingRuleConfig, properties);
    }

    /**
     * Standard (precise) table-sharding strategy on column "id":
     * target table = logicTableName + "_" + (id % 2).
     */
    private ShardingStrategyConfiguration getShardingStrategyConfiguration() {
        // Precise matching (equality lookups)
        PreciseShardingAlgorithm<Long> preciseShardingAlgorithm = new PreciseShardingAlgorithm<Long>() {

            @Override
            public String doSharding(Collection<String> collection, PreciseShardingValue<Long> preciseShardingValue) {
                String prefix = preciseShardingValue.getLogicTableName();
                Long id = preciseShardingValue.getValue();
                long index = id % 2;
                // e.g. "student" + "_" + 0 = "student_0"
                String tableName = prefix + "_" + index;
                // For precise queries/updates, falling back to an existing table
                // avoids routing to a non-existent one; alternatively one could
                // raise an error to surface the problem to the caller.
                if (!collection.contains(tableName)) {
                    //  LogUtils.error(log, "PreciseSharding", "orderId:{}, no matching table {}!", orderId, tableName);
                    return collection.iterator().next();
                }
                return tableName;
            }
        };
        // Range matching (disabled example kept for reference)
        //  RangeShardingAlgorithm<Long> rangeShardingAlgorithm = new RangeShardingAlgorithm<Long>() {
        //      @Override
        //      public Collection<String> doSharding(Collection<String> availableTargetNames, RangeShardingValue<Long> shardingValue) {
        //          String prefix = shardingValue.getLogicTableName();
        //          Collection<String> resList = new ArrayList<>();

        //          Range<Long> valueRange = shardingValue.getValueRange();
        //          if (!valueRange.hasLowerBound() || !valueRange.hasUpperBound()) {
        //              return availableTargetNames;
        //          }
        //          long lower = shardingValue.getValueRange().lowerEndpoint();
        //          BoundType lowerBoundType = shardingValue.getValueRange().lowerBoundType();
        //          long upper = shardingValue.getValueRange().upperEndpoint();
        //          BoundType upperBoundType = shardingValue.getValueRange().upperBoundType();
        //          long startValue = lower;
        //          long endValue = upper;
        //          if (lowerBoundType.equals(BoundType.OPEN)) {
        //              startValue++;
        //          }
        //          if (upperBoundType.equals(BoundType.OPEN)) {
        //              endValue--;
        //          }

        //          for (long i = startValue; i <= endValue; i++) {
        //              long index = i % 2;
        //              String res = prefix + "" + index;
        //              if (availableTargetNames.contains(res) == false) {
        //                //  LogUtils.error(log, "RangeSharding", "orderId:{}, no matching table {}!", i, res);
        //                  resList.add(res);
        //              }
        //          }
        //          if (resList.size() == 0) {
        //             // LogUtils.error(log, "RangeSharding", "no table resolved, querying all tables! orderId range: {} to {}", startValue, endValue);
        //              return availableTargetNames;
        //          }
        //          return resList;
        //      }
        //  };
        return new StandardShardingStrategyConfiguration("id", preciseShardingAlgorithm);
    }

    /**
     * Physical data sources keyed by name; only "ds0" (Hikari) here.
     */
    Map<String, DataSource> createDataSourceMap() {
        Map<String, DataSource> result = new HashMap<>();
        HikariDataSource hikariDataSource = DataSourceBuilder.create().
                driverClassName("com.mysql.cj.jdbc.Driver").
                url("jdbc:mysql://127.0.0.1/shading?characterEncoding=UTF-8&allowMultiQueries=true&useSSL=true").
                password("123456").
                username("root").
                type(HikariDataSource.class).
                build();
        result.put("ds0", hikariDataSource);
        return result;
    }

    // private KeyGeneratorConfiguration getKeyGeneratorConfigurationForTOrder() {
    //     Properties keyGeneratorProp = getKeyGeneratorProperties();
    //     return new KeyGeneratorConfiguration("SNOWFLAKE", "id", keyGeneratorProp);
    // }

    // private Properties getKeyGeneratorProperties() {
    //     Properties keyGeneratorProp = new Properties();
    //     String distributeProcessIdentify = NetUtils.getLocalAddress() + ":" + getProcessId();
    //     String workId = String.valueOf(convertString2Long(distributeProcessIdentify));
    //     keyGeneratorProp.setProperty("worker.id", workId);
    //     LogUtils.info(log, "shardingsphere init", "shardingsphere work id raw string is {}, work id is {}", distributeProcessIdentify, workId);
    //     return keyGeneratorProp;
    // }

    //   private KeyGeneratorConfiguration getKeyGeneratorConfigurationForTOrderItem() {
    //       Properties keyGeneratorProp = getKeyGeneratorProperties();
    //       return new KeyGeneratorConfiguration("SNOWFLAKE", "id", keyGeneratorProp);
    //   }

    /**
     * Extracts this JVM's process id from the RuntimeMXBean name ("pid@host").
     */
    private String getProcessId() {
        String name = ManagementFactory.getRuntimeMXBean().getName();
        return name.split("@")[0];
    }

    /**
     * Derives a value in [0, 1024) from the given string, e.g. as a snowflake
     * worker id. NOTE: mixes in System.currentTimeMillis(), so the result is
     * not stable across invocations.
     */
    private Long convertString2Long(String str) {
        long hashCode = str.hashCode() + System.currentTimeMillis();
        // Math.floorMod always returns a non-negative result for a positive
        // modulus, and is correct even for Long.MIN_VALUE — the original
        // "if (h < 0) h = -h" stays negative for Long.MIN_VALUE, making the
        // subsequent % produce a negative worker id.
        return Math.floorMod(hashCode, 1L << 10);
    }
}