Background
Requirement: for an imported Excel workbook, read every sheet it contains and generate one database table per sheet.
An Excel workbook may contain one or more sheets.
Preface
This article shows how to dynamically create tables, dynamically create table columns, and populate the tables with data.
1. Dependencies
<!-- EasyExcel, for reading the workbook -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>easyexcel</artifactId>
    <version>3.0.5</version>
</dependency>
<!-- MyBatis pagination plugin -->
<dependency>
    <groupId>com.github.pagehelper</groupId>
    <artifactId>pagehelper-spring-boot-starter</artifactId>
    <version>1.4.5</version>
</dependency>
<!-- fastjson -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.70</version>
</dependency>
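The service and mapper code below also relies on MyBatis-Plus (BaseMapper, QueryWrapper, IService.save), which is not in the list above. If your project does not already include it, something like the following dependency is needed; the version shown is only an assumption, pick the one that matches your Spring Boot version.

<!-- MyBatis-Plus starter (assumed version; required by the BaseMapper/QueryWrapper code below) -->
<dependency>
    <groupId>com.baomidou</groupId>
    <artifactId>mybatis-plus-boot-starter</artifactId>
    <version>3.5.2</version>
</dependency>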
2. The database catalog table
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------
-- Table structure for sys_data_directory
-- ----------------------------
DROP TABLE IF EXISTS `sys_data_directory`;
CREATE TABLE `sys_data_directory` (
  `id` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '主键',
  `data_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '数据集、源别名',
  `parent_id` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT '0' COMMENT '父节点默认为0',
  `distinctid_isn` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '行政区划内码',
  `distinctid` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '行政区划',
  `distinctid_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '行政区划名称',
  `distinctid_all_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '行政区划全名',
  `create_time` datetime NULL DEFAULT NULL COMMENT '创建时间',
  `update_time` datetime NULL DEFAULT NULL COMMENT '修改时间',
  `create_user_id` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '创建人id',
  `create_user_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '创建人名称',
  `update_user_id` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '修改人id',
  `update_user_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '修改人名称',
  `table_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '导入excel中sheet为表名、表名',
  `sort` int(11) NULL DEFAULT 0 COMMENT '排序',
  `data_desc` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '数据源描述',
  `data_type` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '01民政、02公安、03对比结果、99其他',
  `address_type` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '01门楼址、02户室址',
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = DYNAMIC;

SET FOREIGN_KEY_CHECKS = 1;
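The entity class SysDataDirectory that maps this table is referenced throughout the code below but is not shown in the original article. A minimal sketch, assuming Lombok plus MyBatis-Plus annotations and an assigned UUID primary key (the field list mirrors the DDL above; this is not the original class):

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

import java.io.Serializable;
import java.util.Date;

// Assumed sketch of the entity behind sys_data_directory.
@Data
@TableName("sys_data_directory")
public class SysDataDirectory implements Serializable {

    private static final long serialVersionUID = 1L;

    // varchar(64) primary key, filled by MyBatis-Plus on insert
    @TableId(type = IdType.ASSIGN_UUID)
    private String id;

    private String dataName;
    private String parentId;
    private String distinctidIsn;
    private String distinctid;
    private String distinctidName;
    private String distinctidAllName;
    private Date createTime;
    private Date updateTime;
    private String createUserId;
    private String createUserName;
    private String updateUserId;
    private String updateUserName;
    private String tableName;
    private Integer sort;
    private String dataDesc;
    private String dataType;
    private String addressType;
}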
3. Importing the Excel file
1. Code
1.1 Controller layer
@Api(tags = "数据源集合")
@RestController
@RequestMapping("/hvit/dataSet/")
public class SysDataDirectoryController {

    @Autowired
    private SysDataDirectoryDataService sysDataDirectoryDataService;

    @RequestMapping(value = "/importExcel", method = RequestMethod.POST, headers = "content-type=multipart/form-data")
    @ApiOperation(value = "导入文件型数据源")
    public ResponseEntity importExcel(@RequestParam(value = "file") MultipartFile file, ExcelReq excelReq) throws IOException {
        return ResponseEntity.ok(sysDataDirectoryDataService.importExcel(file, excelReq));
    }
}
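A quick way to exercise the endpoint is a multipart request. A minimal sketch using Spring's MockMvc; the test class name, the workbook file demo.xlsx, and the parameter values are illustrative only:

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.test.web.servlet.MockMvc;

import java.nio.file.Files;
import java.nio.file.Paths;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@SpringBootTest
@AutoConfigureMockMvc
class ImportExcelTest {

    @Autowired
    private MockMvc mockMvc;

    @Test
    void importExcel() throws Exception {
        // demo.xlsx is a placeholder workbook containing one or more sheets
        MockMultipartFile file = new MockMultipartFile(
                "file", "demo.xlsx",
                "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
                Files.readAllBytes(Paths.get("demo.xlsx")));

        mockMvc.perform(multipart("/hvit/dataSet/importExcel")
                        .file(file)
                        .param("id", "0")             // parent node id
                        .param("dataType", "99")      // data source type
                        .param("anotherName", "测试数据集")
                        .param("addressType", "01"))
                .andExpect(status().isOk());
    }
}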
1.2 The ExcelReq request class
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.io.Serializable;

@Data
public class ExcelReq implements Serializable {

    private static final long serialVersionUID = 1L;

    @ApiModelProperty(name = "id", value = "父节点id")
    private String id;

    @ApiModelProperty(name = "dataType", value = "数据来源类型")
    private String dataType;

    @ApiModelProperty(name = "anotherName", value = "别名")
    private String anotherName;

    @ApiModelProperty(name = "addressType", value = "地址类型")
    private String addressType;
}
1.3 Service layer
/***
 * Import a file-based data source.
 * @param file the uploaded Excel file
 * @param excelReq import parameters (parent node id, data type, alias, address type)
 * @return
 */
@Transactional
public R importExcel(MultipartFile file, ExcelReq excelReq) throws IOException {
    if (file == null) {
        return R.error("请上传文件!");
    }
    // Look up the current user
    SysUser sysUser = sysUserService.findByUserName(getCurrentLoginUserName());
    String fileName = file.getOriginalFilename().substring(0, file.getOriginalFilename().lastIndexOf("."));
    // File suffix
    String fileSuffix = file.getOriginalFilename().substring(file.getOriginalFilename().lastIndexOf("."));
    if (!Constant.EXCEL_XLSX.equals(fileSuffix) && !Constant.EXCEL_XLS.equals(fileSuffix)) {
        return R.error("请导入excel类型文件!");
    }
    // 1. Use the file name (or the supplied alias) as the name of the catalog record
    String directoryName = StringUtils.isEmpty(excelReq.getAnotherName()) ? fileName : excelReq.getAnotherName();
    // Reject duplicate names under the same parent node for the same user
    QueryWrapper<SysDataDirectory> wrapper = new QueryWrapper<>();
    wrapper.lambda().eq(SysDataDirectory::getCreateUserId, sysUser.getId());
    wrapper.lambda().eq(SysDataDirectory::getDataName, directoryName);
    wrapper.lambda().eq(SysDataDirectory::getParentId, excelReq.getId());
    int count = sysDataDirectoryService.count(wrapper);
    if (count > 0) {
        return R.error("数据源集合有重名!");
    }
    // Create the parent record and get its primary key
    String uuid = createDataSets(directoryName, null, excelReq.getId(), sysUser, excelReq.getDataType(), excelReq.getAddressType());
    // Read every sheet of the workbook; the listener creates one table per sheet and fills it
    EasyExcel.read(file.getInputStream(),
            new ConfigFilterListener(sysDataDirectoryMapper, uuid, sysDataDirectoryService,
                    excelReq.getDataType(), sysUser, excelReq.getAddressType()))
            .doReadAll();
    return R.ok();
}
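The Constant.EXCEL_XLSX / Constant.EXCEL_XLS checks above compare against the file suffix including the dot. The original Constant class is not shown, so the following is only an assumed sketch of what it must contain for the check to work:

public class Constant {
    // Compared against file.getOriginalFilename().substring(lastIndexOf(".")),
    // so the values must include the leading dot (assumed values).
    public static final String EXCEL_XLSX = ".xlsx";
    public static final String EXCEL_XLS = ".xls";
}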
1.4 The response class R
import java.util.HashMap;
import java.util.Map;

/**
 * General response wrapper.
 *
 * @author cz
 * @date 2022-11-10
 */
public class R extends HashMap<String, Object> {

    private static final long serialVersionUID = 1L;

    public R() {
        put("code", 200);
    }

    public R(Integer code) {
        put("code", code);
        put("data", new HashMap<String, Object>());
    }

    public R(Integer code, String msg) {
        put("code", code);
        put("msg", msg);
        put("data", new HashMap<String, Object>());
    }

    public static R error() {
        return error(500, "未知异常,请联系管理员");
    }

    public static R errorDebug(String message) {
        return error(500, "未知异常 " + message + ",请联系管理员");
    }

    public static R error(String msg) {
        return error(500, msg);
    }

    public static R error(int code, String msg) {
        R r = new R();
        r.put("code", code);
        r.put("msg", msg);
        return r;
    }

    public R errorInfo(String msg) {
        this.put("errorMsg", msg);
        return this;
    }

    public static R ok(String msg) {
        R r = new R();
        r.put("msg", msg);
        r.put("data", new HashMap<String, Object>());
        return r;
    }

    public static R ok(Map<String, Object> map) {
        R r = new R();
        r.putAll(map);
        r.put("data", new HashMap<String, Object>());
        return r;
    }

    public static R ok() {
        return new R().put("msg", "success").put("data", new HashMap<String, Object>());
    }

    public static R ok(Integer size) {
        return new R().put("data", new HashMap<String, Object>((int) Math.round(size / 0.75)));
    }

    @Override
    public R put(String key, Object value) {
        super.put(key, value);
        return this;
    }

    /**
     * Add an entry to the "data" map of the response.
     *
     * @param key
     * @param value
     * @return
     */
    public R putData(String key, Object value) {
        Map<String, Object> map = (HashMap<String, Object>) this.get("data");
        map.put(key, value);
        return this;
    }
}
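For reference, a typical way the wrapper is used (values are illustrative):

public class RUsageExample {
    public static void main(String[] args) {
        // Success with payload, prints something like {code=200, msg=success, data={tableCount=3}}
        // (key order is not guaranteed because R extends HashMap)
        R ok = R.ok().putData("tableCount", 3);
        System.out.println(ok);

        // Failure, prints something like {code=500, msg=请导入excel类型文件!}
        R err = R.error("请导入excel类型文件!");
        System.out.println(err);
    }
}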
1.5 The EasyExcel listener ConfigFilterListener
import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.metadata.data.ReadCellData;
import com.alibaba.excel.read.listener.ReadListener;
import com.hvit.data_governance.dataComparison.dao.SysDataDirectoryMapper;
import com.hvit.data_governance.dataComparison.entity.SysDataDirectory;
import com.hvit.data_governance.dataComparison.service.SysDataDirectoryService;
import com.hvit.data_governance.system.entity.SysUser;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;

import javax.annotation.Resource;
import java.util.*;
import java.util.stream.Collectors;

@Slf4j
public class ConfigFilterListener implements ReadListener<LinkedHashMap<String, String>> {

    /**
     * Flush to the database every BATCH_COUNT rows and then clear the list,
     * so tens of thousands of rows are never held in memory at once (avoids OOM).
     */
    private static final int BATCH_COUNT = 1000;

    private String uuid;
    private String dataType;
    private String tableName;    // table name (taken from the sheet name)
    private String columnNames;  // comma-separated column names of the current sheet
    private Integer sort = 0;    // sort order of the generated tables
    private String addressType;  // address type
    private List<LinkedHashMap<String, String>> dataSetList = new ArrayList<>();

    // The listener is created with new, so these are passed in through the constructor
    // rather than injected by Spring.
    @Resource
    private SysDataDirectoryMapper sysDataDirectoryMapper;
    @Autowired
    SysDataDirectoryService sysDataDirectoryService;

    private SysUser sysUser;

    // Constructor
    public ConfigFilterListener(SysDataDirectoryMapper sysDataDirectoryMapper, String uuid,
                                SysDataDirectoryService sysDataDirectoryService, String dataType,
                                SysUser sysUser, String addressType) {
        this.uuid = uuid;
        this.dataType = dataType;
        this.sysUser = sysUser;
        this.addressType = addressType;
        this.sysDataDirectoryMapper = sysDataDirectoryMapper;
        this.sysDataDirectoryService = sysDataDirectoryService;
    }

    /**
     * Called once for every parsed data row.
     */
    @SneakyThrows
    @Override
    public void invoke(LinkedHashMap<String, String> linkedHashMap, AnalysisContext analysisContext) {
        //log.info("解析到一条数据:{}", linkedHashMap);
        LinkedHashMap<String, String> map = new LinkedHashMap<>();
        map.put("uuid", UUID.randomUUID().toString());
        Set set = linkedHashMap.keySet();
        Iterator iterator = set.iterator();
        while (iterator.hasNext()) {
            Object next = iterator.next();
            map.put(next.toString(), linkedHashMap.get(next));
        }
        dataSetList.add(map);
        // Once BATCH_COUNT rows have accumulated, flush them to the database
        // so a large sheet does not pile up in memory.
        if (dataSetList.size() >= BATCH_COUNT) {
            // Build the INSERT statement prefix
            StringBuffer sb = new StringBuffer("insert into ");
            sb.append(this.tableName + " (");
            sb.append(this.columnNames + " )");
            batchInsert(sb.toString(), dataSetList);
            // Clear the list after the batch has been stored
            dataSetList.clear();
        }
    }

    /**
     * Called after all rows of a sheet have been parsed.
     *
     * @param analysisContext
     */
    @SneakyThrows
    @Override
    public void doAfterAllAnalysed(AnalysisContext analysisContext) {
        log.info("进入入库操作--->");
        if (dataSetList.size() > 0) {
            // Build the INSERT statement prefix
            StringBuffer sb = new StringBuffer("insert into ");
            sb.append(this.tableName + " (");
            sb.append(this.columnNames + " )");
            // Store the remaining rows so the tail of the sheet also reaches the database
            batchInsert(sb.toString(), dataSetList);
            dataSetList.clear();
            log.info("所有数据解析完成!");
        }
    }

    /***
     * Read the header row of the sheet and create the corresponding table.
     * @param headMap
     * @param context
     */
    @Override
    public void invokeHead(Map<Integer, ReadCellData<?>> headMap, AnalysisContext context) {
        try {
            // Clear the result of the previous sheet before processing the next one
            this.columnNames = null;
            // The current sheet's name becomes the table name
            String tableName = context.readSheetHolder().getSheetName();
            int tableCount = sysDataDirectoryMapper.existsTable(tableName);
            List<String> heads = new ArrayList<>();
            heads.add("uuid");
            // If a table with this name already exists, append a timestamp suffix
            if (tableCount > 0) {
                tableName += "_" + System.currentTimeMillis();
            }
            StringBuffer createTableStr = new StringBuffer("CREATE TABLE ");
            createTableStr.append(tableName);
            createTableStr.append(" (uuid varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,");
            Collection<ReadCellData<?>> values = headMap.values();
            for (ReadCellData<?> value : values) {
                createTableStr.append(value.getStringValue() + " varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL,");
                heads.add(value.getStringValue());
            }
            createTableStr.append("PRIMARY KEY (`uuid`) USING BTREE)");
            int updateCount = sysDataDirectoryMapper.createTable(createTableStr.toString());
            if (updateCount != 0) {
                throw new RuntimeException("创建数据库表失败!");
            }
            // After the table has been created, insert a catalog record for it
            createDataSets(tableName, tableName, uuid, sysUser, dataType, sort, addressType);
            this.tableName = tableName;
            this.columnNames = heads.stream().collect(Collectors.joining(","));
            sort++;
        } catch (Exception ex) {
            // NOTE: exceptions are currently swallowed; enable the lines below to roll back and surface the error.
            //sysDataDirectoryService.removeById(uuid);
            //throw new RuntimeException("导入失败!请联系管理员!");
        }
    }

    @Override
    public boolean hasNext(AnalysisContext context) {
        return true;
    }

//    /**
//     * Called on a conversion or other exception. Throwing here stops the read;
//     * returning normally continues with the next row.
//     *
//     * @param exception
//     * @param context
//     * @throws Exception
//     */
//    @Override
//    public void onException(Exception exception, AnalysisContext context) {
//        log.error("解析失败,但是继续解析下一行:{}", exception.getMessage());
//        // A cell conversion exception carries the exact row and column index;
//        // combine with invokeHeadMap if the header information is also needed.
//        if (exception instanceof ExcelDataConvertException) {
//            ExcelDataConvertException excelDataConvertException = (ExcelDataConvertException) exception;
//            log.error("第{}行,第{}列解析异常", excelDataConvertException.getRowIndex(),
//                    excelDataConvertException.getColumnIndex());
//        }
//    }

    // @Async has no effect here because the listener is not a Spring-managed bean;
    // the call runs synchronously.
    @Async
    public void batchInsert(String tableString, List<LinkedHashMap<String, String>> list) throws Exception {
        sysDataDirectoryMapper.insertTableData(tableString, list);
        log.info("存储数据库成功!");
    }

    /***
     * Create a catalog (directory) record for a generated table.
     * @param directoryName data source alias
     * @param tableName table name
     * @param parentId parent node id (0 for the root node)
     * @return the id of the new record
     */
    public String createDataSets(String directoryName, String tableName, String parentId, SysUser sysUser,
                                 String dataType, Integer sort, String addressType) {
        SysDataDirectory sysDataDirectory = new SysDataDirectory();
        sysDataDirectory.setDataName(directoryName);
        sysDataDirectory.setParentId(parentId);
        sysDataDirectory.setCreateTime(new Date());
        sysDataDirectory.setCreateUserId(sysUser.getId().toString());
        sysDataDirectory.setCreateUserName(sysUser.getUserName());
        sysDataDirectory.setDistinctid(sysUser.getDistinctid());
        sysDataDirectory.setDistinctidAllName(sysUser.getDistinctidAllName());
        sysDataDirectory.setDistinctidIsn(sysUser.getDistinctidIsn());
        sysDataDirectory.setDistinctidName(sysUser.getDistinctidName());
        sysDataDirectory.setUpdateTime(new Date());
        sysDataDirectory.setUpdateUserId(sysUser.getId().toString());
        sysDataDirectory.setUpdateUserName(sysUser.getUserName());
        sysDataDirectory.setSort(sort);
        sysDataDirectory.setTableName(tableName);
        sysDataDirectory.setDataType(dataType);
        sysDataDirectory.setAddressType(addressType);
        sysDataDirectoryService.save(sysDataDirectory);
        return sysDataDirectory.getId();
    }
}
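One thing worth noting about invokeHead: the sheet name and the header cells are concatenated directly into the CREATE TABLE and INSERT statements, so a header containing spaces, reserved words, or characters that are illegal in MySQL identifiers will make the generated SQL fail. A small sketch of a guard that could be applied to each name before it is appended; this helper is not part of the original code:

// Hypothetical helper: wrap a dynamic identifier in backticks and replace characters
// that cannot appear in a MySQL identifier. Not part of the original listener.
final class SqlIdentifierUtil {

    private SqlIdentifierUtil() {
    }

    static String safeIdentifier(String raw) {
        // Keep letters, digits, underscore and CJK characters; replace everything else with '_'
        String cleaned = raw.trim().replaceAll("[^\\w\\u4e00-\\u9fa5]", "_");
        return "`" + cleaned + "`";
    }
}

In invokeHead, SqlIdentifierUtil.safeIdentifier(tableName) and SqlIdentifierUtil.safeIdentifier(value.getStringValue()) would then be appended instead of the raw strings, and columnNames would be built from the escaped names so the INSERT prefix matches the created columns.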
1.6 DAO layer Mapper
import com.hvit.data_governance.dataComparison.entity.SysDataDirectory;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.*;

import java.util.LinkedHashMap;
import java.util.List;

/**
 * <p>
 * Mapper interface
 * </p>
 *
 * @author 曹震
 * @since 2022-10-24
 */
@Mapper
public interface SysDataDirectoryMapper extends BaseMapper<SysDataDirectory> {

    /**
     * Execute a dynamically built CREATE TABLE statement.
     */
    @Update("${tableString}")
    int createTable(@Param("tableString") String tableString);

    /**
     * Check whether a table with the given name already exists.
     * Note: this matches the table name in any schema on the server.
     *
     * @param tableName table name
     * @return number of matching tables
     * @author yunnuo
     */
    @Select(" SELECT COUNT(*) as count FROM information_schema.TABLES WHERE table_name = #{tableName}")
    Integer existsTable(@Param("tableName") String tableName);

    /**
     * Batch-insert rows into a dynamically created table.
     * tableString carries the "insert into <table> (<columns>)" prefix;
     * the value groups are rendered by the nested foreach below.
     */
    @Insert({"<script>" +
            "${tableString} " +
            "values" +
            "<foreach collection='dataSetList' item='line' index='index' separator=','> " +
            "<foreach collection='line.values' item='value' open='(' separator=',' close=')'>" +
            "#{value}" +
            "</foreach>" +
            "</foreach>" +
            "</script>"
    })
    Integer insertTableData(@Param("tableString") String tableString,
                            @Param("dataSetList") List<LinkedHashMap<String, String>> dataSetList);

    /***
     * Get the column names of a table.
     * @param tableName
     * @return
     */
    @Select(" select COLUMN_NAME from information_schema.columns where table_name = #{tableName}")
    List<String> getColumnName(@Param("tableName") String tableName);

    /***
     * Get the number of rows in a table.
     * @param tableName
     * @return
     */
    @Select(" select count(1) from ${tableName}")
    Integer getTableDataCount(@Param("tableName") String tableName);
}
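To make the <script> in insertTableData concrete: for a hypothetical sheet named person with header columns name and age, the statement MyBatis ends up preparing looks roughly like this, with one placeholder group per buffered row and the values bound from each LinkedHashMap in insertion order:

insert into person (uuid,name,age )
values (?, ?, ?), (?, ?, ?)
-- one (?, ?, ?) group per row in dataSetList; the first ? of each group is the generated uuid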
When the workbook contains multiple sheets, doReadAll() reads each of them in turn, so invokeHead and doAfterAllAnalysed run once per sheet and every sheet ends up as its own table.
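After an import you can confirm the result from the catalog table, for example with the query below; the parent_id value is whatever id importExcel created for this import (placeholder shown here):

SELECT data_name, table_name, sort
FROM sys_data_directory
WHERE parent_id = '<parent-record-id>'
ORDER BY sort;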