[EasyExcel] Part 1: Dynamically Importing Excel Files to Generate Matching Database Tables


Background

The requirement: read every sheet of an uploaded Excel file and generate one database table per sheet.

A single Excel workbook may contain one or more sheets.

Preface

This article covers creating tables dynamically, creating table columns dynamically, and populating the data.

I. Dependencies

        <!-- Excel processing dependency -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>easyexcel</artifactId>
            <version>3.0.5</version>
        </dependency>
        <!-- MyBatis pagination plugin -->
     
        <dependency>
            <groupId>com.github.pagehelper</groupId>
            <artifactId>pagehelper-spring-boot-starter</artifactId>
            <version>1.4.5</version>
        </dependency>
 
        <!-- fastjson -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.70</version>
        </dependency>
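
The sections below also assume spring-boot-starter-web, MyBatis-Plus, Lombok, and the Swagger annotations are on the classpath; the original list omits them. For example, the MyBatis-Plus starter (the version shown is an assumption) would be declared as:

        <!-- MyBatis-Plus (assumed; provides BaseMapper, QueryWrapper, and IService used below) -->
        <dependency>
            <groupId>com.baomidou</groupId>
            <artifactId>mybatis-plus-boot-starter</artifactId>
            <version>3.5.2</version>
        </dependency>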

II. The database record table

SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
 
-- ----------------------------
-- Table structure for sys_data_directory
-- ----------------------------
DROP TABLE IF EXISTS `sys_data_directory`;
CREATE TABLE `sys_data_directory`  (
  `id` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT 'Primary key',
  `data_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Alias of the data set / data source',
  `parent_id` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT '0' COMMENT 'Parent node id; defaults to 0',
  `distinctid_isn` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Internal code of the administrative division',
  `distinctid` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Administrative division',
  `distinctid_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Administrative division name',
  `distinctid_all_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Full administrative division name',
  `create_time` datetime NULL DEFAULT NULL COMMENT 'Creation time',
  `update_time` datetime NULL DEFAULT NULL COMMENT 'Last modified time',
  `create_user_id` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Creator id',
  `create_user_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Creator name',
  `update_user_id` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Modifier id',
  `update_user_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Modifier name',
  `table_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Table name; the sheet name from the imported Excel file',
  `sort` int(11) NULL DEFAULT 0 COMMENT 'Sort order',
  `data_desc` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'Data source description',
  `data_type` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '01 civil affairs, 02 public security, 03 comparison result, 99 other',
  `address_type` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '01 building address, 02 unit/room address',
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = DYNAMIC;
 
SET FOREIGN_KEY_CHECKS = 1;
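
The SysDataDirectory entity used by the service and listener below is never shown in the article. A minimal sketch matching the DDL above, assuming MyBatis-Plus annotations and Lombok; IdType.ASSIGN_UUID is an assumption, but it would explain why createDataSets can return getId() right after save():

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
 
import java.io.Serializable;
import java.util.Date;
 
@Data
@TableName("sys_data_directory")
public class SysDataDirectory implements Serializable {
    private static final long serialVersionUID = 1L;
 
    /** Primary key; ASSIGN_UUID (assumed) lets MyBatis-Plus fill it during save() */
    @TableId(type = IdType.ASSIGN_UUID)
    private String id;
    private String dataName;            // alias of the data set / data source
    private String parentId;            // parent node id, "0" for roots
    private String distinctidIsn;       // internal code of the administrative division
    private String distinctid;          // administrative division
    private String distinctidName;      // administrative division name
    private String distinctidAllName;   // full administrative division name
    private Date createTime;
    private Date updateTime;
    private String createUserId;
    private String createUserName;
    private String updateUserId;
    private String updateUserName;
    private String tableName;           // sheet name used as the table name
    private Integer sort;               // sort order
    private String dataDesc;            // data source description
    private String dataType;            // 01 civil affairs, 02 public security, 03 comparison result, 99 other
    private String addressType;         // 01 building address, 02 unit/room address
}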

III. Importing the Excel file

1. Code

1.1 Controller layer

@Api(tags = "Data source collection")
@RestController
@RequestMapping("/hvit/dataSet/")
public class SysDataDirectoryController {
    @Autowired
    private SysDataDirectoryDataService sysDataDirectoryDataService;
 
    @RequestMapping(value = "/importExcel", method = RequestMethod.POST, headers = "content-type=multipart/form-data")
    @ApiOperation(value = "Import a file-based data source")
    public ResponseEntity<R> importExcel(@RequestParam(value = "file") MultipartFile file, ExcelReq excelReq) throws IOException {
        return ResponseEntity.ok(sysDataDirectoryDataService.importExcel(file, excelReq));
    }
}
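
A quick smoke test of the endpoint, sketched with Spring's TestRestTemplate (the URL, port, and workbook path are hypothetical). The ExcelReq fields travel as ordinary form fields alongside the file part, since the controller binds them without @RequestBody:

import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.core.io.FileSystemResource;
import org.springframework.http.*;
import org.springframework.util.LinkedMultiValueMap;
 
public class ImportSmokeTest {
    public static void main(String[] args) {
        // Multipart body: the file part plus the ExcelReq fields as plain form fields
        LinkedMultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        body.add("file", new FileSystemResource("test.xlsx")); // hypothetical local workbook
        body.add("dataType", "01");
        body.add("addressType", "01");
        body.add("id", "0"); // "0": import under the root node
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.MULTIPART_FORM_DATA);
        ResponseEntity<String> resp = new TestRestTemplate().postForEntity(
                "http://localhost:8080/hvit/dataSet/importExcel",
                new HttpEntity<>(body, headers), String.class);
        System.out.println(resp.getBody());
    }
}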

1.2 The ExcelReq class

 
 
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
 
import java.io.Serializable;
 
@Data
public class ExcelReq implements Serializable {
    private static final long serialVersionUID = 1L;
 
    @ApiModelProperty(name = "id", value = "父节点id")
    private String id;
 
    @ApiModelProperty(name = "dataType", value = "数据来源类型")
    private String dataType;
 
    @ApiModelProperty(name = "anotherName", value = "别名")
    private String anotherName;
 
    @ApiModelProperty(name = "addressType", value = "地址类型")
    private String addressType;
}

1.3 Service layer

    /***
     * Import a file-based data source.
     * @param file     the uploaded Excel file
     * @param excelReq extra request parameters (parent id, data type, alias, address type)
     * @return R result wrapper
     */
    @Transactional
    public R importExcel(MultipartFile file, ExcelReq excelReq) throws IOException {
        if (file == null) {
            return R.error("Please upload a file!");
        }
        // Resolve the current user
        SysUser sysUser = sysUserService.findByUserName(getCurrentLoginUserName());
        String fileName = file.getOriginalFilename().substring(0, file.getOriginalFilename().lastIndexOf("."));
        // File suffix, including the leading dot
        String fileSuffix = file.getOriginalFilename().substring(file.getOriginalFilename().lastIndexOf("."));
        if (!Constant.EXCEL_XLSX.equals(fileSuffix) && !Constant.EXCEL_XLS.equals(fileSuffix)) {
            return R.error("Please upload an Excel file!");
        }
        // 1. Use the file name (or the supplied alias) as the name of the root record
        String directoryName = StringUtils.isEmpty(excelReq.getAnotherName()) ? fileName : excelReq.getAnotherName();
        // Reject duplicate names under the same parent for the same user
        QueryWrapper<SysDataDirectory> wrapper = new QueryWrapper<>();
        wrapper.lambda().eq(SysDataDirectory::getCreateUserId, sysUser.getId());
        wrapper.lambda().eq(SysDataDirectory::getDataName, directoryName);
        wrapper.lambda().eq(SysDataDirectory::getParentId, excelReq.getId());
        int count = sysDataDirectoryService.count(wrapper);
        if (count > 0) {
            return R.error("A data set with this name already exists!");
        }
        // Create the root record; its primary key becomes the parent id for each sheet's record
        String uuid = createDataSets(directoryName, null, excelReq.getId(), sysUser, excelReq.getDataType(), excelReq.getAddressType());
        // doReadAll() walks every sheet in the workbook; the listener creates one table per sheet
        EasyExcel.read(file.getInputStream(), new ConfigFilterListener(sysDataDirectoryMapper, uuid, sysDataDirectoryService
                , excelReq.getDataType(), sysUser, excelReq.getAddressType())).doReadAll();
        return R.ok();
    }
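
The method above references Constant.EXCEL_XLS / Constant.EXCEL_XLSX, a getCurrentLoginUserName() helper, and a six-argument createDataSets(...) overload (presumably the seven-argument version shown in section 1.5 minus the sort parameter), none of which appear in the article. Since fileSuffix keeps the leading dot, the constants presumably include it too; a minimal sketch under that assumption:

public final class Constant {
    // Suffixes keep the leading dot because the service extracts the suffix
    // with substring(lastIndexOf("."))
    public static final String EXCEL_XLS = ".xls";
    public static final String EXCEL_XLSX = ".xlsx";
 
    private Constant() {
    }
}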

1.4 The response wrapper R

 
import java.util.HashMap;
import java.util.Map;
 
/**
 *
 * @author cz
 * @date 2022-11-10
 */
public class R extends HashMap<String, Object> {
 
    private static final long serialVersionUID = 1L;
 
    public R() {
        put("code", 200);
    }
 
    public R(Integer code) {
        put("code", code);
        put("data", new HashMap<String, Object>());
    }
 
    public R(Integer code, String msg) {
        put("code", code);
        put("msg", msg);
        put("data", new HashMap<String, Object>());
    }
 
    public static R error() {
        return error(500, "Unknown error, please contact the administrator");
    }
 
    public static R errorDebug(String message) {
        return error(500, "Unknown error " + message + ", please contact the administrator");
    }
 
    public static R error(String msg) {
        return error(500, msg);
    }
 
    public static R error(int code, String msg) {
        R r = new R();
        r.put("code", code);
        r.put("msg", msg);
        return r;
    }
 
    public R errorInfo(String msg) {
        this.put("errorMsg", msg);
        return this;
    }
 
    public static R ok(String msg) {
        R r = new R();
        r.put("msg", msg);
        r.put("data", new HashMap<String, Object>());
        return r;
    }
 
    public static R ok(Map<String, Object> map) {
        R r = new R();
        r.putAll(map);
        r.put("data", new HashMap<String, Object>());
        return r;
    }
 
    public static R ok() {
        return new R().put("msg", "success").put("data", new HashMap<String, Object>());
    }
 
    public static R ok(Integer size) {
        // Capacity sized so `size` entries fit without rehashing (default load factor 0.75)
        return new R().put("data", new HashMap<String, Object>((int)Math.round(size / 0.75)));
    }
 
    @Override
    public R put(String key, Object value) {
        super.put(key, value);
        return this;
    }
 
    /**
     * Add a key/value pair to the "data" section of the response.
     * Note: assumes "data" was initialized (e.g. via ok()); a plain new R() has no "data" entry.
     *
     * @param key   payload key
     * @param value payload value
     * @return this, for chaining
     */
    public R putData(String key, Object value) {
        Map<String, Object> map = (HashMap<String, Object>)this.get("data");
        map.put(key, value);
        return this;
    }
 
 
}
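
A quick sketch of how the wrapper is used in practice (values are hypothetical):

// Success with a payload entry; putData targets the nested "data" map
R ok = R.ok().putData("id", "6f9619ff-8b86-d011-b42d-00cf4fc964ff");
// Failure with a message; note that error() does not initialize "data"
R err = R.error("Please upload a file!");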

1.5 The EasyExcel listener: ConfigFilterListener

 
import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.metadata.data.ReadCellData;
import com.alibaba.excel.read.listener.ReadListener;
import com.hvit.data_governance.dataComparison.dao.SysDataDirectoryMapper;
import com.hvit.data_governance.dataComparison.entity.SysDataDirectory;
import com.hvit.data_governance.dataComparison.service.SysDataDirectoryService;
import com.hvit.data_governance.system.entity.SysUser;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
 
import java.util.*;
import java.util.stream.Collectors;
 
@Slf4j
public class ConfigFilterListener implements ReadListener<LinkedHashMap<String, String>> {
 
    /**
     * Flush to the database every BATCH_COUNT rows, then clear the list so the
     * buffered rows can be garbage-collected. Tune this to your row size and heap.
     */
    private static final int BATCH_COUNT = 1000;
    private String uuid;
    private String dataType;
    private String tableName;          // table name (current sheet)
    private String columnNames;        // comma-separated column list for the INSERT
    private Integer sort = 0;          // sort order of the sheet within the workbook
    private String addressType;        // address type
    private List<LinkedHashMap<String, String>> dataSetList = new ArrayList<>();
 
    // The listener is instantiated with `new`, so it is not a Spring bean and field
    // injection (@Resource/@Autowired) would never run; both dependencies must come
    // through the constructor.
    private SysDataDirectoryMapper sysDataDirectoryMapper;
    private SysDataDirectoryService sysDataDirectoryService;
    private SysUser sysUser;
 
    public ConfigFilterListener(SysDataDirectoryMapper sysDataDirectoryMapper, String uuid, SysDataDirectoryService sysDataDirectoryService
            , String dataType, SysUser sysUser, String addressType) {
        this.uuid = uuid;
        this.dataType = dataType;
        this.sysUser = sysUser;
        this.addressType = addressType;
        this.sysDataDirectoryMapper = sysDataDirectoryMapper;
        this.sysDataDirectoryService = sysDataDirectoryService;
    }
 
    /**
     * Called once for every data row that is parsed.
     */
    @SneakyThrows
    @Override
    public void invoke(LinkedHashMap<String, String> linkedHashMap, AnalysisContext analysisContext) {
        //log.info("Parsed one row: {}", linkedHashMap);
        LinkedHashMap<String, String> map = new LinkedHashMap<>();
        map.put("uuid", UUID.randomUUID().toString());
        // Raw types are deliberate here: with no model class configured, EasyExcel actually
        // supplies Integer column indexes as the keys, so iterating through Object and
        // calling toString() avoids the ClassCastException a typed loop would raise.
        Set set = linkedHashMap.keySet();
        Iterator iterator = set.iterator();
        while (iterator.hasNext()) {
            Object next = iterator.next();
            map.put(next.toString(), linkedHashMap.get(next));
        }
        dataSetList.add(map);
        // Flush once BATCH_COUNT rows have accumulated, so tens of thousands of rows
        // never sit in memory at once (avoids OOM)
        if (dataSetList.size() >= BATCH_COUNT) {
            // Build the INSERT prefix; the mapper's <foreach> script appends the value tuples
            StringBuilder sb = new StringBuilder("insert into ");
            sb.append("`").append(this.tableName).append("` (");
            sb.append(this.columnNames).append(" )");
            batchInsert(sb.toString(), dataSetList);
            // Clear the buffer after the flush
            dataSetList.clear();
        }
    }
 
    /**
     * Called once after the whole sheet has been parsed.
     *
     * @param analysisContext EasyExcel analysis context
     */
    @SneakyThrows
    @Override
    public void doAfterAllAnalysed(AnalysisContext analysisContext) {
        log.info("Persisting the remaining rows --->");
        if (dataSetList.size() > 0) {
            // Build the INSERT prefix again for whatever is left in the buffer
            StringBuilder sb = new StringBuilder("insert into ");
            sb.append("`").append(this.tableName).append("` (");
            sb.append(this.columnNames).append(" )");
            // Flush the tail so the last partial batch is not lost
            batchInsert(sb.toString(), dataSetList);
            dataSetList.clear();
            log.info("Sheet fully parsed and stored!");
        }
    }
 
    /***
     * Called with the header row of each sheet; creates the table for that sheet.
     * @param headMap header cells, keyed by column index
     * @param context EasyExcel analysis context
     */
    @Override
    public void invokeHead(Map<Integer, ReadCellData<?>> headMap, AnalysisContext context) {
        try {
            // Reset state left over from the previous sheet
            this.columnNames = null;
            // The current sheet's name becomes the table name
            String tableName = context.readSheetHolder().getSheetName();
            int tableCount = sysDataDirectoryMapper.existsTable(tableName);
            List<String> heads = new ArrayList<>();
            heads.add("uuid");
            // If a table with this name already exists, make the name unique
            if (tableCount > 0) {
                tableName += "_" + System.currentTimeMillis();
            }
            StringBuilder createTableStr = new StringBuilder("CREATE TABLE ");
            createTableStr.append("`").append(tableName).append("`");
            createTableStr.append(" (`uuid` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,");
            Collection<ReadCellData<?>> values = headMap.values();
            for (ReadCellData<?> value : values) {
                // Back-tick the header so spaces or reserved words in the sheet don't break the DDL
                createTableStr.append("`").append(value.getStringValue())
                        .append("` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL,");
                heads.add(value.getStringValue());
            }
            createTableStr.append("PRIMARY KEY (`uuid`) USING BTREE)");
            int updateCount = sysDataDirectoryMapper.createTable(createTableStr.toString());
            if (updateCount != 0) {
                throw new RuntimeException("Failed to create the database table!");
            }
            // Record the new table in the directory, under the root record created by the service
            createDataSets(tableName, tableName, uuid, sysUser, dataType, sort, addressType);
            this.tableName = tableName;
            this.columnNames = heads.stream().map(h -> "`" + h + "`").collect(Collectors.joining(","));
            sort++;
        } catch (Exception ex) {
            // Swallowed in the original; at least log it so failures are visible
            log.error("Failed to create a table for the current sheet", ex);
            //sysDataDirectoryService.removeById(uuid);
            //throw new RuntimeException("Import failed! Please contact the administrator!");
        }
 
    }
 
    @Override
    public boolean hasNext(AnalysisContext context) {
        return true;
    }
 
//    /**
//     * Called on a conversion error or any other exception. Throwing stops the read;
//     * returning normally continues with the next row.
//     *
//     * @param exception the exception raised while parsing
//     * @param context   EasyExcel analysis context
//     * @throws Exception if the read should be aborted
//     */
//    @Override
//    public void onException(Exception exception, AnalysisContext context) {
//        log.error("Row failed to parse, continuing with the next one: {}", exception.getMessage());
//        // A cell conversion error carries the exact row and column
//        // Combine with invokeHeadMap if the header information is needed too
//        if (exception instanceof ExcelDataConvertException) {
//            ExcelDataConvertException excelDataConvertException = (ExcelDataConvertException) exception;
//            log.error("Parse error at row {}, column {}", excelDataConvertException.getRowIndex(),
//                    excelDataConvertException.getColumnIndex());
//        }
//    }
 
    // Note: the original marked this @Async, but that has no effect here: the listener
    // is not a Spring-managed bean, so there is no proxy to move the call off-thread.
    public void batchInsert(String tableString, List<LinkedHashMap<String, String>> list) throws Exception {
        sysDataDirectoryMapper.insertTableData(tableString, list);
        log.info("Batch stored to the database!");
    }
 
    /***
     * Create a directory record for a data set.
     * @param directoryName alias of the data source
     * @param tableName     physical table name (null for the root record)
     * @param parentId      parent node id; defaults to 0 for roots
     * @param sysUser       current user, used for the audit fields
     * @param dataType      data source type code
     * @param sort          sort order of the sheet within the workbook
     * @param addressType   address type code
     * @return the primary key of the newly saved record
     */
    public String createDataSets(String directoryName, String tableName, String parentId, SysUser sysUser, String dataType, Integer sort, String addressType) {
        SysDataDirectory sysDataDirectory = new SysDataDirectory();
        sysDataDirectory.setDataName(directoryName);
        sysDataDirectory.setParentId(parentId);
        sysDataDirectory.setCreateTime(new Date());
        sysDataDirectory.setCreateUserId(sysUser.getId().toString());
        sysDataDirectory.setCreateUserName(sysUser.getUserName());
        sysDataDirectory.setDistinctid(sysUser.getDistinctid());
        sysDataDirectory.setDistinctidAllName(sysUser.getDistinctidAllName());
        sysDataDirectory.setDistinctidIsn(sysUser.getDistinctidIsn());
        sysDataDirectory.setDistinctidName(sysUser.getDistinctidName());
        sysDataDirectory.setUpdateTime(new Date());
        sysDataDirectory.setUpdateUserId(sysUser.getId().toString());
        sysDataDirectory.setUpdateUserName(sysUser.getUserName());
        sysDataDirectory.setSort(sort);
        sysDataDirectory.setTableName(tableName);
        sysDataDirectory.setDataType(dataType);
        sysDataDirectory.setAddressType(addressType);
        sysDataDirectoryService.save(sysDataDirectory);
        return sysDataDirectory.getId();
    }
}
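
To make the string-building above concrete: for a sheet named person_info with header cells name and age, the listener would assemble DDL and an INSERT roughly like the following (illustrative only; the real INSERT uses #{...} placeholders, shown here with literal values for readability):

CREATE TABLE `person_info` (`uuid` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
  `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL,
  `age` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL,
  PRIMARY KEY (`uuid`) USING BTREE);
 
insert into `person_info` (`uuid`,`name`,`age` ) values ('0b9c...e1', 'Alice', '25'), ('1d2f...a7', 'Bob', '30');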

1.6 DAO layer: the Mapper interface

 
 
import com.hvit.data_governance.dataComparison.entity.SysDataDirectory;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.*;
 
import java.util.LinkedHashMap;
import java.util.List;
 
/**
 * <p>
 * Mapper interface
 * </p>
 *
 * @author 曹震
 * @since 2022-10-24
 */
@Mapper
public interface SysDataDirectoryMapper extends BaseMapper<SysDataDirectory> {
 
    @Update("${tableString}")
    int createTable(@Param("tableString") String tableString);
 
    /**
     * Check whether a table already exists, restricted to the current schema so a
     * same-named table in another database on the server does not count.
     *
     * @param tableName table name
     * @return number of matching tables
     * @author yunnuo
     */
    @Select(" SELECT COUNT(*) as count FROM information_schema.TABLES WHERE table_name = #{tableName} AND table_schema = (SELECT DATABASE())")
    Integer existsTable(@Param("tableName") String tableName);
 
    @Insert({"<script>" +
               "${tableString} " +
               "values" +
                  "<foreach collection='dataSetList' item='line' index='index'  separator=','> " +
                    "<foreach collection='line.values' item='value' open='(' separator=',' close=')'>" +
                        "#{value}" +
                    "</foreach>" +
                 "</foreach>" +
            "</script>"
    })
    Integer insertTableData(@Param("tableString") String tableString, @Param("dataSetList") List<LinkedHashMap<String, String>> dataSetList);
 
    /***
     * List the column names of a table (restricted to the current schema).
     * @param tableName table name
     * @return column names
     */
    @Select(" select COLUMN_NAME from information_schema.columns where  table_name = #{tableName} and table_schema = (select database())")
    List<String> getColumnName(@Param("tableName") String tableName);
 
    /***
     * Count the rows in a table.
     * @param tableName table name
     * @return row count
     */
    @Select(" select count(1) from ${tableName}")
    Integer getTableDataCount(@Param("tableName") String tableName);
 
}

Each sheet generates its own table, so a workbook with multiple sheets produces multiple tables in a single import.
