mirror of https://github.com/alibaba/easyexcel
Hccake
2 years ago
3 changed files with 140 additions and 0 deletions
@@ -0,0 +1,23 @@
package com.alibaba.easyexcel.test.demo.read;

import com.alibaba.excel.annotation.ExcelProperty;
import lombok.Data;

import java.util.Date;

/**
 * Test entity class for reads that support multiple header variants.
 *
 * @author hccake
 */
@Data
public class DemoCompatibleHeaderData {

    @ExcelProperty("String")
    private String string;

    @ExcelProperty("Date")
    private Date date;

    @ExcelProperty("DoubleData")
    private Double doubleData;
}
@@ -0,0 +1,97 @@
package com.alibaba.easyexcel.test.demo.read;

import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
import com.alibaba.excel.metadata.data.ReadCellData;
import com.alibaba.excel.util.ListUtils;
import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Reads the header row and accepts either Chinese or English header titles.
 *
 * @author hccake
 */
@Slf4j
public class DemoCompatibleHeaderDataListener extends AnalysisEventListener<DemoCompatibleHeaderData> {

    /**
     * Persist to the database every BATCH_COUNT rows (100 here works well in practice),
     * then clear the list so the cached rows can be garbage collected.
     */
    private static final int BATCH_COUNT = 100;

    /**
     * Cached rows.
     */
    private List<DemoCompatibleHeaderData> cachedDataList = ListUtils.newArrayListWithExpectedSize(BATCH_COUNT);

    /**
     * Mapping from the various incoming header titles to the header names declared in the annotations.
     */
    private Map<String, String> headerMapping = new HashMap<>(8);
    {
        // Map the Chinese header titles onto the English names used in @ExcelProperty.
        headerMapping.put("字符串标题", "String");
        headerMapping.put("日期标题", "Date");
        headerMapping.put("数字标题", "DoubleData");
    }

    /**
     * Called once for each header row.
     *
     * @param headMap the parsed header cells, keyed by column index
     * @param context the analysis context
     */
    @Override
    public void invokeHead(Map<Integer, ReadCellData<?>> headMap, AnalysisContext context) {
        log.info("Parsed a header row: {}", JSON.toJSONString(headMap));
        headMap.forEach((key, value) -> {
            // The header mapping here is a simple lookup table; adapt it to your own rules,
            // e.g. case conversion, trimming suffixes, removing whitespace, and so on.
            String stringValue = value.getStringValue();
            value.setStringValue(headerMapping.getOrDefault(stringValue, stringValue));
        });
    }

    /**
     * Called for every parsed data row.
     *
     * @param data one row value, the same as {@link AnalysisContext#readRowHolder()}
     * @param context the analysis context
     */
    @Override
    public void invoke(DemoCompatibleHeaderData data, AnalysisContext context) {
        log.info("Parsed a data row: {}", JSON.toJSONString(data));
        cachedDataList.add(data);
        // Once BATCH_COUNT is reached, persist the batch so tens of thousands of rows
        // do not accumulate in memory and cause an OOM.
        if (cachedDataList.size() >= BATCH_COUNT) {
            saveData();
            // Clear the list after persisting.
            cachedDataList = ListUtils.newArrayListWithExpectedSize(BATCH_COUNT);
        }
    }

    /**
     * Called once all rows have been parsed.
     *
     * @param context the analysis context
     */
    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        // Persist here as well so the last remaining rows are also stored.
        saveData();
        log.info("All rows parsed!");
    }

    /**
     * Persist to the database.
     */
    private void saveData() {
        log.info("Storing {} rows to the database!", cachedDataList.size());
        log.info("Stored to the database successfully!");
    }
}
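
For context, a minimal usage sketch (not part of this commit) showing how the listener above would typically be wired into an EasyExcel read. The class name and file path below are hypothetical placeholders; only EasyExcel.read(...).sheet().doRead() and the two classes from this change are taken as given.

import com.alibaba.excel.EasyExcel;

public class DemoCompatibleHeaderReadSketch {
    public static void main(String[] args) {
        // Hypothetical workbook path; its header row may use either the Chinese titles
        // (字符串标题, 日期标题, 数字标题) or the English ones (String, Date, DoubleData).
        String fileName = "compatibleHeaderDemo.xlsx";
        // invokeHead() in the listener rewrites the incoming header titles
        // before the rows are mapped onto DemoCompatibleHeaderData.
        EasyExcel.read(fileName, DemoCompatibleHeaderData.class, new DemoCompatibleHeaderDataListener())
            .sheet()
            .doRead();
    }
}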