The file contains a large amount of data that we want to load into the database for statistical analysis.

The code is as follows:
package com.mobile.web.api;

import com.mobile.commons.JsonResp;
import com.mobile.model.LogInfo;
import com.mobile.service.LogInfoService;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import java.io.*;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

@RestController
@RequestMapping(value = "/test")
@Transactional
public class ImportController {

    Logger log = Logger.getLogger(this.getClass());

    @Autowired
    private LogInfoService logInfoService;

    @RequestMapping(value = "/importTxt", method = RequestMethod.GET)
    public JsonResp importTxt() throws IOException, ParseException {
        log.debug("Starting data import");
        String encoding = "GBK";
        List<LogInfo> logInfoList = new ArrayList<>();
        String dir = "E:\\test\\log";
        File[] files = new File(dir).listFiles();
        for (File file : files) {
            if (file.isFile() && file.exists()) { // check that the entry is an existing file
                importFile(file, encoding, logInfoList);
            } else {
                return JsonResp.toFail("File not found, please check the file location!");
            }
        }
        Boolean insertFlag = logInfoService.insertBatch(logInfoList);
        if (!insertFlag) {
            return JsonResp.toFail("Save failed");
        }
        return JsonResp.ok();
    }

    public static void importFile(File file, String encoding, List<LogInfo> logInfoList)
            throws IOException, ParseException {
        InputStreamReader read = null; // explicit encoding (GBK) for the log files
        try {
            read = new InputStreamReader(new FileInputStream(file), encoding);
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        BufferedReader bufferedReader = new BufferedReader(read);
        String lineTxt = null;
        SimpleDateFormat sdf = new SimpleDateFormat("[dd/MMM/yyyy:HH:mm:ss Z]", Locale.US);
        while ((lineTxt = bufferedReader.readLine()) != null) {
            String[] lineArr = lineTxt.split(" ");
            int len = lineArr.length;
            LogInfo logInfo = new LogInfo();
            // the timestamp spans the first two space-separated tokens
            String logDate = lineArr[0] + " " + lineArr[1];
            System.out.println(sdf.parse(logDate)); // date-format conversion happens here
            logInfo.setLogTime(sdf.parse(logDate));
            logInfo.setAccessIp(lineArr[2]);
            logInfo.setProxyIp(lineArr[3]);
            logInfo.setResponseTime(lineArr[4]);
            logInfo.setReferer(lineArr[5].substring(1, lineArr[5].length() - 1));
            logInfo.setMethod(lineArr[6].substring(1));
            logInfo.setAccessUrl(lineArr[7].substring(0, lineArr[7].length() - 1));
            String accessUrl = lineArr[7];
            String[] accessUrlArr = accessUrl.split("/");
            logInfo.setItemName(accessUrlArr[3]);
            logInfo.setHttpcode(lineArr[8]);
            logInfo.setRequestsize(lineArr[9]);
            logInfo.setResponsesize(lineArr[10]);
            logInfo.setCacheHitStatus(lineArr[11]);
            // the user-agent occupies the remaining tokens except the last one
            String[] uaHead = new String[len - 13];
            System.arraycopy(lineArr, 12, uaHead, 0, len - 13);
            logInfo.setUaHead(StringUtils.join(uaHead));
            logInfo.setFileType(lineArr[len - 1]);
            logInfoList.add(logInfo);
        }
        bufferedReader.close(); // also closes the underlying InputStreamReader
    }
}
The file import succeeds.
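The batch insert goes through LogInfoService.insertBatch, whose implementation is not shown in the original post (the project may well use MyBatis or JPA). Below is only a minimal sketch of what such a method could look like with plain JDBC batching; the table name log_info, the column list, and the LogInfo getters are assumptions, not part of the original code.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.List;
import javax.sql.DataSource;

import com.mobile.model.LogInfo;

// Hypothetical sketch only: table, columns and getters are illustrative.
public class LogInfoServiceJdbcSketch {

    private final DataSource dataSource;

    public LogInfoServiceJdbcSketch(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public boolean insertBatch(List<LogInfo> logInfoList) {
        String sql = "INSERT INTO log_info (log_time, access_ip, access_url, httpcode) "
                   + "VALUES (?, ?, ?, ?)";
        try (Connection conn = dataSource.getConnection();
             PreparedStatement ps = conn.prepareStatement(sql)) {
            conn.setAutoCommit(false);
            int count = 0;
            for (LogInfo info : logInfoList) {
                ps.setTimestamp(1, new Timestamp(info.getLogTime().getTime()));
                ps.setString(2, info.getAccessIp());
                ps.setString(3, info.getAccessUrl());
                ps.setString(4, info.getHttpcode());
                ps.addBatch();
                if (++count % 1000 == 0) {
                    ps.executeBatch(); // flush every 1000 rows to bound memory use
                }
            }
            ps.executeBatch();         // flush the remaining rows
            conn.commit();
            return true;
        } catch (SQLException e) {
            e.printStackTrace();
            return false;
        }
    }
}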
However, when the data volume is particularly large, inserting this way into the database becomes slow. An alternative approach is to read the file, write the data out to a new file in the desired format, and then import that file with a SQL statement (or the Navicat client); a sketch follows.
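A minimal sketch of that alternative, assuming the target database is MySQL (so the bulk load uses LOAD DATA INFILE) and that LogInfo exposes getters matching its setters; the output path, column order, and class name here are illustrative, not from the original post.

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.List;

import com.mobile.model.LogInfo;

// Hypothetical sketch: dump the parsed records to a tab-separated file
// that a SQL client can bulk-load.
public class LogFileExporter {

    public static void exportToTsv(List<LogInfo> logInfoList, String outPath) throws IOException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(outPath))) {
            for (LogInfo info : logInfoList) {
                writer.write(sdf.format(info.getLogTime()) + "\t"
                        + info.getAccessIp() + "\t"
                        + info.getAccessUrl() + "\t"
                        + info.getHttpcode());
                writer.newLine();
            }
        }
        // The file can then be bulk-loaded, e.g. in MySQL:
        //   LOAD DATA LOCAL INFILE 'E:/test/log_info.tsv' INTO TABLE log_info
        //   FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n'
        //   (log_time, access_ip, access_url, httpcode);
    }
}

Bulk-loading a pre-formatted file like this moves the parsing cost out of the database transaction, which is why it tends to be much faster than row-by-row or even batched INSERTs for very large files.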
Original post: https://www.cnblogs.com/mufengforward/p/9482996.html