zoukankan      html  css  js  c++  java
  • 批量数据的Csv导入

    /**
     * Entry point for the CSV import: resolves the file name to a {@link File}
     * and delegates the actual parsing/persistence to the history-data service.
     *
     * @param fileName absolute or relative path of the CSV file to import
     */
    public void importIndexHistoryByCsv(String fileName) {
        logger.info("开始获取Csv文件导入到数据库,csv文件名为:" + fileName);
        cnAbsHistoryDataService.importIndexHistoryByCsv(new File(fileName));
    }



    // 指数历史数据导入 — import index history data parsed from a CSV file.
    /**
     * Maps each CSV row (header already stripped by {@code transCsvToList}) onto an
     * {@code AbsSecurityEntity} / {@code AbsSecuritySummaryEntity} pair and persists
     * the pairs through the DAOs in batches of at most 300 rows.
     *
     * <p>Fixes vs. previous version: {@code String.split} never produces {@code null}
     * elements, so the old {@code field == null} checks were dead code and an empty
     * CSV cell ({@code ""}) crashed {@code Double.valueOf} with a
     * {@code NumberFormatException}; blank numeric cells are now mapped to
     * {@code null}. The caught exception is also logged with its full stack trace
     * instead of only its {@code toString()}.
     *
     * <p>Expected columns: 0=id, 1=code, 2=name, 3=date, 4=price, 5=yield, 6=wal,
     * 7=beginBalance, 8=principalPaid, 9=interestPaid, 10=(skipped), 11=notional,
     * 12=(skipped), 13=rating.
     *
     * @param file the CSV file to import
     * @throws BizException if a row/column cannot be parsed; the message reports the
     *                      1-based file row (header included, hence {@code j + 2})
     *                      and the 0-based column index {@code i}
     */
    @Override
    public void importIndexHistoryByCsv(File file) {
        List<String> csvList = transCsvToList(file);
        List<AbsSecurityEntity> absSecurityList = new ArrayList<AbsSecurityEntity>();
        List<AbsSecuritySummaryEntity> summaryList = new ArrayList<AbsSecuritySummaryEntity>();

        int j = 0; // row index into csvList (0-based, header already removed)
        int i = 0; // column index inside the current row; reported in the error message
        try {
            List<AbsSecurityEntity> totalSecurityList = new ArrayList<>();
            for (j = 0; j < csvList.size(); j++) {
                String[] lineData = csvList.get(j).split(",");
                AbsSecurityEntity security = new AbsSecurityEntity();
                AbsSecuritySummaryEntity summary = new AbsSecuritySummaryEntity();
                i = -1;
                security.setSecurityId(Long.valueOf(lineData[++i]));      // col 0
                summary.setNoteId(Long.valueOf(lineData[i]));
                security.setSecurityCode(lineData[++i]);                  // col 1
                summary.setSecurityCode(lineData[i]);
                security.setSecurityName(lineData[++i]);                  // col 2
                summary.setSecurityName(lineData[i]);
                security.setTransferDate(DateUtil.isDate(lineData[++i])); // col 3
                summary.setTransferDate(DateUtil.isDate(lineData[i]));

                summary.setPrice(parseNullableDouble(lineData[++i]));         // col 4
                summary.setYield(parseNullableDouble(lineData[++i]));         // col 5
                summary.setWal(parseNullableDouble(lineData[++i]));           // col 6
                summary.setBeginBalance(parseNullableDouble(lineData[++i]));  // col 7
                summary.setPrincipalPaid(parseNullableDouble(lineData[++i])); // col 8
                summary.setInterestPaid(parseNullableDouble(lineData[++i]));  // col 9
                ++i; // col 10 intentionally skipped
                summary.setNotional(parseNullableDouble(lineData[++i]));      // col 11
                ++i; // col 12 intentionally skipped
                summary.setRating(lineData[++i]);                             // col 13
                absSecurityList.add(security);
                summaryList.add(summary);
                // 300条批量导入 — flush a batch every 300 rows, and on the final row.
                if (j == csvList.size() - 1 || absSecurityList.size() >= 300) {
                    totalSecurityList.addAll(absSecurityList);
                    // NOTE(review): totalSecurityList is trimmed here but never read
                    // afterwards — this block looks vestigial; kept as-is to preserve
                    // behavior. Confirm whether it can be removed.
                    if (totalSecurityList.size() > 2000) {
                        totalSecurityList = totalSecurityList.stream()
                                .sorted(Comparator.comparing(AbsSecurityEntity::getTransferDate))
                                .collect(Collectors.toList());
                        totalSecurityList.subList(1, totalSecurityList.size() - 1000).clear();
                    }

                    for (AbsSecurityEntity entity : absSecurityList) {
                        entity.setType("default");
                    }
                    absSecurityDao.batchAddAbsSecurity(absSecurityList);
                    absSecuritySummaryDao.batchAddAbsSecuritySummary(summaryList);
                    absSecurityList.clear();
                    summaryList.clear();
                }
            }
        } catch (Exception e) {
            // Log the full stack trace rather than just e.toString().
            logger.error("指数历史数据都 error :", e);
            throw new BizException("第: " + (j + 2) + "行 , : " + i + " 列,数据格式有误");
        }
    }

    /** Parses a CSV cell into a Double; a null or blank cell becomes null. */
    private static Double parseNullableDouble(String value) {
        return (value == null || value.isEmpty()) ? null : Double.valueOf(value);
    }


    /**
     * Reads a CSV file (UTF-8) into a list of its lines, dropping the header row.
     *
     * <p>Fixes vs. previous version: the reader is closed via try-with-resources;
     * I/O failures are no longer silently swallowed (they previously resurfaced as
     * a confusing exception from {@code subList} when the file was missing or
     * empty); the reader is no longer wrapped in two {@code BufferedReader}s; and
     * an empty file now yields an empty list instead of throwing.
     *
     * @param file the CSV file to read
     * @return every line after the first (header) line; an empty list if the file
     *         has no data rows
     * @throws RuntimeException if the file cannot be opened or read
     */
    public static List<String> transCsvToList(File file) {
        List<String> dataList = new ArrayList<String>();
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(file), "UTF-8"))) {
            String line;
            while ((line = br.readLine()) != null) {
                dataList.add(line);
            }
        } catch (IOException e) {
            throw new RuntimeException("读取csv文件失败: " + file, e);
        }
        // Drop the header row; guard against a file with no lines at all.
        return dataList.isEmpty() ? new ArrayList<String>() : dataList.subList(1, dataList.size());
    }
  • 相关阅读:
    Hive—数据库级增、删、改、查
    Kafka—Bootstrap broker hadoop102:2181 (id: -1 rack: null) disconnected
    Kafka—命令行操作
    Kafka—Kafka安装部署
    Kafka—Java HotSpot(TM) 64-Bit Server VM warning: INFO: os::commit_memory(0x00000000c0000000, 1073741824, 0) failed; error='Cannot allocate memory' (errno=12)
    mysql—Job for mysqld.service failed because the control process exited with error code. See "systemctl status mysqld.service" and "journalctl -xe" for details.
    Linux—安装MySQL数据库
    Linux—Yum源配置
    Hive-FAILED: SemanticException org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
    Linux—Crontab定时任务
  • 原文地址:https://www.cnblogs.com/muliu/p/9198752.html
Copyright © 2011-2022 走看看