  • How to write data to a file (txt, csv) with BeanShell

    import java.io.BufferedWriter;
    import java.io.FileWriter;
    import java.io.IOException;
    import java.util.UUID;
    
    public class RandomData {
    
    	public static void writeDataToFile(String filename, boolean append, String line) {
    		FileWriter fw;
    		BufferedWriter bw = null;
    		try {
    			fw = new FileWriter(filename, append); // append == true keeps existing rows
    			bw = new BufferedWriter(fw);
    			bw.write(line);
    			bw.newLine(); // terminate the record with a line break
    			bw.flush(); // push the buffer to disk right away
    		} catch (IOException e) {
    			e.printStackTrace();
    		} finally {
    			try {
    				if (bw != null) {
    					bw.close(); // always release the stream
    				}
    			} catch (IOException e2) {
    				e2.printStackTrace();
    			}
    		}
    	}
    }
    
    String creditNo = UUID.randomUUID().toString().substring(0, 36).replace("-", "");
    String thirduserno = UUID.randomUUID().toString().substring(0, 20).replace("-", "");
    String filepath = "D:\\BaiduYunDownload\\work\\pressure_test\\banqiandata\\pay.csv"; // backslashes must be doubled in a Java/BeanShell string literal
    String data = creditNo + "," + thirduserno;
    RandomData.writeDataToFile(filepath, true, data); // append the credit application number and thirdUserNo to the file
    vars.put("creditNo",creditNo);
    vars.put("thirdUserNo",thirduserno);
    
    log.info("======="+creditNo);
    log.info("+++++++++"+vars.get("creditNo"));
    

      

  • Original post: https://www.cnblogs.com/HCT118/p/6903175.html