package com.hadoop.hdfs;

import org.junit.Test;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashSet;

/**
 * Finds the lines common to two large files (D:/aa.txt and D:/bb.txt) that do not fit
 * in memory: hash every line into one of 1000 bucket files, then compare the two files
 * bucket by bucket, since identical lines always land in the bucket with the same index.
 */
public class Suanfa1 {

    private static final int BUCKETS = 1000;

    /** Splits D:/aa.txt into the bucket files D:/aa0.txt ... D:/aa999.txt. */
    @Test
    public void a1() throws IOException {
        split("D:/aa.txt", "D:/aa");
    }

    /** Splits D:/bb.txt into the bucket files D:/bb0.txt ... D:/bb999.txt. */
    @Test
    public void a2() throws IOException {
        split("D:/bb.txt", "D:/bb");
    }

    /** Writes each line of the input file into the bucket file chosen by its hash. */
    private void split(String inputPath, String bucketPrefix) throws IOException {
        BufferedReader reader = new BufferedReader(new FileReader(inputPath));
        String line;
        while ((line = reader.readLine()) != null) {
            // floorMod keeps the bucket index non-negative even if the long hash overflows.
            int bucket = (int) Math.floorMod(hashCode(line), (long) BUCKETS);
            // Open in append mode so earlier lines in the same bucket are not overwritten.
            BufferedWriter writer = new BufferedWriter(
                    new FileWriter(bucketPrefix + bucket + ".txt", true));
            writer.write(line);
            writer.newLine();
            writer.close();
        }
        reader.close();
    }

    /** Same 31-based polynomial hash as String.hashCode(), but accumulated in a long. */
    public long hashCode(String str) {
        long h = 0;
        for (char c : str.toCharArray()) {
            h = 31 * h + c;
        }
        return h;
    }

    /** Compares the two bucket sets pairwise and prints every line present in both files. */
    @Test
    public void a3() throws IOException {
        a1();
        a2();
        for (int i = 0; i < BUCKETS; i++) {
            BufferedReader reader1 = new BufferedReader(new FileReader("D:/aa" + i + ".txt"));
            BufferedReader reader2 = new BufferedReader(new FileReader("D:/bb" + i + ".txt"));

            // Load the hashes of bucket i of aa.txt into memory.
            HashSet<Long> set = new HashSet<>();
            String line1;
            while ((line1 = reader1.readLine()) != null) {
                set.add(hashCode(line1));
            }

            // Any line of bb.txt whose hash is in the set matches a line of aa.txt
            // (up to hash collisions, since only hashes are compared).
            String line2;
            while ((line2 = reader2.readLine()) != null) {
                if (set.contains(hashCode(line2))) {
                    System.out.println(line2);
                }
            }
            reader1.close();
            reader2.close();
        }
    }
}