敏感詞過濾工具
阿新 • 發佈:2018-11-20
SensitivewordEngine.java 敏感詞過濾工具類
package keyFilter;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Sensitive-word filtering engine.
 *
 * <p>Operates on the nested-HashMap character trie built by {@code SensitiveWordInit}:
 * each node maps a {@code Character} to its child node, and carries the String entry
 * {@code "isEnd" -> "1"} when the path from the root to that node spells a complete
 * sensitive word.
 *
 * @author wuke.hwk
 * @since 2018/11/20
 */
public class SensitivewordEngine {

    /**
     * Sensitive-word dictionary (character trie). Must be assigned by the caller
     * (see {@code SensitiveWordInit.initKeyWord}) before any matching method is used.
     */
    @SuppressWarnings("rawtypes")
    public static Map sensitiveWordMap = null;

    /** Match type: stop at the first (shortest) complete sensitive word. */
    public static int minMatchTYpe = 1; // NOTE: typo "TYpe" kept for API compatibility

    /** Match type: greedily match the longest complete sensitive word. */
    public static int maxMatchType = 2;

    /**
     * Number of distinct first characters in the dictionary (top-level trie entries).
     *
     * @return 0 when the dictionary has not been initialized
     */
    public static int getWordSize() {
        if (SensitivewordEngine.sensitiveWordMap == null) {
            return 0;
        }
        return SensitivewordEngine.sensitiveWordMap.size();
    }

    /**
     * Whether the text contains at least one sensitive word.
     *
     * @param txt       text to scan
     * @param matchType {@link #minMatchTYpe} or {@link #maxMatchType}
     * @return true if any sensitive word occurs in {@code txt}
     */
    public static boolean isContaintSensitiveWord(String txt, int matchType) {
        for (int i = 0; i < txt.length(); i++) {
            if (checkSensitiveWord(txt, i, matchType) > 0) {
                return true; // FIX: early exit — one hit is enough; no need to scan the rest
            }
        }
        return false;
    }

    /**
     * Collect every sensitive word occurring in the text.
     *
     * @param txt       text to scan
     * @param matchType {@link #minMatchTYpe} or {@link #maxMatchType}
     * @return the distinct sensitive words found
     */
    public static Set<String> getSensitiveWord(String txt, int matchType) {
        Set<String> sensitiveWordList = new HashSet<String>();
        for (int i = 0; i < txt.length(); i++) {
            int length = checkSensitiveWord(txt, i, matchType);
            if (length > 0) {
                // Store the detected word and resume scanning after it.
                sensitiveWordList.add(txt.substring(i, i + length));
                i = i + length - 1;
            }
        }
        return sensitiveWordList;
    }

    /**
     * Replace every sensitive word in the text with the replacement character,
     * repeated to match the word's length.
     *
     * @param txt         text to filter
     * @param matchType   {@link #minMatchTYpe} or {@link #maxMatchType}
     * @param replaceChar replacement string used once per masked character
     * @return the masked text
     */
    public static String replaceSensitiveWord(String txt, int matchType, String replaceChar) {
        String resultTxt = txt;
        for (String word : getSensitiveWord(txt, matchType)) {
            String replaceString = getReplaceChars(replaceChar, word.length());
            // FIX: use replace() (literal) instead of replaceAll() (regex) — the
            // original threw PatternSyntaxException / misbehaved for sensitive
            // words containing metacharacters such as '(', '*', '['.
            resultTxt = resultTxt.replace(word, replaceString);
        }
        return resultTxt;
    }

    /**
     * Build the mask string: {@code replaceChar} repeated {@code length} times.
     * Uses StringBuilder to avoid O(n^2) string concatenation.
     */
    private static String getReplaceChars(String replaceChar, int length) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < length; i++) {
            sb.append(replaceChar);
        }
        return sb.toString();
    }

    /**
     * Length of the sensitive word starting at {@code beginIndex}, or 0 if none starts there.
     *
     * @param txt        text to scan
     * @param beginIndex index at which the candidate word must start
     * @param matchType  {@link #minMatchTYpe} stops at the first complete word,
     *                   {@link #maxMatchType} keeps extending to the longest one
     * @return length of the matched word, 0 when no complete word starts here
     */
    @SuppressWarnings("rawtypes")
    public static int checkSensitiveWord(String txt, int beginIndex, int matchType) {
        Map nowMap = SensitivewordEngine.sensitiveWordMap;
        if (nowMap == null) {
            return 0; // FIX: guard against an uninitialized dictionary (was NPE)
        }
        int matchLength = 0;   // characters walked down the trie so far
        int lastFullMatch = 0; // length of the last COMPLETE word seen on this path
        for (int i = beginIndex; i < txt.length(); i++) {
            char word = txt.charAt(i);
            // Descend one trie level; null means no word continues with this char.
            nowMap = (Map) nowMap.get(word);
            if (nowMap == null) {
                break;
            }
            matchLength++;
            if ("1".equals(nowMap.get("isEnd"))) {
                lastFullMatch = matchLength;
                if (SensitivewordEngine.minMatchTYpe == matchType) {
                    break; // shortest-match mode: stop at the first complete word
                }
            }
        }
        // FIX: return the length of the last complete word, not the raw trie depth.
        // The original returned the path length in max mode, so with dictionary
        // {"a","abc"} and text "ab" it reported 2 and getSensitiveWord() extracted
        // the non-word "ab".
        return lastFullMatch;
    }
}
SensitiveWordInit.java 敏感詞庫初始化
package keyFilter;

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Builds the sensitive-word dictionary used by {@code SensitivewordEngine}.
 *
 * <p>The dictionary is a character trie of nested HashMaps: each node maps a
 * {@code Character} to its child node and carries {@code "isEnd" -> "1"} when
 * the path to it spells a complete sensitive word, {@code "0"} otherwise.
 *
 * @author wuke.hwk
 * @since 2018/11/20
 */
public class SensitiveWordInit {

    /** The dictionary trie built by {@link #initKeyWord(List)}. */
    @SuppressWarnings("rawtypes")
    public HashMap sensitiveWordMap;

    /**
     * Initialize the dictionary from a list of sensitive words.
     *
     * @param sensitiveWords raw words (whitespace is trimmed, duplicates collapse)
     * @return the built trie; may be null/partial if building failed
     */
    @SuppressWarnings("rawtypes")
    public Map initKeyWord(List<String> sensitiveWords) {
        try {
            // De-duplicate and trim the incoming words before insertion.
            Set<String> keyWordSet = new HashSet<String>();
            for (String s : sensitiveWords) {
                keyWordSet.add(s.trim());
            }
            addSensitiveWordToHashMap(keyWordSet);
        } catch (Exception e) {
            // NOTE(review): swallowing here hands the caller a null/partial map;
            // consider rethrowing. Kept as-is for backward compatibility.
            e.printStackTrace();
        }
        return sensitiveWordMap;
    }

    /**
     * Insert every word into the trie, one character per level, marking the
     * final character of each word with {@code "isEnd" = "1"}.
     *
     * @param keyWordSet de-duplicated sensitive words
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    private void addSensitiveWordToHashMap(Set<String> keyWordSet) {
        // Pre-size the root map to the number of distinct words (upper bound
        // on distinct first characters).
        sensitiveWordMap = new HashMap(keyWordSet.size());
        for (String key : keyWordSet) {
            // nowMap aliases nodes inside sensitiveWordMap, so mutations below
            // build the shared trie in place.
            Map nowMap = sensitiveWordMap;
            for (int i = 0; i < key.length(); i++) {
                char keyChar = key.charAt(i);
                // Reuse the existing branch for this character, or create it.
                Map wordMap = (Map) nowMap.get(keyChar);
                if (wordMap == null) {
                    wordMap = new HashMap<String, String>();
                    wordMap.put("isEnd", "0");
                    nowMap.put(keyChar, wordMap);
                }
                nowMap = wordMap;
                // Mark the last character of the word as a terminal node.
                if (i == key.length() - 1) {
                    nowMap.put("isEnd", "1");
                }
            }
            // FIX: removed the per-character (and per-word) System.out.println
            // of the ENTIRE dictionary map — with a large word list (the demo
            // uses 100 000 words) it made initialization unusably slow and
            // flooded stdout.
        }
    }
}
Test.java 測試
package keyFilter;

import com.alibaba.fastjson.JSON;
import textFilter.WordFilter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Benchmark driver: builds a large dictionary, then compares three lookup
 * strategies on the same input text — the trie engine, a naive
 * {@code List.contains}, and the third-party {@code WordFilter}.
 *
 * @author wuke.hwk
 * @since 2018/11/19
 */
public class Test {

    public static void main(String[] args) {
        sensitiveWordFiltering("中國");
        sensitiveWordFiltering("大");
    }

    /**
     * Run all three filters against {@code text}, printing the elapsed time
     * and result of each.
     *
     * @param text text to check
     * @return the sensitive words the trie engine found in {@code text}
     */
    public static Set<String> sensitiveWordFiltering(String text) {
        SensitiveWordInit sensitiveWordInit = new SensitiveWordInit();

        // Bulk up the dictionary with 100 000 random 5-letter lowercase words
        // (stands in for a real word list loaded from the DAO layer).
        List<String> sensitiveWords = new ArrayList<String>();
        for (int i = 0; i < 100000; i++) {
            StringBuilder word = new StringBuilder();
            for (int z = 0; z < 5; z++) {
                word.append((char) (Math.random() * 26 + 'a'));
            }
            sensitiveWords.add(word.toString());
        }
        sensitiveWords.add("acc");
        sensitiveWords.add("w何北");
        sensitiveWords.add("中國");
        sensitiveWords.add("大");
        sensitiveWords.add("王在");

        // Build the trie and hand it to the engine.
        Map sensitiveWordMap = sensitiveWordInit.initKeyWord(sensitiveWords);
        SensitivewordEngine.sensitiveWordMap = sensitiveWordMap;

        // 1) Trie engine; match type 2 = collect every sensitive word.
        long t0 = System.currentTimeMillis();
        Set<String> set = SensitivewordEngine.getSensitiveWord(text, 2);
        long t1 = System.currentTimeMillis();
        System.out.println("filterTime:" + (t1 - t0) + "|result:[" + JSON.toJSONString(set) + "]");

        // 2) Naive linear scan over the raw word list.
        t0 = System.currentTimeMillis();
        boolean a = sensitiveWords.contains(text);
        t1 = System.currentTimeMillis();
        System.out.println("contentTime:" + (t1 - t0) + "|result:[" + a + "]");

        // 3) Third-party WordFilter (initialized outside the timed section).
        WordFilter.init();
        t0 = System.currentTimeMillis();
        String r = WordFilter.doFilter(text);
        t1 = System.currentTimeMillis();
        System.out.println("wordFilter:" + (t1 - t0) + "|result:[" + JSON.toJSONString(r) + "]");

        return set;
    }
}