Add search module: top-10 hot searches, personal search history, add search record, delete search record, sensitive-word replacement; Redis configuration not added yet

This commit is contained in:
cyk
2023-12-19 21:35:57 +08:00
parent 0721107407
commit 9773f42df7
11 changed files with 1612 additions and 1 deletion

View File

@@ -0,0 +1,33 @@
package com.lovenav.configuration;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.JdkSerializationRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;

@Configuration
public class RedisConfig {

    @Autowired
    RedisConnectionFactory redisConnectionFactory;

    @Bean
    public RedisTemplate<String, Object> functionDomainRedisTemplate() {
        RedisTemplate<String, Object> redisTemplate = new RedisTemplate<String, Object>();
        initDomainRedisTemplate(redisTemplate, redisConnectionFactory);
        return redisTemplate;
    }
    /*
     * Set the serialization strategy for the RedisTemplate; otherwise keys and values
     * written through it show up garbled when inspected from a Redis client.
     */
    private void initDomainRedisTemplate(RedisTemplate<String, Object> redisTemplate, RedisConnectionFactory factory) {
        redisTemplate.setKeySerializer(new StringRedisSerializer());
        redisTemplate.setHashKeySerializer(new StringRedisSerializer());
        redisTemplate.setHashValueSerializer(new JdkSerializationRedisSerializer());
        redisTemplate.setValueSerializer(new JdkSerializationRedisSerializer());
        redisTemplate.setConnectionFactory(factory);
    }
}
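
The search features named in the commit message (top-10 hot searches, per-user search history) live in the other changed files, which are not shown in this excerpt. As a minimal sketch only, the RedisTemplate configured above could back a hot-search ranking with a Redis sorted set along the following lines; the service name SearchRecordService and the key search:hot are assumptions, not identifiers taken from this commit.

package com.lovenav.service;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;

import java.util.Set;

// Hypothetical usage sketch (not part of this commit): record searches and read the
// top-10 hot keywords via a Redis sorted set, using the RedisTemplate bean above.
@Service
public class SearchRecordService {

    private static final String HOT_SEARCH_KEY = "search:hot";   // assumed key name

    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    // Increase the keyword's score by 1 each time it is searched.
    public void recordSearch(String keyword) {
        redisTemplate.opsForZSet().incrementScore(HOT_SEARCH_KEY, keyword, 1);
    }

    // Return the ten keywords with the highest scores (hot-search top 10).
    public Set<Object> topTenHotSearches() {
        return redisTemplate.opsForZSet().reverseRange(HOT_SEARCH_KEY, 0, 9);
    }
}

Note that the bean above serializes values with JdkSerializationRedisSerializer, so members written this way appear as Java-serialized bytes in the Redis CLI; a string serializer for values would keep them human-readable.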

View File

@@ -0,0 +1,89 @@
package com.lovenav.configuration;

import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

@Configuration
@SuppressWarnings({ "rawtypes", "unchecked" })
public class SensitiveWordInit {
    // Character encoding of the sensitive-word file
    private String ENCODING = "UTF-8";

    // Initialize the sensitive-word dictionary
    public Map initKeyWord() throws IOException {
        // Read the sensitive-word file into a Set
        Set<String> wordSet = readSensitiveWordFile();
        // Load the words into nested HashMaps (a DFA, deterministic finite automaton)
        return addSensitiveWordToHashMap(wordSet);
    }
    // Read the sensitive-word file and collect the words into a Set
    private Set<String> readSensitiveWordFile() throws IOException {
        Set<String> wordSet = null;
        // The word list ships as a classpath resource
        ClassPathResource classPathResource = new ClassPathResource("static/word.txt");
        InputStream inputStream = classPathResource.getInputStream();
        try {
            // Wrap the stream with the configured character encoding
            InputStreamReader read = new InputStreamReader(inputStream, ENCODING);
            wordSet = new HashSet<String>();
            // BufferedReader reads through a buffer, which makes line-by-line reading cheaper
            BufferedReader br = new BufferedReader(read);
            String txt = null;
            // Read the file line by line; each line is one sensitive word
            while ((txt = br.readLine()) != null) {
                wordSet.add(txt);
            }
            br.close();
            // Close the underlying reader
            read.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return wordSet;
    }
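
    // For example, for the two words "bad" and "bat", addSensitiveWordToHashMap below
    // produces nested maps shaped like
    //   { 'b' : { "isEnd"="0", 'a' : { "isEnd"="0", 'd' : { "isEnd"="1" }, 't' : { "isEnd"="1" } } } }
    // i.e. each character keys the next level and "isEnd" marks whether a word ends at that node.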
    // Load the words from the Set into nested HashMaps (the DFA used for matching)
    private Map addSensitiveWordToHashMap(Set<String> wordSet) {
        // Size the root map up front to reduce rehashing
        Map wordMap = new HashMap(wordSet.size());
        for (String word : wordSet) {
            Map nowMap = wordMap;
            for (int i = 0; i < word.length(); i++) {
                // Current character of the word
                char keyChar = word.charAt(i);
                // Look up the sub-map for this character
                Object tempMap = nowMap.get(keyChar);
                // If a node for this character already exists, descend into it
                if (tempMap != null) {
                    nowMap = (Map) tempMap;
                }
                // Otherwise create a new node with isEnd = 0, since no word ends here yet
                else {
                    Map<String, String> newMap = new HashMap<String, String>();
                    newMap.put("isEnd", "0");
                    // Attach the new node and descend into it
                    nowMap.put(keyChar, newMap);
                    nowMap = newMap;
                }
                // Last character of the word: mark this node as a word terminator
                if (i == word.length() - 1) {
                    nowMap.put("isEnd", "1");
                }
            }
        }
        return wordMap;
    }
}
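
The class above only builds the lookup structure; the replacement step named in the commit message is implemented in one of the other changed files, which this excerpt does not include. As a rough sketch, a filter along the following lines could walk the map returned by initKeyWord and mask matched words with '*'; the names SensitiveWordFilter, replaceSensitiveWord and checkSensitiveWord are illustrative, not taken from the commit.

package com.lovenav.configuration;

import java.io.IOException;
import java.util.Map;

// Illustrative sketch of sensitive-word replacement on top of the DFA map built by SensitiveWordInit.
@SuppressWarnings("rawtypes")
public class SensitiveWordFilter {

    private final Map wordMap;

    public SensitiveWordFilter(SensitiveWordInit init) throws IOException {
        this.wordMap = init.initKeyWord();
    }

    // Replace every sensitive word found in txt with the same number of '*' characters.
    public String replaceSensitiveWord(String txt) {
        StringBuilder result = new StringBuilder(txt);
        for (int i = 0; i < txt.length(); i++) {
            int matchLength = checkSensitiveWord(txt, i);
            if (matchLength > 0) {
                for (int j = i; j < i + matchLength; j++) {
                    result.setCharAt(j, '*');
                }
                i = i + matchLength - 1;    // skip past the replaced word
            }
        }
        return result.toString();
    }

    // Return the length of the longest sensitive word starting at beginIndex, or 0 if none matches.
    private int checkSensitiveWord(String txt, int beginIndex) {
        int length = 0;
        int matchedLength = 0;
        Map nowMap = wordMap;
        for (int i = beginIndex; i < txt.length(); i++) {
            Object next = nowMap.get(txt.charAt(i));
            if (next == null) {
                break;                      // no further transition in the DFA
            }
            nowMap = (Map) next;
            length++;
            if ("1".equals(nowMap.get("isEnd"))) {
                matchedLength = length;     // remember the longest match ending here
            }
        }
        return matchedLength;
    }
}

With that sketch, new SensitiveWordFilter(new SensitiveWordInit()).replaceSensitiveWord(text) would return the text with every dictionary word masked by asterisks.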