概述
广告,敏感词检测一直以来都是让人头疼的话题,仅仅通过添加敏感词列表是解决不了问题的。今天封禁了这个词,明天又会有新的违禁词冒出来,比起愚公无穷尽的子孙更甚。
敏感词匹配这种治标不治本的方法,在一定的语义下蛮有效的,但是这个场景对高并发,访问QPS高的服务来说,不是很合适。前段时间看到垃圾邮件检测用到的贝叶斯分类算法,这种“半学习”形式的方法的准确度依赖于先验概率的准确性,而公司长期以来整理到的违禁词列表就是一个很好的源,随着贝叶斯分类的数据越来越多,分类的准确性也会越来越高,后期仅仅需要对违禁词文件进行添加即可,方便又准确。
PHP做贝叶斯分类不能很好的利用内存,针对每一个请求都会创建一个进程,各个请求相互独立,所以每个请求都会重新来一遍贝叶斯分类数据集构建,这效率可想而知,因此不打算用PHP去实现。
go语言一直以来以快著称,就用它吧。那么问题又来了,怎么让go作为PHP的后端实现这个检测服务呢。进程间的数据通信通常来讲有这么几种方式:
- http
- rpc
- unix domain socket
- pipe
看完了 https://blog.csdn.net/lengyuezuixue/article/details/79314987 这篇文章后,决定采用unix domain socket的形式,毕竟NGINX和php-fpm之间的通信都是这么搞起来的,效率应该还不赖。
实现
golang 后端
package main
import (
"src/github.com/ajph/nbclassifier-go"
"log"
"os"
"bufio"
"io"
"net"
"syscall"
"fmt"
"src/github.com/yanyiwu/gojieba"
"strings"
)
// SPAM_CHECK_SOCKET_FILE is the unix domain socket path the Go service
// listens on; the PHP client connects to the same path.
const SPAM_CHECK_SOCKET_FILE = "/tmp/spamcheck.sock"
// Simple naive-Bayes spam classification implemented in Go.
func getWords(filepath string)[]string {
file, err := os.Open(filepath)
if err != nil {
log.Fatal(err)
}
defer file.Close()
reader := bufio.NewReader(file)
ret := []string{}
for {
line, err := reader.ReadString('\n')
if err != nil || io.EOF == err {
if line == "" {
break
}
}
line = strings.Trim(line, "\n")
fmt.Println("处理单词:" + line)
ret = append(ret, line)
}
return ret
}
// learn builds the naive-Bayes model from the three word-list files
// (normal / forbidden / terror) and persists it to materiel.json.
func learn() {
	classes := []struct {
		id, wordFile string
	}{
		{"normal", "normalwords.txt"},
		{"forbidden", "forbiddenwords.txt"},
		{"terror", "terrorwords.txt"},
	}

	m := nbclassifier.New()
	for _, c := range classes {
		m.NewClass(c.id)
		m.Learn(c.id, getWords(c.wordFile)...)
	}
	m.SaveToFile("materiel.json")
}
// reloadModel loads the persisted naive-Bayes model from materiel.json.
// The load error was previously discarded, which let a nil model flow
// into match() and crash there; fail fast here instead.
func reloadModel() *nbclassifier.Model {
	model, err := nbclassifier.LoadFromFile("materiel.json")
	if err != nil {
		log.Fatal(err)
	}
	return model
}
// match segments content with jieba, classifies the words with model, and
// returns the class id. It returns "normal" when the classifier is unsure
// or when classification fails.
//
// NOTE(review): gojieba.NewJieba is created and freed on every call, which
// presumably reloads dictionaries each time — consider reusing a single
// instance if this becomes a hot path (confirm gojieba's init cost).
func match(model *nbclassifier.Model, content string) string {
	// 分词 — word segmentation before classification.
	jieba := gojieba.NewJieba()
	defer jieba.Free()
	words := jieba.Cut(content, true)

	cls, unsure, err := model.Classify(words...)
	if err != nil {
		// Classification error was previously ignored; fall back to the
		// safe default rather than risk dereferencing a bad result.
		log.Println(err)
		return "normal"
	}
	fmt.Println("检测到分类为:" + cls.Id)
	result := "normal"
	if !unsure {
		result = cls.Id
		fmt.Println(cls, unsure)
	}
	return result
}
func run() {
socket, _ := net.Listen("unix", SPAM_CHECK_SOCKET_FILE)
defer syscall.Unlink(SPAM_CHECK_SOCKET_FILE)
learn()
// 训练物料
model := reloadModel()
for {
client, _ := socket.Accept()
buf := make([]byte, 1024)
datalength, _ := client.Read(buf)
data := buf[:datalength]
fmt.Println("client msg:" + string(data))
checkret := match(model, string(data))
fmt.Println("check result: " + checkret)
response := []byte("")
if len(checkret) > 0 {
response = []byte(checkret)
}
_,_ = client.Write(response)
}
}
// main starts the unix-socket spam-check service; run() blocks forever.
func main() {
	// Start the socket listener and the detection service.
	run()
	//fmt.Println(reloadModel())
}
php 前端
<?php
// Client side: send a text payload to the Go spam-check service over the
// unix domain socket and print the classification result.
$msg = "你说谎, 你放屁,你这个傻子";
$SOCKET_FILE = "/tmp/spamcheck.sock";

$socket = socket_create(AF_UNIX, SOCK_STREAM, 0);
if ($socket === false) {
    // Creation failure previously went unchecked and cascaded into warnings.
    die("socket_create failed: " . socket_strerror(socket_last_error()) . "\n");
}
if (socket_connect($socket, $SOCKET_FILE) === false) {
    die("socket_connect failed: " . socket_strerror(socket_last_error($socket)) . "\n");
}
socket_send($socket, $msg, strlen($msg), 0);
$response = socket_read($socket, 1024);
socket_close($socket);
var_dump($response);
测试
总结整理
目前看起来,sock的形式还有蛮多局限性的。目前只是单机,后续可以考虑继续进行优化。先这么着吧,后面应该还要继续跟进...
有疑问加站长微信联系(非本文作者)