I recently wrote a socket server in Go. The main logic is to receive data from clients, parse it, and write it into OpenTSDB. After running for a while, memory keeps growing and is never reclaimed by the GC. My initial guess is that the large number of `make([]byte, ...)` calls is what keeps memory from being released. The code is below; can anyone shed some light on this?
```
func StartTCP() error {
	tcp_addr, _ := net.ResolveTCPAddr("tcp4", tcp_listen)
	listener, err := net.ListenTCP("tcp4", tcp_addr)
	if err != nil {
		return err
	}
	defer listener.Close()
	log.Info("start tcp listen on %s", tcp_listen)
	for {
		conn, err := listener.AcceptTCP()
		if err != nil {
			log.Error("accept error %s", err.Error())
			continue
		}
		log.Info("new session create %s", conn.RemoteAddr().String())
		go HandlerConn(conn)
	}
}

func ReadPacket(conn *net.TCPConn) []byte {
	head := make([]byte, 4)
	_, err := io.ReadFull(conn, head)
	if err != nil {
		return nil
	}
	size := binary.BigEndian.Uint32(head)
	data := make([]byte, size)
	_, err = io.ReadFull(conn, data)
	if err != nil {
		return nil
	}
	return data
}

func HandlerConn(conn *net.TCPConn) {
	addr := conn.RemoteAddr().String()
	defer func() {
		if err := conn.Close(); err != nil {
			log.Error("close session error %s", err.Error())
		} else {
			log.Info("session closed %s", addr)
		}
	}()
	data := ReadPacket(conn)
	if len(data) == 0 {
		log.Warn("empty data")
		return
	}
	go HandlerMsg(conn, data, t)
}
```
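(For reference, a heap profile like the ones below can be collected via the standard net/http/pprof endpoint. This is only a sketch of one possible setup; the original post does not say how the profile was actually taken, and the side port 6060 is an arbitrary choice.)

```
package main

import (
	"log"
	"net/http"
	_ "net/http/pprof" // registers the /debug/pprof/* handlers on the default mux
)

func main() {
	// In the real server this would run alongside the TCP listener; it is shown
	// standalone here. The heap profile can then be inspected with:
	//   go tool pprof http://localhost:6060/debug/pprof/heap
	log.Println(http.ListenAndServe("localhost:6060", nil))
}
```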
Attached pprof output:
```
(pprof) top
9.92GB of 9.93GB total ( 100%)
Dropped 359 nodes (cum <= 0.05GB)
flat flat% sum% cum cum%
9.92GB 100% 100% 9.92GB 100% main.ReadPacket
0 0% 100% 9.92GB 100% main.HandlerConn
0 0% 100% 9.92GB 100% runtime.goexit
(pprof) list main.ReadPacket
Total: 9.93GB
ROUTINE ======================== main.ReadPacket in /tmp/td-server/tcp_server.go
9.92GB 9.92GB (flat, cum) 100% of Total
. . 30: go HandlerConn(conn)
. . 31: }
. . 32:}
. . 33:
. . 34:func ReadPacket(conn *net.TCPConn) []byte {
. . 35: head := make([]byte, 4)
. . 36: _, err := io.ReadFull(conn, head)
. . 37: if err != nil {
. . 38: return nil
. . 39: }
. . 40: size := binary.BigEndian.Uint32(head)
9.92GB 9.92GB 41: data := make([]byte, size)
. . 42: _, err = io.ReadFull(conn, data)
. . 43: if err != nil {
. . 44: return nil
. . 45: }
. . 46: return data
. . 47:}
```
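The profile points straight at the `data := make([]byte, size)` line: `size` comes verbatim from the 4-byte length header, so every frame allocates a fresh buffer of whatever size the client claims. Purely as an illustration, here is a sketch of a bounded variant of ReadPacket; the `maxPacketSize` cap and the error-returning signature are assumptions, not part of the original protocol:

```
package main

import (
	"encoding/binary"
	"fmt"
	"io"
	"net"
)

// maxPacketSize is an assumed upper bound on a single frame, for illustration only.
const maxPacketSize = 1 << 20 // 1 MB

// ReadPacketBounded is a length-checked variant of ReadPacket.
func ReadPacketBounded(conn *net.TCPConn) ([]byte, error) {
	head := make([]byte, 4)
	if _, err := io.ReadFull(conn, head); err != nil {
		return nil, err
	}
	size := binary.BigEndian.Uint32(head)
	if size == 0 || size > maxPacketSize {
		// A corrupted or hostile length header would otherwise trigger an
		// arbitrarily large allocation below.
		return nil, fmt.Errorf("invalid packet size %d", size)
	}
	data := make([]byte, size)
	if _, err := io.ReadFull(conn, data); err != nil {
		return nil, err
	}
	return data, nil
}
```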
-----------------------------------------------------------------------------------
After the last restart it has been running stably for a week. The pprof output now shows a completely different memory footprint from before, which is strange.
Latest pprof:
```
(pprof) top
3201.70kB of 3201.70kB total ( 100%)
Dropped 362 nodes (cum <= 16.01kB)
Showing top 10 nodes out of 19 (cum >= 1536.14kB)
flat flat% sum% cum cum%
1024.11kB 31.99% 31.99% 1024.11kB 31.99% main.(*Device).getIndex
641.34kB 20.03% 52.02% 641.34kB 20.03% main.init_config
512.14kB 16.00% 68.01% 512.14kB 16.00% mcommoninit
512.08kB 15.99% 84.01% 512.08kB 15.99% database/sql.(*DB).addDepLocked
512.03kB 15.99% 100% 512.03kB 15.99% bufio.(*Reader).ReadString
0 0% 100% 512.08kB 15.99% database/sql.(*DB).Query
0 0% 100% 512.08kB 15.99% database/sql.(*DB).QueryRow
0 0% 100% 512.08kB 15.99% database/sql.(*DB).conn
0 0% 100% 512.08kB 15.99% database/sql.(*DB).query
0 0% 100% 1536.14kB 47.98% main.(*Device).CollectHW
(pprof)
```
You could also take a look at these resource pools: https://github.com/youtube/vitess/tree/master/go/pools, or https://github.com/taruti/pool and https://github.com/stretchr/hoard
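In the same spirit, below is a minimal sketch of buffer reuse with the standard library's sync.Pool rather than the libraries linked above (their APIs are not shown here); maxPacketSize, bufPool, ReadPacketPooled and PutBuffer are all illustrative names and assumptions:

```
package main

import (
	"encoding/binary"
	"fmt"
	"io"
	"net"
	"sync"
)

// Assumed cap on a single frame, for illustration only.
const maxPacketSize = 1 << 20

// bufPool hands out reusable maxPacketSize-byte buffers so each packet read
// does not allocate a fresh slice.
var bufPool = sync.Pool{
	New: func() interface{} { return make([]byte, maxPacketSize) },
}

// ReadPacketPooled reads one length-prefixed frame into a pooled buffer.
// The caller must hand the returned slice back via PutBuffer when done.
func ReadPacketPooled(conn *net.TCPConn) ([]byte, error) {
	head := make([]byte, 4)
	if _, err := io.ReadFull(conn, head); err != nil {
		return nil, err
	}
	size := binary.BigEndian.Uint32(head)
	if size == 0 || size > maxPacketSize {
		return nil, fmt.Errorf("invalid packet size %d", size)
	}
	buf := bufPool.Get().([]byte)
	data := buf[:size]
	if _, err := io.ReadFull(conn, data); err != nil {
		bufPool.Put(buf)
		return nil, err
	}
	return data, nil
}

// PutBuffer returns a slice obtained from ReadPacketPooled to the pool.
func PutBuffer(data []byte) {
	bufPool.Put(data[:cap(data)])
}
```

The trade-off is that buffer ownership moves to the caller: whatever consumes the data (HandlerMsg in the original code) has to call PutBuffer once it is finished with the slice, otherwise the pool gains nothing.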