初始化项目

This commit is contained in:
皮蛋13361098506
2025-01-06 16:01:02 +08:00
commit 1b77f62820
575 changed files with 69193 additions and 0 deletions

View File

@@ -0,0 +1,80 @@
package gameLogMgr
import (
	"fmt"
	"strconv"

	"github.com/Shopify/sarama"

	"goutil/debugUtil"
	"goutil/logUtilPlus"
)
var (
	// producer is the package-wide asynchronous Kafka producer.
	// It is nil until Start succeeds and is released by Stop.
	producer sarama.AsyncProducer
)
// Start creates the package-wide async Kafka producer and launches a
// goroutine that logs delivery errors. It panics if the producer cannot
// be created.
// Parameters:
//
//	brokerList: Kafka broker addresses
//	userId:     SASL user name (may be empty to disable SASL)
//	passWard:   SASL password (may be empty)
//
// Returns: none
func Start(brokerList []string, userId string, passWard string) {
	/*
		acks semantics: RequiredAcks defines what "committed" means for the
		producer. WaitForAll means every in-sync replica must receive the
		message — the strongest guarantee. For game logs WaitForLocal is
		sufficient; game data should use WaitForAll.
		Retry.Max is set high so transient network jitter does not lose
		messages: a producer configured with retries > 0 resends
		automatically on failure.
	*/
	var err error
	config := sarama.NewConfig()
	// BUG FIX: sarama ignores Net.SASL.User/Password unless
	// Net.SASL.Enable is true, so credentials were silently dropped.
	// Enable SASL only when a user name was actually supplied.
	if userId != "" {
		config.Net.SASL.Enable = true
		config.Net.SASL.User = userId
		config.Net.SASL.Password = passWard
	}
	config.Producer.Return.Successes = false
	config.Producer.Return.Errors = true
	config.Producer.Retry.Max = 10
	config.Producer.RequiredAcks = sarama.WaitForLocal
	producer, err = sarama.NewAsyncProducer(brokerList, config)
	if err != nil {
		panic(fmt.Errorf("Kafka Start failed. Error: %v\n", err))
	}
	// Drain the error channel for the life of the producer; Errors()
	// closes on Close(), which also ends this goroutine.
	go func() {
		for err := range producer.Errors() {
			debugUtil.Printf("Send message to kafka failed. Error: %v\n", err.Err)
			logUtilPlus.ErrorLog("Send message to kafka failed. Error: %v\n", err.Err)
		}
	}()
}
// Stop closes the async producer if one was started, logging any error
// encountered while flushing and shutting it down.
func Stop() {
	if producer == nil {
		return
	}
	if err := producer.Close(); err != nil {
		debugUtil.Printf("Stop kafka failed. Error: %v\n", err)
		logUtilPlus.ErrorLog("Stop kafka failed. Error: %v\n", err)
	}
}
// Write publishes one game-log message to Kafka asynchronously.
// The original header documented a "key" parameter that does not exist
// and omitted "topic"; the doc below matches the real signature.
// Parameters:
//
//	topic:         Kafka topic to publish to
//	serverGroupId: game server group id, used as the message key
//	message:       the log payload
//
// Returns: none
func Write(topic string, serverGroupId int32, message string) {
	if producer == nil {
		debugUtil.Printf("Send message to kafka failed. producer is nil")
		logUtilPlus.ErrorLog("Send message to kafka failed. producer is nil")
		return
	}
	msg := &sarama.ProducerMessage{
		Topic: topic,
		// strconv.FormatInt avoids fmt's boxing/reflection on this hot path.
		Key:   sarama.StringEncoder(strconv.FormatInt(int64(serverGroupId), 10)),
		Value: sarama.ByteEncoder(message),
	}
	// Hand off to the async producer; delivery errors surface on the
	// Errors() channel drained by the goroutine started in Start.
	producer.Input() <- msg
}

View File

@@ -0,0 +1,67 @@
package gameLogMgr
import (
"bytes"
"fmt"
"testing"
"time"
"goutil/debugUtil"
"goutil/stringUtil"
"goutil/timeUtil"
)
// TestWrite sends a handful of sample log messages through the producer
// and then shuts it down. Requires the listed brokers to be reachable.
func TestWrite(t *testing.T) {
	debugUtil.SetDebug(true)
	brokers := []string{"10.1.0.202:9092", "10.1.0.204:9092", "10.1.0.205:9092"}
	Start(brokers, "", "")

	const topic = "test2"
	var serverGroupId int32 = 20011
	for n := 0; n < 5; n++ {
		Write(topic, serverGroupId, getGameLog(n))
	}

	// Give the async producer time to flush before closing.
	time.Sleep(5 * time.Second)
	Stop()
}
// BenchmarkWrite measures the cost of enqueueing log messages on the
// async producer. Requires the listed brokers to be reachable.
func BenchmarkWrite(b *testing.B) {
	debugUtil.SetDebug(true)
	const topic = "test2"
	var serverGroupId int32 = 20011
	brokers := []string{"10.1.0.202:9092", "10.1.0.204:9092", "10.1.0.205:9092"}
	Start(brokers, "", "")

	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		Write(topic, serverGroupId, getGameLog(n))
	}
	b.StopTimer()

	Stop()
}
// getGameLog builds one JSON-formatted sample log line for the tests.
// seq is folded into the properties.ServerId field so successive
// messages are distinguishable.
func getGameLog(seq int) string {
	// Capture a single timestamp so #time, Crtime and Crdate always agree,
	// even when this call straddles a second or day boundary (the original
	// called time.Now() three times).
	now := time.Now()

	var buffer bytes.Buffer
	buffer.WriteString("{")
	buffer.WriteString(fmt.Sprintf("\"#account_id\":\"%s\"", "123456789123456789"))
	buffer.WriteString(",")
	buffer.WriteString(fmt.Sprintf("\"#time\":\"%s\"", now.Format("2006-01-02 15:04:05")))
	buffer.WriteString(",")
	buffer.WriteString(fmt.Sprintf("\"#uuid\":\"%s\"", stringUtil.GetNewGUID()))
	buffer.WriteString(",")
	// Constant fragments need no Sprintf (staticcheck S1039).
	buffer.WriteString("\"#event_id\":\"\"")
	buffer.WriteString(",")
	buffer.WriteString("\"#type\":\"track\"")
	buffer.WriteString(",")
	buffer.WriteString("\"#event_name\":\"achievement_change_log\"")
	buffer.WriteString(",")
	buffer.WriteString(fmt.Sprintf("\"properties\":{\"PartnerId\":%d,\"ServerId\":%d,\"Crtime\":\"%s\",\"Crdate\":\"%s\"}", 600021, seq, timeUtil.ToDateTimeString2(now), timeUtil.ToDateString2(now)))
	buffer.WriteString("}")
	return buffer.String()
}

View File

@@ -0,0 +1,14 @@
package gameLogMgr
// GameLog is one game-log record: the server group it belongs to and
// the SQL statement that records it.
type GameLog struct {
	ServerGroupId int32  // id of the game server group
	LogSql        string // log SQL statement
}

// newGameLog builds a GameLog from a server group id and its log SQL.
func newGameLog(serverGroupId int32, logSql string) *GameLog {
	log := GameLog{
		ServerGroupId: serverGroupId,
		LogSql:        logSql,
	}
	return &log
}