feat: 添加mq,将消息发送至mq
parent
309bacad83
commit
68461c4d13
@ -1,11 +0,0 @@
|
||||
package bilibili
|
||||
|
||||
import "time"
|
||||
|
||||
// Config holds the settings for one Bilibili live-room connection.
type Config struct {
	Url               string        // danmaku (chat) server URL
	GetRoomUrl        string        // URL used to fetch room information
	RoomId            int64         // ID of the room to connect to
	UserId            int64         // user ID used for the connection; 0 means a random one is generated
	HeartbeatInterval time.Duration // heartbeat interval, in seconds
}
|
@ -1,15 +1,59 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"live-gateway/bilibili"
|
||||
"live-gateway/logger"
|
||||
"fmt"
|
||||
c "github.com/gookit/config/v2"
|
||||
"github.com/gookit/config/v2/yaml"
|
||||
"live-gateway/pkg/logger"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
Bilibili bilibili.Config
|
||||
// Log 日志配置
|
||||
Log struct {
|
||||
File logger.FileConfig
|
||||
Console logger.ConsoleConfig
|
||||
var (
	// b is the absolute path of this source file, captured at build time
	// via runtime.Caller so paths can be resolved relative to the project.
	_, b, _, _ = runtime.Caller(0)
	// Root is the project root directory, one level above this file's directory.
	Root = filepath.Join(filepath.Dir(b), "../")
)

// Config is the process-wide configuration, populated from config.yml in init.
var Config config
||||
type (
	// Kafka describes how to reach one Kafka topic.
	Kafka struct {
		Addr  []string // broker addresses
		Topic string   // topic name
	}
	// config is the schema that config.yml is bound into.
	config struct {
		Bilibili struct {
			Url               string        // danmaku server URL
			GetRoomUrl        string        // URL used to fetch room information
			RoomId            int64         // ID of the room to connect to
			UserId            int64         // user ID for the connection; 0 means a random one is generated
			HeartbeatInterval time.Duration // heartbeat interval, in seconds
		}
		// Log holds the logging configuration.
		Log struct {
			File    logger.FileConfig
			Console logger.ConsoleConfig
		}
		// Kafka holds the message-queue configuration.
		Kafka struct {
			Danmaku Kafka // queue carrying danmaku messages
		}
	}
)
|
||||
|
||||
func init() {
|
||||
var err error
|
||||
c.AddDriver(yaml.Driver)
|
||||
|
||||
err = c.LoadFiles(Root + "/config.yml")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
err = c.BindStruct("", &Config)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
fmt.Printf("%+v\n", Config)
|
||||
}
|
||||
|
@ -1,61 +0,0 @@
|
||||
package mq
|
||||
|
||||
import (
|
||||
"github.com/Shopify/sarama"
|
||||
"live-gateway/logger"
|
||||
)
|
||||
|
||||
// Producer wraps both a synchronous and an asynchronous sarama Kafka producer.
type Producer struct {
	producer      sarama.SyncProducer  // used for blocking sends
	asyncProducer sarama.AsyncProducer // used for fire-and-forget sends
}
|
||||
|
||||
func NewProducer() *Producer {
|
||||
result := &Producer{}
|
||||
|
||||
config := sarama.NewConfig()
|
||||
config.Producer.RequiredAcks = sarama.WaitForAll // 等待所有follower都回复ack,确保kafka不丢消息
|
||||
config.Producer.Return.Successes = true
|
||||
config.Producer.Partitioner = sarama.NewHashPartitioner // 对key进行hash,同样的key落到同样的partition,保证个人消息有序性
|
||||
|
||||
var err error
|
||||
client, err := sarama.NewClient([]string{"127.0.0.1:9093"}, config)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// 暂时使用同步producer
|
||||
|
||||
result.producer, err = sarama.NewSyncProducerFromClient(client)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
result.asyncProducer, err = sarama.NewAsyncProducerFromClient(client)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func (p *Producer) SendDanmaku(uid, msg string) {
|
||||
p.SendMessageSync("danmaku", uid, msg)
|
||||
}
|
||||
|
||||
func (p *Producer) SendMessageAsync(topic, uid string, msg string) {
|
||||
p.asyncProducer.Input()
|
||||
}
|
||||
|
||||
func (p *Producer) SendMessageSync(topic, uid string, msg string) {
|
||||
partition, offset, err := p.producer.SendMessage(&sarama.ProducerMessage{
|
||||
Topic: topic,
|
||||
Key: sarama.StringEncoder(uid),
|
||||
Value: sarama.StringEncoder(msg),
|
||||
})
|
||||
if err != nil {
|
||||
logger.SLog.Error("err", err)
|
||||
return
|
||||
}
|
||||
logger.SLog.Debug("success partition:", partition, "offset", offset)
|
||||
}
|
@ -0,0 +1,179 @@
|
||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// versions:
|
||||
// protoc-gen-go v1.28.0
|
||||
// protoc v3.19.4
|
||||
// source: danmaku.proto
|
||||
|
||||
package pb
|
||||
|
||||
import (
|
||||
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
|
||||
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
|
||||
reflect "reflect"
|
||||
sync "sync"
|
||||
)
|
||||
|
||||
const (
|
||||
// Verify that this generated code is sufficiently up-to-date.
|
||||
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
|
||||
// Verify that runtime/protoimpl is sufficiently up-to-date.
|
||||
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
|
||||
)
|
||||
|
||||
// Danmaku is the generated protobuf message for one chat (danmaku) message.
// NOTE(review): generated by protoc-gen-go — regenerate from danmaku.proto
// rather than editing by hand.
type Danmaku struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Platform string `protobuf:"bytes,1,opt,name=platform,proto3" json:"platform,omitempty"`
	Uid      int64  `protobuf:"varint,2,opt,name=uid,proto3" json:"uid,omitempty"`
	Uname    string `protobuf:"bytes,3,opt,name=uname,proto3" json:"uname,omitempty"`
	Content  string `protobuf:"bytes,4,opt,name=content,proto3" json:"content,omitempty"`
	SendTime int64  `protobuf:"varint,5,opt,name=sendTime,proto3" json:"sendTime,omitempty"`
}
|
||||
|
||||
// NOTE(review): generated protobuf boilerplate — do not edit by hand.

// Reset clears the message to its zero state.
func (x *Danmaku) Reset() {
	*x = Danmaku{}
	if protoimpl.UnsafeEnabled {
		mi := &file_danmaku_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the protobuf text format.
func (x *Danmaku) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Danmaku) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *Danmaku) ProtoReflect() protoreflect.Message {
	mi := &file_danmaku_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Danmaku.ProtoReflect.Descriptor instead.
func (*Danmaku) Descriptor() ([]byte, []int) {
	return file_danmaku_proto_rawDescGZIP(), []int{0}
}
|
||||
|
||||
// Generated accessors: each getter is nil-safe and returns the field's
// zero value when the receiver is nil.

func (x *Danmaku) GetPlatform() string {
	if x != nil {
		return x.Platform
	}
	return ""
}

func (x *Danmaku) GetUid() int64 {
	if x != nil {
		return x.Uid
	}
	return 0
}

func (x *Danmaku) GetUname() string {
	if x != nil {
		return x.Uname
	}
	return ""
}

func (x *Danmaku) GetContent() string {
	if x != nil {
		return x.Content
	}
	return ""
}

func (x *Danmaku) GetSendTime() int64 {
	if x != nil {
		return x.SendTime
	}
	return 0
}
|
||||
|
||||
// File_danmaku_proto is the compiled descriptor for danmaku.proto.
var File_danmaku_proto protoreflect.FileDescriptor

// file_danmaku_proto_rawDesc is the wire-format file descriptor embedded by
// the generator; the byte values must not be modified by hand.
var file_danmaku_proto_rawDesc = []byte{
	0x0a, 0x0d, 0x64, 0x61, 0x6e, 0x6d, 0x61, 0x6b, 0x75, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12,
	0x02, 0x70, 0x62, 0x22, 0x83, 0x01, 0x0a, 0x07, 0x44, 0x61, 0x6e, 0x6d, 0x61, 0x6b, 0x75, 0x12,
	0x1a, 0x0a, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28,
	0x09, 0x52, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x12, 0x10, 0x0a, 0x03, 0x75,
	0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x03, 0x75, 0x69, 0x64, 0x12, 0x14, 0x0a,
	0x05, 0x75, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x75, 0x6e,
	0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x04,
	0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a,
	0x08, 0x73, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52,
	0x08, 0x73, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x42, 0x05, 0x5a, 0x03, 0x2f, 0x70, 0x62,
	0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}

var (
	file_danmaku_proto_rawDescOnce sync.Once
	file_danmaku_proto_rawDescData = file_danmaku_proto_rawDesc
)

// file_danmaku_proto_rawDescGZIP gzip-compresses the raw descriptor exactly
// once (guarded by sync.Once) and returns the compressed bytes.
func file_danmaku_proto_rawDescGZIP() []byte {
	file_danmaku_proto_rawDescOnce.Do(func() {
		file_danmaku_proto_rawDescData = protoimpl.X.CompressGZIP(file_danmaku_proto_rawDescData)
	})
	return file_danmaku_proto_rawDescData
}

var file_danmaku_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_danmaku_proto_goTypes = []interface{}{
	(*Danmaku)(nil), // 0: pb.Danmaku
}
var file_danmaku_proto_depIdxs = []int32{
	0, // [0:0] is the sub-list for method output_type
	0, // [0:0] is the sub-list for method input_type
	0, // [0:0] is the sub-list for extension type_name
	0, // [0:0] is the sub-list for extension extendee
	0, // [0:0] is the sub-list for field type_name
}
|
||||
|
||||
func init() { file_danmaku_proto_init() }

// file_danmaku_proto_init registers the file's types with the protobuf
// runtime. Generated code; safe to call multiple times (idempotent via the
// File_danmaku_proto nil check).
func file_danmaku_proto_init() {
	if File_danmaku_proto != nil {
		return
	}
	if !protoimpl.UnsafeEnabled {
		file_danmaku_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*Danmaku); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_danmaku_proto_rawDesc,
			NumEnums:      0,
			NumMessages:   1,
			NumExtensions: 0,
			NumServices:   0,
		},
		GoTypes:           file_danmaku_proto_goTypes,
		DependencyIndexes: file_danmaku_proto_depIdxs,
		MessageInfos:      file_danmaku_proto_msgTypes,
	}.Build()
	File_danmaku_proto = out.File
	file_danmaku_proto_rawDesc = nil
	file_danmaku_proto_goTypes = nil
	file_danmaku_proto_depIdxs = nil
}
|
@ -0,0 +1,13 @@
|
||||
syntax = "proto3";

package pb;

option go_package = "/pb";

// Danmaku is a single chat (danmaku) message forwarded from a live platform.
message Danmaku {
  string platform = 1; // source platform identifier
  int64 uid = 2;       // sender user id
  string uname = 3;    // sender user name
  string content = 4;  // message text
  int64 sendTime = 5;  // send timestamp — NOTE(review): unit (s vs ms) not established here; confirm with producers
}
|
@ -0,0 +1 @@
|
||||
# Generate Go protobuf and gRPC stubs (without the UnimplementedServer
# embedding requirement) from every .proto file in this directory.
protoc --go_opt=paths=source_relative --go-grpc_opt=paths=source_relative --go-grpc_opt=require_unimplemented_servers=false --go_out=. --go-grpc_out=. --proto_path=. *.proto
|
@ -0,0 +1,41 @@
|
||||
package kafka
|
||||
|
||||
import (
|
||||
"github.com/Shopify/sarama"
|
||||
"live-gateway/pkg/logger"
|
||||
)
|
||||
|
||||
// Consumer bundles a sarama client and consumer for a single topic,
// together with the topic's partition list resolved at construction time.
type Consumer struct {
	client     sarama.Client
	topic      string
	consumer   sarama.Consumer
	partitions []int32 // all partition IDs of topic, fetched in NewKafkaConsumer
}
|
||||
|
||||
func NewKafkaConsumer(addr []string, topic string) (*Consumer, error) {
|
||||
p := Consumer{}
|
||||
p.topic = topic
|
||||
|
||||
config := sarama.NewConfig()
|
||||
config.Version = sarama.V3_1_0_0
|
||||
config.Consumer.Offsets.Initial = sarama.OffsetNewest
|
||||
|
||||
var err error
|
||||
p.client, err = sarama.NewClient(addr, config)
|
||||
if err != nil {
|
||||
logger.SLog.Error("new kafka client err:", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p.consumer, err = sarama.NewConsumerFromClient(p.client)
|
||||
if err != nil {
|
||||
logger.SLog.Error("new kafka consumer err:", err)
|
||||
return nil, err
|
||||
}
|
||||
p.partitions, err = p.consumer.Partitions(topic)
|
||||
if err != nil {
|
||||
logger.SLog.Errorf("get partitions for topic %s err", topic)
|
||||
return nil, err
|
||||
}
|
||||
return &p, nil
|
||||
}
|
@ -0,0 +1,47 @@
|
||||
package kafka
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/Shopify/sarama"
|
||||
"live-gateway/pkg/logger"
|
||||
)
|
||||
|
||||
// ConsumerGroup embeds a sarama consumer group and remembers the group ID
// and topic list it was created for.
type ConsumerGroup struct {
	sarama.ConsumerGroup
	groupId string   // consumer-group identifier
	topics  []string // topics this group consumes
}
|
||||
|
||||
// ConsumerGroupConfig carries the sarama settings needed to build a
// consumer group.
type ConsumerGroupConfig struct {
	KafkaVersion   sarama.KafkaVersion // broker protocol version
	OffsetsInitial int64               // initial offset policy (e.g. sarama.OffsetNewest)
	IsReturnErr    bool                // whether consume errors are delivered on the Errors channel
}
|
||||
|
||||
func NewConsumerGroup(config *ConsumerGroupConfig, addr, topics []string, groupId string) (*ConsumerGroup, error) {
|
||||
c := sarama.NewConfig()
|
||||
c.Version = config.KafkaVersion
|
||||
c.Consumer.Offsets.Initial = config.OffsetsInitial
|
||||
c.Consumer.Return.Errors = config.IsReturnErr
|
||||
|
||||
client, err := sarama.NewClient(addr, c)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
consumerGroup, err := sarama.NewConsumerGroupFromClient(groupId, client)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &ConsumerGroup{consumerGroup, groupId, topics}, nil
|
||||
}
|
||||
|
||||
func (cg *ConsumerGroup) RegisterHandlerAndConsumer(handler sarama.ConsumerGroupHandler) {
|
||||
ctx := context.Background()
|
||||
for {
|
||||
err := cg.ConsumerGroup.Consume(ctx, cg.topics, handler)
|
||||
if err != nil {
|
||||
logger.SLog.Error("RegisterHandlerAndConsumer error: ", err)
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,62 @@
|
||||
package kafka
|
||||
|
||||
import (
|
||||
"github.com/Shopify/sarama"
|
||||
"google.golang.org/protobuf/proto"
|
||||
"live-gateway/pkg/logger"
|
||||
)
|
||||
|
||||
// Producer wraps an asynchronous sarama producer bound to a single topic.
type Producer struct {
	topic    string               // topic all messages are published to
	client   sarama.Client        // underlying broker client
	producer sarama.AsyncProducer // async producer built from client
}
|
||||
|
||||
func NewKafkaProducer(addr []string, topic string) *Producer {
|
||||
p := Producer{}
|
||||
|
||||
config := sarama.NewConfig() //Instantiate a sarama Config
|
||||
config.Producer.Return.Successes = true //Whether to enable the successes channel to be notified after the message is sent successfully
|
||||
config.Producer.RequiredAcks = sarama.WaitForAll //Set producer Message Reply level 0 1 all
|
||||
config.Producer.Partitioner = sarama.NewHashPartitioner //Set the hash-key automatic hash partition. When sending a message, you must specify the key value of the message. If there is no key, the partition will be selected randomly
|
||||
|
||||
p.topic = topic
|
||||
|
||||
var err error
|
||||
p.client, err = sarama.NewClient(addr, config)
|
||||
if err != nil {
|
||||
logger.SLog.Error("new kafka client err:", err)
|
||||
return &p
|
||||
}
|
||||
p.producer, err = sarama.NewAsyncProducerFromClient(p.client)
|
||||
if err != nil {
|
||||
logger.SLog.Error("new kafka producer err:", err)
|
||||
return &p
|
||||
}
|
||||
|
||||
go func() {
|
||||
for range p.producer.Successes() {
|
||||
}
|
||||
}()
|
||||
|
||||
return &p
|
||||
}
|
||||
|
||||
func (p *Producer) SendMessageAsync(m proto.Message, key ...string) error {
|
||||
kMsg := &sarama.ProducerMessage{}
|
||||
kMsg.Topic = p.topic
|
||||
if len(key) > 0 {
|
||||
kMsg.Key = sarama.StringEncoder(key[0])
|
||||
}
|
||||
bMsg, err := proto.Marshal(m)
|
||||
if err != nil {
|
||||
logger.SLog.Error("proto marshal err:", err)
|
||||
return err
|
||||
}
|
||||
kMsg.Value = sarama.ByteEncoder(bMsg)
|
||||
|
||||
select {
|
||||
case p.producer.Input() <- kMsg:
|
||||
}
|
||||
return nil
|
||||
}
|
Loading…
Reference in New Issue