lucas
2 months ago
25 changed files with 964 additions and 26 deletions
@ -0,0 +1,129 @@ |
|||||
|
package adaptors |
||||
|
|
||||
|
import ( |
||||
|
"encoding/json" |
||||
|
"fmt" |
||||
|
"goUpload/consumers/AXYraw" |
||||
|
"goUpload/consumers/GZGZM" |
||||
|
"goUpload/dbHelper" |
||||
|
"goUpload/models" |
||||
|
"log" |
||||
|
"math" |
||||
|
"time" |
||||
|
) |
||||
|
|
||||
|
// Adaptor_AXY_LastRAW converts AnxinCloud ("安心云") kafka iota payloads
// into ES device-data records in the GZGZM byte layout.
type Adaptor_AXY_LastRAW struct {
	AXYraw.Info                       // consumer-specific settings (common map + query component)
	Redis *dbHelper.RedisHelper       // cache used to resolve device / thing metadata
}
||||
|
|
||||
|
func (the Adaptor_AXY_LastRAW) Transform(topic, rawMsg string) []byte { |
||||
|
iotaData := models.IotaData{} |
||||
|
json.Unmarshal([]byte(rawMsg), &iotaData) |
||||
|
return the.Theme2GzGZM(iotaData) |
||||
|
} |
||||
|
|
||||
|
// Theme2GzGZM converts a successfully acquired IotaData record to the
// GZGZM wire format. Only the device lookup is performed so far; result
// is never assigned, so callers always receive an empty slice —
// presumably work in progress, confirm before relying on the output.
func (the Adaptor_AXY_LastRAW) Theme2GzGZM(iotaData models.IotaData) (result []byte) {
	// Skip records whose acquisition result code signals failure.
	if !iotaData.Data.Success() {
		return
	}
	log.Printf("设备[%s] 数据时间 %s", iotaData.DeviceId, iotaData.TriggerTime)
	// Resolve device metadata from redis (return value currently discarded).
	the.GetDeviceInfo(iotaData.DeviceId)
	return result
}
||||
|
// getSensorId maps a platform sensor id to its GZGZM sensor descriptor.
// The actual lookup is commented out, so the zero-value SensorInfo is
// always returned — TODO confirm before enabling callers.
func (the Adaptor_AXY_LastRAW) getSensorId(sensorId string) GZGZM.SensorInfo {
	s := GZGZM.SensorInfo{}
	//if v, ok := the.SensorInfoMap[sensorId]; ok {
	//	s = v
	//}
	return s
}
||||
|
func (the Adaptor_AXY_LastRAW) getCodeBytes(sensorCode int16) []byte { |
||||
|
|
||||
|
bytes := make([]byte, 0) |
||||
|
bytes = append(bytes, |
||||
|
byte(sensorCode&0xFF), |
||||
|
byte(sensorCode>>8), |
||||
|
) |
||||
|
|
||||
|
return bytes |
||||
|
} |
||||
|
|
||||
|
func (the Adaptor_AXY_LastRAW) getTimeBytes(sensorTime time.Time) []byte { |
||||
|
|
||||
|
year := int8(sensorTime.Year() - 1900) |
||||
|
month := int8(sensorTime.Month()) |
||||
|
day := int8(sensorTime.Day()) |
||||
|
hour := int8(sensorTime.Hour()) |
||||
|
minute := int8(sensorTime.Minute()) |
||||
|
millisecond := uint16(sensorTime.Second()*1000 + sensorTime.Nanosecond()/1e6) |
||||
|
bytes := make([]byte, 0) |
||||
|
bytes = append(bytes, |
||||
|
byte(year), |
||||
|
byte(month), |
||||
|
byte(day), |
||||
|
byte(hour), |
||||
|
byte(minute), |
||||
|
byte(millisecond&0xFF), |
||||
|
byte(millisecond>>8), |
||||
|
) |
||||
|
|
||||
|
return bytes |
||||
|
} |
||||
|
|
||||
|
func (the Adaptor_AXY_LastRAW) getDatasBytes(datas []float32) []byte { |
||||
|
|
||||
|
bytes := make([]byte, 0) |
||||
|
for _, data := range datas { |
||||
|
bits := math.Float32bits(data) |
||||
|
bytes = append(bytes, |
||||
|
byte(bits&0xFF), |
||||
|
byte(bits>>8&0xFF), |
||||
|
byte(bits>>16&0xFF), |
||||
|
byte(bits>>24&0xFF), |
||||
|
) |
||||
|
} |
||||
|
|
||||
|
return bytes |
||||
|
} |
||||
|
|
||||
|
// getPayloadHeader builds the 7-byte GZGZM frame header for a payload
// that carries floatCount float32 samples.
func (the Adaptor_AXY_LastRAW) getPayloadHeader(floatCount int16) []byte {
	bytes := make([]byte, 0)
	bytes = append(bytes,
		// message type
		0x02,
		0x00,
		// 1: uplink message
		0x01,
		// default communication-computer number
		0x00,
		// command code
		0x01,
		// message length: floatCount*4 data bytes + 9 fixed bytes, LE uint16
		byte((floatCount*4+9)&0xFF),
		byte((floatCount*4+9)>>8),
	)
	return bytes
}
||||
|
|
||||
|
func (the Adaptor_AXY_LastRAW) GetDeviceInfo(deviceId string) []byte { |
||||
|
Key_Iota_device := "iota_device" |
||||
|
key_Thing_struct := "thing_struct" |
||||
|
key_Iota_meta := "iota_meta" |
||||
|
k1 := fmt.Sprintf("%s:%s", Key_Iota_device, deviceId) |
||||
|
dev := models.IotaDevice{} |
||||
|
ts := models.ThingStruct{} |
||||
|
devMeta := models.DeviceMeta{} |
||||
|
err1 := the.Redis.GetObj(k1, &dev) |
||||
|
k2 := fmt.Sprintf("%s:%s", key_Thing_struct, dev.ThingId) |
||||
|
err2 := the.Redis.GetObj(k2, &ts) |
||||
|
k3 := fmt.Sprintf("%s:%s", key_Iota_meta, dev.DeviceMeta.Id) |
||||
|
err3 := the.Redis.GetObj(k3, &devMeta) |
||||
|
println(err1, err2, err3) |
||||
|
|
||||
|
return make([]byte, 0) |
||||
|
} |
@ -0,0 +1,36 @@ |
|||||
|
{ |
||||
|
"consumer": "consumerAXYraw", |
||||
|
"ioConfig": { |
||||
|
"in": { |
||||
|
"kafka": { |
||||
|
"brokers": [ |
||||
|
"10.8.30.160:30992" |
||||
|
], |
||||
|
"groupId": "synchronizeRaw", |
||||
|
"topics": [ |
||||
|
"RawData" |
||||
|
] |
||||
|
} |
||||
|
}, |
||||
|
"out": { |
||||
|
"es": { |
||||
|
"address": ["http://10.8.30.142:30092"], |
||||
|
"index": "anxincloud_raws_last", |
||||
|
"auth": { |
||||
|
"userName": "post", |
||||
|
"password": "123" |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
}, |
||||
|
"info": { |
||||
|
"common": { |
||||
|
"structureId": "3676" |
||||
|
}, |
||||
|
"queryComponent":{ |
||||
|
"redis": { |
||||
|
"address": "10.8.30.142:30379" |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
} |
@ -1 +0,0 @@ |
|||||
package constKey |
|
@ -0,0 +1,31 @@ |
|||||
|
package AXYraw |
||||
|
|
||||
|
import "goUpload/config" |
||||
|
|
||||
|
// ConfigFile is the JSON configuration consumed by the AXYraw consumer:
// the shared consumer section plus kafka-in/ES-out wiring and info.
type ConfigFile struct {
	config.Consumer
	IoConfig ioConfig `json:"ioConfig"`
	Info     Info     `json:"info"`
}

// ioConfig pairs the inbound source with the outbound sink.
type ioConfig struct {
	In  in  `json:"in"`
	Out out `json:"out"`
}

// in describes the inbound kafka subscription.
type in struct {
	Kafka config.KafkaConfig `json:"kafka"`
}

// out describes the outbound Elasticsearch target.
type out struct {
	Es config.EsConfig `json:"es"`
}

// Info carries adaptor-specific settings: free-form common key/values
// (e.g. structureId) and the query components used for metadata lookups.
type Info struct {
	Common         map[string]string `json:"common"`
	QueryComponent queryComponent    `json:"queryComponent"`
}

// queryComponent lists auxiliary stores queried at runtime; currently
// a single redis address.
type queryComponent struct {
	Redis struct {
		Address string `json:"address"`
	} `json:"redis"`
}
@ -0,0 +1,128 @@ |
|||||
|
package consumers |
||||
|
|
||||
|
import ( |
||||
|
"encoding/json" |
||||
|
"goUpload/adaptors" |
||||
|
"goUpload/consumers/AXYraw" |
||||
|
"goUpload/dbHelper" |
||||
|
"goUpload/dbHelper/_kafka" |
||||
|
"log" |
||||
|
"time" |
||||
|
) |
||||
|
|
||||
|
// consumerAXYraw consumes AnxinCloud raw kafka messages and forwards
// the transformed payloads to Elasticsearch.
type consumerAXYraw struct {
	// buffered channel decoupling kafka consumption from ES pushes
	dataCache chan []byte
	// parsed configuration for this consumer instance
	ConfigInfo AXYraw.ConfigFile
	InKafka    _kafka.KafkaHelper    // inbound kafka connection
	OutEs      dbHelper.ESHelper     // outbound ES connection
	infoRedis  *dbHelper.RedisHelper // metadata lookup cache
}
||||
|
|
||||
|
func (the *consumerAXYraw) LoadConfigJson(cfgStr string) { |
||||
|
// 将 JSON 格式的数据解析到结构体中
|
||||
|
err := json.Unmarshal([]byte(cfgStr), &the.ConfigInfo) |
||||
|
if err != nil { |
||||
|
log.Printf("读取配置文件[%s]异常 err=%v", cfgStr, err.Error()) |
||||
|
panic(err) |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
func (the *consumerAXYraw) Initial(cfg string) error { |
||||
|
the.dataCache = make(chan []byte, 200) |
||||
|
|
||||
|
the.LoadConfigJson(cfg) |
||||
|
err := the.inputInitial() |
||||
|
if err != nil { |
||||
|
return err |
||||
|
} |
||||
|
err = the.outputInitial() |
||||
|
if err != nil { |
||||
|
return err |
||||
|
} |
||||
|
err = the.infoComponentInitial() |
||||
|
return err |
||||
|
} |
||||
|
// inputInitial connects the inbound kafka consumer group, subscribes
// onData to every configured topic and starts the worker loop.
func (the *consumerAXYraw) inputInitial() error {
	// data entry point
	the.InKafka = _kafka.KafkaHelper{
		Brokers: the.ConfigInfo.IoConfig.In.Kafka.Brokers,
		GroupId: the.ConfigInfo.IoConfig.In.Kafka.GroupId,
	}
	the.InKafka.Initial()
	for _, inTopic := range the.ConfigInfo.IoConfig.In.Kafka.Topics {
		the.InKafka.Subscribe(inTopic, the.onData)
	}

	the.InKafka.Worker()
	return nil
}
||||
|
func (the *consumerAXYraw) outputInitial() error { |
||||
|
//数据出口
|
||||
|
the.OutEs = *dbHelper.NewESHelper( |
||||
|
the.ConfigInfo.IoConfig.Out.Es.Address, |
||||
|
the.ConfigInfo.IoConfig.Out.Es.Auth.UserName, |
||||
|
the.ConfigInfo.IoConfig.Out.Es.Auth.Password, |
||||
|
) |
||||
|
|
||||
|
return nil |
||||
|
} |
||||
|
|
||||
|
func (the *consumerAXYraw) infoComponentInitial() error { |
||||
|
//数据出口
|
||||
|
addr := the.ConfigInfo.Info.QueryComponent.Redis.Address |
||||
|
the.infoRedis = dbHelper.NewRedisHelper("", addr) |
||||
|
|
||||
|
return nil |
||||
|
} |
||||
|
|
||||
|
func (the *consumerAXYraw) RefreshTask() { |
||||
|
the.tokenRefresh() |
||||
|
ticker := time.NewTicker(24 * time.Hour) |
||||
|
defer ticker.Stop() |
||||
|
for true { |
||||
|
<-ticker.C |
||||
|
the.tokenRefresh() |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// tokenRefresh is intentionally a no-op: this consumer currently needs
// no token, but the hook keeps RefreshTask uniform with other consumers.
func (the *consumerAXYraw) tokenRefresh() {

}
||||
|
|
||||
|
func (the *consumerAXYraw) Work() { |
||||
|
|
||||
|
go func() { |
||||
|
for { |
||||
|
pushBytes := <-the.dataCache |
||||
|
log.Printf("取出ch数据,剩余[%d] ", len(the.dataCache)) |
||||
|
|
||||
|
log.Printf("推送[%v]: len=%d", "OutEs", len(pushBytes)) |
||||
|
//the.OutEs.PublishWithHeader(pushBytes, map[string]string{"Authorization": the.OutEs.Token})
|
||||
|
time.Sleep(10 * time.Millisecond) |
||||
|
} |
||||
|
|
||||
|
}() |
||||
|
} |
||||
|
// onData is the kafka subscription callback: it transforms each message
// and queues non-empty results for pushing. It always acknowledges the
// message (returns true), even when transformation yields nothing.
func (the *consumerAXYraw) onData(topic string, msg string) bool {
	// log a truncated preview only for long messages
	if len(msg) > 80 {
		log.Printf("recv:[%s]:%s ...", topic, msg[:80])
	}
	adaptor := the.getAdaptor()
	if adaptor != nil {
		needPush := adaptor.Transform(topic, msg)

		if len(needPush) > 0 {
			the.dataCache <- needPush
		}
	}
	return true
}
||||
|
func (the *consumerAXYraw) getAdaptor() (adaptor adaptors.IAdaptor3) { |
||||
|
|
||||
|
adaptor = adaptors.Adaptor_AXY_LastRAW{ |
||||
|
Redis: the.infoRedis, |
||||
|
} |
||||
|
return adaptor |
||||
|
} |
@ -0,0 +1,23 @@ |
|||||
|
package dbHelper |
||||
|
|
||||
|
import "testing" |
||||
|
|
||||
|
// res captures the two redis values fetched together by TestRedis.
type res struct {
	RLLYCJ      string `json:"LLYCJ"`
	RLLCacheMap string `json:"LLCacheMap"`
}
||||
|
|
||||
|
// TestRedis is an integration smoke test against a live redis instance;
// it requires network access to 10.8.30.160 and will fail offline.
func TestRedis(t *testing.T) {
	addr := "10.8.30.160:30379"
	redis := NewRedisHelper("", addr)

	key1 := "RLLYCJ"
	//v := redis.Get(key1)
	//println(v)

	key2 := "RLLCacheMap"
	res1 := res{}

	// fetch both keys in one round trip; the result is only printed,
	// nothing is asserted
	v2 := redis.MGet(&res1, key1, key2)
	println(v2)
}
@ -0,0 +1,231 @@ |
|||||
|
package dbHelper |
||||
|
|
||||
|
import ( |
||||
|
"bytes" |
||||
|
"context" |
||||
|
"encoding/json" |
||||
|
"fmt" |
||||
|
elasticsearch6 "github.com/elastic/go-elasticsearch/v6" |
||||
|
"github.com/elastic/go-elasticsearch/v6/esapi" |
||||
|
"goUpload/models" |
||||
|
"io" |
||||
|
"log" |
||||
|
"strings" |
||||
|
) |
||||
|
|
||||
|
// ESHelper wraps an Elasticsearch 6 client with the search and
// bulk-write operations used by the consumers.
type ESHelper struct {
	addresses []string // cluster endpoints, kept for reference
	//org string
	esClient *elasticsearch6.Client
}
||||
|
|
||||
|
func NewESHelper(addresses []string, user, pwd string) *ESHelper { |
||||
|
es, _ := elasticsearch6.NewClient(elasticsearch6.Config{ |
||||
|
Addresses: addresses, |
||||
|
Username: user, |
||||
|
Password: pwd, |
||||
|
}) |
||||
|
res, err := es.Info() |
||||
|
if err != nil { |
||||
|
log.Fatalf("Error getting response: %s", err) |
||||
|
} |
||||
|
log.Printf("链接到es[%s]info=%v", elasticsearch6.Version, res) |
||||
|
return &ESHelper{ |
||||
|
addresses: addresses, |
||||
|
esClient: es, |
||||
|
} |
||||
|
} |
||||
|
func (the *ESHelper) SearchRaw(index, reqBody string) []models.HitRaw { |
||||
|
body := &bytes.Buffer{} |
||||
|
body.WriteString(reqBody) |
||||
|
response, err := the.esClient.Search( |
||||
|
the.esClient.Search.WithIndex(index), |
||||
|
the.esClient.Search.WithBody(body), |
||||
|
) |
||||
|
defer response.Body.Close() |
||||
|
if err != nil { |
||||
|
return nil |
||||
|
} |
||||
|
r := models.EsRawResp{} |
||||
|
// Deserialize the response into a map.
|
||||
|
if err := json.NewDecoder(response.Body).Decode(&r); err != nil { |
||||
|
log.Fatalf("Error parsing the response body: %s", err) |
||||
|
} |
||||
|
return r.Hits.Hits |
||||
|
} |
||||
|
func (the *ESHelper) Search(index, reqBody string) { |
||||
|
body := &bytes.Buffer{} |
||||
|
body.WriteString(reqBody) |
||||
|
response, err := the.esClient.Search( |
||||
|
the.esClient.Search.WithIndex(index), |
||||
|
the.esClient.Search.WithBody(body), |
||||
|
) |
||||
|
|
||||
|
if err != nil { |
||||
|
//return nil, err
|
||||
|
} |
||||
|
log.Println(response.Status()) |
||||
|
var r map[string]any |
||||
|
// Deserialize the response into a map.
|
||||
|
if err := json.NewDecoder(response.Body).Decode(&r); err != nil { |
||||
|
log.Fatalf("Error parsing the response body: %s", err) |
||||
|
} |
||||
|
// Print the response status, number of results, and request duration.
|
||||
|
log.Printf( |
||||
|
"[%s] %d hits; took: %dms", |
||||
|
response.Status(), |
||||
|
int(r["hits"].(map[string]any)["total"].(float64)), |
||||
|
int(r["took"].(float64)), |
||||
|
) |
||||
|
|
||||
|
for _, hit := range r["hits"].(map[string]any)["hits"].([]any) { |
||||
|
|
||||
|
source := hit.(map[string]any)["_source"] |
||||
|
log.Printf(" * ID=%s, %s", hit.(map[string]any)["_id"], source) |
||||
|
} |
||||
|
log.Println(strings.Repeat("=", 37)) |
||||
|
} |
||||
|
// request indexes reqBody into index with refresh enabled and returns
// the decoded response map.
// NOTE(review): a transport failure calls log.Fatalf, killing the whole
// process from library code — consider returning the error instead.
func (the *ESHelper) request(index, reqBody string) (map[string]any, error) {
	// Set up the request object.
	req := esapi.IndexRequest{
		Index: index,
		//DocumentID: strconv.Itoa(i + 1),
		Body:    strings.NewReader(reqBody),
		Refresh: "true",
	}
	// Perform the request with the client.
	res, err := req.Do(context.Background(), the.esClient)
	if err != nil {
		log.Fatalf("Error getting response: %s", err)
	}
	defer res.Body.Close()
	var r map[string]any
	if res.IsError() {
		// the document id is not tracked here, hence the constant 0
		log.Printf("[%s] Error indexing document ID=%d", res.Status(), 0)
	} else {
		// Deserialize the response into a map.
		if err := json.NewDecoder(res.Body).Decode(&r); err != nil {
			log.Printf("Error parsing the response body: %s", err)
		} else {
			// Print the response status and indexed document version.
			log.Printf("[%s] %s; version=%d", res.Status(), r["result"], int(r["_version"].(float64)))
		}
	}
	return r, err
}
||||
|
|
||||
|
func (the *ESHelper) searchRaw(index, reqBody string) (models.IotaData, error) { |
||||
|
respmap, err := the.request(index, reqBody) |
||||
|
if respmap != nil { |
||||
|
|
||||
|
} |
||||
|
iotaDatas := models.IotaData{} |
||||
|
return iotaDatas, err |
||||
|
} |
||||
|
|
||||
|
func (the *ESHelper) searchThemes(index, reqBody string) (models.EsThemeResp, error) { |
||||
|
body := &bytes.Buffer{} |
||||
|
body.WriteString(reqBody) |
||||
|
response, err := the.esClient.Search( |
||||
|
the.esClient.Search.WithIndex(index), |
||||
|
the.esClient.Search.WithBody(body), |
||||
|
) |
||||
|
defer response.Body.Close() |
||||
|
if err != nil { |
||||
|
//return nil, err
|
||||
|
} |
||||
|
log.Println(response.Status()) |
||||
|
r := models.EsThemeResp{} |
||||
|
// Deserialize the response into a map.
|
||||
|
if err := json.NewDecoder(response.Body).Decode(&r); err != nil { |
||||
|
log.Fatalf("Error parsing the response body: %s", err) |
||||
|
} |
||||
|
return r, err |
||||
|
} |
||||
|
func (the *ESHelper) SearchLatestStationData(index string, sensorId int) (models.EsTheme, error) { |
||||
|
//sensorId := 178
|
||||
|
queryBody := fmt.Sprintf(`{ |
||||
|
"size": 1, |
||||
|
"query": { |
||||
|
"term": { |
||||
|
"sensor": { |
||||
|
"value": %d |
||||
|
} |
||||
|
} |
||||
|
}, |
||||
|
"sort": [ |
||||
|
{ |
||||
|
"collect_time": { |
||||
|
"order": "desc" |
||||
|
} |
||||
|
} |
||||
|
] |
||||
|
}`, sensorId) |
||||
|
//index := "go_native_themes"
|
||||
|
themes, err := the.searchThemes(index, queryBody) |
||||
|
|
||||
|
var theme models.EsTheme |
||||
|
if len(themes.Hits.Hits) > 0 { |
||||
|
theme = themes.Hits.Hits[0].Source |
||||
|
} |
||||
|
|
||||
|
return theme, err |
||||
|
} |
||||
|
func (the *ESHelper) BulkWrite(index, reqBody string) { |
||||
|
|
||||
|
body := &bytes.Buffer{} |
||||
|
body.WriteString(reqBody) |
||||
|
bulkRequest := esapi.BulkRequest{ |
||||
|
Index: index, |
||||
|
Body: body, |
||||
|
DocumentType: "_doc", |
||||
|
} |
||||
|
res, err := bulkRequest.Do(context.Background(), the.esClient) |
||||
|
defer res.Body.Close() |
||||
|
if err != nil { |
||||
|
log.Panicf("es 写入[%s],err=%s", index, err.Error()) |
||||
|
} |
||||
|
|
||||
|
if res.StatusCode != 200 && res.StatusCode != 201 { |
||||
|
respBody, _ := io.ReadAll(res.Body) |
||||
|
log.Panicf("es 写入失败,err=%s \n body=%s", string(respBody), reqBody) |
||||
|
} |
||||
|
//log.Printf("es 写入[%s],字符长度=%d,完成", index, len(reqBody))
|
||||
|
|
||||
|
} |
||||
|
|
||||
|
// BulkWriteRaws2Es bulk-indexes raws into index, deriving each _id from
// the device id and collect-time millis so re-imports overwrite rather
// than duplicate documents.
func (the *ESHelper) BulkWriteRaws2Es(index string, raws []models.EsRaw) {
	body := strings.Builder{}
	for _, raw := range raws {
		// scala => val id = UUID.nameUUIDFromBytes(s"${v.deviceId}-${v.acqTime.getMillis}".getBytes("UTF-8")).toString
		source, _ := json.Marshal(raw)
		_id := fmt.Sprintf("%s-%d", raw.IotaDevice, raw.CollectTime.UnixMilli())
		// one bulk action line followed by the document source line
		s := fmt.Sprintf(
			`{"index": {"_index": "%s","_id": "%s"}}
%s
`, index, _id, source)
		body.WriteString(s)
	}
	the.BulkWrite(index, body.String())
}
||||
|
|
||||
|
// BulkWriteRaws2EsLast bulk-indexes raws into the "last value" index,
// using the device id alone as _id so each device keeps exactly one
// latest document.
func (the *ESHelper) BulkWriteRaws2EsLast(index string, raws []models.EsRaw) {
	body := strings.Builder{}
	for _, raw := range raws {
		source, _ := json.Marshal(raw)
		_id := raw.IotaDevice
		// one bulk action line followed by the document source line
		s := fmt.Sprintf(
			`{"index": {"_index": "%s","_id": "%s"}}
%s
`, index, _id, source)
		body.WriteString(s)
	}
	the.BulkWrite(index, body.String())
}
||||
|
|
||||
|
// Close is currently a no-op; it exists so ESHelper matches the common
// helper lifecycle used by the other db helpers.
func (the *ESHelper) Close() {

}
@ -0,0 +1,133 @@ |
|||||
|
package dbHelper |
||||
|
|
||||
|
import ( |
||||
|
"context" |
||||
|
"encoding/json" |
||||
|
"errors" |
||||
|
"github.com/redis/go-redis/v9" |
||||
|
"log" |
||||
|
"time" |
||||
|
) |
||||
|
|
||||
|
// RedisHelper wraps a go-redis universal client (single node, sentinel
// or cluster, depending on the options passed at initialisation).
type RedisHelper struct {
	rdb     redis.UniversalClient
	isReady bool            // set once InitialCluster has run
	ctx     context.Context // base context for all commands
}
||||
|
|
||||
|
func NewRedisHelper(master string, address ...string) *RedisHelper { |
||||
|
r := &RedisHelper{ctx: context.Background()} |
||||
|
r.InitialCluster(master, address...) |
||||
|
return r |
||||
|
|
||||
|
} |
||||
|
|
||||
|
// InitialCluster (re)creates the underlying universal client for the
// given addresses and optional sentinel master name, then marks the
// helper ready.
func (the *RedisHelper) InitialCluster(master string, address ...string) {
	the.rdb = redis.NewUniversalClient(&redis.UniversalOptions{
		Addrs:      address,
		MasterName: master,
	})
	log.Printf("redis 初始化完成 %s", address)
	the.isReady = true
}
||||
|
|
||||
|
// Get returns the string value stored at key, or "" when the key does
// not exist.
// NOTE(review): any error other than redis.Nil panics — callers must be
// prepared for that, or this should be changed to return an error.
func (the *RedisHelper) Get(key string) string {
	val, err := the.rdb.Get(the.ctx, key).Result()
	if errors.Is(err, redis.Nil) {
		log.Printf("%s does not exist", key)
	} else if err != nil {
		panic(err)
	} else {
		//log.Printf("get key => %s =%s", key, val)
	}
	return val
}
||||
|
|
||||
|
func (the *RedisHelper) GetObj(keys string, addr any) error { |
||||
|
err := the.rdb.Get(the.ctx, keys).Scan(addr) |
||||
|
if errors.Is(err, redis.Nil) { |
||||
|
log.Printf("%s does not exist", keys) |
||||
|
} else if err != nil { |
||||
|
es := err.Error() |
||||
|
log.Printf("err=%s ", es) |
||||
|
return err |
||||
|
} |
||||
|
return nil |
||||
|
} |
||||
|
func (the *RedisHelper) SetObj(keys string, obj any) error { |
||||
|
rs, err := the.rdb.Set(the.ctx, keys, obj, time.Minute*5).Result() |
||||
|
log.Printf("rs=%s ", rs) |
||||
|
if err != nil { |
||||
|
log.Printf("err=%s ", err.Error()) |
||||
|
} |
||||
|
return err |
||||
|
} |
||||
|
// GetLRange reads the entire list stored at keys and scans it into the
// slice pointed to by addr; a missing key is logged and treated as
// success.
func (the *RedisHelper) GetLRange(keys string, addr any) error {
	err := the.rdb.LRange(the.ctx, keys, 0, -1).ScanSlice(addr)
	if errors.Is(err, redis.Nil) {
		log.Printf("%s does not exist", keys)
	} else if err != nil {
		log.Printf("err=%s ", err.Error())
		return err
	}
	return nil
}
||||
|
func (the *RedisHelper) MGet(addr any, keys ...string) error { |
||||
|
err := the.rdb.MGet(the.ctx, keys...).Scan(addr) |
||||
|
if errors.Is(err, redis.Nil) { |
||||
|
log.Printf("%s does not exist", keys) |
||||
|
} else if err != nil { |
||||
|
log.Printf("err=%s ", err.Error()) |
||||
|
return err |
||||
|
} |
||||
|
|
||||
|
return err |
||||
|
} |
||||
|
// MGetObj fetches keys in one round trip and scans the values into
// addr, returning nil when keys are merely absent.
func (the *RedisHelper) MGetObj(addr any, keys ...string) error {
	err := the.rdb.MGet(the.ctx, keys...).Scan(addr)
	if errors.Is(err, redis.Nil) {
		log.Printf("%s does not exist", keys)
	} else if err != nil {
		log.Printf("err=%s ", err.Error())
		return err
	}
	return nil
}
||||
|
// HMGetObj reads one hash field of key and JSON-unmarshals it into
// addr. A missing key (redis.Nil) is logged and returned as an error —
// unlike the other getters here, so callers should note the asymmetry.
func (the *RedisHelper) HMGetObj(addr any, key, field string) error {
	rp, err := the.rdb.HMGet(the.ctx, key, field).Result()
	if errors.Is(err, redis.Nil) {
		log.Printf("%s does not exist", key)
		return err
	} else if err != nil {
		log.Printf("err=%s ", err.Error())
		return err
	}
	// Each returned element is the raw JSON string of the field value;
	// non-string (nil) elements for absent fields are skipped.
	for _, i := range rp {
		if v, ok := i.(string); ok {
			err := json.Unmarshal([]byte(v), addr)
			if err != nil {
				return err
			}
		}
	}
	// TODO: Scan had a problem here, to be investigated later — the
	// manual unmarshal above is the workaround.
	return nil

	//err := the.rdb.HMGet(the.ctx, key, field).Scan(addr)
	//if errors.Is(err, redis.Nil) {
	//	log.Printf("%s does not exist", key)
	//} else if err != nil {
	//	log.Printf("err=%s ", err.Error())
	//	return err
	//}
	//return nil
}
||||
|
|
||||
|
// SRem removes members from the set at key and returns how many were
// actually removed.
func (the *RedisHelper) SRem(key string, members ...string) int64 {
	return the.rdb.SRem(the.ctx, key, members).Val()
}
||||
|
|
||||
|
// SAdd adds members to the set at key and returns how many were newly
// inserted.
func (the *RedisHelper) SAdd(key string, members ...string) int64 {
	return the.rdb.SAdd(the.ctx, key, members).Val()
}
@ -0,0 +1,40 @@ |
|||||
|
package models |
||||
|
|
||||
|
import ( |
||||
|
"time" |
||||
|
) |
||||
|
|
||||
|
// IotaData is one acquisition record as published on the iota kafka
// topic: routing ids, timing information and the measured payload.
type IotaData struct {
	UserId      string    `json:"userId"`
	ThingId     string    `json:"thingId"`
	DimensionId string    `json:"dimensionId"`
	DimCapId    string    `json:"dimCapId"`
	CapId       string    `json:"capId"`
	DeviceId    string    `json:"deviceId"`
	ScheduleId  string    `json:"scheduleId"`
	TaskId      string    `json:"taskId"`
	JobId       int       `json:"jobId"`
	JobRepeatId int       `json:"jobRepeatId"`
	TriggerTime time.Time `json:"triggerTime"` // when acquisition was scheduled
	RealTime    time.Time `json:"realTime"`    // when it actually ran
	FinishTime  time.Time `json:"finishTime"`
	Seq         int       `json:"seq"`
	Released    bool      `json:"released"`
	Data        Data      `json:"data"`
}

// Data carries the measured values plus the acquisition result status.
type Data struct {
	Type int            `json:"type"`
	Data map[string]any `json:"data"`
	// Result describes how the acquisition ended (code 0 = success).
	Result struct {
		Code     int    `json:"code"`
		Msg      string `json:"msg"`
		Detail   string `json:"detail"`
		ErrTimes int    `json:"errTimes"`
		Dropped  bool   `json:"dropped"`
	} `json:"result"`
}

// Success reports whether the acquisition finished without error.
func (the *Data) Success() bool {
	return the.Result.Code == 0
}
@ -0,0 +1,43 @@ |
|||||
|
package models |
||||
|
|
||||
|
import "encoding/json" |
||||
|
|
||||
|
// DeviceInfo is a device joined with its structure and meta definition.
type DeviceInfo struct {
	Id         string     `json:"id"`
	Name       string     `json:"name"`
	Structure  Structure  `json:"structure"`
	DeviceMeta DeviceMeta `json:"device_meta"`
}

// DeviceMeta describes a device model: its properties and capabilities.
type DeviceMeta struct {
	Id           string           `json:"id"`
	Name         string           `json:"name"`
	Model        string           `json:"model"`
	Properties   []IotaProperty   `json:"properties"`
	Capabilities []IotaCapability `json:"capabilities"`
}

// MarshalBinary implements encoding.BinaryMarshaler so the struct can
// be stored in redis as JSON.
func (m *DeviceMeta) MarshalBinary() (data []byte, err error) {
	return json.Marshal(m)
}

// UnmarshalBinary implements encoding.BinaryUnmarshaler for reading the
// struct back from redis.
func (m *DeviceMeta) UnmarshalBinary(data []byte) error {
	return json.Unmarshal(data, m)
}

// IotaCapability is one acquisition capability of a device model.
type IotaCapability struct {
	CapabilityCategoryId int            `json:"capabilityCategoryId"`
	Id                   string         `json:"id"`
	Name                 string         `json:"name"`
	Properties           []IotaProperty `json:"properties"`
}

// IotaProperty is a single named property (with unit) of a device or
// capability.
type IotaProperty struct {
	Category string `json:"category"`
	Name     string `json:"name"`
	ShowName string `json:"showName"`
	Unit     string `json:"unit"`
}
@ -0,0 +1,37 @@ |
|||||
|
package models |
||||
|
|
||||
|
import "time" |
||||
|
|
||||
|
// EsRaw is the document shape written to the raw-data ES indices.
type EsRaw struct {
	StructId       int               `json:"structId"`
	IotaDeviceName string            `json:"iota_device_name"`
	Data           map[string]any    `json:"data"`
	CollectTime    time.Time         `json:"collect_time"`
	Meta           map[string]string `json:"meta"`
	IotaDevice     string            `json:"iota_device"`
	CreateTime     time.Time         `json:"create_time"`
}

// EsRawResp mirrors the ES 6 search response envelope for raw docs.
type EsRawResp struct {
	Took     int  `json:"took"`
	TimedOut bool `json:"timed_out"`
	Shards   struct {
		Total      int `json:"total"`
		Successful int `json:"successful"`
		Skipped    int `json:"skipped"`
		Failed     int `json:"failed"`
	} `json:"_shards"`
	Hits struct {
		Total    int      `json:"total"`
		MaxScore float64  `json:"max_score"`
		Hits     []HitRaw `json:"hits"`
	} `json:"hits"`
}

// HitRaw is one search hit wrapping an EsRaw source document.
type HitRaw struct {
	Index  string  `json:"_index"`
	Type   string  `json:"_type"`
	Id     string  `json:"_id"`
	Score  float64 `json:"_score"`
	Source EsRaw   `json:"_source"`
}
@ -0,0 +1,41 @@ |
|||||
|
package models |
||||
|
|
||||
|
import "time" |
||||
|
|
||||
|
// EsTheme is the processed ("theme") document stored in the theme ES
// indices: a sensor/factor reading with its resolved display names.
type EsTheme struct {
	SensorName      string         `json:"sensor_name"`
	FactorName      string         `json:"factor_name"`
	FactorProtoCode string         `json:"factor_proto_code"`
	Data            map[string]any `json:"data"`
	FactorProtoName string         `json:"factor_proto_name"`
	Factor          int            `json:"factor"`
	CollectTime     time.Time      `json:"collect_time"`
	Sensor          int            `json:"sensor"`
	Structure       int            `json:"structure"`
	IotaDevice      []string       `json:"iota_device"`
	CreateTime      time.Time      `json:"create_time"`
}

// EsThemeResp mirrors the ES 6 search response envelope for theme docs.
type EsThemeResp struct {
	Took     int  `json:"took"`
	TimedOut bool `json:"timed_out"`
	Shards   struct {
		Total      int `json:"total"`
		Successful int `json:"successful"`
		Skipped    int `json:"skipped"`
		Failed     int `json:"failed"`
	} `json:"_shards"`
	Hits struct {
		Total    int        `json:"total"`
		MaxScore float64    `json:"max_score"`
		Hits     []HitTheme `json:"hits"`
	} `json:"hits"`
}

// HitTheme is one search hit wrapping an EsTheme source document.
type HitTheme struct {
	Index  string  `json:"_index"`
	Type   string  `json:"_type"`
	Id     string  `json:"_id"`
	Score  float64 `json:"_score"`
	Source EsTheme `json:"_source"`
}
@ -0,0 +1,25 @@ |
|||||
|
package models |
||||
|
|
||||
|
import ( |
||||
|
"encoding/json" |
||||
|
) |
||||
|
|
||||
|
// IotaDevice is the platform device record cached in redis.
type IotaDevice struct {
	Id           string     `json:"id"`
	Name         string     `json:"name"`
	Properties   string     `json:"properties"`
	DeviceMetaId string     `json:"deviceMetaId"`
	ThingId      string     `json:"thingId"`
	DeviceMeta   DeviceMeta `json:"deviceMeta"`
}

// MarshalBinary implements encoding.BinaryMarshaler so the struct can
// be stored in redis as JSON.
func (m *IotaDevice) MarshalBinary() (data []byte, err error) {
	return json.Marshal(m)
}

// UnmarshalBinary implements encoding.BinaryUnmarshaler for reading the
// struct back from redis.
func (m *IotaDevice) UnmarshalBinary(data []byte) error {
	return json.Unmarshal(data, m)
}
@ -0,0 +1,31 @@ |
|||||
|
package models |
||||
|
|
||||
|
import ( |
||||
|
"encoding/json" |
||||
|
) |
||||
|
|
||||
|
// Structure is a monitored structure as embedded in device records.
type Structure struct {
	ThingId string `json:"thingId"`
	Id      int    `json:"id"`
	Name    string `json:"name"`
	Type    string `json:"type"`
	OrgId   int    `json:"orgId"`
}

// ThingStruct is the thing→structure mapping cached in redis; it has
// the same fields as Structure but a distinct cache key.
type ThingStruct struct {
	ThingId string `json:"thingId"`
	Id      int    `json:"id"`
	Name    string `json:"name"`
	Type    string `json:"type"`
	OrgId   int    `json:"orgId"`
}

// MarshalBinary implements encoding.BinaryMarshaler so the struct can
// be stored in redis as JSON.
func (m *ThingStruct) MarshalBinary() (data []byte, err error) {
	return json.Marshal(m)
}

// UnmarshalBinary implements encoding.BinaryUnmarshaler for reading the
// struct back from redis.
func (m *ThingStruct) UnmarshalBinary(data []byte) error {
	return json.Unmarshal(data, m)
}
Loading…
Reference in new issue