Merge FullName, increase insertSize to 660
This commit is contained in:
parent 65b27cea23
commit e464a2dee3
BIN  iniDataForLinux  (binary file not shown)
BIN  iniDataForMacOs  (binary file not shown)
main.go  (24 changed lines)
@@ -41,7 +41,6 @@ func init() {
    iniPath()                // initialize paths
    applogger = logrus.New() // create the logger
    iniLog()                 // initialize the logging configuration
    // connect to Redis
    redisClient = redis.NewClient(&redis.Options{
        Addr:     redisAddress,
        Password: redisPassword,
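For context, below is a minimal, self-contained sketch of the Redis client setup shown in this hunk. It assumes the context-free go-redis v6 API (github.com/go-redis/redis), which matches the bare .Err() calls later in the diff; the initRedis name and the Ping check are illustrative additions, not part of the project.

    package sketch

    import (
        "log"

        "github.com/go-redis/redis" // assumption: go-redis v6, context-free API
    )

    var redisClient *redis.Client

    func initRedis(addr, password string) {
        redisClient = redis.NewClient(&redis.Options{
            Addr:     addr,     // e.g. "127.0.0.1:6379" (placeholder)
            Password: password, // empty string if auth is disabled
        })
        // Fail fast if the server is unreachable, rather than at the first Set/Get.
        if err := redisClient.Ping().Err(); err != nil {
            log.Fatalf("redis connection failed: %v", err)
        }
    }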
@@ -68,7 +67,6 @@ func main() {
        case <-ticker_merge.C:
            iniLog()
            fmt.Print("尝试执行名单合并...\n")
            //go downloadDecompression()
        }
    }
}
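The select on ticker_merge.C above drives the periodic merge attempt. Below is a sketch of the same ticker loop, with the merge call passed in as a callback standing in for downloadDecompression() (which this commit leaves commented out); treating taskTime as minutes is an assumption.

    package sketch

    import (
        "fmt"
        "time"
    )

    // runMergeLoop fires the merge job every taskTime minutes, mirroring the
    // ticker-driven case in main(). mergeJob stands in for the project's
    // downloadDecompression().
    func runMergeLoop(taskTime int, mergeJob func()) {
        ticker := time.NewTicker(time.Duration(taskTime) * time.Minute)
        defer ticker.Stop()
        for range ticker.C {
            fmt.Print("attempting list merge...\n")
            go mergeJob() // the diff shows this call commented out in this commit
        }
    }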
@@ -189,8 +187,9 @@ func downloadDecompression() {
    fmt.Printf("共%d个文件\n", len(files))

    sort.Sort(FileSorter(files))
    processingStatus := -1

    for _, file := range files {
        processingStatus := -1
        fmt.Printf("第%d个文件处理中\n", it)
        it++
        // Check if file has been downloaded before
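This hunk also shows the files being ordered with sort.Sort(FileSorter(files)) and processingStatus being initialised to -1; the line counts suggest the declaration moves relative to the loop, though the extraction does not mark which side is old and which is new. A sketch of a sort.Interface-compatible FileSorter follows; the real type's comparison key is not visible here, so ordering by file name is an assumption.

    package main

    import (
        "fmt"
        "io/ioutil"
        "log"
        "os"
        "sort"
    )

    // FileSorter sketch, compatible with sort.Sort(FileSorter(files)) above.
    // The project's own FileSorter may compare something other than the name.
    type FileSorter []os.FileInfo

    func (f FileSorter) Len() int           { return len(f) }
    func (f FileSorter) Swap(i, j int)      { f[i], f[j] = f[j], f[i] }
    func (f FileSorter) Less(i, j int) bool { return f[i].Name() < f[j].Name() }

    func main() {
        files, err := ioutil.ReadDir(".") // placeholder directory
        if err != nil {
            log.Fatal(err)
        }
        sort.Sort(FileSorter(files))
        fmt.Printf("%d files in total\n", len(files))
    }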
@@ -295,7 +294,7 @@ func downloadDecompression() {
            applogger.Info(fmt.Sprintf("%s(批次文件)下载完成", file.Name()))
            processingStatus = batchInsert(file.Name())
        }
        if processingStatus == 0 {
        if processingStatus != -1 {
            err = redisClient.Set(fileKey, 1, 0).Err() // mark the file as fully processed
            if err != nil {
                body := fmt.Sprintf("写入文件处理完成标记失败文件名:%s,错误信息:%v\n", file.Name(), err)
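The success check changes from processingStatus == 0 to processingStatus != -1 before the per-file completion flag is written to Redis. A sketch of that marker follows, assuming go-redis v6 and a hypothetical "processed:" key prefix (the diff only shows a fileKey variable).

    package sketch

    import "github.com/go-redis/redis" // assumption: go-redis v6, context-free API

    // markProcessed sketches the completion flag written in the hunk above:
    // once batchInsert does not report failure, the file name is flagged in
    // Redis so the next run skips it ("check if file has been downloaded before").
    func markProcessed(client *redis.Client, name string, status int) error {
        if status == -1 {
            return nil // batchInsert reported failure; leave the file unmarked
        }
        // Expiration 0 means the flag never expires, so it survives restarts.
        return client.Set("processed:"+name, 1, 0).Err()
    }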
@@ -325,7 +324,7 @@ func batchInsert(fileName string) int {
    for {
        record, err := reader.Read()
        if err != nil {
            return -1
            //return -1
            break
        }

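This hunk touches the error handling around reader.Read(): both a live return -1 and a commented-out one followed by break are visible. If the reader is an encoding/csv.Reader (an assumption), io.EOF is the normal end-of-input signal and should end the loop rather than count as a failure; a sketch of that distinction:

    package sketch

    import (
        "encoding/csv"
        "io"
        "os"
    )

    // readAllRecords sketches the read loop in batchInsert. io.EOF terminates
    // the loop normally; any other error is treated as a genuine failure,
    // matching the -1 status used in the diff.
    func readAllRecords(path string) ([][]string, int) {
        f, err := os.Open(path)
        if err != nil {
            return nil, -1
        }
        defer f.Close()

        reader := csv.NewReader(f)
        var rows [][]string
        for {
            record, err := reader.Read()
            if err == io.EOF {
                break // normal end of file, not a failure
            }
            if err != nil {
                return nil, -1 // malformed row or I/O error
            }
            rows = append(rows, record)
        }
        return rows, 0
    }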
@@ -537,6 +536,7 @@ func batchDataInsert(fileName string) int {
            "ReservedField4": row[8],
            "ReservedField5": row[9],
            "DataFileName":   fileName,
            "FullName":       row[4],
        }
        reservedFieldsJson, err := json.Marshal(reservedFields) // marshal the custom fields to JSON
        if err != nil {
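This is the "merge FullName" half of the commit: FullName (row[4]) joins the reservedFields map that is marshalled into the reserved_field column. A self-contained sketch of that step; the map's full key set is larger than shown here, and the sample row and file name in main are placeholders.

    package main

    import (
        "encoding/json"
        "fmt"
        "log"
    )

    // buildReservedField folds FullName into the reserved-field JSON blob,
    // as the hunk above now does, instead of keeping it in its own column.
    func buildReservedField(row []string, fileName string) (string, error) {
        reservedFields := map[string]string{
            "ReservedField4": row[8],
            "ReservedField5": row[9],
            "DataFileName":   fileName,
            "FullName":       row[4], // the line added by this commit
        }
        b, err := json.Marshal(reservedFields)
        if err != nil {
            return "", err
        }
        return string(b), nil
    }

    func main() {
        // Placeholder row matching the indices used in the diff.
        row := []string{"0", "1", "ch-01", "13800000000", "Zhang San", "", "", "", "r4", "r5"}
        s, err := buildReservedField(row, "batch_0001.txt")
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(s)
    }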
@@ -556,7 +556,6 @@ func batchDataInsert(fileName string) int {
        dataBatchDuplicate = append(dataBatchDuplicate, BatchDataDuplicateLog{
            CommunicationChannelID: row[2],
            Mobile:                 row[3],
            FullName:               row[4],
            ReservedField:          string(reservedFieldsJson),
        })

@@ -576,7 +575,6 @@ func batchDataInsert(fileName string) int {
        dataBatch = append(dataBatch, BatcheData{
            CommunicationChannelID: row[2],
            Mobile:                 row[3],
            FullName:               row[4],
            ReservedField:          string(reservedFieldsJson),
        })

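Rows for the main batch and the duplicate log are appended to slices and flushed to the database in chunks; insertSize (raised to 660) presumably bounds the rows per insert. Below is a sketch of that chunking, with a stand-in row type and insert callback rather than the project's GORM model and bulk-insert call.

    package sketch

    // row is a minimal stand-in, not the real BatcheData model.
    type row struct {
        Mobile        string
        ReservedField string
    }

    // flushInChunks writes the accumulated rows insertSize rows at a time,
    // which is what the insertSize setting (500 -> 660 in this commit) controls.
    func flushInChunks(rows []row, insertSize int, insert func([]row) error) error {
        for start := 0; start < len(rows); start += insertSize {
            end := start + insertSize
            if end > len(rows) {
                end = len(rows)
            }
            if err := insert(rows[start:end]); err != nil {
                return err
            }
        }
        return nil
    }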
@@ -825,7 +823,7 @@ func iniConfi() {
        txtPath = "RawData/Txt/"
        logPath = "logs/"
        batchSize = 5000    // data per submission
        insertSize = 500    // rows inserted at a time
        insertSize = 660    // rows inserted at a time
        insertChanSize = 10 // channel buffer size
        goSize = 10         // number of goroutines
        taskTime = 1
@@ -847,10 +845,10 @@ func iniConfi() {
        zipPath = "RawData/Zip/"
        txtPath = "RawData/Txt/"
        logPath = "logs/"
        batchSize = 5000     // data per submission
        insertSize = 500     // rows inserted at a time
        insertChanSize = 50  // channel buffer size
        goSize = 50          // number of goroutines
        batchSize = 5000     // data per submission
        insertSize = 660     // rows inserted at a time
        insertChanSize = 100 // channel buffer size
        goSize = 50          // number of goroutines
        taskTime = 60
        to = []string{"chejiulong@wemediacn.com", "xiayujuan@wemediacn.com", "wangyuanbing@wemediacn.com", "tangweiqi@wemediacn.com"}
        token = "7100477930234217"
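In this branch of iniConfi(), insertSize rises from 500 to 660 and insertChanSize from 50 to 100, while goSize stays at 50. The channel/goroutine wiring itself is not part of this diff, so the following is only a sketch of how a buffered channel of insertChanSize batches could feed goSize workers.

    package sketch

    import "sync"

    // runWorkers starts goSize workers draining a channel buffered to
    // insertChanSize batches. Names and structure are assumptions; the real
    // wiring is not shown in this commit.
    func runWorkers(goSize, insertChanSize int, handle func(batch []string)) (chan<- []string, *sync.WaitGroup) {
        ch := make(chan []string, insertChanSize)
        var wg sync.WaitGroup
        for i := 0; i < goSize; i++ {
            wg.Add(1)
            go func() {
                defer wg.Done()
                for batch := range ch {
                    handle(batch)
                }
            }()
        }
        return ch, &wg
    }

    // Usage: send batches on the channel, then close(ch) and wg.Wait().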
@@ -956,7 +954,6 @@ type BatcheData struct {
    ID                     uint   `gorm:"primary_key"`
    CommunicationChannelID string `gorm:"column:communication_channel_id"`
    Mobile                 string `gorm:"column:mobile"`
    FullName               string `gorm:"column:full_name"`
    ReservedField          string `gorm:"column:reserved_field"`
}

@@ -980,7 +977,6 @@ type BatchDataDuplicateLog struct {
    ID                     int    `gorm:"primaryKey;autoIncrement"`
    CommunicationChannelID string `gorm:"column:communication_channel_id"`
    Mobile                 string `gorm:"column:mobile"`
    FullName               string `gorm:"column:full_name"`
    ReservedField          string `gorm:"column:reserved_field"`
}
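Both models keep communication_channel_id, mobile and reserved_field; the net -1 line count in each struct hunk, read together with the commit title, suggests the dedicated full_name column is what is being dropped, with FullName now carried inside the reserved_field JSON. A sketch of the models under that assumption (table names are not shown in the diff):

    package models

    // BatcheData and BatchDataDuplicateLog after this commit, assuming the
    // FullName column is the removed line in each struct.
    type BatcheData struct {
        ID                     uint   `gorm:"primary_key"`
        CommunicationChannelID string `gorm:"column:communication_channel_id"`
        Mobile                 string `gorm:"column:mobile"`
        ReservedField          string `gorm:"column:reserved_field"` // JSON, now including FullName
    }

    type BatchDataDuplicateLog struct {
        ID                     int    `gorm:"primaryKey;autoIncrement"`
        CommunicationChannelID string `gorm:"column:communication_channel_id"`
        Mobile                 string `gorm:"column:mobile"`
        ReservedField          string `gorm:"column:reserved_field"`
    }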