package com.bgy.shop.service.impl;
import com.bgy.shop.service.DataCoreService;
import com.bgy.shop.service.SendScheduService;
import com.google.gson.Gson;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Sends order messages to the dispatch (scheduling) system via Kafka.
 */
@Service
public class SendScheduerviceImpl implements SendScheduService {

    @Autowired
    private DataCoreService dataCoreService;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /** Agreed-upon default message key shared with the dispatch system. */
    private static final String KEY_001 = "BGY_KEY_001";

    /**
     * Looks up order rows for the given parameters and pushes them to the
     * dispatch system over Kafka, one message per target topic.
     *
     * @param _sMap query parameters; {@code creditFlag == "1"} selects the
     *              manager credit-confirmation query, which skips the
     *              transaction-record table
     * @throws Exception when no order rows are found for the parameters
     */
    public void sendBykafka(Map<String, Object> _sMap) throws Exception {
        // R000014: normal order lookup; R000074: manager credit confirmation.
        // NOTE: "1".equals(obj) is already null-safe, so the original's extra
        // null check was redundant and has been dropped.
        String querySql = "B_CORE_DATA_R000014";
        if ("1".equals(_sMap.get("creditFlag"))) {
            querySql = "B_CORE_DATA_R000074";
        }
        // Fetch order info and branch id by serial number.
        List<Map<String, Object>> dataCoreList = dataCoreService.getResultByTCODE(querySql, _sMap);
        // BUG FIX: the original threw only when the list was non-null AND empty,
        // so a null result slipped past the guard and caused an NPE in the loop.
        if (dataCoreList == null || dataCoreList.isEmpty()) {
            throw new Exception("订单异常,请联系管理员");
        }
        // Normalize createTime to epoch millis; rows without it are skipped.
        List<Map<String, Object>> newCoreList = new ArrayList<>();
        for (Map<String, Object> m : dataCoreList) {
            Object created = m.get("createTime");
            if (created == null) {
                continue;
            }
            m.put("createTime", ((Date) created).getTime());
            newCoreList.add(m);
        }
        // BUG FIX: the original appended every row's topic onto a single
        // StringBuffer, so rows targeting different branches/modes produced a
        // corrupted topic such as "order_1order_2_1". Group rows per topic and
        // send one message per topic instead; behavior is identical for the
        // common single-topic case.
        Map<String, List<Map<String, Object>>> byTopic = new LinkedHashMap<>();
        for (Map<String, Object> map : newCoreList) {
            String topic = "1".equals(map.get("mode"))
                    ? "order_" + map.get("branchId") + "_1" // manual: order_<branchId>_1
                    : "order_" + map.get("branchId");       // machine: order_<branchId>
            byTopic.computeIfAbsent(topic, k -> new ArrayList<>()).add(map);
        }
        Gson gson = new Gson();
        for (Map.Entry<String, List<Map<String, Object>>> entry : byTopic.entrySet()) {
            kafkaTemplate.send(entry.getKey(), KEY_001, gson.toJson(entry.getValue()));
        }
    }

    /**
     * Sends the given rows to {@code topic} as a JSON array using the default key.
     *
     * @param topic    Kafka topic
     * @param dataList rows to serialize and send
     * @throws Exception never thrown here; declared by the interface contract
     */
    public void sendBykafka(String topic, List<Map<String, Object>> dataList) throws Exception {
        kafkaTemplate.send(topic, KEY_001, new Gson().toJson(dataList));
    }

    /**
     * Sends the given rows to {@code topic} as a JSON array under the supplied key.
     *
     * @param topic    Kafka topic
     * @param key      agreed-upon message key
     * @param dataList rows to serialize and send
     * @throws Exception never thrown here; declared by the interface contract
     */
    public void sendBykafka(String topic, String key, List<Map<String, Object>> dataList) throws Exception {
        // BUG FIX: the original ignored the caller-supplied key and always sent key001.
        kafkaTemplate.send(topic, key, new Gson().toJson(dataList));
    }

    /**
     * Sends a pre-serialized JSON payload to {@code topic} using the default key.
     *
     * @param topic    Kafka topic
     * @param dataJson payload, already JSON-encoded
     * @throws Exception never thrown here; declared by the interface contract
     */
    public void sendBykafka(String topic, String dataJson) throws Exception {
        kafkaTemplate.send(topic, KEY_001, dataJson);
    }

    /**
     * Sends a pre-serialized JSON payload to {@code topic} under the supplied key.
     *
     * @param topic    Kafka topic
     * @param key      agreed-upon message key
     * @param dataJson payload, already JSON-encoded
     * @throws Exception never thrown here; declared by the interface contract
     */
    public void sendBykafka(String topic, String key, String dataJson) throws Exception {
        // BUG FIX: the original ignored the caller-supplied key and always sent key001.
        kafkaTemplate.send(topic, key, dataJson);
    }
}
spring:
  application:
    name: shop
  datasource:
    driver-class-name: com.mysql.jdbc.Driver
    username: root
    # NOTE(review): plaintext DB credentials committed to source control —
    # move to environment variables or a secrets manager.
    password: BGY12BETA@$BZL
    # NOTE(review): URL ends with a dangling '&' — confirm whether a query
    # parameter was truncated.
    url: jdbc:mysql://rm-wz91qf956fu6op89pwo.mysql.rds.aliyuncs.com/bgy_restaurant?characterEncoding=utf-8&
  redis:
    host: 120.77.180.187
    port: 6379
    password: BgY12Beta@2018
  # -------------------- Kafka --------------------
  kafka:
    # bootstrap-servers: ["192.168.1.204:9092","192.168.1.100:9092","192.168.1.200:9092"]
    bootstrap-servers: ["120.77.180.187:9092"]
    # String list belonging to the "spring.kafka" configuration group.
    schema-registry-url:
      - "http://120.77.180.187:18081"
      #- "http://192.168.1.100:18081"
    # Entries below belong to the "spring.kafka.producer" configuration group.
    producer:
      retries: 0
      batch-size: 4096
      buffer-memory: 40960
      enableIdempotence: true # written camelCase (relaxed binding)
      max-in-flight-requests-per-connection: 1 # hyphen-separated
      linger-ms: 10
      topic: "order_2"
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    # Entries below belong to the "spring.kafka.consumer" configuration group.
    consumer:
      auto-offset-reset: "latest"
      # NOTE(review): "groupId" and "group-id" both bind to the same Spring
      # property via relaxed binding but carry different values
      # ("record-service" vs "test") — keep exactly one of them.
      groupId: "record-service"
      topics: ["order_2"]
      group-id: test
      session-timeout: 6000
      enable-auto-commit: true
      auto-commit-interval: 1000
      concurrency: 10
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
eureka:
  client:
    service-url:
      defaultZone: http://120.77.180.187:8761/eureka/
server:
  port: 8084
ribbon:
  ReadTimeout: 60000
  ConnectTimeout: 60000
mybatis:
  mapper-locations: classpath:com/bgy/shop/mapper/*.xml
  config-locations: classpath:mybatis/mybatis-config.xml
<!-- Kafka -->
<!-- Generates configuration metadata for custom @ConfigurationProperties
     classes; compile-time only (optional=true keeps it out of transitive deps). -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-configuration-processor</artifactId>
    <optional>true</optional>
</dependency>
<!-- Spring for Apache Kafka: provides the KafkaTemplate used by the service. -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>