1. austin.sql test data changes

2. Installation tutorial fixes
3. Write the full-link logs to the austinLog topic
4. Tidy up the austin-stream code
master
3y 3 years ago
parent 4871a8f42a
commit d2e4914f89

@@ -183,10 +183,10 @@ docker ps
docker exec -it kafka sh
```
- Create a topic (my **topicName** here is austinTopic; you can change it to your own)
+ Create a topic (my **topicName** here is austinBusiness; you can change it to your own)
```
- $KAFKA_HOME/bin/kafka-topics.sh --create --topic austinTopic --partitions 4 --zookeeper zookeeper:2181 --replication-factor 1
+ $KAFKA_HOME/bin/kafka-topics.sh --create --topic austinBusiness --partitions 4 --zookeeper zookeeper:2181 --replication-factor 1
```
Check the info of the topic we just created
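(The describe command itself falls outside this hunk. As an aside, here is a minimal Java sketch that checks the topic via Kafka's AdminClient instead of the shell script; the broker address `ip:9092` is a placeholder you'd replace with your own:)
```
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.TopicDescription;

import java.util.Collections;
import java.util.Properties;

public class TopicCheck {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // placeholder broker address, same "ip:port" convention as the tutorial
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "ip:9092");
        try (AdminClient admin = AdminClient.create(props)) {
            // get() throws an ExecutionException if austinBusiness does not exist
            TopicDescription desc = admin.describeTopics(Collections.singleton("austinBusiness"))
                    .values().get("austinBusiness").get();
            System.out.println(desc.partitions().size() + " partitions");
        }
    }
}
```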
@@ -344,16 +344,16 @@ global:
scrape_configs:
- job_name: 'prometheus'
  static_configs:
-   - targets: ['ip:9090'] // TODO fill in your own ip
+   - targets: ['ip:9090']
- job_name: 'cadvisor'
  static_configs:
-   - targets: ['ip:8899'] // TODO fill in your own ip
+   - targets: ['ip:8899']
- job_name: 'node'
  static_configs:
-   - targets: ['ip:9100'] // TODO fill in your own ip
+   - targets: ['ip:9100']
```
- **Mind the ports here; use the ones you configured**
+ **Mind the ports here; use the ones you configured, and fill in your own ip as well**
**Copy** this `prometheus.yml` config to the `/etc/prometheus/prometheus.yml` path. Then run `docker-compose up -d` in that directory to start everything, after which we can access each of the services:

@@ -33,6 +33,9 @@ public abstract class AbstractDeduplicationService implements DeduplicationServi
@Autowired
private RedisUtils redisUtils;
+ @Autowired
+ private LogUtils logUtils;
@Override
public void deduplication(DeduplicationParam param) {
@@ -62,7 +65,7 @@ public abstract class AbstractDeduplicationService implements DeduplicationServi
// 剔除符合去重条件的用户
if (CollUtil.isNotEmpty(filterReceiver)) {
taskInfo.getReceiver().removeAll(filterReceiver);
- LogUtils.print(AnchorInfo.builder().businessId(taskInfo.getBusinessId()).ids(filterReceiver).state(param.getAnchorState().getCode()).build());
+ logUtils.print(AnchorInfo.builder().businessId(taskInfo.getBusinessId()).ids(filterReceiver).state(param.getAnchorState().getCode()).build());
}
}

@@ -9,6 +9,7 @@ import com.java3y.austin.common.domain.AnchorInfo;
import com.java3y.austin.common.domain.TaskInfo;
import com.java3y.austin.common.enums.AnchorState;
import com.java3y.austin.support.utils.LogUtils;
+ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
@@ -22,6 +23,10 @@ public class DiscardMessageService {
@ApolloConfig("boss.austin")
private Config config;
+ @Autowired
+ private LogUtils logUtils;
/**
 * whether the message should be discarded (configured in apollo)
 * @param taskInfo
@@ -33,7 +38,7 @@ public class DiscardMessageService {
AustinConstant.APOLLO_DEFAULT_VALUE_JSON_ARRAY));
if (array.contains(String.valueOf(taskInfo.getMessageTemplateId()))) {
- LogUtils.print(AnchorInfo.builder().businessId(taskInfo.getBusinessId()).ids(taskInfo.getReceiver()).state(AnchorState.DISCARD.getCode()).build());
+ logUtils.print(AnchorInfo.builder().businessId(taskInfo.getBusinessId()).ids(taskInfo.getReceiver()).state(AnchorState.DISCARD.getCode()).build());
return true;
}
return false;

@@ -23,6 +23,9 @@ public abstract class BaseHandler implements Handler {
@Autowired
private HandlerHolder handlerHolder;
+ @Autowired
+ private LogUtils logUtils;
/**
 * the send logic implemented by each channel Handler
@@ -35,10 +38,10 @@ public abstract class BaseHandler implements Handler {
@Override
public void doHandler(TaskInfo taskInfo) {
if (handler(taskInfo)) {
- LogUtils.print(AnchorInfo.builder().state(AnchorState.SEND_SUCCESS.getCode()).businessId(taskInfo.getBusinessId()).ids(taskInfo.getReceiver()).build());
+ logUtils.print(AnchorInfo.builder().state(AnchorState.SEND_SUCCESS.getCode()).businessId(taskInfo.getBusinessId()).ids(taskInfo.getReceiver()).build());
return;
}
- LogUtils.print(AnchorInfo.builder().state(AnchorState.SEND_FAIL.getCode()).businessId(taskInfo.getBusinessId()).ids(taskInfo.getReceiver()).build());
+ logUtils.print(AnchorInfo.builder().state(AnchorState.SEND_FAIL.getCode()).businessId(taskInfo.getBusinessId()).ids(taskInfo.getReceiver()).build());
}
/**

@@ -38,6 +38,9 @@ public class Receiver {
@Autowired
private TaskPendingHolder taskPendingHolder;
+ @Autowired
+ private LogUtils logUtils;
@KafkaListener(topics = "#{'${austin.business.topic.name}'}")
public void consumer(ConsumerRecord<?, String> consumerRecord, @Header(KafkaHeaders.GROUP_ID) String topicGroupId) {
Optional<String> kafkaMessage = Optional.ofNullable(consumerRecord.value());
@@ -51,7 +54,7 @@
*/
if (topicGroupId.equals(messageGroupId)) {
for (TaskInfo taskInfo : taskInfoLists) {
- LogUtils.print(LogParam.builder().bizType(LOG_BIZ_TYPE).object(taskInfo).build(), AnchorInfo.builder().ids(taskInfo.getReceiver()).businessId(taskInfo.getBusinessId()).state(AnchorState.RECEIVE.getCode()).build());
+ logUtils.print(LogParam.builder().bizType(LOG_BIZ_TYPE).object(taskInfo).build(), AnchorInfo.builder().ids(taskInfo.getReceiver()).businessId(taskInfo.getBusinessId()).state(AnchorState.RECEIVE.getCode()).build());
Task task = context.getBean(Task.class).setTaskInfo(taskInfo);
taskPendingHolder.route(topicGroupId).execute(task);
}

@@ -9,10 +9,10 @@ import com.java3y.austin.common.vo.BasicResultVO;
import com.java3y.austin.service.api.impl.domain.SendTaskModel;
import com.java3y.austin.support.pipeline.BusinessProcess;
import com.java3y.austin.support.pipeline.ProcessContext;
+ import com.java3y.austin.support.utils.KafkaUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
- import org.springframework.kafka.core.KafkaTemplate;
/**
* @author 3y
@@ -22,7 +22,7 @@ import org.springframework.kafka.core.KafkaTemplate;
public class SendMqAction implements BusinessProcess {
@Autowired
- private KafkaTemplate kafkaTemplate;
+ private KafkaUtils kafkaUtils;
@Value("${austin.business.topic.name}")
private String topicName;
@@ -30,14 +30,14 @@ public class SendMqAction implements BusinessProcess {
@Override
public void process(ProcessContext context) {
SendTaskModel sendTaskModel = (SendTaskModel) context.getProcessModel();
+ String message = JSON.toJSONString(sendTaskModel.getTaskInfo(), new SerializerFeature[]{SerializerFeature.WriteClassName});
try {
- kafkaTemplate.send(topicName, JSON.toJSONString(sendTaskModel.getTaskInfo(),
-         new SerializerFeature[] {SerializerFeature.WriteClassName}));
+ kafkaUtils.send(topicName, message);
} catch (Exception e) {
context.setNeedBreak(true).setResponse(BasicResultVO.fail(RespStatusEnum.SERVICE_ERROR));
log.error("send kafka fail! e:{},params:{}", Throwables.getStackTraceAsString(e)
, JSON.toJSONString(CollUtil.getFirst(sendTaskModel.getTaskInfo().listIterator())));
}
}
}

@@ -38,12 +38,6 @@
<version>${flink.version}</version>
</dependency>
- <dependency>
-     <groupId>org.projectlombok</groupId>
-     <artifactId>lombok</artifactId>
- </dependency>
<dependency>
<groupId>com.java3y.austin</groupId>
<artifactId>austin-support</artifactId>

@@ -1,5 +1,6 @@
package com.java3y.austin.stream;
+ import com.java3y.austin.stream.constants.AustinFlinkConstant;
import com.java3y.austin.stream.utils.FlinkUtils;
import com.java3y.austin.stream.utils.SpringContextUtils;
import lombok.extern.slf4j.Slf4j;
@@ -8,7 +9,6 @@ import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
- import org.springframework.context.ApplicationContext;
/**
 * flink bootstrap class
@@ -20,29 +20,28 @@ public class AustinBootStrap {
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+ SpringContextUtils.loadContext(AustinFlinkConstant.SPRING_CONFIG_PATH);
+ /**
+  * 1. get the KafkaConsumer
+  */
+ KafkaSource<String> kafkaConsumer = SpringContextUtils.getBean(FlinkUtils.class).getKafkaConsumer(AustinFlinkConstant.TOPIC_NAME, AustinFlinkConstant.GROUP_ID, AustinFlinkConstant.BROKER);
+ DataStreamSource<String> kafkaSource = env.fromSource(kafkaConsumer, WatermarkStrategy.noWatermarks(), AustinFlinkConstant.SOURCE_NAME);
- String topicName = "austinTopicV2";
- String groupId = "austinTopicV23";
- ApplicationContext applicationContext = SpringContextUtils.loadContext("classpath*:austin-spring.xml");
- FlinkUtils flinkUtils = applicationContext.getBean(FlinkUtils.class);
- KafkaSource<String> kafkaConsumer = flinkUtils.getKafkaConsumer(topicName, groupId);
- DataStreamSource<String> kafkaSource = env.fromSource(kafkaConsumer, WatermarkStrategy.noWatermarks(), "kafkaSource");
/**
 * 2. transform / clean the data
 */
/**
 * 3. write the data into Redis (real-time) and hive (offline)
 */
kafkaSource.addSink(new SinkFunction<String>() {
@Override
public void invoke(String value, Context context) throws Exception {
log.error("kafka value:{}", value);
}
});
- // DataStream<AnchorInfo> stream = envBatchPendingThread
- //         .addSource(new AustinSource())
- //         .name("transactions");
- //
- // stream.addSink(new AustinSink());
env.execute("AustinBootStrap");
}
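For now the sink above only logs each record; steps 2 and 3 in the comments are still TODO. As a rough sketch of what step 3's Redis part might look like, assuming a plain Jedis client (the host and the per-businessId hash-key scheme are made up for illustration):
```
import com.java3y.austin.common.domain.AnchorInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import redis.clients.jedis.Jedis;

/**
 * hypothetical sink: count each AnchorInfo state per businessId in Redis
 */
public class RedisAnchorSink extends RichSinkFunction<AnchorInfo> {
    private transient Jedis jedis;

    @Override
    public void open(Configuration parameters) {
        // placeholder host/port, mirroring the tutorial's "ip" convention
        jedis = new Jedis("ip", 6379);
    }

    @Override
    public void invoke(AnchorInfo value, Context context) {
        // hash key = businessId, field = state, value = hit count
        jedis.hincrBy(String.valueOf(value.getBusinessId()),
                String.valueOf(value.getState()), 1L);
    }

    @Override
    public void close() {
        if (jedis != null) {
            jedis.close();
        }
    }
}
```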

@@ -0,0 +1,27 @@
package com.java3y.austin.stream.constants;
public class AustinFlinkConstant {
/**
 * Kafka info
 * TODO change the broker address to your own before use
 */
public static final String GROUP_ID = "austinLogGroup";
public static final String TOPIC_NAME = "austinLog";
public static final String BROKER = "ip:port";
/**
 * spring config file path
 */
public static final String SPRING_CONFIG_PATH = "classpath*:austin-spring.xml";
/**
 * Flink source/function/sink names
 */
public static final String SOURCE_NAME = "austin_kafka_source";
public static final String FUNCTION_NAME = "austin_transfer";
public static final String SINK_NAME = "austin_sink";
}

@@ -5,6 +5,9 @@ import com.java3y.austin.common.domain.AnchorInfo;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
/**
 * mock sink (placeholder for the real sink)
 */
@Slf4j
public class AustinSink extends RichSinkFunction<AnchorInfo> {

@@ -1,34 +0,0 @@
package com.java3y.austin.stream.source;
import com.java3y.austin.common.domain.AnchorInfo;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import java.util.ArrayList;
import java.util.List;
/**
 * produce some mock AnchorInfo data as the source
 *
 * @author 3y
 */
public class AustinSource extends RichSourceFunction<AnchorInfo> {
@Override
public void run(SourceContext<AnchorInfo> sourceContext) throws Exception {
List<AnchorInfo> anchorInfoList = new ArrayList<>();
for (int i = 0; i < 10; i++) {
anchorInfoList.add(AnchorInfo.builder()
.state(10).businessId(333L)
.timestamp(System.currentTimeMillis()).build());
}
for (AnchorInfo anchorInfo : anchorInfoList) {
sourceContext.collect(anchorInfo);
}
}
@Override
public void cancel() {
}
}

@@ -1,5 +1,6 @@
package com.java3y.austin.stream.utils;
+ import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
@@ -9,17 +10,18 @@ import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsIni
*
* @author 3y
*/
+ @Slf4j
public class FlinkUtils {
/**
 * get the kafkaConsumer
 *
 * @param topicName
 * @param groupId
 * @return
 */
- public KafkaSource<String> getKafkaConsumer(String topicName, String groupId) {
+ public KafkaSource<String> getKafkaConsumer(String topicName, String groupId, String broker) {
KafkaSource<String> source = KafkaSource.<String>builder()
.setBootstrapServers("ip:port")
.setBootstrapServers(broker)
.setTopics(topicName)
.setGroupId(groupId)
.setStartingOffsets(OffsetsInitializer.earliest())

@@ -1,5 +1,6 @@
package com.java3y.austin.stream.utils;
+ import cn.hutool.core.collection.CollUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
@@ -16,29 +17,32 @@ import java.util.List;
public class SpringContextUtils {
private static ApplicationContext context;
/**
 * the spring XML paths that have already been loaded
 */
private static List<String> xmlPath = new ArrayList<>();
public static ApplicationContext loadContext(String path) {
return loadContext(new String[]{path});
}
/**
 * load the given spring.xml files and build the context
 *
 * @param paths
 * @return
 */
public static synchronized ApplicationContext loadContext(String[] paths) {
if (null != paths && paths.length > 0) {
// filter out the paths that are newly added
List<String> newPaths = new ArrayList<>();
for (String path : paths) {
if (!xmlPath.contains(path)) {
log.info("ApplicationContextFactory add new path {}", path);
newPaths.add(path);
} else {
log.info("ApplicationContextFactory already load path {}", path);
}
}
- if (!newPaths.isEmpty()) {
+ if (CollUtil.isNotEmpty(newPaths)) {
String[] array = new String[newPaths.size()];
- for (int i=0; i<newPaths.size(); i++) {
+ for (int i = 0; i < newPaths.size(); i++) {
array[i] = newPaths.get(i);
xmlPath.add(newPaths.get(i));
}

@@ -0,0 +1,30 @@
package com.java3y.austin.support.utils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
/**
 * @author 3y
 * @date 2022/2/16
 * Kafka utility class
 */
@Component
@Slf4j
public class KafkaUtils {
@Autowired
private KafkaTemplate kafkaTemplate;
/**
 * send a message to kafka
 *
 * @param topicName
 * @param jsonMessage
 */
public void send(String topicName, String jsonMessage) {
kafkaTemplate.send(topicName, jsonMessage);
}
}
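For reference, a minimal usage sketch of the new utility (the demo class below is hypothetical; it just assumes KafkaUtils is on the component-scan path and a KafkaTemplate bean is configured):
```
import com.java3y.austin.support.utils.KafkaUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class KafkaUtilsDemo {
    @Autowired
    private KafkaUtils kafkaUtils;

    public void sendSample() {
        // the topic must already exist, e.g. the austinLog topic created above
        kafkaUtils.send("austinLog", "{\"businessId\":333,\"state\":10}");
    }
}
```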

@@ -3,19 +3,29 @@ package com.java3y.austin.support.utils;
import cn.monitor4all.logRecord.bean.LogDTO;
import cn.monitor4all.logRecord.service.CustomLogListener;
import com.alibaba.fastjson.JSON;
+ import com.google.common.base.Throwables;
import com.java3y.austin.common.domain.AnchorInfo;
import com.java3y.austin.common.domain.LogParam;
import lombok.extern.slf4j.Slf4j;
+ import org.springframework.beans.factory.annotation.Autowired;
+ import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
/**
 * log utility class
 *
 * @author 3y
 */
@Slf4j
@Component
public class LogUtils extends CustomLogListener {
+ @Autowired
+ private KafkaUtils kafkaUtils;
+ @Value("${austin.business.log.topic.name}")
+ private String topicName;
/**
 * handle the log events produced by the @OperationLog annotation
 */
@@ -27,7 +37,7 @@ public class LogUtils extends CustomLogListener {
/**
 * record the log info of the current object
 */
- public static void print(LogParam logParam) {
+ public void print(LogParam logParam) {
logParam.setTimestamp(System.currentTimeMillis());
log.info(JSON.toJSONString(logParam));
}
@@ -35,15 +45,23 @@ public class LogUtils extends CustomLogListener {
/**
 * record the anchor (tracking) info
 */
- public static void print(AnchorInfo anchorInfo) {
+ public void print(AnchorInfo anchorInfo) {
anchorInfo.setTimestamp(System.currentTimeMillis());
- log.info(JSON.toJSONString(anchorInfo));
+ String message = JSON.toJSONString(anchorInfo);
+ log.info(message);
+ try {
+     kafkaUtils.send(topicName, message);
+ } catch (Exception e) {
+     log.error("LogUtils#print kafka fail! e:{},params:{}", Throwables.getStackTraceAsString(e)
+             , JSON.toJSONString(anchorInfo));
+ }
}
/**
 * record the log and anchor info together
 */
- public static void print(LogParam logParam,AnchorInfo anchorInfo) {
+ public void print(LogParam logParam, AnchorInfo anchorInfo) {
print(anchorInfo);
print(logParam);
}
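Since print() is no longer static, callers inject the LogUtils bean instead of calling LogUtils.print(...) directly, as the hunks above show. A minimal caller sketch (the service class is made up for illustration):
```
import com.java3y.austin.common.domain.AnchorInfo;
import com.java3y.austin.common.domain.TaskInfo;
import com.java3y.austin.common.enums.AnchorState;
import com.java3y.austin.support.utils.LogUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class DemoService {
    @Autowired
    private LogUtils logUtils;

    public void record(TaskInfo taskInfo) {
        // each print now also ships the AnchorInfo JSON to the austinLog topic
        logUtils.print(AnchorInfo.builder()
                .businessId(taskInfo.getBusinessId())
                .ids(taskInfo.getReceiver())
                .state(AnchorState.RECEIVE.getCode())
                .build());
    }
}
```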

@@ -28,9 +28,10 @@ spring.redis.port=${port}
spring.redis.password=${password}
##################### business properties #####################
- austin.business.topic.name=austinTopicV2
+ austin.business.topic.name=austinBusiness
+ austin.business.log.topic.name=austinLog
# TODO if you need graylog, replace the ip!
- austin.business.graylog.ip=${ip}
+ austin.business.graylog.ip=120.48.13.113
# TODO if on windows os, replace the path!
austin.business.upload.crowd.path=/Users/3y/temp
@@ -44,7 +45,7 @@ xxl.job.executor.appname=austin
xxl.job.executor.jobHandlerName=austinJob
xxl.job.executor.ip=
xxl.job.executor.port=6666
- xxl.job.executor.logpath=
+ xxl.job.executor.logpath=logs/xxl
xxl.job.executor.logretentiondays=30
xxl.job.accessToken=

@@ -59,24 +59,8 @@ CREATE TABLE `sms_record`
COLLATE = utf8mb4_unicode_ci COMMENT ='短信记录信息';
- /* test SMS */
- INSERT INTO austin.message_template (id, name, audit_status, msg_status, id_type,
- send_channel, template_type, msg_type, msg_content, send_account, creator,
- updator, auditor, team, proposer, is_deleted, created, updated)
- VALUES (1, 'test短信', 10, 10, 30, 30, 10, 10, '{"content":"{$contentValue}"}', 10, 'yyyyc', 'yyyyu', 'yyyyyyz', 'yyyt',
- 'yyyy22', 0, 1636978066, 1636978066);
+ -- SMS test
+ INSERT INTO austin.message_template (id, name, audit_status, flow_id, msg_status, cron_task_id, cron_crowd_path, expect_push_time, id_type, send_channel, template_type, msg_type, msg_content, send_account, creator, updator, auditor, team, proposer, is_deleted, created, updated) VALUES (1, '短信测试', 10, '', 10, null, '', '', 30, 30, 20, 20, '{"content":"6666","url":"","title":""}', 10, 'Java3y', 'Java3y', '3y', '公众号Java3y', '3y', 0, 1644387139, 1644387139);
- /* test SMS + url link tracking */
- INSERT INTO austin.message_template (id, name, audit_status, msg_status, id_type, send_channel, template_type,
- msg_type, msg_content, send_account, creator, updator, auditor,
- team, proposer, is_deleted, created, updated)
- VALUES (2, 'test短信', 10, 10, 30, 30, 10, 20,
- '{"content":"{$contentValue}","url":"https://gitee.com/zhongfucheng/austin"}', 10, 'yyyyc', 'yyyyu', 'yyyyyyz',
- 'yyyt', 'yyyy22', 0, 1637411536, 1637411536);
- /* test email sending */
- INSERT INTO austin.message_template (id, name, audit_status, msg_status, id_type, send_channel, template_type,
- msg_type, msg_content, send_account, creator, updator, auditor,
- team, proposer, is_deleted, created, updated)
- VALUES (3, 'test邮件', 10, 10, 50, 40, 20, 10, '{"content":"{$contentValue}","title":"{$title}"}', 10,
- 'yyyyc', 'yyyyu', 'yyyyyyz', 'yyyt', 'yyyy22', 0, 1641546914, 1641546914);
+ -- email test
+ INSERT INTO austin.message_template (id, name, audit_status, flow_id, msg_status, cron_task_id, cron_crowd_path, expect_push_time, id_type, send_channel, template_type, msg_type, msg_content, send_account, creator, updator, auditor, team, proposer, is_deleted, created, updated) VALUES (2, '测试邮件', 10, '', 10, null, '', '', 50, 40, 20, 10, '{"content":"4344444444","url":"","title":"6666666"}', 10, 'Java3y', 'Java3y', '3y', '公众号Java3y', '3y', 0, 1644387638, 1644387638);