
The Sensors Analytics tracking workflow, and how to implement event tracking with Sensors


Importing the SDK dependency

<dependency>
    <groupId>com.sensorsdata.analytics.javasdk</groupId>
    <artifactId>SensorsAnalyticsSDK</artifactId>
    <version>3.1.16</version>
</dependency>
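The dependency only pulls in the SDK; a SensorsAnalytics instance still has to be constructed and exposed as a bean for the consumer shown later to autowire. The original post does not show this step, so the following is a minimal sketch under assumptions: the config class name and log path are hypothetical, and ConcurrentLoggingConsumer (under com.sensorsdata.analytics.javasdk.consumer in the 3.x SDK) writes events to local files for a log agent to ship to the Sensors cluster.

package com.wanshun.config; // hypothetical package

import com.sensorsdata.analytics.javasdk.SensorsAnalytics;
import com.sensorsdata.analytics.javasdk.consumer.ConcurrentLoggingConsumer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.io.IOException;

@Configuration
public class SensorsAnalyticsConfig {

    /**
     * SensorsAnalytics bean autowired by the Kafka consumer below.
     * ConcurrentLoggingConsumer appends events to local log files,
     * which the Sensors log agent then ships to the analytics cluster.
     */
    @Bean(destroyMethod = "shutdown")
    public SensorsAnalytics sensorsAnalytics() throws IOException {
        // The log path is illustrative; point it at the directory your log agent watches.
        return new SensorsAnalytics(new ConcurrentLoggingConsumer("/data/sa/access.log"));
    }
}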

A business event tracked with Sensors Analytics

/**
 * Report the driver's order-acceptance result to Sensors Analytics.
 *
 * @param passengerOrder passenger order information
 * @param orderMileage   estimated trip mileage
 * @param failCode       failure code when acceptance fails
 * @param isSuccess      whether the driver accepted the order successfully
 */
@Async
@Override
public void getMatchPassengerOrderSensorcParams(DrivingOrderDo passengerOrder, Integer orderMileage,
                                                Integer failCode, boolean isSuccess) {
    Map<String, Object> param = new HashMap<>();
    // Business line
    param.put("service_type", "800");
    // Passenger order ID
    param.put("order_id", passengerOrder.getOrderId());
    // Order type
    param.put("order_type", String.valueOf(passengerOrder.getOrderType()));
    // Trip origin
    if (StringUtils.isNotEmpty(passengerOrder.getStartAddr())) {
        RpcAddrPointDto startAddress = JsonUtil.fromJson(passengerOrder.getStartAddr(), RpcAddrPointDto.class);
        param.put("order_departure", startAddress.getAddress());
    }
    // Trip destination
    if (StringUtils.isNotEmpty(passengerOrder.getEndAddr())) {
        RpcAddrPointDto endAddress = JsonUtil.fromJson(passengerOrder.getEndAddr(), RpcAddrPointDto.class);
        param.put("order_destination", endAddress.getAddress());
    }
    // Scheduled departure time (seconds converted to milliseconds)
    param.put("subscribe_time", passengerOrder.getAppointTime().longValue() * 1000);
    // Estimated trip mileage
    param.put("estimate_mileage", orderMileage);
    // Whether acceptance succeeded
    param.put("is_success", isSuccess);
    // Failure reason
    param.put("fail_reason", String.valueOf(failCode));
    param.put("scan_order_type", String.valueOf(passengerOrder.getOrderType()));
    param.put("valet_driver_type", String.valueOf(passengerOrder.getIsForOther()));

    SensorsAo sensorsAo = new SensorsAo();
    // Event name
    sensorsAo.setEventName("DriverAcceptOrderResult");
    // Logged-in user ID
    sensorsAo.setUserId(passengerOrder.getCarOwnerId());
    // Order ID (also used as the Kafka balance key)
    sensorsAo.setOrderId(passengerOrder.getOrderId());
    // Event properties
    sensorsAo.setParam(param);
    sensorsBurialSiteClusterEventProducer.sendSensorsEnlistEvent(sensorsAo);
}
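The SensorsAo payload class is not shown in the post. Judging from the getters and setters used around it, it is presumably a plain DTO along these lines (a sketch only; the field types are inferred from the calling code, and the flag default is an assumption):

package com.wanshun.rpcao;

import java.util.Map;

/**
 * Sketch of the event payload carried through Kafka; the real class is not
 * shown in the post, so field types are inferred from the surrounding calls.
 */
public class SensorsAo {
    /** Sensors event name, e.g. "DriverAcceptOrderResult". */
    private String eventName;
    /** Logged-in user ID; the consumer calls userId.toString(). */
    private Long userId;
    /** Order ID; also used as the Kafka balance/partition key. */
    private Long orderId;
    /** Event properties forwarded to sa.track(...). */
    private Map<String, Object> param;
    /** Passed to the SDK as the isLoginId argument of track(...); default assumed. */
    private boolean flag = true;

    public String getEventName() { return eventName; }
    public void setEventName(String eventName) { this.eventName = eventName; }
    public Long getUserId() { return userId; }
    public void setUserId(Long userId) { this.userId = userId; }
    public Long getOrderId() { return orderId; }
    public void setOrderId(Long orderId) { this.orderId = orderId; }
    public Map<String, Object> getParam() { return param; }
    public void setParam(Map<String, Object> param) { this.param = param; }
    public boolean isFlag() { return flag; }
    public void setFlag(boolean flag) { this.flag = flag; }
}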

The Kafka producer for tracking events

package com.wanshun.order.cluster.producer;

import com.wanshun.common.utils.JsonUtil;
import com.wanshun.constants.platform.daijiaservice.DaiJiaKafkaTopicConstants;
import com.wanshun.net.cluster.ClusterEventBusProducer;
import com.wanshun.net.cluster.metadata.ClusterEvent;
import com.wanshun.net.kafka.KafkaConfig;
import com.wanshun.rpcao.SensorsAo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Kafka producer for Sensors Analytics tracking events.
 *
 * @author htc
 */
public class SensorsBurialSiteClusterEventProducer {

    private static final Logger logger = LoggerFactory.getLogger(SensorsBurialSiteClusterEventProducer.class);

    private final ClusterEventBusProducer producer;

    public SensorsBurialSiteClusterEventProducer(KafkaConfig kafkaConfig) {
        producer = ClusterEventBusProducer.getClusterEventBusProducer();
        producer.init(kafkaConfig);
        producer.addTopic(DaiJiaKafkaTopicConstants.THIRDPLAT_SERVICE_OUTER_TOPIC, false);
    }

    /**
     * Publish a tracking event to Kafka.
     *
     * @param sensorsAo the event payload
     * @return true if the event was published successfully
     */
    public boolean sendSensorsEnlistEvent(SensorsAo sensorsAo) {
        ClusterEvent clusterEvent = new ClusterEvent();
        String topic = DaiJiaKafkaTopicConstants.THIRDPLAT_SERVICE_OUTER_TOPIC;
        try {
            // Use the order ID as the balance key so events for one order stay in one partition
            clusterEvent.setBalanceId(sensorsAo.getOrderId());
            clusterEvent.setData(JsonUtil.toJson(sensorsAo));
            clusterEvent.setClusterEventType(DaiJiaKafkaTopicConstants.THIRDPLAT_SERVICE_OUTER_TOPIC_EVENTTYPE_SENSORS);
            boolean result = producer.publishImportantEvent(topic, clusterEvent);
            if (!result) {
                logger.error("Sensors tracking event failed to publish to Kafka, orderId: {}", sensorsAo.getOrderId());
                throw new RuntimeException("Failed to publish Sensors tracking event");
            }
            logger.info("Sensors tracking event published, orderId: {}", sensorsAo.getOrderId());
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            // Surface the failure to the caller instead of always returning true
            return false;
        }
        return true;
    }
}
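The post does not show how this producer is registered. A minimal Spring wiring sketch, assuming a KafkaConfig bean is already defined elsewhere (the SensorsProducerConfig class itself is hypothetical):

package com.wanshun.order.config; // hypothetical package

import com.wanshun.net.kafka.KafkaConfig;
import com.wanshun.order.cluster.producer.SensorsBurialSiteClusterEventProducer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class SensorsProducerConfig {

    /**
     * Register the producer as a singleton so business services can inject it;
     * assumes a KafkaConfig bean already exists in the context.
     */
    @Bean
    public SensorsBurialSiteClusterEventProducer sensorsBurialSiteClusterEventProducer(KafkaConfig kafkaConfig) {
        return new SensorsBurialSiteClusterEventProducer(kafkaConfig);
    }
}

Because the producer sets balanceId to the order ID, all events for one order land in the same partition and are therefore consumed in order.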

The Kafka consumer for tracking events

package com.wanshun.cluster;

import com.alibaba.fastjson.JSON;
import com.sensorsdata.analytics.javasdk.SensorsAnalytics;
import com.wanshun.constants.platform.daijiaservice.DaiJiaKafkaTopicConstants;
import com.wanshun.constants.platform.thirdplatform.ThirdplatformModuleConstant;
import com.wanshun.net.cluster.ClusterEventBusConsumer;
import com.wanshun.net.cluster.ClusterEventSubscribe;
import com.wanshun.net.cluster.listener.ClusterEventBusListener;
import com.wanshun.net.cluster.metadata.ClusterEvent;
import com.wanshun.net.kafka.KafkaConfig;
import com.wanshun.rpcao.SensorsAo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.transaction.annotation.Transactional;

import java.util.Set;

/**
 * Kafka consumer that forwards tracking events to Sensors Analytics.
 */
public class SensorsConsumer extends ClusterEventBusListener implements ApplicationContextAware {

    private static final Logger logger = LoggerFactory.getLogger(SensorsConsumer.class);

    @Autowired
    private SensorsAnalytics sa;

    private final KafkaConfig kafkaConfig;

    // volatile so the double-checked initialization below is safe
    private volatile boolean isInit = false;

    public SensorsConsumer(KafkaConfig kafkaConfig) {
        this.kafkaConfig = kafkaConfig;
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        if (!isInit) {
            synchronized (this) {
                if (!isInit) {
                    isInit = true;
                    ClusterEventSubscribe subscribe = new ClusterEventSubscribe();
                    subscribe.addTopic(false, false, DaiJiaKafkaTopicConstants.THIRDPLAT_SERVICE_OUTER_TOPIC);
                    ClusterEventBusConsumer consumer = ClusterEventBusConsumer.getClusterEventBusConsumer(
                            ThirdplatformModuleConstant.GROUP_NAME_SERVER, kafkaConfig);
                    consumer.init(this, subscribe);
                }
            }
        }
        logger.info("Third-party-service Kafka consumer started");
    }

    @Override
    public boolean checkCanStartConsumer(String topicName) {
        return DaiJiaKafkaTopicConstants.THIRDPLAT_SERVICE_OUTER_TOPIC.equals(topicName);
    }

    @Transactional
    @Override
    public void eventNotify(String topicName, ClusterEvent clusterEvent) {
        logger.info("Consuming Sensors tracking event: {}, topicName: {}", JSON.toJSONString(clusterEvent), topicName);
        if (DaiJiaKafkaTopicConstants.THIRDPLAT_SERVICE_OUTER_TOPIC.equals(topicName)
                && DaiJiaKafkaTopicConstants.THIRDPLAT_SERVICE_OUTER_TOPIC_EVENTTYPE_SENSORS == clusterEvent.getClusterEventType()) {
            String data = clusterEvent.getData();
            SensorsAo sensorsAo = JSON.parseObject(data, SensorsAo.class);
            try {
                // Forward the event to Sensors Analytics via the Java SDK
                sa.track(sensorsAo.getUserId().toString(), sensorsAo.isFlag(),
                        sensorsAo.getEventName(), sensorsAo.getParam());
                sa.flush();
            } catch (Exception e) {
                logger.error("Failed to push data to Sensors Analytics, data: {}, sensorsAo: {}",
                        data, JSON.toJSONString(sensorsAo), e);
            }
            logger.info("Sensors tracking event consumed: {}, topicName: {}",
                    JSON.toJSONString(clusterEvent), topicName);
        }
    }

    @Override
    public void notifyPartition(String topicName, Set<Integer> partitionSet) {
    }

    @Override
    public void destroyPartition(String topicName, Set<Integer> partitionSet) {
    }
}
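Stripped of the Kafka plumbing, the consumer ultimately calls the SDK's track(distinctId, isLoginId, eventName, properties). A standalone sketch of that call, under the same SDK assumptions as the config above (the IDs, property values, and log path are illustrative):

import com.sensorsdata.analytics.javasdk.SensorsAnalytics;
import com.sensorsdata.analytics.javasdk.consumer.ConcurrentLoggingConsumer;

import java.util.HashMap;
import java.util.Map;

public class TrackExample {
    public static void main(String[] args) throws Exception {
        // Illustrative setup; in the service above, sa is the autowired bean.
        SensorsAnalytics sa = new SensorsAnalytics(new ConcurrentLoggingConsumer("/tmp/sa.log"));

        Map<String, Object> properties = new HashMap<>();
        properties.put("service_type", "800");
        properties.put("is_success", true);

        // distinctId plus isLoginId: true marks "882100" as a login ID rather than
        // an anonymous ID - this is the flag that sensorsAo.isFlag() carries through Kafka.
        sa.track("882100", true, "DriverAcceptOrderResult", properties);

        sa.flush();    // push buffered events out immediately
        sa.shutdown(); // release SDK resources on exit
    }
}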

Finally, open the Sensors Analytics admin console and check that the corresponding events are arriving.
