1. Add the data models for the smart beehive: bee_hive and its log;
2. Set up the Netty server (frame splitting and sticky-packet handling);
3. Parse the smart beehive device protocol (mock access only; the device firmware is not finished yet);
4. Create the related Util classes for time, transcoding, check codes, etc.
main
parent aeba220a9a
commit 7b9ffea60b
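Note: the time, transcoding and check-code Util classes mentioned in item 4 of the commit message are not part of this excerpt. As a rough illustration of the kind of helper "check code" refers to, a minimal XOR check-code sketch might look like the code below; the class name CheckCodeUtil and the XOR scheme are assumptions, not the committed implementation.

package com.hive.util;

/**
 * Hypothetical sketch of a check-code helper; the committed Util classes are not shown in this diff.
 */
public class CheckCodeUtil {

    /**
     * XOR all payload bytes and return the result as a two-character upper-case hex string,
     * e.g. for appending to a device frame before the end delimiter.
     */
    public static String xorCheckCode(byte[] payload) {
        int check = 0;
        for (byte b : payload) {
            check ^= b & 0xFF; // accumulate the XOR of every byte
        }
        return String.format("%02X", check);
    }

    /**
     * Verify that the received check code matches the one computed from the payload.
     */
    public static boolean verify(byte[] payload, String receivedCheckCode) {
        return xorCheckCode(payload).equalsIgnoreCase(receivedCheckCode);
    }
}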
@@ -0,0 +1,101 @@
package com.hive.bee.entity;

import lombok.*;

import java.io.Serializable;
import java.time.LocalDateTime;

import com.baomidou.mybatisplus.annotation.*;

/**
 * Beehive DO
 *
 * @author 芋道源码
 */
@TableName("bee_hive")
@KeySequence("bee_hive_seq") // Primary-key sequence for Oracle, PostgreSQL, Kingbase, DB2 and H2; can be omitted for MySQL and the like.
@Data
@ToString(callSuper = true)
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class BeeHive implements Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * Primary key
     */
    @TableId
    private Long id;
    /**
     * Apiary id
     *
     * Enum {@link TODO the class corresponding to mf_type}
     */
    private Long peakFieldId;
    /**
     * Beehive number
     */
    private String beeHiveNumber;
    /**
     * Beehive name
     */
    private String beeHiveName;
    /**
     * MAC address
     */
    private String beeHiveMac;
    /**
     * Beehive location
     */
    private String beeHivePosition;
    /**
     * Last online time
     */
    private LocalDateTime lastOnlineTime;
    /**
     * Online status
     */
    private Integer onlineType;
    /**
     * Beehive weight
     */
    private String beeHiveWeight;
    /**
     * Current temperature
     */
    private String currentTemperature;
    /**
     * Current humidity
     */
    private String currentHumidity;
    /**
     * Current noise level
     */
    private String currentNoise;
    /**
     * Deleted flag
     */
    private boolean deleted;
    /**
     * Device id
     */
    private long deviceId;
    /**
     * Bee count
     */
    private String currentNumber;
    /**
     * Battery level
     */
    private String electricQuantity;

    /**
     * Data upload time
     */
    @TableField(exist = false)
    private LocalDateTime dateUploadTime;

}

@@ -0,0 +1,70 @@
package com.hive.bee.entity;

import lombok.*;

import java.io.Serializable;
import java.time.LocalDateTime;

import com.baomidou.mybatisplus.annotation.*;

/**
 * Beehive log DO
 *
 * @author 智能蜂箱
 */
@TableName("bee_hive_log")
@KeySequence("bee_hive_log_seq") // Primary-key sequence for Oracle, PostgreSQL, Kingbase, DB2 and H2; can be omitted for MySQL and the like.
@Data
@ToString(callSuper = true)
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class BeeHiveLog implements Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * Primary key
     */
    @TableId
    private Long id;
    /**
     * Apiary id
     */
    private Long peakFieldId;
    /**
     * Beehive number
     */
    private String beeHiveNumber;
    /**
     * Beehive name
     */
    private String beeHiveName;
    /**
     * Weight
     */
    private String beeHiveWeight;
    /**
     * Temperature
     */
    private String currentTemperature;
    /**
     * Humidity
     */
    private String currentHumidity;
    /**
     * Noise level
     */
    private String currentNoise;
    /**
     * Deleted flag
     */
    private boolean deleted;
    /**
     * Beehive id
     */
    private long beeHiveId;

    /**
     * Data upload time
     */
    private LocalDateTime dateUploadTime;
}

@@ -0,0 +1,16 @@
package com.hive.bee.mapper;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.hive.bee.entity.BeeHiveLog;
import org.apache.ibatis.annotations.Mapper;

/**
 * Beehive history data Mapper
 */
@Mapper
public interface HiveLogMapper extends BaseMapper<BeeHiveLog> {

}

@@ -0,0 +1,15 @@
package com.hive.bee.mapper;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.hive.bee.entity.BeeHive;
import org.apache.ibatis.annotations.Mapper;

/**
 * Beehive Mapper
 */
@Mapper
public interface HiveMapper extends BaseMapper<BeeHive> {

}

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.hive.bee.mapper.HiveLogMapper">
</mapper>

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.hive.bee.mapper.HiveMapper">
</mapper>

@@ -0,0 +1,14 @@
package com.hive.bee.service;

import com.baomidou.mybatisplus.extension.service.IService;
import com.hive.bee.entity.BeeHiveLog;

/**
 * Beehive history data Service interface
 */
public interface HiveLogService extends IService<BeeHiveLog> {

}

@@ -0,0 +1,13 @@
package com.hive.bee.service;

import com.baomidou.mybatisplus.extension.service.IService;
import com.hive.bee.entity.BeeHive;

/**
 * Beehive Service interface
 */
public interface HiveService extends IService<BeeHive> {

}

@@ -0,0 +1,20 @@
package com.hive.bee.service.impl;

import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.hive.bee.entity.BeeHiveLog;
import com.hive.bee.mapper.HiveLogMapper;
import com.hive.bee.service.HiveLogService;
import org.springframework.stereotype.Service;

/**
 * Beehive history data Service implementation
 */
@Service
public class HiveLogServiceImpl extends ServiceImpl<HiveLogMapper, BeeHiveLog> implements HiveLogService {

}

@@ -0,0 +1,17 @@
package com.hive.bee.service.impl;

import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.hive.bee.entity.BeeHive;
import com.hive.bee.mapper.HiveMapper;
import com.hive.bee.service.HiveService;
import org.springframework.stereotype.Service;

/**
 * Beehive Service implementation
 */
@Service
public class HiveServiceImpl extends ServiceImpl<HiveMapper, BeeHive> implements HiveService {

}

@@ -0,0 +1,89 @@
package com.hive.bee.vo;

import com.baomidou.mybatisplus.annotation.KeySequence;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.*;

import java.io.Serializable;
import java.time.LocalDateTime;

/**
 * Beehive VO
 *
 * @author 芋道源码
 */
@TableName("bee_hive")
@KeySequence("bee_hive_seq") // Primary-key sequence for Oracle, PostgreSQL, Kingbase, DB2 and H2; can be omitted for MySQL and the like.
@Data
@ToString(callSuper = true)
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class BeeHiveVo implements Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * Apiary id
     *
     * Enum {@link TODO the class corresponding to mf_type}
     */
    private Long peakFieldId;
    /**
     * Beehive number
     */
    private String beeHiveNumber;
    /**
     * Beehive name
     */
    private String beeHiveName;
    /**
     * MAC address
     */
    private String beeHiveMac;
    /**
     * Beehive location
     */
    private String beeHivePosition;
    /**
     * Last online time
     */
    private LocalDateTime lastOnlineTime;
    /**
     * Online status
     */
    private Integer onlineType;
    /**
     * Beehive weight
     */
    private String beeHiveWeight;
    /**
     * Current temperature
     */
    private String currentTemperature;
    /**
     * Current humidity
     */
    private String currentHumidity;
    /**
     * Current noise level
     */
    private String currentNoise;
    /**
     * Device id
     */
    private long deviceId;
    /**
     * Bee count
     */
    private String currentNumber;
    /**
     * Battery level
     */
    private String electricQuantity;

    /**
     * Data upload time
     */
    private LocalDateTime dateUploadTime;

}

@@ -0,0 +1,42 @@
package com.hive.communication.netty.server;

import com.hive.config.netty.NettyConfig;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;

/**
 * @Author: HIVE - LGH
 * @Date: 2024/10/11 15:28
 * @Version: V1.0
 */
public class ChannelManager {

    // Add a Channel to the group and to the device-id map
    public static void addChannel(Channel channel, String deviceId) {
        NettyConfig.group.add(channel);
        NettyConfig.portToChannelMap.put(deviceId, channel);
    }

    // Remove a Channel from the group and from the map
    public static void removeChannel(Channel channel) {
        NettyConfig.group.remove(channel);
        NettyConfig.portToChannelMap.values().removeIf(c -> c.equals(channel));
    }

    // Close the Channel registered for the given device id
    // (the map is keyed by the String device id, so it must be looked up by that key)
    public static void closeChannel(String deviceId) {
        Channel channel = NettyConfig.portToChannelMap.get(deviceId);
        if (channel != null && channel.isOpen()) {
            ChannelFuture future = channel.close();
            future.addListener(f -> {
                if (f.isSuccess()) {
                    NettyConfig.portToChannelMap.remove(deviceId);
                }
            });
        }
    }

    // Close all Channels
    public static void closeAllChannels() {
        NettyConfig.group.close().syncUninterruptibly();
        NettyConfig.portToChannelMap.clear();
    }
}

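Note: EchoServerHandler, which EchoServer installs into the pipeline below, is not included in this excerpt. A minimal sketch of how such a handler might tie into ChannelManager is shown here, assuming the device id is pulled out of each decoded frame; the field parsing is a placeholder, since per the commit message the real device protocol handling is only stubbed.

package com.hive.communication.netty.server.handler;

import com.hive.communication.netty.server.ChannelManager;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.CharsetUtil;
import io.netty.util.ReferenceCountUtil;

/**
 * Hypothetical sketch only; the committed EchoServerHandler is not part of this diff.
 */
public class EchoServerHandler extends ChannelInboundHandlerAdapter {

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) {
        ByteBuf frame = (ByteBuf) msg;
        try {
            // Placeholder parsing: read the frame as text and treat the first field as the device id.
            String text = frame.toString(CharsetUtil.UTF_8);
            String deviceId = text.split(",")[0];
            // Register the connection so it can later be looked up or closed by device id.
            ChannelManager.addChannel(ctx.channel(), deviceId);
        } finally {
            ReferenceCountUtil.release(frame);
        }
    }

    @Override
    public void channelInactive(ChannelHandlerContext ctx) {
        // Drop the connection from the group and the device-id map when the client disconnects.
        ChannelManager.removeChannel(ctx.channel());
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        cause.printStackTrace();
        ctx.close();
    }
}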
@@ -0,0 +1,117 @@
package com.hive.communication.netty.server;

import com.hive.communication.netty.server.decoder.DelimiterBasedFrameDecoder;
import com.hive.communication.netty.server.handler.EchoServerHandler;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.bytes.ByteArrayEncoder;
import io.netty.handler.codec.string.StringEncoder;

import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;

public class EchoServer {

    private final int port;

    public EchoServer(int port) {
        this.port = port;
    }

    private static final Map<Integer, Channel> portToChannelMap = new HashMap<>();
    private static final Map<Integer, ServerBootstrap> portToBootstrapMap = new HashMap<>();
    private static final Map<Integer, EventLoopGroup> portToEventLoopGroupMap = new HashMap<>();

    public void start() {
        EventLoopGroup bossGroup = new NioEventLoopGroup();
        EventLoopGroup group = new NioEventLoopGroup();
        try {
            ServerBootstrap sb = new ServerBootstrap();
            sb.group(bossGroup, group) // boss group accepts connections, worker group handles I/O
                    .channel(NioServerSocketChannel.class) // channel implementation to use
                    .localAddress(port) // port to listen on
                    // backlog of pending connections
                    .option(ChannelOption.SO_BACKLOG, 128)
                    // send data immediately (disable Nagle) on accepted connections
                    .childOption(ChannelOption.TCP_NODELAY, true)
                    // keep connections alive
                    .childOption(ChannelOption.SO_KEEPALIVE, true)
                    // set up the pipeline for every new client connection
                    .childHandler(new ChannelInitializer<SocketChannel>() {

                        @Override
                        protected void initChannel(SocketChannel ch) throws Exception {
                            System.out.println("报告");
                            System.out.println("信息:有一客户端链接到本服务端");
                            System.out.println("IP:" + ch.localAddress().getHostName());
                            System.out.println("Port:" + ch.localAddress().getPort());
                            System.out.println("报告完毕");

                            ch.pipeline().addLast(new StringEncoder(Charset.forName("UTF-8")));
                            // frames are delimited by the start marker "99" and the end marker "0A"
                            String startDelimiterStr = "99";
                            String endDelimiterStr = "0A";
                            ByteBuf startDelimiter = Unpooled.copiedBuffer(startDelimiterStr, Charset.forName("UTF-8"));
                            ByteBuf endDelimiter = Unpooled.copiedBuffer(endDelimiterStr, Charset.forName("UTF-8"));
                            ch.pipeline().addLast(new DelimiterBasedFrameDecoder(startDelimiter, endDelimiter));
                            ch.pipeline().addLast(new EchoServerHandler());
                            ch.pipeline().addLast(new ByteArrayEncoder());
                        }
                    });
            ChannelFuture cf = sb.bind().sync(); // bind the server asynchronously and wait for it
            System.out.println(EchoServer.class + " 启动正在监听: " + cf.channel().localAddress());
            portToChannelMap.put(port, cf.channel());
            portToBootstrapMap.put(port, sb);
            portToEventLoopGroupMap.put(port, group);
            cf.channel().closeFuture().sync(); // block until the server channel is closed
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // release the event loop resources
            group.shutdownGracefully();
            bossGroup.shutdownGracefully();
        }
    }

    public void stopServer(int port) {
        Channel channel = portToChannelMap.get(port);
        if (channel != null && channel.isOpen()) {
            ChannelFuture future = channel.close();
            future.addListener(f -> {
                if (f.isSuccess()) {
                    portToChannelMap.remove(port);
                    // dispose of the ServerBootstrap and EventLoopGroups once the Channel is closed
                    handleServerBootstrapAndEventLoopGroup(port);
                } else {
                    // handle a failed close here if needed
                }
            });
        } else {
            // the Channel does not exist or is already closed
            handleServerBootstrapAndEventLoopGroup(port);
        }
    }

    private void handleServerBootstrapAndEventLoopGroup(int port) {
        ServerBootstrap bootstrap = portToBootstrapMap.get(port);
        if (bootstrap != null) {
            EventLoopGroup bossGroup = portToEventLoopGroupMap.get(port);
            EventLoopGroup workerGroup = bootstrap.childGroup();
            bossGroup.shutdownGracefully();
            workerGroup.shutdownGracefully();
            portToBootstrapMap.remove(port);
            portToEventLoopGroupMap.remove(port);
        } else {
            System.out.println("找不到端口" + port);
        }
    }

}

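Note: the custom DelimiterBasedFrameDecoder from com.hive.communication.netty.server.decoder is also not part of this excerpt (Netty's built-in io.netty.handler.codec.DelimiterBasedFrameDecoder only matches end delimiters, which is presumably why a custom class is used). A minimal sketch of a start/end delimiter decoder, assuming each frame runs from the "99" marker through the "0A" marker, could look like this:

package com.hive.communication.netty.server.decoder;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;

import java.util.List;

/**
 * Hypothetical sketch of a start/end delimiter frame decoder; the committed class is not shown in this diff.
 */
public class DelimiterBasedFrameDecoder extends ByteToMessageDecoder {

    private final ByteBuf startDelimiter;
    private final ByteBuf endDelimiter;

    public DelimiterBasedFrameDecoder(ByteBuf startDelimiter, ByteBuf endDelimiter) {
        this.startDelimiter = startDelimiter;
        this.endDelimiter = endDelimiter;
    }

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) {
        int start = ByteBufUtil.indexOf(startDelimiter, in);
        if (start < 0) {
            return; // start marker not seen yet, wait for more bytes
        }
        // Drop anything before the start marker (e.g. the tail of a corrupted frame).
        in.readerIndex(start);
        int end = ByteBufUtil.indexOf(endDelimiter, in);
        if (end < 0) {
            return; // frame is not complete yet
        }
        // Emit one frame including both delimiters; the next handler does the protocol parsing.
        ByteBuf frame = in.readRetainedSlice(end + endDelimiter.readableBytes() - in.readerIndex());
        out.add(frame);
    }
}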
@@ -0,0 +1,15 @@
package com.hive.communication.netty.server;

import com.hive.communication.util.NettyServerUtil;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

@Component
public class NettyServerRunner implements CommandLineRunner {

    @Override
    public void run(String... args) throws Exception {
        // EchoServer.start() blocks until the server channel is closed.
        new EchoServer(NettyServerUtil.getPort()).start();
    }
}

@@ -0,0 +1,37 @@
package com.hive.communication.netty.server.dataprocessing;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.hive.bee.entity.BeeHive;
import com.hive.bee.entity.BeeHiveLog;
import com.hive.bee.service.HiveLogService;
import com.hive.bee.service.HiveService;
import com.hive.bee.vo.BeeHiveVo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import static com.hive.util.ConverterUtil.copySameFields;

@Slf4j
@Component
public class BeeHiveAdd {

    @Autowired
    private HiveService hiveService;
    @Autowired
    private HiveLogService hiveLogService;

    public void addAndEdit(BeeHive beeHive) {
        hiveService.getBaseMapper().updateById(beeHive);
        BeeHiveLog beeHiveLog = new BeeHiveLog();
        BeeHiveVo beeHiveVo = new BeeHiveVo();
        // copy through the VO to drop the fields that should not be logged (id, deleted, ...)
        copySameFields(beeHive, beeHiveVo);
        copySameFields(beeHiveVo, beeHiveLog);
        beeHiveLog.setBeeHiveId(beeHive.getId());
        hiveLogService.save(beeHiveLog);
    }

    public BeeHive getOneBeeHive(long deviceId) {
        QueryWrapper<BeeHive> queryWrapper = new QueryWrapper<>();
        // match the non-deleted record registered for this device
        queryWrapper.lambda().eq(BeeHive::isDeleted, false).eq(BeeHive::getDeviceId, deviceId);
        return hiveService.getBaseMapper().selectOne(queryWrapper);
    }
}

@@ -0,0 +1,26 @@
package com.hive.communication.netty.server.launch;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;

import java.util.concurrent.TimeUnit;

public class TimeServerHandler extends ChannelInboundHandlerAdapter {

    // ChannelHandlerContext is the handler's view of the pipeline
    @Override
    public void channelActive(final ChannelHandlerContext ctx) {
        // Schedule a periodic write on the channel's event loop instead of
        // blocking it with an endless loop and Thread.sleep().
        ctx.executor().scheduleAtFixedRate(() -> {
            ByteBuf time = ctx.alloc().buffer(4); // allocate a 4-byte ByteBuf
            time.writeInt((int) (System.currentTimeMillis() / 1000L + 2208988800L));
            ctx.writeAndFlush(time);
        }, 0, 200, TimeUnit.MILLISECONDS);
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        cause.printStackTrace();
        ctx.close();
    }
}

@@ -0,0 +1,15 @@
package com.hive.communication.util;

import lombok.extern.slf4j.Slf4j;

/**
 * Holds the Netty server port read from configuration so it can be used outside the Spring context.
 */
@Slf4j
public class NettyServerUtil {

    private static int port;

    public static void setPort(int port) {
        NettyServerUtil.port = port;
    }

    public static int getPort() {
        return port;
    }

}

@@ -0,0 +1,17 @@
package com.hive.config.netty;

import io.netty.channel.Channel;
import io.netty.channel.group.ChannelGroup;
import io.netty.channel.group.DefaultChannelGroup;
import io.netty.util.concurrent.GlobalEventExecutor;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class NettyConfig {

    /**
     * Stores the channel of every client that connects
     */
    public static ChannelGroup group = new DefaultChannelGroup(GlobalEventExecutor.INSTANCE);

    /**
     * Maps a device id to its channel
     */
    public static final Map<String, Channel> portToChannelMap = new ConcurrentHashMap<>();
}

@@ -0,0 +1,19 @@
package com.hive.config.netty;

import com.hive.communication.util.NettyServerUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

@Slf4j
@Configuration
public class NettyHiveServerConfig implements InitializingBean {

    @Value(value = "${hive-server.port}")
    private int port;

    // Push the configured port into NettyServerUtil once the property has been injected.
    @Override
    public void afterPropertiesSet() {
        NettyServerUtil.setPort(port);
    }
}

@@ -1,33 +0,0 @@
package com.hive.controller;

import com.hive.common.AjaxResult;
import com.hive.entity.XieZhuan;
import com.hive.service.XieZhuanService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.util.List;

@RestController
public class ExcelController {

    @Autowired
    private XieZhuanService xieZhuanService;

    @PostMapping("/importFile")
    private AjaxResult importFile(@RequestParam("file") MultipartFile file) throws IOException {

        xieZhuanService.importFile(file);
        return AjaxResult.success("导入成功");
    }

    @GetMapping("/getXZList")
    private AjaxResult getXZList(){
        List<XieZhuan> list = xieZhuanService.getXZList();
        return AjaxResult.success("查询成功",list);
    }

}

@@ -1,34 +0,0 @@
package com.hive.entity;

import com.alibaba.excel.annotation.ExcelProperty;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

@Data
@TableName("xiezhuan")
public class XieZhuan {

    @TableId(type= IdType.AUTO)
    private Long id;

    @ExcelProperty("省代码")
    private String provinceCode;

    @ExcelProperty("省公司")
    private String provinceCompanny;

    @ExcelProperty("数据日期")
    private String dataDate;

    @ExcelProperty("省侧系统原因导致携转业务失败量")
    private String failCount;

    @ExcelProperty("携转业务总量")
    private String businessCount;

    @ExcelProperty("携转业务接口成功率")
    private String successRate;

}

@@ -1,9 +0,0 @@
package com.hive.mapper;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.coffee.entity.XieZhuan;
import org.apache.ibatis.annotations.Mapper;

@Mapper
public interface XieZhuanMapper extends BaseMapper<XieZhuan> {
}

@@ -1,16 +0,0 @@
package com.hive.service;

import com.baomidou.mybatisplus.extension.service.IService;
import com.hive.entity.XieZhuan;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.util.List;

public interface XieZhuanService extends IService<XieZhuan> {

    void importFile(MultipartFile file) throws IOException;

    List<XieZhuan> getXZList();
}

@@ -1,90 +0,0 @@
package com.hive.service.impl;

import com.alibaba.excel.EasyExcel;
import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.coffee.entity.XieZhuan;
import com.coffee.mapper.XieZhuanMapper;
import com.hive.service.XieZhuanService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.UUID;

@Service
public class XieZhuanImpl extends ServiceImpl<XieZhuanMapper, XieZhuan> implements XieZhuanService {

    @Autowired
    XieZhuanMapper xieZhuanMapper;

    private static final String UPLOAD_DIR = "C:\\Users\\Lenovo\\Desktop\\";

    @Override
    public void importFile(MultipartFile file) throws IOException {

        byte[] bytes = file.getBytes();
        Path path = Paths.get(UPLOAD_DIR);

        // Create the directory if it does not exist
        Files.createDirectories(path);

        String fileName = file.getOriginalFilename();

        // Save the file
        Files.write(path.resolve(fileName), bytes);

        String filePath = UPLOAD_DIR + fileName;
        readExcel(filePath, XieZhuan.class, new CustomExcelListener<XieZhuan>(fileName));
    }

    @Override
    public List<XieZhuan> getXZList() {
        return xieZhuanMapper.selectList(null);
    }

    class CustomExcelListener<T> extends AnalysisEventListener<T> {

        private String filename;

        public CustomExcelListener() {
        }

        public CustomExcelListener(String filename) {
            this.filename = filename;
        }

        @Override
        public void invoke(T object, AnalysisContext context) {

            switch (filename) {
                case "携转业务成功率日指标.xlsx":
                    xieZhuanMapper.insert((XieZhuan) object);
                    break;
            }
            System.out.println("解析数据:" + object);
        }

        @Override
        public void doAfterAllAnalysed(AnalysisContext context) {
            System.out.println("读取" + filename + "文件并存入数据库结束======");
        }

    }

    public static <T> void readExcel(String path, Class<T> clazz, AnalysisEventListener<T> listener) {
        EasyExcel.read(path, clazz, listener).sheet().doRead();
    }

}

@@ -0,0 +1,30 @@
package com.hive.util;

import java.lang.reflect.Field;

public class ConverterUtil {

    /**
     * Copies every field that exists with the same name and type in both objects from source to target.
     */
    public static <T, U> void copySameFields(T source, U target) {
        Field[] sourceFields = source.getClass().getDeclaredFields();
        Field[] targetFields = target.getClass().getDeclaredFields();
        for (Field sourceField : sourceFields) {
            for (Field targetField : targetFields) {
                if (sourceField.getName().equals(targetField.getName()) &&
                        sourceField.getType().equals(targetField.getType())) {
                    try {
                        sourceField.setAccessible(true);
                        targetField.setAccessible(true);
                        targetField.set(target, sourceField.get(source));
                    } catch (IllegalAccessException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }
}