Merge remote-tracking branch 'origin/master'
| | |
| | | - Path=/admin-api/model/** |
| | | filters: |
| | | - RewritePath=/admin-api/model/v3/api-docs, /v3/api-docs |
| | | ## fast service |
| | | - id: fast-admin-api # route ID |
| | | uri: grayLb://fast-server |
| | | predicates: # assertions used as the route matching conditions; corresponds to the RouteDefinition array |
| | | - Path=/admin-api/fast/** |
| | | filters: |
| | | - RewritePath=/admin-api/fast/v3/api-docs, /v3/api-docs |
| | | ## shasteel service |
| | | - id: shasteel-admin-api # route ID |
| | | uri: grayLb://shasteel-server |
| | |
| | | if (!PointDataTypeEnum.BOOLEAN.getCode().equals(dataType)) { |
| | | BigDecimal decValue = new BigDecimal(value.toString()); |
| | | if (PointDataTypeEnum.FLOAT.getCode().equals(dataType)) { |
| | | return decValue.setScale(2, BigDecimal.ROUND_HALF_UP); |
| | | // return decValue.setScale(2, BigDecimal.ROUND_HALF_UP); |
| | | return value; |
| | | } else if (PointDataTypeEnum.INT.getCode().equals(dataType)) { |
| | | decValue = decValue.setScale(0, BigDecimal.ROUND_HALF_UP); |
| | | } |
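The hunk above disables the two-decimal rounding for FLOAT points (the old line is commented out and the raw value is returned), while INT points are still rounded to whole numbers and BOOLEAN points skip the BigDecimal path entirely. A minimal standalone sketch of the resulting rule, assuming illustrative type codes in place of PointDataTypeEnum and using RoundingMode.HALF_UP instead of the deprecated BigDecimal.ROUND_HALF_UP:

import java.math.BigDecimal;
import java.math.RoundingMode;

class ValueNormalizerSketch {
    // Illustrative only: "BOOLEAN" / "FLOAT" / "INT" stand in for the enum codes used above.
    static Object normalize(String dataType, Object value) {
        if ("BOOLEAN".equals(dataType)) {
            return value;                                      // booleans pass through untouched
        }
        BigDecimal decValue = new BigDecimal(value.toString());
        if ("FLOAT".equals(dataType)) {
            return value;                                      // rounding disabled by this change
        } else if ("INT".equals(dataType)) {
            return decValue.setScale(0, RoundingMode.HALF_UP); // round to a whole number
        }
        return decValue;
    }

    public static void main(String[] args) {
        System.out.println(normalize("FLOAT", 3.14159));       // 3.14159 (raw value)
        System.out.println(normalize("INT", 3.6));             // 4
    }
}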
| | |
| | | @AllArgsConstructor |
| | | public enum ArcTypeEnum { |
| | | |
| | | MIN15("MIN15","15分钟"), |
| | | |
| | | HOUR("HOUR","时"), |
| | | |
| | | SHIFT("SHIFT","班"), |
| | |
| | | package com.iailab.module.data.job.task; |
| | | |
| | | import com.iailab.module.data.arc.service.ArcDataService; |
| | | import com.iailab.module.data.common.enums.ArcTypeEnum; |
| | | import org.slf4j.Logger; |
| | | import org.slf4j.LoggerFactory; |
| | | import org.springframework.stereotype.Component; |
| | | |
| | | import javax.annotation.Resource; |
| | | import java.time.LocalDateTime; |
| | | |
| | | /** |
| | | * @description: Point archiving (15 min) |
| | | * @author: dyk |
| | | * @date: 2025/2/20 16:30 |
| | | **/ |
| | | @Component("pointArchivingTaskNet15min") |
| | | public class PointArchivingTaskNet15min implements ITask { |
| | | private Logger logger = LoggerFactory.getLogger(getClass()); |
| | | |
| | | @Resource |
| | | private ArcDataService arcPointDataService; |
| | | |
| | | @Override |
| | | public void run(String params){ |
| | | logger.debug("PointArchivingTaskNet1h定时任务正在执行,参数为:{}", params); |
| | | try { |
| | | arcPointDataService.archiving(ArcTypeEnum.MIN15.getCode()); |
| | | logger.info("PointArchivingTaskNet1h定时任务完成时间:" + LocalDateTime.now()); |
| | | } catch (Exception ex) { |
| | | ex.printStackTrace(); |
| | | logger.error("PointArchivingTaskNet1h定时任务失败时间:" + LocalDateTime.now()); |
| | | } |
| | | |
| | | } |
| | | } |
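The archiving tasks in this change all share the same shape: a Spring @Component implementing ITask and delegating to ArcDataService.archiving with the matching ArcTypeEnum code. The ITask contract itself is not part of the diff; the sketch below assumes only the single method visible here:

// Assumed shape of the ITask interface implemented by the archiving tasks (only run(String) appears in this diff).
public interface ITask {
    void run(String params);
}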
| | |
| | | import com.iailab.module.data.arc.service.ArcDataService; |
| | | import org.slf4j.Logger; |
| | | import org.slf4j.LoggerFactory; |
| | | import org.springframework.stereotype.Component; |
| | | |
| | | import javax.annotation.Resource; |
| | | import java.time.LocalDateTime; |
| | |
| | | * @author: dyk |
| | | * @date: 2025/2/20 16:30 |
| | | **/ |
| | | @Component("pointArchivingTaskNet1h") |
| | | public class PointArchivingTaskNet1h implements ITask { |
| | | private Logger logger = LoggerFactory.getLogger(getClass()); |
| | | |
| | | private final String NET = "1h"; |
| | | |
| | | @Resource |
| | | private ArcDataService arcPointDataService; |
| | |
| | | import com.iailab.module.data.arc.service.ArcDataService; |
| | | import org.slf4j.Logger; |
| | | import org.slf4j.LoggerFactory; |
| | | import org.springframework.stereotype.Component; |
| | | |
| | | import javax.annotation.Resource; |
| | | import java.time.LocalDateTime; |
| | |
| | | * @author: dyk |
| | | * @date: 2025/2/20 16:30 |
| | | **/ |
| | | @Component("pointArchivingTaskNetDay") |
| | | public class PointArchivingTaskNetDay implements ITask { |
| | | private Logger logger = LoggerFactory.getLogger(getClass()); |
| | | |
| | | private final String NET = "1h"; |
| | | |
| | | @Resource |
| | | private ArcDataService arcPointDataService; |
| | |
| | | import com.iailab.module.data.arc.service.ArcDataService; |
| | | import org.slf4j.Logger; |
| | | import org.slf4j.LoggerFactory; |
| | | import org.springframework.stereotype.Component; |
| | | |
| | | import javax.annotation.Resource; |
| | | import java.time.LocalDateTime; |
| | |
| | | * @author: dyk |
| | | * @date: 2025/2/20 16:30 |
| | | **/ |
| | | @Component("pointArchivingTaskNetMonth") |
| | | public class PointArchivingTaskNetMonth implements ITask { |
| | | private Logger logger = LoggerFactory.getLogger(getClass()); |
| | | |
| | | private final String NET = "1h"; |
| | | |
| | | @Resource |
| | | private ArcDataService arcPointDataService; |
| | |
| | | import com.iailab.module.data.arc.service.ArcDataService; |
| | | import org.slf4j.Logger; |
| | | import org.slf4j.LoggerFactory; |
| | | import org.springframework.stereotype.Component; |
| | | |
| | | import javax.annotation.Resource; |
| | | import java.time.LocalDateTime; |
| | |
| | | * @author: dyk |
| | | * @date: 2025/2/20 16:30 |
| | | **/ |
| | | @Component("pointArchivingTaskNetShift") |
| | | public class PointArchivingTaskNetShift implements ITask { |
| | | private Logger logger = LoggerFactory.getLogger(getClass()); |
| | | |
| | | private final String NET = "1h"; |
| | | |
| | | @Resource |
| | | private ArcDataService arcPointDataService; |
| | |
| | | import com.iailab.module.data.arc.service.ArcDataService; |
| | | import org.slf4j.Logger; |
| | | import org.slf4j.LoggerFactory; |
| | | import org.springframework.stereotype.Component; |
| | | |
| | | import javax.annotation.Resource; |
| | | import java.time.LocalDateTime; |
| | |
| | | * @author: dyk |
| | | * @date: 2025/2/20 16:30 |
| | | **/ |
| | | @Component("pointArchivingTaskNetYear") |
| | | public class PointArchivingTaskNetYear implements ITask { |
| | | private Logger logger = LoggerFactory.getLogger(getClass()); |
| | | |
| | | private final String NET = "1h"; |
| | | |
| | | @Resource |
| | | private ArcDataService arcPointDataService; |
| | |
| | | import com.iailab.module.data.common.enums.DataTypeEnum; |
| | | import com.iailab.module.data.common.enums.JsErrorCode; |
| | | import com.iailab.module.data.common.utils.JavaScriptHandler; |
| | | import com.iailab.module.data.enums.DataPointFreqEnum; |
| | | import com.iailab.module.data.point.collection.PointCollector; |
| | | import com.iailab.module.data.point.collection.utils.GenInfluxPointValueUtils; |
| | | import com.iailab.module.data.point.dto.DaPointDTO; |
| | |
| | | BigDecimal coefficient = dto.getUnittransfactor() == null ? BigDecimal.ONE : dto.getUnittransfactor(); |
| | | BigDecimal calValue = new BigDecimal(rawValue.toString()).multiply(coefficient); |
| | | InfluxPointValuePOJO pojo = GenInfluxPointValueUtils.getByPoint(dto, calValue); |
| | | pojo.setTimestamp(collectTime.toInstant()); |
| | | pojo.setTimestamp(GenInfluxPointValueUtils.getByMin(collectTime, DataPointFreqEnum.getEumByCode(dto.getMinfreqid()))); |
| | | result.add(pojo); |
| | | } catch (Exception ex) { |
| | | ex.printStackTrace(); |
| | |
| | | package com.iailab.module.data.point.collection.handler; |
| | | |
| | | import com.iailab.module.data.enums.DataPointFreqEnum; |
| | | import com.iailab.module.data.point.collection.utils.GenInfluxPointValueUtils; |
| | | import com.iailab.module.data.point.dto.DaPointDTO; |
| | | import com.iailab.module.data.point.service.DaPointService; |
| | |
| | | } |
| | | dtos.forEach(dto -> { |
| | | InfluxPointValuePOJO pojo = GenInfluxPointValueUtils.getByPoint(dto); |
| | | pojo.setTimestamp(collectTime.toInstant()); |
| | | pojo.setTimestamp(GenInfluxPointValueUtils.getByMin(collectTime, DataPointFreqEnum.getEumByCode(dto.getMinfreqid()))); |
| | | dataMap.put(dto.getPointNo(), dto.getDefaultValue()); |
| | | result.add(pojo); |
| | | listGood.add(dto.getPointNo()); |
| | |
| | | try { |
| | | Object value = singleCompute(dto, collectTime, listGood, listBad); |
| | | InfluxPointValuePOJO pojo = GenInfluxPointValueUtils.getByPoint(dto, value); |
| | | pojo.setTimestamp(collectTime.toInstant()); |
| | | pojo.setTimestamp(GenInfluxPointValueUtils.getByMin(collectTime, DataPointFreqEnum.getEumByCode(dto.getMinfreqid()))); |
| | | result.add(pojo); |
| | | } catch (Exception ex) { |
| | | ex.printStackTrace(); |
| | |
| | | import com.iailab.module.data.channel.kio.collector.KingIOCollector; |
| | | import com.iailab.module.data.channel.modbus.collector.ModBusCollector; |
| | | import com.iailab.module.data.channel.opcua.collector.OpcUaCollector; |
| | | import com.iailab.module.data.enums.DataPointFreqEnum; |
| | | import com.iailab.module.data.point.collection.PointCollector; |
| | | import com.iailab.module.data.point.collection.utils.GenInfluxPointValueUtils; |
| | | import com.iailab.module.data.point.common.PointDataTypeEnum; |
| | |
| | | if (tagValues.containsKey(tagId)) { |
| | | Object value = handleData(dto, tagValues.get(tagId)); |
| | | InfluxPointValuePOJO pojo = GenInfluxPointValueUtils.getByPoint(dto, value); |
| | | pojo.setTimestamp(collectTime.toInstant()); |
| | | pojo.setTimestamp(GenInfluxPointValueUtils.getByMin(collectTime, DataPointFreqEnum.getEumByCode(dto.getMinfreqid()))); |
| | | dataMap.put(dto.getPointNo(), value); |
| | | result.add(pojo); |
| | | listGood.add(dto.getPointNo()); |
| | | } else { |
| | | InfluxPointValuePOJO pojo = GenInfluxPointValueUtils.getByPoint(dto, CommonConstant.BAD_VALUE); |
| | | pojo.setTimestamp(collectTime.toInstant()); |
| | | pojo.setTimestamp(GenInfluxPointValueUtils.getByMin(collectTime, DataPointFreqEnum.getEumByCode(dto.getMinfreqid()))); |
| | | result.add(pojo); |
| | | listBad.add(dto.getPointNo()); |
| | | log.info("值异常!TagId=" + tagId); |
| | |
| | | package com.iailab.module.data.point.collection.utils; |
| | | |
| | | import com.iailab.module.data.common.enums.DataTypeEnum; |
| | | import com.iailab.module.data.enums.DataPointFreqEnum; |
| | | import com.iailab.module.data.point.dto.DaPointDTO; |
| | | import com.iailab.module.data.influxdb.pojo.*; |
| | | |
| | | import java.time.Instant; |
| | | import java.util.Calendar; |
| | | import java.util.Date; |
| | | |
| | | /** |
| | | * @author PanZhibao |
| | |
| | | return new InfluxPointValueStrPOJO(); |
| | | } |
| | | } |
| | | |
| | | public static Instant getByMin(Date collectTime, DataPointFreqEnum freqEnum) { |
| | | Calendar cal = Calendar.getInstance(); |
| | | cal.setTime(collectTime); |
| | | switch (freqEnum) { |
| | | case NET_1MIN: |
| | | cal.set(Calendar.SECOND, 0); |
| | | break; |
| | | case NET_1H: |
| | | cal.set(Calendar.SECOND, 0); |
| | | cal.set(Calendar.MINUTE, 0); |
| | | break; |
| | | case NET_Day: |
| | | cal.set(Calendar.SECOND, 0); |
| | | cal.set(Calendar.MINUTE, 0); |
| | | cal.set(Calendar.HOUR_OF_DAY, 0); |
| | | break; |
| | | default: |
| | | break; |
| | | } |
| | | return cal.getTime().toInstant(); |
| | | } |
| | | } |
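getByMin snaps a collect time down to the start of its frequency bucket before the point is written to InfluxDB, so samples collected a few seconds apart share one timestamp. A standalone sketch of the same truncation, using plain strings in place of DataPointFreqEnum; unlike the method above it also clears Calendar.MILLISECOND, which is an assumption rather than the patched behaviour:

import java.time.Instant;
import java.util.Calendar;
import java.util.Date;

class TruncateSketch {
    // Illustrative only: "1min" / "1h" / "1d" stand in for the NET_* constants.
    static Instant truncate(Date collectTime, String freq) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(collectTime);
        cal.set(Calendar.MILLISECOND, 0);          // assumption: drop milliseconds as well
        switch (freq) {
            case "1d":
                cal.set(Calendar.HOUR_OF_DAY, 0);  // falls through: a day boundary also zeroes minutes and seconds
            case "1h":
                cal.set(Calendar.MINUTE, 0);       // falls through
            case "1min":
                cal.set(Calendar.SECOND, 0);
                break;
            default:
                break;                             // unknown frequency: keep the raw time
        }
        return cal.getTime().toInstant();
    }

    public static void main(String[] args) {
        Date now = new Date();
        System.out.println(truncate(now, "1min")); // e.g. 2025-02-20T08:17:00Z
        System.out.println(truncate(now, "1h"));   // e.g. 2025-02-20T08:00:00Z
        System.out.println(truncate(now, "1d"));   // local midnight, printed in UTC
    }
}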
| | |
| | | @Schema(description = "结束时间") |
| | | @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") |
| | | private Date endTime; |
| | | |
| | | @Schema(description = "数据精度,小于0,不限制位数,大于0设置位数") |
| | | private Integer prec; |
| | | } |
| | |
| | | @Schema(description = "结束时间") |
| | | @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") |
| | | private Date endTime; |
| | | |
| | | @Schema(description = "数据精度,小于0,不限制位数,大于0设置位数") |
| | | private Integer prec; |
| | | } |
| | |
| | | viewDto.setOutId(outId); |
| | | viewDto.setResultstr(output.getResultstr()); |
| | | viewDto.setResultName(output.getResultName()); |
| | | viewDto.setRealData(getHisData(output.getPointid(), startTime, endTime)); |
| | | viewDto.setRealData(getHisData(output.getPointid(), startTime, endTime, reqVO.getPrec())); |
| | | viewDto.setPreDataN(mmItemResultService.getData(output.getId(), startTime, endTime, DateUtils.FORMAT_YEAR_MONTH_DAY_HOUR_MINUTE_SECOND)); |
| | | viewDto.setPreDataL(mmItemResultLastPointService.getData(output.getId(), startTime, endTime, DateUtils.FORMAT_YEAR_MONTH_DAY_HOUR_MINUTE_SECOND)); |
| | | viewDto.setCurData(mmItemResultJsonService.getData(output.getId(), predictTime, DateUtils.FORMAT_YEAR_MONTH_DAY_HOUR_MINUTE_SECOND)); |
| | |
| | | // handle cumulative prediction |
| | | if (output.getIscumulant() == 1) { |
| | | if (StringUtils.isNotBlank(output.getCumulpoint())) { |
| | | viewDto.setCumulantRealData(getHisData(output.getCumulpoint(), startTime, endTime)); |
| | | viewDto.setCumulantRealData(getHisData(output.getCumulpoint(), startTime, endTime, reqVO.getPrec())); |
| | | } |
| | | viewDto.setCumulantPreData(mmItemResultService.getData(output.getId() + CommonDict.CUMULANT_SUFFIX, startTime, endTime, DateUtils.FORMAT_YEAR_MONTH_DAY_HOUR_MINUTE_SECOND)); |
| | | } |
| | |
| | | legend.add(out.getResultName()); |
| | | PreDataSampleViewRespDTO viewDto = new PreDataSampleViewRespDTO(); |
| | | if (StringUtils.isNotBlank(out.getPointid())) { |
| | | viewDto.setRealData(getHisData(out.getPointid(), startTime, endTime)); |
| | | viewDto.setRealData(getHisData(out.getPointid(), startTime, endTime, reqVO.getPrec())); |
| | | } |
| | | viewDto.setPreDataN(mmItemResultService.getData(out.getId(), startTime, endTime, DateUtils.FORMAT_YEAR_MONTH_DAY_HOUR_MINUTE_SECOND)); |
| | | // handle cumulative prediction |
| | | if (out.getIscumulant() == 1) { |
| | | if (StringUtils.isNotBlank(out.getCumulpoint())) { |
| | | viewDto.setCumulantRealData(getHisData(out.getCumulpoint(), startTime, endTime)); |
| | | viewDto.setCumulantRealData(getHisData(out.getCumulpoint(), startTime, endTime, reqVO.getPrec())); |
| | | } |
| | | viewDto.setCumulantPreData(mmItemResultService.getData(out.getId() + CommonDict.CUMULANT_SUFFIX, startTime, endTime, DateUtils.FORMAT_YEAR_MONTH_DAY_HOUR_MINUTE_SECOND)); |
| | | } |
| | |
| | | * @param pointId |
| | | * @param startTime |
| | | * @param endTime |
| | | * @param prec |
| | | * @return |
| | | */ |
| | | private List<Object[]> getHisData(String pointId, Date startTime, Date endTime) { |
| | | private List<Object[]> getHisData(String pointId, Date startTime, Date endTime, Integer prec) { |
| | | List<Object[]> result = new ArrayList<>(); |
| | | if (StringUtils.isBlank(pointId)) { |
| | | return result; |
| | |
| | | if (CollectionUtils.isEmpty(valueDTOS)) { |
| | | return result; |
| | | } |
| | | int defaultPrec = 3; |
| | | valueDTOS.forEach(item -> { |
| | | Object[] values = new Object[2]; |
| | | values[0] = DateUtils.format(item.getT(), DateUtils.FORMAT_YEAR_MONTH_DAY_HOUR_MINUTE_SECOND); |
| | | values[1] = new BigDecimal(item.getV()).setScale(3, BigDecimal.ROUND_HALF_UP); |
| | | if (prec != null && prec < 0) { |
| | | values[1] = item.getV(); |
| | | } else if (prec != null && prec > 0) { |
| | | values[1] = new BigDecimal(item.getV()).setScale(prec, BigDecimal.ROUND_HALF_UP); |
| | | } else { |
| | | values[1] = new BigDecimal(item.getV()).setScale(defaultPrec, BigDecimal.ROUND_HALF_UP); |
| | | } |
| | | result.add(values); |
| | | }); |
| | | return result; |
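The prec handling above encodes the rule documented on the request VO: a negative precision returns the raw value, a positive one sets the scale, and null (or zero) falls back to the default of three decimal places. A compact sketch of just that rule, with illustrative names:

import java.math.BigDecimal;
import java.math.RoundingMode;

class PrecSketch {
    private static final int DEFAULT_PREC = 3;

    // prec < 0 -> raw value; prec > 0 -> round to prec digits; null or 0 -> default of 3 digits.
    static Object applyPrec(String rawValue, Integer prec) {
        if (prec != null && prec < 0) {
            return rawValue;
        }
        int scale = (prec != null && prec > 0) ? prec : DEFAULT_PREC;
        return new BigDecimal(rawValue).setScale(scale, RoundingMode.HALF_UP);
    }

    public static void main(String[] args) {
        System.out.println(applyPrec("3.14159", 2));    // 3.14
        System.out.println(applyPrec("3.14159", -1));   // 3.14159 (unchanged)
        System.out.println(applyPrec("3.14159", null)); // 3.142
    }
}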
| | |
| | | resp.setResult(scheduleResult.getResult()); |
| | | stScheduleRecordService.create(scheduleResult); |
| | | stScheduleSchemeService.updateTime(scheduleResult.getSchemeId(), scheduleResult.getScheduleTime(), scheduleResult.getResultCode()); |
| | | log.info("预测计算结束: " + System.currentTimeMillis()); |
| | | log.info("调度计算结束: " + System.currentTimeMillis()); |
| | | } catch (Exception ex) { |
| | | log.info("调度计算异常: " + System.currentTimeMillis()); |
| | | ex.printStackTrace(); |
| | |
| | | // If they are equal, add 1 millisecond to endTime, otherwise InfluxDB reports an error (the range function uses a left-closed, right-open interval, so endTime must be advanced by one millisecond for the data at the startTime instant to be returned) |
| | | endTime.setTime(endTime.getTime() + 1); |
| | | } |
| | | String stop = endTime.toInstant().toString(); |
| | | |
| | | // Append 10 s to the end time by default |
| | | Calendar calendar = Calendar.getInstance(); |
| | | calendar.setTime(endTime); |
| | | calendar.add(Calendar.MILLISECOND, 10 * 1000 + 1); |
| | | String stop = calendar.getTime().toInstant().toString(); |
| | | |
| | | List<InfluxModelResultVO> dataList = new ArrayList<>(); |
| | | String measurement = MeasurementUtils.getMeasurement(pojo.getType()); |
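Both variants in this hunk deal with the same property of Flux's range(start, stop): the interval is left-closed and right-open, so a record stamped exactly at endTime only comes back when stop is strictly later. A hypothetical helper showing where the padded stop lands in the query string; the bucket name is illustrative and not taken from this codebase:

import java.time.Instant;
import java.util.Date;

class RangeClauseSketch {
    // Pads stop by 10 s + 1 ms, mirroring the hunk above, so the point at endTime stays inside [start, stop).
    static String fluxRange(Date startTime, Date endTime) {
        Instant start = startTime.toInstant();
        Instant stop = new Date(endTime.getTime() + 10 * 1000 + 1).toInstant();
        return "from(bucket: \"example_bucket\") |> range(start: " + start + ", stop: " + stop + ")";
    }

    public static void main(String[] args) {
        Date now = new Date();
        System.out.println(fluxRange(new Date(now.getTime() - 3600_000L), now));
    }
}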
| | |
| | | } else if (PredGranularityEnum.D1.getCode().equals(predictItem.getGranularity())) { |
| | | calendar.set(Calendar.MINUTE, 0); |
| | | calendar.set(Calendar.HOUR_OF_DAY, 0); |
| | | // calendar.add(Calendar.DAY_OF_YEAR, 1); // day-granularity data is inserted at 23:58, so the run time moves to the next day |
| | | } |
| | | PredictResultVO predictResult; |
| | | if (!predictItem.getStatus().equals(ItemStatus.STATUS1.getCode())) { |
| | |
| | | import com.baomidou.dynamic.datasource.annotation.DSTransactional; |
| | | import com.iailab.module.model.common.enums.CommonDict; |
| | | import com.iailab.module.model.mcs.pre.entity.MmItemOutputEntity; |
| | | import com.iailab.module.model.mcs.pre.enums.PredGranularityEnum; |
| | | import com.iailab.module.model.mcs.pre.service.MmItemResultService; |
| | | import com.iailab.module.model.mcs.sche.service.StAdjustResultService; |
| | | import com.iailab.module.model.mdk.factory.ItemEntityFactory; |
| | |
| | | List<DataValueVO> predictDataList = new ArrayList<>(); |
| | | Calendar calendar = Calendar.getInstance(); |
| | | calendar.setTime(predictResult.getPredictTime()); |
| | | for (Integer i = 0; i < rows; i++) { |
| | | // start from the next time step |
| | | calendar.add(Calendar.SECOND, predictResult.getGranularity()); |
| | | |
| | | if (predictResult.getGranularity() < PredGranularityEnum.D1.getCode()) { |
| | | // if the granularity is finer than one day, start from the next time step |
| | | calendar.add(Calendar.SECOND, predictResult.getGranularity()); |
| | | } |
| | | for (Integer i = 0; i < rows; i++) { |
| | | DataValueVO predictData = new DataValueVO(); |
| | | predictData.setDataTime(calendar.getTime()); |
| | | predictData.setDataValue(Double.valueOf(entry.getValue()[i])); |
| | | predictDataList.add(predictData); |
| | | |
| | | calendar.add(Calendar.SECOND, predictResult.getGranularity()); |
| | | } |
| | | resultMap.put(entry.getKey().getId(), predictDataList); |
| | | predictLists.put(entry.getKey().getResultstr(), predictDataList); |
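The stepping logic above starts the predicted series one granularity step after predictTime for sub-day items, then advances one step per row; day-granularity items start at predictTime itself. A standalone sketch of just the timestamp generation, with illustrative names (e.g. predictTime 10:00:00, granularity 60 s, rows 3 yields 10:01, 10:02, 10:03):

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;

class PredictTimestampSketch {
    // Sub-day granularity: skip predictTime itself and start at the next step; day granularity starts at predictTime.
    static List<Date> timestamps(Date predictTime, int granularitySeconds, int rows, boolean subDay) {
        List<Date> times = new ArrayList<>();
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(predictTime);
        if (subDay) {
            calendar.add(Calendar.SECOND, granularitySeconds);
        }
        for (int i = 0; i < rows; i++) {
            times.add(calendar.getTime());
            calendar.add(Calendar.SECOND, granularitySeconds);
        }
        return times;
    }

    public static void main(String[] args) {
        timestamps(new Date(), 60, 3, true).forEach(System.out::println);
    }
}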