| | |
| | | if (CollectionUtil.isEmpty(workstationData)) { |
| | | return new JSONObject(); |
| | | } |
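| | | // Collect the workstation ids on this page, load their shift calendar for the query window, merge the two, and filter by the requested shift name/index. |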
| | | List<Long> ids = workstationData.parallelStream().map((v0) -> { |
| | | return v0.getWorkstationId(); |
| | | }).collect(Collectors.toList()); |
| | | List<ShiftInfoDTO> shiftInfoDtoList = this.calendarMapper.listShiftInfo(ids, LocalDate.parse(queryVO.getBeginTime()), LocalDate.parse(queryVO.getEndTime())); |
| | | List<WorkstationShiftDTO> workstationShifts = CommonUtil.dataMerge(workstationData, shiftInfoDtoList); |
| | | List<WorkstationShiftDTO> filter = workstationShifts.parallelStream().filter(CommonUtil.conditionFilter(queryVO.getShiftName(), queryVO.getShiftIndex())).collect(Collectors.toList()); |
| | | List<OutputShiftResultVO> result = settingResult(fillDateCol, filter); |
| | | String beginTime = DateCycleUtil.getDateByDateCycle(queryVO.getBeginTime(), anEnum, Boolean.TRUE); |
| | | String endTime = DateCycleUtil.getDateByDateCycle(queryVO.getEndTime(), anEnum, Boolean.FALSE); |
| | | List<SuperAggregateOutput> outputs = multithreadingTask(beginTime, endTime, ids, queryVO.getStatisticsType()); |
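| | | // Group the aggregated output rows by a composite "workstationId-shiftIndex" key, e.g. "1001-2" (illustrative values), so they can be matched to the result rows below. |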
| | | Map<String, List<SuperAggregateOutput>> workstationOutMap = outputs.parallelStream().collect(Collectors.groupingBy(s -> { |
| | | return s.getWorkstationId() + "-" + s.getShiftIndex(); |
| | | })); |
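| | | // Build an insertion-ordered (LinkedHashMap) lookup keyed by "workstationId-shiftIndex-dateCycle", filled in by resultCount below. |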
| | | Map<String, OutputShiftResultVO> voMap = result.parallelStream().collect(LinkedHashMap::new, (map, c) -> { |
| | | map.put(c.getWorkstationId() + "-" + c.getShiftIndex() + "-" + c.getDateCycle(), c); |
| | | }, (v0, v1) -> { |
| | | v0.putAll(v1); |
| | | }); |
| | | resultCount(workstationOutMap, anEnum, voMap); |
| | | return ResultUtil.dataToJsonObject(voMap.values(), Long.valueOf(iPage.getTotal()), queryVO.getPageSize()); |
| | | } |
| | |
| | | } |
| | | |
| | | private <T, R extends OutputResultVO> void resultCount(Map<T, List<SuperAggregateOutput>> workstationOutMap, StatisticalMethodEnum anEnum, Map<String, R> voMap) { |
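| | | // For each workstation/shift bucket, sum the output per statistics period (groupKey(anEnum)) and write the total onto the matching result VO. |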
| | | workstationOutMap.forEach((outputKey, outputList) -> { |
| | | Map<String, Long> output = outputList.parallelStream().collect(Collectors.groupingBy(groupKey(anEnum), Collectors.mapping((v0) -> { |
| | | return v0.getOutput(); |
| | | }, Collectors.reducing(0L, (v0, v1) -> { |
| | | return Long.sum(v0, v1); |
| | | })))); |
| | | output.forEach((k, v) -> { |
| | | OutputResultVO vo = (OutputResultVO) voMap.get(outputKey + "-" + k); |
| | | if (Objects.nonNull(vo)) { |
| | | vo.setOutputCount(v); |
| | |
| | | return new JSONObject(); |
| | | } |
| | | List<OutputResultVO> result = Convert.toList(OutputResultVO.class, settingResult(fillDateCol, workstationData)); |
| | | Map<String, OutputResultVO> voMap = result.parallelStream().collect(Collectors.toMap(c -> { |
| | | return c.getWorkstationId() + "-" + c.getDateCycle(); |
| | | }, o -> { |
| | | return o; |
| | | })); |
| | | List<Long> ids = workstationData.parallelStream().map((v0) -> { |
| | | return v0.getWorkstationId(); |
| | | }).collect(Collectors.toList()); |
| | | List<SuperAggregateOutput> outputs = multithreadingTask(beginTime, endTime, ids, queryVO.getStatisticsType()); |
| | | Map<Long, List<SuperAggregateOutput>> workstationOutMap = outputs.parallelStream().collect(Collectors.groupingBy((v0) -> { |
| | | return v0.getWorkstationId(); |
| | | })); |
| | | resultCount(workstationOutMap, anEnum, voMap); |
| | |
| | | return jsonObject; |
| | | } |
| | | CommonUtil.fillWorkStationGroup(workstationVOPage); |
| | | List<Long> workstationIds = voPageRecords.stream().map((v0) -> { |
| | | return v0.getWorkstationId(); |
| | | }).collect(Collectors.toList()); |
| | | StatisticalMethodEnum anEnum = StatisticalMethodEnum.of(workstationAlarmByTimeSelectVO.getDateCycle()); |
| | |
| | | result.add(resultVO); |
| | | }); |
| | | }); |
| | | Map<String, WorkstaionAlarmByTimeReportVO> voMap = result.parallelStream().collect(Collectors.toMap(c -> { |
| | | return c.getWorkstationId() + c.getShowDate(); |
| | | }, o -> { |
| | | return o; |
| | | })); |
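| | | // Count alarm records per workstation by reducing each record to 1 and summing. |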
| | | Map<Long, Integer> wsCount = reportData.parallelStream().collect(Collectors.groupingBy((v0) -> { |
| | | return v0.getWorkstationId(); |
| | | }, Collectors.reducing(0, e -> { |
| | | return 1; |
| | | }, (v0, v1) -> { |
| | | return Integer.sum(v0, v1); |
| | | }))); |
| | | Map<Long, List<WorkstaionAlarmByTimeReportVO>> workstationOutMap = reportData.parallelStream().collect(Collectors.groupingBy((v0) -> { |
| | | return v0.getWorkstationId(); |
| | | })); |
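| | | // For each workstation, count alarms per statistics period and copy the count onto the matching report VO. |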
| | | workstationOutMap.forEach((workstationId, outputList) -> { |
| | | Map<String, Integer> countMap = outputList.parallelStream().collect(Collectors.groupingBy(groupKey(anEnum), Collectors.reducing(0, e2 -> { |
| | | return 1; |
| | | }, (v0, v1) -> { |
| | | return Integer.sum(v0, v1); |
| | | }))); |
| | | countMap.forEach((k, v) -> { |
| | | WorkstaionAlarmByTimeReportVO vo = (WorkstaionAlarmByTimeReportVO) voMap.get(workstationId + k); |
| | | if (Objects.nonNull(vo)) { |
| | | vo.setAlarmCount(v); |
| | |
| | | } |
| | | CommonUtil.fillWorkStationGroup(workstationVOPage); |
| | | List<WorkstationDataDTO> workstationData = workstationVOPage.getRecords(); |
| | | List<Long> ids = workstationData.parallelStream().map((v0) -> { |
| | | return v0.getWorkstationId(); |
| | | }).collect(Collectors.toList()); |
| | | List<ShiftInfoDTO> shiftInfoDtoList = this.calendarMapper.listShiftInfo(ids, LocalDate.parse(workstationAlarmByTimeSelectVO.getShowDate_begin()), LocalDate.parse(workstationAlarmByTimeSelectVO.getShowDate_end())); |
| | | List<WorkstationShiftDTO> workstationShifts = CommonUtil.dataMerge(workstationData, shiftInfoDtoList); |
| | | List<WorkstationShiftDTO> filter = workstationShifts.parallelStream().filter(CommonUtil.conditionFilter(workstationAlarmByTimeSelectVO.getShiftIndexName(), workstationAlarmByTimeSelectVO.getShiftIndex())).collect(Collectors.toList()); |
| | | List<WorkstaionAlarmByShiftReportVO> result = new ArrayList<>(); |
| | | fillDateCol.forEach(f -> { |
| | | filter.forEach(shift -> { |
| | |
| | | boolean filterType = FilterOffUtils.whetherToFilter(OpenTypeEnums.PARAM_KEY_TYPE, OpenTypeEnums.ALARM_ANALYSIS); |
| | | boolean filterShift = FilterOffUtils.whetherToFilter(OpenTypeEnums.PARAM_KEY_SHIFT, OpenTypeEnums.ALARM_ANALYSIS); |
| | | List<WorkstaionAlarmByShiftReportVO> voList = this.superAlarmMapper.alarmReportByShift(ids, beginTime, endTime, Boolean.valueOf(filterType), Boolean.valueOf(filterShift)); |
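| | | // Count alarm records per "workstationId-shiftIndex" bucket. |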
| | | Map<String, Integer> wsCount = voList.parallelStream().collect(Collectors.groupingBy(o -> { |
| | | return o.getWorkstationId() + "-" + o.getShiftIndex(); |
| | | }, Collectors.reducing(0, e -> { |
| | | return 1; |
| | | }, (v0, v1) -> { |
| | | return Integer.sum(v0, v1); |
| | | }))); |
| | | Map<String, List<WorkstaionAlarmByShiftReportVO>> workstationOutMap = voList.parallelStream().collect(Collectors.groupingBy(s -> { |
| | | return s.getWorkstationId() + "-" + s.getShiftIndex(); |
| | | })); |
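| | | // Insertion-ordered lookup keyed by "workstationId-shiftIndex-showDate", used below to fill in the per-day counts. |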
| | | Map<String, WorkstaionAlarmByShiftReportVO> voMap = result.parallelStream().collect(LinkedHashMap::new, (map, c) -> { |
| | | map.put(c.getWorkstationId() + "-" + c.getShiftIndex() + "-" + c.getShowDate(), c); |
| | | }, (v0, v1) -> { |
| | | v0.putAll(v1); |
| | | }); |
| | | workstationOutMap.forEach((key, vos) -> { |
| | | Map<String, Integer> countMap = vos.parallelStream().collect(Collectors.groupingBy(o2 -> { |
| | | return LocalDate.parse(String.valueOf(o2.getFactoryDate()), DateTimeFormatter.ofPattern("yyyyMMdd")).toString(); |
| | | }, Collectors.reducing(0, e2 -> { |
| | | return 1; |
| | | }, (v0, v1) -> { |
| | | return Integer.sum(v0, v1); |
| | | }))); |
| | | countMap.forEach((k, v) -> { |
| | | WorkstaionAlarmByShiftReportVO vo = (WorkstaionAlarmByShiftReportVO) voMap.get(key + "-" + k); |
| | | if (Objects.nonNull(vo)) { |
| | | vo.setAlarmCount(v); |
| | |
| | | } |
| | | LocalDate currentDate = LocalDate.parse(drillWorkstationAlarmByShiftSelectVO.getCurrentDate(), DateTimeFormatter.ofPattern(DateConstant.PATTERN_DATE)); |
| | | List<ShiftInfoDTO> shiftInfoDtoList = this.shiftDetailService.getShiftInfoDtoList(Lists.newArrayList(drillWorkstationAlarmByShiftSelectVO.getWorkstationId()), currentDate, currentDate); |
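| | | // Map shiftIndex -> shift display name for the selected workstation on the drill-down date. |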
| | | Map<Integer, String> shiftMap = shiftInfoDtoList.stream().collect(Collectors.toMap((v0) -> { |
| | | return v0.getShiftIndex(); |
| | | }, (v0) -> { |
| | | return v0.getIndexName(); |
| | |
| | | if (!filterType && !filterShift) { |
| | | return data; |
| | | } |
| | | return data.stream().filter(item -> { |
| | | return !(filterType && item.getShiftTimeType().equals(2)); |
| | | }).filter(item2 -> { |
| | | return !filterShift || item2.getShiftIndex().intValue() > 0; |
| | |
| | | } |
| | | CommonUtil.fillWorkStationGroup(workstationVOPage); |
| | | List<WorkstationDataDTO> workstationVOS = workstationVOPage.getRecords(); |
| | | List<Long> workstationIds = workstationVOS.stream().map((v0) -> { |
| | | return v0.getWorkstationId(); |
| | | }).distinct().collect(Collectors.toList()); |
| | | List<SuperAggregateState> superAggregateStates = getStatusDataByFactoryDate(workstationIds, LocalDateTimeUtil.format(LocalDateUtil.dateToLocalDate(parse), "yyyyMMdd"), LocalDateTimeUtil.format(LocalDateUtil.dateToLocalDate(parse2), "yyyyMMdd")); |
| | |
| | | workstationEfficiencyQueryVO.setEndTime(end); |
| | | workstationEfficiencyQueryVO.setWorkstationIds(ids); |
| | | List<SuperAggregateState> superAggregateStates = getStatusDataByFactoryDate(ids, start, end); |
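| | | // Group the aggregated state rows by "workstationId_shiftIndex" for the efficiency report. |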
| | | Map<String, List<SuperAggregateState>> workstationAggregateStateMap = superAggregateStates.parallelStream().collect(Collectors.groupingBy(s -> { |
| | | return s.getWorkstationId() + "_" + s.getShiftIndex(); |
| | | })); |
| | | Map<String, WorkstationEfficiencyVO> voMap = result.parallelStream().collect(LinkedHashMap::new, (map, c) -> { |
| | | map.put(c.getWorkstationId() + "_" + c.getShiftIndex() + "_" + c.getTime() + "_" + c.getType(), c); |
| | | }, (v0, v1) -> { |
| | | v0.putAll(v1); |
| | |
| | | if (yearStateList.containsKey(year)) { |
| | | List<SuperAggregateState> stateYear = yearStateList.get(year); |
| | | if (Objects.equals(dateCycle, StatisticalMethodEnum.MONTH.getCode())) { |
| | | stateTime = stateYear.stream().collect(Collectors.groupingBy((v0) -> { |
| | | return v0.getFactoryMonth(); |
| | | })); |
| | | } else if (Objects.equals(dateCycle, StatisticalMethodEnum.WEEK.getCode())) { |
| | | stateTime = stateYear.stream().collect(Collectors.groupingBy((v0) -> { |
| | | return v0.getFactoryWeek(); |
| | | })); |
| | | } else if (Objects.equals(dateCycle, StatisticalMethodEnum.DAY.getCode())) { |
| | | stateTime = stateYear.stream().collect(Collectors.groupingBy((v0) -> { |
| | | return v0.getFactoryDate(); |
| | | })); |
| | | } |
| | |
| | | } |
| | | |
| | | private <T, R extends WorkstationEfficiencyVO> void resultCount(Map<T, List<SuperAggregateState>> workstationAggregateStateMap, StatisticalMethodEnum anEnum, Map<String, R> voMap, List<Integer> productivityTypes) { |
| | | workstationAggregateStateMap.forEach((key, list) -> { |
| | | Map<String, List<SuperAggregateState>> aggregateState = list.parallelStream().collect(Collectors.groupingBy(groupKey(anEnum))); |
| | | aggregateState.forEach((k, v) -> { |
| | | ProductivityTypeEnum[] values = ProductivityTypeEnum.values(); |
| | | for (ProductivityTypeEnum productivityTypeEnum : values) { |
| | | WorkstationEfficiencyVO vo = (WorkstationEfficiencyVO) voMap.get(key + "_" + k + "_" + productivityTypeEnum.getMessage()); |
| | |
| | | private Map<Integer, List<Integer>> groupYear(Date startTime, Date endTime, Integer dateCycle) { |
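| | | // Maps each year in [startTime, endTime] to its month, week, or day keys (yyyyMMdd), depending on the requested statistics cycle. |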
| | | LocalDate startLocal = LocalDateTimeUtils.dateToLocalDate(startTime); |
| | | LocalDate endLocal = LocalDateTimeUtils.dateToLocalDate(endTime); |
| | | HashMap<Integer, List<Integer>> data = new HashMap<>(); |
| | | List<IntervalDateDto> dateDtos = LocalDateTimeUtils.getIntervalDate(startLocal, endLocal); |
| | | for (IntervalDateDto intervalDateDto : dateDtos) { |
| | | if (Objects.equals(dateCycle, StatisticalMethodEnum.MONTH.getCode())) { |
| | | data.put(intervalDateDto.getYear(), intervalDateDto.getMonthList()); |
| | | } else if (Objects.equals(dateCycle, StatisticalMethodEnum.WEEK.getCode())) { |
| | | data.put(intervalDateDto.getYear(), intervalDateDto.getWeekList()); |
| | | } else if (Objects.equals(dateCycle, StatisticalMethodEnum.DAY.getCode())) { |
| | | data.put(intervalDateDto.getYear(), intervalDateDto.getDayList().stream().map(item -> { |
| | | return Integer.valueOf(DateUtil.format(DateUtil.parse(item, DateConstant.PATTERN_DATE), "yyyyMMdd")); |
| | | }).collect(Collectors.toList())); |
| | | } |
| | | } |
| | | return data; |
| | | } |
| | | |
| | | private Function<SuperAggregateState, String> groupKey(StatisticalMethodEnum statisticalMethodEnum) { |
| | |
| | | superAggregateStates.addAll(statusDataByFactoryDateAndWorkstationId); |
| | | } |
| | | } |
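| | | // Open-ended state records get the current time as their end time before the duration (in milliseconds) is computed; records with rps <= 0 are then dropped. |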
| | | Date now = com.qianwen.core.tool.utils.DateUtil.now(); |
| | | superAggregateStates.forEach(x -> { |
| | | if (Func.isEmpty(x.getEndTime())) { |
| | | x.setEndTime(new Timestamp(now.getTime())); |
| | | } |
| | | x.setDurationCollect(Long.valueOf(LocalDateTimeUtils.betweenTwoTime(x.getStartTime().toLocalDateTime(), LocalDateTimeUtils.DateToLocalDateTime(x.getEndTime()), ChronoUnit.MILLIS))); |
| | | }); |
| | | return superAggregateStates.stream().filter(x2 -> { |
| | | return x2.getRps().intValue() > 0; |
| | | }).collect(Collectors.toList()); |
| | | } |
| | |
| | | List<WorkstationShiftDTO> workstationShifts = new ArrayList<>(); |
| | | workstationData.forEach(workstation -> { |
| | | Boolean isBlendShiftModel = isBlendShiftModel(shiftInfoDtoList, workstation.getWorkstationId()); |
| | | List<ShiftInfoDTO> collect = shiftInfoDtoList.parallelStream().filter(shift -> { |
| | | return shift.getWorkstationId().equals(workstation.getWorkstationId()); |
| | | }).collect(Collectors.collectingAndThen(Collectors.toCollection(() -> { |
| | | return new TreeSet<>(Comparator.comparing((v0) -> { |
| | | return v0.getShiftIndex(); |
| | | })); |
| | | }), (v1) -> { |
| | | return new ArrayList<>(v1); |
| | | })); |
| | | collect.forEach(c -> { |
| | | WorkstationShiftDTO workstationShiftDTO = WorkstationShiftDTO.of(c.getShiftIndex()).setShiftName(buildShiftName(c.getShiftIndex(), isBlendShiftModel, c.getIndexName())); |
| | |
| | | } |
| | | |
| | | public static Boolean isBlendShiftModel(List<ShiftInfoDTO> shiftInfos, Long workstationId) { |
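| | | // A workstation is considered to run a blended shift model when its shift records reference at least two distinct model ids. |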
| | | int size = (shiftInfos.parallelStream().filter(s -> { |
| | | return s.getWorkstationId().equals(workstationId); |
| | | }).collect(Collectors.collectingAndThen(Collectors.toCollection(() -> { |
| | | return new TreeSet<>(Comparator.comparing((v0) -> { |
| | | return v0.getModelId(); |
| | | })); |
| | | }), (v1) -> { |
| | | return new ArrayList<>(v1); |
| | | }))).size(); |
| | | return Boolean.valueOf(size >= 2); |
| | | } |
| | | |
| | | public static List<WorkstationShiftDTO> fillShift(List<ShiftInfoDTO> collect, WorkstationDataDTO workstationDataDTO) { |
| | | if (collect.size() == 4) { |
| | | return new ArrayList<>(); |
| | | } |
| | | List<WorkstationShiftDTO> result = new ArrayList<>(); |
| | | if (CollectionUtil.isEmpty(collect)) { |
| | |
| | | } |
| | | |
| | | public static void fillWorkStationGroup(IPage<WorkstationDataDTO> source) { |
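| | | // Defaults blank workstation group names to CommonGroupConstant.DEFAULT_NAME while copying the page records. |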
| | | List<WorkstationDataDTO> list = source.getRecords().parallelStream().peek(w -> { |
| | | if (StrUtil.isEmpty(w.getWorkstationGroup())) { |
| | | w.setWorkstationGroup(CommonGroupConstant.DEFAULT_NAME); |
| | | } |
| | |
| | | } |
| | | |
| | | public static HttpServletRequest getRequest() { |
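| | | // Resolves the current HttpServletRequest from Spring's RequestContextHolder; returns it only when request attributes are bound to this thread. |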
| | | ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); |
| | | if (attributes != null) { |
| | | HttpServletRequest request = attributes.getRequest(); |
| | | return request; |
| | |
| | | }); |
| | | return list; |
| | | case WEEK: |
| | | List<IntervalDateDto> intervalDateDtoList = LocalDateTimeUtils.getIntervalDate(LocalDateTimeUtils.dateToLocalDate(com.qianwen.core.tool.utils.DateUtil.parse(startTime, DateConstant.PATTERN_DATE)), LocalDateTimeUtils.dateToLocalDate(com.qianwen.core.tool.utils.DateUtil.parse(endTime, DateConstant.PATTERN_DATE))); |
| | | List<String> result = new ArrayList<>(); |
| | | intervalDateDtoList.forEach(o -> { |
| | | o.getWeekList().forEach(week -> { |
| | |
| | | }); |
| | | return result; |
| | | case MONTH: |
| | | List<IntervalDateDto> intervalDateDtoListM = LocalDateTimeUtils.getIntervalDate(LocalDateTimeUtils.dateToLocalDate(com.qianwen.core.tool.utils.DateUtil.parse(startTime, DateConstant.PATTERN_DATE)), LocalDateTimeUtils.dateToLocalDate(com.qianwen.core.tool.utils.DateUtil.parse(endTime, DateConstant.PATTERN_DATE))); |
| | | List<String> resultM = new ArrayList<>(); |
| | | intervalDateDtoListM.forEach(o2 -> { |
| | | o2.getMonthList().forEach(month -> { |