Commit a55ee5b51b by yonge, 3 years ago

+ 4 - 3
audio-analysis/src/main/java/com/yonge/netty/dto/HardLevelEnum.java

@@ -8,20 +8,21 @@ public enum HardLevelEnum implements BaseEnum<String, HardLevelEnum> {
 	 * Valid rhythm range (whole note), valid rhythm range (half note), valid rhythm range (quarter note), valid rhythm range (eighth note), valid rhythm range (16th note), valid rhythm range (32nd note)<br>
 	 * Completeness range, not-played range
 	 */
-	BEGINNER("入门级", 3, 5, 5, 5, 10, 10, 13, 15, 60, 10), 
+	//BEGINNER("入门级", 3, 5, 5, 5, 10, 10, 13, 15, 60, 10), 
+	BEGINNER("入门级", 3, 5, 10, 10, 15, 15, 22, 22, 75, 25), 
 	/**
 	 * Advanced level, amplitude threshold, frequency threshold <br>
 	 * Valid rhythm range (whole note), valid rhythm range (half note), valid rhythm range (quarter note), valid rhythm range (eighth note), valid rhythm range (16th note), valid rhythm range (32nd note)<br>
 	 * Completeness range, not-played range
 	 */
-	ADVANCED("进阶级", 3, 5, 8, 8, 15, 15, 30, 30, 80, 10),
+	ADVANCED("进阶级", 3, 5, 8, 8, 12, 12, 20, 20, 85, 15),
 	//ADVANCED("进阶级", 3, 5, 50, 50, 50, 50, 50, 5, 80, 10),
 	/**
 	 * Master (performer) level, amplitude threshold, frequency threshold <br>
 	 * Valid rhythm range (whole note), valid rhythm range (half note), valid rhythm range (quarter note), valid rhythm range (eighth note), valid rhythm range (16th note), valid rhythm range (32nd note)<br>
 	 * Completeness range, not-played range
 	 */
-	PERFORMER("大师级", 3, 3, 5, 5, 10, 10, 25, 25, 90, 20);
+	PERFORMER("大师级", 3, 3, 5, 5, 10, 10, 13, 15, 95, 10);
 
 	private String msg;
 

+ 34 - 15
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java

@@ -37,6 +37,10 @@ public class UserChannelContext {
 	// Percussion instruments
 	private final static List<Integer> percussionList = Arrays.asList(23, 113);
 	
+	private static final int MIN_FREQUENCY = 100;
+	
+	private static final int MAX_FREQUENCY = 2000;
+	
 	private FastYin detector;
 	
 	private String user;
@@ -369,7 +373,7 @@ public class UserChannelContext {
 			if(percussionList.contains(subjectId)){
 				flag = chunkAnalysis.getAmplitude() > hardLevel.getAmplitudeThreshold();
 			}else{
-				flag = chunkAnalysis.getFrequency() > 100;
+				flag = chunkAnalysis.getFrequency() > MIN_FREQUENCY && chunkAnalysis.getFrequency() < MAX_FREQUENCY;
 			}
 			
 			if(delayProcessed == false && flag){
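
For non-percussion parts the onset test now requires the detected pitch to sit inside the 100–2000 Hz band (roughly G2 up to B6) rather than merely above 100 Hz, so out-of-band detections no longer count as a played note. A minimal sketch of the same predicate pulled into a helper, using the constants added above; the helper name is illustrative, not part of the commit:

    // Treat only detections inside the analysis band as pitched signal,
    // exactly like the inline check above.
    private static boolean isPitched(double frequency) {
        return frequency > MIN_FREQUENCY && frequency < MAX_FREQUENCY;
    }
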
@@ -552,7 +556,7 @@ public class UserChannelContext {
 			
 			if (noteAnalysis.getFrequency() == -1) {// rest
 
-				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= 100).mapToDouble(t -> t.getDurationTime()).sum();
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= MIN_FREQUENCY).mapToDouble(t -> t.getDurationTime()).sum();
 
 				if (!noteAnalysis.isTempo()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
@@ -564,7 +568,7 @@ public class UserChannelContext {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
 				}
 			} else {
-				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() > 100 && t.getFrequency() < 2000)
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() > MIN_FREQUENCY && t.getFrequency() < MAX_FREQUENCY)
 						.mapToDouble(t -> t.getDurationTime()).sum();
 
 				if (playDurationTime * 100 / durationTime < hardLevel.getNotPlayRange()) {
@@ -627,7 +631,7 @@ public class UserChannelContext {
 		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
 		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
 		
-		LOGGER.info("------------floatingRange StartTime:{}  EndTime:{}------------", startTime, endTime);
+		LOGGER.info("------------TimeStamp:{}  Duration:{}  floatingRange:{}  StartTime:{}  EndTime:{}------------", musicXmlNote.getTimeStamp(), musicXmlNote.getDuration(), floatingRange, startTime, endTime);
 		
 		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
 		
@@ -652,7 +656,7 @@ public class UserChannelContext {
 		
 		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
 		
-		List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency()).filter(t -> t.doubleValue() > 100 && t.doubleValue() < 2000)
+		List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency()).filter(t -> t.doubleValue() > MIN_FREQUENCY && t.doubleValue() < MAX_FREQUENCY)
 				.collect(Collectors.toList());
 		
 		if (chunkFrequencyList.size() == 0) {
@@ -693,7 +697,7 @@ public class UserChannelContext {
 		}
 		
 		if (musicXmlNote.getFrequency() == -1) {// rest
-			return chunkList.stream().filter(t -> t.getFrequency() > 100).count() <= 1;
+			return chunkList.stream().filter(t -> t.getFrequency() > MIN_FREQUENCY).count() <= 1;
 		}
 		
 		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
@@ -724,7 +728,7 @@ public class UserChannelContext {
 		for (int i = 0; i < chunkList.size(); i++) {
 			chunkAnalysis = chunkList.get(i);
 			if (chunkAnalysis != null) {
-				if (chunkAnalysis.getFrequency() > 100) {
+				if (chunkAnalysis.getFrequency() > MIN_FREQUENCY) {
 					
 					tempo = true;
 					if (firstPeakIndex == -1) {
@@ -732,8 +736,9 @@ public class UserChannelContext {
 						noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, chunkAnalysis.getFrequency());
 					} else if (noteFrequencyRange.getMinFrequency() > chunkAnalysis.getFrequency()
 							|| chunkAnalysis.getFrequency() > noteFrequencyRange.getMaxFrequency()) {// check whether it is the same note
-						// is it an octave lower
-						if(!(noteFrequencyRange.getMinFrequency() < chunkAnalysis.getFrequency() * 2 && chunkAnalysis.getFrequency() * 2 < noteFrequencyRange.getMaxFrequency())){
+						// is it an octave lower or an octave higher
+						if(!((noteFrequencyRange.getMinFrequency() < chunkAnalysis.getFrequency() * 2 && chunkAnalysis.getFrequency() * 2 < noteFrequencyRange.getMaxFrequency())
+								|| (noteFrequencyRange.getMinFrequency() < chunkAnalysis.getFrequency() / 2 && chunkAnalysis.getFrequency() / 2 < noteFrequencyRange.getMaxFrequency()))){
 							tempo = false;
 							LOGGER.info("节奏错误原因:不是同一个音[{}]:{}-{}", chunkAnalysis.getFrequency(), noteFrequencyRange.getMinFrequency(), noteFrequencyRange.getMaxFrequency());
 							break;
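
The widened condition treats a pitch as the same note when the pitch itself, the pitch one octave up, or the pitch one octave down falls inside the note's frequency range, instead of only checking the octave below. A minimal sketch of that test as standalone methods, using the NoteFrequencyRange getters seen above; the method names and the extraction into helpers are illustrative, not part of the commit:

    // True when the detected frequency, or one of its octave transpositions,
    // lies inside the range established for the current note.
    private static boolean isSameNoteAllowingOctave(NoteFrequencyRange range, double frequency) {
        return inRange(range, frequency)
                || inRange(range, frequency * 2)   // played one octave below the reference
                || inRange(range, frequency / 2);  // played one octave above the reference
    }

    private static boolean inRange(NoteFrequencyRange range, double frequency) {
        return range.getMinFrequency() < frequency && frequency < range.getMaxFrequency();
    }
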
@@ -764,7 +769,7 @@ public class UserChannelContext {
 				LOGGER.info("节奏错误原因:进入时间点太晚");
 			}else{
 				// check whether the sound carried over from the previous note
-				if(firstChunkAnalysis.getFrequency() > 100 && lastChunkAnalysis.getFrequency() > 100){
+				if(firstChunkAnalysis.getFrequency() > MIN_FREQUENCY && lastChunkAnalysis.getFrequency() > MIN_FREQUENCY){
 					tempo = new NoteFrequencyRange(standardFrequecy, firstChunkAnalysis.getFrequency()).equals(new NoteFrequencyRange(standardFrequecy, lastChunkAnalysis.getFrequency())) == false;
 					if(tempo == false){
 						LOGGER.info("节奏错误原因:上一个音[{}]延续下来导致的", lastChunkAnalysis.getFrequency());
@@ -783,6 +788,8 @@ public class UserChannelContext {
 		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
 		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
 		
+		LOGGER.info("------------TimeStamp:{}  floatingRange:{}  StartTime:{}  EndTime:{}------------", musicXmlNote.getTimeStamp(), floatingRange, startTime, endTime);
+		
 		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
 		
 		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
@@ -790,17 +797,21 @@ public class UserChannelContext {
 		
 		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
 		
-		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+		// take only part of the valid signal, according to the completeness range
+		int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())) / 100;
+		List<ChunkAnalysis> chunkList = chunkAnalysisList.subList(0, elementSize);
+		
+		if(chunkList == null || chunkList.size() == 0){
 			return false;
 		}
 		
 		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
 		
-		LOGGER.info("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkAnalysisList.get(chunkAnalysisList.size() - 1)
+		LOGGER.info("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkList.get(chunkList.size() - 1)
 				.getEndTime());
 
 		if (musicXmlNote.getFrequency() == -1) {// rest
-			return chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
+			return chunkList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
 		}
 		
 		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();
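
The new elementSize calculation keeps only the leading portion of the matched window: the level's tempo effective range (a percentage) is subtracted from 100, so with a 22% range roughly the first 78% of the chunks are evaluated and the tail of the window is ignored. A minimal self-contained sketch of that truncation with a bounds guard; the helper class and method are illustrative, not part of the commit:

    import java.util.List;

    final class WindowTruncation {

        // Keep only the first (100 - effectiveRangePercent)% of the analysed chunks;
        // the trailing part of the matched window is ignored, as in the diff above.
        static <T> List<T> leadingPortion(List<T> chunks, int effectiveRangePercent) {
            int keep = chunks.size() * (100 - effectiveRangePercent) / 100;
            return chunks.subList(0, Math.max(0, Math.min(keep, chunks.size())));
        }
    }
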
@@ -813,7 +824,7 @@ public class UserChannelContext {
 			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
 		}
 		
-		List<Integer> chunkAmplitudeList = chunkAnalysisList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
+		List<Integer> chunkAmplitudeList = chunkList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
 
 		chunkAmplitudeList.add(0, lastChunkAnalysis.getAmplitude());
 		
@@ -865,14 +876,17 @@ public class UserChannelContext {
 	private double queryFirstNoteStartTime(List<ChunkAnalysis> chunkAnalysisList, MusicXmlNote musicXmlNote) {
 		
 		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			LOGGER.info("找不到数据,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
 			return musicXmlNote.getTimeStamp() + dynamicOffset;
 		}
 		
 		if (percussionList.contains(subjectId)) {
 			Optional<ChunkAnalysis> optional = chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).findFirst();
 			if(optional.isPresent()){
+				LOGGER.info("范围内查询到信号,correctedStartTime:{}", optional.get().getStartTime());
 				return optional.get().getStartTime();
 			}else{
+				LOGGER.info("范围内未查询到信号,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
 				return musicXmlNote.getTimeStamp() + dynamicOffset;
 			}
 		}
@@ -881,10 +895,12 @@ public class UserChannelContext {
 		if(musicXmlNote.getMusicalNotesIndex() > 0){
 			MusicXmlNote preMusicXmlNote = getCurrentMusicNote(null, musicXmlNote.getMusicalNotesIndex() - 1);
 			if((int)preMusicXmlNote.getFrequency() == (int)musicXmlNote.getFrequency()){
-				Optional<ChunkAnalysis> optional = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= 100).findFirst();
+				Optional<ChunkAnalysis> optional = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= MIN_FREQUENCY).findFirst();
 				if(optional.isPresent()){
+					LOGGER.info("与上一个音同音,有断开,correctedStartTime:{}", optional.get().getStartTime());
 					return optional.get().getEndTime();
 				}else{
+					LOGGER.info("与上一个音同音,未断开,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
 					return musicXmlNote.getTimeStamp() + dynamicOffset;
 				}
 			}
@@ -897,9 +913,12 @@ public class UserChannelContext {
 		for (ChunkAnalysis ca : chunkAnalysisList) {
 			noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, ca.getFrequency());
 			if (standardNote.equals(noteFrequencyRange)) {
+				LOGGER.info("范围内查询到信号,correctedStartTime:{}", ca.getStartTime());
 				return ca.getStartTime();
 			}
 		}
+		
+		LOGGER.info("范围内未查询到信号,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
 
 		//return chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime();
 		return musicXmlNote.getTimeStamp() + dynamicOffset;

+ 4 - 4
audio-analysis/src/main/java/com/yonge/netty/server/service/AudioCompareHandler.java

@@ -299,6 +299,10 @@ public class AudioCompareHandler implements MessageHandler {
 			int beatByteLength = (int) (audioFormat.getSampleRate() * audioFormat.getSampleSizeInBits() / 8 * (channelContext.getOffsetMS() + channelContext.getBeatDuration()) / 1000);
 			
 			if(totalLength > beatByteLength){
+				if(beatByteLength % 2 != 0){
+					LOGGER.info("**************奇数*****************");
+					beatByteLength--;
+				}
 				channelContext.setChannelBufferBytes(ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), beatByteLength, totalLength - 1));
 				
 				LOGGER.info("--------Length:{}  Times[{} + {}]:{}--------", waveFileProcessor.getFile().length() - channelContext.getChannelBufferBytes().length, channelContext.getOffsetMS() , channelContext.getBeatDuration(),(waveFileProcessor.getFile().length() - channelContext.getChannelBufferBytes().length) * 1000 /audioFormat.getSampleRate()/2);
@@ -306,15 +310,11 @@ public class AudioCompareHandler implements MessageHandler {
 				channelContext.setOffsetMS(0);
 				channelContext.setBeatDuration(0);
 			}else{
-				LOGGER.info("*************totalLength:{}*************", totalLength);
 				return false;
 			}
 		}
 		
 		totalLength = channelContext.getChannelBufferBytes().length;
-		if(totalLength % 2 != 0){
-			totalLength--;
-		}
 		
 
 		while (totalLength >= bufferSize) {