liujunchi 3 years ago
parent
commit
737c0ab346

+ 158 - 0
audio-analysis/src/main/java/com/yonge/Main.java

@@ -0,0 +1,158 @@
+package com.yonge;
+
+import be.tarsos.dsp.AudioDispatcher;
+import be.tarsos.dsp.AudioEvent;
+import be.tarsos.dsp.AudioProcessor;
+import be.tarsos.dsp.io.jvm.JVMAudioInputStream;
+import be.tarsos.dsp.mfcc.MFCC;
+import be.tarsos.dsp.pitch.FastYin;
+import be.tarsos.dsp.pitch.PitchDetectionHandler;
+import be.tarsos.dsp.pitch.PitchDetectionResult;
+import be.tarsos.dsp.pitch.PitchProcessor;
+import com.yonge.audio.analysis.AudioFloatConverter;
+import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.audio.utils.ArrayUtil;
+import org.apache.commons.io.IOUtils;
+
+import javax.sound.sampled.AudioFormat;
+import javax.sound.sampled.AudioInputStream;
+import javax.sound.sampled.AudioSystem;
+import javax.sound.sampled.UnsupportedAudioFileException;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.URL;
+import java.util.Arrays;
+
+/**
+ * Description
+ *
+ * @author liujunchi
+ * @date 2022-06-24
+ */
+public class Main {
+    private final static int audioBufferSize = 2048;
+    private final static int bufferOverlap = 1024;
+    private final static int amountOfMelFilters = 20;
+    private final static int amountOfCepstrumCoef = 30;
+    private final static float lowerFilterFreq = 133.33f;
+    private final static float upperFilterFreq = 8000f;
+
+    private static AudioFormat audioFormat = new AudioFormat(44100, 16, 1, true, false);
+
+    // private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
+
+    public static void main(String[] args){
+        try{
+            float sampleRate = 44100;
+            int audioBufferSize = 2048;
+            int bufferOverlap = 0;
+            AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
+            //Create an AudioInputStream from my .wav file
+            URL soundURL = Main.class.getResource("/300.wav");
+            AudioInputStream stream = AudioSystem.getAudioInputStream(soundURL);
+            final MFCC mfccProcessor = new MFCC(audioBufferSize, stream.getFormat().getSampleRate(),
+                                                amountOfCepstrumCoef, amountOfMelFilters, lowerFilterFreq, upperFilterFreq);
+
+            FastYin detector = new FastYin(sampleRate, audioBufferSize );
+            byte[] bytes = IOUtils.toByteArray(stream);
+            AudioFormat format = stream.getFormat();
+
+            int b = 0;
+            int frequency = 0;
+            while (bytes.length > 2048 *2) {
+
+                byte[] bufferData = ArrayUtil.extractByte(bytes, 0, 2048*2 - 1);
+
+                float[] sampleFloats = new float[1024*2];
+
+                converter.toFloatArray(bufferData, sampleFloats);
+                int playFrequency = (int)detector.getPitch(sampleFloats).getPitch();
+                System.out.println("play frequency is " +playFrequency);
+                // ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), bufferSize, totalLength - 1)
+                bytes  = ArrayUtil.extractByte(bytes, 2048*2, bytes.length - 1);
+                // if (b == 1) {
+                //     frequency += playFrequency;
+                //     System.out.println("play frequency is " +frequency/2);
+                //     b = 0;
+                //     frequency = 0;
+                // } else {
+                //     frequency += playFrequency;
+                //     b ++;
+                // }
+            }
+
+
+            //Convert into TarsosDSP API
+            // JVMAudioInputStream audioStream = new JVMAudioInputStream(stream);
+            // AudioDispatcher dispatcher = new AudioDispatcher(audioStream, audioBufferSize, bufferOverlap);
+            // MyPitchDetector myPitchDetector = new MyPitchDetector();
+            // dispatcher.addAudioProcessor(mfccProcessor);
+            // dispatcher.addAudioProcessor(new AudioProcessor() {
+            //     @Override
+            //     public boolean process(AudioEvent audioEvent) {
+            //         float[] mfccs = mfccProcessor.getMFCC();
+            //
+            //         // System.out.println(Arrays.toString(mfccs));
+            //
+            //         YINPitchDetector frequencyDetector = new YINPitchDetector(mfccs.length, sampleRate);
+            //
+            //         int playFrequency = (int)detector.getPitch(audioEvent.getFloatBuffer()).getPitch();
+            //         // int playFrequency = (int) frequencyDetector.getFrequency(mfccs);
+            //         System.out.println("play frequency is " +playFrequency);
+            //         return true;
+            //     }
+            //
+            //     @Override
+            //     public void processingFinished() {
+            //
+            //     }
+            // });
+            // // dispatcher.addAudioProcessor(new MyPitchProcessor(PitchProcessor.PitchEstimationAlgorithm.FFT_YIN, sampleRate, audioBufferSize, myPitchDetector));
+            // dispatcher.run();
+
+
+        }
+        catch(FileNotFoundException fne){fne.printStackTrace();}
+        catch(UnsupportedAudioFileException uafe){uafe.printStackTrace();}
+        catch(IOException ie){ie.printStackTrace();}
+    }
+}
+
+class  MyPitchDetector implements PitchDetectionHandler {
+
+    //Here the result of pitch is always less than half.
+    @Override
+    public void handlePitch(PitchDetectionResult pitchDetectionResult,
+                            AudioEvent audioEvent) {
+        if(pitchDetectionResult.getPitch() != -1){
+            double timeStamp = audioEvent.getTimeStamp();
+            float pitch = pitchDetectionResult.getPitch();
+            float probability = pitchDetectionResult.getProbability();
+            double rms = audioEvent.getRMS() * 100;
+            String message = String.format("Pitch detected at %.2fs: %.2fHz ( %.2f probability, RMS: %.5f )\n", timeStamp,pitch,probability,rms);
+            System.out.println(message);
+        }
+    }
+}
+
+
+class  MyPitchProcessor extends PitchProcessor {
+
+    /**
+     * Initialize a new pitch processor.
+     *
+     * @param algorithm  An enum defining the algorithm.
+     * @param sampleRate The sample rate of the buffer (Hz).
+     * @param bufferSize The size of the buffer in samples.
+     * @param handler
+     */
+    public MyPitchProcessor(PitchEstimationAlgorithm algorithm, float sampleRate, int bufferSize, PitchDetectionHandler handler) {
+        super(algorithm, sampleRate, bufferSize, handler);
+    }
+
+
+    @Override
+    public boolean process(AudioEvent audioEvent) {
+        return super.process(audioEvent);
+    }
+}

+ 1 - 1
cooleshow-user/user-biz/src/main/resources/config/mybatis/CourseHomeworkMapper.xml

@@ -349,7 +349,7 @@
        s.avatar_ as studentAvatar,
        if(sch.id_ is not null,1,0) as submitHomework,
        (case when sch.id_ is null then 'NOTCOMMIT'
-        when sch.teacher_replied_ is not null or sch.teacher_replied_ != '' then 'NOTREVIEW'
+        when sch.teacher_replied_ is null or sch.teacher_replied_ = '' then 'NOTREVIEW'
        else 'REVIEWED' end ) as homeworkStatus,
        (select group_concat(s2.name_) from subject s2
        where find_in_set(s2.id_,st.subject_id_) and s2.del_flag_ = 0 ) as subjectName