2017-02-23 11 views
0

Dies ist vielleicht keine sehr akzeptable Frage, aber ich bin an dieser Stelle so verzweifelt. Synchroner Media-Player in JavaFX

Ich brauche einen synchronen Java-Media-Player mit schnellen Such-und Balance-Änderung.

Szenario:

Ich habe ein JavaFX-Projekt, in dem ich eine sehr kurze (50–100 ms) Mediendatei in einer Schleife abspiele. Die Sache ist: Ich muss vor dem Neustart bestimmte Anforderungen erfüllen.

Kurz gesagt: Sound abspielen -> bestimmte Anforderungen werden erfüllt (die Balance wird geändert, alle 120 ms) -> wenn der Sound fertig abgespielt ist, Wiedergabe von Anfang an.

JavaFX bietet einen Mediaplayer, den ich modifiziert habe. Es ist aber noch mehr Effizienz erforderlich:

// this method is just to show how the modified mediaplayer class is called

/**
 * Updates the stereo balance of the currently playing sound according to the
 * current phase of the swing cycle, and — near the 1st and 3rd quarter-cycle
 * boundaries — restarts playback if the media has finished.
 *
 * @param currentTimeInCycle elapsed time within the current cycle; interpreted
 *                           modulo RADIANCE_FULL_CYCLE
 */
public void updateSoundBalance(double currentTimeInCycle) {
    // control the balance of the sound
    if (playingSound && mediaPlayer != null)
    {
        long b = System.nanoTime();

        // 0 <= balance < 4. balance represents the cycle phase.
        // (% binds tighter than /, so this is (t % FULL) / QUARTER)
        double balance = currentTimeInCycle % RADIANCE_FULL_CYCLE / RADIANCE_QUARTER_CYCLE;
        boolean firstHalfCycle = balance < 2;

        double quarterCycleIndex = Math.floor(balance);

        long a = System.nanoTime();
        if (swingingSound)
            // smooth swing: sweep linearly from +1 down to -1 and back up
            mediaPlayer.setBalance(firstHalfCycle ? 1 - balance : balance - 3);
        else
            // hard switch: left during the middle half of the cycle, right otherwise
            mediaPlayer.setBalance(balance > 1 && balance < 3 ? -1 : 1);
        System.out.println("C ::: sound balance = " + (System.nanoTime() - a));

        // BUGFIX: use short-circuit || instead of the bitwise | for boolean logic
        if ((quarterCycleIndex == 1 || quarterCycleIndex == 3) &&
            balance - quarterCycleIndex <= Settings.DEFAULT_PATTERN_SMOOTHNESS)
        {
            a = System.nanoTime();

            if (mediaDone) {
                mediaPlayer.reset();
                mediaDone = false;
            }
            System.out.println("B ::: call reset = " + (System.nanoTime() - a));
        }
        System.out.println("A ::: total time = " + (System.nanoTime() - b));
    }
}

import java.util.concurrent.ScheduledThreadPoolExecutor; 

import javafx.scene.media.Media; 
import javafx.scene.media.MediaPlayer; 
import javafx.util.Duration; 


/**
 * Double-buffered media player: two {@link MediaPlayer} instances share the
 * same {@link Media} and alternate, so one can play while the other seeks back
 * to the start in the background. This hides the (slow) {@code seek()} latency
 * from the caller.
 *
 * <p>Thread-safety note: {@link #reset()} hands the swap to a single-threaded
 * executor, so swaps are serialized; {@code usingMp1} is volatile so the swap
 * performed on the executor thread is visible to callers of {@link #play()}.
 */
public class MultiMediaPlayer
{
    private MediaPlayer mp1, mp2;

    // BUGFIX: written by the seek-executor thread and read by caller threads,
    // so it must be volatile to guarantee visibility of the swap.
    private volatile boolean usingMp1 = true;

    // BUGFIX: a single worker serializes seek requests; with two threads,
    // two concurrent seekRun executions could race on usingMp1 and stop/seek
    // both players at once.
    private ScheduledThreadPoolExecutor seekService = new ScheduledThreadPoolExecutor(1);

    /** Stops the active player, starts the idle one, and rewinds the stopped one. */
    private Runnable seekRun = new Runnable() {
        @Override
        public void run() {
            try
            {
                long a = System.nanoTime();
                if (usingMp1) {
                    usingMp1 = false;
                    mp1.stop();
                    mp2.play();
                    mp1.seek(new Duration(0));
                } else {
                    usingMp1 = true;
                    mp2.stop();
                    mp1.play();
                    mp2.seek(new Duration(0));
                }
                System.out.println("D ::: reset sound time taken = " + (System.nanoTime() - a));
            }
            catch (Exception e) {
                e.printStackTrace();
            }
        }
    };

    /**
     * Creates the player pair over a single shared media source.
     * Balance and end-of-media handlers are kept in sync between the two players.
     *
     * @param value the media both internal players will play
     */
    public MultiMediaPlayer(Media value)
    {
        mp1 = new MediaPlayer(value);
        mp2 = new MediaPlayer(value);

        mp1.balanceProperty().bindBidirectional(mp2.balanceProperty());
        mp1.onEndOfMediaProperty().bindBidirectional(mp2.onEndOfMediaProperty());
    }

    /** Sets the balance on both players (propagated via the bidirectional binding). */
    public void setBalance(double value){
        mp1.setBalance(value);
    }

    /** Asynchronously restarts playback by swapping to the pre-rewound player. */
    public void reset(){
        seekService.execute(seekRun);
    }

    /** Starts (or resumes) whichever player is currently active. */
    public void play(){
        if (usingMp1) {
            mp1.play();
        } else {
            mp2.play();
        }
    }

    /** Stops both players. */
    public void stop(){
        mp1.stop();
        mp2.stop();
    }

    /** Pauses both players. */
    public void pause(){
        mp1.pause();
        mp2.pause();
    }

    /** Registers the end-of-media handler (mirrored to mp2 via the binding). */
    public void setOnEndOfMedia(Runnable r) {
        mp1.setOnEndOfMedia(r);
    }
}

Wenn jemand mir die richtige Richtung zeigen kann (eine Bibliothek oder etwas, das ich übersehen habe), wäre ich dankbar.

PS: Als Java-Version ist die aktuell verfügbare erlaubt.

Antwort

0

Danke an Olof Kohlhaas, aber ich glaube, ich habe eine bessere Lösung für mein spezielles Problem gefunden, und sie verwendet den AudioClip von JavaFX. Aufgrund meines Mangels an Wissen habe ich jedoch eine grundlegende Methode verwendet, um die Länge der Datei zu ermitteln, die nur WAVE und andere spezifische Formate unterstützt. Wenn dies geändert wird, kann diese Klasse mit jedem Format verwendet werden, das das Medienpaket von JavaFX unterstützt:

import java.io.IOException; 
import java.net.URL; 

import javax.sound.sampled.AudioFormat; 
import javax.sound.sampled.AudioInputStream; 
import javax.sound.sampled.AudioSystem; 
import javax.sound.sampled.Clip; 
import javax.sound.sampled.DataLine; 
import javax.sound.sampled.LineUnavailableException; 
import javax.sound.sampled.UnsupportedAudioFileException; 

import javafx.beans.property.ReadOnlyBooleanProperty; 
import javafx.beans.property.SimpleBooleanProperty; 
import javafx.beans.property.SimpleObjectProperty; 
import javafx.scene.media.AudioClip; 
import javafx.scene.media.Media; 
import javafx.scene.media.MediaPlayer; 

/** 
* 
* this class is to optimize sound pattern playing 
* 
* reasoning: 
* - audio clips are for short repetitive files that don't need live changes to balance 
* - media players are for long files that will have the ability to swing with the visual patted 
* by updating the balance whenever needed (every few updates) 
*/ 
public class AudioPlayer
{
    /** The backend actually driving playback. */
    public enum Controller{
        MEDIA_PLAYER, AUDIO_CLIP;
    }

    /**
     * this class is to help other classes keep track of this particular state of the AudioPlayer
     */
    public class ControllerProperty extends SimpleObjectProperty<Controller>{
        SimpleBooleanProperty isMediaPlayerProperty = new SimpleBooleanProperty();

        @Override
        public void set(Controller newValue) {
            super.set(newValue);
            // mirror the enum into a boolean convenience property
            if (newValue == Controller.MEDIA_PLAYER)
                isMediaPlayerProperty.set(true);
            else
                isMediaPlayerProperty.set(false);
        }

        public ReadOnlyBooleanProperty isMediaPlayerProperty() {
            return isMediaPlayerProperty;
        }
    }

    // different controllers used (exactly one of audioClip / mediaPlayer is active)
    private Media media;
    private MediaPlayer mediaPlayer;
    private AudioClip audioClip;

    // controllerProperty property indicator
    private ControllerProperty controllerProperty = new ControllerProperty();

    private boolean mediaDonePlaying = true;
    private double durationMillis;

    /**
     * Constructor. Picks the backend: short files whose duration can be
     * determined (&lt;= 400 ms) use a low-latency {@link AudioClip}; long files
     * or unsupported formats fall back to a {@link MediaPlayer}.
     *
     * @param srcUrl url string of the audio source
     */
    public AudioPlayer(String srcUrl) {
        boolean formatSupported = true;
        try {
            durationMillis = getLength(srcUrl);
        } catch (IOException | LineUnavailableException e) {
            e.printStackTrace();
        } catch (UnsupportedAudioFileException e) {
            formatSupported = false;
        }
        // if file is long or format unsupported (not one of these: AudioSystem.getAudioFileTypes())
        // BUGFIX: short-circuit || instead of bitwise | for boolean logic
        if (durationMillis > 400 || !formatSupported){
            media = new Media(srcUrl);
            mediaPlayer = new MediaPlayer(media);
            controllerProperty.set(Controller.MEDIA_PLAYER);
            mediaPlayer.setOnEndOfMedia(() -> mediaDonePlaying = true);
        }
        else {
            audioClip = new AudioClip(srcUrl);
            controllerProperty.set(Controller.AUDIO_CLIP);
        }
    }

    /**
     * gets the audio duration of the provided source in milliseconds
     *
     * BUGFIX: the original obtained an unopened {@link Clip} and read its
     * buffer size, which is undefined before {@code open()} and in any case is
     * not the file length; it also leaked the {@link AudioInputStream}. The
     * duration is simply frameLength / frameRate (seconds), scaled to ms.
     *
     * @param path url string representation of the path
     * @return the length in milliseconds
     * @throws IOException if the source cannot be read
     * @throws UnsupportedAudioFileException if the format is not supported or
     *         the frame count/rate is unspecified
     * @throws LineUnavailableException kept for interface compatibility
     */
    public static double getLength(String path) throws IOException, UnsupportedAudioFileException, LineUnavailableException
    {
        try (AudioInputStream stream = AudioSystem.getAudioInputStream(new URL(path))) {
            AudioFormat format = stream.getFormat();
            long frames = stream.getFrameLength();
            float frameRate = format.getFrameRate();
            if (frames == AudioSystem.NOT_SPECIFIED || frameRate <= 0) {
                throw new UnsupportedAudioFileException("cannot determine duration of " + path);
            }
            return frames / (double) frameRate * 1000.0;
        }
    }

    /** Stops playback on whichever backend is active. */
    public void stop(){
        switch (controllerProperty.get())
        {
            case AUDIO_CLIP:
                if (audioClip != null)
                    audioClip.stop();
                break;

            case MEDIA_PLAYER:
                if (mediaPlayer != null && media != null){
                    mediaPlayer.stop();
                    mediaDonePlaying = true;
                }
                break;
        }
    }

    /** Starts playback; an already-playing AudioClip is not retriggered. */
    public void play(){
        switch (controllerProperty.get())
        {
            case AUDIO_CLIP:
                if (audioClip != null)
                    if (!audioClip.isPlaying()){
                        audioClip.play();
                    }
                break;

            case MEDIA_PLAYER:
                if (mediaPlayer != null && media != null){
                    mediaPlayer.play();
                    mediaDonePlaying = false;
                }
                break;
        }
    }

    /** Pauses playback; AudioClip cannot pause, so it is stopped instead. */
    public void pause(){
        switch (controllerProperty.get())
        {
            case AUDIO_CLIP:
                if (audioClip != null)
                    audioClip.stop();
                break;

            case MEDIA_PLAYER:
                if (mediaPlayer != null && media != null)
                    mediaPlayer.pause();
                break;
        }
    }

    /**
     * sets the balance of the player, if the controller is an {@link AudioClip}, the balance is
     * updated at the next play cycle, if the controller is a {@link MediaPlayer} the balance is
     * updated at the next time the {@link MediaPlayer} has Status.READY (read
     * {@link MediaPlayer#setBalance(double)} for more details)
     * @param balance left/right balance in [-1, 1]
     */
    public void setBalance(double balance){
        switch (controllerProperty.get())
        {
            case AUDIO_CLIP:
                if (audioClip != null)
                    audioClip.setBalance(balance);
                break;

            case MEDIA_PLAYER:
                if (mediaPlayer != null && media != null)
                    mediaPlayer.setBalance(balance);
                break;
        }
    }

    /** @return the source url of the active backend, or null if none is set */
    public String getSource(){
        switch (controllerProperty.get())
        {
            case AUDIO_CLIP:
                if (audioClip != null)
                    return audioClip.getSource();
                break;

            case MEDIA_PLAYER:
                if (mediaPlayer != null && media != null)
                    return media.getSource();
                break;
        }
        return null;
    }

    /**
     * @return if the file is done playing
     * @throws IllegalStateException if no backend has been initialized
     */
    public boolean isDonePlaying(){
        switch (controllerProperty.get())
        {
            case AUDIO_CLIP:
                if (audioClip != null)
                    return !audioClip.isPlaying();
                break;

            case MEDIA_PLAYER:
                if (mediaPlayer != null && media != null)
                    return mediaDonePlaying;
                break;
        }
        throw new IllegalStateException("Internal Error");
    }

    /** @return the observable backend indicator */
    public ControllerProperty controllerProperty() {
        return controllerProperty;
    }
}
0

In diesem Fall sollten Sie besser die Java Sound API verwenden. Es ist bereits Teil der Standard-API. Sie haben erwähnt, dass Sie sehr kurze Audiosignale haben. So können Sie es im Speicher zwischenspeichern und dann mehrmals in den Umriss schreiben. Auf den ersten Blick scheint es, dass Pan die richtige Wahl anstelle von Balance sein könnte. Das folgende Beispiel zeigt, wie Balance und Pan auf Maximum bzw. Minimum geschaltet werden. Die Java Sound API arbeitet nativ mit WAVE und AIFF. Wenn Sie ein anderes Dateiformat haben, müssen Sie sich die FormatConversionProvider von Java Sound API ansehen, z. B. mp3spi und vorbisspi.

import java.io.File; 
import java.io.IOException; 

import javax.sound.sampled.AudioFormat; 
import javax.sound.sampled.AudioInputStream; 
import javax.sound.sampled.AudioSystem; 
import javax.sound.sampled.DataLine; 
import javax.sound.sampled.FloatControl; 
import javax.sound.sampled.LineUnavailableException; 
import javax.sound.sampled.SourceDataLine; 
import javax.sound.sampled.UnsupportedAudioFileException; 

public class Main {

    /**
     * Plays a short WAVE file several times in a row, flipping balance and pan
     * between full left and full right on each iteration.
     *
     * @param args args[0] must be the path of the wave file to play
     */
    public static void main(String[] args) {

        // BUGFIX: the original referenced an undefined variable yourWaveFilePath
        // and did not compile; take the path from the command line instead.
        if (args.length < 1) {
            System.err.println("usage: java Main <wave-file>");
            return;
        }

        try {
            int loops = 20;

            File waveFile = new File(args[0]);

            // BUGFIX: try-with-resources so the stream is closed on all paths
            try (AudioInputStream stream = AudioSystem.getAudioInputStream(waveFile)) {
                AudioFormat format = stream.getFormat();

                // reading complete audio file into memory
                // BUGFIX: read() may return fewer bytes than requested, so loop
                // until the buffer is full or EOF is reached.
                byte[] frames = new byte[format.getFrameSize() * (int) stream.getFrameLength()];
                int filled = 0;
                while (filled < frames.length) {
                    int n = stream.read(frames, filled, frames.length - filled);
                    if (n < 0) {
                        break;
                    }
                    filled += n;
                }

                DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, format);
                SourceDataLine line = (SourceDataLine) AudioSystem.getLine(lineInfo);
                line.open(format);
                line.start();

                FloatControl balance = (FloatControl) line.getControl(FloatControl.Type.BALANCE);
                FloatControl pan = (FloatControl) line.getControl(FloatControl.Type.PAN);

                for (int i = 0; i < loops; i++) {
                    // switching balance and pan with every iteration
                    if (i % 2 == 0) {
                        balance.setValue(balance.getMinimum());
                        pan.setValue(pan.getMinimum());
                    } else {
                        balance.setValue(balance.getMaximum());
                        pan.setValue(pan.getMaximum());
                    }

                    // playing the audio data actually read (blocks while buffering)
                    line.write(frames, 0, filled);

                    System.out.println("iteration: " + i + ", balance: " + balance.getValue() + ", pan: " + pan.getValue());
                }

                line.drain();
                line.close();
            }

        } catch (UnsupportedAudioFileException | IOException | LineUnavailableException e) {
            e.printStackTrace();
        }
    }

}