Wenn Du das nur für YouTube brauchst... da gibt es Browser Plugins die das für Dich automatisch erledigen, ohne den Umweg das selbst aufnehmen zu müssen.
Zur Aufnahme-App:
Ja, das wollte ich vor Wochen auch mal machen... ich kann mir eine Device aussuchen und dann auch ganz gut aufnehmen.
Der Haken an der Sache: Man braucht unter Mac die App LineIn. Man muss die Systemausgabe auf "Soundflower (2ch)" stellen und dann mit LineIn von Soundflower (2ch) auf die Systemausgabe routen; anschließend kann man in der unten stehenden Java-App den Channel Soundflower (2ch) wählen und das, was man hört, aufnehmen.
Wenn einer einen Weg weiß, ohne diese LineIn-App auszukommen, bitte hier die Lösung posten.
Wenn einer meinen untenstehenden Code noch mit einer Wave-Anzeige (also Soundfile → Grafikanzeige der Waveform) erweitern könnte, wäre ich unendlich dankbar.
[Java]import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import javax.sound.sampled.*;
/**
 * Small Swing demo that captures audio from a selected mixer (e.g. the
 * Soundflower loopback device on macOS) into an in-memory buffer and plays
 * it back. Three buttons — Capture, Stop, Play — drive the lifecycle.
 */
public class Capture extends JFrame {

    // volatile: written by the Swing event thread (Stop button) and polled by
    // the capture worker thread; without it the worker may never observe the
    // update and the recording loop could spin forever.
    protected volatile boolean running;

    // Raw PCM bytes recorded by captureAudio(); consumed by playAudio().
    ByteArrayOutputStream out;

    /** Builds the three-button UI and wires the button actions. */
    public Capture() {
        super("Capture Sound Demo");
        setDefaultCloseOperation(EXIT_ON_CLOSE);
        Container content = getContentPane();

        final JButton capture = new JButton("Capture");
        final JButton stop = new JButton("Stop");
        final JButton play = new JButton("Play");
        capture.setEnabled(true);
        stop.setEnabled(false);
        play.setEnabled(false);

        // Capture: disable itself, enable Stop, start the recording thread.
        ActionListener captureListener = new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                capture.setEnabled(false);
                stop.setEnabled(true);
                play.setEnabled(false);
                captureAudio();
            }
        };
        capture.addActionListener(captureListener);
        content.add(capture, BorderLayout.NORTH);

        // Stop: signal the capture loop to finish and allow playback.
        ActionListener stopListener = new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                capture.setEnabled(true);
                stop.setEnabled(false);
                play.setEnabled(true);
                running = false;
            }
        };
        stop.addActionListener(stopListener);
        content.add(stop, BorderLayout.CENTER);

        // Play: replay whatever was recorded.
        ActionListener playListener = new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                playAudio();
            }
        };
        play.addActionListener(playListener);
        content.add(play, BorderLayout.SOUTH);
    }

    /**
     * @return the PCM format used for recording: 44.1 kHz, 16-bit, stereo,
     *         signed, little-endian.
     */
    private AudioFormat getInputFormat() {
        float sampleRate = 44100;
        int sampleSizeInBits = 16;
        int channels = 2;
        boolean signed = true;
        boolean bigEndian = false;
        return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    }

    /**
     * @return the PCM format used for playback; must match the recording
     *         format so the captured bytes are interpreted correctly.
     */
    private AudioFormat getOutputFormat() {
        float sampleRate = 44100;
        int sampleSizeInBits = 16;
        int channels = 2;
        boolean signed = true;
        boolean bigEndian = false;
        return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    }

    /**
     * Opens a TargetDataLine on the mixer at index 1 (assumed to be the
     * Soundflower device — verify against the listing printed by
     * {@link #printMixerLines(Mixer)}) and records into {@link #out} on a
     * background thread until {@code running} is cleared by the Stop button.
     */
    private void captureAudio() {
        final Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
        // Guard the hard-coded index instead of throwing an opaque AIOOBE.
        if (mixerInfo.length < 2) {
            System.err.println("Expected mixer at index 1 (Soundflower) not found");
            return;
        }
        // NOTE(review): index 1 is machine-specific — confirm it really is the
        // Soundflower (2ch) device on the listing below.
        Mixer mixer = AudioSystem.getMixer(mixerInfo[1]);
        printMixerLines(mixer);
        try {
            final AudioFormat format = getInputFormat();
            DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
            // * Line-In Eingang *
            //final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
            final TargetDataLine line = (TargetDataLine) mixer.getLine(info);
            line.open(format);
            line.start();
            Runnable runner = new Runnable() {
                // One second of audio per read: sampleRate frames * frameSize bytes.
                int bufferSize = (int) format.getSampleRate() * format.getFrameSize();
                byte buffer[] = new byte[bufferSize];

                public void run() {
                    out = new ByteArrayOutputStream();
                    running = true;
                    try {
                        while (running) {
                            int count = line.read(buffer, 0, buffer.length);
                            if (count > 0) {
                                out.write(buffer, 0, count);
                            }
                        }
                        out.close();
                    } catch (IOException e) {
                        System.err.println("I/O problems: " + e);
                        System.exit(-1);
                    } finally {
                        // Release the capture line so the device can be reopened.
                        line.stop();
                        line.close();
                    }
                }
            };
            Thread captureThread = new Thread(runner);
            captureThread.start();
        } catch (LineUnavailableException e) {
            System.err.println("Line unavailable: " + e);
            System.exit(-2);
        }
    }

    /**
     * Prints every mixer known to the audio system and, for each one, the
     * source lines it offers. Useful for finding the Soundflower index.
     * The {@code mixer} parameter is only reused as a scratch local.
     */
    public void printMixerLines(Mixer mixer) {
        final Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
        for (int i = 0; i < mixerInfo.length; i++) {
            // BUG FIX: the original called getName()/getMixer() on the whole
            // array (the "[i]" was lost), which does not compile.
            String mixerName = mixerInfo[i].getName();
            mixer = AudioSystem.getMixer(mixerInfo[i]);
            System.out.println("Mixer[" + i + "]: \"" + mixerName + "\" will be used");
            for (Line.Info thisLineInfo : mixer.getSourceLineInfo()) {
                try {
                    Line thisLine = mixer.getLine(thisLineInfo);
                    System.out.println("Line available: " + thisLine.getLineInfo().toString());
                } catch (LineUnavailableException e) {
                    System.out.println("Line unavailable!");
                }
            }
        }
    }

    /**
     * Plays the recorded bytes through the default SourceDataLine, boosting
     * the master gain to +13 dB (clamped to the line's maximum) when the
     * line supports a gain control.
     */
    private void playAudio() {
        // Nothing to play before the first capture completed.
        if (out == null) {
            System.err.println("Nothing recorded yet");
            return;
        }
        try {
            byte audio[] = out.toByteArray();
            InputStream input = new ByteArrayInputStream(audio);
            final AudioFormat format = getOutputFormat();
            final AudioInputStream ais =
                    new AudioInputStream(input, format, audio.length / format.getFrameSize());
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
            final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
            line.open(format);
            line.start();
            /*
            Controller: Master Gain with current value: 0.0 dB (range: -80.0 - 13.9794)
            Controller: Mute Control with current value: False
            Controller: Pan with current value: 0.0 (range: -1.0 - 1.0)
            Controller: Sample Rate with current value: 44100.0 FPS (range: 0.0 - 48000.0)
            */
            // BUG FIX: the original printed ctrl.toString() — the array
            // reference — instead of each individual control.
            for (Control c : line.getControls()) {
                System.err.println("Controller: " + c.toString());
            }
            // BUG FIX: ctrl[0] is not guaranteed to be the gain control; look
            // it up by type and skip the boost if the line does not offer it.
            if (line.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
                FloatControl volCtrl =
                        (FloatControl) line.getControl(FloatControl.Type.MASTER_GAIN);
                volCtrl.setValue(Math.min(13f, volCtrl.getMaximum()));
            }
            Runnable runner = new Runnable() {
                // One second of audio per write, mirroring the capture side.
                int bufferSize = (int) format.getSampleRate() * format.getFrameSize();
                byte buffer[] = new byte[bufferSize];

                public void run() {
                    try {
                        int count;
                        while ((count = ais.read(buffer, 0, buffer.length)) != -1) {
                            if (count > 0) {
                                line.write(buffer, 0, count);
                            }
                        }
                        line.drain();
                        line.close();
                    } catch (IOException e) {
                        System.err.println("I/O problems: " + e);
                        System.exit(-3);
                    }
                }
            };
            Thread playThread = new Thread(runner);
            playThread.start();
        } catch (LineUnavailableException e) {
            System.err.println("Line unavailable: " + e);
            System.exit(-4);
        }
    }

    public static void main(String args[]) {
        JFrame frame = new Capture();
        frame.pack();
        // BUG FIX: JFrame.show() has been deprecated since Java 1.5.
        frame.setVisible(true);
    }
}
[/Java]