View Single Post
Old 24th June 2013, 12:34   #1
Smelter
Major Dude
 
Smelter's Avatar
 
Join Date: Jan 2004
Posts: 1,141
screen/mic capture xuggler

Several years late, but so what. I'm enjoying myself.

code:


/**
 * Proof-of-concept screen + microphone recorder built on Xuggler.
 *
 * <p>On construction it opens "audio.nsv" for the muxed NSV output, then runs
 * {@link #openAudio()}, which does all the work in one long loop:
 * <ul>
 *   <li>captures PCM audio from the default {@code TargetDataLine}
 *       (44.1 kHz, 16-bit, stereo) and encodes it to AAC;</li>
 *   <li>paces screen captures (320x240 via {@link Robot}) against the audio
 *       clock, encoding each grabbed frame to H.264;</li>
 *   <li>interleaves the encoded audio/video into hand-built NSV frames
 *       (project-local {@code NSVStream}/{@code NSVFrame} classes) written to
 *       "audio.nsv", while also writing the encoded audio packets into a
 *       Xuggler container named "video.h264".</li>
 * </ul>
 *
 * <p>NOTE(review): this is experimental/demo code — exceptions are printed and
 * swallowed, native Xuggler objects are freed with explicit {@code delete()}
 * calls, and the loop hard-stops after 700 NSV frames.
 */
public class XugglerTest {

// Running total of raw PCM bytes read from the microphone. (Field name has a
// typo — "audtio" — kept as-is since renaming would change the interface.)
// Divided by 4 elsewhere to convert bytes -> sample frames
// (2 channels x 2 bytes per sample).
private int audtioTime;
// Xuggler output container for the encoded audio packets ("video.h264").
private IContainer outContainer = null;
// Set to 44100/1 in openAudio(); used as the audio coder's "frame rate".
private IRational frameRate = null;
// NOTE(review): static for no apparent reason — only this instance uses it.
static private IStream audioStream = null;
private IStreamCoder audioStreamCoder= null;
// Accumulates encoded AAC bytes between NSV frames; drained into each
// NSVFrame's aud_data and then reset.
ByteArrayOutputStream buffer = new ByteArrayOutputStream();

// Unused field — presumably left over from an earlier revision.
File toWrite ;

// Destination for the muxed NSV bitstream ("audio.nsv").
private BufferedOutputStream output = null;

/**
 * Opens the NSV output file and immediately starts capturing/encoding.
 * NOTE(review): if the FileOutputStream cannot be created, {@code output}
 * stays null; the guarded writes in openAudio() tolerate that, but the
 * unconditional {@code output.close()} at the end of openAudio() will then
 * throw a NullPointerException.
 */
XugglerTest(){
try {
output = new BufferedOutputStream(new FileOutputStream("audio.nsv"));
} catch (FileNotFoundException e) {
// Best-effort: log and continue with output == null (see NOTE above).
e.printStackTrace();
}

openAudio();

}

// NSV muxing state (project-local classes, not part of Xuggler).
private NSVStream nsv;
private NSVStreamConfig config;
// Number of NSV frames emitted so far; the capture loop stops at 700.
private int numFrames = 0;
// Video stream/coder (H.264) inside outContainer.
private IStream outStream;
private IStreamCoder outStreamCoder;
// Wall-clock time of the first encoded video frame, in ms; -1 until set.
private long firstTimeStamp=-1;



/**
 * Sets up the audio (AAC) and video (H.264) encoders, opens the microphone
 * line, then loops: read PCM, encode audio, and whenever the video clock
 * lags the audio clock, grab/encode a screen frame and emit one NSV frame.
 * Runs until 700 NSV frames have been written, then closes the NSV output.
 * NOTE(review): despite the name, this method drives the entire capture,
 * not just audio setup.
 */
public void openAudio(){

Robot robot = null;
try {
robot = new Robot();
} catch (AWTException e2) {
e2.printStackTrace();
}
final Toolkit toolkit = Toolkit.getDefaultToolkit();

// Capture region: top-left 320x240 of the screen (matches the NSV config
// created later).
final Rectangle screenBounds = new Rectangle(0,0,320,240);




// NOTE(review): despite being stored in "frameRate", 44100/1 is the audio
// *sample* rate; Xuggler audio coders accept it as their frame rate.
frameRate = IRational.make(44100,1);

outContainer = IContainer.make();
String outfile = "video.h264";
int retval = outContainer.open(outfile, IContainer.Type.WRITE, null);
if (retval < 0)
throw new RuntimeException("could not open output file");

// Let Xuggler pick the audio codec from the ".aac" extension.
ICodec audio = ICodec.guessEncodingCodec(null, null, "video.aac", null,
ICodec.Type.CODEC_TYPE_AUDIO);
if (audio == null)
throw new RuntimeException("could not guess audio codec");


// Audio stream: AAC, 44.1 kHz, stereo, signed 16-bit, 64 kbit/s.
audioStream = outContainer.addNewStream(audio);
audioStreamCoder = audioStream.getStreamCoder();
audioStreamCoder.setFrameRate(frameRate);
audioStreamCoder.setSampleFormat(Format.FMT_S16);
audioStreamCoder.setSampleRate(44100);
audioStreamCoder.setBitRate(64000);
audioStreamCoder.setChannels(2);
retval=audioStreamCoder.open(null,null);
if (retval < 0)
// NOTE(review): "encder" typo in this runtime message — left unchanged
// here; fixing it would be a behavior (string) change.
throw new RuntimeException("could not open audio encder");


// Video stream (index-0 overload of addNewStream; codec set explicitly
// below rather than guessed).
outStream = outContainer.addNewStream(0);
outStreamCoder = outStream.getStreamCoder();



ICodec codec = ICodec.findEncodingCodec(ICodec.ID.CODEC_ID_H264);
//outStreamCoder.setNumPicturesInGroupOfPictures(30);
outStreamCoder.setCodec(codec);

outStreamCoder.setBitRate(250000);
outStreamCoder.setBitRateTolerance(50000);

int width = screenBounds.width;
int height = screenBounds.height;

outStreamCoder.setPixelType(IPixelFormat.Type.YUV420P);
outStreamCoder.setHeight(height);
outStreamCoder.setWidth(width);

// Target 30 fps video; time base of 1/10,000,000 means timestamps are in
// 100 ns units.
IRational vRate = IRational.make(30);

outStreamCoder.setFrameRate(vRate);
IRational tBase = IRational.make(1,10000000);
outStreamCoder.setTimeBase(tBase);

// NOTE(review): retval from open()/writeHeader() is assigned but never
// checked, unlike the audio path above.
retval = outStreamCoder.open();

retval = outContainer.writeHeader();

// Java Sound capture format must match the encoder settings above:
// 44.1 kHz, 16-bit, stereo, signed, little-endian.
AudioFormat audioFormat = new AudioFormat(44100,
(int)16,
2,
true, /* xuggler defaults to signed 16 bit samples */
false);

TargetDataLine line = null;
DataLine.Info info = new DataLine.Info(TargetDataLine.class,
audioFormat); // format is an AudioFormat object
if (!AudioSystem.isLineSupported(info)) {
// No capture device supporting this format — silently give up.
// NOTE(review): output is never closed on this early return.
return;

}
// Obtain and open the line.
try {
line = (TargetDataLine) AudioSystem.getLine(info);
line.open(audioFormat);
} catch (LineUnavailableException ex) {
return;
}

// Begin audio capture.
line.start();

// Capture/encode loop; stops after 700 NSV frames have been emitted.
// NOTE(review): the original comment claimed a "stopped" flag set by
// another thread — no such flag exists in this class.
while ( numFrames < 700) {

// Read the next chunk of data from the TargetDataLine.
byte[] data = new byte[4096];

int sz = line.read(data, 0, data.length);
if(sz>0){

int samplesConsumed = 0;

// 1024 sample frames x 2 channels x 2 bytes = 4096 bytes, i.e. one
// full read() exactly fills this buffer.
IAudioSamples pSamples=IAudioSamples.make(1024, 2);

pSamples.put(data, 0, 0, sz);


audtioTime += (sz);// running total of captured PCM bytes
System.out.println("number of capture "+sz);

// Audio clock in milliseconds: bytes/4 = sample frames, and at
// 44100 frames/s there are 44.1 frames per millisecond.
double sAudioTime = (audtioTime/4)/44.1000;


// Timestamp (last arg) is the cumulative sample-frame count.
pSamples.setComplete(true, sz/4, 44100, 2, Format.FMT_S16, audtioTime/4);

// Feed the samples to the AAC encoder until it has consumed them all;
// each pass may or may not produce a complete packet.
while(samplesConsumed<pSamples.getNumSamples()){

IPacket packet= IPacket.make();

samplesConsumed += audioStreamCoder.encodeAudio(packet, pSamples, samplesConsumed);

System.out.println("samples Consumed "+samplesConsumed);


if (packet.isComplete()){

System.out.println("packet complete ");


// Copy the encoded AAC bytes out of the native packet and stash
// them in 'buffer' until the next NSV frame is emitted.
int frameLength=packet.getSize();

byte fData[]=new byte[frameLength];

packet.get(0, fData, 0, frameLength);

try {

buffer.write(fData);
buffer.flush();
System.out.println("buffer writen");
} catch (IOException e1) {
// ByteArrayOutputStream.write does not actually throw; log anyway.
e1.printStackTrace();
}


// Lazily create the NSV stream on the first complete audio packet.
// Fourccs are "H264"/"AAC " (NSV fourccs are 4 chars, hence the
// trailing space), 320x240 @ 30 fps.
if(this.nsv==null){
System.out.println("make nsv stream "+new String(audioStreamCoder.getCodecTagArray()));
config = NSVStream.create("H264", "AAC ", 320, 240, 30.0);
nsv = NSVStream.create(config);

}

// NOTE(review): delta is computed but never used — pacing below is
// driven purely by the audio clock, not wall-clock time.
long delta = System.currentTimeMillis() - nsv.config.start_time;

System.out.println("sAudioTime "+sAudioTime+ " s " + ((1000.0/nsv.config.frame_rate) * nsv.config.total_frames ) );
// A/V pacing: emit a video frame whenever the video clock
// (frames emitted so far x ms-per-frame) lags the audio clock.
if( ((1000.0/nsv.config.frame_rate)) * ( (double)nsv.config.total_frames) < sAudioTime ){

System.out.println("frame");

BufferedImage screen = robot.createScreenCapture(screenBounds);
// convert to the right image type
BufferedImage bgrScreen = convertToType(screen, BufferedImage.TYPE_3BYTE_BGR);
IPacket p = encodeImage(bgrScreen);




// Build one NSV frame carrying this video packet (if the encoder
// produced one) plus all AAC bytes accumulated since the last frame.
NSVFrame newFrame = NSVStream.initiate(nsv, NSVStream.NSV_SYNC_DWORD);
byte vBUf[]= null;

if(p!=null){
System.out.println(" v packet");
vBUf= new byte[p.getSize()];
p.get(0, vBUf,0,vBUf.length);
// Free the native packet now that its bytes are copied out.
p.delete();
}
if(vBUf!=null){


newFrame.vid_len=vBUf.length;
newFrame.vid_data = vBUf;
}

// Drain the pending encoded audio into this frame.
byte cRaw[]=buffer.toByteArray();

if(cRaw.length>0){
buffer.reset();
newFrame.aud_len=cRaw.length;
newFrame.aud_data = cRaw;
}
nsv.config.total_frames++;

numFrames++;

if(output!=null){

try {
output.write(newFrame.toBitStream());
output.flush();
} catch (IOException e) {

e.printStackTrace();
}
}
}
// Also write the audio packet into the Xuggler container
// ("video.h264"). NOTE(review): writeTrailer() is never called, so
// that container is likely left unfinished.
outContainer.writePacket(packet);
}
packet.delete();
}
pSamples.delete();
}
}
//uVox.close();
try {
// NOTE(review): throws NullPointerException if the constructor failed to
// open "audio.nsv" (output == null) — see constructor note.
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
 * Encodes one screen grab to H.264.
 *
 * @param originalImage the captured frame; re-converted to TYPE_3BYTE_BGR
 *        here even though the caller already converts — harmless but
 *        redundant (convertToType returns the same instance if the type
 *        already matches).
 * @return a complete encoded packet, or null if the encoder buffered the
 *         frame (no packet yet) or encoding failed. Caller owns the returned
 *         packet and must delete() it.
 */
public IPacket encodeImage(BufferedImage originalImage)
{
BufferedImage worksWithXugglerBufferedImage = convertToType(originalImage,
BufferedImage.TYPE_3BYTE_BGR);
IPacket packet = IPacket.make();

// Timestamps are wall-clock time relative to the first encoded frame.
long now = System.currentTimeMillis();
if (firstTimeStamp == -1)
firstTimeStamp = now;

IConverter converter = null;
try
{
converter = ConverterFactory.createConverter(
worksWithXugglerBufferedImage, IPixelFormat.Type.YUV420P);
}
catch (UnsupportedOperationException e)
{
// NOTE(review): logged but not rethrown — the converter.toPicture call
// below will then NPE on converter == null.
System.out.println(e.getMessage());
e.printStackTrace(System.out);
}

long timeStamp = (now - firstTimeStamp) * 1000; // convert to microseconds

IVideoPicture outFrame = converter.toPicture(worksWithXugglerBufferedImage, timeStamp);

// Quality 0 = let the encoder choose.
outFrame.setQuality(0);

int retval = outStreamCoder.encodeVideo(packet, outFrame, 0);

if (retval < 0)
return null;
if (packet.isComplete()){
return packet;
}
// Incomplete packet: free it and report "no frame yet".
packet.delete();
return null;
}
/**
 * Returns an image of the requested BufferedImage type, converting by
 * redrawing only when necessary.
 *
 * @param sourceImage image to convert
 * @param targetType  a BufferedImage.TYPE_* constant
 * @return sourceImage itself if it already has targetType, otherwise a new
 *         image of targetType with sourceImage drawn into it
 */
public static BufferedImage convertToType(BufferedImage sourceImage,
int targetType)
{
BufferedImage image;

// if the source image is already the target type, return the source image

if (sourceImage.getType() == targetType)
image = sourceImage;

// otherwise create a new image of the target type and draw the new
// image

else
{
image = new BufferedImage(sourceImage.getWidth(),
sourceImage.getHeight(), targetType);
image.getGraphics().drawImage(sourceImage, 0, 0, null);
}

return image;
}

/**
 * Entry point: constructing the object starts recording immediately and
 * blocks until 700 NSV frames have been captured.
 *
 * @param args unused
 * @throws AWTException declared but never actually thrown here — Robot
 *         construction happens inside openAudio() and catches it itself
 */
public static void main(String[] args) throws AWTException {

XugglerTest xt = new XugglerTest();
}

}



Smelter is offline   Reply With Quote